Merge ../nom.deleteme

This commit is contained in:
Michael Muller 2019-01-09 11:06:35 -05:00
commit 7920a05bf8
464 changed files with 14044 additions and 4321 deletions

6
.gcloudignore Normal file
View file

@ -0,0 +1,6 @@
docs/**
python/**
bazel-*/**
gradle/**/build/**
gradle/**/WEB-INF/**
gradle/.*/**

7
.gitignore vendored
View file

@ -80,10 +80,9 @@ autogenerated/
######################################################################
# Gradle Ignores
# We don't want to ignore the gradle jar files
!/gradle/gradle/wrapper/**/*.jar
/gradle/.gradle
/gradle/gradle
/gradle/gradlew
/gradle/gradlew.bat
/gradle/**/WEB-INF
/gradle/**/build

View file

@ -33,13 +33,8 @@ env:
# quota)
TERM=dumb
install:
# Install a specific gradle version first, default gradle can't deal with
# our gradle build scripts.
- wget http://services.gradle.org/distributions/gradle-4.10.2-bin.zip && unzip gradle-4.10.2-bin.zip
# Specialize gradle build to use an up-to-date gradle and the /gradle
# directory.
# The "travis_wait 45" lets our build spend up to 45 minutes without writing
# output, instead of the default 10.
script: cd gradle && travis_wait 45 ../gradle-4.10.2/bin/gradle build
script: cd gradle && chmod 755 ./gradlew && travis_wait 45 ./gradlew build

View file

@ -1,6 +1,8 @@
# Nomulus
![Build Status](https://storage.googleapis.com/domain-registry-github-build-status/github-ci-status.png)
| Bazel | Gradle |
|-------|--------|
|![Build Status](https://storage.googleapis.com/domain-registry-github-build-status/github-ci-status.png)|[![Build Status](https://travis-ci.org/google/nomulus.svg?branch=master)](https://travis-ci.org/google/nomulus)|
![Nomulus logo](./nomulus-logo.png)

View file

@ -1,5 +1,7 @@
workspace(name = "domain_registry")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
# https://github.com/bazelbuild/rules_closure/releases/tag/0.8.0
http_archive(
name = "io_bazel_rules_closure",
@ -14,6 +16,7 @@ load("@io_bazel_rules_closure//closure:defs.bzl", "closure_repositories")
closure_repositories(
omit_com_google_auto_factory = True,
omit_com_google_protobuf = True,
omit_com_google_code_findbugs_jsr305 = True,
omit_com_google_guava = True,
omit_com_ibm_icu_icu4j = True,
@ -26,10 +29,11 @@ load("//java/google/registry:repositories.bzl", "domain_registry_repositories")
domain_registry_repositories()
# Setup docker bazel rules
git_repository(
http_archive(
name = "io_bazel_rules_docker",
remote = "https://github.com/bazelbuild/rules_docker.git",
tag = "v0.4.0",
sha256 = "29d109605e0d6f9c892584f07275b8c9260803bf0c6fcb7de2623b2bedc910bd",
strip_prefix = "rules_docker-0.5.1",
urls = ["https://github.com/bazelbuild/rules_docker/archive/v0.5.1.tar.gz"],
)
load(
@ -38,11 +42,14 @@ load(
container_repositories = "repositories",
)
# This is NOT needed when going through the language lang_image
# "repositories" function(s).
container_repositories()
container_pull(
name = "java_base",
registry = "gcr.io",
repository = "distroless/java",
digest = "sha256:780ee786a774a25a4485f491b3e0a21f7faed01864640af7cebec63c46a0845a",
# 'tag' is also supported, but digest is encouraged for reproducibility.
digest = "sha256:8c1769cb253bdecc257470f7fba05446a55b70805fa686f227a11655a90dfe9e",
)

53
cloudbuild-nomulus.yaml Normal file
View file

@ -0,0 +1,53 @@
# To run the build locally, install cloud-build-local first.
# See: https://cloud.google.com/cloud-build/docs/build-debug-locally
# Then run:
# cloud-build-local --config=cloudbuild-nomulus.yaml --dryrun=false --substitutions TAG_NAME=[TAG] .
# This will create a docker image named gcr.io/[PROJECT_ID]/proxy:[TAG] locally.
# The PROJECT_ID is the current project name that gcloud uses.
# You can add "--push true" to have the image pushed to GCR.
#
# To manually trigger a build on GCB, run:
# gcloud builds submit --config cloudbuild-nomulus.yaml --substitutions TAG_NAME=[TAG] .
#
# To trigger a build automatically, follow the instructions below and add a trigger:
# https://cloud.google.com/cloud-build/docs/running-builds/automate-builds
steps:
# Set permissions correctly. Not sure why it is necessary, but it is.
- name: 'alpine'
args: ['chown', '-R', 'root:root', '.']
- name: 'alpine'
args: ['chmod', '-R', '777', '.']
# Clone the private repo and merge its contents.
- name: 'gcr.io/cloud-builders/gcloud'
args: ['source', 'repos', 'clone', 'nomulus-config']
- name: 'alpine'
args: ['sh', '-c', 'cp -r nomulus-config/* .']
# Build the deployment files.
- name: 'google/cloud-sdk'
args: ['./gradlew', 'stage', '-x', 'autoLintGradle']
dir: 'gradle'
# Tar the deployment files as we cannot upload directories to GCS.
- name: 'alpine'
args: ['tar', 'cvf', '../../../default.tar', '.']
dir: 'gradle/services/default/build/staged-app'
- name: 'alpine'
args: ['tar', 'cvf', '../../../pubapi.tar', '.']
dir: 'gradle/services/pubapi/build/staged-app'
- name: 'alpine'
args: ['tar', 'cvf', '../../../backend.tar', '.']
dir: 'gradle/services/backend/build/staged-app'
- name: 'alpine'
args: ['tar', 'cvf', '../../../tools.tar', '.']
dir: 'gradle/services/tools/build/staged-app'
# Tar files to upload to GCS.
artifacts:
objects:
location: 'gs://${PROJECT_ID}-deploy/${TAG_NAME}'
paths:
- 'gradle/services/default.tar'
- 'gradle/services/pubapi.tar'
- 'gradle/services/backend.tar'
- 'gradle/services/tools.tar'
timeout: 3600s
options:
machineType: 'N1_HIGHCPU_8'

37
cloudbuild-proxy.yaml Normal file
View file

@ -0,0 +1,37 @@
# To run the build locally, install cloud-build-local first.
# See: https://cloud.google.com/cloud-build/docs/build-debug-locally
# Then run:
# cloud-build-local --config=cloudbuild-proxy.yaml --dryrun=false --substitutions TAG_NAME=[TAG] .
# This will create a docker image named gcr.io/[PROJECT_ID]/proxy:[TAG] locally.
# The PROJECT_ID is the current project name that gcloud uses.
# You can add "--push true" to have the image pushed to GCR.
#
# To manually trigger a build on GCB, run:
# gcloud builds submit --config cloudbuild-proxy.yaml --substitutions TAG_NAME=[TAG] .
#
# To trigger a build automatically, follow the instructions below and add a trigger:
# https://cloud.google.com/cloud-build/docs/running-builds/automate-builds
steps:
# Set permissions correctly. Not sure why it is necessary, but it is.
- name: 'alpine'
args: ['chown', '-R', 'root:root', '.']
- name: 'alpine'
args: ['chmod', '-R', '777', '.']
# Clone the private repo and merge its contents.
- name: 'gcr.io/cloud-builders/gcloud'
args: ['source', 'repos', 'clone', 'nomulus-config']
- name: 'alpine'
args: ['sh', '-c', 'cp -r nomulus-config/* .']
# Build the deploy jar.
- name: 'openjdk:8-slim'
args: ['./gradlew', ':proxy:deployJar', '-x', 'autoLintGradle']
dir: 'gradle'
# Build the docker image.
- name: 'gcr.io/cloud-builders/docker'
args: ['build', '--tag', 'gcr.io/${PROJECT_ID}/proxy:${TAG_NAME}', '.']
dir: 'gradle/proxy'
# Images to upload to GCR.
images: ['gcr.io/${PROJECT_ID}/proxy:${TAG_NAME}']
timeout: 3600s
options:
machineType: 'N1_HIGHCPU_8'

View file

@ -231,6 +231,8 @@ An EPP flow that updates a contact.
An EPP flow that allocates a new domain resource from a domain application.
Note that this flow is only run by superusers.
### Errors
@ -295,7 +297,7 @@ An EPP flow that creates a new application for a domain resource.
* Specified extension is not implemented.
* 2201
* Registrar is not authorized to access this TLD.
* Registrar must be active in order to create domains or applications.
* Registrar must be active in order to perform this operation.
* 2302
* Resource with this id already exists.
* This name has already been claimed by a sunrise applicant.
@ -556,7 +558,7 @@ An EPP flow that creates a new domain resource.
* 2201
* Only a tool can pass a metadata extension.
* Registrar is not authorized to access this TLD.
* Registrar must be active in order to create domains or applications.
* Registrar must be active in order to perform this operation.
* 2302
* Resource with this id already exists.
* 2303
@ -689,6 +691,7 @@ comes in at the exact millisecond that the domain would have expired.
* 2201
* The specified resource belongs to another client.
* Registrar is not authorized to access this TLD.
* Registrar must be active in order to perform this operation.
* 2303
* Resource with this id does not exist.
* 2304
@ -745,6 +748,7 @@ regardless of what the original expiration time was.
* 2201
* The specified resource belongs to another client.
* Registrar is not authorized to access this TLD.
* Registrar must be active in order to perform this operation.
* 2303
* Resource with this id does not exist.
* 2304
@ -907,6 +911,7 @@ new ones with the correct approval time).
* 2201
* Authorization info is required to request a transfer.
* Registrar is not authorized to access this TLD.
* Registrar must be active in order to perform this operation.
* 2202
* Authorization information for accessing resource is invalid.
* 2300
@ -1169,8 +1174,7 @@ An EPP flow for login.
* 2103
* Specified extension is not implemented.
* 2200
* GAE user id is not allowed to login as requested registrar.
* User is not logged in as a GAE user.
* GAE User can't access the requested registrar.
* Registrar certificate does not match stored certificate.
* Registrar IP address is not in stored whitelist.
* Registrar certificate not present.

View file

@ -7,8 +7,8 @@ This document covers the steps necessary to download, build, and deploy Nomulus.
You will need the following programs installed on your local machine:
* A recent version of the [Java 8 JDK][java-jdk8].
* [Bazel build system](http://bazel.io/) (version [0.17.2][bazel-version]
works as of 2018-10-03).
* [Bazel build system](http://bazel.io/) (version [0.21.0][bazel-version]
works as of 2018-12-20).
* [Google App Engine SDK for Java][app-engine-sdk], and configure aliases to
the `gcloud` and `appcfg.sh` utilities (you'll use them a lot).
* [Git](https://git-scm.com/) version control system.
@ -135,11 +135,11 @@ $ ls /path/to/app-dir/acme-registry-alpha
backend default META-INF tools
```
Now deploy the code to App Engine.
Now deploy the code to App Engine. We must provide a version string, e.g., live.
```shell
$ appcfg.sh -A acme-registry-alpha --enable_jar_splitting \
update /path/to/app-dir/acme-registry-alpha
-V live update /path/to/app-dir/acme-registry-alpha
Reading application configuration data...
Processing module default
Oct 05, 2016 12:16:59 PM com.google.apphosting.utils.config.IndexesXmlReader readConfigXml
@ -181,4 +181,4 @@ See the [first steps tutorial](./first-steps-tutorial.md) for more information.
[app-engine-sdk]: https://cloud.google.com/appengine/docs/java/download
[java-jdk8]: http://www.oracle.com/technetwork/java/javase/downloads
[bazel-version]: https://github.com/bazelbuild/bazel/releases/download/0.17.2/bazel-0.17.2-installer-linux-x86_64.sh
[bazel-version]: https://github.com/bazelbuild/bazel/releases/download/0.21.0/bazel-0.21.0-installer-linux-x86_64.sh

View file

@ -15,8 +15,6 @@ the existing Nomulus source tree.
Dependencies are mostly the same as in Bazel, with a few exceptions:
* org.slf4j:slf4j-simple is added to provide a logging implementation in
tests. Bazel does not need this.
* com.googlecode.java-diff-utils:diffutils is not included. Bazel needs it for
Truth's equality check, but Gradle works fine without it.
* jaxb 2.2.11 is used instead of 2.3 in Bazel, since the latter breaks the
@ -27,18 +25,22 @@ Dependencies are mostly the same as in Bazel, with a few exceptions:
### Notable Issues
Only single-threaded test execution is allowed, due to race conditions over
global resources, such as the local Datastore instance, or updates to the System
properties. This is a new problem with Gradle, which does not provide as much
test isolation as Bazel. We are exploring solutions to this problem.
Test suites (RdeTestSuite and TmchTestSuite) are ignored to avoid duplicate
execution of tests. Neither suite performs any shared test setup routine, so it
is easier to exclude the suite classes than individual test classes.
is easier to exclude the suite classes than individual test classes. This is the
reason why all test tasks in the :core project contain the exclude pattern
'"**/*TestCase.*", "**/*TestSuite.*"'
Since Gradle does not support hierarchical build files, all file sets (e.g.,
resources) must be declared at the top, in root project config or the
sub-project configs.
Many Nomulus tests are not hermetic: they modify global state (e.g., the shared
local instance of Datastore) but do not clean up on completion. This becomes a
problem with Gradle. In the beginning we forced Gradle to run every test class
in a new process, and incurred heavy overheads. Since then, we have fixed some
tests, and managed to divide all tests into three suites that do not have
intra-suite conflicts. We will revisit the remaining tests soon.
Note that it is unclear if all conflicting tests have been identified. More may
be exposed if test execution order changes, e.g., when new tests are added or
execution parallelism level changes.
## Initial Setup

View file

@ -2,28 +2,284 @@ buildscript {
repositories {
jcenter()
mavenCentral()
}
maven {
url 'https://plugins.gradle.org/m2/'
}
dependencies {
classpath 'com.google.cloud.tools:appengine-gradle-plugin:1.3.3'
classpath 'org.sonatype.aether:aether-api:1.13.1'
classpath 'org.sonatype.aether:aether-impl:1.13.1'
}
}
allprojects {
plugins {
id 'nebula.dependency-lock' version '7.1.0'
id 'nebula.lint' version '10.3.5'
// Config helper for annotation processors such as AutoValue and Dagger.
// Ensures that source code is generated at an appropriate location.
id 'net.ltgt.apt' version '0.19' apply false
id 'com.bmuschko.docker-java-application' version '4.0.4' apply false
}
// Provide defaults for all of the project properties.
// showAllOutput: boolean. If true, dump all test output during the build.
if (!project.hasProperty('showAllOutput')) {
ext.showAllOutput = 'false'
}
// Only do linting if the build is successful.
gradleLint.autoLintAfterFailure = false
// Paths to main and test sources.
ext.javaDir = "${rootDir}/../java"
ext.javatestsDir = "${rootDir}/../javatests"
// Tasks to deploy/stage all App Engine services
task deploy {
group = 'deployment'
description = 'Deploys all services to App Engine.'
}
task stage {
group = 'deployment'
description = 'Generates application directories for all services.'
}
subprojects {
// Skip no-op project
if (project.name == 'services') return
repositories {
jcenter()
mavenCentral()
flatDir {
// The objectify jar that comes with Nomulus.
dirs "${rootDir}/../third_party/objectify/v4_1"
}
def services = [':services:default',
':services:backend',
':services:tools',
':services:pubapi']
// Set up all of the deployment projects.
if (services.contains(project.path)) {
apply plugin: 'war'
// Set this directory before applying the appengine plugin so that the
// plugin will recognize this as an app-engine standard app (and also
// obtains the appengine-web.xml from the correct location)
project.convention.plugins['war'].webAppDirName =
"../../../java/google/registry/env/crash/${project.name}"
apply plugin: 'com.google.cloud.tools.appengine'
// Get the web.xml file for the service.
war {
webInf {
from "../../../java/google/registry/env/common/${project.name}/WEB-INF"
}
}
appengine {
deploy {
// TODO: change this to a variable.
project = 'domain-registry-crash'
}
}
dependencies {
compile project(':core')
}
rootProject.deploy.dependsOn appengineDeploy
rootProject.stage.dependsOn appengineStage
// Return early, do not apply the settings below.
return
}
apply plugin: 'java'
apply plugin: 'maven-publish'
apply plugin: 'nebula.dependency-lock'
apply plugin: 'nebula.lint'
apply plugin: 'net.ltgt.apt'
version = '1.0'
sourceCompatibility = '1.8'
targetCompatibility = '1.8'
compileJava {options.encoding = "UTF-8"}
gradleLint.rules = [
// Checks if Gradle wrapper is up-to-date
'archaic-wrapper',
// Checks for indirect dependencies with dynamic version spec. Best
// practice calls for declaring them with specific versions.
'undeclared-dependency',
'unused-dependency'
// TODO(weiminyu): enable more dependency checks
]
publishing {
repositories {
maven {
url = project.findProperty('repositoryUrl')
}
}
}
// Single version across all projects for now.
version = '1.0'
ext.getDistinctResolvedArtifacts = {
def distinctResolvedArtifacts = [:]
// Java plugin:
apply plugin: 'java'
configurations.each {
if (!it.isCanBeResolved()) {
return
}
it.resolvedConfiguration.resolvedArtifacts.each { resolvedArtifact ->
if (resolvedArtifact.id.componentIdentifier.displayName in
['project :core', 'project :proxy', 'project :util', 'project :third_party']) {
return
}
distinctResolvedArtifacts[resolvedArtifact.id.toString()] = resolvedArtifact
}
}
return distinctResolvedArtifacts
}
ext.generateDependencyPublications = {
def distinctResolvedArtifacts = project.ext.getDistinctResolvedArtifacts()
distinctResolvedArtifacts.values().eachWithIndex { resolvedArtifact, n ->
project.publishing {
publications {
"maven${n}"(MavenPublication) {
artifact(resolvedArtifact.file) {
groupId = resolvedArtifact.moduleVersion.id.group
artifactId = resolvedArtifact.moduleVersion.id.name
version = resolvedArtifact.moduleVersion.id.version
classifier = resolvedArtifact.classifier
}
}
}
}
}
}
ext.urlExists = { url ->
def connection = (HttpURLConnection) url.openConnection()
connection.setRequestMethod("HEAD")
connection.connect()
if (connection.getResponseCode() == HttpURLConnection.HTTP_OK) {
return true
} else {
return false
}
}
ext.writeMetadata = { resolvedArtifact, url, gitRepositoryPath ->
def groupId = resolvedArtifact.moduleVersion.id.group
def artifactId = resolvedArtifact.moduleVersion.id.name
def version = resolvedArtifact.moduleVersion.id.version
def relativeFileName =
[groupId, artifactId, 'README.domainregistry'].join('/')
def metadataFile = new File(gitRepositoryPath, relativeFileName)
metadataFile.parentFile.mkdirs()
def writer = metadataFile.newWriter()
writer << "Name: ${artifactId}\n"
writer << "Url: ${url}\n"
writer << "Version: ${version}\n"
writer.close()
}
// This task generates a metadata file for each resolved dependency artifact.
// The file contains the name, url and version for the artifact.
task generateDependencyMetadata {
doLast {
def distinctResolvedArtifacts = project.ext.getDistinctResolvedArtifacts()
def defaultLayout = new org.sonatype.aether.util.layout.MavenDefaultLayout()
distinctResolvedArtifacts.values().each { resolvedArtifact ->
def artifact = new org.sonatype.aether.util.artifact.DefaultArtifact(
resolvedArtifact.id.componentIdentifier.toString())
for (repository in project.repositories) {
def mavenRepository = (MavenArtifactRepository) repository
def repositoryUri = URI.create(mavenRepository.url.toString())
def artifactUri = repositoryUri.resolve(defaultLayout.getPath(artifact))
if (project.ext.urlExists(artifactUri.toURL())) {
project.ext.writeMetadata(
resolvedArtifact,
artifactUri.toURL(),
project.findProperty('privateRepository') + "/${project.name}")
break
}
}
}
}
}
if (project.name == 'third_party') return
// Path to code generated with annotation processors. Note that this path is
// chosen by the 'net.ltgt.apt' plugin, and may change if IDE-specific plugins
// are applied, e.g., 'idea' or 'eclipse'
def aptGeneratedDir = "${project.buildDir}/generated/source/apt/main"
def aptGeneratedTestDir = "${project.buildDir}/generated/source/apt/test"
def commonlyExcludedResources = ['**/*.java', '**/BUILD']
sourceSets {
main {
java {
srcDirs = [
project(':').javaDir,
aptGeneratedDir
]
}
resources {
srcDirs = [
project(':').javaDir
]
exclude commonlyExcludedResources
}
}
test {
java {
srcDirs = [
project(':').javatestsDir,
aptGeneratedTestDir
]
}
resources {
srcDirs = [
project(':').javatestsDir,
]
exclude commonlyExcludedResources
}
}
}
test {
testLogging.showStandardStreams = Boolean.parseBoolean(showAllOutput)
}
if (project.name == 'core') return
ext.relativePath = "google/registry/${project.name}"
sourceSets.each {
it.java {
include "${project.relativePath}/"
}
it.resources {
include "${project.relativePath}/"
}
}
project(':core').sourceSets.each {
it.java {
exclude "${project.relativePath}/"
}
it.resources {
exclude "${project.relativePath}/"
}
}
}

View file

@ -2,38 +2,49 @@ plugins {
id 'java-library'
}
def javaDir = "${rootDir}/../java"
def javatestsDir = "${rootDir}/../javatests"
// Path to code generated by ad hoc tasks in this project. A separate path is
// used for easy inspection.
def generatedDir = "${project.buildDir}/generated/source/custom/main"
def generatedDir = "${project.buildDir}/generated-sources"
// Tests that conflict with (mostly unidentified) members of the main test
// suite. It is unclear if they are offenders (i.e., those that pollute global
// state) or victims.
// TODO(weiminyu): identify cause and fix offending tests.
def outcastTestPatterns = [
"google/registry/batch/DeleteContactsAndHostsActionTest.*",
"google/registry/batch/RefreshDnsOnHostRenameActionTest.*",
"google/registry/flows/CheckApiActionTest.*",
"google/registry/flows/EppLifecycleHostTest.*",
"google/registry/flows/domain/DomainAllocateFlowTest.*",
"google/registry/flows/domain/DomainApplicationCreateFlowTest.*",
"google/registry/flows/domain/DomainApplicationUpdateFlowTest.*",
"google/registry/flows/domain/DomainCreateFlowTest.*",
"google/registry/flows/domain/DomainUpdateFlowTest.*",
"google/registry/tools/CreateDomainCommandTest.*",
"google/registry/tools/server/CreatePremiumListActionTest.*",
// Conflicts with WhoisActionTest
"google/registry/whois/WhoisHttpActionTest.*",
]
// Tests that conflict with members of both the main test suite and the
// outcast suite.
// TODO(weiminyu): identify cause and fix offending tests.
def fragileTestPatterns = [
"google/registry/cron/TldFanoutActionTest.*"
]
sourceSets {
main {
java {
srcDirs = [
"${javaDir}",
"${generatedDir}"
]
srcDirs += generatedDir
}
resources {
srcDirs = [
"${javaDir}"
]
exclude '**/*.java', '**/*.xjb'
exclude '**/*.xjb'
}
}
test {
java {
srcDirs = [
"${javatestsDir}",
"${generatedDir}"
]
}
resources {
srcDirs = [
"${javatestsDir}"
]
exclude '**/*.java', '**/*.xsd', '**/*.xjb'
exclude '**/*.xjb', '**/*.xsd'
}
}
}
@ -42,165 +53,200 @@ configurations {
css
jaxb
soy
// Label for all dependencies inherited from Bazel build but not used in
// either compile or testRuntime. However, they may be needed at runtime.
// TODO(weiminyu): identify runtime dependencies and remove the rest.
maybeRuntime
}
// Relevant canned dependency labels:
// - implementation: Dependencies to be included in release distribution.
// - compileOnly: Dependencies used at compile time only for production code. They will not be
// included in release.
// - testImplementation: Dependencies needed for testing only.
// Known issues:
// - The (test/)compile/runtime labels are deprecated. We continue using these
// labels due to nebula-lint.
// TODO(weiminyu): switch to api/implementation labels.
// See https://github.com/nebula-plugins/gradle-lint-plugin/issues/130 for
// issue status.
// - Nebula-lint's conflict between unused and undeclared dependency check.
// If an undeclared dependency is added, the unused-dependency check will flag
// it. For now we wrap affected dependency in gradleLint.ignore block.
// TODO(weiminyu): drop gradleLint.ignore block when issue is fixed.
// See https://github.com/nebula-plugins/gradle-lint-plugin/issues/181 for
// issue status.
dependencies {
implementation 'com.beust:jcommander:1.48'
implementation 'com.fasterxml.jackson.core:jackson-core:2.8.5'
implementation 'com.fasterxml.jackson.core:jackson-annotations:2.8.0'
implementation 'com.fasterxml.jackson.core:jackson-databind:2.8.5'
implementation 'com.google.api-client:google-api-client:1.22.0'
implementation 'com.google.api-client:google-api-client-appengine:1.22.0'
implementation 'com.google.api-client:google-api-client-jackson2:1.20.0'
implementation 'com.google.monitoring-client:metrics:1.0.4'
implementation 'com.google.monitoring-client:stackdriver:1.0.4'
implementation 'com.google.api-client:google-api-client-java6:1.20.0'
implementation 'com.google.api-client:google-api-client-servlet:1.22.0'
implementation 'com.google.apis:google-api-services-admin-directory:directory_v1-rev72-1.22.0'
implementation 'com.google.apis:google-api-services-bigquery:v2-rev325-1.22.0'
implementation 'com.google.apis:google-api-services-clouddebugger:v2-rev8-1.22.0'
implementation 'com.google.apis:google-api-services-cloudkms:v1-rev12-1.22.0'
implementation 'com.google.apis:google-api-services-cloudresourcemanager:v1-rev6-1.22.0'
implementation 'com.google.apis:google-api-services-dataflow:v1b3-rev196-1.22.0'
implementation 'com.google.apis:google-api-services-dns:v2beta1-rev6-1.22.0'
implementation 'com.google.apis:google-api-services-drive:v2-rev160-1.19.1'
implementation 'com.google.apis:google-api-services-groupssettings:v1-rev60-1.22.0'
implementation 'com.google.apis:google-api-services-monitoring:v3-rev11-1.22.0'
implementation 'com.google.apis:google-api-services-sheets:v4-rev483-1.22.0'
implementation 'com.google.apis:google-api-services-storage:v1-rev86-1.22.0'
// TODO(b/71631624): change appengine:appengine-api-1.0-sdk to testCompileOnly after
// BillingEmailUtilsTest.java is fixed.
implementation 'com.google.appengine:appengine-api-1.0-sdk:1.9.48'
implementation 'com.google.appengine:appengine-api-labs:1.9.48'
implementation 'com.google.appengine:appengine-api-stubs:1.9.48'
implementation 'com.google.appengine.tools:appengine-gcs-client:0.6'
implementation 'com.google.appengine.tools:appengine-mapreduce:0.8.5'
implementation 'com.google.appengine.tools:appengine-pipeline:0.2.13'
implementation 'com.google.appengine:appengine-tools-sdk:1.9.48'
implementation 'com.google.auth:google-auth-library-credentials:0.7.1'
implementation 'com.google.auth:google-auth-library-oauth2-http:0.7.1'
implementation 'com.google.auto:auto-common:0.8'
implementation 'com.google.auto.factory:auto-factory:1.0-beta3'
implementation 'com.google.auto.value:auto-value-annotations:1.6.2'
implementation 'com.google.cloud.bigdataoss:gcsio:1.4.5'
implementation 'com.google.cloud.bigdataoss:util:1.4.5'
implementation 'com.google.code.findbugs:jsr305:3.0.2'
implementation 'com.google.dagger:dagger:2.15'
implementation 'com.google.dagger:dagger-producers:2.15'
implementation 'com.google.errorprone:error_prone_annotations:2.1.3'
implementation 'com.google.errorprone:javac-shaded:9-dev-r4023-3'
implementation 'com.google.flogger:flogger:0.1'
implementation 'com.google.flogger:flogger-system-backend:0.1'
implementation 'com.google.gdata:core:1.47.1'
implementation 'com.google.googlejavaformat:google-java-format:1.4'
implementation 'com.google.guava:guava:25.1-jre'
implementation 'com.google.http-client:google-http-client:1.22.0'
implementation 'com.google.http-client:google-http-client-appengine:1.22.0'
implementation 'com.google.http-client:google-http-client-jackson2:1.22.0'
implementation 'com.google.oauth-client:google-oauth-client:1.22.0'
implementation 'com.google.oauth-client:google-oauth-client-appengine:1.22.0'
implementation 'com.google.oauth-client:google-oauth-client-java6:1.22.0'
implementation 'com.google.oauth-client:google-oauth-client-jetty:1.22.0'
implementation 'com.google.oauth-client:google-oauth-client-servlet:1.22.0'
implementation 'com.google.protobuf:protobuf-java:2.6.0'
implementation 'com.google.re2j:re2j:1.1'
implementation 'com.google.template:soy:2018-03-14'
implementation 'com.googlecode.charts4j:charts4j:1.3'
implementation 'com.googlecode.json-simple:json-simple:1.1.1'
implementation 'com.ibm.icu:icu4j:57.1'
implementation 'com.jcraft:jsch:0.1.53'
implementation 'com.jcraft:jzlib:1.1.3'
implementation 'com.squareup:javapoet:1.8.0'
implementation 'com.squareup:javawriter:2.5.1'
implementation 'com.sun.activation:javax.activation:1.2.0'
implementation 'com.thoughtworks.paranamer:paranamer:2.7'
implementation 'commons-codec:commons-codec:1.6'
implementation 'commons-logging:commons-logging:1.1.1'
implementation 'dnsjava:dnsjava:2.1.7'
implementation 'io.netty:netty-buffer:4.1.28.Final'
implementation 'io.netty:netty-codec:4.1.28.Final'
implementation 'io.netty:netty-codec-http:4.1.28.Final'
implementation 'io.netty:netty-common:4.1.28.Final'
implementation 'io.netty:netty-handler:4.1.28.Final'
implementation 'io.netty:netty-resolver:4.1.28.Final'
implementation 'io.netty:netty-tcnative:2.0.12.Final'
implementation 'io.netty:netty-tcnative-boringssl-static:2.0.12.Final'
implementation 'io.netty:netty-transport:4.1.28.Final'
implementation 'it.unimi.dsi:fastutil:6.5.16'
implementation 'javax.annotation:jsr250-api:1.0'
implementation 'javax.inject:javax.inject:1'
implementation 'javax.mail:mail:1.4'
implementation 'javax.servlet:servlet-api:2.5'
implementation 'javax.xml.bind:jaxb-api:2.3.0'
implementation 'javax.xml.soap:javax.xml.soap-api:1.4.0'
implementation 'jline:jline:1.0'
implementation 'joda-time:joda-time:2.3'
implementation 'org.apache.avro:avro:1.8.2'
implementation 'org.apache.beam:beam-runners-direct-java:2.2.0'
implementation 'org.apache.beam:beam-runners-google-cloud-dataflow-java:2.1.0'
implementation 'org.apache.beam:beam-sdks-common-runner-api:2.1.0'
implementation 'org.apache.beam:beam-sdks-java-core:2.2.0'
implementation 'org.apache.beam:beam-sdks-java-extensions-google-cloud-platform-core:2.1.0'
implementation 'org.apache.beam:beam-sdks-java-io-google-cloud-platform:2.2.0'
implementation 'org.apache.commons:commons-compress:1.8.1'
implementation 'org.apache.ftpserver:ftpserver-core:1.0.6'
implementation 'org.apache.httpcomponents:httpclient:4.5.2'
implementation 'org.apache.httpcomponents:httpcore:4.4.4'
implementation 'org.apache.mina:mina-core:2.0.4'
implementation 'org.apache.sshd:sshd-core:2.0.0'
implementation 'org.apache.sshd:sshd-scp:2.0.0'
implementation 'org.apache.sshd:sshd-sftp:2.0.0'
implementation 'org.apache.tomcat:servlet-api:6.0.45'
implementation 'org.apache.tomcat:tomcat-annotations-api:8.0.5'
implementation 'org.bouncycastle:bcpg-jdk15on:1.52'
implementation 'org.bouncycastle:bcpkix-jdk15on:1.52'
implementation 'org.bouncycastle:bcprov-jdk15on:1.52'
implementation 'org.codehaus.jackson:jackson-core-asl:1.9.13'
implementation 'org.codehaus.jackson:jackson-mapper-asl:1.9.13'
implementation 'org.joda:joda-money:0.10.0'
implementation 'org.json:json:20160810'
implementation 'org.khronos:opengl-api:gl1.1-android-2.1_r1'
implementation 'org.mortbay.jetty:jetty:6.1.26'
implementation 'org.mortbay.jetty:servlet-api:2.5-20081211'
implementation 'org.mortbay.jetty:jetty-util:6.1.26'
implementation 'org.slf4j:slf4j-api:1.7.16'
implementation 'org.tukaani:xz:1.5'
implementation 'org.xerial.snappy:snappy-java:1.1.4-M3'
implementation 'org.yaml:snakeyaml:1.17'
implementation 'xerces:xmlParserAPIs:2.6.2'
implementation 'xpp3:xpp3:1.1.4c'
// Custom-built objectify jar at commit ecd5165, included in Nomulus release.
implementation name: 'objectify-4.1.3'
// Custom-built objectify jar at commit ecd5165, included in Nomulus
// release.
implementation files(
"${rootDir}/../third_party/objectify/v4_1/objectify-4.1.3.jar")
testImplementation project(':third_party')
compileOnly 'com.google.appengine:appengine-remote-api:1.9.48' // Also testImplementation
compileOnly 'com.google.auto.service:auto-service:1.0-rc4'
compileOnly 'org.osgi:org.osgi.core:4.3.0'
compile 'com.beust:jcommander:1.48'
compile 'com.google.api-client:google-api-client:1.22.0'
maybeRuntime 'com.google.api-client:google-api-client-appengine:1.22.0'
maybeRuntime 'com.google.api-client:google-api-client-jackson2:1.20.0'
compile 'com.google.monitoring-client:metrics:1.0.4'
compile 'com.google.monitoring-client:stackdriver:1.0.4'
compile 'com.google.api-client:google-api-client-java6:1.27.0'
maybeRuntime 'com.google.api-client:google-api-client-servlet:1.22.0'
compile 'com.google.apis:google-api-services-admin-directory:directory_v1-rev72-1.22.0'
compile 'com.google.apis:google-api-services-appengine:v1-rev85-1.25.0'
compile 'com.google.apis:google-api-services-bigquery:v2-rev325-1.22.0'
maybeRuntime 'com.google.apis:google-api-services-clouddebugger:v2-rev8-1.22.0'
compile 'com.google.apis:google-api-services-cloudkms:v1-rev12-1.22.0'
maybeRuntime 'com.google.apis:google-api-services-cloudresourcemanager:v1-rev6-1.22.0'
compile 'com.google.apis:google-api-services-dataflow:v1b3-rev196-1.22.0'
compile 'com.google.apis:google-api-services-dns:v2beta1-rev6-1.22.0'
compile 'com.google.apis:google-api-services-drive:v2-rev160-1.19.1'
compile 'com.google.apis:google-api-services-groupssettings:v1-rev60-1.22.0'
compile 'com.google.apis:google-api-services-monitoring:v3-rev11-1.22.0'
compile 'com.google.apis:google-api-services-sheets:v4-rev483-1.22.0'
maybeRuntime 'com.google.apis:google-api-services-storage:v1-rev86-1.22.0'
// TODO(b/71631624): change appengine:appengine-api-1.0-sdk to
// testCompileOnly after BillingEmailUtilsTest.java is fixed.
compile 'com.google.appengine:appengine-api-1.0-sdk:1.9.48'
maybeRuntime 'com.google.appengine:appengine-api-labs:1.9.48'
maybeRuntime 'com.google.appengine:appengine-api-stubs:1.9.48'
testCompile 'com.google.appengine:appengine-api-stubs:1.9.48'
compile 'com.google.appengine.tools:appengine-gcs-client:0.6'
compile 'com.google.appengine.tools:appengine-mapreduce:0.8.5'
compile 'com.google.appengine.tools:appengine-pipeline:0.2.13'
compile 'com.google.appengine:appengine-remote-api:1.9.48'
maybeRuntime 'com.google.appengine:appengine-tools-sdk:1.9.48'
compile 'com.google.auth:google-auth-library-credentials:0.7.1'
compile 'com.google.auth:google-auth-library-oauth2-http:0.7.1'
maybeRuntime 'com.google.auto:auto-common:0.8'
maybeRuntime 'com.google.auto.factory:auto-factory:1.0-beta3'
compile 'com.google.code.gson:gson:2.8.5'
compile 'com.google.auto.value:auto-value-annotations:1.6.2'
maybeRuntime 'com.google.cloud.bigdataoss:gcsio:1.4.5'
maybeRuntime 'com.google.cloud.bigdataoss:util:1.4.5'
compile 'com.google.code.findbugs:jsr305:3.0.2'
compile 'com.google.dagger:dagger:2.15'
maybeRuntime 'com.google.dagger:dagger-producers:2.15'
compile 'com.google.errorprone:error_prone_annotations:2.3.1'
maybeRuntime 'com.google.errorprone:javac-shaded:9-dev-r4023-3'
compile 'com.google.flogger:flogger:0.1'
runtime 'com.google.flogger:flogger-system-backend:0.1'
maybeRuntime 'com.google.gdata:core:1.47.1'
maybeRuntime 'com.google.googlejavaformat:google-java-format:1.4'
compile 'com.google.guava:guava:25.1-jre'
gradleLint.ignore('unused-dependency') {
compile 'com.google.gwt:gwt-user:2.8.2'
}
compile 'com.google.http-client:google-http-client:1.25.0'
compile 'com.google.http-client:google-http-client-appengine:1.22.0'
compile 'com.google.http-client:google-http-client-jackson2:1.25.0'
compile 'com.google.oauth-client:google-oauth-client:1.25.0'
maybeRuntime 'com.google.oauth-client:google-oauth-client-appengine:1.22.0'
compile 'com.google.oauth-client:google-oauth-client-java6:1.27.0'
compile 'com.google.oauth-client:google-oauth-client-jetty:1.22.0'
maybeRuntime 'com.google.oauth-client:google-oauth-client-servlet:1.22.0'
maybeRuntime 'com.google.protobuf:protobuf-java:2.6.0'
compile 'com.google.re2j:re2j:1.1'
compile 'com.google.template:soy:2018-03-14'
maybeRuntime 'com.googlecode.charts4j:charts4j:1.3'
compile 'com.googlecode.json-simple:json-simple:1.1.1'
compile 'com.jcraft:jsch:0.1.53'
maybeRuntime 'com.jcraft:jzlib:1.1.3'
maybeRuntime 'com.squareup:javapoet:1.8.0'
maybeRuntime 'com.squareup:javawriter:2.5.1'
maybeRuntime 'com.sun.activation:javax.activation:1.2.0'
maybeRuntime 'com.thoughtworks.paranamer:paranamer:2.7'
maybeRuntime 'commons-codec:commons-codec:1.10'
compile group: 'commons-io', name: 'commons-io', version: '2.6'
maybeRuntime 'commons-logging:commons-logging:1.2'
compile 'dnsjava:dnsjava:2.1.7'
maybeRuntime 'io.netty:netty-buffer:4.1.28.Final'
maybeRuntime 'io.netty:netty-codec:4.1.28.Final'
maybeRuntime 'io.netty:netty-codec-http:4.1.28.Final'
maybeRuntime 'io.netty:netty-common:4.1.28.Final'
maybeRuntime 'io.netty:netty-handler:4.1.28.Final'
maybeRuntime 'io.netty:netty-resolver:4.1.28.Final'
maybeRuntime 'io.netty:netty-tcnative:2.0.12.Final'
maybeRuntime 'io.netty:netty-tcnative-boringssl-static:2.0.12.Final'
maybeRuntime 'io.netty:netty-transport:4.1.28.Final'
maybeRuntime 'it.unimi.dsi:fastutil:6.5.16'
maybeRuntime 'javax.annotation:jsr250-api:1.0'
runtime 'org.glassfish.jaxb:jaxb-runtime:2.3.0'
testCompile 'javax.annotation:jsr250-api:1.0'
compile 'javax.inject:javax.inject:1'
compile 'javax.mail:mail:1.4'
compile 'javax.servlet:servlet-api:2.5'
compile 'javax.xml.bind:jaxb-api:2.3.0'
maybeRuntime 'javax.xml.soap:javax.xml.soap-api:1.4.0'
compile 'jline:jline:1.0'
compile 'joda-time:joda-time:2.3'
compile 'org.apache.avro:avro:1.8.2'
maybeRuntime 'org.apache.beam:beam-runners-direct-java:2.2.0'
testCompile 'org.apache.beam:beam-runners-direct-java:2.2.0'
compile 'org.apache.beam:beam-runners-google-cloud-dataflow-java:2.1.0'
maybeRuntime 'org.apache.beam:beam-sdks-common-runner-api:2.1.0'
compile 'org.apache.beam:beam-sdks-java-core:2.2.0'
compile 'org.apache.beam:beam-sdks-java-extensions-google-cloud-platform-core:2.1.0'
compile 'org.apache.beam:beam-sdks-java-io-google-cloud-platform:2.2.0'
maybeRuntime 'org.apache.commons:commons-compress:1.8.1'
maybeRuntime 'org.apache.ftpserver:ftplet-api:1.0.6'
testCompile 'org.apache.ftpserver:ftplet-api:1.0.6'
maybeRuntime 'org.apache.ftpserver:ftpserver-core:1.0.6'
testCompile 'org.apache.ftpserver:ftpserver-core:1.0.6'
compile 'org.apache.httpcomponents:httpclient:4.5.2'
compile 'org.apache.httpcomponents:httpcore:4.4.4'
maybeRuntime 'org.apache.mina:mina-core:2.0.4'
maybeRuntime 'org.apache.sshd:sshd-core:2.0.0'
testCompile 'org.apache.sshd:sshd-core:2.0.0'
maybeRuntime 'org.apache.sshd:sshd-scp:2.0.0'
testCompile 'org.apache.sshd:sshd-scp:2.0.0'
maybeRuntime 'org.apache.sshd:sshd-sftp:2.0.0'
testCompile 'org.apache.sshd:sshd-sftp:2.0.0'
compile 'org.apache.tomcat:servlet-api:6.0.45'
maybeRuntime 'org.apache.tomcat:tomcat-annotations-api:8.0.5'
testCompile 'org.apache.tomcat:tomcat-annotations-api:8.0.5'
compile 'org.bouncycastle:bcpg-jdk15on:1.52'
testCompile 'org.bouncycastle:bcpkix-jdk15on:1.52'
compile 'org.bouncycastle:bcprov-jdk15on:1.52'
maybeRuntime 'org.codehaus.jackson:jackson-core-asl:1.9.13'
maybeRuntime 'org.codehaus.jackson:jackson-mapper-asl:1.9.13'
compile 'org.joda:joda-money:0.10.0'
compile 'org.json:json:20160810'
maybeRuntime 'org.khronos:opengl-api:gl1.1-android-2.1_r1'
maybeRuntime 'org.mortbay.jetty:jetty:6.1.26'
testCompile 'org.mortbay.jetty:jetty:6.1.26'
compile 'org.mortbay.jetty:servlet-api:2.5-20081211'
maybeRuntime 'org.mortbay.jetty:jetty-util:6.1.26'
maybeRuntime 'org.slf4j:slf4j-api:1.7.16'
maybeRuntime 'org.tukaani:xz:1.5'
maybeRuntime 'org.xerial.snappy:snappy-java:1.1.4-M3'
compile 'xerces:xmlParserAPIs:2.6.2'
compile 'xpp3:xpp3:1.1.4c'
// Known issue: nebula-lint misses inherited dependency.
compile project(':third_party')
compile project(':util')
// Include auto-value in compile until nebula-lint understands
// annotationProcessor
gradleLint.ignore('unused-dependency') {
compile 'com.google.auto.value:auto-value:1.6.2'
}
annotationProcessor 'com.google.auto.value:auto-value:1.6.2'
testAnnotationProcessor 'com.google.auto.value:auto-value:1.6.2'
annotationProcessor 'com.google.dagger:dagger-compiler:2.15'
testAnnotationProcessor 'com.google.dagger:dagger-compiler:2.15'
testImplementation 'com.google.appengine:appengine-remote-api:1.9.48' // Also compileOnly
testImplementation 'com.google.appengine:appengine-testing:1.9.58'
testImplementation 'com.google.guava:guava-testlib:25.0-jre'
testImplementation 'com.google.monitoring-client:contrib:1.0.4'
testImplementation 'com.google.truth:truth:0.42'
testImplementation 'com.google.truth.extensions:truth-java8-extension:0.39'
testImplementation 'org.hamcrest:hamcrest-all:1.3'
testImplementation 'org.hamcrest:hamcrest-core:1.3'
testImplementation 'org.hamcrest:hamcrest-library:1.3'
testImplementation 'junit:junit:4.12'
testImplementation 'org.mockito:mockito-all:1.9.5'
testImplementation 'org.slf4j:slf4j-simple:1.7.16' // Not needed by Bazel
testCompile 'com.google.appengine:appengine-testing:1.9.58'
testCompile 'com.google.guava:guava-testlib:25.0-jre'
testCompile 'com.google.monitoring-client:contrib:1.0.4'
testCompile 'com.google.truth:truth:0.42'
testCompile 'com.google.truth.extensions:truth-java8-extension:0.39'
testCompile 'org.hamcrest:hamcrest-all:1.3'
testCompile 'org.hamcrest:hamcrest-core:1.3'
testCompile 'org.hamcrest:hamcrest-library:1.3'
testCompile 'junit:junit:4.12'
testCompile 'org.mockito:mockito-all:1.9.5'
testImplementation project(':third_party')
// Indirect dependency found by undeclared-dependency check. Such
// dependencies should go after all other compile and testCompile
// dependencies to avoid overriding them accidentally.
compile 'javax.servlet:javax.servlet-api:3.1.0' // google-api-client-appeng
compile 'com.google.oauth-client:google-oauth-client-java6:1.20.0'
// Dependencies needed for jaxb compilation.
// Use jaxb 2.2.11 because 2.3 is known to break the Ant task we use.
@ -217,14 +263,18 @@ dependencies {
// Dependencies needed for compiling stylesheets to javascript
css 'com.google.closure-stylesheets:closure-stylesheets:1.5.0'
css 'args4j:args4j:2.0.26'
// Tool dependencies, used for doc generation.
compile files("${System.properties['java.home']}/../lib/tools.jar")
}
task jaxbToJava() {
task jaxbToJava {
doLast {
file(generatedDir).mkdirs()
// Temp dir to hold schema and bindings files. Files must be in the same directory because
// the bindings (.xjb) file does not declare relative paths to schema (.xsd) files.
// Temp dir to hold schema and bindings files. Files must be in the same
// directory because the bindings (.xjb) file does not declare relative
// paths to schema (.xsd) files.
def xjcTempSourceDir = file("${temporaryDir}/xjc")
xjcTempSourceDir.mkdirs()
ant.copy(
@ -252,7 +302,8 @@ task jaxbToJava() {
dir: new File("$xjcTempSourceDir"),
include: ['**/*.xsd'])
.addToAntBuilder(ant, 'schema', FileCollection.AntType.FileSet)
// -npa: do not generate package-info.java files. They will be generated below.
// -npa: do not generate package-info.java files. They will be generated
// below.
arg(line: '-npa -quiet -extension')
}
exec {
@ -265,7 +316,7 @@ task jaxbToJava() {
}
}
task soyToJava() {
task soyToJava {
ext.soyToJava = { javaPackage, outputDirectory, soyFiles ->
javaexec {
main = "com.google.template.soy.SoyParseInfoGenerator"
@ -280,12 +331,23 @@ task soyToJava() {
doLast {
soyToJava('google.registry.tools.soy', "${generatedDir}/google/registry/tools/soy",
fileTree(dir: "${javaDir}/google/registry/tools/soy", include: ['**/*.soy']))
soyToJava('google.registry.tools.soy',
"${generatedDir}/google/registry/tools/soy",
fileTree(
dir: "${javaDir}/google/registry/tools/soy",
include: ['**/*.soy']))
soyToJava('google.registry.ui.soy.registrar',
"${generatedDir}/google/registry/ui/soy/registrar",
fileTree(dir: "${javaDir}/google/registry/ui/soy/registrar", include: ['**/*.soy']))
fileTree(
dir: "${javaDir}/google/registry/ui/soy/registrar",
include: ['**/*.soy']))
soyToJava('google.registry.ui.soy.otesetup',
"${generatedDir}/google/registry/ui/soy/otesetup",
fileTree(
dir: "${javaDir}/google/registry/ui/soy/otesetup",
include: ['**/*.soy']))
soyToJava('google.registry.ui.soy',
"${generatedDir}/google/registry/ui/soy",
@ -294,6 +356,14 @@ task soyToJava() {
}.filter {
it.name.endsWith(".soy")
})
soyToJava('google.registry.ui.soy.otesetup',
"${generatedDir}/google/registry/ui/soy/otesetup",
files {
file("${javaDir}/google/registry/ui/soy/otesetup").listFiles()
}.filter {
it.name.endsWith(".soy")
})
}
}
@ -327,11 +397,16 @@ task stylesheetsToJavascript {
def outputDir = "${project.buildDir}/resources/main/google/registry/ui/css"
file("${outputDir}").mkdirs()
def srcFiles = [
"${cssSourceDir}/console.css", "${cssSourceDir}/contact-settings.css",
"${cssSourceDir}/contact-us.css", "${cssSourceDir}/dashboard.css",
"${cssSourceDir}/epp.css", "${cssSourceDir}/forms.css",
"${cssSourceDir}/kd_components.css", "${cssSourceDir}/registry.css",
"${cssSourceDir}/resources.css", "${cssSourceDir}/security-settings.css"
"${cssSourceDir}/console.css",
"${cssSourceDir}/contact-settings.css",
"${cssSourceDir}/contact-us.css",
"${cssSourceDir}/dashboard.css",
"${cssSourceDir}/epp.css",
"${cssSourceDir}/forms.css",
"${cssSourceDir}/kd_components.css",
"${cssSourceDir}/registry.css",
"${cssSourceDir}/resources.css",
"${cssSourceDir}/security-settings.css"
]
cssCompile("${outputDir}/registrar_bin", false, srcFiles)
cssCompile("${outputDir}/registrar_dbg", true, srcFiles)
@ -341,26 +416,72 @@ task stylesheetsToJavascript {
compileJava.dependsOn jaxbToJava
compileJava.dependsOn soyToJava
// stylesheetsToJavascript must happen after processResources, which wipes the resources folder
// before copying data into it.
// stylesheetsToJavascript must happen after processResources, which wipes the
// resources folder before copying data into it.
stylesheetsToJavascript.dependsOn processResources
classes.dependsOn stylesheetsToJavascript
// Make testing artifacts available to be depended up on by other projects.
// TODO: factor out google.registry.testing to be a separate project.
task testJar(type: Jar) {
classifier = 'test'
from sourceSets.test.output
}
test {
// Test exclusion patterns:
// - *TestCase.java are inherited by concrete test classes.
// - *TestSuite.java are excluded to avoid duplicate execution of suite members. See README
// in this directory for more information.
artifacts {
testRuntime testJar
}
// Runs the tests matched by fragileTestPatterns in strict isolation: one test
// class per freshly forked JVM, one JVM at a time.
task fragileTest(type: Test) {
// Common exclude pattern. See README in parent directory for explanation.
exclude "**/*TestCase.*", "**/*TestSuite.*"
include fragileTestPatterns
// Use a single JVM to execute all tests. See README in this directory for more information.
maxParallelForks 1
// Use a single thread to execute all tests in a JVM. See README in this directory for more
// information.
// Run every test class in a freshly started process.
forkEvery 1
// Uncomment to see test outputs in stdout.
//testLogging.showStandardStreams = true
}
// Runs the tests matched by outcastTestPatterns, which are kept out of the
// main `test` task (see README in parent directory) but may run in parallel.
task outcastTest(type: Test) {
// Common exclude pattern. See README in parent directory for explanation.
exclude "**/*TestCase.*", "**/*TestSuite.*"
include outcastTestPatterns
// Sets the maximum number of test executors that may exist at the same time.
maxParallelForks 5
}
// Main test task: runs everything except the fragile and outcast groups,
// which have their own tasks above; those tasks run as dependencies of this
// one so a plain `gradle test` still covers the whole suite.
test {
// Common exclude pattern. See README in parent directory for explanation.
exclude "**/*TestCase.*", "**/*TestSuite.*"
exclude fragileTestPatterns
exclude outcastTestPatterns
// Sets the maximum number of test executors that may exist at the same time.
maxParallelForks 5
}.dependsOn(fragileTest, outcastTest)
// Builds the standalone "nomulus" command-line tool as a fat jar that bundles
// the entire runtime classpath.
task nomulus(type: Jar) {
manifest {
// Entry point invoked by `java -jar nomulus.jar`.
attributes 'Main-Class': 'google.registry.tools.RegistryTool'
}
// The merged jar can exceed the 65535-entry limit of the classic zip format.
zip64 = true
baseName = 'nomulus'
version = null
from {
// Unpack every runtime dependency jar into this jar; plain directories on
// the classpath are included as-is.
configurations.runtimeClasspath.collect {
it.isDirectory() ? it : zipTree(it)
}
}
// Excludes signature files that accompany some dependency jars, like
// bouncycastle. If they are present, only classes from those signed jars are
// made available to the class loader.
// see https://discuss.gradle.org/t/signing-a-custom-gradle-plugin-thats-downloaded-by-the-build-system-from-github/1365
exclude "META-INF/*.SF", "META-INF/*.DSA", "META-INF/*.RSA"
with jar
dependsOn project(':third_party').jar
}
ext.generateDependencyPublications()

View file

@ -0,0 +1,38 @@
#!/bin/bash
# Copyright 2018 The Nomulus Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This script runs a workflow to clone a git repository to local, generate
# a metadata file for each dependency artifact and check in the file to remote
# repository.
set -e
ALL_SUBPROJECTS="core proxy util"
USAGE="Usage: ${0} REPO_URL"
# Fail fast with the usage message if the repository URL is missing.
REPO_URL=${1:?${USAGE}}
REPO_DIR="$(mktemp -d)"
git clone "${REPO_URL}" "${REPO_DIR}"
# Regenerate the dependency metadata for every subproject into the clone.
for PROJECT in ${ALL_SUBPROJECTS}; do
"$(dirname "$0")"/gradlew -PprivateRepository="${REPO_DIR}" \
":${PROJECT}:generateDependencyMetadata"
done
cd "${REPO_DIR}"
git add -A
# Commit and push only when the metadata actually changed. This must be an
# explicit conditional: `a || b && c` parses as `(a || b) && c`, which would
# run `git push` even when diff-index reported no changes.
if ! git diff-index --quiet HEAD; then
git commit -m "Update dependency metadata file"
git push
fi
rm -rf "${REPO_DIR}"

BIN
gradle/gradle/wrapper/gradle-wrapper.jar vendored Normal file

Binary file not shown.

View file

@ -0,0 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.0-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

172
gradle/gradlew vendored Normal file
View file

@ -0,0 +1,172 @@
#!/usr/bin/env sh
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=$(save "$@")
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
cd "$(dirname "$0")"
fi
exec "$JAVACMD" "$@"

84
gradle/gradlew.bat vendored Normal file
View file

@ -0,0 +1,84 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

5
gradle/proxy/Dockerfile Normal file
View file

@ -0,0 +1,5 @@
# Builds the standalone proxy image: a distroless Java base running the fat
# jar produced by the :proxy:deployJar Gradle task.
# TODO(jianglai): Peg to a specific sha256 hash to enable reproducible build.
FROM gcr.io/distroless/java
ADD build/libs/proxy_server.jar .
ENTRYPOINT ["java", "-jar", "proxy_server.jar"]
# NOTE(review): this list includes 30010 while the docker block in
# proxy/build.gradle lists 30011 — confirm which port is intended.
EXPOSE 30000 30001 30002 30010 30012

103
gradle/proxy/build.gradle Normal file
View file

@ -0,0 +1,103 @@
apply plugin: 'com.google.osdetector'
apply plugin: 'application'
apply plugin: 'com.bmuschko.docker-java-application'
// TODO(jianglai): use plugins block once the osdetector v1.6.0 works with it.
// see: https://github.com/google/osdetector-gradle-plugin/issues/15
buildscript {
repositories {
mavenCentral()
}
dependencies {
classpath 'com.google.gradle:osdetector-gradle-plugin:1.6.0'
}
}
sourceSets {
main {
resources {
exclude "${project.relativePath}/terraform/"
exclude "${project.relativePath}/kubernetes/"
}
}
}
mainClassName = 'google.registry.proxy.ProxyServer'
// Builds the deployable proxy server as a fat jar that bundles the entire
// runtime classpath (consumed by the Dockerfile in this directory).
task deployJar(type: Jar) {
manifest {
// Entry point invoked by `java -jar proxy_server.jar`.
attributes 'Main-Class': 'google.registry.proxy.ProxyServer'
}
baseName = 'proxy_server'
version = null
from {
// Unpack every runtime dependency jar into this jar; plain directories on
// the classpath are included as-is.
configurations.runtimeClasspath.collect {
it.isDirectory() ? it : zipTree(it)
}
}
// Excludes signature files that accompany some dependency jars, like
// bouncycastle. If they are present, only classes from those signed jars are
// made available to the class loader.
// see https://discuss.gradle.org/t/signing-a-custom-gradle-plugin-thats-downloaded-by-the-build-system-from-github/1365
exclude "META-INF/*.SF", "META-INF/*.DSA", "META-INF/*.RSA"
with jar
}
// Dependency declarations for the :proxy subproject. Fix: the
// com.google.api-client:google-api-client:1.27.0 coordinate was declared
// twice; the redundant duplicate line has been removed.
dependencies {
compile 'com.beust:jcommander:1.48'
compile 'com.google.api-client:google-api-client:1.27.0'
compile 'com.google.apis:google-api-services-cloudkms:v1-rev12-1.22.0'
compile 'com.google.apis:google-api-services-monitoring:v3-rev11-1.22.0'
compile 'com.google.apis:google-api-services-storage:v1-rev86-1.22.0'
compile 'com.google.auto.value:auto-value-annotations:1.6.2'
compile 'com.google.code.findbugs:jsr305:3.0.2'
compile 'com.google.code.gson:gson:2.8.5'
compile 'com.google.dagger:dagger:2.15'
compile 'com.google.flogger:flogger:0.1'
compile 'com.google.guava:guava:27.0-jre'
compile 'com.google.http-client:google-http-client:1.27.0'
compile 'com.google.monitoring-client:metrics:1.0.4'
compile 'com.google.monitoring-client:stackdriver:1.0.4'
compile 'io.netty:netty-buffer:4.1.31.Final'
compile 'io.netty:netty-codec-http:4.1.31.Final'
compile 'io.netty:netty-codec:4.1.31.Final'
compile 'io.netty:netty-common:4.1.31.Final'
compile 'io.netty:netty-handler:4.1.31.Final'
compile 'io.netty:netty-transport:4.1.31.Final'
compile 'javax.inject:javax.inject:1'
compile 'joda-time:joda-time:2.3'
compile 'org.bouncycastle:bcpkix-jdk15on:1.52'
compile 'org.bouncycastle:bcprov-jdk15on:1.52'
compile project(':util')
runtime 'com.google.flogger:flogger-system-backend:0.1'
runtime 'com.google.auto.value:auto-value:1.6.2'
// Platform-specific native TLS library, selected via the osdetector plugin.
runtime group: 'io.netty', name: 'netty-tcnative-boringssl-static',
version: '2.0.20.Final', classifier: osdetector.classifier
testCompile 'com.google.monitoring-client:contrib:1.0.4'
testCompile 'com.google.truth:truth:0.42'
testCompile 'org.yaml:snakeyaml:1.17'
testCompile 'junit:junit:4.12'
testCompile 'org.mockito:mockito-all:1.9.5'
testCompile project(':third_party')
testCompile project(path: ':core', configuration: 'testRuntime')
// Include auto-value in compile until nebula-lint understands
// annotationProcessor
annotationProcessor 'com.google.auto.value:auto-value:1.6.2'
testAnnotationProcessor 'com.google.auto.value:auto-value:1.6.2'
annotationProcessor 'com.google.dagger:dagger-compiler:2.15'
testAnnotationProcessor 'com.google.dagger:dagger-compiler:2.15'
}
// Configuration for the com.bmuschko.docker-java-application plugin, which
// builds a Docker image for the proxy.
docker {
javaApplication {
// TODO(jianglai): Peg to a specific hash to enable reproducible build.
baseImage = 'openjdk:8-jre-alpine'
// NOTE(review): this list includes 30011 while the Dockerfile in this
// directory EXPOSEs 30010 — confirm which port is intended.
ports = [30000, 30001, 30002, 30011, 30012]
}
}
ext.generateDependencyPublications()

View file

@ -0,0 +1,2 @@
This directory is intentionally empty. This subproject is configured through
the root project build script.

View file

@ -0,0 +1,2 @@
This directory is intentionally empty. This subproject is configured through
the root project build script.

View file

@ -0,0 +1,2 @@
This directory is intentionally empty. This subproject is configured through
the root project build script.

View file

@ -0,0 +1,2 @@
This directory is intentionally empty. This subproject is configured through
the root project build script.

View file

@ -1,5 +1,10 @@
rootProject.name = 'nomulus'
include 'third_party'
include 'core'
include 'proxy'
include 'third_party'
include 'util'
include 'services:default'
include 'services:backend'
include 'services:tools'
include 'services:pubapi'

58
gradle/update_dependency.sh Executable file
View file

@ -0,0 +1,58 @@
#!/bin/bash
# Copyright 2018 The Nomulus Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This script runs a workflow to generate dependency lock file, run a build against
# the generated lock file, save the lock file and upload dependency JARs to a private
# Maven repository if the build succeeds.
set -e
ALL_SUBPROJECTS="core proxy util"
SUBPROJECTS=
REPOSITORY_URL=
# Parse command-line flags: --repositoryUrl <url> is required; every other
# argument is treated as a subproject name to process.
while [[ $# -gt 0 ]]; do
KEY="$1"
case ${KEY} in
--repositoryUrl)
shift
REPOSITORY_URL="$1"
;;
*)
SUBPROJECTS="${SUBPROJECTS} ${KEY}"
;;
esac
shift
done
# Default to all subprojects when none were named on the command line.
if [[ -z ${SUBPROJECTS} ]]; then
SUBPROJECTS="${ALL_SUBPROJECTS}"
fi
if [[ -z ${REPOSITORY_URL} ]]; then
echo "--repositoryUrl must be specified"
exit 1
fi
WORKING_DIR=$(dirname $0)
# For each subproject: generate a lock file, build against it, save it, then
# publish the dependency jars to the private repository. `set -e` aborts the
# whole run on the first failing step.
for PROJECT in ${SUBPROJECTS}; do
${WORKING_DIR}/gradlew ":${PROJECT}:generateLock"
${WORKING_DIR}/gradlew -PdependencyLock.useGeneratedLock=true \
":${PROJECT}:build"
${WORKING_DIR}/gradlew ":${PROJECT}:saveLock"
${WORKING_DIR}/gradlew -PrepositoryUrl="${REPOSITORY_URL}" \
":${PROJECT}:publish"
done

28
gradle/util/build.gradle Normal file
View file

@ -0,0 +1,28 @@
// Dependency declarations for the :util subproject.
dependencies {
compile 'com.google.appengine:appengine-api-1.0-sdk:1.9.48'
// NOTE(review): a testing artifact in `compile` scope is unusual — confirm
// whether production code in :util really needs appengine-testing.
compile 'com.google.appengine:appengine-testing:1.9.58'
compile 'com.google.code.findbugs:jsr305:3.0.2'
compile 'com.google.dagger:dagger:2.15'
compile 'com.google.flogger:flogger:0.1'
compile 'com.google.guava:guava:25.1-jre'
compile 'com.google.re2j:re2j:1.1'
compile 'com.ibm.icu:icu4j:57.1'
compile 'javax.inject:javax.inject:1'
compile 'javax.mail:mail:1.4'
compile 'javax.xml.bind:jaxb-api:2.3.0'
compile 'joda-time:joda-time:2.9.2'
// NOTE(review): this looks like the util project depending on a published
// copy of itself ('nomulus:util:1.0') — verify this coordinate is intended.
compile 'nomulus:util:1.0'
compile 'org.yaml:snakeyaml:1.17'
testCompile 'com.google.appengine:appengine-api-stubs:1.9.48'
testCompile 'com.google.guava:guava-testlib:25.0-jre'
testCompile 'com.google.truth:truth:0.42'
testCompile 'junit:junit:4.12'
testCompile 'org.hamcrest:hamcrest-all:1.3'
testCompile 'org.hamcrest:hamcrest-core:1.3'
testCompile 'org.mockito:mockito-all:1.9.5'
// Custom-built objectify jar, shared with :core.
testCompile files("${rootDir}/../third_party/objectify/v4_1/objectify-4.1.3.jar")
testCompile project(':third_party')
testCompile project(path: ':core', configuration: 'testRuntime')
annotationProcessor 'com.google.dagger:dagger-compiler:2.15'
testAnnotationProcessor 'com.google.dagger:dagger-compiler:2.15'
}

View file

@ -39,6 +39,7 @@ import static google.registry.model.reporting.HistoryEntry.Type.CONTACT_DELETE;
import static google.registry.model.reporting.HistoryEntry.Type.CONTACT_DELETE_FAILURE;
import static google.registry.model.reporting.HistoryEntry.Type.HOST_DELETE;
import static google.registry.model.reporting.HistoryEntry.Type.HOST_DELETE_FAILURE;
import static google.registry.model.transfer.TransferStatus.SERVER_CANCELLED;
import static google.registry.util.PipelineUtils.createJobPath;
import static java.math.RoundingMode.CEILING;
import static java.util.concurrent.TimeUnit.DAYS;
@ -85,7 +86,6 @@ import google.registry.model.poll.PendingActionNotificationResponse.HostPendingA
import google.registry.model.poll.PollMessage;
import google.registry.model.reporting.HistoryEntry;
import google.registry.model.server.Lock;
import google.registry.model.transfer.TransferStatus;
import google.registry.request.Action;
import google.registry.request.Response;
import google.registry.request.auth.Auth;
@ -394,7 +394,9 @@ public class DeleteContactsAndHostsAction implements Runnable {
ContactResource contact = (ContactResource) resource;
// Handle pending transfers on contact deletion.
if (contact.getStatusValues().contains(StatusValue.PENDING_TRANSFER)) {
contact = denyPendingTransfer(contact, TransferStatus.SERVER_CANCELLED, now);
contact =
denyPendingTransfer(
contact, SERVER_CANCELLED, now, deletionRequest.requestingClientId());
}
// Wipe out PII on contact deletion.
resourceToSaveBuilder = contact.asBuilder().wipeOut();

View file

@ -15,6 +15,7 @@ java_library(
"//java/google/registry/reporting/billing",
"//java/google/registry/util",
"@com_google_apis_google_api_services_bigquery",
"@com_google_auth_library_oauth2_http",
"@com_google_auto_value",
"@com_google_dagger",
"@com_google_flogger",

View file

@ -14,11 +14,17 @@
package google.registry.beam.invoicing;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.auth.oauth2.GoogleCredentials;
import google.registry.beam.invoicing.BillingEvent.InvoiceGroupingKey;
import google.registry.beam.invoicing.BillingEvent.InvoiceGroupingKey.InvoiceGroupingKeyCoder;
import google.registry.config.CredentialModule.LocalCredentialJson;
import google.registry.config.RegistryConfig.Config;
import google.registry.reporting.billing.BillingModule;
import google.registry.reporting.billing.GenerateInvoicesAction;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.Serializable;
import javax.inject.Inject;
import org.apache.beam.runners.dataflow.DataflowRunner;
@ -75,6 +81,12 @@ public class InvoicingPipeline implements Serializable {
@Config("billingBucketUrl")
String billingBucketUrl;
@Inject
@Config("invoiceFilePrefix")
String invoiceFilePrefix;
@Inject @LocalCredentialJson String credentialJson;
@Inject
InvoicingPipeline() {}
@ -96,6 +108,13 @@ public class InvoicingPipeline implements Serializable {
public void deploy() {
// We can't store options as a member variable due to serialization concerns.
InvoicingPipelineOptions options = PipelineOptionsFactory.as(InvoicingPipelineOptions.class);
try {
options.setGcpCredential(
GoogleCredentials.fromStream(new ByteArrayInputStream(credentialJson.getBytes(UTF_8))));
} catch (IOException e) {
throw new RuntimeException(
"Cannot obtain local credential to deploy the invoicing pipeline", e);
}
options.setProject(projectId);
options.setRunner(DataflowRunner.class);
// This causes p.run() to stage the pipeline as a template on GCS, as opposed to running it.
@ -164,7 +183,7 @@ public class InvoicingPipeline implements Serializable {
billingBucketUrl,
BillingModule.INVOICES_DIRECTORY,
yearMonth,
BillingModule.OVERALL_INVOICE_PREFIX,
invoiceFilePrefix,
yearMonth)))
.withHeader(InvoiceGroupingKey.invoiceHeader())
.withoutSharding()

View file

@ -12,6 +12,7 @@ java_library(
"//java/google/registry/beam",
"//java/google/registry/config",
"//java/google/registry/util",
"@com_google_auth_library_oauth2_http",
"@com_google_auto_value",
"@com_google_dagger",
"@com_google_flogger",

View file

@ -39,6 +39,8 @@ import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.TypeDescriptor;
import org.apache.beam.sdk.values.TypeDescriptors;
import org.joda.time.LocalDate;
import org.joda.time.YearMonth;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
@ -56,13 +58,14 @@ import org.json.JSONObject;
public class Spec11Pipeline implements Serializable {
/**
* Returns the subdirectory spec11 reports reside in for a given yearMonth in yyyy-MM format.
* Returns the subdirectory spec11 reports reside in for a given local date in yyyy-MM-dd format.
*
* @see google.registry.beam.spec11.Spec11Pipeline
* @see google.registry.reporting.spec11.Spec11EmailUtils
*/
public static String getSpec11Subdirectory(String yearMonth) {
return String.format("icann/spec11/%s/SPEC11_MONTHLY_REPORT", yearMonth);
public static String getSpec11ReportFilePath(LocalDate localDate) {
YearMonth yearMonth = new YearMonth(localDate);
return String.format("icann/spec11/%s/SPEC11_MONTHLY_REPORT_%s", yearMonth, localDate);
}
/** The JSON object field we put the registrar's e-mail address for Spec11 reports. */
@ -86,25 +89,24 @@ public class Spec11Pipeline implements Serializable {
@Config("reportingBucketUrl")
String reportingBucketUrl;
@Inject
Retrier retrier;
@Inject Retrier retrier;
@Inject
Spec11Pipeline() {}
/** Custom options for running the spec11 pipeline. */
interface Spec11PipelineOptions extends DataflowPipelineOptions {
/** Returns the yearMonth we're generating the report for, in yyyy-MM format. */
@Description("The yearMonth we generate the report for, in yyyy-MM format.")
ValueProvider<String> getYearMonth();
/** Returns the local date we're generating the report for, in yyyy-MM-dd format. */
@Description("The local date we generate the report for, in yyyy-MM-dd format.")
ValueProvider<String> getDate();
/**
* Sets the yearMonth we generate invoices for.
* Sets the local date we generate invoices for.
*
* <p>This is implicitly set when executing the Dataflow template, by specifying the "yearMonth"
* <p>This is implicitly set when executing the Dataflow template, by specifying the "date"
* parameter.
*/
void setYearMonth(ValueProvider<String> value);
void setDate(ValueProvider<String> value);
/** Returns the SafeBrowsing API key we use to evaluate subdomain health. */
@Description("The API key we use to access the SafeBrowsing API.")
@ -149,7 +151,7 @@ public class Spec11Pipeline implements Serializable {
evaluateUrlHealth(
domains,
new EvaluateSafeBrowsingFn(options.getSafeBrowsingApiKey(), retrier),
options.getYearMonth());
options.getDate());
p.run();
}
@ -161,7 +163,7 @@ public class Spec11Pipeline implements Serializable {
void evaluateUrlHealth(
PCollection<Subdomain> domains,
EvaluateSafeBrowsingFn evaluateSafeBrowsingFn,
ValueProvider<String> yearMonthProvider) {
ValueProvider<String> dateProvider) {
domains
.apply("Run through SafeBrowsingAPI", ParDo.of(evaluateSafeBrowsingFn))
.apply(
@ -199,13 +201,13 @@ public class Spec11Pipeline implements Serializable {
TextIO.write()
.to(
NestedValueProvider.of(
yearMonthProvider,
yearMonth ->
dateProvider,
date ->
String.format(
"%s/%s",
reportingBucketUrl, getSpec11Subdirectory(yearMonth))))
reportingBucketUrl,
getSpec11ReportFilePath(LocalDate.parse(date)))))
.withoutSharding()
.withHeader("Map from registrar email to detected subdomain threats:"));
}
}

View file

@ -676,7 +676,7 @@ public class BigqueryConnection implements AutoCloseable {
bigquery.datasets().get(getProjectId(), datasetName).execute();
return true;
} catch (GoogleJsonResponseException e) {
if (e.getDetails().getCode() == 404) {
if (e.getDetails() != null && e.getDetails().getCode() == 404) {
return false;
}
throw e;
@ -689,7 +689,7 @@ public class BigqueryConnection implements AutoCloseable {
bigquery.tables().get(getProjectId(), datasetName, tableName).execute();
return true;
} catch (GoogleJsonResponseException e) {
if (e.getDetails().getCode() == 404) {
if (e.getDetails() != null && e.getDetails().getCode() == 404) {
return false;
}
throw e;

View file

@ -30,15 +30,20 @@ public final class BigqueryJobFailureException extends RuntimeException {
/** Delegate {@link IOException} errors, checking for {@link GoogleJsonResponseException} */
public static BigqueryJobFailureException create(IOException cause) {
if (cause instanceof GoogleJsonResponseException) {
return create(((GoogleJsonResponseException) cause).getDetails());
return create((GoogleJsonResponseException) cause);
} else {
return new BigqueryJobFailureException(cause.getMessage(), cause, null, null);
}
}
/** Create an error for JSON server response errors. */
public static BigqueryJobFailureException create(GoogleJsonError error) {
return new BigqueryJobFailureException(error.getMessage(), null, null, error);
public static BigqueryJobFailureException create(GoogleJsonResponseException cause) {
GoogleJsonError err = cause.getDetails();
if (err != null) {
return new BigqueryJobFailureException(err.getMessage(), null, null, err);
} else {
return new BigqueryJobFailureException(cause.getMessage(), cause, null, null);
}
}
/** Create an error from a failed job. */

View file

@ -161,7 +161,7 @@ def _zip_file(ctx):
for _, zip_path in mapped
if "/" in zip_path
],
)
).to_list()
]
cmd += [
'ln -sf "${repo}/%s" "${tmp}/%s"' % (path, zip_path)
@ -181,7 +181,7 @@ def _zip_file(ctx):
ctx.file_action(output = script, content = "\n".join(cmd), executable = True)
inputs = [ctx.file._zipper]
inputs += [dep.zip_file for dep in ctx.attr.deps]
inputs += list(srcs)
inputs += srcs.to_list()
ctx.action(
inputs = inputs,
outputs = [ctx.outputs.out],

View file

@ -12,6 +12,7 @@ java_library(
"//java/google/registry/keyring/api",
"//java/google/registry/util",
"@com_google_api_client",
"@com_google_api_client_appengine",
"@com_google_appengine_api_1_0_sdk",
"@com_google_auto_value",
"@com_google_code_findbugs_jsr305",
@ -23,6 +24,5 @@ java_library(
"@javax_inject",
"@joda_time",
"@org_joda_money",
"@org_yaml_snakeyaml",
],
)

View file

@ -26,6 +26,9 @@ import google.registry.config.RegistryConfig.Config;
import google.registry.keyring.api.KeyModule.Key;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.security.GeneralSecurityException;
import javax.inject.Qualifier;
import javax.inject.Singleton;
@ -37,7 +40,21 @@ import javax.inject.Singleton;
@Module
public abstract class CredentialModule {
/** Provides the default {@link GoogleCredential} from the Google Cloud runtime. */
/**
* Provides the default {@link GoogleCredential} from the Google Cloud runtime.
*
* <p>The credential returned depends on the runtime environment:
*
* <ul>
* <li>On AppEngine, returns the service account credential for
* PROJECT_ID@appspot.gserviceaccount.com
* <li>On Compute Engine, returns the service account credential for
* PROJECT_NUMBER-compute@developer.gserviceaccount.com
* <li>On end user host, this returns the credential downloaded by gcloud. Please refer to <a
* href="https://cloud.google.com/sdk/gcloud/reference/auth/application-default/login">Cloud
* SDK documentation</a> for details.
* </ul>
*/
@DefaultCredential
@Provides
@Singleton
@ -109,6 +126,8 @@ public abstract class CredentialModule {
/** Dagger qualifier for the Application Default Credential. */
@Qualifier
@Documented
@Retention(RetentionPolicy.RUNTIME)
public @interface DefaultCredential {}
/**
@ -116,6 +135,8 @@ public abstract class CredentialModule {
* threads.
*/
@Qualifier
@Documented
@Retention(RetentionPolicy.RUNTIME)
public @interface JsonCredential {}
/**
@ -123,5 +144,19 @@ public abstract class CredentialModule {
* Suite).
*/
@Qualifier
@Documented
@Retention(RetentionPolicy.RUNTIME)
public @interface DelegatedCredential {}
/** Dagger qualifier for the local credential used in the nomulus tool. */
@Qualifier
@Documented
@Retention(RetentionPolicy.RUNTIME)
public @interface LocalCredential {}
/** Dagger qualifier for the JSON string used to create the local credential. */
@Qualifier
@Documented
@Retention(RetentionPolicy.RUNTIME)
public @interface LocalCredentialJson {}
}

View file

@ -16,10 +16,13 @@ package google.registry.config;
import static com.google.common.base.Suppliers.memoize;
import static google.registry.config.ConfigUtils.makeUrl;
import static google.registry.util.ResourceUtils.readResourceUtf8;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Ascii;
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.base.Supplier;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
@ -29,6 +32,7 @@ import dagger.Provides;
import google.registry.util.RandomStringGenerator;
import google.registry.util.StringGenerator;
import google.registry.util.TaskQueueUtils;
import google.registry.util.YamlUtils;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.net.URI;
@ -56,6 +60,10 @@ import org.joda.time.Duration;
*/
public final class RegistryConfig {
private static final String ENVIRONMENT_CONFIG_FORMAT = "files/nomulus-config-%s.yaml";
private static final String YAML_CONFIG_PROD =
readResourceUtf8(RegistryConfig.class, "files/default-config.yaml");
/** Dagger qualifier for configuration settings. */
@Qualifier
@Retention(RUNTIME)
@ -64,6 +72,22 @@ public final class RegistryConfig {
String value() default "";
}
/**
* Loads the {@link RegistryConfigSettings} POJO from the YAML configuration files.
*
* <p>The {@code default-config.yaml} file in this directory is loaded first, and a fatal error is
* thrown if it cannot be found or if there is an error parsing it. Separately, the
* environment-specific config file named {@code nomulus-config-ENVIRONMENT.yaml} is also loaded
* and those values merged into the POJO.
*/
static RegistryConfigSettings getConfigSettings() {
String configFilePath =
String.format(
ENVIRONMENT_CONFIG_FORMAT, Ascii.toLowerCase(RegistryEnvironment.get().name()));
String customYaml = readResourceUtf8(RegistryConfig.class, configFilePath);
return YamlUtils.getConfigSettings(YAML_CONFIG_PROD, customYaml, RegistryConfigSettings.class);
}
/** Dagger module for providing configuration settings. */
@Module
public static final class ConfigModule {
@ -392,6 +416,20 @@ public final class RegistryConfig {
return config.gSuite.adminAccountEmailAddress;
}
/**
* Returns the email address of the group containing emails of support accounts.
*
* <p>These accounts will have "ADMIN" access to the registrar console.
*
* @see google.registry.groups.DirectoryGroupsConnection
*/
@Provides
@Config("gSuiteSupportGroupEmailAddress")
public static Optional<String> provideGSuiteSupportGroupEmailAddress(
RegistryConfigSettings config) {
return Optional.ofNullable(Strings.emptyToNull(config.gSuite.supportGroupEmailAddress));
}
/**
* Returns the email address(es) that notifications of registrar and/or registrar contact
* updates should be sent to, or the empty list if updates should not be sent.
@ -471,7 +509,7 @@ public final class RegistryConfig {
/**
* The email address that outgoing emails from the app are sent from.
*
* @see google.registry.ui.server.registrar.SendEmailUtils
* @see google.registry.ui.server.SendEmailUtils
*/
@Provides
@Config("gSuiteOutgoingEmailAddress")
@ -482,10 +520,10 @@ public final class RegistryConfig {
/**
* The display name that is used on outgoing emails sent by Nomulus.
*
* @see google.registry.ui.server.registrar.SendEmailUtils
* @see google.registry.ui.server.SendEmailUtils
*/
@Provides
@Config("gSuiteOutoingEmailDisplayName")
@Config("gSuiteOutgoingEmailDisplayName")
public static String provideGSuiteOutgoingEmailDisplayName(RegistryConfigSettings config) {
return config.gSuite.outgoingEmailDisplayName;
}
@ -537,6 +575,17 @@ public final class RegistryConfig {
return beamBucketUrl + "/templates/spec11";
}
/**
* Returns whether an SSL certificate hash is required to log in via EPP and run flows.
*
* @see google.registry.flows.TlsCredentials
*/
@Provides
@Config("requireSslCertificates")
public static boolean provideRequireSslCertificates(RegistryConfigSettings config) {
return config.registryPolicy.requireSslCertificates;
}
/**
* Returns the default job zone to run Apache Beam (Cloud Dataflow) jobs in.
*
@ -656,6 +705,18 @@ public final class RegistryConfig {
return ImmutableList.copyOf(config.billing.invoiceEmailRecipients);
}
/**
* Returns the file prefix for the invoice CSV file.
*
* @see google.registry.beam.invoicing.InvoicingPipeline
* @see google.registry.reporting.billing.BillingEmailUtils
*/
@Provides
@Config("invoiceFilePrefix")
public static String provideInvoiceFilePrefix(RegistryConfigSettings config) {
return config.billing.invoiceFilePrefix;
}
/**
* Returns the Google Cloud Storage bucket for staging escrow deposits pending upload.
*
@ -827,20 +888,6 @@ public final class RegistryConfig {
return config.misc.alertRecipientEmailAddress;
}
/**
* Returns the email address we send emails from.
*
* @see google.registry.reporting.icann.ReportingEmailUtils
* @see google.registry.reporting.billing.BillingEmailUtils
* @see google.registry.reporting.spec11.Spec11EmailUtils
*/
@Provides
@Config("alertSenderEmailAddress")
public static String provideAlertSenderEmailAddress(
@Config("projectId") String projectId, RegistryConfigSettings config) {
return String.format("%s-no-reply@%s", projectId, config.misc.alertEmailSenderDomain);
}
/**
* Returns the email address to which spec 11 email should be replied.
*
@ -1090,6 +1137,12 @@ public final class RegistryConfig {
return config.registryPolicy.allocationTokenCustomLogicClass;
}
@Provides
@Config("dnsCountQueryCoordinatorClass")
public static String dnsCountQueryCoordinatorClass(RegistryConfigSettings config) {
return config.registryPolicy.dnsCountQueryCoordinatorClass;
}
/** Returns the disclaimer text for the exported premium terms. */
@Provides
@Config("premiumTermsExportDisclaimer")
@ -1195,6 +1248,14 @@ public final class RegistryConfig {
return ImmutableList.copyOf(config.credentialOAuth.delegatedCredentialOauthScopes);
}
/** Provides the OAuth scopes required for credentials created locally for the nomulus tool. */
@Provides
@Config("localCredentialOauthScopes")
public static ImmutableList<String> provideLocalCredentialOauthScopes(
RegistryConfigSettings config) {
return ImmutableList.copyOf(config.credentialOAuth.localCredentialOauthScopes);
}
/**
* Returns the help path for the RDAP terms of service.
*
@ -1208,15 +1269,18 @@ public final class RegistryConfig {
return "/tos";
}
/**
* Returns the name of the OAuth2 client secrets file.
*
* <p>This is the name of a resource relative to the root of the class tree.
*/
/** OAuth client ID used by the nomulus tool. */
@Provides
@Config("clientSecretFilename")
public static String provideClientSecretFilename(RegistryConfigSettings config) {
return config.registryTool.clientSecretFilename;
@Config("toolsClientId")
public static String provideToolsClientId(RegistryConfigSettings config) {
return config.registryTool.clientId;
}
/** OAuth client secret used by the nomulus tool. */
@Provides
@Config("toolsClientSecret")
public static String provideToolsClientSecret(RegistryConfigSettings config) {
return config.registryTool.clientSecret;
}
@Provides
@ -1225,6 +1289,18 @@ public final class RegistryConfig {
return ImmutableList.copyOf(Splitter.on('\n').split(config.registryPolicy.rdapTos));
}
/**
* Link to static Web page with RDAP terms of service. Displayed in RDAP responses.
*
* @see google.registry.rdap.RdapJsonFormatter
*/
@Provides
@Config("rdapTosStaticUrl")
@Nullable
public static String provideRdapTosStaticUrl(RegistryConfigSettings config) {
return config.registryPolicy.rdapTosStaticUrl;
}
/**
* Returns the help text to be used by RDAP.
*
@ -1235,30 +1311,38 @@ public final class RegistryConfig {
@Provides
@Config("rdapHelpMap")
public static ImmutableMap<String, RdapNoticeDescriptor> provideRdapHelpMap(
@Config("rdapTos") ImmutableList<String> rdapTos) {
@Config("rdapTos") ImmutableList<String> rdapTos,
@Config("rdapTosStaticUrl") @Nullable String rdapTosStaticUrl) {
return new ImmutableMap.Builder<String, RdapNoticeDescriptor>()
.put("/", RdapNoticeDescriptor.builder()
.setTitle("RDAP Help")
.setDescription(ImmutableList.of(
"domain/XXXX",
"nameserver/XXXX",
"entity/XXXX",
"domains?name=XXXX",
"domains?nsLdhName=XXXX",
"domains?nsIp=XXXX",
"nameservers?name=XXXX",
"nameservers?ip=XXXX",
"entities?fn=XXXX",
"entities?handle=XXXX",
"help/XXXX"))
.setLinkValueSuffix("help/")
.setLinkHrefUrlString("https://github.com/google/nomulus/blob/master/docs/rdap.md")
.build())
.put("/tos", RdapNoticeDescriptor.builder()
.setTitle("RDAP Terms of Service")
.setDescription(rdapTos)
.setLinkValueSuffix("help/tos")
.build())
.put(
"/",
RdapNoticeDescriptor.builder()
.setTitle("RDAP Help")
.setDescription(
ImmutableList.of(
"domain/XXXX",
"nameserver/XXXX",
"entity/XXXX",
"domains?name=XXXX",
"domains?nsLdhName=XXXX",
"domains?nsIp=XXXX",
"nameservers?name=XXXX",
"nameservers?ip=XXXX",
"entities?fn=XXXX",
"entities?handle=XXXX",
"help/XXXX"))
.setLinkValueSuffix("help/")
.setLinkHrefUrlString(
"https://github.com/google/nomulus/blob/master/docs/rdap.md")
.build())
.put(
"/tos",
RdapNoticeDescriptor.builder()
.setTitle("RDAP Terms of Service")
.setDescription(rdapTos)
.setLinkValueSuffix("help/tos")
.setLinkHrefUrlString(rdapTosStaticUrl)
.build())
.build();
}
@ -1319,6 +1403,15 @@ public final class RegistryConfig {
return getProjectId() + "-snapshots";
}
/**
* Returns the Google Cloud Storage bucket for storing Datastore backups.
*
* @see google.registry.export.BackupDatastoreAction
*/
public static String getDatastoreBackupsBucket() {
return "gs://" + getProjectId() + "-datastore-backups";
}
/**
* Number of sharded commit log buckets.
*
@ -1481,12 +1574,6 @@ public final class RegistryConfig {
return Duration.standardDays(CONFIG_SETTINGS.get().registryPolicy.contactAutomaticTransferDays);
}
/** Provided for testing. */
@VisibleForTesting
public static String getClientSecretFilename() {
return CONFIG_SETTINGS.get().registryTool.clientSecretFilename;
}
/**
* Memoizes loading of the {@link RegistryConfigSettings} POJO.
*
@ -1495,7 +1582,7 @@ public final class RegistryConfig {
*/
@VisibleForTesting
public static final Supplier<RegistryConfigSettings> CONFIG_SETTINGS =
memoize(YamlUtils::getConfigSettings);
memoize(RegistryConfig::getConfigSettings);
private static String formatComments(String text) {
return Splitter.on('\n').omitEmptyStrings().trimResults().splitToList(text).stream()

View file

@ -58,6 +58,7 @@ public class RegistryConfigSettings {
public static class CredentialOAuth {
public List<String> defaultCredentialOauthScopes;
public List<String> delegatedCredentialOauthScopes;
public List<String> localCredentialOauthScopes;
}
/** Configuration options for the G Suite account used by Nomulus. */
@ -66,6 +67,7 @@ public class RegistryConfigSettings {
public String outgoingEmailAddress;
public String outgoingEmailDisplayName;
public String adminAccountEmailAddress;
public String supportGroupEmailAddress;
}
/** Configuration options for registry policy. */
@ -75,6 +77,7 @@ public class RegistryConfigSettings {
public String customLogicFactoryClass;
public String whoisCommandFactoryClass;
public String allocationTokenCustomLogicClass;
public String dnsCountQueryCoordinatorClass;
public int contactAutomaticTransferDays;
public String greetingServerId;
public List<String> registrarChangesNotificationEmailAddresses;
@ -88,7 +91,9 @@ public class RegistryConfigSettings {
public String reservedTermsExportDisclaimer;
public String whoisDisclaimer;
public String rdapTos;
public String rdapTosStaticUrl;
public String spec11EmailBodyTemplate;
public boolean requireSslCertificates;
}
/** Configuration for Cloud Datastore. */
@ -129,6 +134,7 @@ public class RegistryConfigSettings {
/** Configuration for monthly invoices. */
public static class Billing {
public List<String> invoiceEmailRecipients;
public String invoiceFilePrefix;
}
/** Configuration for Registry Data Escrow (RDE). */
@ -160,7 +166,6 @@ public class RegistryConfigSettings {
public String sheetExportId;
public String alertRecipientEmailAddress;
public String spec11ReplyToEmailAddress;
public String alertEmailSenderDomain;
public int asyncDeleteDelaySeconds;
}
@ -178,6 +183,7 @@ public class RegistryConfigSettings {
/** Configuration options for the registry tool. */
public static class RegistryTool {
public String clientSecretFilename;
public String clientId;
public String clientSecret;
}
}

View file

@ -47,11 +47,25 @@ public enum RegistryEnvironment {
*/
UNITTEST;
/** Sets this enum as the name of the registry environment. */
public RegistryEnvironment setup() {
return setup(SystemPropertySetter.PRODUCTION_IMPL);
}
/**
* Sets this enum as the name of the registry environment using specified {@link
* SystemPropertySetter}.
*/
public RegistryEnvironment setup(SystemPropertySetter systemPropertySetter) {
systemPropertySetter.setProperty(PROPERTY, name());
return this;
}
/** Returns environment configured by system property {@value #PROPERTY}. */
public static RegistryEnvironment get() {
return valueOf(Ascii.toUpperCase(System.getProperty(PROPERTY, UNITTEST.name())));
}
/** System property for configuring which environment we should use. */
public static final String PROPERTY = "google.registry.environment";
private static final String PROPERTY = "google.registry.environment";
}

View file

@ -0,0 +1,47 @@
// Copyright 2018 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package google.registry.config;
import javax.annotation.Nullable;
/**
* Wrapper interface around {@link System#setProperty(String, String)} and {@link
* System#clearProperty(String)}. Tests that modify system properties may provide custom
* implementations that keeps track of changes and restores original property values on test
* completion.
*/
public interface SystemPropertySetter {
/**
* Updates the system property specified by {@code key}. If {@code value} is not null, {@link
* System#setProperty(String, String)} is invoked; otherwise {@link System#clearProperty(String)}
* is invoked.
*/
SystemPropertySetter setProperty(String key, @Nullable String value);
/** Production implementation of {@link SystemPropertySetter}. */
SystemPropertySetter PRODUCTION_IMPL =
new SystemPropertySetter() {
@Override
public SystemPropertySetter setProperty(String key, @Nullable String value) {
if (value == null) {
System.clearProperty(key);
} else {
System.setProperty(key, value);
}
return this;
}
};
}

View file

@ -22,6 +22,9 @@ gSuite:
domainName: domain-registry.example
# Display name and email address used on outgoing emails through G Suite.
# The email address must be valid and have permission in the GAE app to send
# emails. For more info see:
# https://cloud.google.com/appengine/docs/standard/java/mail/#who_can_send_mail
outgoingEmailDisplayName: Example Registry
outgoingEmailAddress: noreply@project-id.appspotmail.com
@ -29,6 +32,10 @@ gSuite:
# logging in to perform administrative actions, not sending emails.
adminAccountEmailAddress: admin@example.com
# Group containing the emails of the support accounts. These accounts will be
# given "ADMIN" role on the registrar console.
supportGroupEmailAddress: support@example.com
registryPolicy:
# Repository identifier (ROID) suffix for contacts and hosts.
contactAndHostRoidSuffix: ROID
@ -48,6 +55,10 @@ registryPolicy:
# See flows/domain/token/AllocationTokenCustomLogic.java
allocationTokenCustomLogicClass: google.registry.flows.domain.token.AllocationTokenCustomLogic
# Custom logic class for handling DNS query count reporting for ICANN.
# See reporting/icann/DnsCountQueryCoordinator.java
dnsCountQueryCoordinatorClass: google.registry.reporting.icann.BasicDnsCountQueryCoordinator
# Length of time after which contact transfers automatically conclude.
contactAutomaticTransferDays: 5
@ -145,6 +156,10 @@ registryPolicy:
We reserve the right to modify this agreement at any time.
# Link to static Web page with RDAP terms of service. Displayed in RDAP
# responses. If null, no static Web page link is generated.
rdapTosStaticUrl: null
# Body of the spec 11 email sent to registrars.
# Items in braces are to be replaced.
spec11EmailBodyTemplate: |
@ -172,6 +187,11 @@ registryPolicy:
If you have any questions regarding this notice, please contact
{REPLY_TO_EMAIL}.
# Whether to require an SSL certificate hash in order to be able to log in
# via EPP and run commands. This can be false for testing environments but
# should generally be true for production environments, for added security.
requireSslCertificates: true
datastore:
# Number of commit log buckets in Datastore. Lowering this after initial
# install risks losing up to a days' worth of differential backups.
@ -186,13 +206,14 @@ datastore:
baseOfyRetryMillis: 100
cloudDns:
# Set both properties to null in Production.
# The root url for the Cloud DNS API. Set this to a non-null value to
# override the default API server used by the googleapis library.
rootUrl: null
rootUrl: https://staging-www.sandbox.googleapis.com
# The service endpoint path for the Cloud DNS API. Set this to a non-null
# value to override the default API path used by the googleapis library.
servicePath: null
servicePath: dns/v2beta1_staging/projects/
caching:
# Length of time that a singleton should be cached before expiring.
@ -245,8 +266,8 @@ oAuth:
- https://www.googleapis.com/auth/userinfo.email
# OAuth client IDs that are allowed to authenticate and communicate with
# backend services, e. g. nomulus tool, EPP proxy, etc. All client_id values
# used in client_secret.json files for associated tooling should be included
# backend services, e. g. nomulus tool, EPP proxy, etc. The client_id value
# used in registryTool.clientId field for associated tooling should be included
# in this list. Client IDs are typically of the format
# numbers-alphanumerics.apps.googleusercontent.com
allowedOauthClientIds: []
@ -271,7 +292,16 @@ credentialOAuth:
- https://www.googleapis.com/auth/admin.directory.group
# View and manage group settings in Group Settings API.
- https://www.googleapis.com/auth/apps.groups.settings
# OAuth scopes required to create a credential locally in for the nomulus tool.
localCredentialOauthScopes:
# View and manage data in all Google Cloud APIs.
- https://www.googleapis.com/auth/cloud-platform
# Call App Engine APIs locally.
- https://www.googleapis.com/auth/appengine.apis
# View your email address.
- https://www.googleapis.com/auth/userinfo.email
# View and manage your applications deployed on Google App Engine
- https://www.googleapis.com/auth/appengine.admin
icannReporting:
# URL we PUT monthly ICANN transactions reports to.
@ -282,6 +312,7 @@ icannReporting:
billing:
invoiceEmailRecipients: []
invoiceFilePrefix: REG-INV
rde:
# URL prefix of ICANN's server to upload RDE reports to. Nomulus adds /TLD/ID
@ -339,19 +370,11 @@ misc:
# to be a deliverable email address in case the registrars want to contact us.
spec11ReplyToEmailAddress: reply-to@example.com
# Domain for the email address we send alert summary emails from.
alertEmailSenderDomain: appspotmail.com
# How long to delay processing of asynchronous deletions. This should always
# be longer than eppResourceCachingSeconds, to prevent deleted contacts or
# hosts from being used on domains.
asyncDeleteDelaySeconds: 90
cloudDns:
# CloudDns testing config. Set both properties to null in Production.
rootUrl: https://staging-www.sandbox.googleapis.com
servicePath: dns/v2beta1_staging/projects/
beam:
# The default zone to run Apache Beam (Cloud Dataflow) jobs in.
defaultJobZone: us-east1-c
@ -372,5 +395,7 @@ keyring:
# Configuration options relevant to the "nomulus" registry tool.
registryTool:
# Name of the client secret file used for authenticating with App Engine.
clientSecretFilename: /google/registry/tools/resources/client_secret.json
# OAuth client Id used by the tool.
clientId: YOUR_CLIENT_ID
# OAuth client secret used by the tool.
clientSecret: YOUR_CLIENT_SECRET

View file

@ -18,6 +18,7 @@ gSuite:
outgoingEmailDisplayName: placeholder
outgoingEmailAddress: placeholder
adminAccountEmailAddress: placeholder
supportGroupEmailAddress: placeholder
registryPolicy:
contactAndHostRoidSuffix: placeholder
@ -43,6 +44,7 @@ icannReporting:
oAuth:
allowedOauthClientIds:
- placeholder.apps.googleusercontent.com
- placeholder-for-proxy
rde:
reportUrlPrefix: https://ry-api.icann.org/report/registry-escrow-report
@ -68,3 +70,7 @@ keyring:
activeKeyring: KMS
kms:
projectId: placeholder
registryTool:
clientId: placeholder.apps.googleusercontent.com
clientSecret: placeholder

View file

@ -22,3 +22,8 @@ caching:
staticPremiumListMaxCachedEntries: 50
eppResourceCachingEnabled: true
eppResourceCachingSeconds: 0
# Remove the support G Suite group, because we don't want to try connecting to G Suite servers from
# tests
gSuite:
supportGroupEmailAddress:

View file

@ -65,13 +65,14 @@ public final class RefreshDnsAction implements Runnable {
private <T extends EppResource & ForeignKeyedEppResource>
T loadAndVerifyExistence(Class<T> clazz, String foreignKey) {
T resource = loadByForeignKey(clazz, foreignKey, clock.nowUtc());
if (resource == null) {
String typeName = clazz.getAnnotation(ExternalMessagingName.class).value();
throw new NotFoundException(
String.format("%s %s not found", typeName, domainOrHostName));
}
return resource;
return loadByForeignKey(clazz, foreignKey, clock.nowUtc())
.orElseThrow(
() ->
new NotFoundException(
String.format(
"%s %s not found",
clazz.getAnnotation(ExternalMessagingName.class).value(),
domainOrHostName)));
}
private static void verifyHostIsSubordinate(HostResource host) {

View file

@ -19,7 +19,7 @@ import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static google.registry.model.EppResourceUtils.loadByForeignKey;
import static google.registry.util.DomainNameUtils.getSecondLevelDomain;
import com.google.api.client.googleapis.json.GoogleJsonError.ErrorInfo;
import com.google.api.client.googleapis.json.GoogleJsonError;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.api.services.dns.Dns;
import com.google.api.services.dns.model.Change;
@ -120,9 +120,9 @@ public class CloudDnsWriter extends BaseDnsWriter {
// Canonicalize name
String absoluteDomainName = getAbsoluteHostName(domainName);
// Load the target domain. Note that it can be null if this domain was just deleted.
// Load the target domain. Note that it can be absent if this domain was just deleted.
Optional<DomainResource> domainResource =
Optional.ofNullable(loadByForeignKey(DomainResource.class, domainName, clock.nowUtc()));
loadByForeignKey(DomainResource.class, domainName, clock.nowUtc());
// Return early if no DNS records should be published.
// desiredRecordsBuilder is populated with an empty set to indicate that all existing records
@ -188,11 +188,10 @@ public class CloudDnsWriter extends BaseDnsWriter {
// Canonicalize name
String absoluteHostName = getAbsoluteHostName(hostName);
// Load the target host. Note that it can be null if this host was just deleted.
// Load the target host. Note that it can be absent if this host was just deleted.
// desiredRecords is populated with an empty set to indicate that all existing records
// should be deleted.
Optional<HostResource> host =
Optional.ofNullable(loadByForeignKey(HostResource.class, hostName, clock.nowUtc()));
Optional<HostResource> host = loadByForeignKey(HostResource.class, hostName, clock.nowUtc());
// Return early if the host is deleted.
if (!host.isPresent()) {
@ -390,12 +389,12 @@ public class CloudDnsWriter extends BaseDnsWriter {
try {
dnsConnection.changes().create(projectId, zoneName, change).execute();
} catch (GoogleJsonResponseException e) {
List<ErrorInfo> errors = e.getDetails().getErrors();
GoogleJsonError err = e.getDetails();
// We did something really wrong here, just give up and re-throw
if (errors.size() > 1) {
if (err == null || err.getErrors().size() > 1) {
throw new RuntimeException(e);
}
String errorReason = errors.get(0).getReason();
String errorReason = err.getErrors().get(0).getReason();
if (RETRYABLE_EXCEPTION_REASONS.contains(errorReason)) {
throw new ZoneStateException(errorReason);

View file

@ -14,6 +14,7 @@
package google.registry.dns.writer.dnsupdate;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static com.google.common.collect.Sets.intersection;
import static com.google.common.collect.Sets.union;
@ -126,9 +127,12 @@ public class DnsUpdateWriter extends BaseDnsWriter {
* this domain refresh request
*/
private void publishDomain(String domainName, String requestingHostName) {
DomainResource domain = loadByForeignKey(DomainResource.class, domainName, clock.nowUtc());
Optional<DomainResource> domainOptional =
loadByForeignKey(DomainResource.class, domainName, clock.nowUtc());
update.delete(toAbsoluteName(domainName), Type.ANY);
if (domain != null) {
// If the domain is now deleted, then don't update DNS for it.
if (domainOptional.isPresent()) {
DomainResource domain = domainOptional.get();
// As long as the domain exists, orphan glues should be cleaned.
deleteSubordinateHostAddressSet(domain, requestingHostName, update);
if (domain.shouldPublishToDns()) {
@ -213,9 +217,10 @@ public class DnsUpdateWriter extends BaseDnsWriter {
for (String hostName :
intersection(
domain.loadNameserverFullyQualifiedHostNames(), domain.getSubordinateHosts())) {
HostResource host = loadByForeignKey(HostResource.class, hostName, clock.nowUtc());
update.add(makeAddressSet(host));
update.add(makeV6AddressSet(host));
Optional<HostResource> host = loadByForeignKey(HostResource.class, hostName, clock.nowUtc());
checkState(host.isPresent(), "Host %s cannot be loaded", hostName);
update.add(makeAddressSet(host.get()));
update.add(makeV6AddressSet(host.get()));
}
}

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>backend</module>
<service>backend</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4</instance-class>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>default</module>
<service>default</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4</instance-class>

View file

@ -142,6 +142,24 @@
<target>backend</target>
</cron>
<cron>
<url><![CDATA[/_dr/cron/fanout?queue=export-snapshot&endpoint=/_dr/task/backupDatastore&runInEmpty]]></url>
<description>
This job fires off a Datastore managed-export job that generates snapshot files in GCS.
It also enqueues a new task to wait on the completion of that job and then loads the
resulting snapshot into BigQuery.
</description>
<!--
Keep google.registry.export.CheckBackupAction.MAXIMUM_BACKUP_RUNNING_TIME less than
this interval.
To facilitate troubleshooting, run after the exportSnapshot task below (typically finishes in
90 minutes in production) has completed.
TODO(b/122271637): move to 06:00 (UTC) when the exportSnapshot task is removed. -->
<schedule>every day 09:00</schedule>
<target>backend</target>
</cron>
<cron>
<url><![CDATA[/_dr/cron/fanout?queue=export-snapshot&endpoint=/_dr/task/exportSnapshot&runInEmpty]]></url>
<description>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>pubapi</module>
<service>pubapi</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4</instance-class>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>tools</module>
<service>tools</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4</instance-class>

View file

@ -174,6 +174,24 @@
<url-pattern>/_dr/dnsRefresh</url-pattern>
</servlet-mapping>
<!-- Exports a Datastore backup snapshot to GCS. -->
<servlet-mapping>
<servlet-name>backend-servlet</servlet-name>
<url-pattern>/_dr/task/backupDatastore</url-pattern>
</servlet-mapping>
<!-- Checks the completion of a Datastore backup snapshot. -->
<servlet-mapping>
<servlet-name>backend-servlet</servlet-name>
<url-pattern>/_dr/task/checkDatastoreBackup</url-pattern>
</servlet-mapping>
<!-- Loads a Datastore backup snapshot into BigQuery. -->
<servlet-mapping>
<servlet-name>backend-servlet</servlet-name>
<url-pattern>/_dr/task/uploadDatastoreBackup</url-pattern>
</servlet-mapping>
<!-- Exports a Datastore backup snapshot to GCS. -->
<servlet-mapping>
<servlet-name>backend-servlet</servlet-name>

View file

@ -69,10 +69,14 @@
<property name="clientId" direction="asc"/>
<property name="eventTime" direction="asc"/>
</datastore-index>
<!-- For the history viewer. -->
<!-- For querying HistoryEntries. -->
<datastore-index kind="HistoryEntry" ancestor="true" source="manual">
<property name="modificationTime" direction="asc"/>
</datastore-index>
<datastore-index kind="HistoryEntry" ancestor="false" source="manual">
<property name="clientId" direction="asc"/>
<property name="modificationTime" direction="asc"/>
</datastore-index>
<!-- For RDAP. -->
<datastore-index kind="DomainBase" ancestor="false" source="manual">
<property name="^i" direction="asc"/>

View file

@ -37,6 +37,12 @@
<url-pattern>/registrar-settings</url-pattern>
</servlet-mapping>
<!-- OT&E creation console. -->
<servlet-mapping>
<servlet-name>frontend-servlet</servlet-name>
<url-pattern>/registrar-ote-setup</url-pattern>
</servlet-mapping>
<!-- Security config -->
<security-constraint>
<web-resource-collection>

View file

@ -72,6 +72,12 @@
<url-pattern>/_dr/task/resaveAllHistoryEntries</url-pattern>
</servlet-mapping>
<!-- Mapreduce to delete all domain applications. -->
<servlet-mapping>
<servlet-name>tools-servlet</servlet-name>
<url-pattern>/_dr/task/killAllDomainApplications</url-pattern>
</servlet-mapping>
<!-- Mapreduce to delete EppResources, children, and indices. -->
<servlet-mapping>
<servlet-name>tools-servlet</servlet-name>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>backend</module>
<service>backend</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4</instance-class>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>default</module>
<service>default</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4_1G</instance-class>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>pubapi</module>
<service>pubapi</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4</instance-class>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>tools</module>
<service>tools</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4</instance-class>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>backend</module>
<service>backend</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4</instance-class>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>default</module>
<service>default</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4_1G</instance-class>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>pubapi</module>
<service>pubapi</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4</instance-class>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>tools</module>
<service>tools</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4</instance-class>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>backend</module>
<service>backend</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4_1G</instance-class>

View file

@ -1,15 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>default</module>
<service>default</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4_1G</instance-class>
<manual-scaling>
<instances>30</instances>
<instances>15</instances>
</manual-scaling>
<system-properties>

View file

@ -174,6 +174,24 @@
<target>backend</target>
</cron>
<cron>
<url><![CDATA[/_dr/cron/fanout?queue=export-snapshot&endpoint=/_dr/task/backupDatastore&runInEmpty]]></url>
<description>
This job fires off a Datastore managed-export job that generates snapshot files in GCS.
It also enqueues a new task to wait on the completion of that job and then loads the
resulting snapshot into BigQuery.
</description>
<!--
Keep google.registry.export.CheckBackupAction.MAXIMUM_BACKUP_RUNNING_TIME less than
this interval.
To facilitate troubleshooting, run after the exportSnapshot task below (typically finishes in
90 minutes in production) has completed.
TODO(b/122271637): move to 06:00 (UTC) when the exportSnapshot task is removed. -->
<schedule>every day 09:00</schedule>
<target>backend</target>
</cron>
<cron>
<url><![CDATA[/_dr/cron/fanout?queue=export-snapshot&endpoint=/_dr/task/exportSnapshot&runInEmpty]]></url>
<description>
@ -289,13 +307,12 @@
<cron>
<url><![CDATA[/_dr/cron/fanout?queue=retryable-cron-tasks&endpoint=/_dr/task/generateSpec11&runInEmpty]]></url>
<description>
Starts the beam/spec11/Spec11Pipeline Dataflow template, which creates last month's Spec11
report. This report is stored in gs://[PROJECT-ID]-reporting/icann/spec11/yyyy-MM.
Upon Dataflow job completion, sends an e-mail to all registrars with domain registrations
flagged by the SafeBrowsing API.
Starts the beam/spec11/Spec11Pipeline Dataflow template, which creates today's Spec11
report. This report is stored in gs://[PROJECT-ID]-reporting/icann/spec11/yyyy-MM/.
This job will only send email notifications on the second of every month.
See GenerateSpec11ReportAction for more details.
</description>
<schedule>2 of month 15:00</schedule>
<schedule>every day 15:00</schedule>
<target>backend</target>
</cron>
</cronentries>

View file

@ -1,15 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>pubapi</module>
<service>pubapi</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4_1G</instance-class>
<manual-scaling>
<instances>20</instances>
<instances>10</instances>
</manual-scaling>
<system-properties>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>tools</module>
<service>tools</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4_1G</instance-class>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>backend</module>
<service>backend</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4</instance-class>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>default</module>
<service>default</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>F4_1G</instance-class>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>pubapi</module>
<service>pubapi</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4</instance-class>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>tools</module>
<service>tools</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4</instance-class>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>backend</module>
<service>backend</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4</instance-class>

View file

@ -1,15 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>default</module>
<service>default</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4_1G</instance-class>
<manual-scaling>
<instances>30</instances>
<instances>10</instances>
</manual-scaling>
<system-properties>

View file

@ -151,6 +151,24 @@
<target>backend</target>
</cron>
<cron>
<url><![CDATA[/_dr/cron/fanout?queue=export-snapshot&endpoint=/_dr/task/backupDatastore&runInEmpty]]></url>
<description>
This job fires off a Datastore managed-export job that generates snapshot files in GCS.
It also enqueues a new task to wait on the completion of that job and then loads the
resulting snapshot into BigQuery.
</description>
<!--
Keep google.registry.export.CheckBackupAction.MAXIMUM_BACKUP_RUNNING_TIME less than
this interval.
To facilitate troubleshooting, run after the exportSnapshot task below (typically finishes in
90 minutes in production) has completed.
TODO(b/122271637): move to 06:00 (UTC) when the exportSnapshot task is removed. -->
<schedule>every day 09:00</schedule>
<target>backend</target>
</cron>
<cron>
<url><![CDATA[/_dr/cron/fanout?queue=export-snapshot&endpoint=/_dr/task/exportSnapshot&runInEmpty]]></url>
<description>

View file

@ -1,15 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>pubapi</module>
<service>pubapi</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4_1G</instance-class>
<manual-scaling>
<instances>20</instances>
<instances>5</instances>
</manual-scaling>
<system-properties>

View file

@ -1,10 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<appengine-web-app xmlns="http://appengine.google.com/ns/1.0">
<application>domain-registry</application>
<version>1</version>
<runtime>java8</runtime>
<module>tools</module>
<service>tools</service>
<threadsafe>true</threadsafe>
<sessions-enabled>true</sessions-enabled>
<instance-class>B4</instance-class>

View file

@ -10,6 +10,7 @@ java_library(
deps = [
"//java/google/registry/bigquery",
"//java/google/registry/config",
"//java/google/registry/export/datastore",
"//java/google/registry/gcs",
"//java/google/registry/groups",
"//java/google/registry/mapreduce",

View file

@ -0,0 +1,86 @@
// Copyright 2018 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package google.registry.export;
import static google.registry.export.CheckBackupAction.enqueuePollTask;
import static google.registry.request.Action.Method.POST;
import com.google.common.flogger.FluentLogger;
import google.registry.config.RegistryConfig;
import google.registry.export.datastore.DatastoreAdmin;
import google.registry.export.datastore.Operation;
import google.registry.request.Action;
import google.registry.request.HttpException.InternalServerErrorException;
import google.registry.request.Response;
import google.registry.request.auth.Auth;
import javax.inject.Inject;
/**
 * Action that kicks off a Datastore managed-export job writing a snapshot to Google Cloud
 * Storage. Introduced as an experimental feature that will eventually replace {@link
 * ExportSnapshotAction}.
 *
 * <p>This is step one of the four-step snapshot-export workflow, where each step triggers the
 * next on success:
 *
 * <ol>
 *   <li>Export the snapshot to Google Cloud Storage (this action).
 *   <li>{@link CheckBackupAction} polls until the export finishes.
 *   <li>{@link UploadDatastoreBackupAction} loads the GCS data into BigQuery.
 *   <li>{@link UpdateSnapshotViewAction} repoints the view in latest_datastore_export.
 * </ol>
 */
@Action(
    path = BackupDatastoreAction.PATH,
    method = POST,
    automaticallyPrintOk = true,
    auth = Auth.AUTH_INTERNAL_ONLY)
public class BackupDatastoreAction implements Runnable {

  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  /** Queue to use for enqueuing the task that will actually launch the backup. */
  static final String QUEUE = "export-snapshot"; // See queue.xml.

  static final String PATH = "/_dr/task/backupDatastore"; // See web.xml.

  @Inject DatastoreAdmin datastoreAdmin;
  @Inject Response response;

  @Inject
  BackupDatastoreAction() {}

  @Override
  public void run() {
    try {
      // Kick off the managed export of all backup kinds into the configured GCS bucket.
      Operation exportOperation =
          datastoreAdmin
              .export(RegistryConfig.getDatastoreBackupsBucket(), ExportConstants.getBackupKinds())
              .execute();
      String exportName = exportOperation.getName();

      // Enqueue a poll task to monitor the backup and load REPORTING-related kinds into bigquery.
      enqueuePollTask(exportName, ExportConstants.getReportingKinds());

      // Report the export's identity and destination both in the logs and in the HTTP response.
      String statusMessage =
          String.format(
              "Datastore backup started with name: %s\nSaving to %s",
              exportName, exportOperation.getExportFolderUrl());
      logger.atInfo().log(statusMessage);
      response.setPayload(statusMessage);
    } catch (Throwable e) {
      throw new InternalServerErrorException("Exception occurred while backing up datastore.", e);
    }
  }
}

View file

@ -0,0 +1,189 @@
// Copyright 2018 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package google.registry.export;
import static com.google.common.collect.Sets.intersection;
import static google.registry.export.UploadDatastoreBackupAction.enqueueUploadBackupTask;
import static google.registry.request.Action.Method.GET;
import static google.registry.request.Action.Method.POST;
import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND;
import com.google.api.client.googleapis.json.GoogleJsonResponseException;
import com.google.appengine.api.taskqueue.QueueFactory;
import com.google.appengine.api.taskqueue.TaskHandle;
import com.google.appengine.api.taskqueue.TaskOptions;
import com.google.appengine.api.taskqueue.TaskOptions.Method;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.common.flogger.FluentLogger;
import google.registry.export.datastore.DatastoreAdmin;
import google.registry.export.datastore.Operation;
import google.registry.request.Action;
import google.registry.request.HttpException;
import google.registry.request.HttpException.BadRequestException;
import google.registry.request.HttpException.InternalServerErrorException;
import google.registry.request.HttpException.NoContentException;
import google.registry.request.HttpException.NotModifiedException;
import google.registry.request.Parameter;
import google.registry.request.RequestMethod;
import google.registry.request.Response;
import google.registry.request.auth.Auth;
import google.registry.util.Clock;
import java.io.IOException;
import java.util.Set;
import javax.inject.Inject;
import org.joda.time.Duration;
import org.joda.time.PeriodType;
import org.joda.time.format.PeriodFormat;
/**
 * Action that checks the status of a snapshot, and if complete, trigger loading it into BigQuery.
 *
 * <p>On POST, polls the named export operation: if it is still running, responds 304 so the task
 * queue retries; if it failed or exceeded the time limit, responds 204 to abandon it; if it
 * succeeded, enqueues the BigQuery upload task. On GET, prints the export's raw status (useful
 * for manual troubleshooting).
 */
@Action(
    path = CheckBackupAction.PATH,
    method = {POST, GET},
    automaticallyPrintOk = true,
    auth = Auth.AUTH_INTERNAL_ONLY)
public class CheckBackupAction implements Runnable {

  /** Parameter names for passing parameters into this action. */
  static final String CHECK_BACKUP_NAME_PARAM = "name";

  static final String CHECK_BACKUP_KINDS_TO_LOAD_PARAM = "kindsToLoad";

  /** Action-specific details needed for enqueuing tasks against itself. */
  static final String QUEUE = "export-snapshot-poll"; // See queue.xml.

  static final String PATH = "/_dr/task/checkDatastoreBackup"; // See web.xml.

  /** Delay before a newly enqueued poll task first executes. */
  static final Duration POLL_COUNTDOWN = Duration.standardMinutes(2);

  /** The maximum amount of time we allow a backup to run before abandoning it. */
  static final Duration MAXIMUM_BACKUP_RUNNING_TIME = Duration.standardHours(20);

  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  @Inject DatastoreAdmin datastoreAdmin;
  @Inject Clock clock;
  @Inject Response response;
  @Inject @RequestMethod Action.Method requestMethod;

  @Inject
  @Parameter(CHECK_BACKUP_NAME_PARAM)
  String backupName;

  @Inject
  @Parameter(CHECK_BACKUP_KINDS_TO_LOAD_PARAM)
  String kindsToLoadParam;

  @Inject
  CheckBackupAction() {}

  @Override
  public void run() {
    try {
      if (requestMethod == POST) {
        checkAndLoadBackupIfComplete();
      } else {
        // This is a GET request.
        // TODO(weiminyu): consider moving this functionality to Registry tool.
        response.setPayload(getExportStatus().toPrettyString());
      }
    } catch (HttpException e) {
      // Rethrow and let caller propagate status code and error message to the response.
      // See google.registry.request.RequestHandler#handleRequest.
      throw e;
    } catch (Throwable e) {
      // Fix: the original wrapped this constant message in a no-arg String.format() call,
      // which is a pointless (and lint-flagged) use of a format method; pass it directly.
      throw new InternalServerErrorException(
          "Exception occurred while checking datastore exports.", e);
    }
  }

  /**
   * Fetches the current status of the export operation named {@code backupName} from the
   * Datastore Admin API.
   *
   * @throws BadRequestException if the API returns 404 for the name, so the task queue does not
   *     retry a request that can never succeed
   */
  private Operation getExportStatus() throws IOException {
    try {
      return datastoreAdmin.get(backupName).execute();
    } catch (GoogleJsonResponseException e) {
      if (e.getStatusCode() == SC_NOT_FOUND) {
        String message = String.format("Bad backup name %s: %s", backupName, e.getMessage());
        // TODO(b/19081569): Ideally this would return a 2XX error so the task would not be
        // retried but we might abandon backups that start late and haven't yet written to
        // Datastore. We could fix that by replacing this with a two-phase polling strategy.
        throw new BadRequestException(message, e);
      }
      throw e;
    }
  }

  /**
   * Polls the export once and, if it has completed successfully, enqueues the BigQuery upload
   * for the intersection of the exported kinds and {@code kindsToLoadParam}.
   *
   * @throws NotModifiedException (304) if the export may still be running, to trigger a retry
   * @throws NoContentException (204) if the export failed or ran too long, to stop retrying
   */
  private void checkAndLoadBackupIfComplete() throws IOException {
    Set<String> kindsToLoad = ImmutableSet.copyOf(Splitter.on(',').split(kindsToLoadParam));
    Operation backup = getExportStatus();
    if (backup.isProcessing()
        && backup.getRunningTime(clock).isShorterThan(MAXIMUM_BACKUP_RUNNING_TIME)) {
      // Backup might still be running, so send a 304 to have the task retry.
      throw new NotModifiedException(
          String.format(
              "Datastore backup %s still in progress: %s", backupName, backup.getProgress()));
    }
    if (!backup.isSuccessful()) {
      // Declare the backup a lost cause, and send 204 No Content so the task will
      // not be retried.
      String message =
          String.format(
              "Datastore backup %s abandoned - not complete after %s. Progress: %s",
              backupName,
              PeriodFormat.getDefault()
                  .print(
                      backup
                          .getRunningTime(clock)
                          .toPeriod()
                          .normalizedStandard(PeriodType.dayTime().withMillisRemoved())),
              backup.getProgress());
      throw new NoContentException(message);
    }

    String backupId = backup.getExportId();
    // Log a warning if kindsToLoad is not a subset of the exported kinds.
    if (!backup.getKinds().containsAll(kindsToLoad)) {
      logger.atWarning().log(
          "Kinds to load included non-exported kinds: %s",
          Sets.difference(kindsToLoad, backup.getKinds()));
    }
    // Load kinds from the backup, limited to those also in kindsToLoad (if it's present).
    ImmutableSet<String> exportedKindsToLoad =
        ImmutableSet.copyOf(intersection(backup.getKinds(), kindsToLoad));
    String message = String.format("Datastore backup %s complete - ", backupName);
    if (exportedKindsToLoad.isEmpty()) {
      message += "no kinds to load into BigQuery";
    } else {
      enqueueUploadBackupTask(backupId, backup.getExportFolderUrl(), exportedKindsToLoad);
      message += "BigQuery load task enqueued";
    }
    logger.atInfo().log(message);
    response.setPayload(message);
  }

  /** Enqueue a poll task to monitor the named backup for completion. */
  static TaskHandle enqueuePollTask(String backupId, ImmutableSet<String> kindsToLoad) {
    return QueueFactory.getQueue(QUEUE)
        .add(
            TaskOptions.Builder.withUrl(PATH)
                .method(Method.POST)
                .countdownMillis(POLL_COUNTDOWN.getMillis())
                .param(CHECK_BACKUP_NAME_PARAM, backupId)
                .param(CHECK_BACKUP_KINDS_TO_LOAD_PARAM, Joiner.on(',').join(kindsToLoad)));
  }
}

View file

@ -25,6 +25,8 @@ import static google.registry.export.LoadSnapshotAction.LOAD_SNAPSHOT_KINDS_PARA
import static google.registry.export.UpdateSnapshotViewAction.UPDATE_SNAPSHOT_DATASET_ID_PARAM;
import static google.registry.export.UpdateSnapshotViewAction.UPDATE_SNAPSHOT_KIND_PARAM;
import static google.registry.export.UpdateSnapshotViewAction.UPDATE_SNAPSHOT_TABLE_ID_PARAM;
import static google.registry.export.UpdateSnapshotViewAction.UPDATE_SNAPSHOT_VIEWNAME_PARAM;
import static google.registry.export.UploadDatastoreBackupAction.UPLOAD_BACKUP_FOLDER_PARAM;
import static google.registry.request.RequestParameters.extractRequiredHeader;
import static google.registry.request.RequestParameters.extractRequiredParameter;
@ -56,12 +58,24 @@ public final class ExportRequestModule {
return extractRequiredParameter(req, UPDATE_SNAPSHOT_KIND_PARAM);
}
@Provides
@Parameter(UPDATE_SNAPSHOT_VIEWNAME_PARAM)
static String provideUpdateSnapshotViewName(HttpServletRequest req) {
return extractRequiredParameter(req, UPDATE_SNAPSHOT_VIEWNAME_PARAM);
}
@Provides
@Parameter(LOAD_SNAPSHOT_FILE_PARAM)
static String provideLoadSnapshotFile(HttpServletRequest req) {
return extractRequiredParameter(req, LOAD_SNAPSHOT_FILE_PARAM);
}
@Provides
@Parameter(UPLOAD_BACKUP_FOLDER_PARAM)
static String provideSnapshotUrlPrefix(HttpServletRequest req) {
return extractRequiredParameter(req, UPLOAD_BACKUP_FOLDER_PARAM);
}
@Provides
@Parameter(LOAD_SNAPSHOT_ID_PARAM)
static String provideLoadSnapshotId(HttpServletRequest req) {

View file

@ -64,6 +64,8 @@ public class LoadSnapshotAction implements Runnable {
static final String SNAPSHOTS_DATASET = "snapshots";
static final String LATEST_SNAPSHOT_VIEW_NAME = "latest_datastore_export";
/** Servlet-specific details needed for enqueuing tasks against itself. */
static final String QUEUE = "export-snapshot"; // See queue.xml.
static final String PATH = "/_dr/task/loadSnapshot"; // See web.xml.
@ -131,7 +133,7 @@ public class LoadSnapshotAction implements Runnable {
// well-known view in BigQuery to point at the newly loaded snapshot table for this kind.
bigqueryPollEnqueuer.enqueuePollTask(
jobRef,
createViewUpdateTask(SNAPSHOTS_DATASET, tableId, kindName),
createViewUpdateTask(SNAPSHOTS_DATASET, tableId, kindName, LATEST_SNAPSHOT_VIEW_NAME),
getQueue(UpdateSnapshotViewAction.QUEUE));
builder.append(String.format(" - %s:%s\n", projectId, jobId));

View file

@ -43,10 +43,12 @@ public class UpdateSnapshotViewAction implements Runnable {
static final String UPDATE_SNAPSHOT_TABLE_ID_PARAM = "table";
static final String UPDATE_SNAPSHOT_KIND_PARAM = "kind";
private static final String TARGET_DATASET_NAME = "latest_datastore_export";
static final String UPDATE_SNAPSHOT_VIEWNAME_PARAM = "viewname";
/** Servlet-specific details needed for enqueuing tasks against itself. */
// For now this queue is shared by the backup workflows started by both ExportSnapshotAction
// and BackupDatastoreAction.
// TODO(weiminyu): update queue name (snapshot->backup) after ExportSnapshot flow is removed.
static final String QUEUE = "export-snapshot-update-view"; // See queue.xml.
static final String PATH = "/_dr/task/updateSnapshotView"; // See web.xml.
@ -65,6 +67,10 @@ public class UpdateSnapshotViewAction implements Runnable {
@Parameter(UPDATE_SNAPSHOT_KIND_PARAM)
String kindName;
@Inject
@Parameter(UPDATE_SNAPSHOT_VIEWNAME_PARAM)
String viewName;
@Inject
@Config("projectId")
String projectId;
@ -75,12 +81,14 @@ public class UpdateSnapshotViewAction implements Runnable {
UpdateSnapshotViewAction() {}
/** Create a task for updating a snapshot view. */
static TaskOptions createViewUpdateTask(String datasetId, String tableId, String kindName) {
static TaskOptions createViewUpdateTask(
String datasetId, String tableId, String kindName, String viewName) {
return TaskOptions.Builder.withUrl(PATH)
.method(Method.POST)
.param(UPDATE_SNAPSHOT_DATASET_ID_PARAM, datasetId)
.param(UPDATE_SNAPSHOT_TABLE_ID_PARAM, tableId)
.param(UPDATE_SNAPSHOT_KIND_PARAM, kindName);
.param(UPDATE_SNAPSHOT_KIND_PARAM, kindName)
.param(UPDATE_SNAPSHOT_VIEWNAME_PARAM, viewName);
}
@Override
@ -89,12 +97,10 @@ public class UpdateSnapshotViewAction implements Runnable {
SqlTemplate sqlTemplate =
SqlTemplate.create(
"#standardSQL\nSELECT * FROM `%PROJECT%.%SOURCE_DATASET%.%SOURCE_TABLE%`");
updateSnapshotView(datasetId, tableId, kindName, TARGET_DATASET_NAME, sqlTemplate);
updateSnapshotView(datasetId, tableId, kindName, viewName, sqlTemplate);
} catch (Throwable e) {
throw new InternalServerErrorException(
String.format(
"Could not update snapshot view %s for table %s", TARGET_DATASET_NAME, tableId),
e);
String.format("Could not update snapshot view %s for table %s", viewName, tableId), e);
}
}
@ -138,7 +144,7 @@ public class UpdateSnapshotViewAction implements Runnable {
.update(ref.getProjectId(), ref.getDatasetId(), ref.getTableId(), table)
.execute();
} catch (GoogleJsonResponseException e) {
if (e.getDetails().getCode() == 404) {
if (e.getDetails() != null && e.getDetails().getCode() == 404) {
bigquery.tables().insert(ref.getProjectId(), ref.getDatasetId(), table).execute();
} else {
logger.atWarning().withCause(e).log(

View file

@ -0,0 +1,181 @@
// Copyright 2018 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package google.registry.export;
import static com.google.appengine.api.taskqueue.QueueFactory.getQueue;
import static com.google.common.base.MoreObjects.firstNonNull;
import static google.registry.export.UpdateSnapshotViewAction.createViewUpdateTask;
import static google.registry.request.Action.Method.POST;
import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.model.Job;
import com.google.api.services.bigquery.model.JobConfiguration;
import com.google.api.services.bigquery.model.JobConfigurationLoad;
import com.google.api.services.bigquery.model.JobReference;
import com.google.api.services.bigquery.model.TableReference;
import com.google.appengine.api.taskqueue.TaskHandle;
import com.google.appengine.api.taskqueue.TaskOptions;
import com.google.appengine.api.taskqueue.TaskOptions.Method;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.flogger.FluentLogger;
import google.registry.bigquery.BigqueryUtils.SourceFormat;
import google.registry.bigquery.BigqueryUtils.WriteDisposition;
import google.registry.bigquery.CheckedBigquery;
import google.registry.config.RegistryConfig.Config;
import google.registry.export.BigqueryPollJobAction.BigqueryPollJobEnqueuer;
import google.registry.request.Action;
import google.registry.request.HttpException.BadRequestException;
import google.registry.request.HttpException.InternalServerErrorException;
import google.registry.request.Parameter;
import google.registry.request.auth.Auth;
import java.io.IOException;
import javax.inject.Inject;
/** Action to load a Datastore backup from Google Cloud Storage into BigQuery. */
@Action(path = UploadDatastoreBackupAction.PATH, method = POST, auth = Auth.AUTH_INTERNAL_ONLY)
public class UploadDatastoreBackupAction implements Runnable {

  /** Parameter names for passing parameters into the servlet. */
  static final String UPLOAD_BACKUP_ID_PARAM = "id";
  static final String UPLOAD_BACKUP_FOLDER_PARAM = "folder";
  static final String UPLOAD_BACKUP_KINDS_PARAM = "kinds";

  // BigQuery dataset that receives one table per (backup, kind) pair; created on demand by
  // checkedBigquery.ensureDataSetExists() in uploadBackup().
  static final String BACKUP_DATASET = "datastore_backups";

  /** Servlet-specific details needed for enqueuing tasks against itself. */
  static final String QUEUE = "export-snapshot"; // See queue.xml.
  static final String LATEST_BACKUP_VIEW_NAME = "latest_datastore_backup";
  static final String PATH = "/_dr/task/uploadDatastoreBackup"; // See web.xml.

  private static final FluentLogger logger = FluentLogger.forEnclosingClass();

  @Inject CheckedBigquery checkedBigquery;
  @Inject BigqueryPollJobEnqueuer bigqueryPollEnqueuer;
  @Inject @Config("projectId") String projectId;

  // GCS folder URL containing the Datastore export output for this backup.
  @Inject
  @Parameter(UPLOAD_BACKUP_FOLDER_PARAM)
  String backupFolderUrl;

  // Unique identifier of the backup; sanitized before use in BigQuery job/table names.
  @Inject
  @Parameter(UPLOAD_BACKUP_ID_PARAM)
  String backupId;

  // Comma-separated list of Datastore kinds to load, as enqueued by enqueueUploadBackupTask().
  @Inject
  @Parameter(UPLOAD_BACKUP_KINDS_PARAM)
  String backupKinds;

  @Inject
  UploadDatastoreBackupAction() {}

  /**
   * Enqueues a task on {@link #QUEUE} that will POST back to this action to start a backup load.
   *
   * @param backupId unique identifier of the backup
   * @param gcsFile GCS folder URL holding the exported backup data
   * @param kinds Datastore kinds to load; joined with ',' to match the Splitter in {@link #run}
   */
  public static TaskHandle enqueueUploadBackupTask(
      String backupId, String gcsFile, ImmutableSet<String> kinds) {
    return getQueue(QUEUE)
        .add(
            TaskOptions.Builder.withUrl(PATH)
                .method(Method.POST)
                .param(UPLOAD_BACKUP_ID_PARAM, backupId)
                .param(UPLOAD_BACKUP_FOLDER_PARAM, gcsFile)
                .param(UPLOAD_BACKUP_KINDS_PARAM, Joiner.on(',').join(kinds)));
  }

  /**
   * Runs the backup load, mapping failures to HTTP errors: an {@link IllegalArgumentException}
   * (bad request parameters) becomes a {@link BadRequestException}; anything else becomes an
   * {@link InternalServerErrorException} carrying the underlying message.
   */
  @Override
  public void run() {
    try {
      String message = uploadBackup(backupId, backupFolderUrl, Splitter.on(',').split(backupKinds));
      logger.atInfo().log("Loaded backup successfully: %s", message);
    } catch (Throwable e) {
      logger.atSevere().withCause(e).log("Error loading backup");
      if (e instanceof IllegalArgumentException) {
        throw new BadRequestException("Error calling load backup: " + e.getMessage(), e);
      } else {
        throw new InternalServerErrorException(
            "Error loading backup: " + firstNonNull(e.getMessage(), e.toString()));
      }
    }
  }

  /**
   * Starts one BigQuery load job per kind and enqueues a poll task for each that, on success,
   * repoints the {@link #LATEST_BACKUP_VIEW_NAME} view at the newly loaded table.
   *
   * @return a human-readable summary of the submitted load jobs (also used as the log/response
   *     payload by the caller)
   * @throws IOException if a BigQuery API call fails
   */
  private String uploadBackup(String backupId, String backupFolderUrl, Iterable<String> kinds)
      throws IOException {
    Bigquery bigquery = checkedBigquery.ensureDataSetExists(projectId, BACKUP_DATASET);
    String loadMessage =
        String.format("Loading Datastore backup %s from %s...", backupId, backupFolderUrl);
    logger.atInfo().log(loadMessage);
    String sanitizedBackupId = sanitizeForBigquery(backupId);
    StringBuilder builder = new StringBuilder(loadMessage + "\n");
    builder.append("Load jobs:\n");
    for (String kindName : kinds) {
      String jobId = String.format("load-backup-%s-%s", sanitizedBackupId, kindName);
      JobReference jobRef = new JobReference().setProjectId(projectId).setJobId(jobId);
      String sourceUri = getBackupInfoFileForKind(backupFolderUrl, kindName);
      String tableId = String.format("%s_%s", sanitizedBackupId, kindName);
      // Launch the load job.
      Job job = makeLoadJob(jobRef, sourceUri, tableId);
      bigquery.jobs().insert(projectId, job).execute();
      // Enqueue a task to check on the load job's completion, and if it succeeds, to update a
      // well-known view in BigQuery to point at the newly loaded backup table for this kind.
      bigqueryPollEnqueuer.enqueuePollTask(
          jobRef,
          createViewUpdateTask(BACKUP_DATASET, tableId, kindName, LATEST_BACKUP_VIEW_NAME),
          getQueue(UpdateSnapshotViewAction.QUEUE));
      builder.append(String.format(" - %s:%s\n", projectId, jobId));
      logger.atInfo().log("Submitted load job %s:%s", projectId, jobId);
    }
    return builder.toString();
  }

  /** Replaces every character not legal in a BigQuery table/job ID with an underscore. */
  static String sanitizeForBigquery(String backupId) {
    return backupId.replaceAll("[^a-zA-Z0-9_]", "_");
  }

  /**
   * Builds the GCS URI of the per-kind export metadata file that BigQuery uses as the load source
   * for a DATASTORE_BACKUP-format job.
   */
  @VisibleForTesting
  static String getBackupInfoFileForKind(String backupFolderUrl, String kindName) {
    return Joiner.on('/')
        .join(
            backupFolderUrl,
            "all_namespaces",
            String.format("kind_%s", kindName),
            String.format("all_namespaces_kind_%s.%s", kindName, "export_metadata"));
  }

  /**
   * Creates a BigQuery load job configuration for one kind's backup file.
   *
   * <p>NOTE(review): WRITE_EMPTY presumably makes the load fail if the destination table already
   * contains data, which is what makes a backup folder non-reloadable — confirm against BigQuery
   * write-disposition semantics.
   */
  private Job makeLoadJob(JobReference jobRef, String sourceUri, String tableId) {
    TableReference tableReference =
        new TableReference()
            .setProjectId(jobRef.getProjectId())
            .setDatasetId(BACKUP_DATASET)
            .setTableId(tableId);
    return new Job()
        .setJobReference(jobRef)
        .setConfiguration(new JobConfiguration()
            .setLoad(new JobConfigurationLoad()
                .setWriteDisposition(WriteDisposition.WRITE_EMPTY.toString())
                .setSourceFormat(SourceFormat.DATASTORE_BACKUP.toString())
                .setSourceUris(ImmutableList.of(sourceUri))
                .setDestinationTable(tableReference)));
  }
}

View file

@ -0,0 +1,21 @@
# BUILD rules for the Cloud Datastore Admin REST API client
# (google.registry.export.datastore).
package(
    default_visibility = ["//visibility:public"],
)

licenses(["notice"])  # Apache 2.0

java_library(
    name = "datastore",
    # All Java sources in this directory (DatastoreAdmin, Operation, EntityFilter, ...).
    srcs = glob(["*.java"]),
    deps = [
        "//java/google/registry/config",
        "//java/google/registry/util",
        "@com_google_api_client",
        "@com_google_code_findbugs_jsr305",
        "@com_google_dagger",
        "@com_google_guava",
        "@com_google_http_client",
        "@com_google_http_client_jackson2",
        "@joda_time",
    ],
)

View file

@ -0,0 +1,223 @@
// Copyright 2018 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package google.registry.export.datastore;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.api.client.googleapis.services.json.AbstractGoogleJsonClient;
import com.google.api.client.googleapis.services.json.AbstractGoogleJsonClientRequest;
import com.google.api.client.http.HttpRequestInitializer;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.GenericJson;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.util.Key;
import com.google.common.base.Strings;
import java.util.Collection;
import java.util.Optional;
/**
 * Java client to <a href="https://cloud.google.com/datastore/docs/reference/admin/rest/">Cloud
 * Datastore Admin REST API</a>.
 */
public class DatastoreAdmin extends AbstractGoogleJsonClient {

  private static final String ROOT_URL = "https://datastore.googleapis.com/v1/";
  // All request URI templates below are relative to ROOT_URL; no extra service path is needed.
  private static final String SERVICE_PATH = "";

  // GCP project that this instance is associated with.
  private final String projectId;

  protected DatastoreAdmin(Builder builder) {
    super(builder);
    this.projectId = checkNotNull(builder.projectId, "GCP projectId missing.");
  }

  /**
   * Returns an {@link Export} request that starts exporting all Cloud Datastore databases owned by
   * the GCP project identified by {@link #projectId}.
   *
   * <p>Typical usage is:
   *
   * <pre>
   * {@code Export export = datastoreAdmin.export(parameters ...);}
   * {@code Operation operation = export.execute();}
   * {@code while (!operation.isSuccessful()) { ...}}
   * </pre>
   *
   * <p>Please see the <a
   * href="https://cloud.google.com/datastore/docs/reference/admin/rest/v1/projects/export">API
   * specification of the export method for details</a>.
   *
   * <p>The following undocumented behaviors with regard to {@code outputUrlPrefix} have been
   * observed:
   *
   * <ul>
   *   <li>If outputUrlPrefix refers to a GCS bucket, exported data will be nested deeper in the
   *       bucket with a timestamped path. This is useful when periodic backups are desired
   *   <li>If outputUrlPrefix is already a nested path in a GCS bucket, exported data will be put
   *       under this path. This means that a nested path is not reusable, since the export process
   *       by default would not overwrite existing files.
   * </ul>
   *
   * @param outputUrlPrefix the full resource URL of the external storage location
   * @param kinds the datastore 'kinds' to be exported
   */
  public Export export(String outputUrlPrefix, Collection<String> kinds) {
    return new Export(new ExportRequest(outputUrlPrefix, kinds));
  }

  /**
   * Returns a {@link Get} request that retrieves the details of an export or import {@link
   * Operation}.
   *
   * @param operationName name of the {@code Operation} as returned by an export or import request
   */
  public Get get(String operationName) {
    return new Get(operationName);
  }

  /**
   * Returns a {@link ListOperations} request that retrieves all export or import {@link Operation
   * operations} matching {@code filter}.
   *
   * <p>Sample usage: find all operations started after 2018-10-31 00:00:00 UTC and has stopped:
   *
   * <pre>
   * {@code String filter = "metadata.common.startTime>\"2018-10-31T0:0:0Z\" AND done=true";}
   * {@code List<Operation> operations = datastoreAdmin.list(filter);}
   * </pre>
   *
   * <p>Please refer to {@link Operation} for how to reference operation properties.
   */
  public ListOperations list(String filter) {
    checkArgument(!Strings.isNullOrEmpty(filter), "Filter must not be null or empty.");
    return new ListOperations(Optional.of(filter));
  }

  /**
   * Returns a {@link ListOperations} request that retrieves all export or import {@link Operation
   * operations}.
   */
  public ListOperations listAll() {
    return new ListOperations(Optional.empty());
  }

  /** Builder for {@link DatastoreAdmin}. */
  public static class Builder extends AbstractGoogleJsonClient.Builder {

    // Required; DatastoreAdmin's constructor rejects a null projectId.
    private String projectId;

    public Builder(
        HttpTransport httpTransport,
        JsonFactory jsonFactory,
        HttpRequestInitializer httpRequestInitializer) {
      // Last argument disables legacy GData compatibility mode.
      super(httpTransport, jsonFactory, ROOT_URL, SERVICE_PATH, httpRequestInitializer, false);
    }

    @Override
    public Builder setApplicationName(String applicationName) {
      // Override narrows the return type so chained calls keep the Builder type.
      return (Builder) super.setApplicationName(applicationName);
    }

    /** Sets the GCP project ID of the Cloud Datastore databases being managed. */
    public Builder setProjectId(String projectId) {
      this.projectId = projectId;
      return this;
    }

    @Override
    public DatastoreAdmin build() {
      return new DatastoreAdmin(this);
    }
  }

  /** A request to export Cloud Datastore databases. */
  public class Export extends DatastoreAdminRequest<Operation> {

    Export(ExportRequest exportRequest) {
      super(
          DatastoreAdmin.this,
          "POST",
          "projects/{projectId}:export",
          exportRequest,
          Operation.class);
      // Expands the {projectId} placeholder in the URI template above.
      set("projectId", projectId);
    }
  }

  /** A request to retrieve details of an export or import operation. */
  public class Get extends DatastoreAdminRequest<Operation> {

    // operationName is already a full relative resource path, so no template expansion is needed.
    Get(String operationName) {
      super(DatastoreAdmin.this, "GET", operationName, null, Operation.class);
    }
  }

  /** A request to retrieve all export or import operations matching a given filter. */
  public class ListOperations extends DatastoreAdminRequest<Operation.OperationList> {

    ListOperations(Optional<String> filter) {
      super(
          DatastoreAdmin.this,
          "GET",
          "projects/{projectId}/operations",
          null,
          Operation.OperationList.class);
      set("projectId", projectId);
      // The "filter" query parameter is only attached when present; absent means list everything.
      filter.ifPresent(f -> set("filter", f));
    }
  }

  /** Base class of all DatastoreAdmin requests. */
  abstract static class DatastoreAdminRequest<T> extends AbstractGoogleJsonClientRequest<T> {

    /**
     * @param client Google JSON client
     * @param requestMethod HTTP Method
     * @param uriTemplate URI template for the path relative to the base URL. If it starts with a
     *     "/" the base path from the base URL will be stripped out. The URI template can also be a
     *     full URL. URI template expansion is done using {@link
     *     com.google.api.client.http.UriTemplate#expand(String, String, Object, boolean)}
     * @param jsonContent POJO that can be serialized into JSON content or {@code null} for none
     * @param responseClass response class to parse into
     */
    protected DatastoreAdminRequest(
        DatastoreAdmin client,
        String requestMethod,
        String uriTemplate,
        Object jsonContent,
        Class<T> responseClass) {
      super(client, requestMethod, uriTemplate, jsonContent, responseClass);
    }
  }

  /**
   * Model object that describes the JSON content in an export request.
   *
   * <p>Please note that some properties defined in the API are excluded, e.g., {@code databaseId}
   * (not supported by Cloud Datastore) and labels (not used by Domain Registry).
   */
  @SuppressWarnings("unused")
  static class ExportRequest extends GenericJson {

    // Fields are serialized into the request body by the GenericJson/@Key machinery.
    @Key private final String outputUrlPrefix;
    @Key private final EntityFilter entityFilter;

    ExportRequest(String outputUrlPrefix, Collection<String> kinds) {
      checkNotNull(outputUrlPrefix, "outputUrlPrefix");
      this.outputUrlPrefix = outputUrlPrefix;
      this.entityFilter = new EntityFilter(kinds);
    }
  }
}

View file

@ -0,0 +1,39 @@
// Copyright 2018 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package google.registry.export.datastore;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import dagger.Module;
import dagger.Provides;
import google.registry.config.CredentialModule;
import google.registry.config.RegistryConfig;
import javax.inject.Singleton;
/** Dagger module providing a singleton {@link DatastoreAdmin} client for this GCP project. */
@Module
public abstract class DatastoreAdminModule {

  /**
   * Builds the {@link DatastoreAdmin} client, authenticated with the application's default
   * credential and scoped to the configured project.
   */
  @Singleton
  @Provides
  static DatastoreAdmin provideDatastoreAdmin(
      @CredentialModule.DefaultCredential GoogleCredential credential,
      @RegistryConfig.Config("projectId") String projectId) {
    // The credential supplies the transport and JSON factory, and also acts as the
    // HttpRequestInitializer that signs outgoing requests.
    DatastoreAdmin.Builder adminBuilder =
        new DatastoreAdmin.Builder(
            credential.getTransport(), credential.getJsonFactory(), credential);
    adminBuilder.setApplicationName(projectId);
    adminBuilder.setProjectId(projectId);
    return adminBuilder.build();
  }
}

View file

@ -0,0 +1,49 @@
// Copyright 2018 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package google.registry.export.datastore;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.api.client.json.GenericJson;
import com.google.api.client.util.Key;
import com.google.common.collect.ImmutableList;
import java.util.Collection;
import java.util.List;
/**
 * Model object that describes the Cloud Datastore 'kinds' to be exported or imported. The JSON form
 * of this type is found in export/import requests and responses.
 *
 * <p>Please note that properties not used by Domain Registry are not included, e.g., {@code
 * namespaceIds}.
 */
public class EntityFilter extends GenericJson {

  // Serialized under the JSON name "kinds" via the inherited @Key machinery.
  @Key private List<String> kinds = ImmutableList.of();

  /** For JSON deserialization. */
  public EntityFilter() {}

  /** Creates a filter over the given non-empty collection of kind names. */
  EntityFilter(Collection<String> kindNames) {
    checkNotNull(kindNames, "kinds");
    checkArgument(!kindNames.isEmpty(), "kinds must not be empty");
    this.kinds = ImmutableList.copyOf(kindNames);
  }

  /** Returns an immutable snapshot of the kind names in this filter. */
  List<String> getKinds() {
    List<String> snapshot = ImmutableList.copyOf(kinds);
    return snapshot;
  }
}

View file

@ -0,0 +1,222 @@
// Copyright 2018 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package google.registry.export.datastore;
import static com.google.common.base.Preconditions.checkState;
import com.google.api.client.json.GenericJson;
import com.google.api.client.util.Key;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import google.registry.export.datastore.DatastoreAdmin.Get;
import google.registry.util.Clock;
import java.util.List;
import java.util.Optional;
import javax.annotation.Nullable;
import org.joda.time.DateTime;
import org.joda.time.Duration;
/**
 * Model object that describes the details of an export or import operation in Cloud Datastore.
 *
 * <p>{@link Operation} instances are parsed from the JSON payload in Datastore response messages.
 */
public class Operation extends GenericJson {

  // Operation state names as reported by the Datastore Admin API.
  private static final String STATE_SUCCESS = "SUCCESSFUL";
  private static final String STATE_PROCESSING = "PROCESSING";

  // Fields populated from JSON via the inherited @Key machinery.
  @Key private String name;
  @Key private Metadata metadata;
  @Key private boolean done;

  /** For JSON deserialization. */
  public Operation() {}

  /** Returns the name of this operation, which may be used in a {@link Get} request. */
  public String getName() {
    checkState(name != null, "Name must not be null.");
    return name;
  }

  /** Returns true once the operation has stopped, regardless of success or failure. */
  public boolean isDone() {
    return done;
  }

  // State string from the common metadata; throws IllegalStateException if metadata is missing.
  private String getState() {
    return getMetadata().getCommonMetadata().getState();
  }

  /** Returns true if this operation finished in the SUCCESSFUL state. */
  public boolean isSuccessful() {
    return getState().equals(STATE_SUCCESS);
  }

  /** Returns true if this operation is still in the PROCESSING state. */
  public boolean isProcessing() {
    return getState().equals(STATE_PROCESSING);
  }

  /**
   * Returns the elapsed time since starting if this operation is still running, or the total
   * running time if this operation has completed.
   */
  public Duration getRunningTime(Clock clock) {
    return new Duration(
        getStartTime(), getMetadata().getCommonMetadata().getEndTime().orElse(clock.nowUtc()));
  }

  /** Returns the start time recorded in this operation's common metadata. */
  public DateTime getStartTime() {
    return getMetadata().getCommonMetadata().getStartTime();
  }

  /** Returns the set of Datastore kinds covered by this operation's entity filter. */
  public ImmutableSet<String> getKinds() {
    return ImmutableSet.copyOf(getMetadata().getEntityFilter().getKinds());
  }

  /**
   * Returns the URL to the GCS folder that holds the exported data. This folder is created by
   * Datastore and is under the {@code outputUrlPrefix} set to {@linkplain
   * DatastoreAdmin#export(String, List) the export request}.
   */
  public String getExportFolderUrl() {
    return getMetadata().getOutputUrlPrefix();
  }

  /**
   * Returns the last segment of the {@linkplain #getExportFolderUrl() export folder URL} which can
   * be used as unique identifier of this export operation. This is a better ID than the {@linkplain
   * #getName() operation name}, which is opaque.
   */
  public String getExportId() {
    String exportFolderUrl = getExportFolderUrl();
    // NOTE(review): if the URL ends with a trailing '/', this yields an empty string — presumably
    // Datastore never emits a trailing slash here; confirm before relying on it.
    return exportFolderUrl.substring(exportFolderUrl.lastIndexOf('/') + 1);
  }

  /**
   * Returns a human-readable progress string combining whichever of the byte and entity progress
   * counters are present, or "Progress: N/A" if neither is reported yet.
   */
  public String getProgress() {
    StringBuilder result = new StringBuilder();
    Progress progress = getMetadata().getProgressBytes();
    if (progress != null) {
      result.append(
          String.format(" [%s/%s bytes]", progress.workCompleted, progress.workEstimated));
    }
    progress = getMetadata().getProgressEntities();
    if (progress != null) {
      result.append(
          String.format(" [%s/%s entities]", progress.workCompleted, progress.workEstimated));
    }
    if (result.length() == 0) {
      return "Progress: N/A";
    }
    return "Progress:" + result;
  }

  // Fails fast when the response payload did not include metadata.
  private Metadata getMetadata() {
    checkState(metadata != null, "Response metadata missing.");
    return metadata;
  }

  /** Models the common metadata properties of all operations. */
  public static class CommonMetadata extends GenericJson {

    // Timestamps are ISO-8601 strings in the JSON payload, parsed lazily by the getters.
    @Key private String startTime;
    @Key @Nullable private String endTime;
    @Key private String operationType;
    @Key private String state;

    public CommonMetadata() {}

    String getOperationType() {
      checkState(!Strings.isNullOrEmpty(operationType), "operationType may not be null or empty");
      return operationType;
    }

    String getState() {
      checkState(!Strings.isNullOrEmpty(state), "state may not be null or empty");
      return state;
    }

    DateTime getStartTime() {
      checkState(startTime != null, "StartTime missing.");
      return DateTime.parse(startTime);
    }

    /** Returns the end time, or empty while the operation is still running. */
    Optional<DateTime> getEndTime() {
      return Optional.ofNullable(endTime).map(DateTime::parse);
    }
  }

  /** Models the metadata of a Cloud Datastore export or import operation. */
  public static class Metadata extends GenericJson {

    // The JSON property is named "common"; remapped to a more descriptive field name.
    @Key("common")
    private CommonMetadata commonMetadata;

    @Key private Progress progressEntities;
    @Key private Progress progressBytes;
    @Key private EntityFilter entityFilter;
    @Key private String outputUrlPrefix;

    public Metadata() {}

    CommonMetadata getCommonMetadata() {
      checkState(commonMetadata != null, "CommonMetadata field is null.");
      return commonMetadata;
    }

    /** Returns the entity-count progress, or null if not reported. */
    public Progress getProgressEntities() {
      return progressEntities;
    }

    /** Returns the byte-count progress, or null if not reported. */
    public Progress getProgressBytes() {
      return progressBytes;
    }

    public EntityFilter getEntityFilter() {
      return entityFilter;
    }

    public String getOutputUrlPrefix() {
      checkState(!Strings.isNullOrEmpty(outputUrlPrefix), "outputUrlPrefix");
      return outputUrlPrefix;
    }
  }

  /** Progress of an export or import operation. */
  public static class Progress extends GenericJson {

    @Key private long workCompleted;
    @Key private long workEstimated;

    public Progress() {}

    long getWorkCompleted() {
      return workCompleted;
    }

    public long getWorkEstimated() {
      return workEstimated;
    }
  }

  /** List of {@link Operation Operations}. */
  public static class OperationList extends GenericJson {

    @Key private List<Operation> operations;

    /** For JSON deserialization. */
    public OperationList() {}

    ImmutableList<Operation> toList() {
      return ImmutableList.copyOf(operations);
    }
  }
}

View file

@ -15,10 +15,13 @@
package google.registry.flows;
import com.google.appengine.api.users.UserService;
import google.registry.model.eppcommon.ProtocolDefinition;
import google.registry.request.Action;
import google.registry.request.Action.Method;
import google.registry.request.Parameter;
import google.registry.request.Payload;
import google.registry.request.auth.Auth;
import google.registry.request.auth.AuthenticatedRegistrarAccessor;
import javax.inject.Inject;
import javax.servlet.http.HttpSession;
@ -34,13 +37,16 @@ public class EppConsoleAction implements Runnable {
@Inject HttpSession session;
@Inject EppRequestHandler eppRequestHandler;
@Inject UserService userService;
@Inject AuthenticatedRegistrarAccessor registrarAccessor;
@Inject @Parameter("clientId") String clientId;
@Inject EppConsoleAction() {}
@Override
public void run() {
eppRequestHandler.executeEpp(
new HttpSessionMetadata(session),
GaeUserCredentials.forCurrentUser(userService),
new StatelessRequestSessionMetadata(clientId,
ProtocolDefinition.getVisibleServiceExtensionUris()),
new GaeUserCredentials(registrarAccessor),
EppRequestSource.CONSOLE,
false, // This endpoint is never a dry run.
false, // This endpoint is never a superuser.

View file

@ -17,8 +17,8 @@ package google.registry.flows;
import static com.google.common.base.Strings.nullToEmpty;
import static com.google.common.flogger.LazyArgs.lazy;
import static com.google.common.io.BaseEncoding.base64;
import static google.registry.flows.EppXmlTransformer.unmarshal;
import static google.registry.flows.FlowReporter.extractTlds;
import static google.registry.flows.FlowUtils.unmarshalEpp;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.common.annotations.VisibleForTesting;
@ -65,7 +65,7 @@ public final class EppController {
try {
EppInput eppInput;
try {
eppInput = unmarshal(EppInput.class, inputXmlBytes);
eppInput = unmarshalEpp(EppInput.class, inputXmlBytes);
} catch (EppException e) {
// Log the unmarshalling error, with the raw bytes (in base64) to help with debugging.
logger.atInfo().withCause(e).log(

View file

@ -14,7 +14,7 @@
package google.registry.flows;
import static google.registry.flows.EppXmlTransformer.marshalWithLenientRetry;
import static google.registry.flows.FlowUtils.marshalWithLenientRetry;
import static google.registry.model.eppoutput.Result.Code.SUCCESS_AND_CLOSE;
import static google.registry.xml.XmlTransformer.prettyPrint;
import static java.nio.charset.StandardCharsets.UTF_8;

View file

@ -1,179 +0,0 @@
// Copyright 2017 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package google.registry.flows;
import static com.google.common.base.Preconditions.checkState;
import static google.registry.xml.ValidationMode.LENIENT;
import static google.registry.xml.ValidationMode.STRICT;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.flogger.FluentLogger;
import google.registry.flows.EppException.ParameterValueRangeErrorException;
import google.registry.flows.EppException.ParameterValueSyntaxErrorException;
import google.registry.flows.EppException.SyntaxErrorException;
import google.registry.flows.EppException.UnimplementedProtocolVersionException;
import google.registry.model.EppResourceUtils.InvalidRepoIdException;
import google.registry.model.ImmutableObject;
import google.registry.model.eppinput.EppInput;
import google.registry.model.eppinput.EppInput.WrongProtocolVersionException;
import google.registry.model.eppoutput.EppOutput;
import google.registry.model.host.InetAddressAdapter.IpVersionMismatchException;
import google.registry.model.translators.CurrencyUnitAdapter.UnknownCurrencyException;
import google.registry.xml.ValidationMode;
import google.registry.xml.XmlException;
import google.registry.xml.XmlTransformer;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.List;
/** {@link XmlTransformer} for marshalling to and from the Epp model classes. */
public class EppXmlTransformer {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
// Hardcoded XML schemas, ordered with respect to dependency.
private static final ImmutableList<String> SCHEMAS = ImmutableList.of(
"eppcom.xsd",
"epp.xsd",
"contact.xsd",
"host.xsd",
"domain.xsd",
"rgp.xsd",
"secdns.xsd",
"fee06.xsd",
"fee11.xsd",
"fee12.xsd",
"metadata.xsd",
"mark.xsd",
"dsig.xsd",
"smd.xsd",
"launch.xsd",
"allocate.xsd",
"superuser.xsd",
"allocationToken-1.0.xsd");
private static final XmlTransformer INPUT_TRANSFORMER =
new XmlTransformer(SCHEMAS, EppInput.class);
private static final XmlTransformer OUTPUT_TRANSFORMER =
new XmlTransformer(SCHEMAS, EppOutput.class);
public static void validateOutput(String xml) throws XmlException {
OUTPUT_TRANSFORMER.validate(xml);
}
/**
* Unmarshal bytes into Epp classes.
*
* @param clazz type to return, specified as a param to enforce typesafe generics
* @see <a href="https://errorprone.info/bugpattern/TypeParameterUnusedInFormals">TypeParameterUnusedInFormals</a>
*/
public static <T> T unmarshal(Class<T> clazz, byte[] bytes) throws EppException {
try {
return INPUT_TRANSFORMER.unmarshal(clazz, new ByteArrayInputStream(bytes));
} catch (XmlException e) {
// If this XmlException is wrapping a known type find it. If not, it's a syntax error.
List<Throwable> causalChain = Throwables.getCausalChain(e);
if (causalChain.stream().anyMatch(IpVersionMismatchException.class::isInstance)) {
throw new IpAddressVersionMismatchException();
}
if (causalChain.stream().anyMatch(WrongProtocolVersionException.class::isInstance)) {
throw new UnimplementedProtocolVersionException();
}
if (causalChain.stream().anyMatch(InvalidRepoIdException.class::isInstance)) {
throw new InvalidRepoIdEppException();
}
if (causalChain.stream().anyMatch(UnknownCurrencyException.class::isInstance)) {
throw new UnknownCurrencyEppException();
}
throw new GenericSyntaxErrorException(e.getMessage());
}
}
private static byte[] marshal(
XmlTransformer transformer,
ImmutableObject root,
ValidationMode validation) throws XmlException {
ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
transformer.marshal(root, byteArrayOutputStream, UTF_8, validation);
return byteArrayOutputStream.toByteArray();
}
public static byte[] marshal(EppOutput root, ValidationMode validation) throws XmlException {
return marshal(OUTPUT_TRANSFORMER, root, validation);
}
public static byte[] marshalWithLenientRetry(EppOutput eppOutput) {
checkState(eppOutput != null);
// We need to marshal to a string instead of writing the response directly to the servlet's
// response writer, so that partial results don't get written on failure.
try {
return EppXmlTransformer.marshal(eppOutput, STRICT);
} catch (XmlException e) {
// We failed to marshal with validation. This is very bad, but we can potentially still send
// back slightly invalid xml, so try again without validation.
try {
byte[] lenient = EppXmlTransformer.marshal(eppOutput, LENIENT);
// Marshaling worked even though the results didn't validate against the schema.
logger.atSevere().withCause(e).log(
"Result marshaled but did not validate: %s", new String(lenient, UTF_8));
return lenient;
} catch (XmlException e2) {
throw new RuntimeException(e2); // Failing to marshal at all is not recoverable.
}
}
}
@VisibleForTesting
public static byte[] marshalInput(EppInput root, ValidationMode validation) throws XmlException {
return marshal(INPUT_TRANSFORMER, root, validation);
}
@VisibleForTesting
public static void validateInput(String xml) throws XmlException {
INPUT_TRANSFORMER.validate(xml);
}
/** IP address version mismatch. */
public static class IpAddressVersionMismatchException extends ParameterValueRangeErrorException {
public IpAddressVersionMismatchException() {
super("IP adddress version mismatch");
}
}
/** Invalid format for repository id. */
public static class InvalidRepoIdEppException extends ParameterValueSyntaxErrorException {
public InvalidRepoIdEppException() {
super("Invalid format for repository id");
}
}
/** Unknown currency. */
static class UnknownCurrencyEppException extends ParameterValueRangeErrorException {
public UnknownCurrencyEppException() {
super("Unknown currency.");
}
}
/** Generic syntax error that can be thrown by any flow. */
static class GenericSyntaxErrorException extends SyntaxErrorException {
public GenericSyntaxErrorException(String message) {
super(message);
}
}
}

View file

@ -14,14 +14,32 @@
package google.registry.flows;
import static com.google.common.base.Preconditions.checkState;
import static google.registry.model.ofy.ObjectifyService.ofy;
import static google.registry.xml.ValidationMode.LENIENT;
import static google.registry.xml.ValidationMode.STRICT;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.common.base.Throwables;
import com.google.common.flogger.FluentLogger;
import google.registry.flows.EppException.CommandUseErrorException;
import google.registry.flows.EppException.ParameterValueRangeErrorException;
import google.registry.flows.EppException.SyntaxErrorException;
import google.registry.flows.EppException.UnimplementedProtocolVersionException;
import google.registry.flows.custom.EntityChanges;
import google.registry.model.eppcommon.EppXmlTransformer;
import google.registry.model.eppinput.EppInput.WrongProtocolVersionException;
import google.registry.model.eppoutput.EppOutput;
import google.registry.model.host.InetAddressAdapter.IpVersionMismatchException;
import google.registry.model.translators.CurrencyUnitAdapter.UnknownCurrencyException;
import google.registry.xml.XmlException;
import java.util.List;
/** Static utility functions for flows. */
public final class FlowUtils {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
private FlowUtils() {}
/** Validate that there is a logged in client. */
@ -37,10 +55,75 @@ public final class FlowUtils {
ofy().delete().keys(entityChanges.getDeletes());
}
/**
* Unmarshal bytes into Epp classes. Does the same as {@link EppXmlTransformer#unmarshal(Class,
* byte[])} but with exception-handling logic to throw {@link EppException} instead.
*/
public static <T> T unmarshalEpp(Class<T> clazz, byte[] bytes) throws EppException {
try {
return EppXmlTransformer.unmarshal(clazz, bytes);
} catch (XmlException e) {
// If this XmlException is wrapping a known type find it. If not, it's a syntax error.
List<Throwable> causalChain = Throwables.getCausalChain(e);
if (causalChain.stream().anyMatch(IpVersionMismatchException.class::isInstance)) {
throw new IpAddressVersionMismatchException();
}
if (causalChain.stream().anyMatch(WrongProtocolVersionException.class::isInstance)) {
throw new UnimplementedProtocolVersionException();
}
if (causalChain.stream().anyMatch(UnknownCurrencyException.class::isInstance)) {
throw new UnknownCurrencyEppException();
}
throw new GenericXmlSyntaxErrorException(e.getMessage());
}
}
public static byte[] marshalWithLenientRetry(EppOutput eppOutput) {
checkState(eppOutput != null);
// We need to marshal to a string instead of writing the response directly to the servlet's
// response writer, so that partial results don't get written on failure.
try {
return EppXmlTransformer.marshal(eppOutput, STRICT);
} catch (XmlException e) {
// We failed to marshal with validation. This is very bad, but we can potentially still send
// back slightly invalid xml, so try again without validation.
try {
byte[] lenient = EppXmlTransformer.marshal(eppOutput, LENIENT);
// Marshaling worked even though the results didn't validate against the schema.
logger.atSevere().withCause(e).log(
"Result marshaled but did not validate: %s", new String(lenient, UTF_8));
return lenient;
} catch (XmlException e2) {
throw new RuntimeException(e2); // Failing to marshal at all is not recoverable.
}
}
}
/** Registrar is not logged in. */
public static class NotLoggedInException extends CommandUseErrorException {
public NotLoggedInException() {
super("Registrar is not logged in.");
}
}
/** IP address version mismatch. */
public static class IpAddressVersionMismatchException extends ParameterValueRangeErrorException {
public IpAddressVersionMismatchException() {
super("IP adddress version mismatch");
}
}
/** Unknown currency. */
static class UnknownCurrencyEppException extends ParameterValueRangeErrorException {
public UnknownCurrencyEppException() {
super("Unknown currency.");
}
}
/** Generic XML syntax error that can be thrown by any flow. */
public static class GenericXmlSyntaxErrorException extends SyntaxErrorException {
public GenericXmlSyntaxErrorException(String message) {
super(message);
}
}
}

View file

@ -15,98 +15,40 @@
package google.registry.flows;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Strings.nullToEmpty;
import static google.registry.util.PreconditionsUtils.checkArgumentNotNull;
import com.google.appengine.api.users.User;
import com.google.appengine.api.users.UserService;
import com.google.common.annotations.VisibleForTesting;
import google.registry.flows.EppException.AuthenticationErrorException;
import google.registry.model.registrar.Registrar;
import google.registry.model.registrar.RegistrarContact;
import javax.annotation.Nullable;
import google.registry.request.auth.AuthenticatedRegistrarAccessor;
import google.registry.request.auth.AuthenticatedRegistrarAccessor.RegistrarAccessDeniedException;
/** Credentials provided by {@link com.google.appengine.api.users.UserService}. */
public class GaeUserCredentials implements TransportCredentials {
private final User gaeUser;
private final Boolean isAdmin;
private final AuthenticatedRegistrarAccessor registrarAccessor;
/**
* Create an instance for the current user, as determined by {@code UserService}.
*
* <p>Note that the current user may be null (i.e. there is no logged in user).
*/
public static GaeUserCredentials forCurrentUser(UserService userService) {
User user = userService.getCurrentUser();
return new GaeUserCredentials(user, user != null ? userService.isUserAdmin() : null);
}
/** Create an instance that represents an explicit user (for testing purposes). */
@VisibleForTesting
public static GaeUserCredentials forTestingUser(User gaeUser, Boolean isAdmin) {
checkArgumentNotNull(gaeUser);
checkArgumentNotNull(isAdmin);
return new GaeUserCredentials(gaeUser, isAdmin);
}
/** Create an instance that represents a non-logged in user (for testing purposes). */
@VisibleForTesting
public static GaeUserCredentials forLoggedOutUser() {
return new GaeUserCredentials(null, null);
}
private GaeUserCredentials(@Nullable User gaeUser, @Nullable Boolean isAdmin) {
this.gaeUser = gaeUser;
this.isAdmin = isAdmin;
}
@VisibleForTesting
User getUser() {
return gaeUser;
public GaeUserCredentials(AuthenticatedRegistrarAccessor registrarAccessor) {
this.registrarAccessor = registrarAccessor;
}
@Override
public void validate(Registrar registrar, String ignoredPassword)
throws AuthenticationErrorException {
if (gaeUser == null) {
throw new UserNotLoggedInException();
try {
registrarAccessor.verifyAccess(registrar.getClientId());
} catch (RegistrarAccessDeniedException e) {
throw new UserForbiddenException(e);
}
// Allow admins to act as any registrar.
if (Boolean.TRUE.equals(isAdmin)) {
return;
}
// Check Registrar's contacts to see if any are associated with this gaeUserId.
final String gaeUserId = gaeUser.getUserId();
for (RegistrarContact rc : registrar.getContacts()) {
if (gaeUserId.equals(rc.getGaeUserId())) {
return;
}
}
throw new BadGaeUserIdException(gaeUser);
}
@Override
public String toString() {
return toStringHelper(getClass())
.add("gaeUser", gaeUser)
.add("isAdmin", isAdmin)
.toString();
return toStringHelper(getClass()).add("user", registrarAccessor.userIdForLogging()).toString();
}
/** User is not logged in as a GAE user. */
public static class UserNotLoggedInException extends AuthenticationErrorException {
public UserNotLoggedInException() {
super("User is not logged in");
}
}
/** GAE user id is not allowed to login as requested registrar. */
public static class BadGaeUserIdException extends AuthenticationErrorException {
public BadGaeUserIdException(User user) {
super(
"User id is not allowed to login as requested registrar: "
+ (nullToEmpty(user.getEmail())));
/** GAE User can't access the requested registrar. */
public static class UserForbiddenException extends AuthenticationErrorException {
public UserForbiddenException(RegistrarAccessDeniedException e) {
super(e.getMessage());
}
}
}

View file

@ -258,13 +258,17 @@ public final class ResourceFlowUtils {
* Resolve a pending transfer by denying it.
*
* <p>This removes the {@link StatusValue#PENDING_TRANSFER} status, sets the {@link
* TransferStatus}, clears all the server-approve fields on the {@link TransferData}, and sets the
* expiration time of the last pending transfer to now.
* TransferStatus}, clears all the server-approve fields on the {@link TransferData}, sets the
* expiration time of the last pending transfer to now, sets the last EPP update time to now, and
* sets the last EPP update client id to the given client id.
*/
public static <R extends EppResource & ResourceWithTransferData> R denyPendingTransfer(
R resource, TransferStatus transferStatus, DateTime now) {
R resource, TransferStatus transferStatus, DateTime now, String lastEppUpdateClientId) {
checkArgument(transferStatus.isDenied(), "Not a denial transfer status");
return resolvePendingTransfer(resource, transferStatus, now).build();
return resolvePendingTransfer(resource, transferStatus, now)
.setLastEppUpdateTime(now)
.setLastEppUpdateClientId(lastEppUpdateClientId)
.build();
}
public static <R extends EppResource & ResourceWithTransferData> void verifyHasPendingTransfer(
@ -288,11 +292,8 @@ public final class ResourceFlowUtils {
}
public static <R extends EppResource> R verifyExistence(
Class<R> clazz, String targetId, R resource) throws ResourceDoesNotExistException {
if (resource == null) {
throw new ResourceDoesNotExistException(clazz, targetId);
}
return resource;
Class<R> clazz, String targetId, Optional<R> resource) throws ResourceDoesNotExistException {
return resource.orElseThrow(() -> new ResourceDoesNotExistException(clazz, targetId));
}
public static <R extends EppResource> void verifyResourceDoesNotExist(

View file

@ -26,6 +26,7 @@ import com.google.common.net.HostAndPort;
import com.google.common.net.InetAddresses;
import dagger.Module;
import dagger.Provides;
import google.registry.config.RegistryConfig.Config;
import google.registry.flows.EppException.AuthenticationErrorException;
import google.registry.model.registrar.Registrar;
import google.registry.request.Header;
@ -54,14 +55,17 @@ public class TlsCredentials implements TransportCredentials {
private static final FluentLogger logger = FluentLogger.forEnclosingClass();
private final boolean requireSslCertificates;
private final String clientCertificateHash;
private final InetAddress clientInetAddr;
@Inject
@VisibleForTesting
public TlsCredentials(
@Config("requireSslCertificates") boolean requireSslCertificates,
@Header("X-SSL-Certificate") String clientCertificateHash,
@Header("X-Forwarded-For") Optional<String> clientAddress) {
this.requireSslCertificates = requireSslCertificates;
this.clientCertificateHash = clientCertificateHash;
this.clientInetAddr = clientAddress.isPresent() ? parseInetAddress(clientAddress.get()) : null;
}
@ -112,13 +116,17 @@ public class TlsCredentials implements TransportCredentials {
* @throws MissingRegistrarCertificateException if frontend didn't send certificate hash header
* @throws BadRegistrarCertificateException if registrar requires certificate and it didn't match
*/
private void validateCertificate(Registrar registrar) throws AuthenticationErrorException {
@VisibleForTesting
void validateCertificate(Registrar registrar) throws AuthenticationErrorException {
if (isNullOrEmpty(registrar.getClientCertificateHash())
&& isNullOrEmpty(registrar.getFailoverClientCertificateHash())) {
logger.atInfo().log(
"Skipping SSL certificate check because %s doesn't have any certificate hashes on file",
registrar.getClientId());
return;
if (requireSslCertificates) {
throw new RegistrarCertificateNotConfiguredException();
} else {
// If the environment is configured to allow missing SSL certificate hashes and this hash is
// missing, then bypass the certificate hash checks.
return;
}
}
if (isNullOrEmpty(clientCertificateHash)) {
logger.atInfo().log("Request did not include X-SSL-Certificate");
@ -165,6 +173,14 @@ public class TlsCredentials implements TransportCredentials {
}
}
/** Registrar certificate is not configured. */
public static class RegistrarCertificateNotConfiguredException
extends AuthenticationErrorException {
public RegistrarCertificateNotConfiguredException() {
super("Registrar certificate is not configured");
}
}
/** Registrar IP address is not in stored whitelist. */
public static class BadRegistrarIpAddressException extends AuthenticationErrorException {
public BadRegistrarIpAddressException() {

View file

@ -80,7 +80,7 @@ public final class ContactTransferCancelFlow implements TransactionalFlow {
verifyHasPendingTransfer(existingContact);
verifyTransferInitiator(clientId, existingContact);
ContactResource newContact =
denyPendingTransfer(existingContact, TransferStatus.CLIENT_CANCELLED, now);
denyPendingTransfer(existingContact, TransferStatus.CLIENT_CANCELLED, now, clientId);
HistoryEntry historyEntry = historyBuilder
.setType(HistoryEntry.Type.CONTACT_TRANSFER_CANCEL)
.setModificationTime(now)

View file

@ -78,7 +78,7 @@ public final class ContactTransferRejectFlow implements TransactionalFlow {
verifyHasPendingTransfer(existingContact);
verifyResourceOwnership(clientId, existingContact);
ContactResource newContact =
denyPendingTransfer(existingContact, TransferStatus.CLIENT_REJECTED, now);
denyPendingTransfer(existingContact, TransferStatus.CLIENT_REJECTED, now, clientId);
HistoryEntry historyEntry = historyBuilder
.setType(HistoryEntry.Type.CONTACT_TRANSFER_REJECT)
.setModificationTime(now)

View file

@ -98,6 +98,8 @@ import org.joda.time.Duration;
/**
* An EPP flow that allocates a new domain resource from a domain application.
*
* <p>Note that this flow is only run by superusers.
*
* @error {@link google.registry.flows.exceptions.ResourceAlreadyExistsException}
* @error {@link DomainAllocateFlow.HasFinalStatusException}
* @error {@link DomainAllocateFlow.MissingApplicationException}
@ -222,10 +224,9 @@ public class DomainAllocateFlow implements TransactionalFlow {
private DomainApplication loadAndValidateApplication(
String applicationRoid, DateTime now) throws EppException {
DomainApplication application = loadDomainApplication(applicationRoid, now);
if (application == null) {
throw new MissingApplicationException(applicationRoid);
}
DomainApplication application =
loadDomainApplication(applicationRoid, now)
.orElseThrow(() -> new MissingApplicationException(applicationRoid));
if (application.getApplicationStatus().isFinalStatus()) {
throw new HasFinalStatusException();
}

Some files were not shown because too many files have changed in this diff Show more