Make it possible to stage a single Beam pipeline (#1351)

Lai Jiang, 2021-09-29 18:27:23 -04:00, committed by GitHub
parent 47a38e8309
commit 0d8f9882e4
3 changed files with 47 additions and 36 deletions

@@ -140,6 +140,8 @@ PROPERTIES = [
         'a BEAM pipeline to image. Setting this property to empty string '
         'will disable image generation.',
         '/usr/bin/dot'),
+    Property('pipeline',
+             'The name of the Beam pipeline being staged.')
 ]
 GRADLE_FLAGS = [

@@ -772,48 +772,55 @@ createUberJar('nomulus', 'nomulus', 'google.registry.tools.RegistryTool')
 // This packages more code and dependency than necessary. However, without
 // restructuring the source tree it is difficult to generate leaner jars.
 createUberJar(
-    'beam_pipeline_common',
+    'beamPipelineCommon',
     'beam_pipeline_common',
     '')
-// Create beam staging task if environment is alpha or crash.
-// All other environments use formally released pipelines through CloudBuild.
+// Create beam staging task if the environment is alpha. Production, sandbox and
+// qa use formally released pipelines through CloudBuild, whereas crash and
+// alpha use the pipelines staged in the alpha deployment project.
 //
 // Users should install gcloud and log in to GCP before invoking this task.
-if (environment in ['alpha', 'crash']) {
+if (environment == 'alpha') {
   def pipelines = [
+      InitSql            :
           [
               mainClass: 'google.registry.beam.initsql.InitSqlPipeline',
-              metaData: 'google/registry/beam/init_sql_pipeline_metadata.json'
+              metaData : 'google/registry/beam/init_sql_pipeline_metadata.json'
           ],
+      BulkDeleteDatastore:
           [
               mainClass: 'google.registry.beam.datastore.BulkDeleteDatastorePipeline',
-              metaData: 'google/registry/beam/bulk_delete_datastore_pipeline_metadata.json'
+              metaData : 'google/registry/beam/bulk_delete_datastore_pipeline_metadata.json'
           ],
+      Spec11             :
           [
               mainClass: 'google.registry.beam.spec11.Spec11Pipeline',
-              metaData: 'google/registry/beam/spec11_pipeline_metadata.json'
+              metaData : 'google/registry/beam/spec11_pipeline_metadata.json'
           ],
+      Invoicing          :
           [
               mainClass: 'google.registry.beam.invoicing.InvoicingPipeline',
-              metaData: 'google/registry/beam/invoicing_pipeline_metadata.json'
+              metaData : 'google/registry/beam/invoicing_pipeline_metadata.json'
           ],
+      Rde                :
           [
               mainClass: 'google.registry.beam.rde.RdePipeline',
-              metaData: 'google/registry/beam/rde_pipeline_metadata.json'
+              metaData : 'google/registry/beam/rde_pipeline_metadata.json'
           ],
   ]
-  project.tasks.create("stage_beam_pipelines") {
+  project.tasks.create("stageBeamPipelines") {
     doLast {
       pipelines.each {
-        def mainClass = it['mainClass']
-        def metaData = it['metaData']
+        if (rootProject.pipeline == '' || rootProject.pipeline == it.key) {
+          def mainClass = it.value['mainClass']
+          def metaData = it.value['metaData']
           def pipelineName = CaseFormat.UPPER_CAMEL.to(
               CaseFormat.LOWER_UNDERSCORE,
               mainClass.substring(mainClass.lastIndexOf('.') + 1))
           def imageName = "gcr.io/${gcpProject}/beam/${pipelineName}"
           def metaDataBaseName = metaData.substring(metaData.lastIndexOf('/') + 1)
-          def uberJarName = tasks.beam_pipeline_common.outputs.files.asPath
+          def uberJarName = tasks.beamPipelineCommon.outputs.files.asPath
           def command = "\
               gcloud dataflow flex-template build \
@@ -828,7 +835,8 @@ if (environment in ['alpha', 'crash']) {
           rootProject.ext.execInBash(command, '/tmp')
         }
       }
-  }.dependsOn(tasks.beam_pipeline_common)
+    }
+  }.dependsOn(tasks.beamPipelineCommon)
 }

 // A jar with classes and resources from main sourceSet, excluding internal
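
With this change the staging task no longer has to build every pipeline: the new check on rootProject.pipeline skips any map entry whose key does not match, and an empty value keeps the old stage-everything behavior. A minimal usage sketch, assuming the property is passed as an ordinary Gradle project property and that stageBeamPipelines is reachable from wherever the build is invoked (the exact task path is not shown in this diff):

    # stage only the RDE pipeline (key names come from the pipelines map above)
    ./gradlew stageBeamPipelines -Ppipeline=Rde

    # stage all pipelines, as before (pipeline stays empty by default)
    ./gradlew stageBeamPipelines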

@@ -28,3 +28,4 @@ baseSchemaTag=
 schema_version=
 nomulus_version=
 dot_path=/usr/bin/dot
+pipeline=
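
The empty default added here means every pipeline is staged unless a name is supplied, matching the rootProject.pipeline == '' check in the Gradle build file above. To pin the default to a single pipeline instead, one could presumably set the property in this file to one of the map keys, for example:

    pipeline=Spec11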