<?xml version="1.0" encoding="UTF-8"?>
<cronentries>

  <!--
    /cron/fanout params:
        queue=<QUEUE_NAME>
        endpoint=<ENDPOINT_NAME>  // URL path of the servlet, which may contain placeholders
        runInEmpty                // Run once, with no tld parameter
        forEachRealTld            // Run for TLDs with getTldType() == TldType.REAL
        forEachTestTld            // Run for TLDs with getTldType() == TldType.TEST
        exclude=TLD1[,TLD2]       // Exclude TLDs that would otherwise be included
  -->
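
  <!--
    For example, a hypothetical fanout entry (the queue name, endpoint, and
    excluded TLD here are illustrative assumptions, not one of the jobs below)
    that would run an action for every real TLD except "exampletld":

    <cron>
      <url><![CDATA[/_dr/cron/fanout?queue=some-queue&endpoint=/_dr/task/someAction&forEachRealTld&exclude=exampletld]]></url>
      <schedule>every 12 hours synchronized</schedule>
      <target>backend</target>
    </cron>
  -->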

  <cron>
    <url>/_dr/task/rdeStaging</url>
    <description>
      This job generates a full RDE escrow deposit as a single gigantic XML document
      and streams it to Cloud Storage. When this job finishes successfully, it
      launches a separate task that uploads the deposit file to Iron Mountain via SFTP.
    </description>
    <schedule>every day 00:07</schedule>
    <target>backend</target>
  </cron>

  <cron>
    <url><![CDATA[/_dr/cron/fanout?queue=rde-upload&endpoint=/_dr/task/rdeUpload&forEachRealTld]]></url>
    <description>
      This job is a no-op unless RdeUploadCursor falls behind for some reason.
    </description>
    <schedule>every 4 hours synchronized</schedule>
    <target>backend</target>
  </cron>

  <cron>
    <url><![CDATA[/_dr/cron/fanout?queue=marksdb&endpoint=/_dr/task/tmchDnl&runInEmpty]]></url>
    <description>
      This job downloads the latest DNL from MarksDB and inserts it into the database.
      (See: TmchDnlServlet, ClaimsList)
    </description>
    <schedule>every 12 hours synchronized</schedule>
    <target>backend</target>
  </cron>

  <cron>
    <url><![CDATA[/_dr/cron/fanout?queue=marksdb&endpoint=/_dr/task/tmchSmdrl&runInEmpty]]></url>
    <description>
      This job downloads the latest SMDRL from MarksDB and inserts it into the database.
      (See: TmchSmdrlServlet, SignedMarkRevocationList)
    </description>
    <schedule>every 12 hours from 00:15 to 12:15</schedule>
    <target>backend</target>
  </cron>

  <cron>
    <url><![CDATA[/_dr/cron/fanout?queue=marksdb&endpoint=/_dr/task/tmchCrl&runInEmpty]]></url>
    <description>
      This job downloads the latest CRL from MarksDB and inserts it into the database.
      (See: TmchCrlServlet)
    </description>
    <schedule>every 12 hours synchronized</schedule>
    <target>backend</target>
  </cron>

  <cron>
    <url><![CDATA[/_dr/cron/fanout?queue=retryable-cron-tasks&endpoint=/_dr/task/syncGroupMembers&runInEmpty]]></url>
    <description>
      Syncs RegistrarContact changes in the past hour to Google Groups.
    </description>
    <schedule>every 1 hours synchronized</schedule>
    <target>backend</target>
  </cron>

  <cron>
    <url><![CDATA[/_dr/cron/fanout?queue=sheet&endpoint=/_dr/task/syncRegistrarsSheet&runInEmpty]]></url>
    <description>
      Synchronizes Registrar entities to Google Spreadsheets.
    </description>
    <schedule>every 1 hours synchronized</schedule>
    <target>backend</target>
  </cron>

  <cron>
    <url><![CDATA[/_dr/task/resaveAllEppResources]]></url>
    <description>
      This job resaves all our resources, projected in time to "now".
      It is needed for "deleteOldCommitLogs" to work correctly.
    </description>
    <schedule>1st monday of month 09:00</schedule>
    <target>backend</target>
  </cron>

  <cron>
    <url><![CDATA[/_dr/task/deleteOldCommitLogs]]></url>
    <description>
      This job deletes unreferenced commit logs from Datastore that are older than thirty days.
      Since references are only updated on save, if we want to delete "unneeded" commit logs, we
      also need "resaveAllEppResources" to run periodically.
    </description>
    <schedule>3rd monday of month 09:00</schedule>
    <target>backend</target>
  </cron>

  <cron>
    <url><![CDATA[/_dr/cron/commitLogCheckpoint]]></url>
    <description>
      This job checkpoints the commit log buckets and exports the diff since the last
      checkpoint to GCS.
    </description>
    <schedule>every 3 minutes synchronized</schedule>
    <target>backend</target>
  </cron>

  <cron>
    <url><![CDATA[/_dr/cron/fanout?queue=retryable-cron-tasks&endpoint=/_dr/task/exportDomainLists&runInEmpty]]></url>
    <description>
      This job exports lists of all active domain names to Google Cloud Storage.
    </description>
    <schedule>every 12 hours synchronized</schedule>
    <target>backend</target>
  </cron>

  <cron>
    <url><![CDATA[/_dr/task/deleteContactsAndHosts]]></url>
    <description>
      This job runs a mapreduce that processes batch asynchronous deletions of
      contact and host resources by mapping over all EppResources and checking
      for any references to the contacts/hosts in pending deletion.
    </description>
    <schedule>every 5 minutes synchronized</schedule>
    <target>backend</target>
  </cron>

  <cron>
    <url><![CDATA[/_dr/task/refreshDnsOnHostRename]]></url>
    <description>
      This job runs a mapreduce that asynchronously handles DNS refreshes for
      host renames by mapping over all domains and creating DNS refresh tasks
      for any domains that reference a renamed host.
    </description>
    <schedule>every 5 minutes synchronized</schedule>
    <target>backend</target>
  </cron>

  <cron>
    <url><![CDATA[/_dr/task/expandRecurringBillingEvents]]></url>
    <description>
      This job runs a mapreduce that creates synthetic OneTime billing events from Recurring billing
      events. Events are created for all instances of Recurring billing events that should exist
      between the RECURRING_BILLING cursor's time and the execution time of the mapreduce.
    </description>
    <schedule>every day 03:00</schedule>
    <target>backend</target>
  </cron>

  <cron>
    <url><![CDATA[/_dr/cron/fanout?queue=export-snapshot&endpoint=/_dr/task/exportSnapshot&runInEmpty]]></url>
    <description>
      This job fires off a Datastore backup-as-a-service job that generates snapshot files in GCS.
      It also enqueues a new task to wait on the completion of that job and then load the resulting
      snapshot into BigQuery.
    </description>
    <!-- Keep the task-age-limit for this job's task queue less than this cron interval. -->
    <schedule>every day 06:00</schedule>
    <target>backend</target>
  </cron>
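
  <!--
    A minimal sketch of what the matching retry configuration for the
    export-snapshot queue might look like in queue.xml; the rate and age
    limit shown are illustrative assumptions, not the project's actual
    settings. The point is that task-age-limit stays under the one-day
    cron interval above, so stale tasks expire before the next run:

    <queue>
      <name>export-snapshot</name>
      <rate>1/m</rate>
      <retry-parameters>
        <task-age-limit>20h</task-age-limit>
      </retry-parameters>
    </queue>
  -->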

  <cron>
    <url><![CDATA[/_dr/cron/fanout?queue=retryable-cron-tasks&endpoint=/_dr/task/deleteProberData&runInEmpty]]></url>
    <description>
      This job clears out data from probers and runs once a week.
    </description>
    <schedule>every monday 14:00</schedule>
    <timezone>UTC</timezone>
    <target>backend</target>
  </cron>

  <cron>
    <url><![CDATA[/_dr/cron/fanout?queue=retryable-cron-tasks&endpoint=/_dr/task/exportReservedTerms&forEachRealTld]]></url>
    <description>
      Exports reserved term lists to Google Drive once per day for each TLD.
    </description>
    <schedule>every day 05:30</schedule>
    <target>backend</target>
  </cron>

  <cron>
    <url><![CDATA[/_dr/cron/fanout?queue=retryable-cron-tasks&endpoint=/_dr/task/exportPremiumTerms&forEachRealTld]]></url>
    <description>
      Exports premium price lists to the Google Drive folders for each TLD once per day.
    </description>
    <schedule>every day 05:00</schedule>
    <target>backend</target>
  </cron>

  <cron>
    <url><![CDATA[/_dr/cron/readDnsQueue?jitterSeconds=45]]></url>
    <description>
      Leases all tasks from the dns-pull queue, groups them by TLD, and invokes
      PublishDnsUpdates for each group.
    </description>
    <schedule>every 1 minutes synchronized</schedule>
    <target>backend</target>
  </cron>

  <cron>
    <url><![CDATA[/_ah/sessioncleanup?clear]]></url>
    <description>
      Deletes up to 100 expired _ah_SESSION entities from Datastore.
    </description>
    <schedule>every 15 minutes</schedule>
    <target>backend</target>
  </cron>

</cronentries>