added FederalAgency, updated for using bulk

David Kennedy 2024-06-13 07:21:06 -04:00
parent 1921d99eab
commit 97ee855de6
5 changed files with 30 additions and 6 deletions


@@ -32,6 +32,7 @@ For reference, the zip file will contain the following tables in csv form:
 * DomainInformation
 * DomainUserRole
 * DraftDomain
+* FederalAgency
 * Websites
 * Host
 * HostIP
@@ -76,13 +77,14 @@ For reference, this deletes all rows from the following tables:
 * DomainInformation
 * DomainRequest
 * Domain
-* User (all but the current user)
+* User
 * Contact
 * Websites
 * DraftDomain
 * HostIP
 * Host
 * PublicContact
+* FederalAgency

 #### Importing into Target Environment
@@ -115,7 +117,7 @@ cf ssh {target-app}
 example cleaning getgov-backup:
 cf ssh getgov-backup
 /tmp/lifecycle/backup
-./manage.py import_tables --skipEppSave=False
+./manage.py import_tables --no-skipEppSave

 For reference, this imports tables in the following order:
@@ -126,6 +128,7 @@ For reference, this imports tables in the following order:
 * HostIP
 * DraftDomain
 * Websites
+* FederalAgency
 * DomainRequest
 * DomainInformation
 * UserDomainRole


@@ -2370,6 +2370,9 @@ class PublicContactResource(resources.ModelResource):
     class Meta:
         model = models.PublicContact
+        use_bulk = True
+        batch_size = 1000
+        force_init_instance = True

     def __init__(self):
         """Sets global variables for code tidyness"""
@@ -2472,11 +2475,20 @@ class VerifiedByStaffAdmin(ListHeaderAdmin):
         super().save_model(request, obj, form, change)


-class FederalAgencyAdmin(ListHeaderAdmin):
+class FederalAgencyResource(resources.ModelResource):
+    """defines how each field in the referenced model should be mapped to the corresponding fields in the
+    import/export file"""
+
+    class Meta:
+        model = models.FederalAgency
+
+
+class FederalAgencyAdmin(ListHeaderAdmin, ImportExportModelAdmin):
     list_display = ["agency"]
     search_fields = ["agency"]
     search_help_text = "Search by agency name."
     ordering = ["agency"]
+    resource_classes = [FederalAgencyResource]


 class UserGroupAdmin(AuditedAdmin):
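
Another aside: with `resource_classes` set, `ImportExportModelAdmin` adds Import/Export buttons for `FederalAgency` in the admin, and the same resource can be driven programmatically, which is how the export/import management commands later in this diff move each table through CSV. A hedged sketch of that programmatic use (file name and surrounding code are illustrative):

```python
# Illustrative only; the actual export/import commands may differ.
resource = FederalAgencyResource()
dataset = resource.export()          # tablib.Dataset with one row per FederalAgency
with open("FederalAgency.csv", "w") as f:
    f.write(dataset.csv)             # the CSV shape import_tables expects back
```
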


@@ -28,6 +28,7 @@ class Command(BaseCommand):
     * DomainInformation
     * DomainRequest
     * DraftDomain
+    * FederalAgency
     * Host
     * HostIp
     * PublicContact
@@ -40,14 +41,15 @@ class Command(BaseCommand):
         table_names = [
             "DomainInformation",
             "DomainRequest",
+            "FederalAgency",
             "PublicContact",
+            "HostIp",
+            "Host",
             "Domain",
             "User",
             "Contact",
             "Website",
             "DraftDomain",
-            "HostIp",
-            "Host",
         ]

         for table_name in table_names:
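
For context (not shown in this hunk): each name in `table_names` is resolved to a model and emptied. A rough sketch of that step, assuming the `registrar` app label; the real command's batching and error handling are omitted:

```python
# Sketch only; assumes the models live in the "registrar" app.
from django.apps import apps


def clean_table(table_name: str) -> None:
    """Delete every row of the named model."""
    model = apps.get_model("registrar", table_name)
    model.objects.all().delete()


for table_name in table_names:  # the list defined in the hunk above
    clean_table(table_name)
```
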


@@ -18,6 +18,7 @@ class Command(BaseCommand):
             "Domain",
             "DomainRequest",
             "DomainInformation",
+            "FederalAgency",
             "UserDomainRole",
             "DraftDomain",
             "Website",


@@ -36,6 +36,7 @@ class Command(BaseCommand):
             "HostIp",
             "DraftDomain",
             "Website",
+            "FederalAgency",
             "DomainRequest",
             "DomainInformation",
             "UserDomainRole",
@@ -83,7 +84,12 @@ class Command(BaseCommand):
         result = resource_instance.import_data(dataset, dry_run=False, skip_epp_save=self.skip_epp_save)

         if result.has_errors():
-            logger.error(f"Errors occurred while importing {csv_filename}: {result.row_errors()}")
+            logger.error(f"Errors occurred while importing {csv_filename}:")
+            for row_error in result.row_errors():
+                row_index = row_error[0]
+                errors = row_error[1]
+                for error in errors:
+                    logger.error(f"Row {row_index} - {error.error} - {error.row}")
         else:
             logger.info(f"Successfully imported {csv_filename} into {table_name}")
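
For reference on the new logging loop: in django-import-export, `result.row_errors()` returns `(row_number, [Error, ...])` pairs, and each `Error` carries the raised exception (`error.error`) and the offending row (`error.row`). An equivalent, slightly more compact unpacking, shown only for illustration:

```python
# Same structure the diff iterates over, written with tuple unpacking.
for row_index, errors in result.row_errors():
    for error in errors:
        logger.error(f"Row {row_index} - {error.error} - {error.row}")
```
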