Mirror of https://github.com/cisagov/manage.get.gov.git (synced 2025-05-18 18:39:21 +02:00)
Add unit test
This commit is contained in: parent fe29eaa9fb, commit 0f92f588a8
3 changed files with 34 additions and 17 deletions
@@ -46,19 +46,13 @@ class Command(BaseCommand):
             except Exception as err:
                 self.failed_to_update.append(domain)
                 logger.error(err)
-                logger.error(
-                    f"{TerminalColors.FAIL}"
-                    f"Failed to update {domain}"
-                    f"{TerminalColors.ENDC}"
-                )
+                logger.error(f"{TerminalColors.FAIL}" f"Failed to update {domain}" f"{TerminalColors.ENDC}")
 
         # Do a bulk update on all fields
         ScriptDataHelper.bulk_update_fields(Domain, self.to_update, ["first_ready"])
 
         # Log what happened
-        TerminalHelper.log_script_run_summary(
-            self.to_update, self.failed_to_update, self.skipped, debug
-        )
+        TerminalHelper.log_script_run_summary(self.to_update, self.failed_to_update, self.skipped, debug)
 
     def update_first_ready_for_domain(self, domain: Domain, debug: bool):
         """Grabs the created_at field and associates it with the first_ready column.
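The body of update_first_ready_for_domain sits outside this hunk; based only on its docstring, a minimal sketch of the per-domain step might look like the following. Treat the skip condition, the date conversion, and the debug logging as assumptions rather than the repository's actual implementation.

    # Hypothetical sketch of the per-domain update step (not taken from this commit).
    def update_first_ready_for_domain(self, domain: Domain, debug: bool):
        """Grabs the created_at field and associates it with the first_ready column."""
        if domain.created_at is None:
            # Assumption: domains without a creation timestamp are skipped.
            self.skipped.append(domain)
            return
        # Assumption: first_ready is a DateField, so the datetime is truncated to a date.
        domain.first_ready = domain.created_at.date()
        self.to_update.append(domain)
        if debug:
            logger.info(f"Queued {domain} for update")

The queued domains are then persisted in one pass by the bulk_update_fields call shown above, which is why the handler only appends to self.to_update here instead of calling save() per domain.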
@@ -48,7 +48,7 @@ class ScriptDataHelper:
     """Helper method with utilities to speed up development of scripts that do DB operations"""
 
     @staticmethod
-    def bulk_update_fields(model_class, update_list, batch_size=1000):
+    def bulk_update_fields(model_class, update_list, fields_to_update, batch_size=1000):
         """
         This function performs a bulk update operation on a specified Django model class in batches.
         It uses Django's Paginator to handle large datasets in a memory-efficient manner.
@@ -64,6 +64,8 @@ class ScriptDataHelper:
         Defaults to 1000. If you're dealing with models that have a large number of fields,
         or large field values, you may need to decrease this value to prevent out-of-memory errors.
 
+        fields_to_update:
+
         Usage:
         bulk_update_fields(Domain, page.object_list, ["first_ready"])
         """
@@ -72,10 +74,10 @@ class ScriptDataHelper:
         paginator = Paginator(update_list, batch_size)
         for page_num in paginator.page_range:
             page = paginator.page(page_num)
-            model_class.objects.bulk_update(page.object_list, update_list)
+            model_class.objects.bulk_update(page.object_list, fields_to_update)
 
 
 class TerminalHelper:
     @staticmethod
     def log_script_run_summary(to_update, failed_to_update, skipped, debug: bool):
         """Prints success, failed, and skipped counts, as well as
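The batching pattern in bulk_update_fields is standard Django: Paginator slices the list of already-modified instances, and each page is handed to bulk_update together with the explicit field names. A self-contained sketch of the same idea (the function and variable names here are illustrative, not the helper above):

    from django.core.paginator import Paginator


    def bulk_update_in_batches(model_class, instances, fields_to_update, batch_size=1000):
        """Persist in-memory changes to `instances`, batch_size rows at a time."""
        paginator = Paginator(instances, batch_size)
        for page_num in paginator.page_range:
            page = paginator.page(page_num)
            # bulk_update writes only the named fields for the objects in this page.
            model_class.objects.bulk_update(page.object_list, fields_to_update)


    # Example call, mirroring the docstring's usage:
    # bulk_update_in_batches(Domain, domains_to_update, ["first_ready"])

Note that bulk_update also accepts its own batch_size keyword for splitting the SQL it emits; the Paginator loop here only chunks the Python list the helper receives.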
@@ -29,9 +29,7 @@ class TestPopulateFirstReady(TestCase):
         """Creates a fake domain object"""
         super().setUp()
 
-        Domain.objects.get_or_create(
-            name="fake.gov", state=Domain.State.READY, created_at=datetime.date(2024, 12, 31)
-        )
+        Domain.objects.get_or_create(name="fake.gov", state=Domain.State.READY)
 
     def tearDown(self):
         """Deletes all DB objects related to migrations"""
@@ -53,6 +51,29 @@ class TestPopulateFirstReady(TestCase):
         ):
             call_command("populate_first_ready")
 
+    def test_populate_first_ready(self):
+        """
+        Tests that the populate_first_ready script works as expected
+        """
+        desired_domain = Domain.objects.filter(name="fake.gov").get()
+
+        # Set the created at date
+        desired_domain.created_at = datetime.date(2024, 12, 31)
+        desired_domain.save()
+
+        desired_domain.first_ready = datetime.date(2024, 12, 31)
+
+        # Run the populate_first_ready script
+        self.run_populate_first_ready()
+
+        current_domain = Domain.objects.filter(name="fake.gov").get()
+
+        self.assertEqual(desired_domain, current_domain)
+
+        # Explicitly test the first_ready date
+        self.assertEqual(current_domain.first_ready, datetime.date(2024, 12, 31))
+
+
 class TestExtendExpirationDates(MockEppLib):
     def setUp(self):
         """Defines the file name of migration_json and the folder it's contained in"""
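run_populate_first_ready is only partially visible above (the closing parenthesis and the call_command line at the top of the hunk). A plausible sketch, assuming the helper auto-confirms an interactive prompt before running the command; the dotted patch target and the prompt method name are guesses, not taken from this diff:

    from unittest.mock import patch

    from django.core.management import call_command


    def run_populate_first_ready(self):
        """Run the populate_first_ready command with any confirmation prompt auto-accepted."""
        # Assumption: the script asks for confirmation through a TerminalHelper prompt;
        # the path below is hypothetical.
        with patch(
            "registrar.management.commands.utility.terminal_helper.TerminalHelper.query_yes_no_exit",
            return_value=True,
        ):
            call_command("populate_first_ready")

One detail worth knowing when reading the assertions: Django model instances compare equal when they are the same concrete class with the same primary key, so assertEqual(desired_domain, current_domain) passes regardless of field values; the explicit check on current_domain.first_ready is what actually verifies the backfill.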