Mirror of https://github.com/cisagov/manage.get.gov.git (synced 2025-05-19 02:49:21 +02:00)
Fix test cases
This commit is contained in:
parent 85ee97d615
commit f1acd46588
3 changed files with 263 additions and 116 deletions
@@ -18,6 +18,254 @@ from unittest.mock import patch

from .common import less_console_noise


class TestOrganizationMigration(TestCase):
    def setUp(self):
        """ """
        # self.load_transition_domain_script = "load_transition_domain",
        # self.transfer_script = "transfer_transition_domains_to_domains",
        # self.master_script = "load_transition_domain",

        self.test_data_file_location = "registrar/tests/data"
        self.test_domain_contact_filename = "test_domain_contacts.txt"
        self.test_contact_filename = "test_contacts.txt"
        self.test_domain_status_filename = "test_domain_statuses.txt"

        # Files for parsing additional TransitionDomain data
        self.test_agency_adhoc_filename = "test_agency_adhoc.txt"
        self.test_authority_adhoc_filename = "test_authority_adhoc.txt"
        self.test_domain_additional = "test_domain_additional.txt"
        self.test_domain_types_adhoc = "test_domain_types_adhoc.txt"
        self.test_escrow_domains_daily = "test_escrow_domains_daily"
        self.test_organization_adhoc = "test_organization_adhoc.txt"
        self.migration_json_filename = "test_migrationFilepaths.json"

    def tearDown(self):
        # Delete domain information
        Domain.objects.all().delete()
        DomainInformation.objects.all().delete()
        DomainInvitation.objects.all().delete()
        TransitionDomain.objects.all().delete()

        # Delete users
        User.objects.all().delete()
        UserDomainRole.objects.all().delete()

    def run_load_domains(self):
        # noqa here (E501) because splitting this up makes it
        # confusing to read.
        with patch(
            "registrar.management.commands.utility.terminal_helper.TerminalHelper.query_yes_no_exit",  # noqa
            return_value=True,
        ):
            call_command(
                "load_transition_domain",
                self.migration_json_filename,
                directory=self.test_data_file_location,
            )

    def run_transfer_domains(self):
        call_command("transfer_transition_domains_to_domains")

    def run_load_organization_data(self):
        # noqa here (E501) because splitting this up makes it
        # confusing to read.
        with patch(
            "registrar.management.commands.utility.terminal_helper.TerminalHelper.query_yes_no_exit",  # noqa
            return_value=True,
        ):
            call_command(
                "load_organization_data",
                self.migration_json_filename,
                directory=self.test_data_file_location,
            )

    def compare_tables(
        self,
        expected_total_transition_domains,
        expected_total_domains,
        expected_total_domain_informations,
        expected_total_domain_invitations,
        expected_missing_domains,
        expected_duplicate_domains,
        expected_missing_domain_informations,
        expected_missing_domain_invitations,
    ):
        """Does a diff between the transition_domain table and the
        domain, domain_information, and domain_invitation tables.
        Verifies that the data loaded correctly."""

        missing_domains = []
        duplicate_domains = []
        missing_domain_informations = []
        missing_domain_invites = []
        for transition_domain in TransitionDomain.objects.all():
            transition_domain_name = transition_domain.domain_name
            transition_domain_email = transition_domain.username

            # Check Domain table
            matching_domains = Domain.objects.filter(name=transition_domain_name)
            # Check Domain Information table
            matching_domain_informations = DomainInformation.objects.filter(domain__name=transition_domain_name)
            # Check Domain Invitation table
            matching_domain_invitations = DomainInvitation.objects.filter(
                email=transition_domain_email.lower(),
                domain__name=transition_domain_name,
            )

            if len(matching_domains) == 0:
                missing_domains.append(transition_domain_name)
            elif len(matching_domains) > 1:
                duplicate_domains.append(transition_domain_name)
            if len(matching_domain_informations) == 0:
                missing_domain_informations.append(transition_domain_name)
            if len(matching_domain_invitations) == 0:
                missing_domain_invites.append(transition_domain_name)

        total_missing_domains = len(missing_domains)
        total_duplicate_domains = len(duplicate_domains)
        total_missing_domain_informations = len(missing_domain_informations)
        total_missing_domain_invitations = len(missing_domain_invites)

        total_transition_domains = len(TransitionDomain.objects.all())
        total_domains = len(Domain.objects.all())
        total_domain_informations = len(DomainInformation.objects.all())
        total_domain_invitations = len(DomainInvitation.objects.all())

        print(
            f"""
            total_missing_domains = {len(missing_domains)}
            total_duplicate_domains = {len(duplicate_domains)}
            total_missing_domain_informations = {len(missing_domain_informations)}
            total_missing_domain_invitations = {total_missing_domain_invitations}

            total_transition_domains = {len(TransitionDomain.objects.all())}
            total_domains = {len(Domain.objects.all())}
            total_domain_informations = {len(DomainInformation.objects.all())}
            total_domain_invitations = {len(DomainInvitation.objects.all())}
            """
        )
        self.assertEqual(total_missing_domains, expected_missing_domains)
        self.assertEqual(total_duplicate_domains, expected_duplicate_domains)
        self.assertEqual(total_missing_domain_informations, expected_missing_domain_informations)
        self.assertEqual(total_missing_domain_invitations, expected_missing_domain_invitations)

        self.assertEqual(total_transition_domains, expected_total_transition_domains)
        self.assertEqual(total_domains, expected_total_domains)
        self.assertEqual(total_domain_informations, expected_total_domain_informations)
        self.assertEqual(total_domain_invitations, expected_total_domain_invitations)

    def test_load_organization_data_transition_domain(self):
        # == First, parse all existing data == #
        self.run_load_domains()
        self.run_transfer_domains()

        # == Second, try adding org data to it == #
        self.run_load_organization_data()

        # == Third, test that we've loaded data as we expect == #
        transition_domains = TransitionDomain.objects.filter(domain_name="fakewebsite2.gov")

        # Should return three objects (three unique emails)
        self.assertEqual(transition_domains.count(), 3)

        # Let's test the first one
        transition = transition_domains.first()
        expected_transition_domain = TransitionDomain(
            id=24,
            username='alexandra.bobbitt5@test.com',
            domain_name='fakewebsite2.gov',
            status='on hold',
            email_sent=True,
            organization_type='Federal',
            organization_name='Fanoodle',
            federal_type='Executive',
            federal_agency='Department of Commerce',
            epp_creation_date=datetime.date(2004, 5, 7),
            epp_expiration_date=datetime.date(2023, 9, 30),
            first_name='Seline',
            middle_name='testmiddle2',
            last_name='Tower',
            title=None,
            email='stower3@answers.com',
            phone='151-539-6028',
            address_line='93001 Arizona Drive',
            city='Columbus',
            state_territory='Oh',
            zipcode='43268',
        )

        self.assertEqual(transition, expected_transition_domain)

    def test_load_organization_data_domain_information(self):
        # == First, parse all existing data == #
        self.run_load_domains()
        self.run_transfer_domains()

        # == Second, try adding org data to it == #
        self.run_load_organization_data()

        # == Third, test that we've loaded data as we expect == #
        _domain = Domain.objects.filter(name="fakewebsite2.gov").get()
        domain_information = DomainInformation.objects.filter(domain=_domain).get()
        expected_domain_information = DomainInformation(
            id=4,
            creator_id=1,
            domain_application_id=None,
            organization_type='federal',
            federally_recognized_tribe=None,
            state_recognized_tribe=None,
            tribe_name=None,
            federal_agency='Department of Commerce',
            federal_type='executive',
            is_election_board=None,
            organization_name='Fanoodle',
            address_line1='93001 Arizona Drive',
            address_line2=None,
            city='Columbus',
            state_territory='Oh',
            zipcode='43268',
            urbanization=None,
            about_your_organization=None,
            authorizing_official_id=5,
            domain_id=4,
            submitter_id=None,
            purpose=None,
            no_other_contacts_rationale=None,
            anything_else=None,
            is_policy_acknowledged=None,
        )
        self.assertEqual(domain_information, expected_domain_information)

    def test_load_organization_data_integrity(self):
        """Validates data integrity with the load_org_data command"""
        # First, parse all existing data
        self.run_load_domains()
        self.run_transfer_domains()

        # Second, try adding org data to it
        self.run_load_organization_data()

        # Third, test that we didn't corrupt any data
        expected_total_transition_domains = 9
        expected_total_domains = 5
        expected_total_domain_informations = 5
        expected_total_domain_invitations = 8

        expected_missing_domains = 0
        expected_duplicate_domains = 0
        expected_missing_domain_informations = 0
        # we expect 1 missing invite from anomaly.gov (an injected error)
        expected_missing_domain_invitations = 1
        self.compare_tables(
            expected_total_transition_domains,
            expected_total_domains,
            expected_total_domain_informations,
            expected_total_domain_invitations,
            expected_missing_domains,
            expected_duplicate_domains,
            expected_missing_domain_informations,
            expected_missing_domain_invitations,
        )


class TestMigrations(TestCase):
    def setUp(self):
@@ -41,11 +289,12 @@ class TestMigrations(TestCase):
        self.migration_json_filename = "test_migrationFilepaths.json"

    def tearDown(self):
        super().tearDown()
        # Delete domain information
        TransitionDomain.objects.all().delete()
        Domain.objects.all().delete()
        DomainInvitation.objects.all().delete()
        DomainInformation.objects.all().delete()
        DomainInvitation.objects.all().delete()

        # Delete users
        User.objects.all().delete()
@@ -170,118 +419,6 @@ class TestMigrations(TestCase):
        self.assertEqual(total_domain_informations, expected_total_domain_informations)
        self.assertEqual(total_domain_invitations, expected_total_domain_invitations)

    def test_load_organization_data_transition_domain(self):
        self.maxDiff = None
        # == First, parse all existing data == #
        self.run_master_script()

        # == Second, try adding org data to it == #
        self.run_load_organization_data()

        # == Third, test that we've loaded data as we expect == #
        transition_domains = TransitionDomain.objects.filter(domain_name="fakewebsite2.gov")

        # Should return three objects (three unique emails)
        self.assertEqual(transition_domains.count(), 3)

        # Let's test the first one
        transition = transition_domains.first()
        expected_transition_domain = TransitionDomain(
            id=6,
            username='alexandra.bobbitt5@test.com',
            domain_name='fakewebsite2.gov',
            status='on hold',
            email_sent=True,
            organization_type='Federal',
            organization_name='Fanoodle',
            federal_type='Executive',
            federal_agency='Department of Commerce',
            epp_creation_date=datetime.date(2004, 5, 7),
            epp_expiration_date=datetime.date(2023, 9, 30),
            first_name='Seline',
            middle_name='testmiddle2',
            last_name='Tower',
            title=None,
            email='stower3@answers.com',
            phone='151-539-6028',
            address_line='93001 Arizona Drive',
            city='Columbus',
            state_territory='Oh',
            zipcode='43268',
        )

        self.assertEqual(transition, expected_transition_domain)

    def test_load_organization_data_domain_information(self):
        self.maxDiff = None
        # == First, parse all existing data == #
        self.run_master_script()

        # == Second, try adding org data to it == #
        self.run_load_organization_data()

        # == Third, test that we've loaded data as we expect == #
        _domain = Domain.objects.filter(name="fakewebsite2.gov").get()
        domain_information = DomainInformation.objects.filter(domain=_domain).get()
        expected_domain_information = DomainInformation(
            id=4,
            creator_id=1,
            domain_application_id=None,
            organization_type='federal',
            federally_recognized_tribe=None,
            state_recognized_tribe=None,
            tribe_name=None,
            federal_agency='Department of Commerce',
            federal_type='executive',
            is_election_board=None,
            organization_name='Fanoodle',
            address_line1='93001 Arizona Drive',
            address_line2=None,
            city='Columbus',
            state_territory='Oh',
            zipcode='43268',
            urbanization=None,
            about_your_organization=None,
            authorizing_official_id=5,
            domain_id=4,
            submitter_id=None,
            purpose=None,
            no_other_contacts_rationale=None,
            anything_else=None,
            is_policy_acknowledged=None,
        )
        self.assertEqual(domain_information, expected_domain_information)

    def test_load_organization_data_integrity(self):
        """Validates data integrity with the load_org_data command"""
        # First, parse all existing data
        self.run_master_script()

        # Second, try adding org data to it
        self.run_load_organization_data()

        # Third, test that we didn't corrupt any data
        expected_total_transition_domains = 9
        expected_total_domains = 5
        expected_total_domain_informations = 5
        expected_total_domain_invitations = 8

        expected_missing_domains = 0
        expected_duplicate_domains = 0
        expected_missing_domain_informations = 0
        # we expect 1 missing invite from anomaly.gov (an injected error)
        expected_missing_domain_invitations = 1
        self.compare_tables(
            expected_total_transition_domains,
            expected_total_domains,
            expected_total_domain_informations,
            expected_total_domain_invitations,
            expected_missing_domains,
            expected_duplicate_domains,
            expected_missing_domain_informations,
            expected_missing_domain_invitations,
        )

    def test_master_migration_functions(self):
        """Run the full master migration script using local test data.
        NOTE: This is more of an integration test and so far does not
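The loader helpers in this diff (run_load_domains, run_load_organization_data, and the run_master_script path in the older tests) all drive Django management commands non-interactively: TerminalHelper.query_yes_no_exit is patched to return True so the command skips its confirmation prompt, and the command is then invoked through call_command with the test fixture paths. Below is a minimal standalone sketch of that pattern; it reuses only names that already appear in the diff and is illustrative rather than part of the commit.

    from unittest.mock import patch

    from django.core.management import call_command

    # Sketch: mirrors run_load_domains() above. Returning True from the
    # patched prompt lets the loader run unattended inside a test.
    with patch(
        "registrar.management.commands.utility.terminal_helper.TerminalHelper.query_yes_no_exit",
        return_value=True,
    ):
        call_command(
            "load_transition_domain",
            "test_migrationFilepaths.json",
            directory="registrar/tests/data",
        )

The same patch target and return_value are shared by run_load_domains and run_load_organization_data, so additional loader tests can reuse this shape with a different command name.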