Started on unit tests and on a fix for the issue with deleted-domains filters (broken, WIP)

Rachid Mrad 2023-12-20 15:20:53 -05:00
parent aac5cd698c
commit 31031d054d
No known key found for this signature in database
GPG key ID: EF38E4CEC4A8F3CF
4 changed files with 175 additions and 23 deletions

View file

@@ -1,3 +1,3 @@
-Domain name,Domain type,Agency,Organization name,City,State,Security Contact Email
+Domain name,Domain type,Agency,Organization name,City,State,Security contact email
 cdomain1.gov,Federal - Executive,World War I Centennial Commission,,,,
 ddomain3.gov,Federal,Armed Forces Retirement Home,,,,

View file

@@ -1,4 +1,4 @@
-Domain name,Domain type,Agency,Organization name,City,State,Security Contact Email
+Domain name,Domain type,Agency,Organization name,City,State,Security contact email
 cdomain1.gov,Federal - Executive,World War I Centennial Commission,,,,
 ddomain3.gov,Federal,Armed Forces Retirement Home,,,,
 adomain2.gov,Interstate,,,,,

View file

@@ -14,7 +14,8 @@ from django.conf import settings
 from botocore.exceptions import ClientError
 import boto3_mocking
 from registrar.utility.s3_bucket import S3ClientError, S3ClientErrorCodes  # type: ignore
+from datetime import datetime, timedelta
+from django.utils import timezone
 
 
 class CsvReportsTest(TestCase):
     """Tests to determine if we are uploading our reports correctly"""
@@ -75,7 +76,7 @@
         mock_client = MagicMock()
         fake_open = mock_open()
         expected_file_content = [
-            call("Domain name,Domain type,Agency,Organization name,City,State,Security Contact Email\r\n"),
+            call("Domain name,Domain type,Agency,Organization name,City,State,Security contact email\r\n"),
            call("cdomain1.gov,Federal - Executive,World War I Centennial Commission,,,, \r\n"),
            call("ddomain3.gov,Federal,Armed Forces Retirement Home,,,, \r\n"),
        ]
@@ -94,7 +95,7 @@
         mock_client = MagicMock()
         fake_open = mock_open()
         expected_file_content = [
-            call("Domain name,Domain type,Agency,Organization name,City,State,Security Contact Email\r\n"),
+            call("Domain name,Domain type,Agency,Organization name,City,State,Security contact email\r\n"),
            call("cdomain1.gov,Federal - Executive,World War I Centennial Commission,,,, \r\n"),
            call("ddomain3.gov,Federal,Armed Forces Retirement Home,,,, \r\n"),
            call("adomain2.gov,Interstate,,,,, \r\n"),
@@ -175,7 +176,7 @@
         # Check that the response contains what we expect
         expected_file_content = (
-            "Domain name,Domain type,Agency,Organization name,City,State,Security Contact Email\n"
+            "Domain name,Domain type,Agency,Organization name,City,State,Security contact email\n"
            "cdomain1.gov,Federal - Executive,World War I Centennial Commission,,,,\n"
            "ddomain3.gov,Federal,Armed Forces Retirement Home,,,,"
        ).encode()
@@ -207,7 +208,7 @@
         # Check that the response contains what we expect
         expected_file_content = (
-            "Domain name,Domain type,Agency,Organization name,City,State,Security Contact Email\n"
+            "Domain name,Domain type,Agency,Organization name,City,State,Security contact email\n"
            "cdomain1.gov,Federal - Executive,World War I Centennial Commission,,,,\n"
            "ddomain3.gov,Federal,Armed Forces Retirement Home,,,,\n"
            "adomain2.gov,Interstate,,,,,"
@@ -231,6 +232,8 @@ class ExportDataTest(TestCase):
         self.domain_3, _ = Domain.objects.get_or_create(name="ddomain3.gov", state=Domain.State.ON_HOLD)
         self.domain_4, _ = Domain.objects.get_or_create(name="bdomain4.gov", state=Domain.State.UNKNOWN)
         self.domain_4, _ = Domain.objects.get_or_create(name="bdomain4.gov", state=Domain.State.UNKNOWN)
+        self.domain_5, _ = Domain.objects.get_or_create(name="bdomain5.gov", state=Domain.State.DELETED, deleted_at=datetime(2023, 11, 1))
+        self.domain_6, _ = Domain.objects.get_or_create(name="bdomain6.gov", state=Domain.State.DELETED, deleted_at=datetime(1980, 10, 16))
 
         self.domain_information_1, _ = DomainInformation.objects.get_or_create(
             creator=self.user,
@@ -256,6 +259,18 @@
             organization_type="federal",
             federal_agency="Armed Forces Retirement Home",
         )
+        self.domain_information_5, _ = DomainInformation.objects.get_or_create(
+            creator=self.user,
+            domain=self.domain_5,
+            organization_type="federal",
+            federal_agency="Armed Forces Retirement Home",
+        )
+        self.domain_information_6, _ = DomainInformation.objects.get_or_create(
+            creator=self.user,
+            domain=self.domain_6,
+            organization_type="federal",
+            federal_agency="Armed Forces Retirement Home",
+        )
 
     def tearDown(self):
         Domain.objects.all().delete()
@@ -285,7 +300,7 @@
             "Submitter title",
             "Submitter email",
             "Submitter phone",
-            "Security Contact Email",
+            "Security contact email",
             "Status",
         ]
         sort_fields = ["domain__name"]
@@ -311,7 +326,7 @@
         expected_content = (
             "Domain name,Domain type,Agency,Organization name,City,State,AO,"
             "AO email,Submitter,Submitter title,Submitter email,Submitter phone,"
-            "Security Contact Email,Status\n"
+            "Security contact email,Status\n"
             "adomain2.gov,Interstate,dnsneeded\n"
             "cdomain1.gov,Federal - Executive,World War I Centennial Commission,ready\n"
             "ddomain3.gov,Federal,Armed Forces Retirement Home,onhold\n"
@@ -338,7 +353,7 @@
             "Organization name",
             "City",
             "State",
-            "Security Contact Email",
+            "Security contact email",
         ]
         sort_fields = ["domain__name", "federal_agency", "organization_type"]
         filter_condition = {
@@ -364,7 +379,7 @@
         # sorted alphabetically by domain name
         expected_content = (
             "Domain name,Domain type,Agency,Organization name,City,"
-            "State,Security Contact Email\n"
+            "State,Security contact email\n"
            "cdomain1.gov,Federal - Executive,World War I Centennial Commission\n"
            "ddomain3.gov,Federal,Armed Forces Retirement Home\n"
        )
@@ -375,3 +390,130 @@
         expected_content = expected_content.replace(",,", "").replace(",", "").replace(" ", "").strip()
 
         self.assertEqual(csv_content, expected_content)
+
+    def test_export_domains_to_writer_with_date_filter_pulls_domains_in_range(self):
+        """Test that domains that are READY and in range are pulled when the growth report conditions
+        are applied to export_domains_to_writer."""
+
+        # Create a CSV file in memory
+        csv_file = StringIO()
+        writer = csv.writer(csv_file)
+
+        # Define columns, sort fields, and filter condition
+        columns = [
+            "Domain name",
+            "Domain type",
+            "Agency",
+            "Organization name",
+            "City",
+            "State",
+            "Status",
+            "Deleted at",
+            "Expiration date",
+        ]
+        sort_fields = ["created_at", "domain__name"]
+        filter_condition = {
+            "domain__state__in": [
+                Domain.State.READY,
+            ],
+            "domain__created_at__lt": timezone.make_aware(datetime.now() + timedelta(days=1)),
+            "domain__created_at__gt": timezone.make_aware(datetime.now() - timedelta(days=1)),
+        }
+        filter_conditions_for_additional_domains = {
+            "domain__state__in": [
+                Domain.State.DELETED,
+            ],
+            "domain__deleted_at__lt": timezone.make_aware(datetime.now() + timedelta(days=1)),
+            "domain__deleted_at__gt": timezone.make_aware(datetime.now() - timedelta(days=1)),
+        }
+
+        # Call the export function
+        export_domains_to_writer(writer, columns, sort_fields, filter_condition)
+
+        # Reset the CSV file's position to the beginning
+        csv_file.seek(0)
+
+        # Read the content into a variable
+        csv_content = csv_file.read()
+        print(f'csv_content {csv_content}')
+
+        # We expect READY domains,
+        # federal only
+        # sorted alphabetically by domain name
+        expected_content = (
+            "Domain name,Domain type,Agency,Organization name,City,"
+            "State,Status,Deleted at,Expiration date\n"
+            "cdomain1.gov,Federal-Executive,World War I Centennial Commission,ready,\n"
+        )
+
+        # Normalize line endings and remove commas,
+        # spaces and leading/trailing whitespace
+        csv_content = csv_content.replace(",,", "").replace(",", "").replace(" ", "").replace("\r\n", "\n").strip()
+        expected_content = expected_content.replace(",,", "").replace(",", "").replace(" ", "").strip()
+
+        self.assertEqual(csv_content, expected_content)
+
+    def test_export_domains_to_writer_with_date_filter_pulls_appropriate_deleted_domains(self):
+        """When domain__created_at__gt is in filters, we know it's a growth report
+        and we need to fetch the domainInfos for the deleted domains that are within
+        the date range. However, deleted domains that were deleted at a date outside
+        the range do not get pulled."""
+
+        # Create a CSV file in memory
+        csv_file = StringIO()
+        writer = csv.writer(csv_file)
+
+        # Define columns, sort fields, and filter condition
+        columns = [
+            "Domain name",
+            "Domain type",
+            "Agency",
+            "Organization name",
+            "City",
+            "State",
+            "Status",
+            "Deleted at",
+            "Expiration date",
+        ]
+        sort_fields = ["created_at", "domain__name"]
+        filter_condition = {
+            "domain__state__in": [
+                Domain.State.READY,
+            ],
+            "domain__created_at__lt": timezone.make_aware(datetime(2023, 10, 1)),
+            "domain__created_at__gt": timezone.make_aware(datetime(2023, 12, 1)),
+        }
+        filter_conditions_for_additional_domains = {
+            "domain__state__in": [
+                Domain.State.DELETED,
+            ],
+            "domain__deleted_at__lt": timezone.make_aware(datetime(2023, 10, 1)),
+            "domain__deleted_at__gt": timezone.make_aware(datetime(2023, 12, 1)),
+        }
+
+        # Call the export function
+        export_domains_to_writer(writer, columns, sort_fields, filter_condition, filter_conditions_for_additional_domains)
+
+        # Reset the CSV file's position to the beginning
+        csv_file.seek(0)
+
+        # Read the content into a variable
+        csv_content = csv_file.read()
+        print(f'csv_content {csv_content}')
+
+        # We expect only the deleted domain within the date range,
+        # federal only
+        # sorted alphabetically by domain name
+        expected_content = (
+            "Domain name,Domain type,Agency,Organization name,City,"
+            "State,Status,Deleted at,Expiration date\n"
+            "bdomain5.gov,Federal,Armed Forces Retirement Home,deleted,2023-11-01,\n"
+        )
+
+        # Normalize line endings and remove commas,
+        # spaces and leading/trailing whitespace
+        csv_content = csv_content.replace(",,", "").replace(",", "").replace(" ", "").replace("\r\n", "\n").strip()
+        expected_content = expected_content.replace(",,", "").replace(",", "").replace(" ", "").strip()
+
+        self.assertEqual(csv_content, expected_content)
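
A note on the date bounds used in the new tests above: timezone.make_aware(datetime.now() ± timedelta(days=1)) builds a timezone-aware window around the current moment so that objects created in setUp fall inside it. A minimal sketch of that pattern, assuming Django with USE_TZ = True and the registrar's Domain model (the variable names and the import path shown are illustrative, not part of the commit):

    # Sketch only: building an aware date window like the tests above do.
    from datetime import datetime, timedelta

    from django.utils import timezone

    from registrar.models import Domain  # import path assumed for illustration

    window_start = timezone.make_aware(datetime.now() - timedelta(days=1))
    window_end = timezone.make_aware(datetime.now() + timedelta(days=1))

    # Equivalent and slightly simpler, since timezone.now() is already aware
    # when USE_TZ is enabled:
    # window_start = timezone.now() - timedelta(days=1)

    filter_condition = {
        "domain__state__in": [Domain.State.READY],
        "domain__created_at__gt": window_start,
        "domain__created_at__lt": window_end,
    }

Because timezone.now() returns an aware datetime under USE_TZ, the make_aware(datetime.now()) form used in the tests and the commented alternative are interchangeable here.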

View file

@@ -7,20 +7,25 @@ from registrar.models.public_contact import PublicContact
 from django.db.models import Value
 from django.db.models.functions import Coalesce
 from itertools import chain
+from django.utils import timezone
 
 logger = logging.getLogger(__name__)
 
 
-def export_domains_to_writer(writer, columns, sort_fields, filter_condition):
+def export_domains_to_writer(writer, columns, sort_fields, filter_condition, filter_condition_for_additional_domains=None):
     # write columns headers to writer
     writer.writerow(columns)
+
+    logger.info('export_domains_to_writer')
+    logger.info(filter_condition)
+    logger.info(filter_condition_for_additional_domains)
 
     # Get the domainInfos
     domainInfos = DomainInformation.objects.filter(**filter_condition).order_by(*sort_fields)
 
-    # domain__created_at__gt is in filter_conditions. This means that we're querrying for the growth report and
-    # need to fetch the domainInfos for the deleted domains. This is an OR situation so we can' combine the filters
-    # in one query which would be an AND operation.
-    if 'domain__created_at__gt' in filter_condition:
+    # Condition is true for export_data_growth_to_csv. This is an OR situation so we can't combine the filters
+    # in one query.
+    if filter_condition_for_additional_domains is not None and 'domain__deleted_at__lt' in filter_condition_for_additional_domains:
+        logger.info("Fetching deleted domains")
         deleted_domainInfos = DomainInformation.objects.filter(domain__state=Domain.State.DELETED).order_by("domain__deleted_at")
 
     # Combine the two querysets into a single iterable
     all_domainInfos = list(chain(domainInfos, deleted_domainInfos))
@@ -30,7 +35,6 @@ def export_domains_to_writer(writer, columns, sort_fields, filter_condition):
     for domainInfo in all_domainInfos:
         security_contacts = domainInfo.domain.contacts.filter(contact_type=PublicContact.ContactTypeChoices.SECURITY)
-        print(f"regular filtering {domainInfos}")
 
         # For linter
         ao = " "
         if domainInfo.authorizing_official:
@@ -152,19 +156,19 @@ def export_data_federal_to_csv(csv_file):
 
 def export_data_growth_to_csv(csv_file, start_date, end_date):
     if start_date:
-        start_date_formatted = datetime.strptime(start_date, "%Y-%m-%d")
+        start_date_formatted = timezone.make_aware(datetime.strptime(start_date, "%Y-%m-%d"))
     else:
         # Handle the case where start_date is missing or empty
         # Default to a date that's prior to our first deployment
         logger.error(f"Error fetching the start date, will default to 12023/1/1")
-        start_date_formatted = datetime(2023, 11, 1)  # Replace with appropriate handling
+        start_date_formatted = timezone.make_aware(datetime(2023, 11, 1))  # Replace with appropriate handling
 
     if end_date:
-        end_date_formatted = datetime.strptime(end_date, "%Y-%m-%d")
+        end_date_formatted = timezone.make_aware(datetime.strptime(end_date, "%Y-%m-%d"))
     else:
         # Handle the case where end_date is missing or empty
         logger.error(f"Error fetching the end date, will default to now()")
-        end_date_formatted = datetime.now()  # Replace with appropriate handling
+        end_date_formatted = timezone.make_aware(datetime.now())  # Replace with appropriate handling
 
     writer = csv.writer(csv_file)
 
     # define columns to include in export
@@ -186,10 +190,16 @@ def export_data_growth_to_csv(csv_file, start_date, end_date):
     ]
     filter_condition = {
         "domain__state__in": [
-            Domain.State.UNKNOWN,
+            Domain.State.READY,
+        ],
+        "domain__created_at__lt": end_date_formatted,
+        "domain__created_at__gt": start_date_formatted,
+    }
+    filter_condition_for_additional_domains = {
+        "domain__state__in": [
             Domain.State.DELETED,
         ],
         "domain__created_at__lt": end_date_formatted,
         "domain__created_at__gt": start_date_formatted,
     }
-    export_domains_to_writer(writer, columns, sort_fields, filter_condition)
+    export_domains_to_writer(writer, columns, sort_fields, filter_condition, filter_condition_for_additional_domains)
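
For context on the comment in export_domains_to_writer above ("this is an OR situation so we can't combine the filters in one query"): the two filter dicts describe disjoint sets, for example READY domains created inside the reporting window and DELETED domains removed inside it, so they have to stay as separate querysets that are concatenated afterwards. A minimal sketch of that shape, assuming the registrar's DomainInformation model and the same filter-dict layout as the commit; the function name is illustrative, and applying the second filter dict directly is an assumption, since the committed WIP version still hard-codes the DELETED query and ignores its date bounds:

    # Sketch only — not the committed implementation.
    from itertools import chain

    from registrar.models import DomainInformation  # import path assumed for illustration


    def fetch_domain_infos(filter_condition, filter_condition_for_additional_domains=None, sort_fields=("domain__name",)):
        # Primary queryset, e.g. READY domains created inside the window.
        domain_infos = DomainInformation.objects.filter(**filter_condition).order_by(*sort_fields)

        if not filter_condition_for_additional_domains:
            return list(domain_infos)

        # Secondary queryset, e.g. DELETED domains removed inside the window.
        # The two conditions are OR'ed, so they stay separate queries and are
        # concatenated afterwards instead of being AND'ed in a single .filter().
        deleted_infos = DomainInformation.objects.filter(
            **filter_condition_for_additional_domains
        ).order_by("domain__deleted_at")

        return list(chain(domain_infos, deleted_infos))

An alternative would be a single queryset using Q objects, e.g. DomainInformation.objects.filter(Q(**filter_condition) | Q(**filter_condition_for_additional_domains)), at the cost of losing the separate orderings used here.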