Code cleanup, layout cleanup, unit tests

This commit is contained in:
Rachid Mrad 2023-12-20 21:54:13 -05:00
parent 31031d054d
commit cb16f5eb96
No known key found for this signature in database
GPG key ID: EF38E4CEC4A8F3CF
5 changed files with 229 additions and 151 deletions

View file

@ -282,13 +282,20 @@ function enableRelatedWidgetButtons(changeLink, deleteLink, viewLink, elementPk,
*/ */
(function (){ (function (){
// Get the current date in the format YYYY-MM-DD
var currentDate = new Date().toISOString().split('T')[0];
// Default the value of the end date input field to the current date
let endDateInput =document.getElementById('end');
endDateInput.value = currentDate;
let exportGrowthReportButton = document.getElementById('exportLink'); let exportGrowthReportButton = document.getElementById('exportLink');
if (exportGrowthReportButton) { if (exportGrowthReportButton) {
exportGrowthReportButton.addEventListener('click', function() { exportGrowthReportButton.addEventListener('click', function() {
// Get the selected start and end dates // Get the selected start and end dates
let startDate = document.getElementById('start').value; let startDate = document.getElementById('start').value;
let endDate = document.getElementById('end').value; let endDate = endDateInput.value;
let exportUrl = document.getElementById('exportLink').dataset.exportUrl; let exportUrl = document.getElementById('exportLink').dataset.exportUrl;
// Build the URL with parameters // Build the URL with parameters

View file

@ -14,12 +14,18 @@
See the commit "Review for ticket #999" See the commit "Review for ticket #999"
{% endcomment %} {% endcomment %}
<label for="start">Start date:</label> <div class="display-flex flex-align-baseline flex-justify margin-y-2">
<input type="date" id="start" name="trip-start" value="2018-07-22" min="2018-01-01" /> <div>
<label for="end">End date:</label> <label for="start">Start date:</label>
<input type="date" id="end" name="trip-end" value="2023-12-19" min="2023-12-01" /> <input type="date" id="start" name="start" value="2018-07-22" min="2018-01-01" />
</div>
<div>
<label for="end">End date:</label>
<input type="date" id="end" name="end" value="2023-12-01" min="2023-12-01" />
</div>
<button id="exportLink" data-export-url="{% url 'admin_export_data' %}" type="button" class="button">Export</button> <button id="exportLink" data-export-url="{% url 'admin_export_data' %}" type="button" class="button">Export</button>
</div>
</div> </div>
</div> </div>

View file

@ -0,0 +1,52 @@
from django.test import TestCase, Client
from django.urls import reverse
from registrar.tests.common import create_superuser
from registrar.views.admin_views import ExportData
class TestViews(TestCase):
    """Tests for the admin views (currently the growth-report CSV export)."""

    def setUp(self):
        self.client = Client(HTTP_HOST="localhost:8080")
        self.superuser = create_superuser()

    def test_export_data_view(self):
        """A logged-in superuser can download the growth report as a CSV
        attachment for an explicit start/end date range."""
        self.client.force_login(self.superuser)

        # Sanity check: the admin index page is reachable for the superuser
        admin_index_url = reverse("admin:index")
        response = self.client.get(admin_index_url)
        self.assertEqual(response.status_code, 200)

        # Ensure that the start_date and end_date are set
        start_date = "2023-01-01"
        end_date = "2023-12-31"

        # Construct the URL for the export data view with start_date and
        # end_date parameters: this mirrors what the JS currently does.
        export_data_url = reverse("admin_export_data") + f"?start_date={start_date}&end_date={end_date}"

        # Make a GET request to the export data page
        response = self.client.get(export_data_url)

        # The view should respond OK with a CSV attachment named after the range
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response["Content-Type"], "text/csv")
        expected_filename = f'growth-from-{start_date}-to-{end_date}.csv'
        self.assertIn(f'attachment; filename="{expected_filename}"', response["Content-Disposition"])

View file

@ -6,7 +6,7 @@ from registrar.models.domain_information import DomainInformation
from registrar.models.domain import Domain from registrar.models.domain import Domain
from registrar.models.user import User from registrar.models.user import User
from django.contrib.auth import get_user_model from django.contrib.auth import get_user_model
from registrar.utility.csv_export import export_domains_to_writer from registrar.utility.csv_export import export_domains_to_writer, get_default_start_date, get_default_end_date, export_data_growth_to_csv
from django.core.management import call_command from django.core.management import call_command
from unittest.mock import MagicMock, call, mock_open, patch from unittest.mock import MagicMock, call, mock_open, patch
from api.views import get_current_federal, get_current_full from api.views import get_current_federal, get_current_full
@ -14,7 +14,7 @@ from django.conf import settings
from botocore.exceptions import ClientError from botocore.exceptions import ClientError
import boto3_mocking import boto3_mocking
from registrar.utility.s3_bucket import S3ClientError, S3ClientErrorCodes # type: ignore from registrar.utility.s3_bucket import S3ClientError, S3ClientErrorCodes # type: ignore
from datetime import datetime, timedelta from datetime import date, datetime, timedelta
from django.utils import timezone from django.utils import timezone
class CsvReportsTest(TestCase): class CsvReportsTest(TestCase):
@ -232,8 +232,12 @@ class ExportDataTest(TestCase):
self.domain_3, _ = Domain.objects.get_or_create(name="ddomain3.gov", state=Domain.State.ON_HOLD) self.domain_3, _ = Domain.objects.get_or_create(name="ddomain3.gov", state=Domain.State.ON_HOLD)
self.domain_4, _ = Domain.objects.get_or_create(name="bdomain4.gov", state=Domain.State.UNKNOWN) self.domain_4, _ = Domain.objects.get_or_create(name="bdomain4.gov", state=Domain.State.UNKNOWN)
self.domain_4, _ = Domain.objects.get_or_create(name="bdomain4.gov", state=Domain.State.UNKNOWN) self.domain_4, _ = Domain.objects.get_or_create(name="bdomain4.gov", state=Domain.State.UNKNOWN)
self.domain_5, _ = Domain.objects.get_or_create(name="bdomain5.gov", state=Domain.State.DELETED, deleted_at=datetime(2023, 11, 1)) self.domain_5, _ = Domain.objects.get_or_create(name="bdomain5.gov", state=Domain.State.DELETED, deleted_at=timezone.make_aware(datetime(2023, 11, 1)))
self.domain_6, _ = Domain.objects.get_or_create(name="bdomain6.gov", state=Domain.State.DELETED, deleted_at=datetime(1980, 10, 16)) self.domain_6, _ = Domain.objects.get_or_create(name="bdomain6.gov", state=Domain.State.DELETED, deleted_at=timezone.make_aware(datetime(1980, 10, 16)))
self.domain_7, _ = Domain.objects.get_or_create(name="xdomain7.gov", state=Domain.State.DELETED, deleted_at=timezone.now())
self.domain_8, _ = Domain.objects.get_or_create(name="sdomain8.gov", state=Domain.State.DELETED, deleted_at=timezone.now())
# We use timezone.make_aware to sync to server time a datetime object with the current date (using date.today()) and a specific time (using datetime.min.time()).
self.domain_9, _ = Domain.objects.get_or_create(name="zdomain9.gov", state=Domain.State.DELETED, deleted_at=timezone.make_aware(datetime.combine(date.today() - timedelta(days=1), datetime.min.time())))
self.domain_information_1, _ = DomainInformation.objects.get_or_create( self.domain_information_1, _ = DomainInformation.objects.get_or_create(
creator=self.user, creator=self.user,
@ -271,6 +275,24 @@ class ExportDataTest(TestCase):
organization_type="federal", organization_type="federal",
federal_agency="Armed Forces Retirement Home", federal_agency="Armed Forces Retirement Home",
) )
self.domain_information_7, _ = DomainInformation.objects.get_or_create(
creator=self.user,
domain=self.domain_7,
organization_type="federal",
federal_agency="Armed Forces Retirement Home",
)
self.domain_information_8, _ = DomainInformation.objects.get_or_create(
creator=self.user,
domain=self.domain_8,
organization_type="federal",
federal_agency="Armed Forces Retirement Home",
)
self.domain_information_9, _ = DomainInformation.objects.get_or_create(
creator=self.user,
domain=self.domain_9,
organization_type="federal",
federal_agency="Armed Forces Retirement Home",
)
def tearDown(self): def tearDown(self):
Domain.objects.all().delete() Domain.objects.all().delete()
@ -392,11 +414,23 @@ class ExportDataTest(TestCase):
self.assertEqual(csv_content, expected_content) self.assertEqual(csv_content, expected_content)
def test_export_domains_to_writer_with_date_filter_pulls_domains_in_range(self): def test_export_domains_to_writer_with_date_filter_pulls_domains_in_range(self):
"""Test that domains that are READY and in range are pulled when the growth report conditions """Test that domains that are
are applied to export_domains_to_writer.""" 1. READY and their created_at dates are in range
2. DELETED and their deleted_at dates are in range
are pulled when the growth report conditions are applied to export_domains_to_writer.
Test that ready domains display first and deleted second, sorted according to
specified keys.
We considered testing export_data_growth_to_csv which calls export_domains_to_writer
and would have been easy to set up, but expected_content would contain created_at dates
which are hard to mock."""
# Create a CSV file in memory # Create a CSV file in memory
csv_file = StringIO() csv_file = StringIO()
writer = csv.writer(csv_file) writer = csv.writer(csv_file)
# We use timezone.make_aware to sync to server time a datetime object with the current date (using date.today()) and a specific time (using datetime.min.time()).
end_date = timezone.make_aware(datetime.combine(date.today() + timedelta(days=2), datetime.min.time()))
start_date = timezone.make_aware(datetime.combine(date.today() - timedelta(days=2), datetime.min.time()))
# Define columns, sort fields, and filter condition # Define columns, sort fields, and filter condition
columns = [ columns = [
@ -407,27 +441,30 @@ class ExportDataTest(TestCase):
"City", "City",
"State", "State",
"Status", "Status",
"Deleted at",
"Expiration date", "Expiration date",
] ]
sort_fields = ["created_at","domain__name",] sort_fields = ["created_at","domain__name",]
sort_fields_for_additional_domains = [
"domain__deleted_at",
"domain__name",
]
filter_condition = { filter_condition = {
"domain__state__in": [ "domain__state__in": [
Domain.State.READY, Domain.State.READY,
], ],
"domain__created_at__lt": timezone.make_aware(datetime.now() + timedelta(days=1)), "domain__created_at__lt": end_date,
"domain__created_at__gt": timezone.make_aware(datetime.now() - timedelta(days=1)), "domain__created_at__gt": start_date,
} }
filter_conditions_for_additional_domains = { filter_conditions_for_additional_domains = {
"domain__state__in": [ "domain__state__in": [
Domain.State.DELETED, Domain.State.DELETED,
], ],
"domain__deleted_at__lt": timezone.make_aware(datetime.now() + timedelta(days=1)), "domain__deleted_at__lt": end_date,
"domain__deleted_at__gt": timezone.make_aware(datetime.now() - timedelta(days=1)), "domain__deleted_at__gt": start_date,
} }
# Call the export function # Call the export function
export_domains_to_writer(writer, columns, sort_fields, filter_condition) export_domains_to_writer(writer, columns, sort_fields, filter_condition, sort_fields_for_additional_domains, filter_conditions_for_additional_domains)
# Reset the CSV file's position to the beginning # Reset the CSV file's position to the beginning
csv_file.seek(0) csv_file.seek(0)
@ -435,15 +472,15 @@ class ExportDataTest(TestCase):
# Read the content into a variable # Read the content into a variable
csv_content = csv_file.read() csv_content = csv_file.read()
print(f'csv_content {csv_content}') # We expect READY domains first, created between today-2 and today+2, sorted by created_at then name
# and DELETED domains deleted between today-2 and today+2, sorted by deleted_at then name
# We expect READY domains,
# federal only
# sorted alphabetically by domain name # sorted alphabetically by domain name
expected_content = ( expected_content = (
"Domain name,Domain type,Agency,Organization name,City," "Domain name,Domain type,Agency,Organization name,City,"
"State,Status,Deleted at,Expiration date\n" "State,Status,Expiration date\n"
"cdomain1.gov,Federal-Executive,World War I Centennial Commission,ready,\n" "cdomain1.gov,Federal-Executive,World War I Centennial Commission,ready\n"
"zdomain9.gov,Federal,Armed Forces Retirement Home,,,,deleted,\n"
"sdomain8.gov,Federal,Armed Forces Retirement Home,,,,deleted,\n"
"xdomain7.gov,Federal,Armed Forces Retirement Home,,,,deleted,\n"
) )
# Normalize line endings and remove commas, # Normalize line endings and remove commas,
@ -453,67 +490,16 @@ class ExportDataTest(TestCase):
self.assertEqual(csv_content, expected_content) self.assertEqual(csv_content, expected_content)
def test_export_domains_to_writer_with_date_filter_pulls_appropriate_deleted_domains(self): class HelperFunctions(TestCase):
"""When domain__created_at__gt is in filters, we know it's a growth report """This asserts that 1=1. Its limited usefulness lies in making sure the helper methods stay healthy."""
and we need to fetch the domainInfos for the deleted domains that are within
the date range. However, deleted domains that were deleted at a date outside
the range do not get pulled."""
# Create a CSV file in memory
csv_file = StringIO()
writer = csv.writer(csv_file)
# Define columns, sort fields, and filter condition def test_get_default_start_date(self):
columns = [ expected_date = timezone.make_aware(datetime(2023, 11, 1))
"Domain name", actual_date = get_default_start_date()
"Domain type", self.assertEqual(actual_date, expected_date)
"Agency",
"Organization name",
"City",
"State",
"Status",
"Deleted at",
"Expiration date",
]
sort_fields = ["created_at","domain__name",]
filter_condition = {
"domain__state__in": [
Domain.State.READY,
],
"domain__created_at__lt": timezone.make_aware(datetime(2023, 10, 1)),
"domain__created_at__gt": timezone.make_aware(datetime(2023, 12, 1)),
}
filter_conditions_for_additional_domains = {
"domain__state__in": [
Domain.State.DELETED,
],
"domain__deleted_at__lt": timezone.make_aware(datetime(2023, 10, 1)),
"domain__deleted_at__gt": timezone.make_aware(datetime(2023, 12, 1)),
}
# Call the export function
export_domains_to_writer(writer, columns, sort_fields, filter_condition, filter_conditions_for_additional_domains)
# Reset the CSV file's position to the beginning
csv_file.seek(0)
# Read the content into a variable
csv_content = csv_file.read()
print(f'csv_content {csv_content}')
# We expect READY domains,
# federal only
# sorted alphabetically by domain name
expected_content = (
"Domain name,Domain type,Agency,Organization name,City,"
"State,Status,Deleted at,Expiration date\n"
"bdomain5.gov,Federal,Armed Forces Retirement Home,deleted,2023-11-01,\n"
)
# Normalize line endings and remove commas,
# spaces and leading/trailing whitespace
csv_content = csv_content.replace(",,", "").replace(",", "").replace(" ", "").replace("\r\n", "\n").strip()
expected_content = expected_content.replace(",,", "").replace(",", "").replace(" ", "").strip()
self.assertEqual(csv_content, expected_content)
def test_get_default_end_date(self):
# Note: You may need to mock timezone.now() for accurate testing
expected_date = timezone.now()
actual_date = get_default_end_date()
self.assertEqual(actual_date.date(), expected_date.date())

View file

@ -1,6 +1,6 @@
import csv import csv
import logging import logging
from datetime import datetime from datetime import date, datetime
from registrar.models.domain import Domain from registrar.models.domain import Domain
from registrar.models.domain_information import DomainInformation from registrar.models.domain_information import DomainInformation
from registrar.models.public_contact import PublicContact from registrar.models.public_contact import PublicContact
@ -11,58 +11,66 @@ from django.utils import timezone
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
def export_domains_to_writer(writer, columns, sort_fields, filter_condition, filter_condition_for_additional_domains=None): def get_domain_infos(filter_condition, sort_fields):
domain_infos = DomainInformation.objects.filter(**filter_condition).order_by(*sort_fields)
return domain_infos
def write_row(writer, columns, domain_info):
    """Write one CSV row for *domain_info*, emitting only the requested columns.

    Each name in *columns* is looked up in a dict of all exportable fields;
    unknown column names produce an empty cell.
    """
    # Security contact(s) registered for this domain (queryset; may be empty)
    security_contacts = domain_info.domain.contacts.filter(contact_type=PublicContact.ContactTypeChoices.SECURITY)
    # For linter
    # Default the AO display name to a single-space placeholder when unset
    ao = " "
    if domain_info.authorizing_official:
        first_name = domain_info.authorizing_official.first_name or ""
        last_name = domain_info.authorizing_official.last_name or ""
        ao = first_name + " " + last_name
    # create a dictionary of fields which can be included in output
    FIELDS = {
        "Domain name": domain_info.domain.name,
        # Append " - <federal type>" only when a federal type is set
        "Domain type": domain_info.get_organization_type_display() + " - " + domain_info.get_federal_type_display()
        if domain_info.federal_type
        else domain_info.get_organization_type_display(),
        "Agency": domain_info.federal_agency,
        "Organization name": domain_info.organization_name,
        "City": domain_info.city,
        "State": domain_info.state_territory,
        "AO": ao,
        "AO email": domain_info.authorizing_official.email if domain_info.authorizing_official else " ",
        # First security contact's email, or a space placeholder when none exist
        "Security contact email": security_contacts[0].email if security_contacts else " ",
        "Status": domain_info.domain.state,
        "Expiration date": domain_info.domain.expiration_date,
        "Created at": domain_info.domain.created_at,
        "Deleted at": domain_info.domain.deleted_at,
    }
    writer.writerow([FIELDS.get(column, "") for column in columns])
def export_domains_to_writer(writer, columns, sort_fields, filter_condition, sort_fields_for_additional_domains=None, filter_condition_for_additional_domains=None):
"""
Receives params from the parent methods and outputs a CSV with fltered and sorted domains.
The 'additional' params enable us to concatenate 2 different filtered lists.
"""
# write columns headers to writer # write columns headers to writer
writer.writerow(columns) writer.writerow(columns)
logger.info('export_domains_to_writer')
logger.info(filter_condition)
logger.info(filter_condition_for_additional_domains)
# Get the domainInfos # Get the domainInfos
domainInfos = DomainInformation.objects.filter(**filter_condition).order_by(*sort_fields) domainInfos = get_domain_infos(filter_condition, sort_fields)
# Condition is true for export_data_growth_to_csv. This is an OR situation so we can't combine the filters # Condition is true for export_data_growth_to_csv. This is an OR situation so we can't combine the filters
# in one query. # in one query.
if filter_condition_for_additional_domains is not None and 'domain__deleted_at__lt' in filter_condition_for_additional_domains: if filter_condition_for_additional_domains is not None and 'domain__deleted_at__lt' in filter_condition_for_additional_domains:
logger.info("Fetching deleted domains") # Get the deleted domain infos
deleted_domainInfos = DomainInformation.objects.filter(domain__state=Domain.State.DELETED).order_by("domain__deleted_at") deleted_domainInfos = get_domain_infos(filter_condition_for_additional_domains, sort_fields_for_additional_domains)
# Combine the two querysets into a single iterable # Combine the two querysets into a single iterable
all_domainInfos = list(chain(domainInfos, deleted_domainInfos)) all_domainInfos = list(chain(domainInfos, deleted_domainInfos))
else: else:
all_domainInfos = list(domainInfos) all_domainInfos = list(domainInfos)
# Write rows to CSV
for domainInfo in all_domainInfos: for domain_info in all_domainInfos:
security_contacts = domainInfo.domain.contacts.filter(contact_type=PublicContact.ContactTypeChoices.SECURITY) write_row(writer, columns, domain_info)
# For linter
ao = " "
if domainInfo.authorizing_official:
first_name = domainInfo.authorizing_official.first_name or ""
last_name = domainInfo.authorizing_official.last_name or ""
ao = first_name + " " + last_name
# create a dictionary of fields which can be included in output
FIELDS = {
"Domain name": domainInfo.domain.name,
"Domain type": domainInfo.get_organization_type_display() + " - " + domainInfo.get_federal_type_display()
if domainInfo.federal_type
else domainInfo.get_organization_type_display(),
"Agency": domainInfo.federal_agency,
"Organization name": domainInfo.organization_name,
"City": domainInfo.city,
"State": domainInfo.state_territory,
"AO": ao,
"AO email": domainInfo.authorizing_official.email if domainInfo.authorizing_official else " ",
"Security contact email": security_contacts[0].email if security_contacts else " ",
"Status": domainInfo.domain.state,
"Expiration date": domainInfo.domain.expiration_date,
"Created at": domainInfo.domain.created_at,
"Deleted at": domainInfo.domain.deleted_at,
}
writer.writerow([FIELDS.get(column, "") for column in columns])
def export_data_type_to_csv(csv_file): def export_data_type_to_csv(csv_file):
"""All domains report with extra columns"""
writer = csv.writer(csv_file) writer = csv.writer(csv_file)
# define columns to include in export # define columns to include in export
columns = [ columns = [
@ -94,8 +102,9 @@ def export_data_type_to_csv(csv_file):
} }
export_domains_to_writer(writer, columns, sort_fields, filter_condition) export_domains_to_writer(writer, columns, sort_fields, filter_condition)
def export_data_full_to_csv(csv_file): def export_data_full_to_csv(csv_file):
"""All domains report"""
writer = csv.writer(csv_file) writer = csv.writer(csv_file)
# define columns to include in export # define columns to include in export
columns = [ columns = [
@ -125,6 +134,8 @@ def export_data_full_to_csv(csv_file):
def export_data_federal_to_csv(csv_file): def export_data_federal_to_csv(csv_file):
"""Federal domains report"""
writer = csv.writer(csv_file) writer = csv.writer(csv_file)
# define columns to include in export # define columns to include in export
columns = [ columns = [
@ -153,24 +164,37 @@ def export_data_federal_to_csv(csv_file):
} }
export_domains_to_writer(writer, columns, sort_fields, filter_condition) export_domains_to_writer(writer, columns, sort_fields, filter_condition)
def get_default_start_date():
    """Fallback start date for the growth report when none is supplied."""
    # Default to a date that's prior to our first deployment
    return timezone.make_aware(datetime(2023, 11, 1))
def get_default_end_date():
    """Fallback end date for the growth report when none is supplied."""
    # Default to now()
    return timezone.now()
def export_data_growth_to_csv(csv_file, start_date, end_date): def export_data_growth_to_csv(csv_file, start_date, end_date):
"""
Growth report:
Receive start and end dates from the view, parse them.
Request from export_domains_to_writer READY domains that are created between
the start and end dates, as well as DELETED domains that are deleted between
the start and end dates. Specify sort params for both lists.
"""
if start_date: start_date_formatted = (
start_date_formatted = timezone.make_aware(datetime.strptime(start_date, "%Y-%m-%d")) timezone.make_aware(datetime.strptime(start_date, "%Y-%m-%d"))
else: if start_date
# Handle the case where start_date is missing or empty else get_default_start_date()
# Default to a date that's prior to our first deployment )
logger.error(f"Error fetching the start date, will default to 12023/1/1")
start_date_formatted = timezone.make_aware(datetime(2023, 11, 1)) # Replace with appropriate handling
if end_date: end_date_formatted = (
end_date_formatted = timezone.make_aware(datetime.strptime(end_date, "%Y-%m-%d")) timezone.make_aware(datetime.strptime(end_date, "%Y-%m-%d"))
else: if end_date
# Handle the case where end_date is missing or empty else get_default_end_date()
logger.error(f"Error fetching the end date, will default to now()") )
end_date_formatted = timezone.make_aware(datetime.now()) # Replace with appropriate handling
writer = csv.writer(csv_file) writer = csv.writer(csv_file)
# define columns to include in export # define columns to include in export
columns = [ columns = [
"Domain name", "Domain name",
@ -189,17 +213,20 @@ def export_data_growth_to_csv(csv_file, start_date, end_date):
"domain__name", "domain__name",
] ]
filter_condition = { filter_condition = {
"domain__state__in": [ "domain__state__in": [Domain.State.READY],
Domain.State.READY,
],
"domain__created_at__lt": end_date_formatted, "domain__created_at__lt": end_date_formatted,
"domain__created_at__gt": start_date_formatted, "domain__created_at__gt": start_date_formatted,
} }
# We also want domains deleted between start and end dates, sorted
sort_fields_for_additional_domains = [
"domain__deleted_at",
"domain__name",
]
filter_condition_for_additional_domains = { filter_condition_for_additional_domains = {
"domain__state__in": [ "domain__state__in": [Domain.State.DELETED],
Domain.State.DELETED,
],
"domain__created_at__lt": end_date_formatted, "domain__created_at__lt": end_date_formatted,
"domain__created_at__gt": start_date_formatted, "domain__created_at__gt": start_date_formatted,
} }
export_domains_to_writer(writer, columns, sort_fields, filter_condition, filter_condition_for_additional_domains)
export_domains_to_writer(writer, columns, sort_fields, filter_condition, sort_fields_for_additional_domains, filter_condition_for_additional_domains)