From cb16f5eb96a23667c23d3c1eff20b58896aeaccb Mon Sep 17 00:00:00 2001
From: Rachid Mrad
Date: Wed, 20 Dec 2023 21:54:13 -0500
Subject: [PATCH] Code cleanup, layout cleanup, unit tests

---
 src/registrar/assets/js/get-gov-admin.js |   9 +-
 src/registrar/templates/admin/index.html |  16 ++-
 src/registrar/tests/test_admin_views.py  |  52 ++++++++
 src/registrar/tests/test_reports.py      | 154 +++++++++++------
 src/registrar/utility/csv_export.py      | 149 +++++++++++---------
 5 files changed, 229 insertions(+), 151 deletions(-)
 create mode 100644 src/registrar/tests/test_admin_views.py

diff --git a/src/registrar/assets/js/get-gov-admin.js b/src/registrar/assets/js/get-gov-admin.js
index f5bcd3b26..ce0725a63 100644
--- a/src/registrar/assets/js/get-gov-admin.js
+++ b/src/registrar/assets/js/get-gov-admin.js
@@ -282,13 +282,20 @@ function enableRelatedWidgetButtons(changeLink, deleteLink, viewLink, elementPk,
  */
 (function (){
+    // Get the current date in the format YYYY-MM-DD
+    var currentDate = new Date().toISOString().split('T')[0];
+
+    // Default the value of the end date input field to the current date
+    let endDateInput = document.getElementById('end');
+    endDateInput.value = currentDate;
+
     let exportGrowthReportButton = document.getElementById('exportLink');
 
     if (exportGrowthReportButton) {
         exportGrowthReportButton.addEventListener('click', function() {
             // Get the selected start and end dates
             let startDate = document.getElementById('start').value;
-            let endDate = document.getElementById('end').value;
+            let endDate = endDateInput.value;
             let exportUrl = document.getElementById('exportLink').dataset.exportUrl;
 
             // Build the URL with parameters
diff --git a/src/registrar/templates/admin/index.html b/src/registrar/templates/admin/index.html
index 495dbc4f9..a98a09696 100644
--- a/src/registrar/templates/admin/index.html
+++ b/src/registrar/templates/admin/index.html
@@ -14,12 +14,18 @@
 See the commit "Review for ticket #999"
 {% endcomment %}
 [markup-only diff lines lost in extraction: this hunk reworks the layout around the #start and #end date inputs, wrapping each in its own container, and re-renders the #exportLink element whose data-export-url attribute is read by the JS above]
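Note on context: the JS above and the new test below both drive the same endpoint. The URL name "admin_export_data" resolves to the ExportData view in src/registrar/views/admin_views.py, which this patch does not touch. A minimal sketch of the contract the JS and test assume follows; the names and details are illustrative assumptions, not the committed implementation:

    from django.http import HttpResponse
    from django.views import View

    from registrar.utility import csv_export


    class ExportData(View):
        def get(self, request, *args, **kwargs):
            # The JS builds "?start_date=YYYY-MM-DD&end_date=YYYY-MM-DD" onto data-export-url
            start_date = request.GET.get("start_date", "")
            end_date = request.GET.get("end_date", "")

            # HttpResponse is file-like, so the csv.writer created in csv_export
            # can write straight into it
            response = HttpResponse(content_type="text/csv")
            response["Content-Disposition"] = f'attachment; filename="growth-from-{start_date}-to-{end_date}.csv"'
            csv_export.export_data_growth_to_csv(response, start_date, end_date)
            return response

The Content-Type and Content-Disposition values here are exactly what test_export_data_view below asserts on.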
diff --git a/src/registrar/tests/test_admin_views.py b/src/registrar/tests/test_admin_views.py
new file mode 100644
index 000000000..6336ed139
--- /dev/null
+++ b/src/registrar/tests/test_admin_views.py
@@ -0,0 +1,52 @@
+from django.test import TestCase, Client
+from django.urls import reverse
+from registrar.tests.common import create_superuser
+from registrar.views.admin_views import ExportData
+
+
+class TestViews(TestCase):
+    def setUp(self):
+        self.client = Client(HTTP_HOST="localhost:8080")
+        self.superuser = create_superuser()
+
+    def test_export_data_view(self):
+
+        self.client.force_login(self.superuser)
+
+        # Reverse the URL for the admin index page
+        admin_index_url = reverse("admin:index")
+
+        # Make a GET request to the admin index page
+        response = self.client.get(admin_index_url)
+
+        print(f'response1 {response}')
+
+        # Assert that the response status code is 200 (OK)
+        self.assertEqual(response.status_code, 200)
+
+        # Ensure that the start_date and end_date are set
+        start_date = "2023-01-01"
+        end_date = "2023-12-31"
+
+        # Construct the URL for the export data view with start_date and end_date parameters:
+        # This stuff is currently done in JS
+        export_data_url = reverse("admin_export_data") + f"?start_date={start_date}&end_date={end_date}"
+
+        # Make a GET request to the export data page
+        response = self.client.get(export_data_url)
+
+        print(response)
+
+        # Assert that the response status code is 200 (OK) or the expected status code
+        self.assertEqual(response.status_code, 200)
+
+        # Assert that the content type is CSV
+        self.assertEqual(response["Content-Type"], "text/csv")
+
+        # Check if the filename in the Content-Disposition header matches the expected pattern
+        expected_filename = f'growth-from-{start_date}-to-{end_date}.csv'
+        self.assertIn(f'attachment; filename="{expected_filename}"', response["Content-Disposition"])
+
+
+
\ No newline at end of file
diff --git a/src/registrar/tests/test_reports.py b/src/registrar/tests/test_reports.py
index c13570c52..2264adf28 100644
--- a/src/registrar/tests/test_reports.py
+++ b/src/registrar/tests/test_reports.py
@@ -6,7 +6,7 @@ from registrar.models.domain_information import DomainInformation
 from registrar.models.domain import Domain
 from registrar.models.user import User
 from django.contrib.auth import get_user_model
-from registrar.utility.csv_export import export_domains_to_writer
+from registrar.utility.csv_export import export_domains_to_writer, get_default_start_date, get_default_end_date, export_data_growth_to_csv
 from django.core.management import call_command
 from unittest.mock import MagicMock, call, mock_open, patch
 from api.views import get_current_federal, get_current_full
@@ -14,7 +14,7 @@ from django.conf import settings
 from botocore.exceptions import ClientError
 import boto3_mocking
 from registrar.utility.s3_bucket import S3ClientError, S3ClientErrorCodes  # type: ignore
-from datetime import datetime, timedelta
+from datetime import date, datetime, timedelta
 from django.utils import timezone
 
 class CsvReportsTest(TestCase):
@@ -232,9 +232,13 @@ class ExportDataTest(TestCase):
         self.domain_3, _ = Domain.objects.get_or_create(name="ddomain3.gov", state=Domain.State.ON_HOLD)
         self.domain_4, _ = Domain.objects.get_or_create(name="bdomain4.gov", state=Domain.State.UNKNOWN)
         self.domain_4, _ = Domain.objects.get_or_create(name="bdomain4.gov", state=Domain.State.UNKNOWN)
-        self.domain_5, _ = Domain.objects.get_or_create(name="bdomain5.gov", state=Domain.State.DELETED, deleted_at=datetime(2023, 11, 1))
-        self.domain_6, _ = Domain.objects.get_or_create(name="bdomain6.gov", state=Domain.State.DELETED, deleted_at=datetime(1980, 10, 16))
-
+        self.domain_5, _ = Domain.objects.get_or_create(name="bdomain5.gov", state=Domain.State.DELETED, deleted_at=timezone.make_aware(datetime(2023, 11, 1)))
+        self.domain_6, _ = Domain.objects.get_or_create(name="bdomain6.gov", state=Domain.State.DELETED, deleted_at=timezone.make_aware(datetime(1980, 10, 16)))
+        self.domain_7, _ = Domain.objects.get_or_create(name="xdomain7.gov", state=Domain.State.DELETED, deleted_at=timezone.now())
+        self.domain_8, _ = Domain.objects.get_or_create(name="sdomain8.gov", state=Domain.State.DELETED, deleted_at=timezone.now())
+        # We use timezone.make_aware to sync to server time a datetime object with the current date (using date.today()) and a specific time (using datetime.min.time()).
+        self.domain_9, _ = Domain.objects.get_or_create(name="zdomain9.gov", state=Domain.State.DELETED, deleted_at=timezone.make_aware(datetime.combine(date.today() - timedelta(days=1), datetime.min.time())))
+
         self.domain_information_1, _ = DomainInformation.objects.get_or_create(
             creator=self.user,
             domain=self.domain_1,
@@ -271,6 +275,24 @@
             organization_type="federal",
             federal_agency="Armed Forces Retirement Home",
         )
+        self.domain_information_7, _ = DomainInformation.objects.get_or_create(
+            creator=self.user,
+            domain=self.domain_7,
+            organization_type="federal",
+            federal_agency="Armed Forces Retirement Home",
+        )
+        self.domain_information_8, _ = DomainInformation.objects.get_or_create(
+            creator=self.user,
+            domain=self.domain_8,
+            organization_type="federal",
+            federal_agency="Armed Forces Retirement Home",
+        )
+        self.domain_information_9, _ = DomainInformation.objects.get_or_create(
+            creator=self.user,
+            domain=self.domain_9,
+            organization_type="federal",
+            federal_agency="Armed Forces Retirement Home",
+        )
 
     def tearDown(self):
         Domain.objects.all().delete()
@@ -392,11 +414,23 @@
         self.assertEqual(csv_content, expected_content)
 
     def test_export_domains_to_writer_with_date_filter_pulls_domains_in_range(self):
-        """Test that domains that are READY and in range are pulled when the growth report conditions
-        are applied to export_domains_to_writer."""
+        """Test that domains that are
+        1. READY and their created_at dates are in range
+        2. DELETED and their deleted_at dates are in range
+        are pulled when the growth report conditions are applied to export_domains_to_writer.
+        Test that ready domains display first and deleted second, sorted according to
+        specified keys.
+
+        We considered testing export_data_growth_to_csv, which calls export_domains_to_writer
+        and would have been easy to set up, but expected_content would contain created_at dates
+        which are hard to mock."""
+
         # Create a CSV file in memory
         csv_file = StringIO()
         writer = csv.writer(csv_file)
+        # We use timezone.make_aware to sync to server time a datetime object with the current date (using date.today()) and a specific time (using datetime.min.time()).
+        end_date = timezone.make_aware(datetime.combine(date.today() + timedelta(days=2), datetime.min.time()))
+        start_date = timezone.make_aware(datetime.combine(date.today() - timedelta(days=2), datetime.min.time()))
 
         # Define columns, sort fields, and filter condition
         columns = [
             "Domain name",
             "Domain type",
             "Agency",
             "Organization name",
             "City",
             "State",
             "Status",
-            "Deleted at",
             "Expiration date",
         ]
         sort_fields = ["created_at","domain__name",]
+        sort_fields_for_additional_domains = [
+            "domain__deleted_at",
+            "domain__name",
+        ]
         filter_condition = {
             "domain__state__in": [
                 Domain.State.READY,
             ],
-            "domain__created_at__lt": timezone.make_aware(datetime.now() + timedelta(days=1)),
-            "domain__created_at__gt": timezone.make_aware(datetime.now() - timedelta(days=1)),
+            "domain__created_at__lt": end_date,
+            "domain__created_at__gt": start_date,
         }
         filter_conditions_for_additional_domains = {
             "domain__state__in": [
                 Domain.State.DELETED,
             ],
-            "domain__deleted_at__lt": timezone.make_aware(datetime.now() + timedelta(days=1)),
-            "domain__deleted_at__gt": timezone.make_aware(datetime.now() - timedelta(days=1)),
+            "domain__deleted_at__lt": end_date,
+            "domain__deleted_at__gt": start_date,
         }
 
         # Call the export function
-        export_domains_to_writer(writer, columns, sort_fields, filter_condition)
+        export_domains_to_writer(writer, columns, sort_fields, filter_condition, sort_fields_for_additional_domains, filter_conditions_for_additional_domains)
 
         # Reset the CSV file's position to the beginning
         csv_file.seek(0)
 
         # Read the content into a variable
         csv_content = csv_file.read()
-
-        print(f'csv_content {csv_content}')
-
-        # We expect READY domains,
-        # federal only
-        # sorted alphabetially by domain name
+
+        # We expect READY domains first, created between today-2 and today+2, sorted by created_at then name,
+        # and DELETED domains deleted between today-2 and today+2, sorted by deleted_at then name
         expected_content = (
             "Domain name,Domain type,Agency,Organization name,City,"
-            "State,Status,Deleted at,Expiration date\n"
-            "cdomain1.gov,Federal-Executive,World War I Centennial Commission,ready,\n"
+            "State,Status,Expiration date\n"
+            "cdomain1.gov,Federal-Executive,World War I Centennial Commission,ready\n"
+            "zdomain9.gov,Federal,Armed Forces Retirement Home,,,,deleted,\n"
+            "sdomain8.gov,Federal,Armed Forces Retirement Home,,,,deleted,\n"
+            "xdomain7.gov,Federal,Armed Forces Retirement Home,,,,deleted,\n"
         )
 
         # Normalize line endings and remove commas,
         # spaces and leading/trailing whitespace
         csv_content = csv_content.replace(",,", "").replace(",", "").replace(" ", "").replace("\r\n", "\n").strip()
         expected_content = expected_content.replace(",,", "").replace(",", "").replace(" ", "").strip()
 
         self.assertEqual(csv_content, expected_content)
 
-    def test_export_domains_to_writer_with_date_filter_pulls_appropriate_deleted_domains(self):
-        """When domain__created_at__gt is in filters, we know it's a growth report
-        and we need to fetch the domainInfos for the deleted domains that are within
-        the date range. However, deleted domains that were deleted at a date outside
-        the range do not get pulled."""
-        # Create a CSV file in memory
-        csv_file = StringIO()
-        writer = csv.writer(csv_file)
-
-        # Define columns, sort fields, and filter condition
-        columns = [
-            "Domain name",
-            "Domain type",
-            "Agency",
-            "Organization name",
-            "City",
-            "State",
-            "Status",
-            "Deleted at",
-            "Expiration date",
-        ]
-        sort_fields = ["created_at","domain__name",]
-        filter_condition = {
-            "domain__state__in": [
-                Domain.State.READY,
-            ],
-            "domain__created_at__lt": timezone.make_aware(datetime(2023, 10, 1)),
-            "domain__created_at__gt": timezone.make_aware(datetime(2023, 12, 1)),
-        }
-        filter_conditions_for_additional_domains = {
-            "domain__state__in": [
-                Domain.State.DELETED,
-            ],
-            "domain__deleted_at__lt": timezone.make_aware(datetime(2023, 10, 1)),
-            "domain__deleted_at__gt": timezone.make_aware(datetime(2023, 12, 1)),
-        }
-
-        # Call the export function
-        export_domains_to_writer(writer, columns, sort_fields, filter_condition, filter_conditions_for_additional_domains)
-
-        # Reset the CSV file's position to the beginning
-        csv_file.seek(0)
-
-        # Read the content into a variable
-        csv_content = csv_file.read()
-
-        print(f'csv_content {csv_content}')
-
-        # We expect READY domains,
-        # federal only
-        # sorted alphabetially by domain name
-        expected_content = (
-            "Domain name,Domain type,Agency,Organization name,City,"
-            "State,Status,Deleted at,Expiration date\n"
-            "bdomain5.gov,Federal,Armed Forces Retirement Home,deleted,2023-11-01,\n"
-        )
-
-        # Normalize line endings and remove commas,
-        # spaces and leading/trailing whitespace
-        csv_content = csv_content.replace(",,", "").replace(",", "").replace(" ", "").replace("\r\n", "\n").strip()
-        expected_content = expected_content.replace(",,", "").replace(",", "").replace(" ", "").strip()
-
-        self.assertEqual(csv_content, expected_content)
+class HelperFunctions(TestCase):
+    """This asserts that 1=1. Its limited usefulness lies in making sure the helper methods stay healthy."""
+
+    def test_get_default_start_date(self):
+        expected_date = timezone.make_aware(datetime(2023, 11, 1))
+        actual_date = get_default_start_date()
+        self.assertEqual(actual_date, expected_date)
+
+    def test_get_default_end_date(self):
+        # Note: You may need to mock timezone.now() for accurate testing
+        expected_date = timezone.now()
+        actual_date = get_default_end_date()
+        self.assertEqual(actual_date.date(), expected_date.date())
\ No newline at end of file
diff --git a/src/registrar/utility/csv_export.py b/src/registrar/utility/csv_export.py
index d5b133daf..43c532d73 100644
--- a/src/registrar/utility/csv_export.py
+++ b/src/registrar/utility/csv_export.py
@@ -1,6 +1,6 @@
 import csv
 import logging
-from datetime import datetime
+from datetime import date, datetime
 from registrar.models.domain import Domain
 from registrar.models.domain_information import DomainInformation
 from registrar.models.public_contact import PublicContact
@@ -11,58 +11,66 @@ from django.utils import timezone
 
 logger = logging.getLogger(__name__)
 
-def export_domains_to_writer(writer, columns, sort_fields, filter_condition, filter_condition_for_additional_domains=None):
+def get_domain_infos(filter_condition, sort_fields):
+    domain_infos = DomainInformation.objects.filter(**filter_condition).order_by(*sort_fields)
+    return domain_infos
+
+def write_row(writer, columns, domain_info):
+    security_contacts = domain_info.domain.contacts.filter(contact_type=PublicContact.ContactTypeChoices.SECURITY)
+    # For linter
+    ao = " "
+    if domain_info.authorizing_official:
+        first_name = domain_info.authorizing_official.first_name or ""
+        last_name = domain_info.authorizing_official.last_name or ""
+        ao = first_name + " " + last_name
+    # create a dictionary of fields which can be included in output
+    FIELDS = {
+        "Domain name": domain_info.domain.name,
+        "Domain type": domain_info.get_organization_type_display() + " - " + domain_info.get_federal_type_display()
+        if domain_info.federal_type
+        else domain_info.get_organization_type_display(),
+        "Agency": domain_info.federal_agency,
+        "Organization name": domain_info.organization_name,
+        "City": domain_info.city,
+        "State": domain_info.state_territory,
+        "AO": ao,
+        "AO email": domain_info.authorizing_official.email if domain_info.authorizing_official else " ",
+        "Security contact email": security_contacts[0].email if security_contacts else " ",
+        "Status": domain_info.domain.state,
+        "Expiration date": domain_info.domain.expiration_date,
+        "Created at": domain_info.domain.created_at,
+        "Deleted at": domain_info.domain.deleted_at,
+    }
+    writer.writerow([FIELDS.get(column, "") for column in columns])
+
+def export_domains_to_writer(writer, columns, sort_fields, filter_condition, sort_fields_for_additional_domains=None, filter_condition_for_additional_domains=None):
+    """
+    Receives params from the parent methods and outputs a CSV with filtered and sorted domains.
+    The 'additional' params enable us to concatenate 2 different filtered lists.
+    """
    # write columns headers to writer
     writer.writerow(columns)
-
-    logger.info('export_domains_to_writer')
-    logger.info(filter_condition)
-    logger.info(filter_condition_for_additional_domains)
 
     # Get the domainInfos
-    domainInfos = DomainInformation.objects.filter(**filter_condition).order_by(*sort_fields)
+    domainInfos = get_domain_infos(filter_condition, sort_fields)
 
     # Condition is true for export_data_growth_to_csv. This is an OR situation so we can't combine the filters
     # in one query.
     if filter_condition_for_additional_domains is not None and 'domain__deleted_at__lt' in filter_condition_for_additional_domains:
-        logger.info("Fetching deleted domains")
-        deleted_domainInfos = DomainInformation.objects.filter(domain__state=Domain.State.DELETED).order_by("domain__deleted_at")
+        # Get the deleted domain infos
+        deleted_domainInfos = get_domain_infos(filter_condition_for_additional_domains, sort_fields_for_additional_domains)
 
         # Combine the two querysets into a single iterable
         all_domainInfos = list(chain(domainInfos, deleted_domainInfos))
     else:
         all_domainInfos = list(domainInfos)
-
-    for domainInfo in all_domainInfos:
-        security_contacts = domainInfo.domain.contacts.filter(contact_type=PublicContact.ContactTypeChoices.SECURITY)
-        # For linter
-        ao = " "
-        if domainInfo.authorizing_official:
-            first_name = domainInfo.authorizing_official.first_name or ""
-            last_name = domainInfo.authorizing_official.last_name or ""
-            ao = first_name + " " + last_name
-        # create a dictionary of fields which can be included in output
-        FIELDS = {
-            "Domain name": domainInfo.domain.name,
-            "Domain type": domainInfo.get_organization_type_display() + " - " + domainInfo.get_federal_type_display()
-            if domainInfo.federal_type
-            else domainInfo.get_organization_type_display(),
-            "Agency": domainInfo.federal_agency,
-            "Organization name": domainInfo.organization_name,
-            "City": domainInfo.city,
-            "State": domainInfo.state_territory,
-            "AO": ao,
-            "AO email": domainInfo.authorizing_official.email if domainInfo.authorizing_official else " ",
-            "Security contact email": security_contacts[0].email if security_contacts else " ",
-            "Status": domainInfo.domain.state,
-            "Expiration date": domainInfo.domain.expiration_date,
-            "Created at": domainInfo.domain.created_at,
-            "Deleted at": domainInfo.domain.deleted_at,
-        }
-        writer.writerow([FIELDS.get(column, "") for column in columns])
-
+    # Write rows to CSV
+    for domain_info in all_domainInfos:
+        write_row(writer, columns, domain_info)
 
 def export_data_type_to_csv(csv_file):
+    """All domains report with extra columns"""
+
     writer = csv.writer(csv_file)
     # define columns to include in export
     columns = [
@@ -94,8 +102,9 @@
     }
     export_domains_to_writer(writer, columns, sort_fields, filter_condition)
 
-
 def export_data_full_to_csv(csv_file):
+    """All domains report"""
+
     writer = csv.writer(csv_file)
     # define columns to include in export
     columns = [
@@ -125,6 +134,8 @@
 
 
 def export_data_federal_to_csv(csv_file):
+    """Federal domains report"""
+
     writer = csv.writer(csv_file)
     # define columns to include in export
     columns = [
@@ -152,25 +163,38 @@
         ],
     }
     export_domains_to_writer(writer, columns, sort_fields, filter_condition)
-
+
+def get_default_start_date():
+    # Default to a date that's prior to our first deployment
+    return timezone.make_aware(datetime(2023, 11, 1))
+
+def get_default_end_date():
+    # Default to now()
+    return timezone.now()
+
 def export_data_growth_to_csv(csv_file, start_date, end_date):
+    """
+    Growth report:
+    Receive start and end dates from the view, parse them.
+    Request from export_domains_to_writer READY domains that are created between
+    the start and end dates, as well as DELETED domains that are deleted between
+    the start and end dates. Specify sort params for both lists.
+    """
 
-    if start_date:
-        start_date_formatted = timezone.make_aware(datetime.strptime(start_date, "%Y-%m-%d"))
-    else:
-        # Handle the case where start_date is missing or empty
-        # Default to a date that's prior to our first deployment
-        logger.error(f"Error fetching the start date, will default to 12023/1/1")
-        start_date_formatted = timezone.make_aware(datetime(2023, 11, 1))  # Replace with appropriate handling
+    start_date_formatted = (
+        timezone.make_aware(datetime.strptime(start_date, "%Y-%m-%d"))
+        if start_date
+        else get_default_start_date()
+    )
+
+    end_date_formatted = (
+        timezone.make_aware(datetime.strptime(end_date, "%Y-%m-%d"))
+        if end_date
+        else get_default_end_date()
+    )
 
-    if end_date:
-        end_date_formatted = timezone.make_aware(datetime.strptime(end_date, "%Y-%m-%d"))
-    else:
-        # Handle the case where end_date is missing or empty
-        logger.error(f"Error fetching the end date, will default to now()")
-        end_date_formatted = timezone.make_aware(datetime.now())  # Replace with appropriate handling
     writer = csv.writer(csv_file)
+
+    # define columns to include in export
     columns = [
         "Domain name",
         "Domain type",
         "Agency",
         "Organization name",
         "City",
         "State",
         "Status",
         "Expiration date",
     ]
     sort_fields = [
@@ -189,17 +213,20 @@
         "domain__name",
     ]
     filter_condition = {
-        "domain__state__in": [
-            Domain.State.READY,
-        ],
+        "domain__state__in": [Domain.State.READY],
         "domain__created_at__lt": end_date_formatted,
         "domain__created_at__gt": start_date_formatted,
     }
+
+    # We also want domains deleted between start and end dates, sorted
+    sort_fields_for_additional_domains = [
+        "domain__deleted_at",
+        "domain__name",
+    ]
     filter_condition_for_additional_domains = {
-        "domain__state__in": [
-            Domain.State.DELETED,
-        ],
-        "domain__created_at__lt": end_date_formatted,
-        "domain__created_at__gt": start_date_formatted,
+        "domain__state__in": [Domain.State.DELETED],
+        # Filter deleted domains on deleted_at, not created_at: the deleted_at keys
+        # are what export_domains_to_writer checks for to pull in the DELETED list
+        "domain__deleted_at__lt": end_date_formatted,
+        "domain__deleted_at__gt": start_date_formatted,
     }
-    export_domains_to_writer(writer, columns, sort_fields, filter_condition, filter_condition_for_additional_domains)
+
+    export_domains_to_writer(writer, columns, sort_fields, filter_condition, sort_fields_for_additional_domains, filter_condition_for_additional_domains)
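For a quick manual check of the growth report outside the admin UI (for example from ./manage.py shell), the new helpers can be exercised directly. This snippet is illustrative only, not part of the commit:

    from io import StringIO

    from registrar.utility.csv_export import export_data_growth_to_csv

    csv_file = StringIO()
    # Passing empty strings falls back to get_default_start_date() / get_default_end_date()
    export_data_growth_to_csv(csv_file, "2023-11-01", "2023-12-20")
    print(csv_file.getvalue())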