Mirror of https://github.com/cisagov/manage.get.gov.git, synced 2025-07-23 19:20:47 +02:00
Refactor csv_export methods for reuse (call them multiple times from the wrapper methods) instead of expanding one method
parent 49324fcf66
commit 9a78f235d5
2 changed files with 49 additions and 51 deletions
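This commit splits the old all-purpose export_domains_to_writer into two helpers, write_header and write_body, which the wrapper functions (export_data_type_to_csv, export_data_full_to_csv, export_data_federal_to_csv, export_data_growth_to_csv) now call in sequence against a shared csv.writer; the two changed files appear to be the CSV report tests and registrar/utility/csv_export.py. Below is a minimal, self-contained sketch of that calling pattern. The wrapper name and the stand-in rows are illustrative only, since the real write_body takes (writer, columns, sort_fields, filter_condition) and queries DomainInformation.

    import csv
    from io import StringIO


    def write_header(writer, columns):
        # Header row written once per report (mirrors the helper added in the diff).
        writer.writerow(columns)


    def write_body(writer, columns, rows):
        # One filtered/sorted result set per call; wrappers may call this more than once.
        # The project helper builds these rows from Django querysets instead.
        for row in rows:
            writer.writerow([row.get(column, "") for column in columns])


    def export_data_sketch_to_csv(csv_file):
        # Hypothetical wrapper showing the pattern the real wrappers now follow:
        # build one writer, write the header, then write the body through it.
        writer = csv.writer(csv_file)
        columns = ["Domain name", "Domain type"]
        rows = [{"Domain name": "adomain2.gov", "Domain type": "Interstate"}]
        write_header(writer, columns)
        write_body(writer, columns, rows)


    csv_file = StringIO()
    export_data_sketch_to_csv(csv_file)
    print(csv_file.getvalue())

Reusing one writer object across both helpers is the contract the new docstrings describe (they work together as long as the same writer object is passed).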
@@ -7,7 +7,8 @@ from registrar.models.domain import Domain
 from registrar.models.user import User
 from django.contrib.auth import get_user_model
 from registrar.utility.csv_export import (
-    export_domains_to_writer,
+    write_header,
+    write_body,
     get_default_start_date,
     get_default_end_date,
 )
@@ -41,7 +42,6 @@ class CsvReportsTest(TestCase):
         self.domain_2, _ = Domain.objects.get_or_create(name="adomain2.gov", state=Domain.State.DNS_NEEDED)
         self.domain_3, _ = Domain.objects.get_or_create(name="ddomain3.gov", state=Domain.State.ON_HOLD)
         self.domain_4, _ = Domain.objects.get_or_create(name="bdomain4.gov", state=Domain.State.UNKNOWN)
-        self.domain_4, _ = Domain.objects.get_or_create(name="bdomain4.gov", state=Domain.State.UNKNOWN)

         self.domain_information_1, _ = DomainInformation.objects.get_or_create(
             creator=self.user,
@@ -333,8 +333,8 @@ class ExportDataTest(TestCase):
         User.objects.all().delete()
         super().tearDown()

-    def test_export_domains_to_writer(self):
-        """Test that export_domains_to_writer returns the
+    def test_write_body(self):
+        """Test that write_body returns the
         existing domain, test that sort by domain name works,
         test that filter works"""
         # Create a CSV file in memory
@@ -367,8 +367,9 @@ class ExportDataTest(TestCase):
             ],
         }

-        # Call the export function
-        export_domains_to_writer(writer, columns, sort_fields, filter_condition)
+        # Call the export functions
+        write_header(writer, columns)
+        write_body(writer, columns, sort_fields, filter_condition)

         # Reset the CSV file's position to the beginning
         csv_file.seek(0)
@@ -395,7 +396,7 @@ class ExportDataTest(TestCase):

         self.assertEqual(csv_content, expected_content)

-    def test_export_domains_to_writer_additional(self):
+    def test_write_body_additional(self):
         """An additional test for filters and multi-column sort"""
         # Create a CSV file in memory
         csv_file = StringIO()
@@ -421,8 +422,9 @@ class ExportDataTest(TestCase):
             ],
         }

-        # Call the export function
-        export_domains_to_writer(writer, columns, sort_fields, filter_condition)
+        # Call the export functions
+        write_header(writer, columns)
+        write_body(writer, columns, sort_fields, filter_condition)

         # Reset the CSV file's position to the beginning
         csv_file.seek(0)
@@ -448,14 +450,14 @@ class ExportDataTest(TestCase):

         self.assertEqual(csv_content, expected_content)

-    def test_export_domains_to_writer_with_date_filter_pulls_domains_in_range(self):
+    def test_write_body_with_date_filter_pulls_domains_in_range(self):
         """Test that domains that are
         1. READY and their first_ready_at dates are in range
         2. DELETED and their deleted_at dates are in range
         are pulled when the growth report conditions are applied to export_domains_to_writed.
         Test that ready domains are sorted by first_ready_at/deleted_at dates first, names second.

-        We considered testing export_data_growth_to_csv which calls export_domains_to_writer
+        We considered testing export_data_growth_to_csv which calls write_body
         and would have been easy to set up, but expected_content would contain created_at dates
         which are hard to mock.

@@ -484,7 +486,7 @@ class ExportDataTest(TestCase):
             "created_at",
             "domain__name",
         ]
-        sort_fields_for_additional_domains = [
+        sort_fields_for_deleted_domains = [
            "domain__deleted_at",
            "domain__name",
         ]
@@ -495,7 +497,7 @@ class ExportDataTest(TestCase):
             "domain__first_ready_at__lte": end_date,
             "domain__first_ready_at__gte": start_date,
         }
-        filter_conditions_for_additional_domains = {
+        filter_conditions_for_deleted_domains = {
             "domain__state__in": [
                 Domain.State.DELETED,
             ],
@@ -503,14 +505,19 @@ class ExportDataTest(TestCase):
             "domain__deleted_at__gte": start_date,
         }

-        # Call the export function
-        export_domains_to_writer(
+        # Call the export functions
+        write_header(writer, columns)
+        write_body(
             writer,
             columns,
             sort_fields,
             filter_condition,
-            sort_fields_for_additional_domains,
-            filter_conditions_for_additional_domains,
+        )
+        write_body(
+            writer,
+            columns,
+            sort_fields_for_deleted_domains,
+            filter_conditions_for_deleted_domains,
         )

         # Reset the CSV file's position to the beginning
@@ -6,12 +6,19 @@ from registrar.models.domain_information import DomainInformation
 from registrar.models.public_contact import PublicContact
 from django.db.models import Value
 from django.db.models.functions import Coalesce
-from itertools import chain
 from django.utils import timezone

 logger = logging.getLogger(__name__)


+def write_header(writer, columns):
+    """
+    Receives params from the parent methods and outputs a CSV with a header row.
+    Works with write_header as longas the same writer object is passed.
+    """
+    writer.writerow(columns)
+
+
 def get_domain_infos(filter_condition, sort_fields):
     domain_infos = DomainInformation.objects.filter(**filter_condition).order_by(*sort_fields)
     return domain_infos
@@ -47,38 +54,24 @@ def write_row(writer, columns, domain_info):
     writer.writerow([FIELDS.get(column, "") for column in columns])


-def export_domains_to_writer(
+def write_body(
     writer,
     columns,
     sort_fields,
     filter_condition,
-    sort_fields_for_additional_domains=None,
-    filter_condition_for_additional_domains=None,
 ):
     """
     Receives params from the parent methods and outputs a CSV with fltered and sorted domains.
-    The 'additional' params enable us to concatenate 2 different filtered lists.
+    Works with write_header as longas the same writer object is passed.
     """
-    # write columns headers to writer
-    writer.writerow(columns)

     # Get the domainInfos
-    domainInfos = get_domain_infos(filter_condition, sort_fields)
+    domain_infos = get_domain_infos(filter_condition, sort_fields)

-    # Condition is true for export_data_growth_to_csv. This is an OR situation so we can' combine the filters
-    # in one query.
-    if filter_condition_for_additional_domains is not None:
-        # Get the deleted domain infos
-        deleted_domainInfos = get_domain_infos(
-            filter_condition_for_additional_domains, sort_fields_for_additional_domains
-        )
-        # Combine the two querysets into a single iterable
-        all_domainInfos = list(chain(domainInfos, deleted_domainInfos))
-    else:
-        all_domainInfos = list(domainInfos)
+    all_domain_infos = list(domain_infos)

     # Write rows to CSV
-    for domain_info in all_domainInfos:
+    for domain_info in all_domain_infos:
         write_row(writer, columns, domain_info)


@@ -114,7 +107,8 @@ def export_data_type_to_csv(csv_file):
             Domain.State.ON_HOLD,
         ],
     }
-    export_domains_to_writer(writer, columns, sort_fields, filter_condition)
+    write_header(writer, columns)
+    write_body(writer, columns, sort_fields, filter_condition)


 def export_data_full_to_csv(csv_file):
@@ -145,7 +139,8 @@ def export_data_full_to_csv(csv_file):
             Domain.State.ON_HOLD,
         ],
     }
-    export_domains_to_writer(writer, columns, sort_fields, filter_condition)
+    write_header(writer, columns)
+    write_body(writer, columns, sort_fields, filter_condition)


 def export_data_federal_to_csv(csv_file):
@@ -177,7 +172,8 @@ def export_data_federal_to_csv(csv_file):
             Domain.State.ON_HOLD,
         ],
     }
-    export_domains_to_writer(writer, columns, sort_fields, filter_condition)
+    write_header(writer, columns)
+    write_body(writer, columns, sort_fields, filter_condition)


 def get_default_start_date():
@@ -194,7 +190,7 @@ def export_data_growth_to_csv(csv_file, start_date, end_date):
     """
     Growth report:
     Receive start and end dates from the view, parse them.
-    Request from export_domains_to_writer READY domains that are created between
+    Request from write_body READY domains that are created between
     the start and end dates, as well as DELETED domains that are deleted between
     the start and end dates. Specify sort params for both lists.
     """
@@ -234,21 +230,16 @@ def export_data_growth_to_csv(csv_file, start_date, end_date):
     }

     # We also want domains deleted between sar and end dates, sorted
-    sort_fields_for_additional_domains = [
+    sort_fields_for_deleted_domains = [
         "domain__deleted_at",
         "domain__name",
     ]
-    filter_condition_for_additional_domains = {
+    filter_condition_for_deleted_domains = {
         "domain__state__in": [Domain.State.DELETED],
         "domain__deleted_at__lte": end_date_formatted,
         "domain__deleted_at__gte": start_date_formatted,
     }

-    export_domains_to_writer(
-        writer,
-        columns,
-        sort_fields,
-        filter_condition,
-        sort_fields_for_additional_domains,
-        filter_condition_for_additional_domains,
-    )
+    write_header(writer, columns)
+    write_body(writer, columns, sort_fields, filter_condition)
+    write_body(writer, columns, sort_fields_for_deleted_domains, filter_condition_for_deleted_domains)
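One behavioral note on the growth-report hunk above: the old export_domains_to_writer merged the READY and DELETED querysets with itertools.chain before writing, while the refactored wrapper writes the header once and then calls write_body twice on the same writer. The resulting row order is unchanged, because chaining two lists and writing them back to back produce the same concatenation. A small illustrative sketch follows; the domain names and dates are made up, and the plain loops stand in for what write_body does against the database.

    import csv
    from io import StringIO

    # Stand-ins for the two filtered, sorted result sets the real wrapper gets
    # from get_domain_infos(filter_condition, sort_fields); illustrative data only.
    ready_domains = [["adomain2.gov", "2023-10-01"], ["zdomain9.gov", "2023-10-02"]]
    deleted_domains = [["ddomain3.gov", "2023-10-05"]]

    columns = ["Domain name", "Date"]

    csv_file = StringIO()
    writer = csv.writer(csv_file)

    writer.writerow(columns)          # what write_header(writer, columns) does
    for row in ready_domains:         # first write_body call: READY domains in range
        writer.writerow(row)
    for row in deleted_domains:       # second write_body call: DELETED domains in range
        writer.writerow(row)

    # Rows come out in the order the old chain(domainInfos, deleted_domainInfos)
    # concatenation produced: the ready block first, then the deleted block.
    print(csv_file.getvalue())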