Rachid Mrad 2023-12-21 12:46:26 -05:00
parent 4b38c4abc8
commit ff32a02022
7 changed files with 126 additions and 77 deletions


@@ -1,6 +1,6 @@
import csv
import logging
-from datetime import date, datetime
+from datetime import datetime
from registrar.models.domain import Domain
from registrar.models.domain_information import DomainInformation
from registrar.models.public_contact import PublicContact
@@ -11,10 +11,12 @@ from django.utils import timezone
logger = logging.getLogger(__name__)
def get_domain_infos(filter_condition, sort_fields):
domain_infos = DomainInformation.objects.filter(**filter_condition).order_by(*sort_fields)
return domain_infos
def write_row(writer, columns, domain_info):
security_contacts = domain_info.domain.contacts.filter(contact_type=PublicContact.ContactTypeChoices.SECURITY)
# For linter
@@ -44,34 +46,48 @@ def write_row(writer, columns, domain_info):
}
writer.writerow([FIELDS.get(column, "") for column in columns])
-def export_domains_to_writer(writer, columns, sort_fields, filter_condition, sort_fields_for_additional_domains=None, filter_condition_for_additional_domains=None):
+def export_domains_to_writer(
+    writer,
+    columns,
+    sort_fields,
+    filter_condition,
+    sort_fields_for_additional_domains=None,
+    filter_condition_for_additional_domains=None,
+):
"""
Receives params from the parent methods and outputs a CSV with fltered and sorted domains.
The 'additional' params enable us to concatenate 2 different filtered lists.
Receives params from the parent methods and outputs a CSV with fltered and sorted domains.
The 'additional' params enable us to concatenate 2 different filtered lists.
"""
# write column headers to writer
writer.writerow(columns)
# Get the domainInfos
domainInfos = get_domain_infos(filter_condition, sort_fields)
# Condition is true for export_data_growth_to_csv. This is an OR situation so we can't combine the filters
# in one query.
-if filter_condition_for_additional_domains is not None and 'domain__deleted_at__lt' in filter_condition_for_additional_domains:
+if (
+    filter_condition_for_additional_domains is not None
+    and "domain__deleted_at__lt" in filter_condition_for_additional_domains
+):
# Get the deleted domain infos
-deleted_domainInfos = get_domain_infos(filter_condition_for_additional_domains, sort_fields_for_additional_domains)
+deleted_domainInfos = get_domain_infos(
+    filter_condition_for_additional_domains, sort_fields_for_additional_domains
+)
# Combine the two querysets into a single iterable
all_domainInfos = list(chain(domainInfos, deleted_domainInfos))
else:
all_domainInfos = list(domainInfos)
# Write rows to CSV
for domain_info in all_domainInfos:
write_row(writer, columns, domain_info)
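
Worth noting in the hunk above: the two filtered lists need an OR across different date fields, and each list needs its own sort key, so a single QuerySet.filter(**kwargs) call (which ANDs its keyword conditions) can't express the query; instead the function runs two queries and concatenates them with itertools.chain. A minimal sketch of the pattern, outside the diff, with illustrative date bounds:

from itertools import chain

from registrar.models.domain_information import DomainInformation

# Illustrative bound, reusing the default-date helper defined later in this file.
start = get_default_start_date()
ready = DomainInformation.objects.filter(domain__ready_at__gt=start).order_by("domain__ready_at")
deleted = DomainInformation.objects.filter(domain__deleted_at__gt=start).order_by("domain__deleted_at")

# chain() lazily yields one queryset after the other; list() forces
# evaluation so the combined rows can be iterated for writing.
all_rows = list(chain(ready, deleted))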
def export_data_type_to_csv(csv_file):
"""All domains report with extra columns"""
writer = csv.writer(csv_file)
# define columns to include in export
columns = [
@@ -103,9 +119,10 @@ def export_data_type_to_csv(csv_file):
}
export_domains_to_writer(writer, columns, sort_fields, filter_condition)
def export_data_full_to_csv(csv_file):
"""All domains report"""
writer = csv.writer(csv_file)
# define columns to include in export
columns = [
@@ -136,7 +153,7 @@ def export_data_full_to_csv(csv_file):
def export_data_federal_to_csv(csv_file):
"""Federal domains report"""
writer = csv.writer(csv_file)
# define columns to include in export
columns = [
@@ -165,14 +182,17 @@ def export_data_federal_to_csv(csv_file):
}
export_domains_to_writer(writer, columns, sort_fields, filter_condition)
def get_default_start_date():
# Default to a date that's prior to our first deployment
return timezone.make_aware(datetime(2023, 11, 1))
def get_default_end_date():
# Default to now()
return timezone.now()
def export_data_growth_to_csv(csv_file, start_date, end_date):
"""
Growth report:
@@ -181,21 +201,17 @@ def export_data_growth_to_csv(csv_file, start_date, end_date):
the start and end dates, as well as DELETED domains that are deleted between
the start and end dates. Specify sort params for both lists.
"""
start_date_formatted = (
-    timezone.make_aware(datetime.strptime(start_date, "%Y-%m-%d"))
-    if start_date
-    else get_default_start_date()
+    timezone.make_aware(datetime.strptime(start_date, "%Y-%m-%d")) if start_date else get_default_start_date()
)
end_date_formatted = (
-    timezone.make_aware(datetime.strptime(end_date, "%Y-%m-%d"))
-    if end_date
-    else get_default_end_date()
+    timezone.make_aware(datetime.strptime(end_date, "%Y-%m-%d")) if end_date else get_default_end_date()
)
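
A side note on the date handling above: strptime produces a naive datetime, and the filter conditions compare it against timezone-aware ready_at/deleted_at timestamps, so make_aware is required to avoid a naive/aware TypeError. The same logic as a small standalone helper (parse_report_date is illustrative, not part of this change):

from datetime import datetime

from django.utils import timezone

def parse_report_date(value, default):
    # strptime yields a naive datetime; make_aware attaches the current
    # timezone so comparisons with aware model fields don't raise TypeError.
    return timezone.make_aware(datetime.strptime(value, "%Y-%m-%d")) if value else default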
writer = csv.writer(csv_file)
# define columns to include in export
columns = [
"Domain name",
@@ -219,7 +235,7 @@ def export_data_growth_to_csv(csv_file, start_date, end_date):
"domain__ready_at__lt": end_date_formatted,
"domain__ready_at__gt": start_date_formatted,
}
# We also want domains deleted between start and end dates, sorted
sort_fields_for_additional_domains = [
"domain__deleted_at",
@@ -230,5 +246,12 @@ def export_data_growth_to_csv(csv_file, start_date, end_date):
"domain__created_at__lt": end_date_formatted,
"domain__created_at__gt": start_date_formatted,
}
-export_domains_to_writer(writer, columns, sort_fields, filter_condition, sort_fields_for_additional_domains, filter_condition_for_additional_domains)
+export_domains_to_writer(
+    writer,
+    columns,
+    sort_fields,
+    filter_condition,
+    sort_fields_for_additional_domains,
+    filter_condition_for_additional_domains,
+)
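
For reference, a hypothetical invocation of the refactored growth report; the io.StringIO buffer stands in for whatever writable object the real caller passes:

import io

csv_file = io.StringIO()
# An explicit window; passing None for either date falls back to the
# get_default_start_date()/get_default_end_date() values above.
export_data_growth_to_csv(csv_file, "2023-11-01", "2023-12-01")
print(csv_file.getvalue())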