Mirror of https://github.com/cisagov/manage.get.gov.git, synced 2025-05-17 01:57:03 +02:00
Linting
parent 7bd1356858
commit 3838be3960
1 changed file with 12 additions and 21 deletions
@@ -7,7 +7,6 @@ from django.db.models import Value
 from django.db.models.functions import Coalesce
 from django.utils import timezone
 from django.core.paginator import Paginator
-import time
 from django.db.models import F, Value, CharField
 from django.db.models.functions import Concat, Coalesce
 
@@ -24,18 +23,20 @@ def write_header(writer, columns):
 
 
 def get_domain_infos(filter_condition, sort_fields):
-    domain_infos = DomainInformation.objects.select_related(
-        'domain', 'authorizing_official'
-    ).filter(**filter_condition).order_by(*sort_fields)
+    domain_infos = (
+        DomainInformation.objects.select_related("domain", "authorizing_official")
+        .filter(**filter_condition)
+        .order_by(*sort_fields)
+    )
 
     # Do a mass concat of the first and last name fields for authorizing_official.
     # The old operation was computationally heavy for some reason, so if we precompute
     # this here, it is vastly more efficient.
     domain_infos_cleaned = domain_infos.annotate(
         ao=Concat(
-            Coalesce(F('authorizing_official__first_name'), Value('')),
-            Value(' '),
-            Coalesce(F('authorizing_official__last_name'), Value('')),
+            Coalesce(F("authorizing_official__first_name"), Value("")),
+            Value(" "),
+            Coalesce(F("authorizing_official__last_name"), Value("")),
             output_field=CharField(),
         )
     )
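Read as a whole, the new get_domain_infos builds one queryset with the related rows joined in and the authorizing official's display name precomputed in SQL rather than formatted per row in Python. A sketch of the function after this hunk; the import path is an assumption, and the final return is implied by the variable name but not shown in the hunk:

# Sketch of the post-change function; ORM calls mirror the "+" lines above.
from django.db.models import CharField, F, Value
from django.db.models.functions import Coalesce, Concat

from registrar.models import DomainInformation  # assumed import path


def get_domain_infos(filter_condition, sort_fields):
    domain_infos = (
        DomainInformation.objects.select_related("domain", "authorizing_official")
        .filter(**filter_condition)
        .order_by(*sort_fields)
    )

    # Precompute "first_name last_name" for the authorizing official in the
    # database, so each CSV row does not have to format it in Python.
    domain_infos_cleaned = domain_infos.annotate(
        ao=Concat(
            Coalesce(F("authorizing_official__first_name"), Value("")),
            Value(" "),
            Coalesce(F("authorizing_official__last_name"), Value("")),
            output_field=CharField(),
        )
    )
    return domain_infos_cleaned  # the hunk ends before the return; assumed here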
@@ -47,15 +48,11 @@ def parse_row(columns, domain_info: DomainInformation, skip_epp_call=True):
 
     domain = domain_info.domain
 
-    start_time = time.time()
     security_email = domain.security_contact_registry_id
     if security_email is None:
         cached_sec_email = domain.get_security_email(skip_epp_call)
         security_email = cached_sec_email if cached_sec_email is not None else " "
 
-    end_time = time.time()
-    print(f"parse security email operation took {end_time - start_time} seconds")
-
     invalid_emails = {"registrar@dotgov.gov", "dotgov@cisa.dhs.gov"}
     # These are default emails that should not be displayed in the csv report
     if security_email.lower() in invalid_emails:
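Stripped of the timing code, the logic this hunk leaves behind is a two-step fallback: use the registry id cached on the domain, otherwise ask get_security_email (which may skip the EPP call), and finally check the result against the default registrar addresses. A dependency-free sketch of that decision order; the names below are illustrative, and the hunk cuts off before showing how a flagged default address is replaced:

# Illustrative stand-alone version of the fallback in parse_row; the real code
# reads these values from the Domain model and the EPP registry.
INVALID_EMAILS = {"registrar@dotgov.gov", "dotgov@cisa.dhs.gov"}


def resolve_security_email(cached_value, fetch_fallback):
    """Prefer the value cached on the domain; otherwise use the fallback lookup."""
    if cached_value is not None:
        security_email = cached_value
    else:
        fetched = fetch_fallback()
        security_email = fetched if fetched is not None else " "

    # Default registrar addresses are flagged so the caller can hide them in the report.
    is_default = security_email.lower() in INVALID_EMAILS
    return security_email, is_default


# Example: a cached address is used as-is; a default address is flagged.
print(resolve_security_email("security@example.gov", lambda: None))  # ('security@example.gov', False)
print(resolve_security_email("registrar@dotgov.gov", lambda: None))  # ('registrar@dotgov.gov', True)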
@@ -83,12 +80,11 @@ def parse_row(columns, domain_info: DomainInformation, skip_epp_call=True):
         "First ready": domain.first_ready,
         "Deleted": domain.deleted,
     }
-    start_time = time.time()
     row = [FIELDS.get(column, "") for column in columns]
-    end_time = time.time()
-    print(f"parse some cols operation took {end_time - start_time} seconds")
     return row
 
 
 def write_body(
     writer,
     columns,
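The row construction this hunk keeps is a per-column dict lookup with "" as the default, so any requested column missing from FIELDS becomes an empty cell rather than a KeyError. A tiny self-contained illustration; the sample values are invented:

# Invented sample data; the real FIELDS mapping is built from a DomainInformation row.
FIELDS = {
    "Domain name": "example.gov",
    "First ready": "2023-01-05",
    "Deleted": "",
}

columns = ["Domain name", "Security contact email", "First ready"]

# Unknown columns fall back to "" instead of raising KeyError.
row = [FIELDS.get(column, "") for column in columns]
print(row)  # ['example.gov', '', '2023-01-05']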
@@ -109,17 +105,12 @@ def write_body(
     for page_num in paginator.page_range:
         page = paginator.page(page_num)
         rows = []
-        start_time = time.time()
         for domain_info in page.object_list:
             row = parse_row(columns, domain_info)
             rows.append(row)
 
-        end_time = time.time()
-        print(f"new parse Operation took {end_time - start_time} seconds")
         writer.writerows(rows)
 
-    a1_end_time = time.time()
-    print(f"parse all stuff operation took {a1_end_time - a1_start_time} seconds")
 
 def export_data_type_to_csv(csv_file):
     """All domains report with extra columns"""
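With the timing prints gone, write_body is a plain paginate-and-batch loop: fetch a page of DomainInformation rows, parse each into a list, and write the whole batch with writerows. A condensed sketch of that shape; the function name, signature, and page size are assumptions, and only the loop body mirrors the hunk:

import csv

from django.core.paginator import Paginator


def write_paginated_rows(csv_file, columns, queryset, parse_row, page_size=1000):
    """Write queryset rows to csv_file one page at a time to bound memory use."""
    writer = csv.writer(csv_file)
    writer.writerow(columns)  # header row

    paginator = Paginator(queryset, page_size)
    for page_num in paginator.page_range:
        page = paginator.page(page_num)
        # Build all rows for this page, then hand them to the writer in one call.
        rows = [parse_row(columns, domain_info) for domain_info in page.object_list]
        writer.writerows(rows)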