Suppressed logging from all test cases; made slight changes to exception handling in connection pooling

David Kennedy 2024-01-29 18:22:35 -05:00
parent bf8d9159ce
commit 0546bd08e5
12 changed files with 3145 additions and 3166 deletions
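
The tests in this diff wrap their bodies in less_console_noise(), newly imported from .common, so that expected warnings and errors from the report commands no longer clutter test output. As a rough sketch only (the actual helper presumably lives in registrar/tests/common.py; this commit does not show its implementation), a context manager with that behavior could be written as follows:

import logging
from contextlib import contextmanager

@contextmanager
def less_console_noise():
    # Hypothetical sketch, not the project's implementation:
    # silence log records at CRITICAL and below while the block runs,
    # then restore normal logging afterwards.
    logging.disable(logging.CRITICAL)
    try:
        yield
    finally:
        logging.disable(logging.NOTSET)

Because each test simply wraps its existing mocking and assertions in "with less_console_noise():", most of the changed lines below are re-indentation of otherwise unchanged code.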

@@ -23,7 +23,7 @@ import boto3_mocking
from registrar.utility.s3_bucket import S3ClientError, S3ClientErrorCodes # type: ignore
from datetime import date, datetime, timedelta
from django.utils import timezone
from .common import less_console_noise
class CsvReportsTest(TestCase):
"""Tests to determine if we are uploading our reports correctly"""
@@ -80,41 +80,43 @@ class CsvReportsTest(TestCase):
@boto3_mocking.patching
def test_generate_federal_report(self):
"""Ensures that we correctly generate current-federal.csv"""
mock_client = MagicMock()
fake_open = mock_open()
expected_file_content = [
call("Domain name,Domain type,Agency,Organization name,City,State,Security contact email\r\n"),
call("cdomain1.gov,Federal - Executive,World War I Centennial Commission,,,, \r\n"),
call("ddomain3.gov,Federal,Armed Forces Retirement Home,,,, \r\n"),
]
# We don't actually want to write anything for a test case,
# we just want to verify what is being written.
with boto3_mocking.clients.handler_for("s3", mock_client):
with patch("builtins.open", fake_open):
call_command("generate_current_federal_report", checkpath=False)
content = fake_open()
with less_console_noise():
mock_client = MagicMock()
fake_open = mock_open()
expected_file_content = [
call("Domain name,Domain type,Agency,Organization name,City,State,Security contact email\r\n"),
call("cdomain1.gov,Federal - Executive,World War I Centennial Commission,,,, \r\n"),
call("ddomain3.gov,Federal,Armed Forces Retirement Home,,,, \r\n"),
]
# We don't actually want to write anything for a test case,
# we just want to verify what is being written.
with boto3_mocking.clients.handler_for("s3", mock_client):
with patch("builtins.open", fake_open):
call_command("generate_current_federal_report", checkpath=False)
content = fake_open()
content.write.assert_has_calls(expected_file_content)
content.write.assert_has_calls(expected_file_content)
@boto3_mocking.patching
def test_generate_full_report(self):
"""Ensures that we correctly generate current-full.csv"""
mock_client = MagicMock()
fake_open = mock_open()
expected_file_content = [
call("Domain name,Domain type,Agency,Organization name,City,State,Security contact email\r\n"),
call("cdomain1.gov,Federal - Executive,World War I Centennial Commission,,,, \r\n"),
call("ddomain3.gov,Federal,Armed Forces Retirement Home,,,, \r\n"),
call("adomain2.gov,Interstate,,,,, \r\n"),
]
# We don't actually want to write anything for a test case,
# we just want to verify what is being written.
with boto3_mocking.clients.handler_for("s3", mock_client):
with patch("builtins.open", fake_open):
call_command("generate_current_full_report", checkpath=False)
content = fake_open()
with less_console_noise():
mock_client = MagicMock()
fake_open = mock_open()
expected_file_content = [
call("Domain name,Domain type,Agency,Organization name,City,State,Security contact email\r\n"),
call("cdomain1.gov,Federal - Executive,World War I Centennial Commission,,,, \r\n"),
call("ddomain3.gov,Federal,Armed Forces Retirement Home,,,, \r\n"),
call("adomain2.gov,Interstate,,,,, \r\n"),
]
# We don't actually want to write anything for a test case,
# we just want to verify what is being written.
with boto3_mocking.clients.handler_for("s3", mock_client):
with patch("builtins.open", fake_open):
call_command("generate_current_full_report", checkpath=False)
content = fake_open()
content.write.assert_has_calls(expected_file_content)
content.write.assert_has_calls(expected_file_content)
@boto3_mocking.patching
def test_not_found_full_report(self):
@@ -122,20 +124,20 @@ class CsvReportsTest(TestCase):
def side_effect(Bucket, Key):
raise ClientError({"Error": {"Code": "NoSuchKey", "Message": "No such key"}}, "get_object")
with less_console_noise():
mock_client = MagicMock()
mock_client.get_object.side_effect = side_effect
mock_client = MagicMock()
mock_client.get_object.side_effect = side_effect
response = None
with boto3_mocking.clients.handler_for("s3", mock_client):
with patch("boto3.client", return_value=mock_client):
with self.assertRaises(S3ClientError) as context:
response = self.client.get("/api/v1/get-report/current-full")
# Check that the response has status code 500
self.assertEqual(response.status_code, 500)
response = None
with boto3_mocking.clients.handler_for("s3", mock_client):
with patch("boto3.client", return_value=mock_client):
with self.assertRaises(S3ClientError) as context:
response = self.client.get("/api/v1/get-report/current-full")
# Check that the response has status code 500
self.assertEqual(response.status_code, 500)
# Check that we get the right error back from the page
self.assertEqual(context.exception.code, S3ClientErrorCodes.FILE_NOT_FOUND_ERROR)
# Check that we get the right error back from the page
self.assertEqual(context.exception.code, S3ClientErrorCodes.FILE_NOT_FOUND_ERROR)
@boto3_mocking.patching
def test_not_found_federal_report(self):
@@ -143,84 +145,86 @@ class CsvReportsTest(TestCase):
def side_effect(Bucket, Key):
raise ClientError({"Error": {"Code": "NoSuchKey", "Message": "No such key"}}, "get_object")
with less_console_noise():
mock_client = MagicMock()
mock_client.get_object.side_effect = side_effect
mock_client = MagicMock()
mock_client.get_object.side_effect = side_effect
with boto3_mocking.clients.handler_for("s3", mock_client):
with patch("boto3.client", return_value=mock_client):
with self.assertRaises(S3ClientError) as context:
response = self.client.get("/api/v1/get-report/current-federal")
# Check that the response has status code 500
self.assertEqual(response.status_code, 500)
with boto3_mocking.clients.handler_for("s3", mock_client):
with patch("boto3.client", return_value=mock_client):
with self.assertRaises(S3ClientError) as context:
response = self.client.get("/api/v1/get-report/current-federal")
# Check that the response has status code 500
self.assertEqual(response.status_code, 500)
# Check that we get the right error back from the page
self.assertEqual(context.exception.code, S3ClientErrorCodes.FILE_NOT_FOUND_ERROR)
# Check that we get the right error back from the page
self.assertEqual(context.exception.code, S3ClientErrorCodes.FILE_NOT_FOUND_ERROR)
@boto3_mocking.patching
def test_load_federal_report(self):
"""Tests the get_current_federal api endpoint"""
mock_client = MagicMock()
mock_client_instance = mock_client.return_value
with less_console_noise():
mock_client = MagicMock()
mock_client_instance = mock_client.return_value
with open("registrar/tests/data/fake_current_federal.csv", "r") as file:
file_content = file.read()
with open("registrar/tests/data/fake_current_federal.csv", "r") as file:
file_content = file.read()
# Mock a received file
mock_client_instance.get_object.return_value = {"Body": io.BytesIO(file_content.encode())}
with boto3_mocking.clients.handler_for("s3", mock_client):
request = self.factory.get("/fake-path")
response = get_current_federal(request)
# Mock a received file
mock_client_instance.get_object.return_value = {"Body": io.BytesIO(file_content.encode())}
with boto3_mocking.clients.handler_for("s3", mock_client):
request = self.factory.get("/fake-path")
response = get_current_federal(request)
# Check that we are sending the correct calls.
# Ensures that we are decoding the file content received from AWS.
expected_call = [call.get_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key="current-federal.csv")]
mock_client_instance.assert_has_calls(expected_call)
# Check that we are sending the correct calls.
# Ensures that we are decoding the file content received from AWS.
expected_call = [call.get_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key="current-federal.csv")]
mock_client_instance.assert_has_calls(expected_call)
# Check that the response has status code 200
self.assertEqual(response.status_code, 200)
# Check that the response has status code 200
self.assertEqual(response.status_code, 200)
# Check that the response contains what we expect
expected_file_content = (
"Domain name,Domain type,Agency,Organization name,City,State,Security contact email\n"
"cdomain1.gov,Federal - Executive,World War I Centennial Commission,,,,\n"
"ddomain3.gov,Federal,Armed Forces Retirement Home,,,,"
).encode()
# Check that the response contains what we expect
expected_file_content = (
"Domain name,Domain type,Agency,Organization name,City,State,Security contact email\n"
"cdomain1.gov,Federal - Executive,World War I Centennial Commission,,,,\n"
"ddomain3.gov,Federal,Armed Forces Retirement Home,,,,"
).encode()
self.assertEqual(expected_file_content, response.content)
self.assertEqual(expected_file_content, response.content)
@boto3_mocking.patching
def test_load_full_report(self):
"""Tests the current-federal api link"""
mock_client = MagicMock()
mock_client_instance = mock_client.return_value
with less_console_noise():
mock_client = MagicMock()
mock_client_instance = mock_client.return_value
with open("registrar/tests/data/fake_current_full.csv", "r") as file:
file_content = file.read()
with open("registrar/tests/data/fake_current_full.csv", "r") as file:
file_content = file.read()
# Mock a received file
mock_client_instance.get_object.return_value = {"Body": io.BytesIO(file_content.encode())}
with boto3_mocking.clients.handler_for("s3", mock_client):
request = self.factory.get("/fake-path")
response = get_current_full(request)
# Mock a received file
mock_client_instance.get_object.return_value = {"Body": io.BytesIO(file_content.encode())}
with boto3_mocking.clients.handler_for("s3", mock_client):
request = self.factory.get("/fake-path")
response = get_current_full(request)
# Check that we are sending the correct calls.
# Ensures that we are decoding the file content received from AWS.
expected_call = [call.get_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key="current-full.csv")]
mock_client_instance.assert_has_calls(expected_call)
# Check that we are sending the correct calls.
# Ensures that we are decoding the file content received from AWS.
expected_call = [call.get_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key="current-full.csv")]
mock_client_instance.assert_has_calls(expected_call)
# Check that the response has status code 200
self.assertEqual(response.status_code, 200)
# Check that the response has status code 200
self.assertEqual(response.status_code, 200)
# Check that the response contains what we expect
expected_file_content = (
"Domain name,Domain type,Agency,Organization name,City,State,Security contact email\n"
"cdomain1.gov,Federal - Executive,World War I Centennial Commission,,,,\n"
"ddomain3.gov,Federal,Armed Forces Retirement Home,,,,\n"
"adomain2.gov,Interstate,,,,,"
).encode()
# Check that the response contains what we expect
expected_file_content = (
"Domain name,Domain type,Agency,Organization name,City,State,Security contact email\n"
"cdomain1.gov,Federal - Executive,World War I Centennial Commission,,,,\n"
"ddomain3.gov,Federal,Armed Forces Retirement Home,,,,\n"
"adomain2.gov,Interstate,,,,,"
).encode()
self.assertEqual(expected_file_content, response.content)
self.assertEqual(expected_file_content, response.content)
class ExportDataTest(MockEppLib):
@@ -339,192 +343,170 @@ class ExportDataTest(MockEppLib):
def test_export_domains_to_writer_security_emails(self):
"""Test that export_domains_to_writer returns the
expected security email"""
# Add security email information
self.domain_1.name = "defaultsecurity.gov"
self.domain_1.save()
# Invoke setter
self.domain_1.security_contact
# Invoke setter
self.domain_2.security_contact
# Invoke setter
self.domain_3.security_contact
# Create a CSV file in memory
csv_file = StringIO()
writer = csv.writer(csv_file)
# Define columns, sort fields, and filter condition
columns = [
"Domain name",
"Domain type",
"Agency",
"Organization name",
"City",
"State",
"AO",
"AO email",
"Security contact email",
"Status",
"Expiration date",
]
sort_fields = ["domain__name"]
filter_condition = {
"domain__state__in": [
Domain.State.READY,
Domain.State.DNS_NEEDED,
Domain.State.ON_HOLD,
],
}
self.maxDiff = None
# Call the export functions
write_header(writer, columns)
write_body(writer, columns, sort_fields, filter_condition)
# Reset the CSV file's position to the beginning
csv_file.seek(0)
# Read the content into a variable
csv_content = csv_file.read()
# We expect READY domains,
# sorted alphabetically by domain name
expected_content = (
"Domain name,Domain type,Agency,Organization name,City,State,AO,"
"AO email,Security contact email,Status,Expiration date\n"
"adomain10.gov,Federal,Armed Forces Retirement Home,Ready\n"
"adomain2.gov,Interstate,(blank),Dns needed\n"
"ddomain3.gov,Federal,Armed Forces Retirement Home,123@mail.gov,On hold,2023-05-25\n"
"defaultsecurity.gov,Federal - Executive,World War I Centennial Commission,(blank),Ready"
)
# Normalize line endings and remove commas,
# spaces and leading/trailing whitespace
csv_content = csv_content.replace(",,", "").replace(",", "").replace(" ", "").replace("\r\n", "\n").strip()
expected_content = expected_content.replace(",,", "").replace(",", "").replace(" ", "").strip()
self.assertEqual(csv_content, expected_content)
with less_console_noise():
# Add security email information
self.domain_1.name = "defaultsecurity.gov"
self.domain_1.save()
# Invoke setter
self.domain_1.security_contact
# Invoke setter
self.domain_2.security_contact
# Invoke setter
self.domain_3.security_contact
# Create a CSV file in memory
csv_file = StringIO()
writer = csv.writer(csv_file)
# Define columns, sort fields, and filter condition
columns = [
"Domain name",
"Domain type",
"Agency",
"Organization name",
"City",
"State",
"AO",
"AO email",
"Security contact email",
"Status",
"Expiration date",
]
sort_fields = ["domain__name"]
filter_condition = {
"domain__state__in": [
Domain.State.READY,
Domain.State.DNS_NEEDED,
Domain.State.ON_HOLD,
],
}
self.maxDiff = None
# Call the export functions
write_header(writer, columns)
write_body(writer, columns, sort_fields, filter_condition)
# Reset the CSV file's position to the beginning
csv_file.seek(0)
# Read the content into a variable
csv_content = csv_file.read()
# We expect READY domains,
# sorted alphabetically by domain name
expected_content = (
"Domain name,Domain type,Agency,Organization name,City,State,AO,"
"AO email,Security contact email,Status,Expiration date\n"
"adomain10.gov,Federal,Armed Forces Retirement Home,Ready\n"
"adomain2.gov,Interstate,(blank),Dns needed\n"
"ddomain3.gov,Federal,Armed Forces Retirement Home,123@mail.gov,On hold,2023-05-25\n"
"defaultsecurity.gov,Federal - Executive,World War I Centennial Commission,(blank),Ready"
)
# Normalize line endings and remove commas,
# spaces and leading/trailing whitespace
csv_content = csv_content.replace(",,", "").replace(",", "").replace(" ", "").replace("\r\n", "\n").strip()
expected_content = expected_content.replace(",,", "").replace(",", "").replace(" ", "").strip()
self.assertEqual(csv_content, expected_content)
def test_write_body(self):
"""Test that write_body returns the
existing domain, test that sort by domain name works,
test that filter works"""
# Create a CSV file in memory
csv_file = StringIO()
writer = csv.writer(csv_file)
with less_console_noise():
# Create a CSV file in memory
csv_file = StringIO()
writer = csv.writer(csv_file)
# Define columns, sort fields, and filter condition
columns = [
"Domain name",
"Domain type",
"Agency",
"Organization name",
"City",
"State",
"AO",
"AO email",
"Submitter",
"Submitter title",
"Submitter email",
"Submitter phone",
"Security contact email",
"Status",
]
sort_fields = ["domain__name"]
filter_condition = {
"domain__state__in": [
Domain.State.READY,
Domain.State.DNS_NEEDED,
Domain.State.ON_HOLD,
],
}
# Call the export functions
write_header(writer, columns)
write_body(writer, columns, sort_fields, filter_condition)
# Reset the CSV file's position to the beginning
csv_file.seek(0)
# Read the content into a variable
csv_content = csv_file.read()
# We expect READY domains,
# sorted alphabetically by domain name
expected_content = (
"Domain name,Domain type,Agency,Organization name,City,State,AO,"
"AO email,Submitter,Submitter title,Submitter email,Submitter phone,"
"Security contact email,Status\n"
"adomain10.gov,Federal,Armed Forces Retirement Home,Ready\n"
"adomain2.gov,Interstate,Dns needed\n"
"cdomain1.gov,Federal - Executive,World War I Centennial Commission,Ready\n"
"ddomain3.gov,Federal,Armed Forces Retirement Home,On hold\n"
)
# Normalize line endings and remove commas,
# spaces and leading/trailing whitespace
csv_content = csv_content.replace(",,", "").replace(",", "").replace(" ", "").replace("\r\n", "\n").strip()
expected_content = expected_content.replace(",,", "").replace(",", "").replace(" ", "").strip()
self.assertEqual(csv_content, expected_content)
# Define columns, sort fields, and filter condition
columns = [
"Domain name",
"Domain type",
"Agency",
"Organization name",
"City",
"State",
"AO",
"AO email",
"Submitter",
"Submitter title",
"Submitter email",
"Submitter phone",
"Security contact email",
"Status",
]
sort_fields = ["domain__name"]
filter_condition = {
"domain__state__in": [
Domain.State.READY,
Domain.State.DNS_NEEDED,
Domain.State.ON_HOLD,
],
}
# Call the export functions
write_header(writer, columns)
write_body(writer, columns, sort_fields, filter_condition)
# Reset the CSV file's position to the beginning
csv_file.seek(0)
# Read the content into a variable
csv_content = csv_file.read()
# We expect READY domains,
# sorted alphabetically by domain name
expected_content = (
"Domain name,Domain type,Agency,Organization name,City,State,AO,"
"AO email,Submitter,Submitter title,Submitter email,Submitter phone,"
"Security contact email,Status\n"
"adomain10.gov,Federal,Armed Forces Retirement Home,Ready\n"
"adomain2.gov,Interstate,Dns needed\n"
"cdomain1.gov,Federal - Executive,World War I Centennial Commission,Ready\n"
"ddomain3.gov,Federal,Armed Forces Retirement Home,On hold\n"
)
# Normalize line endings and remove commas,
# spaces and leading/trailing whitespace
csv_content = csv_content.replace(",,", "").replace(",", "").replace(" ", "").replace("\r\n", "\n").strip()
expected_content = expected_content.replace(",,", "").replace(",", "").replace(" ", "").strip()
self.assertEqual(csv_content, expected_content)
def test_write_body_additional(self):
"""An additional test for filters and multi-column sort"""
# Create a CSV file in memory
csv_file = StringIO()
writer = csv.writer(csv_file)
# Define columns, sort fields, and filter condition
columns = [
"Domain name",
"Domain type",
"Agency",
"Organization name",
"City",
"State",
"Security contact email",
]
sort_fields = ["domain__name", "federal_agency", "organization_type"]
filter_condition = {
"organization_type__icontains": "federal",
"domain__state__in": [
Domain.State.READY,
Domain.State.DNS_NEEDED,
Domain.State.ON_HOLD,
],
}
# Call the export functions
write_header(writer, columns)
write_body(writer, columns, sort_fields, filter_condition)
# Reset the CSV file's position to the beginning
csv_file.seek(0)
# Read the content into a variable
csv_content = csv_file.read()
# We expect READY domains,
# federal only
# sorted alphabetically by domain name
expected_content = (
"Domain name,Domain type,Agency,Organization name,City,"
"State,Security contact email\n"
"adomain10.gov,Federal,Armed Forces Retirement Home\n"
"cdomain1.gov,Federal - Executive,World War I Centennial Commission\n"
"ddomain3.gov,Federal,Armed Forces Retirement Home\n"
)
# Normalize line endings and remove commas,
# spaces and leading/trailing whitespace
csv_content = csv_content.replace(",,", "").replace(",", "").replace(" ", "").replace("\r\n", "\n").strip()
expected_content = expected_content.replace(",,", "").replace(",", "").replace(" ", "").strip()
self.assertEqual(csv_content, expected_content)
with less_console_noise():
# Create a CSV file in memory
csv_file = StringIO()
writer = csv.writer(csv_file)
# Define columns, sort fields, and filter condition
columns = [
"Domain name",
"Domain type",
"Agency",
"Organization name",
"City",
"State",
"Security contact email",
]
sort_fields = ["domain__name", "federal_agency", "organization_type"]
filter_condition = {
"organization_type__icontains": "federal",
"domain__state__in": [
Domain.State.READY,
Domain.State.DNS_NEEDED,
Domain.State.ON_HOLD,
],
}
# Call the export functions
write_header(writer, columns)
write_body(writer, columns, sort_fields, filter_condition)
# Reset the CSV file's position to the beginning
csv_file.seek(0)
# Read the content into a variable
csv_content = csv_file.read()
# We expect READY domains,
# federal only
# sorted alphabetically by domain name
expected_content = (
"Domain name,Domain type,Agency,Organization name,City,"
"State,Security contact email\n"
"adomain10.gov,Federal,Armed Forces Retirement Home\n"
"cdomain1.gov,Federal - Executive,World War I Centennial Commission\n"
"ddomain3.gov,Federal,Armed Forces Retirement Home\n"
)
# Normalize line endings and remove commas,
# spaces and leading/trailing whitespace
csv_content = csv_content.replace(",,", "").replace(",", "").replace(" ", "").replace("\r\n", "\n").strip()
expected_content = expected_content.replace(",,", "").replace(",", "").replace(" ", "").strip()
self.assertEqual(csv_content, expected_content)
def test_write_body_with_date_filter_pulls_domains_in_range(self):
"""Test that domains that are
@@ -538,88 +520,88 @@ class ExportDataTest(MockEppLib):
which are hard to mock.
TODO: Simplify if created_at is not needed for the report."""
with less_console_noise():
# Create a CSV file in memory
csv_file = StringIO()
writer = csv.writer(csv_file)
# We use timezone.make_aware to build a timezone-aware datetime (synced to server time) from the current date
# (date.today()) and a specific time (datetime.min.time()).
end_date = timezone.make_aware(datetime.combine(date.today() + timedelta(days=2), datetime.min.time()))
start_date = timezone.make_aware(datetime.combine(date.today() - timedelta(days=2), datetime.min.time()))
# Create a CSV file in memory
csv_file = StringIO()
writer = csv.writer(csv_file)
# We use timezone.make_aware to build a timezone-aware datetime (synced to server time) from the current date
# (date.today()) and a specific time (datetime.min.time()).
end_date = timezone.make_aware(datetime.combine(date.today() + timedelta(days=2), datetime.min.time()))
start_date = timezone.make_aware(datetime.combine(date.today() - timedelta(days=2), datetime.min.time()))
# Define columns, sort fields, and filter condition
columns = [
"Domain name",
"Domain type",
"Agency",
"Organization name",
"City",
"State",
"Status",
"Expiration date",
]
sort_fields = [
"created_at",
"domain__name",
]
sort_fields_for_deleted_domains = [
"domain__deleted",
"domain__name",
]
filter_condition = {
"domain__state__in": [
Domain.State.READY,
],
"domain__first_ready__lte": end_date,
"domain__first_ready__gte": start_date,
}
filter_conditions_for_deleted_domains = {
"domain__state__in": [
Domain.State.DELETED,
],
"domain__deleted__lte": end_date,
"domain__deleted__gte": start_date,
}
# Define columns, sort fields, and filter condition
columns = [
"Domain name",
"Domain type",
"Agency",
"Organization name",
"City",
"State",
"Status",
"Expiration date",
]
sort_fields = [
"created_at",
"domain__name",
]
sort_fields_for_deleted_domains = [
"domain__deleted",
"domain__name",
]
filter_condition = {
"domain__state__in": [
Domain.State.READY,
],
"domain__first_ready__lte": end_date,
"domain__first_ready__gte": start_date,
}
filter_conditions_for_deleted_domains = {
"domain__state__in": [
Domain.State.DELETED,
],
"domain__deleted__lte": end_date,
"domain__deleted__gte": start_date,
}
# Call the export functions
write_header(writer, columns)
write_body(
writer,
columns,
sort_fields,
filter_condition,
)
write_body(
writer,
columns,
sort_fields_for_deleted_domains,
filter_conditions_for_deleted_domains,
)
# Call the export functions
write_header(writer, columns)
write_body(
writer,
columns,
sort_fields,
filter_condition,
)
write_body(
writer,
columns,
sort_fields_for_deleted_domains,
filter_conditions_for_deleted_domains,
)
# Reset the CSV file's position to the beginning
csv_file.seek(0)
# Reset the CSV file's position to the beginning
csv_file.seek(0)
# Read the content into a variable
csv_content = csv_file.read()
# Read the content into a variable
csv_content = csv_file.read()
# We expect READY domains first, created between today-2 and today+2, sorted by created_at then name
# and DELETED domains deleted between today-2 and today+2, sorted by deleted then name
expected_content = (
"Domain name,Domain type,Agency,Organization name,City,"
"State,Status,Expiration date\n"
"cdomain1.gov,Federal-Executive,World War I Centennial Commission,,,,Ready,\n"
"adomain10.gov,Federal,Armed Forces Retirement Home,,,,Ready,\n"
"zdomain9.gov,Federal,Armed Forces Retirement Home,,,,Deleted,\n"
"sdomain8.gov,Federal,Armed Forces Retirement Home,,,,Deleted,\n"
"xdomain7.gov,Federal,Armed Forces Retirement Home,,,,Deleted,\n"
)
# We expect READY domains first, created between today-2 and today+2, sorted by created_at then name
# and DELETED domains deleted between today-2 and today+2, sorted by deleted then name
expected_content = (
"Domain name,Domain type,Agency,Organization name,City,"
"State,Status,Expiration date\n"
"cdomain1.gov,Federal-Executive,World War I Centennial Commission,,,,Ready,\n"
"adomain10.gov,Federal,Armed Forces Retirement Home,,,,Ready,\n"
"zdomain9.gov,Federal,Armed Forces Retirement Home,,,,Deleted,\n"
"sdomain8.gov,Federal,Armed Forces Retirement Home,,,,Deleted,\n"
"xdomain7.gov,Federal,Armed Forces Retirement Home,,,,Deleted,\n"
)
# Normalize line endings and remove commas,
# spaces and leading/trailing whitespace
csv_content = csv_content.replace(",,", "").replace(",", "").replace(" ", "").replace("\r\n", "\n").strip()
expected_content = expected_content.replace(",,", "").replace(",", "").replace(" ", "").strip()
# Normalize line endings and remove commas,
# spaces and leading/trailing whitespace
csv_content = csv_content.replace(",,", "").replace(",", "").replace(" ", "").replace("\r\n", "\n").strip()
expected_content = expected_content.replace(",,", "").replace(",", "").replace(" ", "").strip()
self.assertEqual(csv_content, expected_content)
self.assertEqual(csv_content, expected_content)
class HelperFunctions(TestCase):