Mirror of https://github.com/cisagov/manage.get.gov.git, synced 2025-07-14 06:55:08 +02:00

Commit 5b45265d8c: Merge remote-tracking branch 'origin/za/additional-data-transferred-domains' into za/additional-data-transferred-domains

14 changed files with 385 additions and 286 deletions
@@ -71,6 +71,11 @@ class Command(BaseCommand):
        parser.add_argument(
            "--directory", default="migrationdata", help="Desired directory"
        )
        parser.add_argument(
            "--infer_filenames",
            action=argparse.BooleanOptionalAction,
            help="Determines if we should infer filenames or not. Recommended to be enabled only in a development or testing setting."
        )
        parser.add_argument(
            "--agency_adhoc_filename",
            default=EnumFilenames.AGENCY_ADHOC.value[1],

@@ -83,10 +88,9 @@ class Command(BaseCommand):
        )
        parser.add_argument(
            "--domain_escrow_filename",
            default=EnumFilenames.DOMAIN_ADDITIONAL.value[1],
            default=EnumFilenames.DOMAIN_ESCROW.value[1],
            help="Defines the filename for creation/expiration domain data",
        )
        #domain_escrow_filename
        parser.add_argument(
            "--domain_adhoc_filename",
            default=EnumFilenames.DOMAIN_ADHOC.value[1],

@@ -97,6 +101,11 @@ class Command(BaseCommand):
            default=EnumFilenames.ORGANIZATION_ADHOC.value[1],
            help="Defines the filename for domain type adhocs",
        )
        parser.add_argument(
            "--authority_adhoc_filename",
            default=EnumFilenames.AUTHORITY_ADHOC.value[1],
            help="Defines the filename for domain type adhocs",
        )

    def print_debug_mode_statements(
        self, debug_on: bool, debug_max_entries_to_parse: int

@@ -551,6 +560,9 @@ class Command(BaseCommand):
            system_exit_on_terminate=False,
            info_to_inspect=f"""
            !!! ENSURE THAT ALL FILENAMES ARE CORRECT BEFORE PROCEEDING

            ==Master data file==
            domain_additional_filename: {domain_additional_filename}

            ==Federal agency information==
            agency_adhoc_filename: {agency_adhoc_filename}

@@ -563,7 +575,7 @@ class Command(BaseCommand):
            ==Creation date / expiration date information==
            domain_escrow_filename: {domain_escrow_filename}

            domain_additional_filename: {domain_additional_filename}
            ==Containing directory==
            directory: {directory}
            """,
            prompt_title=title,
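Note: the new --infer_filenames flag above relies on argparse.BooleanOptionalAction (Python 3.9+), which registers a paired --no- variant automatically. Below is a minimal, self-contained sketch of that behavior; the parser and default value here are illustrative, not the command's real setup.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    "--infer_filenames",
    action=argparse.BooleanOptionalAction,
    default=False,  # default chosen for this sketch only
    help="Determines if we should infer filenames or not.",
)

# BooleanOptionalAction accepts both spellings of the flag:
print(parser.parse_args(["--infer_filenames"]).infer_filenames)     # True
print(parser.parse_args(["--no-infer_filenames"]).infer_filenames)  # False
print(parser.parse_args([]).infer_filenames)                        # False (the default)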
@@ -371,7 +371,7 @@ class Command(BaseCommand):
        org_type = transition_domain.organization_type
        fed_type = transition_domain.federal_type
        fed_agency = transition_domain.federal_agency

        match org_type:
            case "Federal":
                org_type = ("federal", "Federal")
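Note: the hunk above uses Python 3.10 structural pattern matching to normalize the organization type into a (value, label) pair. A small sketch of the same idea follows; only the "Federal" branch is taken from the diff, the other branch is a hypothetical illustration rather than the project's actual mapping.

def normalize_org_type(org_type):
    match org_type:
        case "Federal":
            return ("federal", "Federal")
        case "Interstate":
            return ("interstate", "Interstate")  # assumed branch, for illustration only
        case _:
            return None

print(normalize_org_type("Federal"))  # ('federal', 'Federal')
print(normalize_org_type("Unknown"))  # None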
@@ -89,12 +89,27 @@ class EnumFilenames(Enum):
    # We are sourcing data from many different locations, so its better to track this
    # as an Enum rather than multiple spread out variables.
    # We store the "type" as [0], and we store the "default_filepath" as [1].
    AGENCY_ADHOC = ("agency_adhoc", "agency.adhoc.dotgov.txt")
    AGENCY_ADHOC = (
        "agency_adhoc",
        "agency.adhoc.dotgov.txt"
    )
    DOMAIN_ADDITIONAL = (
        "domain_additional",
        "domainadditionaldatalink.adhoc.dotgov.txt",
    )
    DOMAIN_ESCROW = ("domain_escrow", "escrow_domains.daily.dotgov.GOV.txt")
    DOMAIN_ADHOC = ("domain_adhoc", "domaintypes.adhoc.dotgov.txt")
    ORGANIZATION_ADHOC = ("organization_adhoc", "organization.adhoc.dotgov.txt")
    AUTHORITY_ADHOC = ("authority_adhoc", "authority.adhoc.dotgov.txt")
    DOMAIN_ESCROW = (
        "domain_escrow",
        "escrow_domains.daily.dotgov.GOV.txt"
    )
    DOMAIN_ADHOC = (
        "domain_adhoc",
        "domaintypes.adhoc.dotgov.txt"
    )
    ORGANIZATION_ADHOC = (
        "organization_adhoc",
        "organization.adhoc.dotgov.txt"
    )
    AUTHORITY_ADHOC = (
        "authority_adhoc",
        "authority.adhoc.dotgov.txt"
    )
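Note: as the comment above says, each EnumFilenames member is a (type, default_filepath) tuple, so callers read value[0] for the type key and value[1] for the default filename. A short sketch with a stand-in enum, not the project's full definition:

from enum import Enum

class FixtureFilenames(Enum):  # hypothetical stand-in for EnumFilenames
    AGENCY_ADHOC = ("agency_adhoc", "agency.adhoc.dotgov.txt")
    DOMAIN_ESCROW = ("domain_escrow", "escrow_domains.daily.dotgov.GOV.txt")

print(FixtureFilenames.AGENCY_ADHOC.value[0])  # "agency_adhoc" (the type key)
print(FixtureFilenames.AGENCY_ADHOC.value[1])  # "agency.adhoc.dotgov.txt" (the default filepath)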
@@ -136,11 +136,10 @@ class LoadExtraTransitionDomain:
    def __init__(self, options: TransitionDomainArguments):
        # Globally stores event logs and organizes them
        self.parse_logs = FileTransitionLog()

        arguments = options.args_extra_transition_domain()
        self.debug = options.debug
        # Reads and parses migration files
        self.parsed_data_container = ExtraTransitionDomain(**arguments)
        self.parsed_data_container.parse_all_files()
        self.parsed_data_container = ExtraTransitionDomain(options)
        self.parsed_data_container.parse_all_files(options.infer_filenames)

    def update_transition_domain_models(self):
        """Updates TransitionDomain objects based off the file content

@@ -152,7 +151,7 @@ class LoadExtraTransitionDomain:
        updated_transition_domains = []
        failed_transition_domains = []
        for transition_domain in all_transition_domains:
            domain_name = transition_domain.domain_name.upper()
            domain_name = transition_domain.domain_name
            updated_transition_domain = transition_domain
            try:
                # STEP 1: Parse organization data

@@ -179,8 +178,9 @@ class LoadExtraTransitionDomain:
                #if updated_transition_domain.__dict__ != transition_domain.__dict__:
                updated_transition_domain.save()
                updated_transition_domains.append(updated_transition_domain)

                self.parse_logs.display_logs_by_domain_name(domain_name)
                if self.debug:
                    # Display errors for this specific domain
                    self.parse_logs.display_logs_by_domain_name(domain_name)
                logger.info(
                    f"{TerminalColors.OKCYAN}"
                    f"Successfully updated {domain_name}"

@@ -189,6 +189,7 @@ class LoadExtraTransitionDomain:

            # If we run into an exception on this domain,
            # Just skip over it and log that it happened.
            # Q: Should we just throw an exception?
            except Exception as err:
                logger.debug(err)
                logger.error(

@@ -198,6 +199,10 @@ class LoadExtraTransitionDomain:
                )
                failed_transition_domains.append(domain_name)

        if self.debug:
            # Display misc errors (not associated to a domain)
            self.parse_logs.display_logs_by_domain_name(None)

        failed_count = len(failed_transition_domains)
        if failed_count == 0:
            logger.info(

@@ -512,6 +517,7 @@ class LoadExtraTransitionDomain:
    def get_domain_data(self, desired_id) -> DomainAdditionalData:
        """Grabs a corresponding row within the DOMAIN_ADDITIONAL file,
        based off a desired_id"""
        l = self.get_object_by_id(EnumFilenames.DOMAIN_ADDITIONAL, desired_id.lower())
        return self.get_object_by_id(EnumFilenames.DOMAIN_ADDITIONAL, desired_id)

    def get_organization_adhoc(self, desired_id) -> OrganizationAdhoc:
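Note: the update loop above wraps each TransitionDomain in try/except so a single bad record is logged and skipped rather than aborting the whole run, and failures are tallied at the end. Below is a schematic, self-contained sketch of that pattern; the names are illustrative, not the project's API.

import logging

logger = logging.getLogger(__name__)

def update_all(records, update_one):
    updated, failed = [], []
    for record in records:
        try:
            update_one(record)  # may raise for malformed data
            updated.append(record)
        except Exception as err:
            logger.debug(err)
            logger.error(f"Failed to update {record}")
            failed.append(record)
    if len(failed) == 0:
        logger.info(f"Updated {len(updated)} records with no failures")
    else:
        logger.error(f"{len(failed)} of {len(records)} records failed to update")
    return updated, failed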
@@ -683,24 +689,14 @@ class ExtraTransitionDomain:

    strip_date_regex = re.compile(r"(?:.*\/)?(\d+)\.(.+)")

    def __init__(
        self,
        agency_adhoc_filename=EnumFilenames.AGENCY_ADHOC.value[1],
        domain_additional_filename=EnumFilenames.DOMAIN_ADDITIONAL.value[1],
        domain_escrow_filename=EnumFilenames.DOMAIN_ESCROW.value[1],
        domain_adhoc_filename=EnumFilenames.DOMAIN_ADHOC.value[1],
        organization_adhoc_filename=EnumFilenames.ORGANIZATION_ADHOC.value[1],
        authority_adhoc_filename=EnumFilenames.AUTHORITY_ADHOC.value[1],
        directory="migrationdata",
        sep="|",
    ):
    def __init__(self, options: TransitionDomainArguments):
        # Add a slash if the last character isn't one
        if directory and directory[-1] != "/":
            directory += "/"
        self.directory = directory
        self.seperator = sep
        if options.directory and options.directory[-1] != "/":
            options.directory += "/"
        self.directory = options.directory
        self.seperator = options.sep

        self.all_files = glob.glob(f"{directory}*")
        self.all_files = glob.glob(f"{self.directory}*")

        # Create a set with filenames as keys for quick lookup
        self.all_files_set = {os.path.basename(file) for file in self.all_files}

@@ -713,37 +709,37 @@ class ExtraTransitionDomain:
        pattern_map_params = [
            (
                EnumFilenames.AGENCY_ADHOC,
                agency_adhoc_filename,
                options.agency_adhoc_filename,
                AgencyAdhoc,
                "agencyid",
            ),
            (
                EnumFilenames.DOMAIN_ADDITIONAL,
                domain_additional_filename,
                options.domain_additional_filename,
                DomainAdditionalData,
                "domainname",
            ),
            (
                EnumFilenames.DOMAIN_ESCROW,
                domain_escrow_filename,
                options.domain_escrow_filename,
                DomainEscrow,
                "domainname",
            ),
            (
                EnumFilenames.DOMAIN_ADHOC,
                domain_adhoc_filename,
                options.domain_adhoc_filename,
                DomainTypeAdhoc,
                "domaintypeid",
            ),
            (
                EnumFilenames.ORGANIZATION_ADHOC,
                organization_adhoc_filename,
                options.organization_adhoc_filename,
                OrganizationAdhoc,
                "orgid",
            ),
            (
                EnumFilenames.AUTHORITY_ADHOC,
                authority_adhoc_filename,
                options.authority_adhoc_filename,
                AuthorityAdhoc,
                "authorityid",
            ),
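Note: pattern_map_params above acts as a dispatch table: each tuple names the file type, the filename to parse (now read from options), the dataclass each row becomes, and the column used as the row id. A compact sketch of how such a table can be consumed; the dataclasses below are stand-ins, not the project's real containers.

from dataclasses import dataclass

@dataclass
class AgencyRow:      # stand-in for AgencyAdhoc
    agencyid: str
    agencyname: str

@dataclass
class DomainRow:      # stand-in for DomainAdditionalData
    domainname: str

pattern_map_params = [
    ("agency_adhoc", "agency.adhoc.dotgov.txt", AgencyRow, "agencyid"),
    ("domain_additional", "domainadditionaldatalink.adhoc.dotgov.txt", DomainRow, "domainname"),
]

for file_type, filename, data_type, id_field in pattern_map_params:
    print(f"{file_type}: parse {filename} into {data_type.__name__}, keyed by {id_field}")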
@@ -758,7 +754,7 @@ class ExtraTransitionDomain:

        pattern_map_params must adhere to this format:
            [
                (field_type, filename, data_type, id_field),
                (file_type, filename, data_type, id_field),
            ]

        vars:

@@ -800,8 +796,8 @@ class ExtraTransitionDomain:
    def parse_all_files(self, infer_filenames=True):
        """Clears all preexisting data then parses each related CSV file.

        overwrite_existing_data: bool -> Determines if we should clear
        file_data.data if it already exists
        infer_filenames: bool -> Determines if we should try to
        infer the filename if a default is passed in
        """
        self.clear_file_data()
        for name, value in self.file_data.items():

@@ -822,13 +818,13 @@ class ExtraTransitionDomain:
                continue

            # Infer filename logic #
            # This mode is used for development and testing only. Rather than having
            # This mode is used for internal development use and testing only. Rather than having
            # to manually define the filename each time, we can infer what the filename
            # actually is.

            # Not intended for use outside of that, as it is better to assume
            # the end-user wants to be specific.
            logger.warning("Attempting to infer filename" f" for file: {filename}.")
            logger.warning(f"Attempting to infer filename: {filename}")
            for filename in self.all_files:
                default_name = name.value[1]
                match = value.try_infer_filename(filename, default_name)

@@ -900,11 +896,6 @@ class ExtraTransitionDomain:
    def _read_csv_file(self, file, seperator, dataclass_type, id_field):
        with open(file, "r", encoding="utf-8-sig") as requested_file:
            reader = csv.DictReader(requested_file, delimiter=seperator)
            """
            for row in reader:
                print({key: type(key) for key in row.keys()}) # print out the keys and their types
                test = {row[id_field]: dataclass_type(**row)}
            """
            dict_data = {}
            for row in reader:
                if None in row:

@@ -914,6 +905,11 @@ class ExtraTransitionDomain:
                        print(f"key: {key} value: {value}")
                    continue
                row_id = row[id_field]

                # To maintain pairity with the load_transition_domain
                # script, we store this data in lowercase.
                if id_field == "domainname" and row_id is not None:
                    row_id = row_id.lower()
                dict_data[row_id] = dataclass_type(**row)
                # dict_data = {row[id_field]: dataclass_type(**row) for row in reader}
            return dict_data
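Note: _read_csv_file above reads a delimiter-separated file with csv.DictReader and keys each parsed dataclass by its id column, lowercasing domain names to stay consistent with load_transition_domain. A simplified, runnable sketch of the same flow using in-memory data and a stand-in dataclass:

import csv
import io
from dataclasses import dataclass

@dataclass
class DomainRow:  # stand-in for the real data containers
    domainname: str
    domaintypeid: str

sample_file = io.StringIO("domainname|domaintypeid\nAnomaly.gov|1\nTestDomain.gov|2\n")
reader = csv.DictReader(sample_file, delimiter="|")

dict_data = {}
for row in reader:
    row_id = row["domainname"]
    if row_id is not None:
        row_id = row_id.lower()  # matches load_transition_domain, which stores lowercase names
    dict_data[row_id] = DomainRow(**row)

print(dict_data["anomaly.gov"])  # DomainRow(domainname='Anomaly.gov', domaintypeid='1')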
@@ -1,33 +1,53 @@
from dataclasses import dataclass, field
from typing import Optional

from registrar.management.commands.utility.epp_data_containers import EnumFilenames

@dataclass
class TransitionDomainArguments:
    """Stores arguments for load_transition_domain"""
    """Stores arguments for load_transition_domain, structurally a mix
    of a dataclass and a regular class, meaning we get a hardcoded
    representation of the values we want, while maintaining flexiblity
    and reducing boilerplate.

    All pre-defined fields are optional but will remain on the model definition.
    In this event, they are provided a default value if none is given.
    """

    def __init__(self, **options):
        # Settings #
        self.directory = options.get("directory")
        self.sep = options.get("sep")
        self.limitParse = options.get("limitParse")
    # Maintains an internal kwargs list and sets values
    # that match the class definition.
    def __init__(self, **kwargs):
        self.kwargs = kwargs
        for k, v in kwargs.items():
            if hasattr(self, k):
                setattr(self, k, v)

        # Filenames #
        ## Adhocs ##
        self.agency_adhoc_filename = options.get("agency_adhoc_filename")
        self.domain_adhoc_filename = options.get("domain_adhoc_filename")
        self.organization_adhoc_filename = options.get("organization_adhoc_filename")
    # These all use field() to minimize typing and/or lambda.
    # Since this file is bound to expand, we can save time
    # by reducing the line count from 2-3 to just 1 line
    # each time we want to add a new filename or option.

        ## Data files ##
        self.domain_additional_filename = options.get("domain_additional_filename")
        self.domain_contacts_filename = options.get("domain_contacts_filename")
        self.domain_statuses_filename = options.get("domain_statuses_filename")
    # This approach is also used in EppLib internally for similar reasons.

        # Flags #
        self.debug = options.get("debug")
        self.resetTable = options.get("resetTable")
    # Settings #
    directory: Optional[str] = field(default="migrationdata", repr=True)
    sep: Optional[str] = field(default="|", repr=True)
    limitParse: Optional[int] = field(default=None, repr=True)

    def args_extra_transition_domain(self):
        return {
            "agency_adhoc_filename": self.agency_adhoc_filename,
            "domain_adhoc_filename": self.domain_adhoc_filename,
            "organization_adhoc_filename": self.organization_adhoc_filename,
            "domain_additional_filename": self.domain_additional_filename,
            "directory": self.directory,
            "sep": self.sep,
        }
    # Filenames #
    ## Adhocs ##
    agency_adhoc_filename: Optional[str] = field(default=EnumFilenames.AGENCY_ADHOC.value[1], repr=True)
    domain_adhoc_filename: Optional[str] = field(default=EnumFilenames.DOMAIN_ADHOC.value[1], repr=True)
    organization_adhoc_filename: Optional[str] = field(default=EnumFilenames.ORGANIZATION_ADHOC.value[1], repr=True)
    authority_adhoc_filename: Optional[str] = field(default=EnumFilenames.AUTHORITY_ADHOC.value[1], repr=True)

    ## Data files ##
    domain_escrow_filename: Optional[str] = field(default=EnumFilenames.DOMAIN_ESCROW.value[1], repr=True)
    domain_additional_filename: Optional[str] = field(default=EnumFilenames.DOMAIN_ADDITIONAL.value[1], repr=True)
    domain_contacts_filename: Optional[str] = field(default=None, repr=True)
    domain_statuses_filename: Optional[str] = field(default=None, repr=True)

    # Flags #
    debug: Optional[bool] = field(default=False, repr=True)
    resetTable: Optional[bool] = field(default=False, repr=True)
    infer_filenames: Optional[bool] = field(default=False, repr=True)
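Note: the rewritten TransitionDomainArguments above combines @dataclass field defaults with a hand-written __init__ that accepts arbitrary kwargs and only applies the ones matching a declared field (when a class defines its own __init__, @dataclass leaves it in place). A condensed sketch of that pattern with a stand-in class:

from dataclasses import dataclass, field
from typing import Optional

@dataclass
class ScriptArguments:  # hypothetical stand-in for TransitionDomainArguments
    directory: Optional[str] = field(default="migrationdata", repr=True)
    sep: Optional[str] = field(default="|", repr=True)
    debug: Optional[bool] = field(default=False, repr=True)

    def __init__(self, **kwargs):
        self.kwargs = kwargs
        for k, v in kwargs.items():
            if hasattr(self, k):
                setattr(self, k, v)

args = ScriptArguments(directory="migrationdata/", unrelated_option=123)
print(args.directory)                     # "migrationdata/"
print(args.sep)                           # "|" (class-level default still applies)
print("unrelated_option" in args.kwargs)  # True, but it never becomes an attribute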
@@ -3,49 +3,4 @@ agencyid|agencyname|active|isfederal
2|Minyx|Y|N
3|Demivee|N|Y
4|InnoZ|Y|Y
5|Jayo|N|Y
6|Avaveo|N|Y
7|Thoughtmix|N|N
8|Photofeed|Y|N
9|Gabtype|N|Y
10|Youfeed|Y|N
11|Kwimbee|N|Y
12|Linkbridge|N|Y
13|Wikibox|Y|Y
14|Browsezoom|Y|Y
15|Zoozzy|Y|N
16|Mydeo|Y|Y
17|Chatterpoint|N|Y
18|Roodel|N|Y
19|Mybuzz|N|N
20|Thoughtmix|Y|Y
21|Brainlounge|N|Y
22|Quinu|Y|Y
23|Gigaclub|Y|N
24|Eare|Y|N
25|Einti|Y|N
26|Einti|Y|Y
27|Skidoo|Y|Y
28|Aibox|N|Y
29|Zoozzy|Y|Y
30|Centimia|Y|N
31|Einti|N|Y
32|Devcast|Y|N
33|Tagfeed|N|Y
34|Jabbersphere|Y|Y
35|Yamia|Y|Y
36|Fliptune|Y|N
37|Oloo|N|Y
38|Yozio|N|N
39|Brainsphere|Y|Y
40|Geba|Y|Y
41|Wikibox|N|Y
42|Topdrive|Y|Y
43|Lazz|N|N
44|Rooxo|Y|Y
45|Jetwire|N|N
46|Zoomzone|Y|Y
47|Thoughtbridge|Y|N
48|Pixope|Y|N
49|Quatz|N|N
50|Eare|N|Y
5|igorville|Y|N
@@ -1,51 +1,6 @@
|
|||
authorityid|firstname|middlename|lastname|email|phonenumber|agencyid|addlinfo
|
||||
1|Gregoor||Kalinke|gkalinke0@indiegogo.com|(773) 1725515|1|Asparagus - Mexican
|
||||
2|Fayre||Filippozzi|ffilippozzi1@hugedomains.com|(357) 4874280|2|Steampan - Foil
|
||||
3|Gabey||Lightbody|glightbody2@fc2.com|(332) 8165691|3|Soup - Campbells, Minestrone
|
||||
4|Seline||Tower|stower3@answers.com|(151) 5396028|4|Kiwi Gold Zespri
|
||||
5|Rahel||Bruhnsen|rbruhnsen4@google.de|(221) 9271443|5|Miso - Soy Bean Paste
|
||||
6|Barny||Hopfer|bhopfer5@geocities.jp|(785) 6558321|6|Rice - Jasmine Sented
|
||||
7|Egan|Loris, slender|Tanslie|etanslie6@auda.org.au|(867) 8852523|7|Compound - Raspberry
|
||||
8|Meg|Lesser mouse lemur|McLucky|mmclucky7@sciencedirect.com|(207) 4569199|8|Potatoes - Yukon Gold 5 Oz
|
||||
9|Roarke|American alligator|Thackwray|rthackwray8@un.org|(227) 1557126|9|Schnappes - Peach, Walkers
|
||||
10|Matteo|Tern, royal|Pancost|mpancost9@typepad.com|(425) 7967200|10|Tortillas - Flour, 12
|
||||
11|Wilhelmine||Hradsky|whradskya@tmall.com|(229) 6901308|11|Pail For Lid 1537
|
||||
12|Merrile||Dalyiel|mdalyielb@pagesperso-orange.fr|(370) 8234182|12|Table Cloth 90x90 Colour
|
||||
13|Addy||Jimenez|ajimenezc@angelfire.com|(533) 1368420|13|Wanton Wrap
|
||||
14|Florella||Tellwright|ftellwrightd@house.gov|(813) 6121895|14|Water - Tonic
|
||||
15|Jacenta||Flewitt|jflewitte@goo.ne.jp|(884) 4307761|15|Veal - Insides, Grains
|
||||
16|Nady|Baboon, gelada|Senten|nsentenf@yandex.ru|(996) 6939643|16|Soho Lychee Liqueur
|
||||
17|Mano|Common palm civet|D'Cruze|mdcruzeg@ox.ac.uk|(114) 9154228|17|Goat - Whole Cut
|
||||
18|Tadio||Walsh|twalshh@wunderground.com|(490) 6376756|18|Pomello
|
||||
19|Carey||Boler|cboleri@google.co.jp|(439) 9984218|19|Wasabi Powder
|
||||
20|Aldus||Denington|adeningtonj@npr.org|(443) 5882262|20|Devonshire Cream
|
||||
21|Herculie|Horned lark|Delooze|hdeloozek@jimdo.com|(842) 7054442|21|Higashimaru Usukuchi Soy
|
||||
22|Gertrud||Rosenzwig|grosenzwigl@bloglovin.com|(878) 8890041|22|Apples - Sliced / Wedge
|
||||
23|Cece||Grimme|cgrimmem@senate.gov|(802) 2135321|23|Capers - Ox Eye Daisy
|
||||
24|Leann|Bird, red-billed tropic|McGrey|lmcgreyn@cbsnews.com|(681) 8510458|24|Liqueur - Melon
|
||||
25|Rosabelle|Turtle, eastern box|Rennels|rrennelso@vinaora.com|(447) 9158723|25|Turnip - White, Organic
|
||||
26|Clay|Duck, white-faced whistling|Acland|caclandp@oracle.com|(387) 6213827|26|Veal - Tenderloin, Untrimmed
|
||||
27|Devland||Hugland|dhuglandq@ning.com|(391) 8516099|27|Veal - Eye Of Round
|
||||
28|Cris|Bleu, red-cheeked cordon|Morville|cmorviller@archive.org|(751) 2295767|28|Pie Shells 10
|
||||
29|Jehu||Probetts|jprobettss@mac.com|(719) 3208086|29|Pasta - Ravioli
|
||||
30|Kamilah||Bartalin|kbartalint@devhub.com|(889) 5426094|30|Oranges
|
||||
31|Katherine|Crane, brolga|Degoe|kdegoeu@weebly.com|(815) 7408114|31|Oranges - Navel, 72
|
||||
32|Kassey||Riba|kribav@soup.io|(267) 4032421|32|Sauce - Vodka Blush
|
||||
33|Marcello||Woodman|mwoodmanw@dell.com|(869) 5497448|33|Potatoes - Instant, Mashed
|
||||
34|Marie-jeanne||Yo|myox@nymag.com|(414) 2722319|34|Cafe Royale
|
||||
35|Zerk||Morland|zmorlandy@xing.com|(684) 4155779|35|Muskox - French Rack
|
||||
36|Rene|Darwin ground finch (unidentified)|Booker|rbookerz@tmall.com|(727) 7131800|36|Onions - Red
|
||||
37|Romain||Kinnie|rkinnie10@cdc.gov|(967) 3799924|37|Lettuce - Spring Mix
|
||||
38|Fredra||Denisot|fdenisot11@google.es|(986) 9408987|38|Vinegar - Raspberry
|
||||
39|Ania||Djurdjevic|adjurdjevic12@wikispaces.com|(854) 8149676|39|Crawfish
|
||||
40|Gretal|Red-cheeked cordon bleu|Winson|gwinson13@istockphoto.com|(407) 3343406|40|Chips - Miss Vickies
|
||||
41|Ibby|Squirrel, antelope ground|Bediss|ibediss14@webs.com|(517) 5564511|41|Sobe - Orange Carrot
|
||||
42|Kingsley||Sawl|ksawl15@reuters.com|(994) 8049936|42|Water - Mineral, Carbonated
|
||||
43|Syd|Lapwing, southern|Valente|svalente16@whitehouse.gov|(442) 8663735|43|Pork - Sausage, Medium
|
||||
44|Jsandye||Maylin|jmaylin17@archive.org|(560) 9571021|44|Muffin Hinge Container 6
|
||||
45|Beilul||Sedworth|bsedworth18@noaa.gov|(590) 1848805|45|Coriander - Seed
|
||||
46|Dudley||Note|dnote19@unblog.fr|(674) 5901607|46|Apple - Custard
|
||||
47|Berte||Forsdicke|bforsdicke1a@theguardian.com|(680) 4006701|47|Paste - Black Olive
|
||||
48|Gwendolen|Magpie, australian|Drawmer|gdrawmer1b@nba.com|(414) 1746171|48|Extract - Almond
|
||||
49|Ade||Wilkes|awilkes1c@google.es|(340) 1804264|49|Onions - Pearl
|
||||
50|Nils||Burnard|nburnard1d@tuttocitta.it|(729) 4332944|50|Basil - Pesto Sauce
|
||||
1|Gregoor|middle|Kalinke|gkalinke0@indiegogo.com|(773) 172-5515|1|Asparagus - Mexican
|
||||
2|Fayre||Filippozzi|ffilippozzi1@hugedomains.com|(357) 487-4280|2|Steampan - Foil
|
||||
3|Gabey||Lightbody|glightbody2@fc2.com|(332) 816-5691|3|Soup - Campbells, Minestrone
|
||||
4|Seline||Tower|stower3@answers.com|(151) 539-6028|4|Kiwi Gold Zespri
|
||||
5|Joe||Smoe|joe@smoe.gov|(111) 111-1111|5|Kiwi Gold Zespri
|
|
@@ -4,4 +4,7 @@ USER2|12355_CONTACT|123-123-1234||918-000-0000||susy.martin4@test.com|GSA|SOMECO
USER3|12356_CONTACT|123-123-1234||918-000-0000||stephania.winters4@test.com|GSA|SOMECOMPANY|ctldbatch|2021-06-30T15:23:10Z|SOMECOMPANY|ctldbatch|2021-06-30T18:28:09Z|
USER4|12357_CONTACT|123-123-1234||918-000-0000||alexandra.bobbitt5@test.com|GSA|SOMECOMPANY|ctldbatch|2021-06-30T15:23:10Z|SOMECOMPANY|ctldbatch|2021-08-02T22:13:09Z|
USER5|12362_CONTACT|123-123-1234||918-000-0000||jospeh.mcdowell3@test.com|GSA|SOMECOMPANY|ctldbatch|2021-06-30T17:58:09Z|SOMECOMPANY|ctldbatch|2021-06-30T18:33:09Z|
USER6|12363_CONTACT|123-123-1234||918-000-0000||reginald.ratcliff4@test.com|GSA|SOMECOMPANY|ctldbatch|2021-06-30T17:58:09Z|SOMECOMPANY|ctldbatch|2021-06-30T18:18:09Z|
USER6|12363_CONTACT|123-123-1234||918-000-0000||reginald.ratcliff4@test.com|GSA|SOMECOMPANY|ctldbatch|2021-06-30T17:58:09Z|SOMECOMPANY|ctldbatch|2021-06-30T18:18:09Z|
USER7|12364_CONTACT|123-123-1234||918-000-0000||reginald.ratcliff4@test.com|GSA|SOMECOMPANY|ctldbatch|2021-06-30T17:58:09Z|SOMECOMPANY|ctldbatch|2021-06-30T18:18:09Z|
USER8|12365_CONTACT|123-123-1234||918-000-0000||reginald.ratcliff4@test.com|GSA|SOMECOMPANY|ctldbatch|2021-06-30T17:58:09Z|SOMECOMPANY|ctldbatch|2021-06-30T18:18:09Z|
USER9|12366_CONTACT|123-123-1234||918-000-0000||reginald.ratcliff4@test.com|GSA|SOMECOMPANY|ctldbatch|2021-06-30T17:58:09Z|SOMECOMPANY|ctldbatch|2021-06-30T18:18:09Z|
@@ -1,51 +1,6 @@
|
|||
domainname|domaintypeid|authorityid|orgid|securitycontactemail|dnsseckeymonitor|domainpurpose
|
||||
indiegogo.com|1|1|1|ggennrich0@utexas.edu|N|Praesent id massa id nisl venenatis lacinia.
|
||||
gravatar.com|2|2|2|lrome1@uol.com.br|Y|In tempor, turpis nec euismod scelerisque, quam turpis adipiscing lorem, vitae mattis nibh ligula nec sem.
|
||||
multiply.com|3|3|3|ybrommage2@vistaprint.com|Y|In hac habitasse platea dictumst.
|
||||
free.fr|4|4|4|plarderot3@t.co|Y|Morbi quis tortor id nulla ultrices aliquet. Maecenas leo odio, condimentum id, luctus nec, molestie sed, justo. Pellentesque viverra pede ac diam.
|
||||
washingtonpost.com|5|5|5|dchaim4@yahoo.co.jp|N|Maecenas tincidunt lacus at velit. Vivamus vel nulla eget eros elementum pellentesque. Quisque porta volutpat erat.
|
||||
simplemachines.org|6|6|6|iyaxley5@slashdot.org|N|Phasellus in felis.
|
||||
ted.com|7|7|7|ktresler6@netscape.com|N|Integer pede justo, lacinia eget, tincidunt eget, tempus vel, pede. Morbi porttitor lorem id ligula. Suspendisse ornare consequat lectus.
|
||||
diigo.com|8|8|8|dfiggures7@opera.com|Y|
|
||||
cafepress.com|9|9|9|npowys8@psu.edu|N|Integer a nibh. In quis justo.
|
||||
slashdot.org|10|10|10||N|Curabitur at ipsum ac tellus semper interdum. Mauris ullamcorper purus sit amet nulla. Quisque arcu libero, rutrum ac, lobortis vel, dapibus at, diam.
|
||||
shop-pro.jp|11|11|11|laspina@e-recht24.de|N|Aliquam non mauris. Morbi non lectus.
|
||||
nature.com|12|12|12|wlanonb@bloomberg.com|Y|Praesent id massa id nisl venenatis lacinia.
|
||||
comsenz.com|13|13|13|jhalloranc@wiley.com|Y|Morbi ut odio.
|
||||
ed.gov|14|14|14|lpatemand@mit.edu|N|Donec odio justo, sollicitudin ut, suscipit a, feugiat et, eros. Vestibulum ac est lacinia nisi venenatis tristique.
|
||||
webnode.com|15|15|15|nhirthe@storify.com|Y|Mauris lacinia sapien quis libero. Nullam sit amet turpis elementum ligula vehicula consequat. Morbi a ipsum.
|
||||
nature.com|16|16|16|nnussiif@yale.edu|N|Ut tellus. Nulla ut erat id mauris vulputate elementum. Nullam varius.
|
||||
prweb.com|17|17|17||Y|Morbi ut odio. Cras mi pede, malesuada in, imperdiet et, commodo vulputate, justo. In blandit ultrices enim.
|
||||
economist.com|18|18|18|klarkbyh@usnews.com|N|Nulla suscipit ligula in lacus.
|
||||
i2i.jp|19|19|19|rharrisi@foxnews.com|N|Ut at dolor quis odio consequat varius. Integer ac leo.
|
||||
discuz.net|20|20|20|bmeaj@miibeian.gov.cn|N|Morbi quis tortor id nulla ultrices aliquet.
|
||||
google.de|21|21|21|ubakesefk@4shared.com|Y|Etiam vel augue. Vestibulum rutrum rutrum neque.
|
||||
trellian.com|22|22|22|ekleimtl@amazon.co.jp|N|Duis bibendum. Morbi non quam nec dui luctus rutrum. Nulla tellus.
|
||||
hhs.gov|23|23|23|mbulmanm@nymag.com|Y|Vestibulum rutrum rutrum neque.
|
||||
whitehouse.gov|24|24|24|cstudden@fc2.com|N|Quisque erat eros, viverra eget, congue eget, semper rutrum, nulla. Nunc purus. Phasellus in felis.
|
||||
cbc.ca|25|25|25|kbuskeo@shutterfly.com|Y|
|
||||
prweb.com|26|26|26|hcoldbathp@reddit.com|N|Nullam varius. Nulla facilisi. Cras non velit nec nisi vulputate nonummy.
|
||||
wunderground.com|27|27|27|nmessinghamq@macromedia.com|Y|Aliquam erat volutpat. In congue.
|
||||
netlog.com|28|28|28|rthawr@ow.ly|Y|Nunc purus. Phasellus in felis.
|
||||
biblegateway.com|29|29|29|whurlstons@github.com|Y|
|
||||
istockphoto.com|30|30|30|mjiroutekt@un.org|N|Pellentesque eget nunc. Donec quis orci eget orci vehicula condimentum. Curabitur in libero ut massa volutpat convallis.
|
||||
nyu.edu|31|31|31|kpatmoreu@hhs.gov|N|Morbi odio odio, elementum eu, interdum eu, tincidunt in, leo. Maecenas pulvinar lobortis est. Phasellus sit amet erat.
|
||||
github.io|32|32|32|tgaberv@businessweek.com|N|Quisque erat eros, viverra eget, congue eget, semper rutrum, nulla.
|
||||
globo.com|33|33|33|tmidlarw@google.com.br|N|Maecenas rhoncus aliquam lacus.
|
||||
constantcontact.com|34|34|34|plaverenzx@cdbaby.com|Y|Aenean lectus. Pellentesque eget nunc. Donec quis orci eget orci vehicula condimentum.
|
||||
howstuffworks.com|35|35|35|agermainy@bloomberg.com|N|Nullam molestie nibh in lectus. Pellentesque at nulla. Suspendisse potenti.
|
||||
stanford.edu|36|36|36|ndabornz@smh.com.au|Y|Cras non velit nec nisi vulputate nonummy. Maecenas tincidunt lacus at velit. Vivamus vel nulla eget eros elementum pellentesque.
|
||||
csmonitor.com|37|37|37||Y|Nulla justo. Aliquam quis turpis eget elit sodales scelerisque.
|
||||
dagondesign.com|38|38|38|emiller11@adobe.com|Y|Donec odio justo, sollicitudin ut, suscipit a, feugiat et, eros. Vestibulum ac est lacinia nisi venenatis tristique.
|
||||
macromedia.com|39|39|39|bjosephov12@youtube.com|Y|Nulla ac enim. In tempor, turpis nec euismod scelerisque, quam turpis adipiscing lorem, vitae mattis nibh ligula nec sem.
|
||||
virginia.edu|40|40|40|bashbe13@xinhuanet.com|Y|Proin risus.
|
||||
wsj.com|41|41|41||Y|Praesent lectus. Vestibulum quam sapien, varius ut, blandit non, interdum in, ante. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Duis faucibus accumsan odio.
|
||||
dedecms.com|42|42|42|brain15@sourceforge.net|Y|Sed ante.
|
||||
ovh.net|43|43|43|gbrehat16@livejournal.com|N|Ut tellus. Nulla ut erat id mauris vulputate elementum. Nullam varius.
|
||||
nyu.edu|44|44|44|fguarnier17@weebly.com|N|Nullam orci pede, venenatis non, sodales sed, tincidunt eu, felis.
|
||||
kickstarter.com|45|45|45|byes18@google.pl|Y|Pellentesque eget nunc.
|
||||
about.com|46|46|46|tweine19@wikispaces.com|Y|Integer ac neque. Duis bibendum.
|
||||
prlog.org|47|47|47|cweeden1a@cocolog-nifty.com|N|Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Donec pharetra, magna vestibulum aliquet ultrices, erat tortor sollicitudin mi, sit amet lobortis sapien sapien non mi. Integer ac neque.
|
||||
sciencedaily.com|48|48|48|wforcade1b@sciencedaily.com|N|Etiam justo. Etiam pretium iaculis justo.
|
||||
plala.or.jp|49|49|49|gcarleton1c@quantcast.com|Y|Phasellus sit amet erat. Nulla tempus.
|
||||
psu.edu|50|50|50|jabbati1d@omniture.com|N|Morbi non quam nec dui luctus rutrum. Nulla tellus. In sagittis dui vel nisl.
|
||||
Anomaly.gov|1|1|1|ggennrich0@utexas.edu|N|Praesent id massa id nisl venenatis lacinia.
|
||||
TESTDOMAIN.GOV|2|2|2|lrome1@uol.com.br|Y|In tempor, turpis nec euismod scelerisque, quam turpis adipiscing lorem, vitae mattis nibh ligula nec sem.
|
||||
FakeWebsite1.gov|3|3|3|ybrommage2@vistaprint.com|Y|In hac habitasse platea dictumst.
|
||||
FakeWebsite2.gov|4|4|4|plarderot3@t.co|Y|Morbi quis tortor id nulla ultrices aliquet. Maecenas leo odio, condimentum id, luctus nec, molestie sed, justo. Pellentesque viverra pede ac diam.
|
||||
FakeWebsite3.gov|13|5|5|ybrommage2@vistaprint.com|Y|In hac habitasse platea dictumst.
|
|
@@ -1,8 +1,11 @@
Anomaly.gov|ANOMALY|tech
TestDomain.gov|TESTUSER|admin
FakeWebsite1|USER1|admin
FakeWebsite1|USER2|tech
FakeWebsite1|USER3|billing
FakeWebsite2.GOV|USER4|admin
FakeWebsite2.GOV|USER5|billing
FakeWebsite2.GOV|USER6|tech
FakeWebsite1.gov|USER1|admin
FakeWebsite1.gov|USER2|tech
FakeWebsite1.gov|USER3|billing
FakeWebsite2.gov|USER4|admin
FakeWebsite2.gov|USER5|billing
FakeWebsite2.gov|USER6|tech
FakeWebsite3.gov|USER7|admin
FakeWebsite3.gov|USER8|billing
FakeWebsite3.gov|USER9|tech
@@ -1,4 +1,5 @@
Anomaly.gov|muahaha|
TestDomain.gov|ok|
FakeWebsite1.GOV|serverHold|
FakeWebsite2.GOV|Hold|
FakeWebsite1.gov|serverHold|
FakeWebsite2.gov|Hold|
FakeWebsite3.gov|ok|
@@ -0,0 +1,5 @@
Anomaly.gov|SOME_STRING||data|data|data|data|2008-03-09T16:12:47Z|DATA2|ctldbatch|2022-06-06T01:33:10Z|2023-03-09T16:12:47Z|2023-02-09T16:12:47Z
TestDomain.gov|SOME_STRING|data|data|data|data|data|2014-03-15T15:45:05Z|DATA2|ctldbatch|2022-02-13T17:33:07Z|2023-03-15T15:45:05Z|2023-02-15T15:45:05Z
FakeWebsite1.gov|SOME_STRING||data|data|data|data|2020-06-14T16:30:06Z|DATA2|ctldbatch|2022-05-16T14:58:10Z|2023-06-14T16:30:06Z|2023-05-14T16:30:06Z
FakeWebsite2.gov|SOME_STRING||data|data|data|data|2004-05-07T04:00:00Z|DATA2|ctldbatch|2022-08-18T15:23:09Z|2023-09-30T18:37:39Z|2023-08-30T18:37:39Z
FakeWebsite3.gov|SOME_STRING||data|data|data|data|2004-05-07T04:00:00Z|DATA2|ctldbatch|2022-08-18T15:23:09Z|2023-09-30T18:37:39Z|2023-08-30T18:37:39Z
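Note: each escrow row above is pipe-delimited and carries several "Z"-suffixed timestamps; the loader described earlier uses this file for creation/expiration dates. A hedged sketch of splitting one row and parsing its timestamps follows; which column is the creation date versus the expiration date is an assumption here, not something the fixture states.

from datetime import datetime

row = "Anomaly.gov|SOME_STRING||data|data|data|data|2008-03-09T16:12:47Z|DATA2|ctldbatch|2022-06-06T01:33:10Z|2023-03-09T16:12:47Z|2023-02-09T16:12:47Z"
fields = row.split("|")

def parse_timestamp(value):
    # strptime is used so the "Z" suffix parses on older Pythons too
    return datetime.strptime(value, "%Y-%m-%dT%H:%M:%SZ")

print(fields[0])                    # "Anomaly.gov"
print(parse_timestamp(fields[7]))   # 2008-03-09 16:12:47 (assumed creation date)
print(parse_timestamp(fields[11]))  # 2023-03-09 16:12:47 (assumed expiration date)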
@@ -1,51 +1,6 @@
|
|||
orgid|orgname|orgstreet|orgcity|orgstate|orgzip|orgcountrycode
|
||||
1|Flashdog|298 Monument Hill|Lakeland|Florida|33805|US
|
||||
2|Gigaclub|782 Mosinee Lane|Alexandria|Louisiana|71307|US
|
||||
3|Midel|376 Donald Pass|Waco|Texas|76705|US
|
||||
3|Midel|376 Joe Pass|Waco|Texas|76705|US
|
||||
4|Fanoodle|93001 Arizona Drive|Columbus|Ohio|43268|US
|
||||
5|Kwideo|01 Lotheville Place|Beaumont|Texas|77713|US
|
||||
6|Brainverse|8 Jenifer Point|Fort Myers|Florida|33994|US
|
||||
7|Brainsphere|74264 Reinke Place|Flint|Michigan|48550|US
|
||||
8|Pixoboo|40090 Lillian Avenue|Metairie|Louisiana|70033|US
|
||||
9|Topicshots|99331 Quincy Alley|Cleveland|Ohio|44177|US
|
||||
10|Eayo|17364 Vahlen Avenue|El Paso|Texas|88558|US
|
||||
11|Myworks|961 Kim Park|Honolulu|Hawaii|96845|US
|
||||
12|Flashset|262 Mcguire Parkway|Rochester|New York|14683|US
|
||||
13|Quatz|8 Forest Street|Warren|Ohio|44485|US
|
||||
14|Kazio|928 Carey Plaza|Miami|Florida|33196|US
|
||||
15|DabZ|05350 Claremont Circle|Lexington|Kentucky|40581|US
|
||||
16|Livepath|698 5th Crossing|Boca Raton|Florida|33499|US
|
||||
17|Centimia|82 Packers Court|Simi Valley|California|93094|US
|
||||
18|Avavee|4 Old Gate Center|Tucson|Arizona|85710|US
|
||||
19|Wikizz|74785 Oak Valley Crossing|Phoenix|Arizona|85040|US
|
||||
20|Wikivu|877 Gulseth Park|Tallahassee|Florida|32309|US
|
||||
21|Brainbox|8 Esker Lane|Lexington|Kentucky|40524|US
|
||||
22|Jaxworks|2 Prairieview Street|Young America|Minnesota|55573|US
|
||||
23|Youfeed|191 Ramsey Junction|Suffolk|Virginia|23436|US
|
||||
24|Ntags|24 Melby Court|Kansas City|Missouri|64136|US
|
||||
25|Realblab|119 Butternut Avenue|Dallas|Texas|75323|US
|
||||
26|Trudeo|69 Cordelia Park|Palmdale|California|93591|US
|
||||
27|Wordware|74540 Jenifer Pass|Lake Charles|Louisiana|70607|US
|
||||
28|Jaxnation|80974 Homewood Avenue|Philadelphia|Pennsylvania|19160|US
|
||||
29|Latz|01989 Red Cloud Hill|Columbus|Ohio|43226|US
|
||||
30|Fivespan|0 Ryan Plaza|Honolulu|Hawaii|96805|US
|
||||
31|Youfeed|24930 Hoard Park|San Antonio|Texas|78260|US
|
||||
32|Browsetype|49 Waxwing Circle|Oklahoma City|Oklahoma|73119|US
|
||||
33|Oba|8426 Thompson Parkway|Anaheim|California|92825|US
|
||||
34|Yodo|64815 Thackeray Crossing|Salinas|California|93907|US
|
||||
35|Thoughtstorm|79 Del Sol Drive|Evansville|Indiana|47719|US
|
||||
36|Yamia|2 Marquette Junction|Newark|New Jersey|07195|US
|
||||
37|Demimbu|14 American Ash Trail|Bronx|New York|10474|US
|
||||
38|Rhybox|6234 Cambridge Drive|Fort Lauderdale|Florida|33305|US
|
||||
39|Fivespan|4 Fair Oaks Terrace|Phoenix|Arizona|85040|US
|
||||
40|Skipfire|4 Dayton Circle|Mesquite|Texas|75185|US
|
||||
41|Thoughtmix|52 Claremont Avenue|Indianapolis|Indiana|46207|US
|
||||
42|Meembee|59 Green Ridge Park|Gainesville|Georgia|30506|US
|
||||
43|Trudeo|54 Schurz Place|Chicago|Illinois|60609|US
|
||||
44|Tazz|722 Bunker Hill Place|Bronx|New York|10454|US
|
||||
45|Skiba|8776 Pennsylvania Way|Fayetteville|North Carolina|28314|US
|
||||
46|Zoomlounge|56 Sloan Circle|Evansville|Indiana|47712|US
|
||||
47|Cogilith|7 American Ash Trail|Houston|Texas|77255|US
|
||||
48|Browsebug|15903 Stephen Hill|Arlington|Virginia|22244|US
|
||||
49|Yamia|9144 Graedel Crossing|Lehigh Acres|Florida|33972|US
|
||||
501|Lazzy|4958 Kensington Alley|Fayetteville|North Carolina|28305|US
|
||||
5|Sushi|9999 Sushi Way|Columbus|Ohio|43268|US
|
|
@@ -14,9 +14,6 @@ from django.core.management import call_command
from unittest.mock import patch

class TestMigrations(TestCase):

    """ """

    def setUp(self):
        """ """
        # self.load_transition_domain_script = "load_transition_domain",

@@ -54,6 +51,13 @@ class TestMigrations(TestCase):
            f"{self.test_data_file_location}/{self.test_domain_contact_filename}",
            f"{self.test_data_file_location}/{self.test_contact_filename}",
            f"{self.test_data_file_location}/{self.test_domain_status_filename}",
            directory=self.test_data_file_location,
            agency_adhoc_filename=self.test_agency_adhoc_filename,
            domain_additional_filename=self.test_domain_additional,
            domain_escrow_filename=self.test_escrow_domains_daily,
            domain_adhoc_filename=self.test_domain_types_adhoc,
            organization_adhoc_filename=self.test_organization_adhoc,
            authority_adhoc_filename=self.test_authority_adhoc_filename,
        )

    def run_transfer_domains(self):
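Note: the setUp change above passes the new filename options as keyword arguments alongside the positional fixture paths. A hedged sketch of how such a call_command invocation looks; the command name and literal paths are placeholders for this sketch, not the test's exact values.

from django.core.management import call_command

call_command(
    "load_transition_domain",                    # assumed command name for this sketch
    "migrationdata/test_domain_contacts.txt",    # placeholder positional fixture paths
    "migrationdata/test_contacts.txt",
    "migrationdata/test_domain_statuses.txt",
    directory="migrationdata/test_data",
    agency_adhoc_filename="test_agency.adhoc.dotgov.txt",
    domain_additional_filename="test_domainadditionaldatalink.adhoc.dotgov.txt",
)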
@@ -164,7 +168,7 @@ class TestMigrations(TestCase):

        # STEP 2: (analyze the tables just like the
        # migration script does, but add assert statements)
        expected_total_transition_domains = 8
        expected_total_transition_domains = 9
        expected_total_domains = 4
        expected_total_domain_informations = 4
        expected_total_domain_invitations = 7
@@ -185,12 +189,13 @@ class TestMigrations(TestCase):
            expected_missing_domain_invitations,
        )

    def test_load_transition_domain(self):
    def test_load_empty_transition_domain(self):
        """Loads TransitionDomains without additional data"""
        self.run_load_domains()

        # STEP 2: (analyze the tables just like the migration
        # script does, but add assert statements)
        expected_total_transition_domains = 8
        expected_total_transition_domains = 9
        expected_total_domains = 0
        expected_total_domain_informations = 0
        expected_total_domain_invitations = 0
@@ -209,14 +214,233 @@ class TestMigrations(TestCase):
            expected_missing_domain_informations,
            expected_missing_domain_invitations,
        )

    def test_load_full_transition_domain(self):
        # Load command
        self.run_load_domains()

    def test_transfer_transition_domains_to_domains(self):
        # TODO: setup manually instead of calling other script
        # We should get a consistent number
        # of records
        expected_total_transition_domains = 9
        expected_total_domains = 0
        expected_total_domain_informations = 0
        expected_total_domain_invitations = 0

        expected_missing_domains = 9
        expected_duplicate_domains = 0
        expected_missing_domain_informations = 9
        expected_missing_domain_invitations = 9
        self.compare_tables(
            expected_total_transition_domains,
            expected_total_domains,
            expected_total_domain_informations,
            expected_total_domain_invitations,
            expected_missing_domains,
            expected_duplicate_domains,
            expected_missing_domain_informations,
            expected_missing_domain_invitations,
        )

        expected_transition_domains = [
            TransitionDomain(
                username="",
                domain_name="anomaly.gov",
                status="ready",
                email_sent=False,
                organization_type=None,
                organization_name="Flashdog",
                federal_type=None,
                federal_agency=None,
                epp_creation_date=None,
                epp_expiration_date=None
            ),
            TransitionDomain(
                username="testuser@gmail.com",
                domain_name="testdomain.gov",
                status="ready",
                email_sent=False,
                organization_type=None,
                organization_name="Gigaclub",
                federal_type=None,
                federal_agency=None,
                epp_creation_date=None,
                epp_expiration_date=None
            ),
            TransitionDomain(
                username="agustina.wyman7@test.com",
                domain_name="fakewebsite1.gov",
                status="on hold",
                email_sent=False,
                organization_type=None,
                organization_name="Midel",
                federal_type=None,
                federal_agency=None,
                epp_creation_date=None,
                epp_expiration_date=None
            ),
            TransitionDomain(
                username="susy.martin4@test.com",
                domain_name="fakewebsite1.gov",
                status="on hold",
                email_sent=False,
                organization_type=None,
                organization_name="Midel",
                federal_type=None,
                federal_agency=None,
                epp_creation_date=None,
                epp_expiration_date=None
            ),
            TransitionDomain(
                username="stephania.winters4@test.com",
                domain_name="fakewebsite1.gov",
                status="on hold",
                email_sent=False,
                organization_type=None,
                organization_name="Midel",
                federal_type=None,
                federal_agency=None,
                epp_creation_date=None,
                epp_expiration_date=None
            ),
            TransitionDomain(
                username="alexandra.bobbitt5@test.com",
                domain_name="fakewebsite2.gov",
                status="on hold",
                email_sent=False,
                organization_type="Federal",
                organization_name="Fanoodle",
                federal_type="Executive",
                federal_agency="InnoZ",
                epp_creation_date=None,
                epp_expiration_date=None
            ),
            TransitionDomain(
                username="jospeh.mcdowell3@test.com",
                domain_name="fakewebsite2.gov",
                status="on hold",
                email_sent=False,
                organization_type="Federal",
                organization_name="Fanoodle",
                federal_type="Executive",
                federal_agency="InnoZ",
                epp_creation_date=None,
                epp_expiration_date=None
            ),
            TransitionDomain(
                username="reginald.ratcliff4@test.com",
                domain_name="fakewebsite2.gov",
                status="on hold",
                email_sent=False,
                organization_type="Federal",
                organization_name="Fanoodle",
                federal_type="Executive",
                federal_agency="InnoZ",
                epp_creation_date=None,
                epp_expiration_date=None
            ),
            TransitionDomain(
                username="reginald.ratcliff4@test.com",
                domain_name="fakewebsite3.gov",
                status="ready",
                email_sent=False,
                organization_type="City",
                organization_name="Sushi",
                federal_type=None,
                federal_agency=None,
                epp_creation_date=None,
                epp_expiration_date=None
            )
        ]

        # Afterwards, their values should be what we expect
        all_transition_domains = TransitionDomain.objects.all()
        for domain in all_transition_domains:
            for expected in expected_transition_domains:

                # This data gets created when the object is,
                # so we should just match it. Not relevant
                # to the added data.
                expected.id = domain.id
                expected.created_at = domain.created_at
                expected.updated_at = domain.updated_at

                # Each TransitionDomain should have the correct data
                self.assertEqual(domain, expected)

    def test_load_full_transfer_domain(self):
        self.run_load_domains()
        self.run_transfer_domains()

        # Analyze the tables
        expected_total_transition_domains = 8
        expected_total_transition_domains = 9
        expected_total_domains = 5
        expected_total_domain_informations = 5
        expected_total_domain_invitations = 8

        expected_missing_domains = 0
        expected_duplicate_domains = 0
        expected_missing_domain_informations = 0
        expected_missing_domain_invitations = 1
        self.compare_tables(
            expected_total_transition_domains,
            expected_total_domains,
            expected_total_domain_informations,
            expected_total_domain_invitations,
            expected_missing_domains,
            expected_duplicate_domains,
            expected_missing_domain_informations,
            expected_missing_domain_invitations,
        )

        expected_domains = [
            Domain(
                expiration_date=None,
                name="anomaly.gov",
                state="ready",
            ),
            Domain(
                expiration_date=None,
                name="testdomain.gov",
                state="ready",
            ),
            Domain(
                expiration_date=None,
                name="fakewebsite1.gov",
                state="on hold",
            ),
            Domain(
                expiration_date=None,
                name="fakewebsite2.gov",
                state="on hold",
            ),
            Domain(
                expiration_date=None,
                name="fakewebsite3.gov",
                state="ready",
            ),
        ]

        for domain in Domain.objects.all():
            print(f"""
            Domain(
                expiration_date={domain.expiration_date},
                name="{domain.name}",
                state="{domain.state}",
            ),
            """
            )
            for expected in expected_domains:
                expected.id = domain.id
                expected.created_at = domain.created_at
                expected.updated_at = domain.updated_at
                self.assertEqual(domain, expected)

    def test_transfer_transition_domains_to_domains(self):
        self.run_load_domains()
        self.run_transfer_domains()

        # Analyze the tables
        expected_total_transition_domains = 9
        expected_total_domains = 4
        expected_total_domain_informations = 4
        expected_total_domain_invitations = 7