Change logging and add secrets

zandercymatics 2023-11-29 11:57:17 -07:00
parent b4caa85dd1
commit 59950a1fde
No known key found for this signature in database
GPG key ID: FF4636ABEC9682B7
6 changed files with 86 additions and 37 deletions

View file

@@ -2,6 +2,7 @@ name: Upload current-full.csv and current-federal.csv
 run-name: Upload current-full.csv and current-federal.csv for branch ${{ github.head_ref }}
 on:
+  pull_request:
   workflow_dispatch:
     inputs:
       environment:
@@ -42,9 +43,24 @@ jobs:
           const environment = (github && github.event && github.event.inputs) ? github.event.inputs.environment : 'za';
           core.setOutput('environment', environment);
+  wait-for-deploy:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Wait for deploy to complete
+        uses: fountainhead/action-wait-for-check@v1.0.0
+        id: wait-for-deploy
+        with:
+          token: ${{ secrets.GITHUB_TOKEN }}
+          checkName: "deploy"
+          ref: ${{ github.event.pull_request.head.sha }}
+          # the maximum time to wait for the check to complete, in seconds
+          timeoutSeconds: 600
+          # the time to wait between checks, in seconds
+          intervalSeconds: 10
+
   upload-reports:
     runs-on: ubuntu-latest
-    needs: [variables]
+    needs: [variables, wait-for-deploy]
     env:
       CF_USERNAME: CF_{{ needs.variables.outputs.environment }}_USERNAME
       CF_PASSWORD: CF_{{ needs.variables.outputs.environment }}_PASSWORD
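
The new wait-for-deploy job gates the upload on the deploy check finishing. fountainhead/action-wait-for-check does this by polling the GitHub Checks API; a rough Python sketch of that loop, for illustration only (the endpoint is the real Checks API, but the function and its token handling are assumptions, not part of this commit):

```python
import os
import time

import requests  # assumption: the real action is TypeScript, not Python

def wait_for_check(repo, ref, check_name, timeout_seconds=600, interval_seconds=10):
    """Poll the GitHub Checks API until `check_name` on `ref` completes."""
    url = f"https://api.github.com/repos/{repo}/commits/{ref}/check-runs"
    headers = {"Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}"}
    deadline = time.monotonic() + timeout_seconds
    while time.monotonic() < deadline:
        runs = requests.get(url, headers=headers).json().get("check_runs", [])
        for run in runs:
            if run["name"] == check_name and run["status"] == "completed":
                return run["conclusion"]  # e.g. "success" or "failure"
        time.sleep(interval_seconds)
    return "timed_out"
```

Making upload-reports need both variables and wait-for-deploy ensures the CSVs are uploaded against the freshly deployed app rather than racing the deploy.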

View file

@@ -12,7 +12,7 @@ from login_required import login_not_required
 from cachetools.func import ttl_cache

-from registrar.utility.s3_bucket import S3ClientHelper
+from registrar.utility.s3_bucket import S3ClientError, S3ClientHelper

 DOMAIN_FILE_URL = "https://raw.githubusercontent.com/cisagov/dotgov-data/main/current-full.csv"
@@ -103,31 +103,32 @@ def available(request, domain=""):
 @require_http_methods(["GET"])
 @login_not_required
-def get_current_full(request, file_path="migrationdata/current-full.csv"):
+def get_current_full(request, file_name="current-full.csv"):
     """This will return the file content of current-full.csv which is the command
     output of generate_current_full_report.py. This command iterates through each Domain
     and returns a CSV representation."""
-    return serve_file(file_path, "current-full.csv")
+    return serve_file(file_name)

 @require_http_methods(["GET"])
 @login_not_required
-def get_current_federal(request, file_path="migrationdata/current-federal.csv"):
+def get_current_federal(request, file_name="current-federal.csv"):
     """This will return the file content of current-federal.csv which is the command
     output of generate_current_federal_report.py. This command iterates through each Domain
     and returns a CSV representation."""
-    return serve_file(file_path, "current-federal.csv")
+    return serve_file(file_name)

-def serve_file(file_path, file_name):
-    """Downloads a file based on a given filepath. Returns a 404 if not found."""
+def serve_file(file_name):
+    """Downloads a file based on a given filepath. Returns a 500 if not found."""
     s3_client = S3ClientHelper()
-    # TODO - #1403, grab from the S3 instance instead
-    # TODO - check if file exists in s3, not here
-    if os.path.exists(file_path):
-        # Serve the CSV file
-        file = s3_client.get_file(file_name)
-        response = FileResponse(file)
-        return response
-    else:
-        return HttpResponse("File not found", status=404)
+    # Serve the CSV file. If not found, an exception will be thrown.
+    # This will then be caught by flat, causing it to not read it - which is what we want.
+    try:
+        file = s3_client.get_file(file_name, decode_to_utf=True)
+    except S3ClientError as err:
+        # TODO - #1317: Notify operations when auto report generation fails
+        raise err
+
+    response = HttpResponse(file)
+    return response
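
The practical effect: a missing CSV now surfaces as an uncaught S3ClientError (so Django returns a 500) instead of a quiet 404, and the decoded string goes back in a plain HttpResponse rather than a FileResponse stream. A minimal sketch of exercising the rewritten view directly (the import path and test setup are assumptions):

```python
# Sketch only: assumes Django is configured and the S3 bucket holds current-full.csv.
from django.test import RequestFactory

from registrar.views import get_current_full  # import path is an assumption

request = RequestFactory().get("/current-full")
response = get_current_full(request)
assert response.status_code == 200  # on S3ClientError, Django would return a 500 instead
print(response.content[:80])  # first bytes of the decoded CSV
```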

View file

@@ -58,6 +58,11 @@ secret_key = secret("DJANGO_SECRET_KEY")
 secret_aws_ses_key_id = secret("AWS_ACCESS_KEY_ID", None)
 secret_aws_ses_key = secret("AWS_SECRET_ACCESS_KEY", None)

+secret_aws_s3_key_id = secret("AWS_S3_ACCESS_KEY_ID", None)
+secret_aws_s3_key = secret("AWS_S3_SECRET_ACCESS_KEY", None)
+aws_s3_region_name = secret("AWS_S3_REGION", None)
+secret_aws_s3_bucket_name = secret("AWS_S3_BUCKET_NAME", None)
+
 secret_registry_cl_id = secret("REGISTRY_CL_ID")
 secret_registry_password = secret("REGISTRY_PASSWORD")
 secret_registry_cert = b64decode(secret("REGISTRY_CERT", ""))
@@ -257,6 +262,13 @@ AUTH_USER_MODEL = "registrar.User"
 AWS_ACCESS_KEY_ID = secret_aws_ses_key_id
 AWS_SECRET_ACCESS_KEY = secret_aws_ses_key
 AWS_REGION = "us-gov-west-1"
+
+# Configuration for accessing AWS S3
+AWS_S3_ACCESS_KEY_ID = secret_aws_s3_key_id
+AWS_S3_SECRET_ACCESS_KEY = secret_aws_s3_key
+AWS_S3_REGION = aws_s3_region_name
+AWS_S3_BUCKET_NAME = secret_aws_s3_bucket_name
+
 # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/retries.html#standard-retry-mode
 AWS_RETRY_MODE: Final = "standard"
 # base 2 exponential backoff with max of 20 seconds:
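
The four new lookups reuse the project's existing secret() helper; passing None as the default makes each S3 secret optional, so a missing value degrades to an unconfigured client rather than a startup crash. A minimal stand-in for the helper, assuming plain environment variables as the backing store (the real app may read a bound cloud.gov service instead):

```python
import os

_UNSET = object()

def secret(key, default=_UNSET):
    # Stand-in only: illustrates the contract, not the project's implementation.
    if default is _UNSET:
        return os.environ[key]  # required secret: raises KeyError if absent
    return os.environ.get(key, default)  # optional secret: falls back to default

print(secret("AWS_S3_BUCKET_NAME", None))  # None when the secret is unset
```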

View file

@@ -4,6 +4,7 @@ import os

 from django.core.management import BaseCommand
 from registrar.utility import csv_export
+from registrar.utility.s3_bucket import S3ClientHelper

 logger = logging.getLogger(__name__)
@@ -23,20 +24,32 @@ class Command(BaseCommand):
     def handle(self, **options):
         """Grabs the directory then creates current-federal.csv in that directory"""
+        file_name = "current-federal.csv"
         # Ensures a slash is added
         directory = os.path.join(options.get("directory"), "")
         check_path = options.get("checkpath")

         logger.info("Generating report...")
-        self.generate_current_federal_report(directory, check_path)
-        logger.info(f"Success! Created {directory}current-federal.csv")
+        try:
+            self.generate_current_federal_report(directory, file_name, check_path)
+        except Exception as err:
+            # TODO - #1317: Notify operations when auto report generation fails
+            raise err
+        else:
+            logger.info(f"Success! Created {file_name}")

-    def generate_current_federal_report(self, directory, check_path):
-        """Creates a current-full.csv file under the specified directory"""
-        # TODO - #1403, push to the S3 instance instead
-        file_path = os.path.join(directory, "current-federal.csv")
+    def generate_current_federal_report(self, directory, file_name, check_path):
+        """Creates a current-full.csv file under the specified directory,
+        then uploads it to a AWS S3 bucket"""
+        s3_client = S3ClientHelper()
+        file_path = os.path.join(directory, file_name)
+
+        # Generate a file locally for upload
         with open(file_path, "w") as file:
             csv_export.export_data_federal_to_csv(file)
+
         if check_path and not os.path.exists(file_path):
             raise FileNotFoundError(f"Could not find newly created file at '{file_path}'")
+
+        # Upload this generated file for our S3 instance
+        s3_client.upload_file(file_path, file_name)
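
Assuming the command is picked up by the usual manage.py discovery, it can also be driven programmatically; the option names below are inferred from the options.get() calls above:

```python
# Hedged usage sketch: generates migrationdata/current-federal.csv locally,
# then uploads it to the configured S3 bucket.
from django.core.management import call_command

call_command("generate_current_federal_report", directory="migrationdata", checkpath=True)
```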

View file

@@ -28,23 +28,29 @@ class Command(BaseCommand):
         # Ensures a slash is added
         directory = os.path.join(options.get("directory"), "")
         check_path = options.get("checkpath")

         logger.info("Generating report...")
-        self.generate_current_full_report(directory, file_name, check_path)
-
-        file_path = os.path.join(directory, file_name)
-        logger.info(f"Success! Created {file_path}")
+        try:
+            self.generate_current_full_report(directory, file_name, check_path)
+        except Exception as err:
+            # TODO - #1317: Notify operations when auto report generation fails
+            raise err
+        else:
+            logger.info(f"Success! Created {file_name}")

     def generate_current_full_report(self, directory, file_name, check_path):
-        """Creates a current-full.csv file under the specified directory"""
+        """Creates a current-full.csv file under the specified directory,
+        then uploads it to a AWS S3 bucket"""
         s3_client = S3ClientHelper()
-        # TODO - #1403, push to the S3 instance instead
         file_path = os.path.join(directory, file_name)
-        # TODO - Don't genererate a useless file
+
+        # Generate a file locally for upload
         with open(file_path, "w") as file:
             csv_export.export_data_full_to_csv(file)
+
         if check_path and not os.path.exists(file_path):
             raise FileNotFoundError(f"Could not find newly created file at '{file_path}'")
+
+        # Upload this generated file for our S3 instance
         s3_client.upload_file(file_path, file_name)
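
Both commands now share the same try/except/else shape, and the else clause is what keeps the success log honest: it runs only when the try block raised nothing. A self-contained sketch of the pattern (notify_operations is a hypothetical stand-in for the #1317 TODO):

```python
def notify_operations(err):
    # Hypothetical stand-in for the #1317 TODO; a real hook might page on-call.
    print(f"Report generation failed: {err}")

def run_report(generate):
    try:
        generate()
    except Exception as err:
        notify_operations(err)
        raise
    else:
        # Reached only if generate() raised nothing, so a failed
        # generation or upload can never log success.
        print("Success!")

run_report(lambda: None)  # prints "Success!"
```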

View file

@@ -19,8 +19,8 @@ class S3ClientHelper:
                 "s3",
                 region_name=settings.AWS_S3_REGION,
                 aws_access_key_id=settings.AWS_S3_ACCESS_KEY_ID,
-                aws_secret_access_key=settings.AWS_S3_SECRET_ACCESS_KEY
-                #config=settings.BOTO_CONFIG,
+                aws_secret_access_key=settings.AWS_S3_SECRET_ACCESS_KEY,
+                config=settings.BOTO_CONFIG,
             )
         except Exception as exc:
             raise S3ClientError("Could not access the S3 client.") from exc
@@ -49,9 +49,10 @@ class S3ClientHelper:
         """Gets a file to our S3 instance and returns the file content"""
         try:
             response = self.boto_client.get_object(Bucket=self.get_bucket_name(), Key=file_name)
+        except self.boto_client.exceptions.NoSuchKey as exc:
+            raise S3ClientError("File was not found") from exc
         except Exception as exc:
-            raise S3ClientError("Couldn't get file") from exc
+            raise S3ClientError("Couldn't get file, an unspecified error occured") from exc
         file_content = response["Body"].read()
         if decode_to_utf:
             return file_content.decode("utf-8")
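
Taken together, the helper is a thin wrapper over standard boto3 calls; a condensed sketch of the read path under the settings added above (bucket name and credentials are placeholders, and the Config mirrors the AWS_RETRY_MODE setting):

```python
import boto3
from botocore.config import Config

client = boto3.client(
    "s3",
    region_name="us-gov-west-1",  # settings.AWS_S3_REGION
    aws_access_key_id="placeholder-key-id",  # settings.AWS_S3_ACCESS_KEY_ID
    aws_secret_access_key="placeholder-secret",  # settings.AWS_S3_SECRET_ACCESS_KEY
    config=Config(retries={"mode": "standard"}),  # mirrors AWS_RETRY_MODE
)

try:
    body = client.get_object(Bucket="placeholder-bucket", Key="current-full.csv")["Body"].read()
except client.exceptions.NoSuchKey:
    # The dedicated NoSuchKey branch is what lets S3ClientHelper raise
    # "File was not found" instead of a generic failure.
    body = None
```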