Closed · gsa-suk closed 1 month ago
Start (UTC): 2024-10-29 9:09 PM
End (UTC): 2024-10-29 10:34 PM

Explanation

1. Copy sac files to Prod S3.

cf target -s production
export PROD_SERVICE_INSTANCE_NAME=fac-private-s3
export PROD_KEY_NAME=sk-fac-private-s3
cf create-service-key "${PROD_SERVICE_INSTANCE_NAME}" "${PROD_KEY_NAME}"
export PROD_S3_CREDENTIALS=$(cf service-key "${PROD_SERVICE_INSTANCE_NAME}" "${PROD_KEY_NAME}" | tail -n +2)
export PROD_AWS_ACCESS_KEY_ID=$(echo "${PROD_S3_CREDENTIALS}" | jq -r '.credentials.access_key_id')
export PROD_AWS_SECRET_ACCESS_KEY=$(echo "${PROD_S3_CREDENTIALS}" | jq -r '.credentials.secret_access_key')
export PROD_BUCKET_NAME=$(echo "${PROD_S3_CREDENTIALS}" | jq -r '.credentials.bucket')
export PROD_URI=$(echo "${PROD_S3_CREDENTIALS}" | jq -r '.credentials.uri')
export PROD_AWS_DEFAULT_REGION=$(echo "${PROD_S3_CREDENTIALS}" | jq -r '.credentials.region')
export AWS_ACCESS_KEY_ID=$PROD_AWS_ACCESS_KEY_ID
export AWS_SECRET_ACCESS_KEY=$PROD_AWS_SECRET_ACCESS_KEY
export AWS_DEFAULT_REGION=$PROD_AWS_DEFAULT_REGION
echo $PROD_BUCKET_NAME
aws s3 ls $PROD_BUCKET_NAME
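The tail -n +2 above drops the human-readable header that cf service-key prints before its JSON payload, so jq sees clean JSON. For reference, a minimal Python sketch of the same credential extraction (the helper name is hypothetical; it assumes the cf CLI is on PATH and Python 3.7+):

import json
import subprocess

def service_key_credentials(instance_name, key_name):
    # cf prints a "Getting key ..." header before the JSON payload,
    # so parse from the first "{" onward instead of counting lines.
    out = subprocess.run(
        ["cf", "service-key", instance_name, key_name],
        check=True, capture_output=True, text=True,
    ).stdout
    return json.loads(out[out.index("{"):])["credentials"]

creds = service_key_credentials("fac-private-s3", "sk-fac-private-s3")
print(creds["bucket"], creds["region"])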
Copy files to PROD S3
ls
python ./copy_316_275K_sac_files.py $PROD_BUCKET_NAME
aws s3 ls $PROD_BUCKET_NAME
aws s3 ls $PROD_BUCKET_NAME/sac-data-load-for-prod/
cf delete-service-key "${PROD_SERVICE_INSTANCE_NAME}" "${PROD_KEY_NAME}"
cf t -s dev
Note: The above command switches you from Prod to Dev.
copy_316_275K_sac_files.py
import os
import subprocess
import sys


def copy_316_275K_sac_files_to_s3(bucket_name):
    # Log the working directory and target bucket for traceability.
    print("directory = ", os.getcwd())
    print("bucket_name = ", bucket_name)

    # Copy 316 sac
    filename = "../../Load_Migrated_audit_tables_to_Prod/316_275K_sac_w_Prod_user/load_files/sac316.sql"
    s3_filename = "s3://" + bucket_name + "/sac-data-load-for-prod/sac316.sql"
    cmd = ["aws", "s3", "cp", filename, s3_filename]
    result = subprocess.call(cmd)
    print("cmd = ", cmd)
    print(f"result = {result}")

    # Copy 275K sac chunk files (chunks 1..22)
    for file_number in range(1, 23):
        filename = (
            "../../Load_Migrated_audit_tables_to_Prod/316_275K_sac_w_Prod_user/load_files/sac275K_"
            + str(file_number) + ".sql"
        )
        s3_filename = (
            "s3://" + bucket_name + "/sac-data-load-for-prod/sac275K_"
            + str(file_number) + ".sql"
        )
        cmd = ["aws", "s3", "cp", filename, s3_filename]
        result = subprocess.call(cmd)
        print("cmd = ", cmd)
        print(f"result = {result}")


if __name__ == "__main__":
    copy_316_275K_sac_files_to_s3(sys.argv[1])
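The script shells out to the aws CLI once per file. An equivalent in-process loop with boto3 would look like the sketch below; this is a hypothetical alternative, not what was run, and it assumes boto3 is installed and picks up the AWS_* variables exported above:

import boto3

def upload_sac_files(bucket_name):
    # boto3 reads AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY /
    # AWS_DEFAULT_REGION from the environment set earlier.
    s3 = boto3.client("s3")
    load_dir = "../../Load_Migrated_audit_tables_to_Prod/316_275K_sac_w_Prod_user/load_files"
    names = ["sac316.sql"] + [f"sac275K_{n}.sql" for n in range(1, 23)]
    for name in names:
        s3.upload_file(f"{load_dir}/{name}", bucket_name, f"sac-data-load-for-prod/{name}")
        print(f"uploaded {name}")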
2. Download cog/over tables from Prod backups to GFE.

cf target -s production
export PROD_SERVICE_INSTANCE_NAME=backups
export PROD_KEY_NAME=sk-backups-s3
cf create-service-key "${PROD_SERVICE_INSTANCE_NAME}" "${PROD_KEY_NAME}"
export PROD_S3_CREDENTIALS=$(cf service-key "${PROD_SERVICE_INSTANCE_NAME}" "${PROD_KEY_NAME}" | tail -n +2)
export PROD_AWS_ACCESS_KEY_ID=$(echo "${PROD_S3_CREDENTIALS}" | jq -r '.credentials.access_key_id')
export PROD_AWS_SECRET_ACCESS_KEY=$(echo "${PROD_S3_CREDENTIALS}" | jq -r '.credentials.secret_access_key')
export PROD_BUCKET_NAME=$(echo "${PROD_S3_CREDENTIALS}" | jq -r '.credentials.bucket')
export PROD_URI=$(echo "${PROD_S3_CREDENTIALS}" | jq -r '.credentials.uri')
export PROD_AWS_DEFAULT_REGION=$(echo "${PROD_S3_CREDENTIALS}" | jq -r '.credentials.region')
export AWS_ACCESS_KEY_ID=$PROD_AWS_ACCESS_KEY_ID
export AWS_SECRET_ACCESS_KEY=$PROD_AWS_SECRET_ACCESS_KEY
export AWS_DEFAULT_REGION=$PROD_AWS_DEFAULT_REGION
aws --help
aws s3 ls s3://${PROD_BUCKET_NAME}/
aws s3 ls s3://${PROD_BUCKET_NAME}/backups/scheduled/
aws s3 ls s3://${PROD_BUCKET_NAME}/backups/scheduled/10-29-22/
aws s3 cp s3://$PROD_BUCKET_NAME/backups/scheduled/10-29-22/public-support_cognizantbaseline.dump .
aws s3 cp s3://$PROD_BUCKET_NAME/backups/scheduled/10-29-22/public-support_cognizantassignment.dump .
aws s3 cp s3://$PROD_BUCKET_NAME/backups/scheduled/10-29-22/public-dissemination_general.dump .
aws s3 cp s3://$PROD_BUCKET_NAME/backups/scheduled/10-29-22/public-dissemination_federalaward.dump .
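For reference, the same four downloads in boto3 (a sketch under the same assumptions as the upload example above; PROD_BUCKET_NAME must be exported in the environment):

import os
import boto3

s3 = boto3.client("s3")
bucket = os.environ["PROD_BUCKET_NAME"]
prefix = "backups/scheduled/10-29-22/"
dumps = [
    "public-support_cognizantbaseline.dump",
    "public-support_cognizantassignment.dump",
    "public-dissemination_general.dump",
    "public-dissemination_federalaward.dump",
]
for dump in dumps:
    # download_file(Bucket, Key, Filename) writes to the current directory.
    s3.download_file(bucket, prefix + dump, dump)
    print(f"downloaded {dump}")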
cf delete-service-key "${PROD_SERVICE_INSTANCE_NAME}" "${PROD_KEY_NAME}"
cf t -s dev
Note: The above command switches you from Prod to Dev.

Acknowledged. Thank you!