diff --git a/terraform/environments/ccms-ebs-upgrade/application_variables.json b/terraform/environments/ccms-ebs-upgrade/application_variables.json
index 9686ef15a1d..819beef0dc6 100644
--- a/terraform/environments/ccms-ebs-upgrade/application_variables.json
+++ b/terraform/environments/ccms-ebs-upgrade/application_variables.json
@@ -151,7 +151,7 @@
     "ec2_oracle_instance_threads_accessgate": "2",
     "ami_owner": "self",
     "ec2_instance_type_ftp": "c5d.large",
-    "ftp_ami_id": "ami-09f731dab809ea9df",
+    "ftp_ami_id": "ami-08cd358d745620807",
     "ec2_instance_type_clamav": "c5d.large",
     "clamav_ami_id": "ami-02cb9c4732e6429dd",
     "ebsdb_ami_id": "ami-014b2ba6362f16b1e",
@@ -171,14 +171,14 @@
     "ec2_instance_type_mailrelay": "c4.large",
     "mailrelay_ami_id": "ami-02654187c4eaddcc0",
     "key_name": "",
-    "lz_aws_account_id_env": "140455166311",
+    "lz_aws_account_id_env": "013163512034",
     "lz_aws_subnet_env": "10.203.0.0/20",
     "lz_aws_workspace_nonprod_subnet_env": "10.200.0.0/20",
     "lz_aws_workspace_prod_subnet_env": "10.200.16.0/20",
     "lz_aws_workspace_nonprod_prod": "10.200.0.0/19",
     "lz_aws_appstream_subnet_a_b": "10.200.32.0/23",
     "cloud_platform_subnet": "172.20.0.0/16",
-    "lz_ftp_bucket_environment": "uat",
+    "lz_ftp_bucket_environment": "test",
     "lz_domain_name": "*.uat.legalservices.gov.uk",
     "mp_aws_subnet_env": "10.200.0.0/20",
     "ebs_optimized": "false",
@@ -246,6 +246,7 @@
     "instance_role_ebsapps": "apps",
     "instance_role_ebsconc": "conc",
     "instance_role_ebsdb": "db",
+    "instance_role_ftp": "ftp",
     "instance_role_webgate": "webgate"
   }
 },
diff --git a/terraform/environments/ccms-ebs-upgrade/ccms-ec2-ftp-sg.tf b/terraform/environments/ccms-ebs-upgrade/ccms-ec2-ftp-sg.tf
new file mode 100644
index 00000000000..cc24d607d24
--- /dev/null
+++ b/terraform/environments/ccms-ebs-upgrade/ccms-ec2-ftp-sg.tf
@@ -0,0 +1,99 @@
+# Security Group for FTP Server
+
+resource "aws_security_group" "ec2_sg_ftp" {
+  count       = local.is-test ? 1 : 0
+  name        = "ec2_sg_ftp"
+  description = "Security Group for FTP Server"
+  vpc_id      = data.aws_vpc.shared.id
+  tags = merge(local.tags,
+    { Name = lower(format("sg-%s-%s-FTP", local.application_name, local.environment)) }
+  )
+}
+
+# INGRESS Rules
+
+### FTP
+
+resource "aws_security_group_rule" "ingress_traffic_ftp_20" {
+  count             = local.is-test ? 1 : 0
+  security_group_id = aws_security_group.ec2_sg_ftp[count.index].id
+  type              = "ingress"
+  description       = "FTP"
+  protocol          = "TCP"
+  from_port         = 20
+  to_port           = 21
+  cidr_blocks = [data.aws_vpc.shared.cidr_block,
+  local.application_data.accounts[local.environment].lz_aws_subnet_env]
+}
+
+### FTP Passive Ports
+
+resource "aws_security_group_rule" "ingress_traffic_ftp_3000" {
+  count             = local.is-test ? 1 : 0
+  security_group_id = aws_security_group.ec2_sg_ftp[count.index].id
+  type              = "ingress"
+  description       = "FTP Passive Ports"
+  protocol          = "TCP"
+  from_port         = 3000
+  to_port           = 3010
+  cidr_blocks = [data.aws_vpc.shared.cidr_block,
+  local.application_data.accounts[local.environment].lz_aws_subnet_env]
+}
+
+### SSH
+
+resource "aws_security_group_rule" "ingress_traffic_ftp_22" {
+  count             = local.is-test ? 1 : 0
+  security_group_id = aws_security_group.ec2_sg_ftp[count.index].id
+  type              = "ingress"
+  description       = "SSH"
+  protocol          = "TCP"
+  from_port         = 22
+  to_port           = 22
+  cidr_blocks = [data.aws_vpc.shared.cidr_block,
+  local.application_data.accounts[local.environment].lz_aws_subnet_env]
+}
+
+
+
+# EGRESS Rules
+
+### FTP
+
+resource "aws_security_group_rule" "egress_traffic_ftp_20" {
+  count             = local.is-test ? 1 : 0
+  security_group_id = aws_security_group.ec2_sg_ftp[count.index].id
+  type              = "egress"
+  description       = "FTP"
+  protocol          = "TCP"
+  from_port         = 20
+  to_port           = 21
+  cidr_blocks = [data.aws_vpc.shared.cidr_block,
+  local.application_data.accounts[local.environment].lz_aws_subnet_env]
+}
+
+### SSH
+
+resource "aws_security_group_rule" "egress_traffic_ftp_22" {
+  count             = local.is-test ? 1 : 0
+  security_group_id = aws_security_group.ec2_sg_ftp[count.index].id
+  type              = "egress"
+  description       = "SSH"
+  protocol          = "TCP"
+  from_port         = 22
+  to_port           = 22
+  cidr_blocks       = ["0.0.0.0/0"]
+}
+
+### HTTPS
+
+resource "aws_security_group_rule" "egress_traffic_ftp_443" {
+  count             = local.is-test ? 1 : 0
+  security_group_id = aws_security_group.ec2_sg_ftp[count.index].id
+  type              = "egress"
+  description       = "HTTPS"
+  protocol          = "TCP"
+  from_port         = 443
+  to_port           = 443
+  cidr_blocks       = ["0.0.0.0/0"]
+}
diff --git a/terraform/environments/ccms-ebs-upgrade/ccms-ec2-ftp.tf b/terraform/environments/ccms-ebs-upgrade/ccms-ec2-ftp.tf
new file mode 100644
index 00000000000..242a9b0f6e1
--- /dev/null
+++ b/terraform/environments/ccms-ebs-upgrade/ccms-ec2-ftp.tf
@@ -0,0 +1,105 @@
+resource "aws_instance" "ec2_ftp" {
+  count                       = local.is-test ? 1 : 0
+  instance_type               = local.application_data.accounts[local.environment].ec2_instance_type_ftp
+  ami                         = local.application_data.accounts[local.environment].ftp_ami_id
+  key_name                    = local.application_data.accounts[local.environment].key_name
+  vpc_security_group_ids      = [aws_security_group.ec2_sg_ftp[count.index].id]
+  subnet_id                   = data.aws_subnet.private_subnets_a.id
+  monitoring                  = true
+  ebs_optimized               = false
+  associate_public_ip_address = false
+  iam_instance_profile        = aws_iam_instance_profile.iam_instace_profile_ccms_base.name
+
+  # Terraform has a bug that makes it want to rebuild the EC2 instance when
+  # more than one EBS block device is attached, so we need the lifecycle
+  # clause below.
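+  # ignore_changes covers the block devices and user_data: Terraform will not
+  # plan a replacement when those attributes drift, so any later change to the
+  # volumes or to user_data has to be applied outside Terraform (or by
+  # deliberately tainting the instance).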
+  lifecycle {
+    ignore_changes = [
+      ebs_block_device,
+      root_block_device,
+      user_data,
+      user_data_replace_on_change
+    ]
+  }
+  user_data_replace_on_change = false
+  user_data = base64encode(templatefile("./templates/ec2_user_data_ftp.sh", {
+    environment               = "${local.environment}"
+    lz_aws_account_id_env     = "${local.application_data.accounts[local.environment].lz_aws_account_id_env}"
+    lz_ftp_bucket_environment = "${local.application_data.accounts[local.environment].lz_ftp_bucket_environment}"
+    hostname                  = "ftp"
+  }))
+
+  metadata_options {
+    http_endpoint = "enabled"
+    http_tokens   = "required"
+  }
+
+  # Increase the volume size of the root volume
+  root_block_device {
+    volume_type = "gp3"
+    volume_size = 50
+    iops        = 3000
+    encrypted   = true
+    kms_key_id  = data.aws_kms_key.ebs_shared.key_id
+    tags = merge(local.tags,
+      { Name = lower(format("%s-%s", local.application_data.accounts[local.environment].instance_role_ftp, "root")) },
+      { device-name = "/dev/sda1" }
+    )
+  }
+
+  ebs_block_device {
+    device_name = "/dev/sdb"
+    volume_type = "gp3"
+    volume_size = 20
+    // iops     = 12000
+    encrypted  = true
+    kms_key_id = data.aws_kms_key.ebs_shared.key_id
+    tags = merge(local.tags,
+      { Name = lower(format("%s-%s", local.application_data.accounts[local.environment].instance_role_ftp, "ftp")) },
+      { device-name = "/dev/sdb" }
+    )
+  }
+
+  tags = merge(local.tags,
+    { Name = lower(format("ec2-%s-%s-FTP", local.application_name, local.environment)) },
+    { instance-role = local.application_data.accounts[local.environment].instance_role_ftp },
+    { backup = "true" }
+  )
+
+  depends_on = [aws_security_group.ec2_sg_ftp]
+}
+
+module "cw-ftp-ec2" {
+  count  = local.is-test ? 1 : 0
+  source = "./modules/cw-ec2"
+
+  short_env    = local.application_data.accounts[local.environment].short_env
+  name         = "ec2-ftp"
+  topic        = aws_sns_topic.cw_alerts.arn
+  instanceId   = aws_instance.ec2_ftp[count.index].id
+  imageId      = local.application_data.accounts[local.environment].ftp_ami_id
+  instanceType = local.application_data.accounts[local.environment].ec2_instance_type_ftp
+  fileSystem   = "xfs"       # Linux root filesystem
+  rootDevice   = "nvme0n1p1" # This is used by default for root on all the ec2 images
+
+  cpu_eval_periods = local.application_data.cloudwatch_ec2.cpu.eval_periods
+  cpu_datapoints   = local.application_data.cloudwatch_ec2.cpu.eval_periods
+  cpu_period       = local.application_data.cloudwatch_ec2.cpu.period
+  cpu_threshold    = local.application_data.cloudwatch_ec2.cpu.threshold
+
+  mem_eval_periods = local.application_data.cloudwatch_ec2.mem.eval_periods
+  mem_datapoints   = local.application_data.cloudwatch_ec2.mem.eval_periods
+  mem_period       = local.application_data.cloudwatch_ec2.mem.period
+  mem_threshold    = local.application_data.cloudwatch_ec2.mem.threshold
+
+  disk_eval_periods = local.application_data.cloudwatch_ec2.disk.eval_periods
+  disk_datapoints   = local.application_data.cloudwatch_ec2.disk.eval_periods
+  disk_period       = local.application_data.cloudwatch_ec2.disk.period
+  disk_threshold    = local.application_data.cloudwatch_ec2.disk.threshold
+
+  insthc_eval_periods = local.application_data.cloudwatch_ec2.insthc.eval_periods
+  insthc_period       = local.application_data.cloudwatch_ec2.insthc.period
+  insthc_threshold    = local.application_data.cloudwatch_ec2.insthc.threshold
+
+  syshc_eval_periods = local.application_data.cloudwatch_ec2.syshc.eval_periods
+  syshc_period       = local.application_data.cloudwatch_ec2.syshc.period
+  syshc_threshold    = local.application_data.cloudwatch_ec2.syshc.threshold
+}
diff --git a/terraform/environments/ccms-ebs-upgrade/ccms-secrets.tf b/terraform/environments/ccms-ebs-upgrade/ccms-secrets.tf
new file mode 100644
index 00000000000..946aa9d367f
--- /dev/null
+++ b/terraform/environments/ccms-ebs-upgrade/ccms-secrets.tf
@@ -0,0 +1,11 @@
+#### This file can be used to store secrets specific to the member account ####
+
+resource "aws_secretsmanager_secret" "secret_ftp_s3" {
+  count       = local.is-test ? 1 : 0
+  name        = "ftp-s3-${local.environment}-aws-key"
+  description = "AWS credentials used by the FTP service to mount the S3 buckets it needs to access"
+
+  tags = merge(local.tags,
+    { Name = "ftp-s3-${local.environment}-aws-key" }
+  )
+}
diff --git a/terraform/environments/ccms-ebs-upgrade/r53.tf b/terraform/environments/ccms-ebs-upgrade/r53.tf
index d34114fd664..523f161dcd4 100644
--- a/terraform/environments/ccms-ebs-upgrade/r53.tf
+++ b/terraform/environments/ccms-ebs-upgrade/r53.tf
@@ -67,4 +67,29 @@ resource "aws_route53_record" "webgate_ec2" {
   type    = "A"
   ttl     = 300
   records = [aws_instance.ec2_webgate[count.index].private_ip]
-}
\ No newline at end of file
+}
+
+## EBSWEBGATE LB DNS
+resource "aws_route53_record" "ebswgate" {
+  provider = aws.core-vpc
+  zone_id  = data.aws_route53_zone.external.zone_id
+  name     = "portal-ag-upgrade.${var.networking[0].business-unit}-${local.environment}.modernisation-platform.service.justice.gov.uk"
+  type     = "A"
+  alias {
+    name                   = aws_lb.webgate_lb.dns_name
+    zone_id                = aws_lb.webgate_lb.zone_id
+    evaluate_target_health = false
+  }
+}
+
+
+## FTP
+resource "aws_route53_record" "ftp" {
+  count    = local.is-test ? 1 : 0
+  provider = aws.core-vpc
+  zone_id  = data.aws_route53_zone.external.zone_id
+  name     = "ftp-upgrade.${var.networking[0].business-unit}-${local.environment}.modernisation-platform.service.justice.gov.uk"
+  type     = "A"
+  ttl      = 300
+  records  = [aws_instance.ec2_ftp[0].private_ip]
+}
diff --git a/terraform/environments/ccms-ebs-upgrade/scripts/cron-ftp-15-v2.ksh b/terraform/environments/ccms-ebs-upgrade/scripts/cron-ftp-15-v2.ksh
new file mode 100644
index 00000000000..452afa6460e
--- /dev/null
+++ b/terraform/environments/ccms-ebs-upgrade/scripts/cron-ftp-15-v2.ksh
@@ -0,0 +1,44 @@
+#
+# This job runs out of cron and sequentially runs jobs every 15 minutes.
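+#
+# An illustrative crontab entry for this script (an assumption for
+# illustration only; the real schedule lives on the host, not in this file):
+# 0,15,30,45 * * * * /export/home/aebsprod/scripts/cron-ftp-15-v2.ksh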
+#
+# RBS Outbound
+#/export/home/aebsprod/scripts/curl-ftp-v2.ksh 001
+#
+# Allpay Outbound
+/export/home/aebsprod/scripts/curl-ftp-v2.ksh 002
+#
+# Eckoh Outbound
+/export/home/aebsprod/scripts/curl-ftp-v2.ksh 003
+#
+# Rossendales Outbound
+/export/home/aebsprod/scripts/curl-ftp-v2.ksh 004
+#
+# TDX Outbound
+# added this script to cope with files owned by oebsprod and unix2dos them
+# psb 14sep2016
+/export/home/aebsprod/scripts/unix2dos.ksh
+/export/home/aebsprod/scripts/curl-ftp-v2.ksh 008
+#
+# Microgen Bacway Outbound RBS
+/export/home/aebsprod/scripts/curl-ftp-v2.ksh 010
+#
+# RBS Inbound
+/export/home/aebsprod/scripts/curl-ftp-v2.ksh 011
+#
+# Citibank Inbound
+## js /export/home/aebsprod/scripts/curl-ftp-v2.ksh 012
+#
+# LFFramework Inbound
+## js /export/home/aebsprod/scripts/curl-ftp-v2.ksh 013
+#
+# Barclaycard Inbound
+## js /export/home/aebsprod/scripts/curl-ftp-v2.ksh 014
+#
+# Barclaycard Outbound
+## js /export/home/aebsprod/scripts/curl-ftp-v2.ksh 007
+#
+# Test Outbound
+##/export/home/aebsprod/scripts/curl-ftp-v2.ksh 098
+#
+# Test Inbound
+##/export/home/aebsprod/scripts/curl-ftp-v2.ksh 099
diff --git a/terraform/environments/ccms-ebs-upgrade/scripts/curl-ftp-v2.ksh b/terraform/environments/ccms-ebs-upgrade/scripts/curl-ftp-v2.ksh
new file mode 100644
index 00000000000..289326b21ca
--- /dev/null
+++ b/terraform/environments/ccms-ebs-upgrade/scripts/curl-ftp-v2.ksh
@@ -0,0 +1,632 @@
+#!/bin/ksh
+#
+################################################################################
+#
+# Author  : Ken Woods
+# Date    : 24th September 2012
+# Project : LSC IDP (Atos)
+# Version : 0.2
+# File    : curl-ftp.ksh
+#
+################################################################################
+#
+# This is a modification of the script written by Robin Soper called ftps_get.
+#
+# This script comes with a jobs file called 'curl-ftp.jobs.v2' that contains
+# the parameters required to send files to or retrieve files from another
+# server.
+#
+# The script is designed to move any files found in a source directory on one
+# server to a destination directory on another server. Only files are moved
+# and they can be pushed or pulled. Any directories that are found are ignored.
+#
+# After a file has been transferred, the sizes of the source and destination
+# files are compared, and only if they match is the source file deleted. If
+# they do not match then both files are retained and cURL diagnostic
+# information is written to the log. A failure of this nature does not
+# terminate the job and any other files will be transferred.
+#
+# Any other failure will terminate the run.
+#
+#
+################################################################################
+#
+# When       Who         Comments
+# ----       ---         --------
+# 24-09-2012 Ken Woods   Initial Version
+# 20160831: M.Irving: updating to handle files with spaces.
+# 20161024: M.Irving: adding facility to write errors to a log file so that it
+#         : can be monitored and used to cut an incident ticket.
+# 12mar2019 : psb : new version to handle S3 comms via Linux server
+# 25May2023 Sahid Khan  Slight adjustments to use with Linux instead of Solaris
+################################################################################
+
+################################################################################
+#
+# Variables
+#
+################################################################################
+
+BASE_DIR=/export/home/aebsprod/scripts  # The location of this script.
+CURL_TMP="${BASE_DIR}/curl-tmp"         # Directory for temporary cURL files.
+CURL="curl -k"                          # cURL executable.
+JOBS="${BASE_DIR}/ftp.jobs.v2"          # Parameter file containing jobs.
+CACERT=${BASE_DIR}/.ftps/cacert.crt     # SSL certificate for ftps.
+LOGFILE="${BASE_DIR}/curl-ftp_v2.log"   # Logfile for all actions performed.
+LOGLINE="------------------------------------------------------------"
+LOGTICKETFILE="${BASE_DIR}/curl-ftp-ticket.log"
+#
+# turn incident ticketing on or off
+#TICKETS=on
+TICKETS=off
+
+#
+# Set to true for debug on all functions or set to the names of one or more
+# functions that you want to debug.
+#
+DEBUG="false"
+#DEBUG="getRemoteFileList"
+
+################################################################################
+#
+# Functions
+#
+################################################################################
+
+#
+# Function:-   debug
+# Parameters:- $1 = calling function name
+#
+debug() {
+  # Check for functions to be debugged.
+  for debug_func in ${DEBUG} ; do
+    if [ "${debug_func}" = "true" ] ; then
+      return 0
+    elif [ "${debug_func}" = "$1" ] ; then
+      return 0
+    fi
+  done
+
+  # If we get here then no debugging is to be done.
+  return 1
+}
+
+#
+# Function:-   logerror
+# Parameters:- variable containing error text
+#
+# Print the error message to stdout and to a second logfile, which monitoring
+# reads in order to raise an incident ticket.
+#
+logerror() {
+print "\n$*\n"
+if [[ "$TICKETS" = on ]] ; then
+  print "$(date): Alert: server $(hostname):${0}: $* : Check $(hostname):$LOGFILE" >> $LOGTICKETFILE
+  /bin/rm ${CURL_TMP}/OK_${jobid}
+fi
+}
+
+#
+# Function:-   checkJobsFile
+# Parameters:- None
+#
+# Make sure we have the required parameter file.
+#
+checkJobsFile() {
+  # Check for debug requirement
+  debug checkJobsFile && set -x
+
+  if [[ ! -f ${JOBS} ]] ; then
+    message="The job file '${JOBS}' does not exist."
+    logerror $message
+    exit 1
+  fi
+}
+
+#
+# Function:-   checkJob
+# Parameters:- $1 Job number
+#
+# Check a Job Number was supplied and that it is valid.
+#
+checkJob() {
+  # Check for debug requirement
+  debug checkJob && set -x
+
+  if [[ -z ${1} ]] ; then
+    message="Usage:- ${0} <Job Number>"
+    logerror $message
+    exit 1
+  elif [[ `grep -wc ^${1} ${JOBS}` -eq 1 ]] ; then
+    print "Job Number : ${1}"
+    return
+  elif [[ `grep -wc ^${1} ${JOBS}` -eq 0 ]] ; then
+    message="Job Number : ${1} not found."
+    logerror $message
+    exit 1
+  elif [[ `grep -wc ^${1} ${JOBS}` -gt 1 ]] ; then
+    message="Job Number : ${1} found more than once in jobs file:"
+    logerror $message
+    print "\t${JOBS}\n"
+    grep -w ^${1} ${JOBS}
+    print "\n"
+    exit 1
+  fi
+}
+
+#
+# Function:-   getJob
+# Parameters:- $1 Job Number
+#
+# Load up the job parameters.
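+# Each record in ${JOBS} is a single comma-separated line in the field order
+# documented at the top of that file:
+#   JobID,Customer,Server,Local Dir,Remote Dir,Protocol,Port,Direction,Retain,Userid,Password
+# The cut calls below split the matched record into one variable per field,
+# and checkParam flags any field that is missing.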
+#
+getJob() {
+  # Check for debug requirement
+  debug getJob && set -x
+
+  param_check_fail=0
+
+  job="`grep -w ^${1} ${JOBS}`"
+
+  jobid=`echo ${job} | cut -d, -f1`
+
+  customer=`echo ${job} | cut -d, -f2`
+  checkParam "customer" "${customer}"
+
+  server=`echo ${job} | cut -d, -f3`
+  checkParam "server" "${server}"
+
+  local=`echo ${job} | cut -d, -f4`
+  checkParam "local" "${local}"
+
+  remote=`echo ${job} | cut -d, -f5`
+  checkParam "remote" "${remote}"
+
+  type=`echo ${job} | cut -d, -f6`
+  checkParam "type" "${type}"
+
+  port=`echo ${job} | cut -d, -f7`
+  checkParam "port" "${port}"
+
+  direction=`echo ${job} | cut -d, -f8`
+  checkParam "direction" "${direction}"
+
+  retain=`echo ${job} | cut -d, -f9`
+  checkParam "retain" "${retain}"
+
+  userid=`echo ${job} | cut -d, -f10`
+  checkParam "userid" "${userid}"
+
+  password=`echo ${job} | cut -d, -f11`
+  checkParam "password" "${password}"
+
+  if [[ param_check_fail -eq 1 ]] ; then
+    message="Job ${jobid} terminated due to missing parameter."
+    logerror $message
+    exit 1
+  else
+    print ""
+  fi
+}
+
+#
+# Function:-   checkParam
+# Parameters:- $1 Parameter name
+#              $2 Value
+#
+# Check to see if the variable is set and return 1 if not.
+#
+checkParam() {
+  # Check for debug requirement
+  debug checkParam && set -x
+
+  if [[ -z "${2}" ]] ; then
+    param_check_fail=1
+  fi
+
+  # If this is a password, don't print it.
+  [[ "${1}" = "password" ]] && return
+
+  printf "%10s : %s\n" ${1} ${2}
+}
+
+#
+# Function:-   getRemoteFileList
+# Parameters:- None
+#
+# Create a list of files in the remote directory and report any errors.
+#
+getRemoteFileList() {
+  # Check for debug requirement
+  debug getRemoteFileList && set -x
+
+  # Create the curl command to use. It is placed in ${curl_cmd}.
+  curlConnect ${type}
+
+  # Create temporary files for the remote file list and any errors.
+  print "Getting remote file information.\n${LOGLINE}"
+  curl_tmp=`mktemp ${CURL_TMP}/remote.XXXXXX`
+  curl_err=`mktemp ${CURL_TMP}/error.XXXXXX`
+
+  # Get the list of remote files.
+  ${CURL} -o ${curl_tmp} ${curl_cmd}/${remote} 2>${curl_err}
+  if [[ $? -ne 0 ]] ; then
+    message="Job ${jobid} Terminated getting list of remote files."
+    logerror $message
+    cat ${curl_err}
+    rm ${curl_tmp} ${curl_err}
+    exit 1
+  fi
+
+  # List the files found in the remote directory and create a list of
+  # files and sizes.
+  egrep -v '^d|<DIR>' ${curl_tmp}
+  if grep -q '220 Microsoft FTP Service' ${curl_err} ; then
+    unset remote_file_list; egrep -v '^d|<DIR>' ${curl_tmp}|while read date time size fl;
+    do
+      [[ -z "${remote_file_list}" ]] && remote_file_list="$size $fl" || remote_file_list="$remote_file_list
+$size $fl"
+    done
+  else
+    remote_file_list="$(cat ${curl_tmp} | awk '/^-/ {print $5, $9}')"
+  fi
+
+  # Check to see if we have a directory called backup.
+  if egrep '^d|<DIR>' ${curl_tmp} | \
+    grep -qw BACKUP ; then
+    backup=BACKUP
+  else
+    backup=""
+  fi
+
+  # Tidy up temporary files.
+  rm ${curl_tmp} ${curl_err}
+}
+
+#
+# Function:-   getLocalFileList
+# Parameters:- $1 Local directory
+#
+# Check the directory exists and then create a list of files and sizes.
+#
+getLocalFileList() {
+  # Check for debug requirement
+  debug getLocalFileList && set -x
+
+  # Check that the directory exists.
+  if [[ ! -d ${1} ]] ; then
+    message="Job ${jobid} Terminated Local directory does not exist :${1}"
+    logerror $message
+    print "Job Terminated."
+    exit 1
+  else
+    # Create list of files and sizes
+    print "Getting local file information.\n${LOGLINE}"
+    ls -l ${1} | grep '^-'
+    ls -1p ${1} | grep -v '\/$'| while read fl ; do
+      size=$(ls -l "${1}/${fl}" | awk '{print $5}')
+
+      # We don't want a blank first record.
+      if [[ -z "${local_file_list}" ]] ; then
+        local_file_list="${size} ${fl}"
+      else
+        local_file_list="${local_file_list}
+${size} ${fl}"
+      fi
+    done
+  fi
+}
+
+#
+# Function:-   curlConnect
+# Parameters:- $1 Type of transfer
+#
+# Create a command line for the requested service.
+#
+curlConnect() {
+  # Check for debug requirement
+  debug curlConnect && set -x
+
+  # Set verbose for cURL so we can display it if required.
+  opts="-v"
+
+  # Create curl command line relating to service.
+  case ${1} in
+    ftp)
+      curl_cmd="${opts} --ftp-skip-pasv-ip \
+      --user ${userid}:${password} ftp://${server}:${port}"
+      ;;
+    ftps)
+      curl_cmd="${opts} --ftp-ssl-control -k -v --ftp-ssl \
+      --user ${userid}:${password} ftps://${server}:${port}"
+      checkCacert
+      ;;
+    wftps)
+      curl_cmd="${opts} --ftp-ssl-reqd --cacert ${CACERT} \
+      --user ${userid}:${password} ftp://${server}:${port}"
+      checkCacert
+      ;;
+    *)
+      message="Job ${jobid} Terminated Unknown transport method : ${1}"
+      logerror $message
+      print "Job Terminated."
+      exit 1
+      ;;
+  esac
+}
+
+#
+# Function:-   checkCacert
+# Parameters:- None
+#
+# Check that the named cacert file exists.
+#
+checkCacert() {
+  # Check for debug requirement
+  debug checkCacert && set -x
+
+  if [[ ! -s ${CACERT} ]] ; then
+    message="Job ${jobid} Terminated No certificate file available for ftps."
+    logerror $message
+    print "Job Terminated."
+    exit 1
+  fi
+}
+
+#
+# Function:-   retrieveFiles
+# Parameters:- None
+#
+# Retrieve the files from the remote host.
+#
+retrieveFiles() {
+  # Check for debug requirement
+  debug retrieveFiles && set -x
+
+  # Move to local destination.
+  if [[ -d ${local} ]] ; then
+    cd ${local}
+  else
+    message="Job ${jobid} Terminated Local destination directory does not exist:${local}"
+    logerror $message
+    print "Job Terminated."
+    exit 1
+  fi
+
+  # Create temporary file for any errors.
+  curl_err=`mktemp ${CURL_TMP}/error.XXXXXX`
+
+  # Retrieve files, check sizes and then remove the remote copy.
+  print "\nRetrieving files.\n${LOGLINE}"
+  echo "${remote_file_list}" | \
+  while read r_size r_file ; do
+    ${CURL} -O ${curl_cmd}/${remote}/"${r_file}" 2>${curl_err}
+    if [[ $? -eq 0 ]] ; then
+      l_size=$(ls -l "${r_file}" | awk '{print $5}')
+      if [[ ${l_size} -eq ${r_size} ]] ; then
+        print "Received File : ${r_file}"
+        backupRemoveRemoteFile ${r_file}
+      else
+        print "\n\tFile Received : ${r_file}"
+        print "\n\tFile size incorrect : ${l_size}"
+        print "\n\tFile size should be : ${r_size}"
+      fi
+    else
+      print "File transfer failed for : ${r_file}"
+      cat ${curl_err}
+    fi
+  done
+
+  # Tidy up temporary files.
+  rm ${curl_err}
+}
+
+#
+# Function:-   backupRemoveRemoteFile
+# Parameters:- $1 Remote file name
+#
+# Remove the remote file.
+#
+backupRemoveRemoteFile() {
+  # Check for debug requirement
+  debug backupRemoveRemoteFile && set -x
+
+  # Create temporary files for the remote file list and any errors.
+  curl_tmp1=`mktemp ${CURL_TMP}/remote.XXXXXX`
+  curl_err1=`mktemp ${CURL_TMP}/error.XXXXXX`
+
+  if [[ "${retain}" = "delete" ]] ; then
+    # Delete remote file.
+    ${CURL} -o ${curl_tmp1} -Q "CWD ${remote}" -X "DELE ${r_file}" ${curl_cmd} 2>${curl_err1}
+    RETURN=$?
+    if [[ ${RETURN} -ne 0 && ${RETURN} -ne 19 ]] ; then
+      print "FAILED to delete remote file : ${r_file}"
+      cat ${curl_err1} ${curl_tmp1}
+    else
+      print "File removed from remote server."
+    fi
+  elif [[ "${retain}" = "retain" ]] ; then
+    # Create the backup directory if it does not exist.
+    if [[ "${backup}" != "BACKUP" ]] ; then
+      # Create the backup directory BACKUP if required.
+      ${CURL} -o ${curl_tmp1} -Q "CWD ${remote}" -Q "-MKD BACKUP" ${curl_cmd} 2>${curl_err1}
+      if [[ $? -ne 0 ]] ; then
+        print "Failed to create the backup directory: BACKUP"
+        cat ${curl_err1} ${curl_tmp1}
+      fi
+      cat /dev/null > ${curl_err1}
+      cat /dev/null > ${curl_tmp1}
+    fi
+
+    # Backup the remote file.
+    ${CURL} -o ${curl_tmp1} -Q "-RNFR ${remote}/${r_file}" -Q "-RNTO ${remote}/BACKUP/${r_file}" ${curl_cmd} 2>${curl_err1}
+    if [[ $? -ne 0 ]] ; then
+      print "Failed to move '${r_file}' to backup directory: BACKUP"
+      cat ${curl_err1} ${curl_tmp1}
+    else
+      print "File moved to backup directory: BACKUP"
+    fi
+  fi
+
+  # Tidy up temporary files.
+  rm ${curl_err1} ${curl_tmp1}
+}
+
+#
+# Function:-   sendFiles
+# Parameters:- None
+#
+# Check that the local and remote directories exist and then send the files
+# found in the local directory.
+#
+sendFiles() {
+  # Check for debug requirement
+  debug sendFiles && set -x
+
+  # Create the curl command to use. It is placed in ${curl_cmd}.
+  curlConnect ${type}
+
+  # Create temporary files for the remote file list and any errors.
+  curl_tmp=`mktemp ${CURL_TMP}/remote.XXXXXX`
+  curl_err=`mktemp ${CURL_TMP}/error.XXXXXX`
+
+  ${CURL} -o ${curl_tmp} ${curl_cmd}/${remote} 2>${curl_err}
+  if [[ $? -ne 0 ]] ; then
+    message="Job ${jobid} terminated getting remote destination directory:${remote}"
+    logerror $message
+    cat ${curl_tmp} ${curl_err}
+    print "Job Terminated."
+    rm ${curl_tmp} ${curl_err}
+    exit 1
+  fi
+
+  # Move to local destination.
+  if [[ -d ${local} ]] ; then
+    cd ${local}
+  else
+    message="Job ${jobid} Terminated Local source directory does not exist:${local}"
+    logerror $message
+    print "Job Terminated."
+    exit 1
+  fi
+
+  # Initialise temporary files.
+  cat /dev/null > ${curl_tmp}
+  cat /dev/null > ${curl_err}
+
+  # Send files, check sizes and then remove the local copy.
+  print "\nSending files.\n${LOGLINE}"
+
+  echo "${local_file_list}" | \
+  while read l_size l_file ; do
+    # Send file
+    ${CURL} -T "${l_file}" ${curl_cmd}/${remote} 2>${curl_err}
+    if [[ $? -eq 0 ]] ; then
+      # Get size of remote file.
+      ${CURL} -o ${curl_tmp} -I ${curl_cmd}/${remote}/"${l_file}" 2>${curl_err}
+      cat ${curl_tmp}
+      r_size=$(cat ${curl_tmp} | awk '/Content/ {print $2}' | tr -d \\015)
+      echo "RSize: ${r_size}"
+      echo "LSize: ${l_size}"
+      # Check sizes are equal and remove or backup the local file.
+      if [[ ${l_size} -eq ${r_size} ]] ; then
+        print "Sent File : ${l_file}"
+        backupRemoveLocalFile
+      else
+        print "\n\tFile Sent : ${l_file}"
+        print "\n\tFile size incorrect : ${r_size}"
+        print "\n\tFile size should be : ${l_size}"
+      fi
+    else
+      print "File transfer failed for : ${l_file}"
+      cat ${curl_tmp} ${curl_err}
+    fi
+  done
+
+  # Remove temporary files.
+  #rm ${curl_tmp} ${curl_err}
+}
+
+#
+# Function:-   backupRemoveLocalFile
+# Parameters:- None
+#
+# Depending on the value of ${retain}, remove the local file or place it in
+# the backup directory.
+#
+backupRemoveLocalFile() {
+  # Check for debug requirement
+  debug backupRemoveLocalFile && set -x
+
+  if [[ "${retain}" = "delete" ]] ; then
+    if rm ${local}/"${l_file}" ; then
+      print "File removed from local server."
+    else
+      print "Failed to delete local file : ${l_file}"
+    fi
+  elif [[ "${retain}" = "retain" ]] ; then
+    [[ ! -d ${local}/BACKUP ]] && mkdir ${local}/BACKUP
+    if mv ${local}/"${l_file}" ${local}/BACKUP/ ; then
+      print "File moved to BACKUP directory on local server."
+    else
+      print "Failed to move file to BACKUP directory on local server."
+    fi
+  else
+    message="Job ${jobid} Terminated Invalid parameter specified for 'Retain': ${retain}"
+    logerror $message
+    exit 1
+  fi
+}
+
+################################################################################
+#
+# Main Section
+#
+################################################################################
+
+# Log all output from the script to ${LOGFILE}
+# Comment out the line below if you want output to go to the screen.
+exec >> ${LOGFILE} 2>&1
+
+print "\n${LOGLINE}\nSTART : `date`\n${LOGLINE}"
+
+# Check that we have the required job file.
+checkJobsFile
+
+# Check the job number.
+checkJob $1
+
+# Get job parameters.
+getJob $1
+
+# Make sure that ${CURL_TMP} exists.
+if [[ ! -d ${CURL_TMP} ]] ; then
+  mkdir -p ${CURL_TMP}
+fi
+
+# Are we pushing or pulling?
+if [[ "${direction}" = "pull" ]] ; then
+  getRemoteFileList
+  if [[ "${remote_file_list}" = "" ]] ; then
+    print "No files to retrieve."
+  else
+    retrieveFiles
+  fi
+elif [[ "${direction}" = "push" ]] ; then
+  getLocalFileList ${local}
+  if [[ "${local_file_list}" = "" ]] ; then
+    print "No files to transfer."
+  else
+    sendFiles
+  fi
+else
+  message="Invalid parameter : ${direction}"
+  logerror $message
+  print "Job ${jobid} Terminated The direction parameter must be push or pull.\n"
+  print "Job Terminated."
+  exit 1
+fi
+
+if ! [[ -f ${CURL_TMP}/OK_${jobid} ]]; then
+  print "$(date): OK: ${0##*/}: Job $jobid" >> $LOGTICKETFILE
+  touch ${CURL_TMP}/OK_${jobid}
+fi
+print "\n${LOGLINE}\nFINISH: `date`\n${LOGLINE}"
diff --git a/terraform/environments/ccms-ebs-upgrade/scripts/ftp.jobs.v2 b/terraform/environments/ccms-ebs-upgrade/scripts/ftp.jobs.v2
new file mode 100644
index 00000000000..714f3b2ac37
--- /dev/null
+++ b/terraform/environments/ccms-ebs-upgrade/scripts/ftp.jobs.v2
@@ -0,0 +1,88 @@
+# # Each job is made up of a comma-separated list of parameters as defined below.
+#
+# JobID,Customer,Server,Local Dir,Remote Dir,Protocol,Port,Direction,Retain,Userid,Password
+#
+# Field descriptions:-
+# JobID      : The job number that is called with the cURL script.
+# Customer   : The Customer name. Not essential but provides a name for the transfer.
+# Server     : The name of the remote server. Make sure it can be resolved.
+# Local Dir  : The full path of the local directory relative to root.
+#              Note the last character must be a '/' to signify a directory.
+# Remote Dir : The full path of the remote directory relative to root.
+#              Note the last character must be a '/' to signify a directory.
+#              For Windows servers use the relative path from user login.
+#              For Unix servers use the full path from the '/' directory.
+# Protocol   : This can be any of the following:-
+#              ftp   - Unencrypted ftp.
+#              ftps  - Encrypted control channel with clear data channel.
+#              wftps - Encrypted control and data channels.
+# Port       : The port number to be used for the control connection.
+# Direction  : Are the files to be pushed or pulled. This can be 'push' or 'pull'.
+# Retain     : What to do on transfer (delete/retain). A retained file is moved to
+#              a sub directory called BACKUP.
+# UserID     : The user name on the remote server.
+# Password   : The password for the specified user. This is in plain text.
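+#
+# Worked example using job 002 from the Outbound section below:
+#   002,Allpay,ftp-prod,/export/home/aebsprod/Outbound/CCMS/Allpay/,/s3xfer/S3/laa-ccms-outbound-production/CCMS_PRD_Allpay/Outbound/,ftps,21,push,retain,s3xfer,
+# This pushes any files found in the local Allpay outbound directory to the
+# remote S3-backed directory over ftps on port 21 as user s3xfer, moving each
+# sent file into a BACKUP sub directory (retain). The trailing comma leaves
+# the Password field empty.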
+#
+#
+#  2   3   4   8   10   11   12   13   14   7
+#
+############################################################
+# Outbound
+############################################################
+#
+# RBS outbound traffic.
+#001,RBS,ftp-prod,/export/home/aebsprod/Outbound/CCMS/RBS/,/s3xfer/S3/laa-ccms-outbound-production/CCMS/Outbound/_RBS-Prod/,ftps,21,push,delete,s3xfer,
+#
+# Allpay outbound traffic from cdb1d1prod.
+002,Allpay,ftp-prod,/export/home/aebsprod/Outbound/CCMS/Allpay/,/s3xfer/S3/laa-ccms-outbound-production/CCMS_PRD_Allpay/Outbound/,ftps,21,push,retain,s3xfer,
+#
+# Eckoh outbound traffic from cdb1d1prod.
+003,Eckoh,ftp-prod,/export/home/aebsprod/Outbound/CCMS/Eckoh/,/s3xfer/S3/laa-ccms-outbound-production/CCMS_PRD_Eckoh/Outbound/,ftps,21,push,retain,s3xfer,
+#
+# Rossendales outbound traffic from cdb1d1prod.
+004,Rossendales,ftp-prod,/export/home/aebsprod/Outbound/CCMS/Rossendales/,/s3xfer/S3/laa-ccms-outbound-production/CCMS_PRD_Rossendales/Outbound/,ftps,21,push,retain,s3xfer,
+#
+# DST Transfer to replace IOS transfer. Goes to CDS (Central Print)
+005,DST,ftp-prod,/export/home/aebsprod/Outbound/CCMS/IOS/,/s3xfer/S3/laa-ccms-outbound-production/CCMS_PRD_DST/Outbound/,ftps,21,push,retain,s3xfer,
+#
+# DST transfer to replace IOS transfer of adhoc EDRMS files. Goes to CDS (Central Print)
+006,DST_PETEERBOROUGH,ftp-prod,/export/home/aebsprod/Outbound/CCMS/IOS/ADHOC/,/s3xfer/S3/laa-ccms-outbound-production/CCMS_PRD_DST/Outbound/Peterborough/,ftps,21,push,retain,s3xfer,
+#
+# Barclaycard outbound traffic from cdb1d1prod.
+#007,Barclaycard,ftp-prod,/export/home/aebsprod/Inbound/CCMS/Barclaycard/,/s3xfer/S3/laa-ccms-outbound-production/CCMS_PRD_Barclaycard/Outbound/,ftps,21,push,delete,s3xfer,
+#
+# TDX Outbound traffic from ftp-prod
+# psb 008,CCMS_TDX,ACP-LSC-FIL01,/export/home/aebsprod/Outbound/CCMS/TDX/,CCMS_PRD_TDX/Outbound/,ftp,21,push,retain,CCMS_PRD_TDX,
+008,CCMS_TDX,ftp-tdx,/export/home/aebsprod/Outbound/CCMS/TDX/,CCMS_PRD_TDX/Outbound/,ftp,21,push,retain,CCMS_PRD_TDX,
+#
+# Microgen Bacway Outbound RBS
+##010,Microgen,ACP-LAA-CAS02,/export/home/aebsprod/Outbound/CCMS/RBS/,Inbound/,wftps,3650,push,retain,Microgen,M1crog3n!!
+010,Microgen,ftp-prod,/export/home/aebsprod/Outbound/CCMS/RBS/,/s3xfer/S3/bacway-production-eu-west-2-842522700642/BACS/,ftps,21,push,retain,s3xfer,
+#
+# outbound test for new script
+#098,CCMS_TDX,ftp-prod,/export/home/aebsprod/scripts/testfiles/,/s3xfer/S3/laa-ccms-outbound-production/TESTING/,ftps,21,push,delete,s3xfer,
+#098,CCMS_TDX,ftp-prod,/export/home/aebsprod/scripts/testfiles/,/s3xfer/S3/laa-ccms-inbound-production/TESTING/,ftps,21,push,retain,s3xfer,
+##098,CCMS_TDX,ftp-prod,/export/home/aebsprod/scripts/testfiles/,/s3xfer/S3/bacway-production-eu-west-2-842522700642/TESTING/,ftps,21,push,retain,s3xfer,
+#
+#
+
+############################################################
+# Inbound
+############################################################
+# inbound test for new script
+099,CCMS_TDX,ftp-prod,/export/home/aebsprod/scripts/testfiles/,/s3xfer/S3/laa-ccms-inbound-production/TESTING/,ftps,21,pull,retain,s3xfer,
+#
+# RBS inbound traffic from ftp-prod.
+011,RBS,ftp-prod,/export/home/aebsprod/Inbound/CCMS/RBS/,/s3xfer/S3/laa-ccms-inbound-production/CCMS_PRD_RBS/Inbound/,ftps,21,pull,retain,s3xfer,
+#
+# Citibank inbound traffic from ftp-prod.
+### Discontinued 012,Citibank,ftp-prod,/export/home/aebsprod/Inbound/CCMS/Citibank/,/s3xfer/S3/laa-ccms-inbound-production/CCMS/Inbound/_Citibank-Prod/,ftps,990,pull,delete,s3xfer,
+#
+# LFFramework inbound traffic from ftp-prod.
+#013,LFFramework,ftp-prod,/export/home/aebsprod/Inbound/CCMS/LFFramework/,/s3xfer/S3/laa-ccms-inbound-production/CCMS/Inbound/_LFFramework-Prod/,ftps,21,pull,delete,s3xfer,
+#
+# Barclaycard inbound traffic from ftp-prod.
+#014,Barclaycard,ftp-prod,/export/home/aebsprod/Inbound/CCMS/Barclaycard/,/s3xfer/S3/laa-ccms-inbound-production/CCMS/Inbound/_Barclaycard-Prod/,ftps,21,pull,delete,s3xfer,
+#
+# PAYMENT FILE Inbound file for ios-temp payment files for Paragon from ftp-prod
+015,Payment_File_CDS,ftp-prod,/export/home/aebsprod/scripts/CDS/,/s3xfer/S3/laa-ccms-inbound-production/PAYMENT_FILES/Inbound/,ftps,21,pull,delete,s3xfer,
diff --git a/terraform/environments/ccms-ebs-upgrade/scripts/ftp_backup_housekeeping.bash b/terraform/environments/ccms-ebs-upgrade/scripts/ftp_backup_housekeeping.bash
new file mode 100644
index 00000000000..c0d8e432280
--- /dev/null
+++ b/terraform/environments/ccms-ebs-upgrade/scripts/ftp_backup_housekeeping.bash
@@ -0,0 +1,11 @@
+#!/bin/bash
+# Housekeeping for ftp jobs backups
+
+find /export/home/aebsprod/Outbound/CCMS/Citibank/BACKUP -type f -mtime +31 -exec rm -rf {} \;
+find /export/home/aebsprod/Outbound/CCMS/Rossendales/BACKUP -type f -mtime +31 -exec rm -rf {} \;
+find /export/home/aebsprod/Outbound/CCMS/TDX/BACKUP -type f -mtime +31 -exec rm -rf {} \;
+find /export/home/aebsprod/Outbound/CCMS/RBS/BACKUP -type f -mtime +31 -exec rm -rf {} \;
+find /export/home/aebsprod/Outbound/CCMS/Allpay/BACKUP -type f -mtime +31 -exec rm -rf {} \;
+find /export/home/aebsprod/Outbound/CCMS/Eckoh/BACKUP -type f -mtime +31 -exec rm -rf {} \;
+find /export/home/aebsprod/Outbound/CCMS/IOS/BACKUP -type f -mtime +31 -exec rm -rf {} \;
+find /export/home/aebsprod/Outbound/CCMS/IOS/ADHOC/BACKUP -type f -mtime +31 -exec rm -rf {} \;
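+# The find commands above delete BACKUP copies last modified more than 31 days
+# ago (-mtime +31); only regular files are matched (-type f), so the BACKUP
+# directories themselves are left in place.
+#
+# Illustrative crontab entry for this script (an assumption for illustration
+# only; the actual schedule is not part of this change):
+# 30 2 * * * /export/home/aebsprod/scripts/ftp_backup_housekeeping.bash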