From 073e855ea9f1badbc1587a73f962b4578c7b3877 Mon Sep 17 00:00:00 2001 From: Robin Richtsfeld Date: Sat, 16 Dec 2017 01:11:22 +0100 Subject: [PATCH] Clean libexec --- libexec/bdutil_helpers.sh | 24 ++++++++++++------------ libexec/configure_hadoop.sh | 6 +++--- libexec/configure_hdfs.sh | 2 +- libexec/hadoop_helpers.sh | 4 ++-- libexec/mount_disks.sh | 2 +- libexec/start_hadoop.sh | 2 +- libexec/start_hadoop2.sh | 2 +- 7 files changed, 21 insertions(+), 21 deletions(-) diff --git a/libexec/bdutil_helpers.sh b/libexec/bdutil_helpers.sh index 546dc85..ccbbf29 100644 --- a/libexec/bdutil_helpers.sh +++ b/libexec/bdutil_helpers.sh @@ -22,19 +22,19 @@ function bdutil_date() { # Simple wrapper around "echo" so that it's easy to add log messages with a # date/time prefix. function loginfo() { - echo "$(bdutil_date): ${@}" + echo "$(bdutil_date): ${*}" } # Simple wrapper around "echo" controllable with ${VERBOSE_MODE}. function logdebug() { if (( ${VERBOSE_MODE} )); then - loginfo ${@} + loginfo "${@}" fi } # Simple wrapper to pass errors to stderr. function logerror() { - loginfo ${@} >&2 + loginfo "${@}" >&2 } # Helper to overwrite the contents of file specified by $1 with strings @@ -43,7 +43,7 @@ function logerror() { # of stdin/stdout redirection characters unclear. # Example: overwrite_file_with_strings foo.txt hello world function overwrite_file_with_strings() { - local contents=${@:2} + local contents=${*:2} local filename=$1 echo "Overwriting ${filename} with contents '${contents}'" echo "${contents}" > ${filename} @@ -54,7 +54,7 @@ # code. # Args: "$@" is the command to run. function run_with_retries() { - local cmd="$@" + local cmd="$*" echo "About to run '${cmd}' with retries..." local update_succeeded=0 @@ -240,7 +240,7 @@ function tcp_port_is_free() { # True if all specified IPv4 tcp ports are available. function all_tcp_ports_are_free() { - ports="$@" + ports="$*" for p in $ports; do if ! 
tcp_port_is_free $p; then return 1 @@ -251,14 +251,14 @@ function all_tcp_ports_are_free() { # Prints the list of ports in use. function tcp_ports_in_use() { - ports="$@" + ports="$*" local in_use=() for p in $ports; do if ! tcp_port_is_free $p; then in_use+=($p) fi done - echo ${in_use[@]} + echo "${in_use[@]}" } # Wait until all of the specified IPv4 tcp ports are unused. @@ -275,7 +275,7 @@ function wait_until_ports_free() { max_wait_seconds=$2 shift 2 fi - ports="$@" + ports="$*" while (( now < start_time + max_wait_seconds )); do if all_tcp_ports_are_free $ports; then return 0 @@ -299,12 +299,12 @@ function wait_until_ports_free_and_report() { echo "Waiting for ports that are not yet available: ${in_use}." local port_wait_start_time=$(date '+%s') local port_wait_status=0 - wait_until_ports_free ${PORTS[@]} || port_wait_status=$? + wait_until_ports_free "${PORTS[@]}" || port_wait_status=$? local port_wait_end_time=$(date '+%s') local port_wait_time=$(( port_wait_end_time - port_wait_start_time )) if [[ ${port_wait_time} -gt 0 ]]; then - echo "Wait time in seconds for ports ${PORTS[@]} was ${port_wait_time}." - local still_in_use=$(tcp_ports_in_use ${PORTS[@]}) + echo "Wait time in seconds for ports ${PORTS[*]} was ${port_wait_time}." + local still_in_use=$(tcp_ports_in_use "${PORTS[@]}") if [[ "${still_in_use}" != "" ]]; then echo "Ports still in use: ${still_in_use}." fi diff --git a/libexec/configure_hadoop.sh b/libexec/configure_hadoop.sh index 44cab4b..9173462 100644 --- a/libexec/configure_hadoop.sh +++ b/libexec/configure_hadoop.sh @@ -33,7 +33,7 @@ if ! declare -p WORKERS | grep -q '^declare \-a'; then WORKERS=(${WORKERS}) fi -echo ${WORKERS[@]} | tr ' ' '\n' > ${HADOOP_CONF_DIR}/slaves +echo "${WORKERS[@]}" | tr ' ' '\n' > ${HADOOP_CONF_DIR}/slaves echo ${MASTER_HOSTNAME} > ${HADOOP_CONF_DIR}/masters # Basic configuration not related to GHFS or HDFS. 
@@ -96,8 +96,8 @@ if [[ ${#MOUNTED_DISKS[@]} -eq 0 ]]; then MOUNTED_DISKS=('') fi -MAPRED_LOCAL_DIRS="${MOUNTED_DISKS[@]/%//hadoop/mapred/local}" -NODEMANAGER_LOCAL_DIRS="${MOUNTED_DISKS[@]/%//hadoop/yarn/nm-local-dir}" +MAPRED_LOCAL_DIRS="${MOUNTED_DISKS[*]/%//hadoop/mapred/local}" +NODEMANAGER_LOCAL_DIRS="${MOUNTED_DISKS[*]/%//hadoop/yarn/nm-local-dir}" mkdir -p ${MAPRED_LOCAL_DIRS} ${NODEMANAGER_LOCAL_DIRS} chgrp hadoop -L -R \ diff --git a/libexec/configure_hdfs.sh b/libexec/configure_hdfs.sh index 13015a1..368ec1b 100644 --- a/libexec/configure_hdfs.sh +++ b/libexec/configure_hdfs.sh @@ -34,7 +34,7 @@ if (( ${ENABLE_HDFS} )); then # Location of HDFS data blocks on datanodes; for each mounted disk, add the # path /mnt/diskname/hadoop/dfs/data as a data directory, or if no mounted # disks exist, just go with the absolute path /hadoop/dfs/data. - HDFS_DATA_DIRS="${MOUNTED_DISKS[@]/%//hadoop/dfs/data}" + HDFS_DATA_DIRS="${MOUNTED_DISKS[*]/%//hadoop/dfs/data}" # Do not create HDFS_NAME_DIR, or Hadoop will think it is already formatted mkdir -p /hadoop/dfs ${HDFS_DATA_DIRS} diff --git a/libexec/hadoop_helpers.sh b/libexec/hadoop_helpers.sh index e97d30b..2edc58e 100644 --- a/libexec/hadoop_helpers.sh +++ b/libexec/hadoop_helpers.sh @@ -100,7 +100,7 @@ function start_with_retry_namenode() { ${start_dfs_file} fi done - if !(wait_for_namenode); then + if ! (wait_for_namenode); then echo "Namenode not running after ${max_attempts} attempts at start-dfs.sh" \ >&2 return ${errcode} @@ -174,7 +174,7 @@ function get_hdfs_superuser() { # Create and configure Hadoop2 specific HDFS directories. function initialize_hdfs_dirs() { - local extra_users="$@" + local extra_users="$*" local hdfs_superuser=$(get_hdfs_superuser) local dfs_cmd="sudo -i -u ${hdfs_superuser} hadoop fs" loginfo "Setting up HDFS /tmp directories." 
diff --git a/libexec/mount_disks.sh b/libexec/mount_disks.sh index 1fb0f2c..9cc5136 100644 --- a/libexec/mount_disks.sh +++ b/libexec/mount_disks.sh @@ -64,7 +64,7 @@ for DISK_PATH in ${DISK_PATHS}; do MOUNT_ENTRY=($(grep -w ${DATAMOUNT} /proc/mounts)) # Taken from /usr/share/google/safe_format_and_mount MOUNT_OPTIONS='defaults,discard' - echo "UUID=${DISK_UUID} ${MOUNT_ENTRY[@]:1:2} ${MOUNT_OPTIONS} 0 2 \ + echo "UUID=${DISK_UUID} ${MOUNT_ENTRY[*]:1:2} ${MOUNT_OPTIONS} 0 2 \ # added by bdutil" >> /etc/fstab fi done diff --git a/libexec/start_hadoop.sh b/libexec/start_hadoop.sh index 6c1a417..d6a6d62 100644 --- a/libexec/start_hadoop.sh +++ b/libexec/start_hadoop.sh @@ -22,7 +22,7 @@ HADOOP_PORTS=(50010 50020 50030 50060 50070 50075 50090) cd ${HADOOP_INSTALL_DIR} # Test for sshability to workers. -for NODE in ${WORKERS[@]}; do +for NODE in "${WORKERS[@]}"; do sudo -u hadoop ssh ${NODE} "exit 0" done diff --git a/libexec/start_hadoop2.sh b/libexec/start_hadoop2.sh index a47e6bc..6bbc9ec 100644 --- a/libexec/start_hadoop2.sh +++ b/libexec/start_hadoop2.sh @@ -23,7 +23,7 @@ HADOOP_PORTS=(8088 50010 50020 50070 50090) cd ${HADOOP_INSTALL_DIR} # Test for sshability to workers. -for NODE in ${WORKERS[@]}; do +for NODE in "${WORKERS[@]}"; do sudo -u hadoop ssh ${NODE} "exit 0" done