#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

cygwin=false
case "`uname`" in
   CYGWIN*) cygwin=true;;
esac

#
# Initialize globals
#
MAPR_HOME="${BASEMAPR:-/opt/mapr}"
MAPR_CONF="$MAPR_HOME"/conf
ROLES_DIR="$MAPR_HOME/roles"

function get_canonical_dir() {
  target="$1"
  canonical_name=`readlink -f ${target} 2>/dev/null`
  if [[ $? -eq 0 ]]; then
    canonical_dir=`dirname $canonical_name`
    echo ${canonical_dir}
    return
  fi

  # Mac has no readlink -f
  cd `dirname ${target}`
  target=`basename ${target}`

  # chase down the symlinks
  while [ -L ${target} ]; do
    target=`readlink ${target}`
    cd `dirname ${target}`
    target=`basename ${target}`
  done

  canonical_dir=`pwd -P`
  ret=${canonical_dir}
  echo $ret
}

#
# Checks if slider is installed
#
has_slider(){
if [ -f "$ROLES_DIR/slider" ]; then
  return 0; # 0 = true
else
  return 1;
fi
}

function find_spark_home() {
local SPARK_HOME_LINK="$1"
SPARK_HOME=""

# file exists and is a symbolic link
if [ -h "$SPARK_HOME_LINK" ]; then
  SPARK_HOME=$(readlink "$SPARK_HOME_LINK")
fi
echo $SPARK_HOME
}

function find_spark_assembly_path(){
local SPARK_HOME="$1"

# Use script spark-jars.sh to find SPARK_ASSEMBLY_PATH
if [ -f "$SPARK_HOME/conf/spark-jars.sh" ] ; then
  source "$SPARK_HOME/conf/spark-jars.sh"
  SPARK_ASSEMBLY_PATH="$SPARK_JARS_CLASSPATH"
fi

# for Spark-1.6.X
if [ -d "$SPARK_HOME/lib" ] ; then
  SPARK_ASSEMBLY_PATH=$(ls ${SPARK_HOME}/lib/spark-assembly-*.jar)
fi

# for Spark-2.X.X +
if [ -d "$SPARK_HOME/jars" ] ; then
  SPARK_ASSEMBLY_PATH="${SPARK_HOME}/jars/*"
fi

echo "$SPARK_ASSEMBLY_PATH"
}

bin=$(get_canonical_dir "$0")

bin=`cd "$bin"; pwd`

. "$bin"/hive-config.sh

#BASEMAPR should be set by hive-config.sh
HADOOP_DIR=$(get_canonical_dir "/usr/bin/hadoop")
HADOOP_DIR=`dirname $HADOOP_DIR`
export HADOOP_HOME=${HADOOP_HOME:-$HADOOP_DIR}
if [ -f $BASEMAPR/hbase/hbaseversion ]; then
  MAPR_HBASE_VERSION=`cat $BASEMAPR/hbase/hbaseversion 2>/dev/null`
  MAPR_HBASE_HOME=${MAPR_HBASE_HOME:-"${BASEMAPR}/hbase/hbase-${MAPR_HBASE_VERSION}"}
  MAPR_HBASE_JAR=$(find $BASEMAPR/hbase -regextype posix-extended -regex ".*\/hbase-${MAPR_HBASE_VERSION}-.*[^s]\.jar" -print 2> /dev/null)
  MAPR_HBASE_TEST_JAR=$(find $BASEMAPR/hbase -regextype posix-extended -regex ".*\/hbase-${MAPR_HBASE_VERSION}.*tests\.jar" -print 2> /dev/null)
  MAPR_HBASE_CONF="${MAPR_HBASE_HOME}/conf"
fi
MAPR_ZK_HOME=${MAPR_ZK_HOME:-${BASEMAPR}/lib}
export MAPR_IMPERSONATION_ENABLED=true


TMP_USER_DIR="/tmp/${USER}"
STDERR="${TMP_USER_DIR}/stderr"
SERVICE=""
HELP=""
START=""
STOP=""
STATUS=""
FINAL_OPT=""
count=0
SKIP_HBASECP=false
SKIP_HADOOPVERSION=false

SERVICE_ARGS=()
while [ $# -gt 0 ]; do
  case "$1" in
    --version)
      shift
      SERVICE=version
      ;;
    --service)
      shift
      SERVICE=$1
      shift
      ;;
    --rcfilecat)
      SERVICE=rcfilecat
      shift
      ;;
    --orcfiledump)
      SERVICE=orcfiledump
      shift
      ;;
    --llapdump)
      SERVICE=llapdump
      shift
      ;;
    --skiphadoopversion)
      SKIP_HADOOPVERSION=true
      shift
      ;;
    --skiphbasecp)
      SKIP_HBASECP=true
      shift
      ;;
    --help)
      HELP=_help
      shift
      ;;
    --debug*)
      DEBUG=$1
      FINAL_OPT=$HELP
      ((count++))
      shift
      ;;
    --start)
      START=_start
      FINAL_OPT=$START
      ((count++))
      shift
      ;;
    --stop)
      STOP=_stop
      FINAL_OPT=$STOP
      ((count++))
      shift
      ;;
    --status)
      STATUS=_status
      FINAL_OPT=$STATUS
      ((count++))
      shift
      ;;
    *)
      SERVICE_ARGS=("${SERVICE_ARGS[@]}" "$1")
      shift
      ;;
  esac
done

if [ "$SERVICE" = "" ] ; then
  if [ "$HELP" = "_help" ] ; then
    SERVICE="help"
  else
    SERVICE="cli"
  fi
fi

if [[ "$SERVICE" =~ ^(help|version|orcfiledump|rcfilecat|schemaTool|cleardanglingscratchdir|metastore|beeline|llapstatus|llap)$ ]] ; then
  SKIP_HBASECP=true
fi

if [[ "$SERVICE" =~ ^(help|schemaTool)$ ]] ; then
  SKIP_HADOOPVERSION=true
fi

if [ -f "${HIVE_CONF_DIR}/hive-env.sh" ]; then
  . "${HIVE_CONF_DIR}/hive-env.sh"
fi

if [[ -z "$SPARK_HOME" ]]
then
  export SPARK_HOME=$(find_spark_home "/usr/local/spark")
  export SPARK_CONF="$SPARK_HOME/conf/spark-defaults.conf"
fi

HADOOP_VERSION_FILE="$MAPR_HOME"/hadoop/hadoopversion
HADOOP_VERSION=$(cat "$HADOOP_VERSION_FILE")
HADOOP_HOME="$MAPR_HOME"/hadoop/hadoop-"$HADOOP_VERSION"
HADOOP_CONF_DIR="$HADOOP_HOME"/etc/hadoop

CLASSPATH="${HADOOP_CONF_DIR}":"${HIVE_CONF_DIR}"

HIVE_LIB=${HIVE_HOME}/lib

# needed for execution
if [ ! -f ${HIVE_LIB}/hive-exec-*.jar ]; then
  echo "Missing Hive Execution Jar: ${HIVE_LIB}/hive-exec-*.jar"
  exit 1;
fi

if [ ! -f ${HIVE_LIB}/hive-metastore-*.jar ]; then
  echo "Missing Hive MetaStore Jar"
  exit 2;
fi

# cli specific code
if [ ! -f ${HIVE_LIB}/hive-cli-*.jar ]; then
  echo "Missing Hive CLI Jar"
  exit 3;
fi

for f in ${HIVE_LIB}/*.jar; do
  CLASSPATH=${CLASSPATH}:$f;
done

if has_slider; then
  SLIDER_VERSION_FILE="$MAPR_HOME"/slider/sliderversion
  SLIDER_VERSION=$(cat "$SLIDER_VERSION_FILE")
  export SLIDER_HOME="$MAPR_HOME"/slider/slider-"$SLIDER_VERSION"
  SLIDER_LIB="$SLIDER_HOME"/lib
  SLIDER_CONF="$SLIDER_HOME"/conf
  COMMONS_DIGESTER=$(ls "$SLIDER_LIB"/commons-digester*jar)
  COMMON_BEANUTILS=$(find "$SLIDER_LIB" -regextype posix-extended -regex ".*(commons-beanutils-[0-9]).*\.(jar)$")
  COMMON_BEANUTILS_CORE=$(find "$SLIDER_LIB" -regextype posix-extended -regex ".*(commons-beanutils-core).*\.(jar)$")
  CLASSPATH="$CLASSPATH":"$SLIDER_CONF/*":"$COMMONS_DIGESTER":"$COMMON_BEANUTILS":"$COMMON_BEANUTILS_CORE"
fi


# Add hbase conf, hbase jars and zookeeper jar to classpath
if [ ! -z "${MAPR_HBASE_CONF}" ]; then
  CLASSPATH="${CLASSPATH}:${MAPR_HBASE_CONF}"
fi
if [ ! -z "${MAPR_HBASE_JAR}" ] ; then
  CLASSPATH=${CLASSPATH}:${MAPR_HBASE_JAR};
fi
if [ ! -z "${MAPR_HBASE_TEST_JAR}" ] ; then
  CLASSPATH=${CLASSPATH}:${MAPR_HBASE_TEST_JAR};
fi
if [ -f "${MAPR_ZK_HOME}/zookeeper-*.jar" ] ; then
  CLASSPATH=${CLASSPATH}:${MAPR_ZK_HOME}/zookeeper-*.jar;
fi
if [[ -n $SENTRY_HOME ]] ; then
  for f in $SENTRY_HOME/lib/s*.jar; do
    CLASSPATH=${CLASSPATH}:${f}
  done
fi

# add the auxillary jars such as serdes
if [ -d "${HIVE_AUX_JARS_PATH}" ]; then
  hive_aux_jars_abspath=`cd ${HIVE_AUX_JARS_PATH} && pwd`
  for f in $hive_aux_jars_abspath/*.jar; do
    if [[ ! -f $f ]]; then
        continue;
    fi
    if $cygwin; then
	f=`cygpath -w "$f"`
    fi
    AUX_CLASSPATH=${AUX_CLASSPATH}:$f
    if [ "${AUX_PARAM}" == "" ]; then
        AUX_PARAM=file://$f
    else
        AUX_PARAM=${AUX_PARAM},file://$f;
    fi
  done
elif [ "${HIVE_AUX_JARS_PATH}" != "" ]; then
  HIVE_AUX_JARS_PATH=`echo $HIVE_AUX_JARS_PATH | sed 's/,/:/g'`
  if $cygwin; then
      HIVE_AUX_JARS_PATH=`cygpath -p -w "$HIVE_AUX_JARS_PATH"`
      HIVE_AUX_JARS_PATH=`echo $HIVE_AUX_JARS_PATH | sed 's/;/,/g'`
  fi
  AUX_CLASSPATH=${AUX_CLASSPATH}:${HIVE_AUX_JARS_PATH}
  AUX_PARAM="file://$(echo ${HIVE_AUX_JARS_PATH} | sed 's/:/,file:\/\//g')"
fi

# adding jars from auxlib directory
for f in ${HIVE_HOME}/auxlib/*.jar; do
  if [[ ! -f $f ]]; then
      continue;
  fi
  if $cygwin; then
      f=`cygpath -w "$f"`
  fi
  AUX_CLASSPATH=${AUX_CLASSPATH}:$f
  if [ "${AUX_PARAM}" == "" ]; then
    AUX_PARAM=file://$f
  else
    AUX_PARAM=${AUX_PARAM},file://$f;
  fi
done
if $cygwin; then
    CLASSPATH=`cygpath -p -w "$CLASSPATH"`
    CLASSPATH=${CLASSPATH};${AUX_CLASSPATH}
else
    CLASSPATH=${CLASSPATH}:${AUX_CLASSPATH}
fi

# supress the HADOOP_HOME warnings in 1.x.x
export HADOOP_HOME_WARN_SUPPRESS=true

#replacing line breaks - Bug 20191
CLASSPATH=`echo "$CLASSPATH" | tr '\n' ':'`
#replacing repeating colons
CLASSPATH=`echo $CLASSPATH | sed 's/:\{2,\}/:/g'`

# to make sure log4j2.x and jline jars are loaded ahead of the jars pulled by hadoop
export HADOOP_USER_CLASSPATH_FIRST=true

# pass classpath to hadoop
if [ "$HADOOP_CLASSPATH" != "" ]; then
  export HADOOP_CLASSPATH="${CLASSPATH}:${HADOOP_CLASSPATH}"
else
  export HADOOP_CLASSPATH="$CLASSPATH"
fi

# also pass hive classpath to hadoop
if [ "$HIVE_CLASSPATH" != "" ]; then
  export HADOOP_CLASSPATH="${HADOOP_CLASSPATH}:${HIVE_CLASSPATH}";
fi

# check for hadoop in the path
HADOOP_IN_PATH=`which hadoop 2>/dev/null`
if [ -f ${HADOOP_IN_PATH} ]; then
  HADOOP_DIR=`dirname "$HADOOP_IN_PATH"`/..
fi
# HADOOP_HOME env variable overrides hadoop in the path
HADOOP_HOME=${HADOOP_HOME:-${HADOOP_PREFIX:-$HADOOP_DIR}}
if [ "$HADOOP_HOME" == "" ]; then
  echo "Cannot find hadoop installation: \$HADOOP_HOME or \$HADOOP_PREFIX must be set or hadoop must be in the path";
  exit 4;
fi

# add distcp to classpath, hive depends on it
for f in ${HADOOP_HOME}/share/hadoop/tools/lib/hadoop-distcp-*.jar; do
  export HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:$f;
done

HADOOP=$HADOOP_HOME/bin/hadoop
if [ ! -f ${HADOOP} ]; then
  echo "Cannot find hadoop installation: \$HADOOP_HOME or \$HADOOP_PREFIX must be set or hadoop must be in the path";
  exit 4;
fi

if [ ! -d ${TMP_USER_DIR} ]; then
  mkdir -p ${TMP_USER_DIR} 2> /dev/null
  if [ $? -ne 0 ]; then
    STDERR="/dev/tty"
  fi
fi

if [ "${STDERR}" != "/dev/null" ] && [ ! -f ${STDERR} ]; then
  touch ${STDERR} 2> /dev/null
  if [ $? -ne 0 ]; then
    STDERR="/dev/tty"
  fi
fi

if [ "$SKIP_HADOOPVERSION" = false ]; then
  # Make sure we're using a compatible version of Hadoop
  if [ "x$HADOOP_VERSION" == "x" ]; then
      HADOOP_VERSION=$($HADOOP version 2>> ${STDERR} | awk -F"\t" '/Hadoop/ {print $0}' | cut -d' ' -f 2);
  fi

  # Save the regex to a var to workaround quoting incompatabilities
  # between Bash 3.1 and 3.2
  hadoop_version_re="^([[:digit:]]+)\.([[:digit:]]+)(\.([[:digit:]]+))?.*$"

  if [[ "$HADOOP_VERSION" =~ $hadoop_version_re ]]; then
      hadoop_major_ver=${BASH_REMATCH[1]}
      hadoop_minor_ver=${BASH_REMATCH[2]}
      hadoop_patch_ver=${BASH_REMATCH[4]}
  else
      echo "Unable to determine Hadoop version information."
      echo "'hadoop version' returned:"
      echo `$HADOOP version`
      exit 5
  fi

  if [ "$hadoop_major_ver" -lt "1" -a  "$hadoop_minor_ver$hadoop_patch_ver" -lt "201" ]; then
      echo "Hive requires Hadoop 0.20.x (x >= 1)."
      echo "'hadoop version' returned:"
      echo `$HADOOP version`
      exit 6
  fi
fi

if [ "$SKIP_HBASECP" = false ]; then
  # HBase detection. Need bin/hbase and a conf dir for building classpath entries.
  # Start with BigTop defaults for HBASE_HOME and HBASE_CONF_DIR.
  HBASE_HOME=${HBASE_HOME:-"/usr/lib/hbase"}
  HBASE_CONF_DIR=${HBASE_CONF_DIR:-"/etc/hbase/conf"}
  if [[ ! -d $HBASE_CONF_DIR ]] ; then
    # not explicitly set, nor in BigTop location. Try looking in HBASE_HOME.
    HBASE_CONF_DIR="$HBASE_HOME/conf"
  fi

  # perhaps we've located the HBase config. if so, include it on classpath.
  if [[ -d $HBASE_CONF_DIR ]] ; then
    export HADOOP_CLASSPATH="${HADOOP_CLASSPATH}:${HBASE_CONF_DIR}"
  fi

  # look for the hbase script. First check HBASE_HOME and then ask PATH.
  if [[ -e $HBASE_HOME/bin/hbase ]] ; then
    HBASE_BIN="$HBASE_HOME/bin/hbase"
  fi
  HBASE_BIN=${HBASE_BIN:-"$(which hbase 2>/dev/null)"}

  # perhaps we've located HBase. If so, include its details on the classpath
  if [[ -n $HBASE_BIN ]] ; then
    # exclude ZK, PB, and Guava (See HIVE-2055)
    # depends on HBASE-8438 (hbase-0.94.14+, hbase-0.96.1+) for `hbase mapredcp` command
    for x in $($HBASE_BIN mapredcp 2>> ${STDERR} | tr ':' '\n') ; do
      if [[ $x == *zookeeper* || $x == *protobuf-java* || $x == *guava* ]] ; then
        continue
      fi
      # TODO: should these should be added to AUX_PARAM as well?
      export HADOOP_CLASSPATH="${HADOOP_CLASSPATH}:${x}"
    done
  fi
fi

#replacing line breaks - Bug 20191
HADOOP_CLASSPATH=`echo "$HADOOP_CLASSPATH" | tr '\n' ':'`
#replacing repeating colons
HADOOP_CLASSPATH=`echo $HADOOP_CLASSPATH | sed 's/:\{2,\}/:/g'`


if [ "${AUX_PARAM}" != "" ]; then
  if [[ "$SERVICE" != beeline ]]; then
    HIVE_OPTS="$HIVE_OPTS --hiveconf hive.aux.jars.path=${AUX_PARAM}"
  fi
  AUX_JARS_CMD_LINE="-libjars ${AUX_PARAM}"
fi

SERVICE_LIST=""

for i in "$bin"/ext/*.sh ; do
  . $i
done

for i in "$bin"/ext/util/*.sh ; do
  . $i
done

if [ "$DEBUG" ]; then
  if [ "$HELP" ]; then
    debug_help
    exit 0
  else
    get_debug_params "$DEBUG"
    export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS $HIVE_MAIN_CLIENT_DEBUG_OPTS"
  fi
fi

if [ "$HIVE_IDENT_STRING" = "" ]; then
  export HIVE_IDENT_STRING=`id -nu`
fi
# get log directory
if [ "$HIVE_LOG_DIR" = "" ]; then
  export HIVE_LOG_DIR="$HIVE_HOME/logs"
fi
if [ "$HIVE_PID_DIR" = "" ]; then
  export HIVE_PID_DIR="$HIVE_HOME/pids"
fi

TORUN=""
for j in $SERVICE_LIST ; do
  if [ "$j" = "$SERVICE" ] ; then
    TORUN=${j}$FINAL_OPT
  fi
done

# to initialize logging for all services
export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dlog4j.configurationFile=hive-log4j2.properties "

export SPLIT_HIVE_LOGS_INTO_FILES="${SPLIT_HIVE_LOGS_INTO_FILES:-true}"

if [ "$SPLIT_HIVE_LOGS_INTO_FILES" = false ] ; then
  echo "Hive logs will be written only in the hive.logs file. Set SPLIT_HIVE_LOGS_INTO_FILES to true or remove it to write hivemeta and hs2 logs separately."
else
  export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dhive.log.file=${USER}-${SERVICE}-${HOSTNAME}.log "
fi

if [ -f "${HIVE_CONF_DIR}/parquet-logging.properties" ]; then
  export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Djava.util.logging.config.file=${HIVE_CONF_DIR}/parquet-logging.properties "
else
  export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Djava.util.logging.config.file=$bin/../conf/parquet-logging.properties "
fi

if [[ "$SERVICE" =~ ^(hiveserver2|beeline|cli)$ ]] ; then
  # If process is backgrounded, don't change terminal settings
  if [[ ( ! $(ps -o stat= -p $$) =~ "+" ) && ! ( -p /dev/stdin ) ]]; then
    export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Djline.terminal=jline.UnsupportedTerminal"
  fi
fi

if [ "$SERVICE" = "hiveserver2" -o "$SERVICE" = "metastore" ] ; then
  export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS -Dfs.cache.lru.enable=true"
fi

declare -A IS_SERVICE_CMD=(["metastore_start"]=1 ["metastore_stop"]=1 ["metastore_status"]=1 ["hiveserver2_start"]=1 ["hiveserver2_stop"]=1 ["hiveserver2_status"]=1)

export HIVE_LOCAL_TASK_CHILD_OPTS=${HIVE_LOCAL_TASK_CHILD_OPTS}

source "$MAPR_HOME"/server/common-ecosystem.sh 2>/dev/null

#
# Enable Jmx only when HiveMetastore starts or HiveServer2 starts.
#
function isJmxNeeded() {
if [ "$TORUN" = "metastore_start" ] || [ "$TORUN" = "hiveserver2_start" ] ; then
  return 0; # true
else
  return 1; # false
fi
}

#
# Log Info level message into file ${HIVE_HOME}/logs/hive_jmx_options.log.
#
function logJmxInfo(){
echo "$(date +%F\ %T) INFO [hive] ${HIVE_HOME}/bin/hive: " $* >> "${HIVE_JMX_LOG_FILE:-${HIVE_HOME}/logs/hive_jmx_options.log}"
}

#
# Log Warning level message into file ${HIVE_HOME}/logs/hive_jmx_options.log.
#
function logJmxWarn(){
echo "$(date +%F\ %T) WARN [hive] ${HIVE_HOME}/bin/hive: " $* >> "${HIVE_JMX_LOG_FILE:-${HIVE_HOME}/logs/hive_jmx_options.log}"
}

#
# Configure Jmx for HS2 and HMS.
#
function configureJmx() {
MAPR_JMX_HIVE_METASTORE_PORT=${MAPR_JMX_HIVE_METASTORE_PORT:-9011}
MAPR_JMX_HIVE_SERVER2_PORT=${MAPR_JMX_HIVE_SERVER2_PORT:-9012}
if [ -z "$MAPR_JMXLOCALBINDING" ]; then
  MAPR_JMXLOCALBINDING="false"
fi

if [ -z "$MAPR_JMXAUTH" ]; then
  MAPR_JMXAUTH="false"
fi

if [ -z "$MAPR_JMXSSL" ]; then
  MAPR_JMXSSL="false"
fi

if [ -z "$MAPR_JMXDISABLE" ] && [ -z "$MAPR_JMXLOCALHOST" ] && [ -z "$MAPR_JMXREMOTEHOST" ]; then
  logJmxInfo "No MapR JMX options given - defaulting to local binding"
fi

if [ -z "$MAPR_JMXDISABLE" ] || [ "$MAPR_JMXDISABLE" = 'false' ]; then
  # default setting for localBinding
  MAPR_JMX_OPTS="-Dcom.sun.management.jmxremote"
  if [ "$MAPR_JMXAUTH" = "true" ]; then
    MAPR_JMX_OPTS="$MAPR_JMX_OPTS -Dcom.sun.management.jmxremote.authenticate=true \
    -Dcom.sun.management.jmxremote.password.file=$MAPR_HOME/conf/jmxremote.password \
    -Dcom.sun.management.jmxremote.access.file=$MAPR_HOME/conf/jmxremote.access"
  else
    MAPR_JMX_OPTS="$MAPR_JMX_OPTS -Dcom.sun.management.jmxremote.authenticate=false"
  fi

  if [ "$MAPR_JMXLOCALHOST" = "true" ] && [ "$MAPR_JMXREMOTEHOST" = "true" ]; then
    logJmxWarn "Both MAPR_JMXLOCALHOST and MAPR_JMXREMOTEHOST options are enabled - defaulting to MAPR_JMXLOCALHOST config"
    MAPR_JMXREMOTEHOST=false
  fi
  if [ "$MAPR_JMXLOCALHOST" = "true" ] || [ "$MAPR_JMXREMOTEHOST" = "true" ]; then
    if [ "$MAPR_JMXSSL" = "true" ]; then
      MAPR_JMX_OPTS="$MAPR_JMX_OPTS -Dcom.sun.management.jmxremote.ssl=true"
    else
      MAPR_JMX_OPTS="$MAPR_JMX_OPTS -Dcom.sun.management.jmxremote.ssl=false"
    fi

    if [ "$MAPR_JMXLOCALHOST" = "true" ]; then
      MAPR_JMX_OPTS="$MAPR_JMX_OPTS -Djava.rmi.server.hostname=localhost \
      -Dcom.sun.management.jmxremote.host=localhost \
      -Dcom.sun.management.jmxremote.local.only=true"
    fi

    case "$SERVICE" in
      hiveserver2)
      if [ -z "$MAPR_JMX_HIVE_SERVER2_ENABLE" ] || [ "$MAPR_JMX_HIVE_SERVER2_ENABLE" = "true" ]; then
        MAPR_JMX_PORT=${MAPR_JMX_HIVE_SERVER2_PORT}
      else
        MAPR_JMX_PORT=""
        MAPR_JMX_OPTS=""
      fi;;

      metastore)
      if [ -z "$MAPR_JMX_HIVE_METASTORE_ENABLE" ] || [ "$MAPR_JMX_HIVE_METASTORE_ENABLE" = "true" ]; then
        MAPR_JMX_PORT=${MAPR_JMX_HIVE_METASTORE_PORT}
      else
        MAPR_JMX_PORT=""
        MAPR_JMX_OPTS=""
      fi;;
    esac

    if [ -z "$MAPR_JMX_OPTS" ]; then
      logJmxWarn "JMX disabled for $SERVICE"
    elif [ -z "$MAPR_JMX_PORT" ]; then
      logJmxWarn "No JMX port given for $SERVICE - disabling TCP base JMX service"
      MAPR_JMX_OPTS=""
    else
      if [ "$MAPR_JMXLOCALHOST" = "true" ]; then
        logJmxInfo "Enabling TCP JMX for $SERVICE only on localhost port $MAPR_JMX_PORT"
      else
        logJmxInfo "Enabling TCP JMX for $SERVICE on port $MAPR_JMX_PORT"
      fi
        MAPR_JMX_OPTS="$MAPR_JMX_OPTS -Dcom.sun.management.jmxremote.port=$MAPR_JMX_PORT"
    fi
  fi

  if [ "$MAPR_JMXLOCALBINDING" = "true" ] && [ -z "$MAPR_JMX_OPTS" ]; then
    logJmxInfo "Enabling JMX local binding only"
    MAPR_JMX_OPTS="-Dcom.sun.management.jmxremote"
  fi
else
  logJmxInfo "JMX disabled by user request"
  MAPR_JMX_OPTS=""
fi
}

#
# MapR JMX handling.
#
if isJmxNeeded; then
  configureJmx
fi

if [ "$TORUN" = "" ] ; then
  echo "Service $SERVICE not found"
  echo "Available Services: $SERVICE_LIST"
  exit 7
else
  set -- "${SERVICE_ARGS[@]}"
  if [ -f "$HIVE_HOME/bin"/sync-pid.sh ] ; then
    [ -n "${IS_SERVICE_CMD[$TORUN]}" ] && source "$HIVE_HOME/bin"/sync-pid.sh
  fi

  # Configure metastore or hiveserver2 for collecting metrics
  if [ "$TORUN" = "metastore_start" ] || [ "$TORUN" = "hiveserver2_start" ] ; then
    export HADOOP_CLIENT_OPTS="$HADOOP_CLIENT_OPTS $MAPR_JMX_OPTS"
  fi

  $TORUN "$@"
fi
