trafodion-codereview mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From amandamoran <...@git.apache.org>
Subject [GitHub] incubator-trafodion pull request: Vanilla Apache HBase support and...
Date Wed, 20 Apr 2016 17:50:02 GMT
Github user amandamoran commented on a diff in the pull request:

    https://github.com/apache/incubator-trafodion/pull/440#discussion_r60455673
  
    --- Diff: install/installer/traf_apache_mods ---
    @@ -0,0 +1,267 @@
    +#!/bin/bash
    +# @@@ START COPYRIGHT @@@
    +#
    +# Licensed to the Apache Software Foundation (ASF) under one
    +# or more contributor license agreements.  See the NOTICE file
    +# distributed with this work for additional information
    +# regarding copyright ownership.  The ASF licenses this file
    +# to you under the Apache License, Version 2.0 (the
    +# "License"); you may not use this file except in compliance
    +# with the License.  You may obtain a copy of the License at
    +#
    +#   http://www.apache.org/licenses/LICENSE-2.0
    +#
    +# Unless required by applicable law or agreed to in writing,
    +# software distributed under the License is distributed on an
    +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
    +# KIND, either express or implied.  See the License for the
    +# specific language governing permissions and limitations
    +# under the License.
    +#
    +# @@@ END COPYRIGHT @@@
    +#
    +# This script will configure HBase with HBase-trx
    +# and co-processors needed for Trafodion.  It uses
    +# Ambari's configs.sh script to do this.
    +#
    +# NOTE: Only for Ambari installations
    +
    +TRAF_CONFIG=/etc/trafodion/trafodion_config
    +source $TRAF_CONFIG
    +
    +export PDSH="pdsh -R exec"
    +export PDSH_SSH_CMD="ssh -q -n %h"
    +export PDCP="pdcp -R ssh"
    +
    +export PDSH_HADOOP_NODES="$PDSH $MY_HBASE_NODES $PDSH_SSH_CMD"
    +export PDCP_HADOOP_NODES="$PDCP $MY_HBASE_NODES"
    +#=====================================
    +# copy Trafodion hbase trx jar to /usr/lib/hbase/lib
    +
    +cd $UNTAR_DIR
    +
    +HDFS_NODE=$(echo $HDFS_NODES | head -n1 | awk '{print $1;}')
    +HBASE_NODE=$(echo $HBASE_NODES | head -n1 | awk '{print $1;}')
    +echo "export HDFS_NODE=\"$HDFS_NODE\"" >> $TRAF_CONFIG
    +echo "export HBASE_NODE=\"$HBASE_NODE\"" >> $TRAF_CONFIG
    +sudo chmod 777 $TRAF_CONFIG
    +source $TRAF_CONFIG
    +
    +
    +hbase_trx_jar="hbase-trx-apache1_0_2-2.0.0.jar"
    +
    +traf_util_jar="trafodion-utility-*.jar"
    +
    +
    +# The permissions the Trafodion build process creates on the hbase-trx jar
    +# files does not work well with the installation process so we change them
    +sudo chmod -R 777 $UNTAR_DIR/export/lib
    +
    +if [ ! -f $UNTAR_DIR/export/lib/$hbase_trx_jar ]; then
    +    echo "***ERROR: unable to find $UNTAR_DIR/export/lib/$hbase_trx_jar"
    +    exit -1
    +fi
    +
    +# if more than one node then copy to all nodes
    +echo "***INFO: copying $hbase_trx_jar to all nodes"
    +if [ $node_count -ne 1 ]; then
    +    $PDSH_HADOOP_NODES sudo rm -rf  $HBASE_HOME/lib/hbase-trx* 2>/dev/null
    +    $TRAF_PDSH mkdir -p $LOCAL_WORKDIR 2>/dev/null
    +    $PDSH_HADOOP_NODES mkdir -p $LOCAL_WORKDIR 2>/dev/null
    +    cp $UNTAR_DIR/export/lib/$hbase_trx_jar $LOCAL_WORKDIR
    +    cp $UNTAR_DIR/export/lib/$traf_util_jar $LOCAL_WORKDIR
    +    $PDCP_HADOOP_NODES $LOCAL_WORKDIR/$hbase_trx_jar $LOCAL_WORKDIR
    +    $PDCP_HADOOP_NODES $LOCAL_WORKDIR/$traf_util_jar $LOCAL_WORKDIR
    +    $PDSH_HADOOP_NODES sudo cp $LOCAL_WORKDIR/$traf_util_jar $HBASE_HOME/lib
    +    $PDSH_HADOOP_NODES sudo cp $LOCAL_WORKDIR/$hbase_trx_jar $HBASE_HOME/lib
    +    $PDSH_HADOOP_NODES sudo chmod 644 $HBASE_HOME/lib/$hbase_trx_jar
    +    $PDSH_HADOOP_NODES sudo chmod 644 $HBASE_HOME/lib/$traf_util_jar
    +
    +    $PDSH_HADOOP_NODES rm $LOCAL_WORKDIR/$hbase_trx_jar 2>/dev/null
    +    $PDSH_HADOOP_NODES rm $LOCAL_WORKDIR/$traf_util_jar 2>/dev/null
    +else
    +    for node in $HBASE_NODES
    +    do 
    +    ssh -q -n $node sudo rm -rf $HBASE_HOME/lib/hbase-trx* 2>/dev/null
    +    ssh -q -n $node sudo mkdir -p $TRAF_WORKDIR 2>/dev/null
    +    ssh -q -n $node sudo chmod 777 $TRAF_WORKDIR
    +    scp -q $UNTAR_DIR/export/lib/$hbase_trx_jar $(whoami)@$node:$TRAF_WORKDIR
    +    scp -q $UNTAR_DIR/export/lib/$traf_util_jar $(whoami)@$node:$TRAF_WORKDIR
    +    ssh -q -n $node sudo cp $TRAF_WORKDIR/$hbase_trx_jar $HBASE_HOME/lib
    +    ssh -q -n $node sudo cp $TRAF_WORKDIR/$traf_util_jar $HBASE_HOME/lib
    +    ssh -q -n $node sudo chmod 644 $HADOOP_PATH/$hbase_trx_jar
    +    ssh -q -n $node sudo chmod 644 $HADOOP_PATH/$traf_util_jar
    +    done
    +fi
    +
    +#=======================================
    +#Check that HBase-trx copied to all nodes
    +
    +for node in $HBASE_NODES
    +do
    +   copiedOver=$(ssh -q -n $node sudo ls $HBASE_HOME/lib/hbase-trx* | wc -l)
    +   if [[ $copiedOver -ne "1" ]]; then
    +      echo "***ERROR: $hbase_trx_jar was not copied on $node"
    +      echo "***ERROR: Please investigate why this happened"
    +      echo "***ERROR: Trafodion can not start without this. EXITING..."
    +      exit -1
    +   fi
    +done
    +
    +echo "***INFO: $hbase_trx_jar copied correctly! Huzzah."
    +
    +
    +
    +#Copy hbase-site.xml file
    +ssh -q -n $HBASE_NODE sudo cp $HBASE_HOME/conf/hbase-site.xml $HOME
    +ssh -q -n $HBASE_NODE sudo chown $(whoami).$(whoami) $HOME/hbase-site.xml
    +ssh -q -n $HBASE_NODE sudo chmod 777 $HOME/hbase-site.xml
    +
    +scp -q $(whoami)@$HBASE_NODE:$HOME/hbase-site.xml $HOME
    +if [[ $? -gt 1 ]]; then
    +   echo "***ERROR: Unable to find $HBASE_HOME/conf/hbase-site.xml file on $HBASE_NODE or unable to copy."
    +   exit -1
    +fi
    +sudo cp $HOME/hbase-site.xml $TRAF_WORKDIR
    +sudo chown trafodion.trafodion $TRAF_WORKDIR/hbase-site.xml
    +
    +#=====================================
    +# create new directories for bulkload and lobs if not already there
    +rm $LOCAL_WORKDIR/traf_temp_output 2>/dev/null
    +
    +ssh -q -n $HDFS_NODE 'sudo su' "$HDFS_USER" '--command "' "$HADOOP_PREFIX"'/bin/hdfs dfs -mkdir /hbase-staging" 2> $HOME/traf_temp_output'
    +if [ $? != 0 ]; then
    +   # ok if directory already exists
    --- End diff --
    
    You are right! This would be much cleaner! Thanks! 


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes so, or if the feature is enabled but not working, please
contact infrastructure at infrastructure@apache.org or file a JIRA ticket
with INFRA.
---

Mime
View raw message