Phoenix: Kerberize Installation

In this tutorial I will show you how to use Kerberos with Phoenix. Before you begin, ensure you have installed Kerberos Server, Hadoop, HBase, and ZooKeeper.

This tutorial assumes your hostname is “hadoop”; substitute your own hostname wherever it appears.
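You can check what your machine reports before continuing:

# Should print "hadoop" (or its fully qualified form); the Kerberos principals below must match it
hostname -f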

Install Phoenix

wget http://apache.forsale.plus/phoenix/apache-phoenix-5.0.0-HBase-2.0/bin/apache-phoenix-5.0.0-HBase-2.0-bin.tar.gz
tar -zxvf apache-phoenix-5.0.0-HBase-2.0-bin.tar.gz
sudo mv apache-phoenix-5.0.0-HBase-2.0-bin /usr/local/phoenix/
cd /usr/local/phoenix/
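As a quick sanity check, the client and server jars should sit at the top level of the extracted directory (both are referenced in the linking and classpath steps below):

# Both jars must exist for the steps that follow
ls /usr/local/phoenix/phoenix-5.0.0-HBase-2.0-client.jar /usr/local/phoenix/phoenix-5.0.0-HBase-2.0-server.jar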

Set up .bashrc:

sudo nano ~/.bashrc

Add the following to the end of the file.

#PHOENIX VARIABLES START
export PHOENIX_HOME=/usr/local/phoenix
export PHOENIX_CLASSPATH=$PHOENIX_HOME/*
export PATH=$PATH:$PHOENIX_HOME/bin
#PHOENIX VARIABLES END

source ~/.bashrc
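Confirm the variables took effect in the current shell:

# The first should print /usr/local/phoenix; the second should resolve to its bin directory
echo $PHOENIX_HOME
which sqlline.py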

Link Files

ln -sf $HBASE_CONF_DIR/hbase-site.xml $PHOENIX_HOME/bin/hbase-site.xml
ln -sf $HADOOP_CONF_DIR/core-site.xml $PHOENIX_HOME/bin/core-site.xml
ln -sf $PHOENIX_HOME/phoenix-5.0.0-HBase-2.0-server.jar $HBASE_HOME/lib/phoenix-5.0.0-HBase-2.0-server.jar
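This assumes $HBASE_CONF_DIR, $HADOOP_CONF_DIR and $HBASE_HOME were exported during the Hadoop and HBase installs. You can confirm the links resolve:

# Each entry should be a symlink pointing at the real file
ls -l $PHOENIX_HOME/bin/hbase-site.xml $PHOENIX_HOME/bin/core-site.xml $HBASE_HOME/lib/phoenix-5.0.0-HBase-2.0-server.jar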

hbase-env.sh

nano /usr/local/hbase/conf/hbase-env.sh

Ensure the following environment variables are set:

export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-/usr/local/hadoop/etc/hadoop}
export PHOENIX_CLASSPATH=${PHOENIX_CLASSPATH:-/usr/local/phoenix}
export HBASE_CLASSPATH="$HBASE_CLASSPATH:$CLASSPATH:$HADOOP_CONF_DIR:$PHOENIX_CLASSPATH/phoenix-5.0.0-HBase-2.0-server.jar:$PHOENIX_CLASSPATH/phoenix-core-5.0.0-HBase-2.0.jar:$PHOENIX_CLASSPATH/phoenix-5.0.0-HBase-2.0-client.jar"
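You can confirm the HBase scripts now pick up the Phoenix jars; this works even before HBase is restarted, since it only inspects the configured classpath:

# The phoenix client, core, and server jars should appear in the output
hbase classpath | tr ':' '\n' | grep -i phoenix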

hbase-site.xml

nano /usr/local/hbase/conf/hbase-site.xml

Add the following properties:

<property>
	<name>phoenix.functions.allowUserDefinedFunctions</name>
	<value>true</value>
	<description>Enable user-defined functions</description>
</property>
<property>
	<name>hbase.regionserver.wal.codec</name>
	<value>org.apache.hadoop.hbase.regionserver.wal.IndexedWALEditCodec</value>
</property>
<property>
	<name>hbase.region.server.rpc.scheduler.factory.class</name>
	<value>org.apache.hadoop.hbase.ipc.PhoenixRpcSchedulerFactory</value>
	<description>Factory to create the Phoenix RPC Scheduler that uses separate queues for index and metadata updates</description>
</property>
<property>
	<name>hbase.rpc.controllerfactory.class</name>
	<value>org.apache.hadoop.hbase.ipc.controller.ServerRpcControllerFactory</value>
	<description>Factory to create the Phoenix RPC controller that uses separate queues for index and metadata updates</description>
</property>
<property>
	<name>hbase.defaults.for.version.skip</name>
	<value>true</value>
</property>
<property>
	<name>phoenix.queryserver.http.port</name>
	<value>8765</value>
</property>
<property>
	<name>phoenix.queryserver.serialization</name>
	<value>PROTOBUF</value>
</property>
<property>
	<name>phoenix.queryserver.keytab.file</name>
	<value>/etc/security/keytabs/hbase.service.keytab</value>
</property>
<property>
	<name>phoenix.queryserver.kerberos.principal</name>
	<value>hbase/hadoop@REALM.CA</value>
</property>
<property>
	<name>phoenix.queryserver.http.keytab.file</name>
	<value>/etc/security/keytabs/hbaseHTTP.service.keytab</value>
</property>
<property>
	<name>phoenix.queryserver.http.kerberos.principal</name>
	<value>hbaseHTTP/hadoop@REALM.CA</value>
</property>
<property>
	<name>phoenix.queryserver.dns.nameserver</name>
	<value>hadoop</value>
</property>
<property>
	<name>phoenix.queryserver.dns.interface</name>
	<value>enp0s3</value>
</property>
<property>
	<name>phoenix.schema.mapSystemTablesToNamespace</name>
	<value>true</value>
</property>
<property>
	<name>phoenix.schema.isNamespaceMappingEnabled</name>
	<value>true</value>
</property>
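Restart HBase so the new properties and the Phoenix server jar are loaded. A minimal sketch, assuming a single-node install managed with the bundled scripts:

# Restart HBase to pick up hbase-env.sh and hbase-site.xml changes
$HBASE_HOME/bin/stop-hbase.sh
$HBASE_HOME/bin/start-hbase.sh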

sqlline.py

The connection string takes the form zookeeper_host:port:zk_parent_node:principal:keytab. This connects through ZooKeeper on hadoop:2181, using the secure HBase znode and the hbase service principal configured above:

sqlline.py hadoop:2181:/hbase-secure:hbase/hadoop@REALM.CA:/etc/security/keytabs/hbase.service.keytab
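Before connecting, make sure you hold a valid ticket for the hbase principal (the keytab path matches the one configured earlier):

# Obtain and verify a Kerberos ticket for the hbase service principal
kinit -kt /etc/security/keytabs/hbase.service.keytab hbase/hadoop@REALM.CA
klist

Once sqlline connects, running !tables should list the Phoenix SYSTEM tables, which confirms both the server jar and the Kerberos login are working.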