This article walks through installing Hive on top of Hadoop and installing Zeppelin on Linux; hopefully it is a useful reference for developers doing the same setup.
Place the downloaded package under the /opt directory.
(The MySQL JDBC driver jar also needs to end up under the /opt/soft/hive110/lib directory; see the copy command after the hive-site.xml configuration below.)
[root@wq opt]# tar -zxf hive-1.1.0-cdh5.14.2.tar.gz
[root@wq opt]# mv hive-1.1.0-cdh5.14.2 soft/hive110
[root@wq opt]# cd /opt/soft/hive110/conf
[root@wq conf]# touch hive-site.xml
[root@wq conf]# vim hive-site.xml
Edit hive-site.xml as follows (change the IP address to your own):
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
    <property>
        <name>hive.metastore.warehouse.dir</name>
        <value>hive110/warehouse</value>
    </property>
    <property>
        <name>hive.metastore.local</name>
        <value>false</value>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionURL</name>
        <value>jdbc:mysql://192.168.100.155:3306/hive?useSSL=false&amp;createDatabaseIfNotExist=true</value>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionDriverName</name>
        <value>com.mysql.jdbc.Driver</value>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionUserName</name>
        <value>root</value>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionPassword</name>
        <value>okok</value>
    </property>
    <property>
        <name>hive.server2.authentication</name>
        <value>NONE</value>
    </property>
    <property>
        <name>hive.server2.thrift.client.user</name>
        <value>root</value>
    </property>
    <property>
        <name>hive.server2.thrift.client.password</name>
        <value>root</value>
    </property>
</configuration>
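Because hive-site.xml points the metastore at MySQL through com.mysql.jdbc.Driver, the MySQL connector jar has to be on Hive's classpath, and it is worth confirming the database is reachable. A minimal sketch, assuming the connector jar is already sitting under /opt (the exact jar file name below is an assumption, adjust it to yours):
# copy the MySQL JDBC driver into Hive's lib directory (jar name is an assumption)
cp /opt/mysql-connector-java-5.1.38.jar /opt/soft/hive110/lib/
# quick connectivity check with the same credentials as in hive-site.xml
mysql -h192.168.100.155 -uroot -pokok -e "select version();"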
[root@wq conf]# vim /etc/profile
Add the following to /etc/profile:
#hive environment
export HIVE_HOME=/opt/soft/hive110
export PATH=$PATH:$HIVE_HOME/bin
Then make it take effect:
[root@wq conf]# source /etc/profile
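After sourcing /etc/profile, a quick sanity check that the variables took effect:
echo $HIVE_HOME      # should print /opt/soft/hive110
which hive           # should resolve to /opt/soft/hive110/bin/hive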
Then initialize the metastore schema (run from the bin directory):
schematool -dbType mysql -initSchema
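schematool creates the metastore tables inside the MySQL database named hive. One way to verify the initialization, assuming the same MySQL credentials as in hive-site.xml:
mysql -h192.168.100.155 -uroot -pokok -e "use hive; show tables;"
# metastore tables such as DBS, TBLS and SDS should be listed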
Start Hive (from the bin directory):
hive --service hiveserver2 &
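HiveServer2 listens on port 10000 by default; a quick check that it actually came up:
jps | grep RunJar              # hiveserver2 shows up as a RunJar process
netstat -ntlp | grep 10000     # port 10000 should be in LISTEN state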
Beeline usage
beeline -u jdbc:hive2://192.168.100.155:10000/mydemo
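Once connected you can type HiveQL at the beeline prompt; beeline can also run a one-off statement with -e. A small sketch (the mydemo database is assumed to exist already):
beeline -u jdbc:hive2://192.168.100.155:10000/mydemo -n root -e "show tables;"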
Startup/shutdown script (run.sh):
#! /bin/bash
my_start(){
    if [ "$1" == "start" ];then
        # start hadoop
        sh /opt/soft/hadoop260/sbin/start-dfs.sh
        sh /opt/soft/hadoop260/sbin/start-yarn.sh
        # start hive
        nohup /opt/soft/hive110/bin/hive --service hiveserver2 &
        # start zeppelin
        sh /opt/soft/zeppelin081/bin/zeppelin-daemon.sh start
        echo "start over"
    else
        # close zeppelin
        sh /opt/soft/zeppelin081/bin/zeppelin-daemon.sh stop
        # close hive
        hiveprocess=`jps | grep RunJar | awk '{print $1}'`
        for no in $hiveprocess
        do
            kill -9 $no   # if several RunJar processes exist, kill them all in the loop
        done
        # stop hadoop
        sh /opt/soft/hadoop260/sbin/stop-dfs.sh
        sh /opt/soft/hadoop260/sbin/stop-yarn.sh
        echo "stop over"
    fi
}
my_start $1
Save it as run.sh and make it executable:
chmod +x run.sh
source run.sh start
source run.sh stop
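After running "source run.sh start", jps is a handy way to confirm that everything came up; roughly the following processes should be present:
jps
# expected (roughly): NameNode, DataNode, SecondaryNameNode,
# ResourceManager, NodeManager, RunJar (hiveserver2), ZeppelinServer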
Zeppelin installation
tar -zvxf zeppelin-0.8.1-bin-all.tgz
mv zeppelin-0.8.1-bin-all soft/zeppelin081
cd soft/zeppelin081/conf
Modify the configuration files:
cp zeppelin-site.xml.template zeppelin-site.xml
vim zeppelin-site.xml
Add the following property:
<property>
    <name>zeppelin.helium.registry</name>
    <value>helium</value>
</property>
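If port 8080 is already occupied on the machine, zeppelin-site.xml also has a zeppelin.server.port property that can be changed in the same way; for example (8090 is just an example value):
<property>
    <name>zeppelin.server.port</name>
    <value>8090</value>
</property>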
cp zeppelin-env.sh.template zeppelin-env.sh
Add JAVA_HOME and HADOOP_CONF_DIR (point them at your own Java and Hadoop installation directories):
vim zeppelin-env.sh
export JAVA_HOME=/opt/soft/jdk180
export HADOOP_CONF_DIR=/opt/soft/hadoop260/etc/hadoop
vim /etc/profile
Add the following:
#zeppelin environment
export ZEPPELIN_HOME=/opt/soft/zeppelin081
export PATH=$PATH:$ZEPPELIN_HOME/bin
Then run:
source /etc/profile
Copy Hive's hive-site.xml into Zeppelin's conf directory so the jdbc interpreter can pick up the Hive configuration:
[root@wq bin]# cp /opt/soft/hive110/conf/hive-site.xml /opt/soft/zeppelin081/conf/
Copy the required jar packages into Zeppelin's jdbc interpreter directory:
[root@wq bin]# cp /opt/soft/hadoop260/share/hadoop/common/hadoop-common-2.6.0-cdh5.14.2.jar /opt/soft/zeppelin081/interpreter/jdbc/
cp /opt/soft/hive110/lib/hive-jdbc-1.1.0-cdh5.14.2-standalone.jar /opt/soft/zeppelin081/interpreter/jdbc/
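A quick check that both jars really landed in the interpreter directory:
ls /opt/soft/zeppelin081/interpreter/jdbc/ | grep -E "hadoop-common|hive-jdbc"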
zeppelin-daemon.sh start
Open http://192.168.100.155:8080/#/ in a browser, go to the interpreter settings, and configure the jdbc interpreter as follows:
default.driver org.apache.hive.jdbc.HiveDriver
default.url jdbc:hive2://192.168.100.155:10000
default.user hive
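After saving and restarting the jdbc interpreter, a Hive query can be run from a notebook paragraph by prefixing it with %jdbc, for example:
%jdbc
show databases;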
That wraps up installing Hive on Hadoop and installing Zeppelin on Linux; hopefully it is of some help.