Preparation
Install Hadoop
Install Hive
derby
# Upload
scp -P 22 -r D:\00garbage\big_data\hive\apache-hive-3.1.3-bin.tar.gz mybigdata@192.168.66.20:/home/mybigdata/
tar -zxvf apache-hive-3.1.3-bin.tar.gz
mv apache-hive-3.1.3-bin hive
# Environment variables
vim ~/.bashrc
# Initialize the metastore. Derby supports only a single connection. For the error fix, see https://blog.csdn.net/qq_41918166/article/details/128748687
bin/schematool -dbType derby -initSchema
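If schematool fails here with Hive 3.1.3 on Hadoop 3.x, a common cause is a guava version conflict (NoSuchMethodError on com.google.common.base.Preconditions.checkArgument). A sketch of the usual fix, assuming your Hadoop ships guava-27.0-jre.jar (check the actual jar names in your installs):
# Replace Hive's older guava jar with Hadoop's (jar versions below are assumptions)
rm $HIVE_HOME/lib/guava-19.0.jar
cp $HADOOP_HOME/share/hadoop/common/lib/guava-27.0-jre.jar $HIVE_HOME/lib/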
# Start
bin/hive
# Test
hive> show databases;
hive> show tables;
hive> create table student(id int, name string);
hive> insert into student values(1,"xcrj");
hive> select * from student;
# Browse HDFS via the web UI
http://h01:9870/explorer.html#/
Enter /user/hive/warehouse/student
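The same directory can also be checked from the shell:
hadoop fs -ls /user/hive/warehouse/student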
Add to ~/.bashrc:
#HIVE_HOME
export HIVE_HOME=/home/mybigdata/hive
export PATH=$PATH:$HIVE_HOME/bin
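Then reload the shell configuration so HIVE_HOME takes effect in the current session:
source ~/.bashrc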
mysql
First clean up the Derby setup
quit;
rm -rf derby.log metastore_db
hadoop fs -rm -r /user
Install MySQL with Docker
sudo docker pull mysql:5.7.40
sudo docker run -itd --name mysql57 -p 3306:3306 -e MYSQL_ROOT_PASSWORD=123456 mysql:5.7.40
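MySQL needs a few seconds to initialize inside the container; before connecting, you can confirm it is ready (a quick check using the container name above):
sudo docker logs mysql57 2>&1 | grep "ready for connections"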
docker exec -it mysql57 /bin/bash
mysql -uroot -p
create database metastore;
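Before running schematool against MySQL, Hive needs the MySQL JDBC driver on its classpath. A sketch, assuming you have downloaded a mysql-connector-java jar (the exact version and path are assumptions):
# Copy the MySQL JDBC driver (matches com.mysql.jdbc.Driver in hive-site.xml) into Hive's lib directory
cp mysql-connector-java-5.1.49.jar $HIVE_HOME/lib/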
Configuration
# Configure (the contents of hive-site.xml are given below)
vim $HIVE_HOME/conf/hive-site.xml
# Initialize the metastore
bin/schematool -dbType mysql -initSchema -verbose
# Start
bin/hive
# Test 1
hive> show databases;
hive> show tables;
hive> create table student(id int, name string);
hive> insert into student values(1,"xcrj");
hive> select * from student;
# Test 2: inspect the metadata stored in the MySQL metastore database
use metastore;
show tables;
select * from DBS;
select * from TBLS;
select * from COLUMNS_V2;
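As an extra check, TBLS links to DBS through DB_ID, so you can list which Hive database each table belongs to (a sketch against the standard metastore schema):
-- join table metadata to its owning database (standard metastore columns)
select d.NAME, t.TBL_NAME, t.TBL_TYPE from TBLS t join DBS d on t.DB_ID = d.DB_ID;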
# Browse HDFS via the web UI
http://h01:9870/explorer.html#/
Enter /user/hive/warehouse/student
hive-site.xml
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
    <!-- JDBC connection URL -->
    <property>
        <name>javax.jdo.option.ConnectionURL</name>
        <value>jdbc:mysql://h01:3306/metastore?useSSL=false</value>
    </property>
    <!-- JDBC connection driver -->
    <property>
        <name>javax.jdo.option.ConnectionDriverName</name>
        <value>com.mysql.jdbc.Driver</value>
    </property>
    <!-- JDBC connection username -->
    <property>
        <name>javax.jdo.option.ConnectionUserName</name>
        <value>root</value>
    </property>
    <!-- JDBC connection password -->
    <property>
        <name>javax.jdo.option.ConnectionPassword</name>
        <value>123456</value>
    </property>
    <!-- Hive's default working directory on HDFS -->
    <property>
        <name>hive.metastore.warehouse.dir</name>
        <value>/user/hive/warehouse</value>
    </property>
</configuration>
hive.log location
cd /tmp/mybigdata/
tail -f hive.log