※参考
http://takemikami.com/2016/12/14/CentOS7hadoophive.html
install†
root で
jdk†
wget --no-check-certificate --no-cookies --header "Cookie: oraclelicense=accept-securebackup-cookie" http://download.oracle.com/otn-pub/java/jdk/8u66-b17/jdk-8u66-linux-x64.rpm
rpm -ivh jdk-8u66-linux-x64.rpm
selinux†
/etc/selinux/config
SELINUX=disabled
reboot
maria†
yum -y install mariadb-server
systemctl enable mariadb
systemctl start mariadb
adduser hadoop
su hadoop
一般ユーザで†
ssh setting†
ssh-keygen
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 600 ~/.ssh/authorized_keys
ssh localhost
exit
echo "export JAVA_HOME=/usr/java/default" >> ~/.bashrc
hadoop†
cd
wget http://ftp.jaist.ac.jp/pub/apache/hadoop/common/hadoop-2.7.3/hadoop-2.7.3.tar.gz
tar xzfp hadoop*
echo "export HADOOP_HOME=/home/`whoami`/hadoop-2.7.3" >> ~/.bashrc
source ~/.bashrc
mkdir -p ~/var/lib/hdfs/{name,data}
$HADOOP_HOME/etc/hadoop/core-site.xml
<configuration>
<property>
<name>fs.defaultFS</name>
<value>hdfs://localhost:9000</value>
</property>
</configuration>
$HADOOP_HOME/etc/hadoop/hdfs-site.xml
<configuration>
<property>
<name>dfs.namenode.name.dir</name>
<value>/home/hadoop/var/lib/hdfs/name</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>/home/hadoop/var/lib/hdfs/data</value>
</property>
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
</configuration>
path の設定
PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
hdfs namenode -format
start-dfs.sh
hive†
cd
wget http://ftp.jaist.ac.jp/pub/apache/hive/hive-2.1.1/apache-hive-2.1.1-bin.tar.gz
tar xzfp apache-hive*
echo "export HIVE_HOME=/home/`whoami`/apache-hive-2.1.1-bin" >> ~/.bashrc
echo 'PATH=$PATH:$HIVE_HOME/bin' >> ~/.bashrc
source ~/.bashrc
maria†
mysql -u root -p
CREATE DATABASE hive_metastore;
USE hive_metastore;
CREATE USER hive@localhost IDENTIFIED BY '【パスワード】';
GRANT ALL PRIVILEGES ON hive_metastore.* TO hive@localhost;
FLUSH PRIVILEGES;
JDBC†
wget https://dev.mysql.com/get/Downloads/Connector-J/mysql-connector-java-5.1.41.tar.gz
tar xzfp mysql*
cp mysql*/mysql*.jar $HIVE_HOME/lib/
$HIVE_HOME/conf/hive-site.xml
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<property>
<name>javax.jdo.option.ConnectionURL</name>
<value><![CDATA[jdbc:mysql://localhost/hive_metastore?autoReconnect=true&useSSL=false]]></value>
</property>
<property>
<name>javax.jdo.option.ConnectionDriverName</name>
<value>com.mysql.jdbc.Driver</value>
</property>
<property>
<name>javax.jdo.option.ConnectionUserName</name>
<value>hive</value>
</property>
<property>
<name>javax.jdo.option.ConnectionPassword</name>
<value>【パスワード】</value>
</property>
<property>
<name>datanucleus.fixedDatastore</name>
<value>false</value>
</property>
<property>
<name>hive.exec.local.scratchdir</name>
<value>/tmp/hive</value>
</property>
<property>
<name>hive.downloaded.resources.dir</name>
<value>/tmp/hive</value>
</property>
<property>
<name>hive.querylog.location</name>
<value>/tmp/hive</value>
</property>
<property>
<name>hive.server2.enable.doAs</name>
<value>false</value>
</property>
</configuration>
schematool -dbType mysql -initSchema
hive
amazon s3 を hadoop データ置き場に使う場合†
$HADOOP_HOME/etc/hadoop/core-site.xml
<property>
<name>fs.s3a.impl</name>
<value>org.apache.hadoop.fs.s3a.S3AFileSystem</value>
</property>
<property>
<name>fs.s3a.access.key</name>
<value>【AWSアクセスキー】</value>
</property>
<property>
<name>fs.s3a.secret.key</name>
<value>【AWSシークレットアクセスキー】</value>
</property>
$HADOOP_HOME/etc/hadoop/hadoop-env.sh
export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$HADOOP_HOME/share/hadoop/tools/lib/*