其他分享
首页 > 其他分享 > hive on spark 关于hive的配置

hive on spark 关于hive的配置

作者:互联网

1.   hive-site.xml 

 

[root@hadoop101 conf]# vi hive-site.xml
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <!-- JDBC connection for the Hive metastore, backed by MySQL on hadoop101.
       createDatabaseIfNotExist=true lets the schema tool create the
       "metastore" database on first run. -->
  <property>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://hadoop101:3306/metastore?createDatabaseIfNotExist=true</value>
    <description>JDBC connect string for a JDBC metastore</description>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>com.mysql.jdbc.Driver</value>
    <description>Driver class name for a JDBC metastore</description>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>root</value>
    <description>username to use against metastore database</description>
  </property>
  <!-- NOTE(review): password stored in plaintext; acceptable for a test
       cluster only. Consider a credential provider for production. -->
  <property>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>root01</value>
    <description>password to use against metastore database</description>
  </property>

  <!-- Switch Hive's execution engine from the default MapReduce to Spark. -->
  <property>
    <name>hive.execution.engine</name>
    <value>spark</value>
    <description>
      Expects one of [mr, tez, spark].
      Chooses execution engine. Options are: mr (Map reduce, default), tez (hadoop 2 only), spark
    </description>
  </property>

  <!-- spark.yarn.jars takes a LIST of jars (globs allowed), not a bare
       directory: a directory path without the trailing /* matches no jars
       and Spark-on-YARN falls back to uploading jars on every job (or the
       application fails to find them). Hence the trailing wildcard. -->
  <property>
    <name>spark.yarn.jars</name>
    <value>hdfs://hadoop101:9000/user/hive/warehouse/spark_jars/*</value>
    <description>
      HDFS location of the Spark jars uploaded for Hive on Spark (glob).
    </description>
  </property>

  <!-- Disable metastore schema version verification so Hive starts even if
       the recorded schema version does not match this Hive release
       (convenient on a sandbox; keep true in production). -->
  <property>
    <name>hive.metastore.schema.verification</name>
    <value>false</value>
  </property>
</configuration>
 

 

在 yarn-site.xml 中增加以下配置（指向 hadoop102 上的 ResourceManager）：

<property>
    <name>yarn.resourcemanager.address</name>
    <value>hadoop102:8032</value>
  </property>
  <property>
    <name>yarn.resourcemanager.scheduler.address</name>
    <value>hadoop102:8030</value>
  </property>
  <property>
    <name>yarn.resourcemanager.resource-tracker.address</name>
    <value>hadoop102:8031</value>
  </property>

  

 

cp spark-env.sh /opt/module/apache-hive-1.2.1-bin/conf/

将 MySQL 的 JDBC 驱动 jar 包放到 hive 的 lib 目录下

 

 

# hadoop fs -mkdir /user/hive/warehouse/spark_jars
# hadoop fs -put jars/* /user/hive/warehouse/spark_jars
# hadoop fs -rm -r /user/hive/warehouse/spark_jars

  

 

 

 

 

标签:配置,hadoop,hive,javax,metastore,spark,jars
来源: https://www.cnblogs.com/mengbin0546/p/16683771.html