The Maven dependencies are as follows:
<dependencies>
    <!-- MySQL JDBC driver -->
    <dependency>
        <groupId>mysql</groupId>
        <artifactId>mysql-connector-java</artifactId>
        <version>5.1.26</version>
    </dependency>
    <!-- Spark SQL with Hive support; both Spark artifacts share one version property -->
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-hive_2.12</artifactId>
        <version>${spark.version}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-sql_2.12</artifactId>
        <version>${spark.version}</version>
    </dependency>
    <!-- https://mvnrepository.com/artifact/com.fasterxml.jackson.module/jackson-module-scala -->
    <!-- Jackson modules pinned to a single version to avoid runtime conflicts -->
    <dependency>
        <groupId>com.fasterxml.jackson.module</groupId>
        <artifactId>jackson-module-scala_2.12</artifactId>
        <version>2.10.3</version>
    </dependency>
    <dependency>
        <groupId>com.fasterxml.jackson.module</groupId>
        <artifactId>jackson-module-jaxb-annotations</artifactId>
        <version>2.10.3</version>
    </dependency>
    <dependency>
        <groupId>com.fasterxml.jackson.core</groupId>
        <artifactId>jackson-databind</artifactId>
        <version>2.10.3</version>
    </dependency>
    <dependency>
        <groupId>com.fasterxml.jackson.core</groupId>
        <artifactId>jackson-annotations</artifactId>
        <version>2.10.3</version>
    </dependency>
</dependencies>
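The Spark artifacts reference a ${spark.version} property that is not shown above; it is assumed to be defined in the POM's <properties> block, pinned to the 2.4.5 this example was written against:

<properties>
    <spark.version>2.4.5</spark.version>
</properties>

For enableHiveSupport() below to reach an existing metastore, hive-site.xml (and typically core-site.xml / hdfs-site.xml) is normally placed on the classpath, e.g. under src/main/resources. With the POM in place, the example program is: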
package com.bytedance

import org.apache.spark.sql.SparkSession

object connectHive {

  def main(args: Array[String]): Unit = {
    // Run as the Hadoop user that has write access to the warehouse directory
    System.setProperty("HADOOP_USER_NAME", "hadoopadmin")

    // Create the SparkSession with Hive support enabled
    val spark = SparkSession
      .builder()
      .master("local[2]")
      .appName("Spark Hive Example")
      .config("spark.sql.warehouse.dir", "hdfs://cdhalone:9000/user/hive/warehouse")
      .enableHiveSupport()
      .getOrCreate()

    import spark.implicits._ // Dataset/DataFrame conversions (not strictly needed here)
    import spark.sql         // lets us call sql(...) without the spark. prefix

    sql("show databases").show()
    sql("use for_zuoye")
    sql("show tables").show(100)
    sql("set hive.exec.mode.local.auto = true").show() // no trailing semicolon inside sql()
    sql("select * from window01").show()
    sql("insert into window01 values ('1', '2', 3)")
    sql("select * from window01").show()

    spark.close()
  }
}
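The POM declares the MySQL JDBC driver, but the program above never touches it. Below is a minimal sketch of the usual reason for pulling it in: writing a query result out to MySQL through Spark's JDBC data source. The URL, target table, and credentials are hypothetical placeholders, not from the original, and the snippet would sit inside main, before spark.close():

// Hedged sketch: export the Hive table to MySQL over JDBC.
// Connection details are hypothetical placeholders, not from the original.
import java.util.Properties

val props = new Properties()
props.setProperty("user", "root")                     // placeholder credentials
props.setProperty("password", "123456")
props.setProperty("driver", "com.mysql.jdbc.Driver")  // driver class shipped in mysql-connector-java 5.1.x

sql("select * from window01")
  .write
  .mode("append") // append to the target table instead of replacing it
  .jdbc("jdbc:mysql://localhost:3306/test", "window01_copy", props)

"append" is the safer save mode for an existing table; "overwrite" would drop and recreate it by default.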