1. Start spark-shell with the MySQL connector jar on the classpath (needed so Spark can reach the MySQL-backed Hive metastore)
spark-shell --master local[2] --jars /Users/walle/app/mysql-connector-java-8.0.11.jar
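With the connector on the classpath, the same shell can also read MySQL tables directly over JDBC. A minimal sketch, not from the original post, assuming a local MySQL instance with a sparktest database, a student table, and root/root credentials (all hypothetical; adjust to your setup):

val jdbcDF = spark.read
  .format("jdbc")
  .option("url", "jdbc:mysql://localhost:3306/sparktest") // hypothetical connection URL
  .option("driver", "com.mysql.cj.jdbc.Driver")           // driver class shipped with connector 8.x
  .option("dbtable", "student")
  .option("user", "root")                                 // assumed credentials
  .option("password", "root")
  .load()
jdbcDF.show()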
2. Simple SQL queries in the shell
spark.sql("show databases").show
spark.sql("show tables").show
spark.sql("use sparktest")
spark.sql("select * from student").show
3. Using SparkSession in application code
package com.waitingfy

import java.io.File

import org.apache.spark.sql.SparkSession

object sparkhive {

  // Case classes used with Datasets should be defined outside the method
  // so that Spark can derive encoders for them
  case class Record(key: Int, value: String)

  def main(args: Array[String]): Unit = {
    // warehouseLocation points to the default location for managed databases and tables
    val warehouseLocation = new File("spark-warehouse").getAbsolutePath

    val spark = SparkSession
      .builder()
      .master("local[2]")
      .appName("Spark Hive Example")
      .config("spark.sql.warehouse.dir", warehouseLocation)
      .enableHiveSupport() // required to read and write Hive tables
      .getOrCreate()

    import spark.implicits._
    import spark.sql

    sql("CREATE TABLE IF NOT EXISTS src (key INT, value STRING) USING hive")
    sql("LOAD DATA LOCAL INPATH '/usr/local/Cellar/spark-2.3.0/examples/src/main/resources/kv1.txt' INTO TABLE src")

    // Queries are expressed in HiveQL
    // sql("SELECT * FROM src").show()
    val sqlDF = sql("SELECT key, value FROM src WHERE key < 10 ORDER BY key")
    sqlDF.show()

    spark.close()
  }
}
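To build this as a standalone project rather than pasting it into the shell, the Hive support module must be on the compile classpath. A minimal build.sbt sketch, assuming Spark 2.3.0 on Scala 2.11 (versions are assumptions; match whatever your cluster runs):

name := "sparkhive"
version := "0.1"
scalaVersion := "2.11.12"

libraryDependencies ++= Seq(
  // mark the Spark artifacts as "provided" instead if you deploy with spark-submit
  "org.apache.spark" %% "spark-sql"  % "2.3.0",
  "org.apache.spark" %% "spark-hive" % "2.3.0",
  "mysql" % "mysql-connector-java" % "8.0.11"
)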