package datasource
import org.apache.spark.{SparkConf, SparkContext}
import org.elasticsearch.spark.sql.EsSparkSQL
object Data2ES {

  /** Spark entry point: reads the Hive table `order_index` and bulk-writes
    * every row into Elasticsearch index/type `order_index_3/order_3`.
    *
    * Expects master/app-name and Hive configuration to be supplied
    * externally (e.g. via spark-submit), since SparkConf is built empty.
    *
    * @param args unused; connection settings are currently hard-coded below
    */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    val sc = new SparkContext(conf)
    try {
      val sqlContext = new org.apache.spark.sql.hive.HiveContext(sc)

      // elasticsearch-hadoop connector settings.
      // es.index.auto.create=true lets ES create the target index on first
      // write; es.write.operation=index overwrites docs with the same id.
      val options = Map(
        "es.nodes" -> "192.168.111.76",
        "es.port" -> "9200",
        "es.index.auto.create" -> "true",
        "es.write.operation" -> "index"
      )

      // Repartition the DataFrame directly to spread the ES bulk load over
      // 80 tasks. (The original detoured through .rdd + createDataFrame,
      // which only discards the logical plan and re-applies the schema —
      // DataFrame.repartition achieves the same distribution in one step.)
      val df = sqlContext.sql("select * from order_index").repartition(80)

      EsSparkSQL.saveToEs(df, "order_index_3/order_3", options)
    } finally {
      // Always release the SparkContext, even if the query or the
      // Elasticsearch write fails — otherwise the app can hang on exit.
      sc.stop()
    }
  }
}