Flink Output to Elasticsearch


1. Code

import java.util
import org.apache.flink.api.common.functions.RuntimeContext
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.streaming.connectors.elasticsearch.{ElasticsearchSinkFunction, RequestIndexer}
import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink
import org.apache.http.HttpHost
import org.elasticsearch.client.Requests

// Sample case class for temperature sensor readings
case class SensorReading(id: String, timestamp: Long, temperature: Double)

object EsSinkTest {
  def main(args: Array[String]): Unit = {
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // source
    val inputStream = env.readTextFile("sensor1.txt")

    // transform
    import org.apache.flink.api.scala._
    val dataStream = inputStream.map(x => {
      val arr = x.split(",")
      SensorReading(arr(0).trim, arr(1).trim.toLong, arr(2).trim.toDouble)
    })

    // sink
    val httpHosts = new util.ArrayList[HttpHost]()
    httpHosts.add(new HttpHost("localhost", 9200))
    // Create an ElasticsearchSink builder
    val esSinkBuilder = new ElasticsearchSink.Builder[SensorReading](
      httpHosts,
      new ElasticsearchSinkFunction[SensorReading] {
        override def process(t: SensorReading, runtimeContext: RuntimeContext, requestIndexer: RequestIndexer): Unit = {
          println("saving data: " + t)
          // Wrap the record in a Map (or a JsonObject)
          val hashMap = new util.HashMap[String, String]()
          hashMap.put("sensor_id", t.id)
          hashMap.put("temperature", t.temperature.toString)
          hashMap.put("timestamp", t.timestamp.toString)
          // Create the index request that carries the data
          val indexRequest = Requests.indexRequest().index("sensor").`type`("readingData").source(hashMap)
          // Hand the request to the indexer, which writes it to Elasticsearch
          requestIndexer.add(indexRequest)
          println("data saved successfully")
        }
      }
    )
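    // Optional: flush after every element so a small test input shows up in
    // Elasticsearch immediately (by default the connector buffers requests and
    // sends them in bulk).
    esSinkBuilder.setBulkFlushMaxActions(1)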

    dataStream.addSink(esSinkBuilder.build())

    env.execute("es sink test")
  }
}
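To build the job, the Flink Scala APIs and the elasticsearch6 connector must be on the classpath. A minimal build.sbt sketch, assuming sbt and Flink 1.10.1 (the build tool and version are assumptions; use whatever matches your cluster):

// build.sbt (illustrative; adjust versions to your environment)
val flinkVersion = "1.10.1"

libraryDependencies ++= Seq(
  "org.apache.flink" %% "flink-scala"                    % flinkVersion,
  "org.apache.flink" %% "flink-streaming-scala"          % flinkVersion,
  "org.apache.flink" %% "flink-connector-elasticsearch6" % flinkVersion
)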

2. Start Elasticsearch
3. Visit 127.0.0.1:9200/sensor/_search?pretty; the response lists the indexed sensor documents.
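If you would rather verify from code than from a browser, here is a minimal Scala sketch that fetches the same endpoint (the object name is illustrative; it assumes Elasticsearch is reachable at 127.0.0.1:9200):

import scala.io.Source

object EsSearchCheck {
  def main(args: Array[String]): Unit = {
    // Query the "sensor" index and print the raw JSON search response
    val response = Source.fromURL("http://127.0.0.1:9200/sensor/_search?pretty").mkString
    println(response)
  }
}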

  

If this was helpful, comments and tips are welcome. Thanks!

