• flinkSinkES


    import java.util
    
    import it.bigdata.flink.study.SensorReding
    import org.apache.flink.api.common.functions.RuntimeContext
    import org.apache.flink.streaming.api.scala._
    import org.apache.flink.streaming.connectors.elasticsearch.{ElasticsearchSinkFunction, RequestIndexer}
    import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink
    import org.apache.http.HttpHost
    import org.elasticsearch.client.Requests
    
    object EsSinkTest {
      def main(args: Array[String]): Unit = {
        // Set up the streaming execution environment; parallelism 1 keeps local output deterministic.
        val env = StreamExecutionEnvironment.getExecutionEnvironment
        env.setParallelism(1)

        // Read the raw sensor file.
        // BUG FIX: the original path "D:\ideaDemo\..." used unescaped backslashes — "\i", "\m",
        // "\s", "\r" are invalid escape sequences and do not compile in Scala. Forward slashes
        // work on Windows as well and avoid the escaping problem entirely.
        val inputPath = "D:/ideaDemo/maven_flink/src/main/resources/sensor.txt"
        val inputStream = env.readTextFile(inputPath)

        // Parse each CSV line "id,timestamp,temperature" into a SensorReding record.
        val dataStream = inputStream.map(data => {
          val arr = data.split(",")
          SensorReding(arr(0), arr(1).toLong, arr(2).toDouble)
        })

        // Elasticsearch cluster addresses.
        // BUG FIX: 127.0.0.0 is the loopback *network* address, not a reachable host;
        // the local node listens on 127.0.0.1.
        val httpHosts = new util.ArrayList[HttpHost]()
        httpHosts.add(new HttpHost("127.0.0.1", 9200))

        // Custom sink function: turns each SensorReding into an index request and hands it
        // to the RequestIndexer, which the connector flushes to Elasticsearch (bulk) for us.
        val myEsSinkFunc = new ElasticsearchSinkFunction[SensorReding] {
          override def process(t: SensorReding, runtimeContext: RuntimeContext, requestIndexer: RequestIndexer): Unit = {
            // Wrap the record fields in a Map that serves as the document source.
            val dataSource = new util.HashMap[String, String]()
            dataSource.put("id", t.id)
            dataSource.put("temperature", t.temperature.toString)
            dataSource.put("ts", t.timestamp.toString)

            // Build the index request (index "sensor", mapping type "readingdata" — ES 6.x
            // still requires a type; removed in ES 7+).
            val indexRequest = Requests.indexRequest()
              .index("sensor")
              .`type`("readingdata")
              .source(dataSource)

            // Queue the request; the connector sends it asynchronously.
            requestIndexer.add(indexRequest)
          }
        }

        // Attach the Elasticsearch sink built from the host list and the sink function.
        // (Collapsed the original awkward `new ElasticsearchSink` / leading-dot `.Builder`
        // line split into a single readable expression.)
        dataStream.addSink(
          new ElasticsearchSink.Builder[SensorReding](httpHosts, myEsSinkFunc).build()
        )

        env.execute("es sink test")
      }
    }
    author@nohert
  • 相关阅读:
    为django项目创建虚拟环境
    linux下安装python
    使用scrapy-crawlSpider 爬取tencent 招聘
    linux基础3
    Scrapy
    scrapy-Redis 分布式爬虫
    scrapy-redis(一)
    Linux中文件上传使用rz
    centos 7 安装nginx
    MySQL 5.7 zip 文件安装过程
  • 原文地址:https://www.cnblogs.com/gzgBlog/p/14928228.html
Copyright © 2020-2023  润新知