flink-sql解析canal-json實現實時同步


package com.lezhi.business.dxxbs.transmission.table

import com.lezhi.common.{CommonTransmissonFunciton, SystemParams}
import org.apache.flink.streaming.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._

/**
 * Real-time sync job for the `user_login` table: reads canal-json change
 * events from Kafka and upserts them into a JDBC (MySQL-style) sink.
 *
 * NOTE(review): the sink table must declare a primary key — otherwise an
 * UPDATE event appends a new row instead of replacing the old one.
 */
object user_login {
  def main(args: Array[String]): Unit = {

    // Streaming environment with the Blink planner (required for canal-json CDC).
    val bsEnv = StreamExecutionEnvironment.getExecutionEnvironment
    val bsSettings = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build()
    val bnv = StreamTableEnvironment.create(bsEnv, bsSettings)

    val table_name = "user_login"
    val primaryKey = "USER_ID"
    val table_column =
      """
        |USER_ID  STRING,
        |USER_PHONE  STRING,
        |USER_PWD  STRING,
        |CREAT_TIME  STRING,
        |UPLOAD_TIME  STRING,
        |UNION_ID  STRING,
        |OPEN_ID  STRING
        |""".stripMargin

    // Kafka source: consumes canal-json change events, keeping only rows for
    // this table ('canal-json.table.include'); parse errors yield NULL fields
    // instead of failing the job.
    val sql_source_table =
      s"""CREATE TABLE source_table_$table_name (
         |$table_column
         |) WITH (
         |  'connector' = 'kafka',
         |  'topic' = '${SystemParams.TOPIC}',
         |  'properties.bootstrap.servers' = '${SystemParams.BOOTSTRAP_SERVER}',
         |  'scan.startup.mode' = 'earliest-offset',
         |  'format' = 'canal-json',
         |  'canal-json.ignore-parse-errors' = 'true',
         |  'canal-json.table.include' = '$table_name'
         |)""".stripMargin

    bnv.executeSql(sql_source_table)

    // JDBC sink: the PRIMARY KEY (NOT ENFORCED) switches the connector into
    // upsert mode, so updates replace existing rows keyed by USER_ID.
    val sql_result_table =
      s"""CREATE TABLE sink_table_$table_name (
         |$table_column
         |,PRIMARY KEY ($primaryKey) NOT ENFORCED
         |) WITH (
         |  'connector' = 'jdbc',
         |  'url' = '${SystemParams.JDBC_URL_BYMM}',
         |  'table-name' = '$table_name',
         |  'username' = '${SystemParams.JDBC_USERNAME}',
         |  'password' = '${SystemParams.JDBC_PASSWORD}'
         |)""".stripMargin

    println(sql_result_table)
    bnv.executeSql(sql_result_table)

    // executeSql on an INSERT statement submits the job by itself (asynchronously).
    // The former trailing `bnv.execute(table_name)` call was removed: it is
    // deprecated and fails with "No operators defined in streaming topology"
    // because no DataStream transformations were registered on this environment.
    bnv.executeSql(s"INSERT INTO sink_table_$table_name SELECT * FROM source_table_$table_name")
  }
}

注意:下沉(sink)的目標表必須要有主鍵;否則在更新數據時,舊數據和新數據會同時存在。


免責聲明!

本站轉載的文章為個人學習借鑒使用,本站對版權不負任何法律責任。如果侵犯了您的隱私權益,請聯系本站郵箱yoyou2525@163.com刪除。



 
粵ICP備18138465號   © 2018-2025 CODEPRJ.COM