• FlinkCDC for mysql to Clickhouse


    完整依赖

    <dependencies>
        <!-- All Flink artifacts are pinned to 1.13.0; the "_2.12" suffix is
             the Scala version and must stay consistent across artifacts. -->
        <!-- https://mvnrepository.com/artifact/org.apache.flink/flink-core -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-core</artifactId>
            <version>1.13.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-java_2.12</artifactId>
            <version>1.13.0</version>
        </dependency>

        <!-- Legacy flink-jdbc was superseded by flink-connector-jdbc below. -->
<!--       <dependency>-->
<!--           <groupId>org.apache.flink</groupId>-->
<!--           <artifactId>flink-jdbc_2.12</artifactId>-->
<!--           <version>1.10.3</version>-->
<!--       </dependency>-->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-connector-jdbc_2.12</artifactId>
            <version>1.13.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-java</artifactId>
            <version>1.13.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients_2.12</artifactId>
            <version>1.13.0</version>
        </dependency>
        <!-- Table API / SQL dependencies (the blink planner is used by the
             SQL CDC example via EnvironmentSettings.useBlinkPlanner()). -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-java-bridge_2.12</artifactId>
            <version>1.13.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-common</artifactId>
            <version>1.13.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner_2.12</artifactId>
            <version>1.13.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner-blink_2.12</artifactId>
            <version>1.13.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner-blink_2.12</artifactId>
            <version>1.13.0</version>
            <type>test-jar</type>
        </dependency>

        <!-- Debezium-based MySQL CDC source connector. -->
        <dependency>
            <groupId>com.alibaba.ververica</groupId>
            <artifactId>flink-connector-mysql-cdc</artifactId>
            <version>1.4.0</version>
        </dependency>

        <!-- NOTE(review): com.aliyun:flink-connector-clickhouse does not
             appear to be published on Maven Central; confirm it is available
             from a local/private repository before building. -->
        <dependency>
            <groupId>com.aliyun</groupId>
            <artifactId>flink-connector-clickhouse</artifactId>
            <version>1.12.0</version>
        </dependency>
        <!-- Legacy ClickHouse JDBC driver; provides
             ru.yandex.clickhouse.ClickHouseDriver used by ClickhouseSink. -->
        <dependency>
            <groupId>ru.yandex.clickhouse</groupId>
            <artifactId>clickhouse-jdbc</artifactId>
            <version>0.2.6</version>
        </dependency>
        <!-- JSON (de)serialization of change events. -->
        <dependency>
            <groupId>com.google.code.gson</groupId>
            <artifactId>gson</artifactId>
            <version>2.8.6</version>
        </dependency>
    </dependencies>
    

    Flink CDC

    package name.lijiaqi.cdc;
     
    import com.alibaba.ververica.cdc.debezium.DebeziumDeserializationSchema;
    import com.google.gson.Gson;
    import com.google.gson.internal.LinkedTreeMap;
    import io.debezium.data.Envelope;
    import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
    import org.apache.flink.api.common.typeinfo.TypeInformation;
    import org.apache.flink.configuration.Configuration;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
    import org.apache.flink.streaming.api.functions.source.SourceFunction;
    import com.alibaba.ververica.cdc.connectors.mysql.MySQLSource;
    import org.apache.flink.util.Collector;
    import org.apache.kafka.connect.source.SourceRecord;
     
    import org.apache.kafka.connect.data.Field;
    import org.apache.kafka.connect.data.Schema;
    import org.apache.kafka.connect.data.Struct;
     
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.PreparedStatement;
    import java.util.HashMap;
     
    public class MySqlBinlogSourceExample {
       public static void main(String[] args) throws Exception {
           SourceFunction<String> sourceFunction = MySQLSource.<String>builder()
                  .hostname("localhost")
                  .port(3306)
                  .databaseList("test")
                  .username("flinkcdc")
                  .password("dafei1288")
                  .deserializer(new JsonDebeziumDeserializationSchema())
                  .build();
     
           StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
     
           // 添加 source
           env.addSource(sourceFunction)
           // 添加 sink
          .addSink(new ClickhouseSink());
     
           env.execute("mysql2clickhouse");
      }
     
       // 将cdc数据反序列化
       public static class JsonDebeziumDeserializationSchema implements DebeziumDeserializationSchema {
           @Override
           public void deserialize(SourceRecord sourceRecord, Collector collector) throws Exception {
     
               Gson jsstr = new Gson();
               HashMap<String, Object> hs = new HashMap<>();
     
               String topic = sourceRecord.topic();
               String[] split = topic.split("[.]");
               String database = split[1];
               String table = split[2];
               hs.put("database",database);
               hs.put("table",table);
               //获取操作类型
               Envelope.Operation operation = Envelope.operationFor(sourceRecord);
               //获取数据本身
               Struct struct = (Struct)sourceRecord.value();
               Struct after = struct.getStruct("after");
     
               if (after != null) {
                   Schema schema = after.schema();
                   HashMap<String, Object> afhs = new HashMap<>();
                   for (Field field : schema.fields()) {
                       afhs.put(field.name(), after.get(field.name()));
                  }
                   hs.put("data",afhs);
              }
     
               String type = operation.toString().toLowerCase();
               if ("create".equals(type)) {
                   type = "insert";
              }
               hs.put("type",type);
     
               collector.collect(jsstr.toJson(hs));
          }
     
           @Override
           public TypeInformation<String> getProducedType() {
               return BasicTypeInfo.STRING_TYPE_INFO;
          }
      }
     
     
       public static class ClickhouseSink extends RichSinkFunction<String>{
           Connection connection;
           PreparedStatement pstmt;
           private Connection getConnection() {
               Connection conn = null;
               try {
                   Class.forName("ru.yandex.clickhouse.ClickHouseDriver");
                   String url = "jdbc:clickhouse://localhost:8123/default";
                   conn = DriverManager.getConnection(url,"default","dafei1288");
     
              } catch (Exception e) {
                   e.printStackTrace();
              }
               return conn;
          }
     
           @Override
           public void open(Configuration parameters) throws Exception {
               super.open(parameters);
               connection = getConnection();
               String sql = "insert into sink_ch_test(id,name,description) values (?,?,?)";
               pstmt = connection.prepareStatement(sql);
          }
     
           // 每条记录插入时调用一次
           public void invoke(String value, Context context) throws Exception {
               //{"database":"test","data":{"name":"jacky","description":"fffff","id":8},"type":"insert","table":"test_cdc"}
               Gson t = new Gson();
               HashMap<String,Object> hs = t.fromJson(value,HashMap.class);
               String database = (String)hs.get("database");
               String table = (String)hs.get("table");
               String type = (String)hs.get("type");
     
               if("test".equals(database) && "test_cdc".equals(table)){
                   if("insert".equals(type)){
                       System.out.println("insert => "+value);
                       LinkedTreeMap<String,Object> data = (LinkedTreeMap<String,Object>)hs.get("data");
                       String name = (String)data.get("name");
                       String description = (String)data.get("description");
                       Double id = (Double)data.get("id");
                       // 未前面的占位符赋值
                       pstmt.setInt(1, id.intValue());
                       pstmt.setString(2, name);
                       pstmt.setString(3, description);
     
                       pstmt.executeUpdate();
                  }
              }
          }
     
           @Override
           public void close() throws Exception {
               super.close();
     
               if(pstmt != null) {
                   pstmt.close();
              }
     
               if(connection != null) {
                   connection.close();
              }
          }
      }
    }
    

    Flink SQL CDC

    package name.lijiaqi.cdc;
     
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    import org.apache.flink.table.api.EnvironmentSettings;
    import org.apache.flink.table.api.SqlDialect;
    import org.apache.flink.table.api.TableResult;
    import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
     
    public class MysqlToMysqlMain {
       public static void main(String[] args) throws Exception {
           EnvironmentSettings fsSettings = EnvironmentSettings.newInstance()
                  .useBlinkPlanner()
                  .inStreamingMode()
                  .build();
           StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
           env.setParallelism(1);
           StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env, fsSettings);
     
     
     
           tableEnv.getConfig().setSqlDialect(SqlDialect.DEFAULT);
     
     
           // 数据源表
           String sourceDDL =
                   "CREATE TABLE mysql_binlog (\n" +
                           " id INT NOT NULL,\n" +
                           " name STRING,\n" +
                           " description STRING\n" +
                           ") WITH (\n" +
                           " 'connector' = 'mysql-cdc',\n" +
                           " 'hostname' = 'localhost',\n" +
                           " 'port' = '3306',\n" +
                           " 'username' = 'flinkcdc',\n" +
                           " 'password' = 'dafei1288',\n" +
                           " 'database-name' = 'test',\n" +
                           " 'table-name' = 'test_cdc'\n" +
                           ")";
     
     
           String url = "jdbc:mysql://127.0.0.1:3306/test";
           String userName = "root";
           String password = "dafei1288";
           String mysqlSinkTable = "test_cdc_sink";
           // 输出目标表
           String sinkDDL =
                   "CREATE TABLE test_cdc_sink (\n" +
                           " id INT NOT NULL,\n" +
                           " name STRING,\n" +
                           " description STRING,\n" +
                           " PRIMARY KEY (id) NOT ENFORCED \n " +
                           ") WITH (\n" +
                           " 'connector' = 'jdbc',\n" +
                           " 'driver' = 'com.mysql.jdbc.Driver',\n" +
                           " 'url' = '" + url + "',\n" +
                           " 'username' = '" + userName + "',\n" +
                           " 'password' = '" + password + "',\n" +
                           " 'table-name' = '" + mysqlSinkTable + "'\n" +
                           ")";
           // 简单的聚合处理
           String transformSQL =
                   "insert into test_cdc_sink select * from mysql_binlog";
     
           tableEnv.executeSql(sourceDDL);
           tableEnv.executeSql(sinkDDL);
           TableResult result = tableEnv.executeSql(transformSQL);
     
           // 等待flink-cdc完成快照
           result.print();
           env.execute("sync-flink-cdc");
      }
     
    }
    
  • 相关阅读:
    Stm32_标准库_7_光敏传感器
    MicroStation二次开发问题记录(1):打开项目时自动加载dll文件
    远程服务器Ubuntu 18.04安装VNC远程桌面
    色氨酸乙酯双三氟甲基磺酰亚胺[TrpC2][Tf2N]离子液体
    详解KubeEdge边缘网络项目EdgeMesh
    RocketMQ、Kafka、RabbitMQ 消费原理,顺序消费问题【图文理解】
    Vue(七)——vuex(vuex基础,使用,state,getters,mutations,actions基本概念与使用以及基本实现案例)
    SQL UNION 运算符
    【面试】 C/C++面试题
    [附源码]java毕业设计咖啡销售管理系统-
  • 原文地址:https://blog.csdn.net/Alecor/article/details/133949843