1 Flowchart
Overall flow: city-side Kafka topic → Flink → mid-platform Kafka topic ODS_QL_JC_SSJC → Flink → Doris table doris_d.ods_ql_jc_ssjc.

2 Flink source table DDL
Two Kafka source tables are defined: NJ_QL_JC_SSJC_SOURCE reads the city-side topic, and ODS_QL_JC_SSJC_SOURCE reads the mid-platform topic ODS_QL_JC_SSJC for the load into Doris in step 5.
CREATE TABLE NJ_QL_JC_SSJC_SOURCE (
    -- column definitions omitted in the original
) WITH (
    'connector' = 'kafka',
    -- 'topic' and 'format' options omitted in the original
    'properties.bootstrap.servers' = '172.*.*.*:9092',
    'properties.group.id' = 'QL_JC_SSJC_NJ_QL_JC_SSJC_SOURCE',
    'scan.startup.mode' = 'group-offsets',
    'properties.isolation.level' = 'read_committed',
    'properties.auto.offset.reset' = 'earliest'
);
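The column list is elided above. Purely for illustration, a minimal sketch of what a complete city-side source definition could look like; the column names, topic name, and 'json' format below are hypothetical, not taken from this document.
-- Hypothetical example only: columns, topic name and format are illustrative.
CREATE TABLE NJ_QL_JC_SSJC_SOURCE_EXAMPLE (
    STCD         STRING,        -- hypothetical station/site code
    MONITOR_TIME TIMESTAMP(3),  -- hypothetical monitoring time
    MONITOR_VAL  DOUBLE         -- hypothetical measured value
) WITH (
    'connector' = 'kafka',
    'topic' = 'NJ_QL_JC_SSJC',                        -- hypothetical topic name
    'properties.bootstrap.servers' = '172.*.*.*:9092',
    'properties.group.id' = 'QL_JC_SSJC_NJ_QL_JC_SSJC_SOURCE',
    'scan.startup.mode' = 'group-offsets',
    'properties.auto.offset.reset' = 'earliest',
    'properties.isolation.level' = 'read_committed',
    'format' = 'json'                                 -- hypothetical format
);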
CREATE TABLE ODS_QL_JC_SSJC_SOURCE (
    -- column definitions omitted in the original
) WITH (
    'connector' = 'kafka',
    'topic' = 'ODS_QL_JC_SSJC',
    'properties.bootstrap.servers' = '172.*.*.*:21007,172.*.*.*:21007,172.*.*.*:21007',
    'properties.security.protocol' = 'SASL_PLAINTEXT',
    'properties.sasl.kerberos.service.name' = 'kafka',
    'properties.kerberos.domain.name' = 'hadoop.hadoop.com',
    'properties.group.id' = 'ODS_QL_JC_SSJC_SOURCE_ODS_QL_JC_SSJC_SOURCE',
    'scan.startup.mode' = 'group-offsets',
    'properties.auto.offset.reset' = 'earliest',
    'properties.isolation.level' = 'read_committed'
    -- 'format' option omitted in the original; 'sink.semantic' = 'exactly-once' is a
    -- sink-side option and belongs on KAFKA_ODS_QL_JC_SSJC_SINK (section 3), not here
);
3 Flink sink table DDL
Two sink tables are defined: KAFKA_ODS_QL_JC_SSJC_SINK writes to the mid-platform topic ODS_QL_JC_SSJC, and DORIS_ODS_QL_JC_SSJC_SINK writes to the Doris table doris_d.ods_ql_jc_ssjc.
CREATE TABLE KAFKA_ODS_QL_JC_SSJC_SINK (
    -- column definitions omitted in the original
) WITH (
    'connector' = 'kafka',
    'topic' = 'ODS_QL_JC_SSJC',
    'properties.bootstrap.servers' = '172.*.*.*:21007,172.*.*.*:21007,172.*.*.*:21007',
    'properties.security.protocol' = 'SASL_PLAINTEXT',
    'properties.sasl.kerberos.service.name' = 'kafka',
    'properties.kerberos.domain.name' = 'hadoop.hadoop.com',
    'sink.semantic' = 'exactly-once',
    'properties.transaction.timeout.ms' = '900000'
    -- 'format' option omitted in the original
);
CREATE TABLE DORIS_ODS_QL_JC_SSJC_SINK (
    -- column definitions omitted in the original
) WITH (
    'connector' = 'doris',  -- username/password options omitted in the original
    'fenodes' = '3.*.*.*:8030,3.*.*.*:8030,3.*.*.*:8030',
    'table.identifier' = 'doris_d.ods_ql_jc_ssjc',
    'sink.properties.two_phase_commit' = 'true'
);
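Exactly-once delivery to the Kafka sink and the Doris two-phase commit both rely on Flink checkpoints: Kafka transactions and Doris stream-load transactions are committed when a checkpoint completes, and 'properties.transaction.timeout.ms' (900000 ms above) must exceed the checkpoint interval. A minimal sketch of the corresponding job settings, assuming submission from the Flink SQL client; the interval value is an assumption, not from the original.
-- Assumed checkpoint settings (values are illustrative); without checkpointing,
-- the exactly-once / two-phase-commit options above do not take effect.
SET 'execution.checkpointing.interval' = '60s';
SET 'execution.checkpointing.mode' = 'EXACTLY_ONCE';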
4 Flink SQL from the city topic to the mid-platform topic
insert into KAFKA_ODS_QL_JC_SSJC_SINK
select
    -- business columns omitted in the original
    CURRENT_TIMESTAMP as extract_time
from NJ_QL_JC_SSJC_SOURCE;
5 Flink SQL from the mid-platform topic to Doris
insert into DORIS_ODS_QL_JC_SSJC_SINK
select
    -- business columns omitted in the original
    CURRENT_TIMESTAMP as extract_time
from ODS_QL_JC_SSJC_SOURCE;
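As a quick sanity check that data is flowing end to end, the target table can be queried in a Doris client; extract_time is the column written by the INSERT above.
-- Run against Doris: row count and the latest ingestion timestamp.
SELECT COUNT(*) AS row_cnt, MAX(extract_time) AS latest_extract_time
FROM doris_d.ods_ql_jc_ssjc;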