This post walks through a simple simulation of flow-log processing. The rough steps:
1. Consume a dimension stream carrying the metadata that describes the flow records, parse it, and broadcast it.
2. Consume the real-time flow stream and delay it slightly (so records are not processed before the metadata they join against has arrived).
3. Connect the flow stream with the broadcast metadata stream and use the metadata to extract the matching fields from each record.
4. Pack the results into Avro format (look it up if unfamiliar) and sink them.
Flink version: 1.14. The pom.xml:
- "1.0" encoding="UTF-8"?>
- <project xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0modelVersion>
-
- <groupId>org.examplegroupId>
- <artifactId>FlinkCodeartifactId>
- <version>1.0-SNAPSHOTversion>
-
- <properties>
- <maven.compiler.source>8maven.compiler.source>
- <maven.compiler.target>8maven.compiler.target>
- <jdk.version>1.8jdk.version>
- <jar.name>ubs-data-converterjar.name>
- <project.build.sourceEncoding>UTF-8project.build.sourceEncoding>
-
- <flink.version>1.14.4flink.version>
- properties>
- <dependencies>
- <dependency>
- <groupId>org.apache.flinkgroupId>
- <artifactId>flink-streaming-java_2.11artifactId>
- <version>${flink.version}version>
- <scope>providedscope>
- dependency>
- <dependency>
- <groupId>org.apache.flinkgroupId>
- <artifactId>flink-connector-kafka_2.11artifactId>
- <version>${flink.version}version>
- <exclusions>
- <exclusion>
- <groupId>org.apache.kafkagroupId>
- <artifactId>kafka-clientsartifactId>
- exclusion>
- exclusions>
- dependency>
- <dependency>
- <groupId>org.apache.flinkgroupId>
- <artifactId>flink-runtime-web_2.11artifactId>
- <version>${flink.version}version>
- <scope>providedscope>
- dependency>
- <dependency>
- <groupId>org.apache.httpcomponentsgroupId>
- <artifactId>httpclientartifactId>
- <version>4.5.10version>
- dependency>
- <dependency>
- <groupId>com.alibabagroupId>
- <artifactId>fastjsonartifactId>
- <version>1.2.8version>
- dependency>
- <dependency>
- <groupId>org.apache.avrogroupId>
- <artifactId>avroartifactId>
- <version>1.9.2version>
- dependency>
- <dependency>
- <groupId>org.apache.httpcomponentsgroupId>
- <artifactId>httpcoreartifactId>
- <version>4.4.1version>
- dependency>
- <dependency>
- <groupId>org.projectlombokgroupId>
- <artifactId>lombokartifactId>
- <version>1.18.16version>
- dependency>
- <dependency>
- <groupId>org.projectlombokgroupId>
- <artifactId>lombokartifactId>
- <version>1.18.16version>
- <scope>compilescope>
- dependency>
- dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.avrogroupId>
- <artifactId>avro-maven-pluginartifactId>
- <version>1.9.2version>
- <executions>
- <execution>
- <phase>generate-sourcesphase>
- <goals>
- <goal>schemagoal>
- goals>
- <configuration>
- <sourceDirectory>${project.basedir}/src/main/resources/sourceDirectory>
- <outputDirectory>${project.basedir}/src/main/java/com/msxfoutputDirectory>
- configuration>
- execution>
- executions>
- plugin>
- <plugin>
- <groupId>org.apache.maven.pluginsgroupId>
- <artifactId>maven-compiler-pluginartifactId>
- <version>3.1version>
- <configuration>
- <source>${jdk.version}source>
- <target>${jdk.version}target>
- <encoding>${project.build.sourceEncoding}encoding>
- configuration>
- plugin>
- <plugin>
- <groupId>org.apache.maven.pluginsgroupId>
- <artifactId>maven-shade-pluginartifactId>
- <version>3.1.1version>
- <executions>
- <execution>
- <phase>packagephase>
- <goals>
- <goal>shadegoal>
- goals>
- <configuration>
- <finalName>${jar.name}finalName>
- <artifactSet>
- <excludes>
- <exclude>com.google.code.findbugs:jsr305exclude>
- <exclude>org.slf4j:*exclude>
- <exclude>log4j:*exclude>
- <exclude>org.glassfish.jersey.core:jersey-commonexclude>
- excludes>
- artifactSet>
- <relocations>
- <relocation>
- <pattern>com.google.commonpattern>
- <shadedPattern>com.shade.google.commonshadedPattern>
- relocation>
- <relocation>
- <pattern>org.apache.kafkapattern>
- <shadedPattern>org.shade.apache.kafkashadedPattern>
- relocation>
- relocations>
- <filters>
- <filter>
- <artifact>*artifact>
- <includes>
- <include>org/apache/htrace/**include>
- <include>org/apache/avro/**include>
- <include>com/msxf/**include>
- <include>org/apache/flink/streaming/**include>
- <include>org/apache/flink/connector/**include>
- <include>org/apache/kafka/**include>
- <include>org/apache/hive/**include>
- <include>org/apache/hadoop/hive/**include>
- <include>org/apache/curator/**include>
- <include>org/apache/zookeeper/**include>
- <include>org/apache/jute/**include>
- <include>org/apache/thrift/**include>
- <include>org/apache/http/**include>
- <include>org/I0Itec/**include>
- <include>jline/**include>
- <include>com/yammer/**include>
- <include>kafka/**include>
- <include>org/apache/hadoop/hbase/**include>
- <include>com/alibaba/fastjson/**include>
- <include>org/elasticsearch/action/**include>
- <include>io/confluent/**include>
- <include>com/fasterxml/**include>
- <include>org/elasticsearch/**include>
- <include>hbase-default.xmlinclude>
- <include>hbase-site.xmlinclude>
- includes>
- filter>
- <filter>
- <artifact>org.apache.hadoop.hive.*:*artifact>
- <excludes>
- <exclude>exclude>
- <exclude>exclude>
- <exclude>exclude>
- excludes>
- filter>
- filters>
- configuration>
- execution>
- executions>
- plugin>
- plugins>
- build>
- project>
In a real job the metadata stream would be obtained some other way (e.g. from a database query or a Kafka stream); here we simply define a custom source:
package source;

import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction.SourceContext;

public class StringSource extends RichSourceFunction<String> {

    Boolean running = true;

    @Override
    public void run(SourceContext<String> ctx) throws Exception {
        while (running) {
            // emit a single metadata message describing the fields of the flow records
            String value = "{\"data\":[{\"name\":\"id\",\"comment\":\"ID\"}" +
                    ",{\"name\":\"age\",\"comment\":\"年龄\"}" +
                    ",{\"name\":\"sex\",\"comment\":\"性别\"}]}";
            ctx.collect(value);
            running = false;
        }
    }

    @Override
    public void cancel() {
        running = false;
    }
}
Next, the simulated flow data. In production this real-time flow stream would also come from Kafka, but since this is a simple demo we again use a custom source:
package source;

import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.flink.streaming.api.functions.source.SourceFunction.SourceContext;

import java.util.Arrays;
import java.util.List;
import java.util.Random;

public class FlowSourceFunction extends RichSourceFunction<String> {

    Boolean running = true;

    private final List<String> USERS = Arrays.asList("张三", "李四", "牛二");
    private final List<String> BEHAVIOR = Arrays.asList("login", "out", "delete");
    private final List<String> SEX = Arrays.asList("男", "女");
    Random random = new Random();

    @Override
    public void run(SourceContext<String> ctx) throws Exception {
        // emit one random flow record per second
        while (running) {
            String id = USERS.get(random.nextInt(USERS.size()));
            String age = String.valueOf(random.nextInt(100));
            String sex = SEX.get(random.nextInt(SEX.size()));
            String time = String.valueOf(System.currentTimeMillis());
            String res = String.format("{\"id\":\"%s\"," +
                    "\"age\":\"%s\"," +
                    "\"sex\":\"%s\"," +
                    "\"time\":\"%s\"}", id, age, sex, time);
            ctx.collect(res);
            Thread.sleep(1000);
        }
    }

    @Override
    public void cancel() {
        running = false;
    }
}
The map function that turns a metadata message into an Avro schema plus the set of field names:

package func;

import bean.SchemaInfo;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.util.StringUtils;

import java.util.HashSet;
import java.util.stream.Stream;

public class MetaDataMapFunction extends RichMapFunction<String, SchemaInfo> {

    private String db;
    private String table;

    private final String NAME = "name";

    public MetaDataMapFunction(String db, String table) {
        this.db = db;
        this.table = table;
    }

    @Override
    public SchemaInfo map(String value) throws Exception {
        String[] aliases = {db.concat(".").concat(table)};
        // collect the field names for later lookups against the flow records
        HashSet<String> fieldsSet = new HashSet<>();
        // Avro schemas support several types; here we build the common record type.
        // The initialized (still field-less) schema looks like:
        // {"type":"record","name":"flow","namespace":"com.flow","doc":"fow_event","fields":[],"aliases":["db.table"]}
        SchemaBuilder.RecordBuilder<Schema> recordBuilder = SchemaBuilder.record("flow")
                .namespace("com.flow").aliases(aliases).doc("fow_event");
        // after initialization, flesh out the schema's fields; first get the field assembler
        SchemaBuilder.FieldAssembler<Schema> fields = recordBuilder.fields();
        // parse the metadata message
        JSONObject obj = JSON.parseObject(value);
        // drop entries whose "name" is missing or blank
        Stream<JSONObject> data = obj.getJSONArray("data").stream()
                .filter(o -> !StringUtils.isNullOrWhitespaceOnly(
                        JSON.parseObject(o.toString()).getOrDefault(NAME, "").toString()))
                .map(o -> JSONObject.parseObject(o.toString()));
        data.forEach(o -> {
            String name = o.get("name").toString();
            String comment = o.get("comment").toString();
            buildFields(fields, name, comment);
            fieldsSet.add(name);
        });
        Schema schema = fields.endRecord();
        return new SchemaInfo(schema.toString(), fieldsSet);
    }

    public void buildFields(SchemaBuilder.FieldAssembler<Schema> fields, String name, String comment) {
        fields.name(name)         // field name
                .doc(comment)     // description / comment
                .orderAscending() // sort order
                .type()           // field type
                .optional()       // union of null and the type, default null
                .stringType();
    }
}
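Since map() touches no runtime state, it can also be exercised directly outside Flink. A quick local sanity check, a sketch of my own (the demo class is hypothetical, not part of the original job), that the produced schema string round-trips through Avro's parser:

package func;

import bean.SchemaInfo;
import org.apache.avro.Schema;

// Hypothetical local check: run a sample metadata message through
// MetaDataMapFunction and re-parse the resulting schema string.
public class MetaDataMapFunctionDemo {
    public static void main(String[] args) throws Exception {
        String meta = "{\"data\":[{\"name\":\"id\",\"comment\":\"ID\"}," +
                "{\"name\":\"age\",\"comment\":\"age\"}," +
                "{\"name\":\"sex\",\"comment\":\"sex\"}]}";
        SchemaInfo info = new MetaDataMapFunction("db", "table").map(meta);
        Schema schema = new Schema.Parser().parse(info.getInfo());
        System.out.println(schema.toString(true)); // pretty-printed record schema
        System.out.println(info.getSet());         // [id, age, sex] (order may vary)
    }
}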
The SchemaInfo bean that gets broadcast:

package bean;

import lombok.AllArgsConstructor;
import lombok.Data;

import java.util.HashSet;

@Data
@AllArgsConstructor
public class SchemaInfo {
    // the Avro schema as a JSON string
    public String info;
    // the field names contained in the schema
    public HashSet<String> set;
}
The delay map that holds back the main stream:

package func;

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.configuration.Configuration;

/**
 * 1. Delay this map's initialization to block the main stream until the
 *    broadcast schema has been written into broadcast state, so the main
 *    stream can always find a schema.
 * 2. Normalize the data (an identity pass-through in this demo).
 */
public class DelayEtlMap extends RichMapFunction<String, String> {

    private final long delayTime;

    public DelayEtlMap(long delayTime) {
        this.delayTime = delayTime;
    }

    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        Thread.sleep(delayTime);
    }

    @Override
    public String map(String value) throws Exception {
        return value;
    }
}
The broadcast process function that joins the two streams:

package func;

import bean.FlowData;
import bean.SchemaInfo;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ReadOnlyBroadcastState;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction;
import org.apache.flink.util.Collector;

import java.text.SimpleDateFormat;
import java.util.HashMap;

/**
 * processElement handles the business (flow) stream;
 * processBroadcastElement handles the broadcast (metadata) stream.
 */
public class FlowBrodCastFunction extends BroadcastProcessFunction<String, SchemaInfo, FlowData> {

    @Override
    public void processElement(String value, ReadOnlyContext ctx, Collector<FlowData> out) throws Exception {
        ReadOnlyBroadcastState<String, SchemaInfo> flowMetaData = ctx.getBroadcastState(new MapStateDescriptor<>(
                "flowMetaData"
                , TypeInformation.of(String.class)
                , TypeInformation.of(SchemaInfo.class)));
        SchemaInfo schema = flowMetaData.get("schema");
        if (schema == null) {
            // the broadcast schema has not arrived yet; skip the record
            return;
        }
        JSONObject jsonObject = JSON.parseObject(value);
        Long time = jsonObject.getLong("time");
        // day partition derived from the event time (unused in this demo)
        SimpleDateFormat yyyyMMdd = new SimpleDateFormat("yyyyMMdd");
        String dt = yyyyMMdd.format(time);
        // copy the metadata fields out of the record
        HashMap<String, String> map = new HashMap<>();
        schema.getSet().forEach(o -> map.put(o, jsonObject.get(o).toString()));
        out.collect(new FlowData(schema.getInfo(), map, time));
    }

    @Override
    public void processBroadcastElement(SchemaInfo value, Context ctx, Collector<FlowData> out) throws Exception {
        if (value != null) {
            BroadcastState<String, SchemaInfo> flowMetaData = ctx.getBroadcastState(new MapStateDescriptor<>(
                    "flowMetaData"
                    , TypeInformation.of(String.class)
                    , TypeInformation.of(SchemaInfo.class)));
            flowMetaData.put("schema", value);
        }
    }
}
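One detail worth noting: getBroadcastState looks the state up by the descriptor's name and serializers, so the descriptor here must match the one passed to .broadcast(...) in the job below. Re-creating it per record works but is wasteful; a common refinement (my own sketch, the holder class name is hypothetical) is a single shared constant:

package func;

import bean.SchemaInfo;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeInformation;

// Hypothetical holder: both FlowApp's broadcast(...) call and
// FlowBrodCastFunction's getBroadcastState(...) could reference this constant.
public class Descriptors {
    public static final MapStateDescriptor<String, SchemaInfo> FLOW_META =
            new MapStateDescriptor<>(
                    "flowMetaData",
                    TypeInformation.of(String.class),
                    TypeInformation.of(SchemaInfo.class));
}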
The FlowData POJO:
package bean;

import lombok.AllArgsConstructor;
import lombok.Data;

import java.util.HashMap;

@Data
@AllArgsConstructor
public class FlowData {
    // the Avro schema (JSON string) this record conforms to
    public String schema;
    // field name -> field value
    public HashMap<String, String> values;

    private long time;
}
Finally, the job itself:

package ubs.app;

import bean.FlowData;
import bean.SchemaInfo;
import func.DelayEtlMap;
import func.FlowBrodCastFunction;
import func.FlowSinkFunction;
import func.MetaDataMapFunction;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.datastream.BroadcastStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.StringUtils;
import source.FlowSourceFunction;
import source.StringSource;

import java.util.Objects;

public class FlowApp {
    public static void main(String[] args) throws Exception {
        // parse the command-line arguments
        ParameterTool parameterTool = ParameterTool.fromArgs(args);
        // the db argument
        String db = parameterTool.get("db") == null ? "" : parameterTool.get("db");
        // the table argument
        String table = parameterTool.get("table") == null ? "" : parameterTool.get("table");
        // initialize the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // simulated metadata stream; its contents are broadcast below
        DataStreamSource<String> configSource = env.addSource(new StringSource());
        // set its parallelism
        configSource.setParallelism(1);
        // in production the real-time flow stream would be a Kafka source, e.g.:
        // KafkaSource<String> source = SourceGetter.getValueOnlySimpleStrDesSource(parameterTool);
        // DataStreamSource<String> realData = env.fromSource(source, WatermarkStrategy.noWatermarks(), "liuliang");
        DataStreamSource<String> realData = env.addSource(new FlowSourceFunction());
        realData.print("realData: ");
        // process the metadata and broadcast it: turn the metadata stream into a schema
        BroadcastStream<SchemaInfo> metaData = configSource.filter(data -> !StringUtils.isNullOrWhitespaceOnly(data))
                .setParallelism(1)
                .map(new MetaDataMapFunction(db, table))
                .broadcast(new MapStateDescriptor<String, SchemaInfo>(
                        "flowMetaData"
                        , TypeInformation.of(String.class)
                        , TypeInformation.of(SchemaInfo.class)
                ));
        // delay the main stream
        SingleOutputStreamOperator<String> realData2 = realData.map(new DelayEtlMap(1000))
                .filter(Objects::nonNull);
        // connect the streams and resolve each record against the schema
        SingleOutputStreamOperator<FlowData> res = realData2.connect(metaData).process(new FlowBrodCastFunction());
        res.addSink(new FlowSinkFunction());
        res.print("res");
        env.execute();
    }
}
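Step 4 of the intro packs the joined records into Avro before sinking, but the body of the FlowSinkFunction referenced above is not shown in the original. A minimal sketch of what such a sink might look like, assuming it only serializes each FlowData into Avro binary against the schema the record carries and prints the byte count (a real job would hand the bytes to Kafka, HDFS, etc.):

package func;

import bean.FlowData;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;

import java.io.ByteArrayOutputStream;

// Sketch only: serialize each record with the schema it carries.
public class FlowSinkFunction extends RichSinkFunction<FlowData> {

    @Override
    public void invoke(FlowData value, Context context) throws Exception {
        // rebuild the Avro schema from the JSON string carried by the record
        Schema schema = new Schema.Parser().parse(value.getSchema());
        // copy the field map into a GenericRecord (all fields are optional strings)
        GenericRecord record = new GenericData.Record(schema);
        value.getValues().forEach(record::put);
        // binary-encode the record
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(out, null);
        new GenericDatumWriter<GenericRecord>(schema).write(record, encoder);
        encoder.flush();
        System.out.println("avro record, " + out.size() + " bytes");
    }
}

Parsing the schema string on every invoke is wasteful; an obvious next step would be to cache the parsed Schema keyed by its JSON string.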