update flink to 2.12
parent fa430f0c0d
commit 96bf3cdaa5
 pom.xml | 19
pom.xml
@@ -16,8 +16,7 @@
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
         <maven.compiler.source>1.8</maven.compiler.source>
         <maven.compiler.target>1.8</maven.compiler.target>
-        <flink.version>1.10.0</flink.version>
-        <scala.binary.version>2.11</scala.binary.version>
+        <flink.version>1.12.0</flink.version>
     </properties>

     <dependencies>
@@ -29,15 +28,23 @@
         </dependency>
         <dependency>
             <groupId>org.apache.flink</groupId>
-            <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
+            <artifactId>flink-streaming-java_2.12</artifactId>
             <version>${flink.version}</version>
             <!-- <scope>compile</scope> -->
         </dependency>

         <dependency>
             <groupId>org.apache.flink</groupId>
-            <artifactId>flink-clients_${scala.binary.version}</artifactId>
+            <artifactId>flink-clients_2.12</artifactId>
             <version>${flink.version}</version>
         </dependency>

+        <dependency>
+            <groupId>org.scala-lang</groupId>
+            <artifactId>scala-library</artifactId>
+            <version>2.12.0</version>
+        </dependency>
+
         <!-- logging -->
         <dependency>
             <groupId>org.slf4j</groupId>
@@ -54,7 +61,7 @@
         <!-- flink kafka connector -->
         <dependency>
             <groupId>org.apache.flink</groupId>
-            <artifactId>flink-connector-kafka-0.11_${scala.binary.version}</artifactId>
+            <artifactId>flink-connector-kafka_2.12</artifactId>
             <version>${flink.version}</version>
         </dependency>

@@ -93,7 +100,7 @@
         <dependency>
             <groupId>org.scala-lang</groupId>
             <artifactId>scala-library</artifactId>
-            <version>2.10.6</version>
+            <version>2.12.10</version>
             <scope>provided</scope>
         </dependency>

@@ -26,7 +26,7 @@ public class FlinkSinkToKafka {
         props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
         props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
         props.put("auto.offset.reset", "latest");
-        DataStreamSource<String> student = env.addSource(new FlinkKafkaConsumer011<>(
+        DataStreamSource<String> student = env.addSource(new FlinkKafkaConsumer<>(
                 READ_TOPIC, // this kafka topic must match the one used by the utility class above
                 new SimpleStringSchema(),
                 props)).setParallelism(1);
@@ -36,7 +36,7 @@ public class FlinkSinkToKafka {
         properties.setProperty("zookeeper.connect", "localhost:2181");
         properties.setProperty("group.id", "student-write");

-        student.addSink(new FlinkKafkaProducer011<String>(
+        student.addSink(new FlinkKafkaProducer<String>(
                 "student-write",
                 new SimpleStringSchema(),
                 properties
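The two hunks above are the core of the migration: Flink 1.12 ships a universal Kafka connector, so the Kafka-version-suffixed FlinkKafkaConsumer011/FlinkKafkaProducer011 classes are replaced by FlinkKafkaConsumer/FlinkKafkaProducer. For orientation, a minimal self-contained sketch of the same read-then-write pipeline against the 1.12 API — the topics and the three-argument producer constructor come from the diff, while the broker address and group id are assumed for illustration:

import java.util.Properties;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;

public class KafkaPipelineSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        Properties consumerProps = new Properties();
        consumerProps.put("bootstrap.servers", "localhost:9092"); // assumed broker address
        consumerProps.put("group.id", "student-read");            // assumed group id
        consumerProps.put("auto.offset.reset", "latest");

        // Universal consumer: no Kafka-version suffix on the class name.
        DataStreamSource<String> student = env.addSource(new FlinkKafkaConsumer<>(
                "student",                 // source topic, as in the diff
                new SimpleStringSchema(),
                consumerProps)).setParallelism(1);

        Properties producerProps = new Properties();
        producerProps.setProperty("bootstrap.servers", "localhost:9092");

        // Universal producer; this (topic, schema, config) constructor matches
        // the one used in the diff.
        student.addSink(new FlinkKafkaProducer<String>(
                "student-write",
                new SimpleStringSchema(),
                producerProps));

        env.execute("kafka read-write sketch");
    }
}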
@@ -16,7 +16,7 @@ public class FlinkCustomSource {

         env.addSource(new SourceFromMySQL()).print();

-        env.execute("Flink add data sourc");
+        env.execute("Flink add data source");
     }

 }
@@ -3,7 +3,7 @@ package com.thinker.main;
 import org.apache.flink.api.common.serialization.SimpleStringSchema;
 import org.apache.flink.streaming.api.datastream.DataStreamSource;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
+import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

 import java.util.Properties;

@@ -26,7 +26,7 @@ public class Main {
         props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
         props.put("auto.offset.reset", "latest"); // start from the latest offset

-        DataStreamSource<String> dataStreamSource = env.addSource(new FlinkKafkaConsumer011<>(
+        DataStreamSource<String> dataStreamSource = env.addSource(new FlinkKafkaConsumer<>(
                 "metric", // kafka topic
                 new SimpleStringSchema(), // String deserialization schema
                 props)).setParallelism(1);
@@ -6,7 +6,7 @@ import com.thinker.sql.SinkToMySQL;
 import org.apache.flink.api.common.serialization.SimpleStringSchema;
 import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
+import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

 import java.util.Properties;

@@ -29,7 +29,7 @@ public class SinkToMysql {
         props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
         props.put("auto.offset.reset", "latest");

-        SingleOutputStreamOperator<Student> student = env.addSource(new FlinkKafkaConsumer011<>(
+        SingleOutputStreamOperator<Student> student = env.addSource(new FlinkKafkaConsumer<>(
                 "student", // this kafka topic must match the one used by the utility class above
                 new SimpleStringSchema(),
                 props)).setParallelism(1)
@@ -1,7 +1,5 @@
 package com.thinker.model;

-import jdk.nashorn.internal.objects.annotations.Getter;
-import jdk.nashorn.internal.objects.annotations.Setter;

 /**
  * @author zeekling [lingzhaohui@zeekling.cn]
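The final hunk removes imports of jdk.nashorn.internal.objects.annotations.Getter and Setter. Those annotations belong to the JDK-internal Nashorn JavaScript engine and do not generate bean accessors, so dropping them from a POJO is a correctness cleanup (and avoids breakage on newer JDKs, where Nashorn internals are inaccessible). A minimal sketch of the model class with plain handwritten accessors — the class name Student comes from the SinkToMysql hunk above, while the fields are assumed for illustration:

package com.thinker.model;

/**
 * @author zeekling [lingzhaohui@zeekling.cn]
 */
public class Student {
    // Field names are illustrative; the diff does not show them.
    private int id;
    private String name;

    public int getId() { return id; }

    public void setId(int id) { this.id = id; }

    public String getName() { return name; }

    public void setName(String name) { this.name = name; }
}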