update flink to 2.12
parent fa430f0c0d
commit 96bf3cdaa5

pom.xml (19 lines changed)
@@ -16,8 +16,7 @@
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
         <maven.compiler.source>1.8</maven.compiler.source>
         <maven.compiler.target>1.8</maven.compiler.target>
-        <flink.version>1.10.0</flink.version>
-        <scala.binary.version>2.11</scala.binary.version>
+        <flink.version>1.12.0</flink.version>
     </properties>

     <dependencies>
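Note on the property removal: dropping <scala.binary.version> only works because the Scala suffix is hard-coded as _2.12 in every Flink artifactId below. Keeping the property, bumping it to 2.12, and leaving the ${scala.binary.version} references in place would have been an equally valid, less repetitive way to do the same upgrade.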
@@ -29,15 +28,23 @@
         </dependency>
         <dependency>
             <groupId>org.apache.flink</groupId>
-            <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
+            <artifactId>flink-streaming-java_2.12</artifactId>
             <version>${flink.version}</version>
+            <!-- <scope>compile</scope>-->
         </dependency>

         <dependency>
             <groupId>org.apache.flink</groupId>
-            <artifactId>flink-clients_${scala.binary.version}</artifactId>
+            <artifactId>flink-clients_2.12</artifactId>
             <version>${flink.version}</version>
         </dependency>
+
+        <dependency>
+            <groupId>org.scala-lang</groupId>
+            <artifactId>scala-library</artifactId>
+            <version>2.12.0</version>
+        </dependency>
+
         <!--logging-->
         <dependency>
             <groupId>org.slf4j</groupId>
@@ -54,7 +61,7 @@
         <!--flink kafka connector-->
         <dependency>
             <groupId>org.apache.flink</groupId>
-            <artifactId>flink-connector-kafka-0.11_${scala.binary.version}</artifactId>
+            <artifactId>flink-connector-kafka_2.12</artifactId>
             <version>${flink.version}</version>
         </dependency>

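Background for this artifactId change: Flink 1.12 dropped the version-specific Kafka 0.10/0.11 connectors, leaving the universal flink-connector-kafka, which works with Kafka brokers 0.10.2 and newer. A minimal consumer sketch against the new artifact (topic name, group id, and broker address are assumptions, not values from this project):

    import java.util.Properties;

    import org.apache.flink.api.common.serialization.SimpleStringSchema;
    import org.apache.flink.streaming.api.datastream.DataStreamSource;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

    public class UniversalConsumerSketch {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

            Properties props = new Properties();
            props.put("bootstrap.servers", "localhost:9092"); // assumed broker address
            props.put("group.id", "demo");                    // hypothetical group id

            // Same three-argument constructor shape as the 0.11 consumer it replaces.
            DataStreamSource<String> stream = env.addSource(
                    new FlinkKafkaConsumer<>("demo-topic", new SimpleStringSchema(), props));
            stream.print();
            env.execute("universal kafka consumer sketch");
        }
    }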
@@ -93,7 +100,7 @@
         <dependency>
             <groupId>org.scala-lang</groupId>
             <artifactId>scala-library</artifactId>
-            <version>2.10.6</version>
+            <version>2.12.10</version>
             <scope>provided</scope>
         </dependency>

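One caveat this hunk introduces: the pom now declares org.scala-lang:scala-library twice, once at 2.12.0 (added above) and once here at 2.12.10 with provided scope. Maven warns on duplicate dependency declarations in a single pom and effectively honors only one of them, so the two entries should be consolidated to a single version.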

FlinkSinkToKafka.java

@@ -26,7 +26,7 @@ public class FlinkSinkToKafka {
         props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
         props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
         props.put("auto.offset.reset", "latest");
-        DataStreamSource<String> student = env.addSource(new FlinkKafkaConsumer011<>(
+        DataStreamSource<String> student = env.addSource(new FlinkKafkaConsumer<>(
                 READ_TOPIC, // this kafka topic must match the one used by the utility class above
                 new SimpleStringSchema(),
                 props)).setParallelism(1);
@@ -36,7 +36,7 @@ public class FlinkSinkToKafka {
         properties.setProperty("zookeeper.connect", "localhost:2181");
         properties.setProperty("group.id", "student-write");

-        student.addSink(new FlinkKafkaProducer011<String>(
+        student.addSink(new FlinkKafkaProducer<String>(
                 "student-write",
                 new SimpleStringSchema(),
                 properties
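The three-argument FlinkKafkaProducer constructor used here gives at-least-once delivery by default. A hedged sketch (topic reused from this class; broker address and timeout are assumptions) of the exactly-once variant the universal connector also offers, built on Kafka transactions:

    import java.nio.charset.StandardCharsets;
    import java.util.Properties;

    import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
    import org.apache.flink.streaming.connectors.kafka.KafkaSerializationSchema;
    import org.apache.kafka.clients.producer.ProducerRecord;

    public class ExactlyOnceSinkSketch {

        public static FlinkKafkaProducer<String> build() {
            Properties props = new Properties();
            props.setProperty("bootstrap.servers", "localhost:9092"); // assumed broker
            // Must not exceed the broker's transaction.max.timeout.ms (15 min by default).
            props.setProperty("transaction.timeout.ms", "60000");

            KafkaSerializationSchema<String> schema = new KafkaSerializationSchema<String>() {
                @Override
                public ProducerRecord<byte[], byte[]> serialize(String element, Long timestamp) {
                    return new ProducerRecord<>("student-write",
                            element.getBytes(StandardCharsets.UTF_8));
                }
            };

            return new FlinkKafkaProducer<>(
                    "student-write", schema, props, FlinkKafkaProducer.Semantic.EXACTLY_ONCE);
        }
    }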

FlinkCustomSource.java

@@ -16,7 +16,7 @@ public class FlinkCustomSource {

         env.addSource(new SourceFromMySQL()).print();

-        env.execute("Flink add data sourc");
+        env.execute("Flink add data source");
     }

 }

Main.java

@@ -3,7 +3,7 @@ package com.thinker.main;
 import org.apache.flink.api.common.serialization.SimpleStringSchema;
 import org.apache.flink.streaming.api.datastream.DataStreamSource;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
+import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

 import java.util.Properties;

@@ -26,7 +26,7 @@ public class Main {
         props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
         props.put("auto.offset.reset", "latest"); // value deserialization

-        DataStreamSource<String> dataStreamSource = env.addSource(new FlinkKafkaConsumer011<>(
+        DataStreamSource<String> dataStreamSource = env.addSource(new FlinkKafkaConsumer<>(
                 "metric", // kafka topic
                 new SimpleStringSchema(), // String serialization
                 props)).setParallelism(1);
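An aside on the consumer configuration above: with Flink's Kafka consumer, "auto.offset.reset" is only consulted when the job starts from group offsets and none have been committed yet. The connector also exposes explicit start positions; a sketch reusing the "metric" topic and props from this class:

    import java.util.Properties;

    import org.apache.flink.api.common.serialization.SimpleStringSchema;
    import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

    public class StartPositionSketch {
        public static FlinkKafkaConsumer<String> latestConsumer(Properties props) {
            FlinkKafkaConsumer<String> consumer =
                    new FlinkKafkaConsumer<>("metric", new SimpleStringSchema(), props);
            consumer.setStartFromLatest(); // ignore committed offsets; read only new records
            return consumer;
        }
    }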

SinkToMysql.java

@@ -6,7 +6,7 @@ import com.thinker.sql.SinkToMySQL;
 import org.apache.flink.api.common.serialization.SimpleStringSchema;
 import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
-import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;
+import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

 import java.util.Properties;

@@ -29,7 +29,7 @@ public class SinkToMysql {
         props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
         props.put("auto.offset.reset", "latest");

-        SingleOutputStreamOperator<Student> student = env.addSource(new FlinkKafkaConsumer011<>(
+        SingleOutputStreamOperator<Student> student = env.addSource(new FlinkKafkaConsumer<>(
                 "student", // this kafka topic must match the one used by the utility class above
                 new SimpleStringSchema(),
                 props)).setParallelism(1)

Student.java

@@ -1,7 +1,5 @@
 package com.thinker.model;

-import jdk.nashorn.internal.objects.annotations.Getter;
-import jdk.nashorn.internal.objects.annotations.Setter;

 /**
  * @author zeekling [lingzhaohui@zeekling.cn]
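Removing these two imports is more than cleanup: jdk.nashorn.internal.objects.annotations.Getter and .Setter are JDK-internal annotations belonging to the Nashorn JavaScript engine and never generated accessors for this class. A sketch of the plain-JavaBean shape presumably intended, which is also what Flink's POJO serializer expects (the real fields are not visible in this diff, so the two below are illustrative):

    public class Student {
        private int id;      // hypothetical field
        private String name; // hypothetical field

        public Student() {}  // Flink's POJO serializer needs a public no-arg constructor

        public int getId() { return id; }
        public void setId(int id) { this.id = id; }

        public String getName() { return name; }
        public void setName(String name) { this.name = name; }
    }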