Add pom

zeek 2020-05-05 14:07:14 +08:00
parent f60481d7c5
commit 01c707d62e
4 changed files with 292 additions and 86 deletions

pom.xml

@@ -1,101 +1,136 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
     <groupId>com.thinker</groupId>
     <artifactId>flink-test</artifactId>
     <version>1.0-SNAPSHOT</version>
     <name>flink-test</name>
     <!-- FIXME change it to the project's website -->
     <url>http://www.example.com</url>
     <properties>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <maven.compiler.source>1.7</maven.compiler.source>
-        <maven.compiler.target>1.7</maven.compiler.target>
+        <maven.compiler.source>1.8</maven.compiler.source>
+        <maven.compiler.target>1.8</maven.compiler.target>
+        <flink.version>1.10.0</flink.version>
+        <scala.binary.version>2.11</scala.binary.version>
     </properties>
     <dependencies>
         <dependency>
             <groupId>org.apache.flink</groupId>
             <artifactId>flink-java</artifactId>
-            <version>1.10.0</version>
+            <version>${flink.version}</version>
         </dependency>
         <dependency>
             <groupId>org.apache.flink</groupId>
-            <artifactId>flink-streaming-java_2.11</artifactId>
-            <version>1.10.0</version>
+            <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
+            <version>${flink.version}</version>
         </dependency>
         <dependency>
             <groupId>org.apache.flink</groupId>
-            <artifactId>flink-clients_2.11</artifactId>
-            <version>1.10.0</version>
+            <artifactId>flink-clients_${scala.binary.version}</artifactId>
+            <version>${flink.version}</version>
         </dependency>
+        <!-- Logging -->
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+            <version>1.7.7</version>
+            <scope>runtime</scope>
+        </dependency>
+        <dependency>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+            <version>1.2.17</version>
+            <scope>runtime</scope>
+        </dependency>
+        <!-- Flink Kafka connector -->
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-connector-kafka-0.11_${scala.binary.version}</artifactId>
+            <version>${flink.version}</version>
+        </dependency>
         <dependency>
             <groupId>org.apache.flink</groupId>
             <artifactId>statefun-sdk</artifactId>
             <version>2.0.0</version>
         </dependency>
         <dependency>
             <groupId>org.apache.flink</groupId>
             <artifactId>statefun-flink-harness</artifactId>
             <version>2.0.0</version>
         </dependency>
+        <!-- Alibaba fastjson -->
+        <dependency>
+            <groupId>com.alibaba</groupId>
+            <artifactId>fastjson</artifactId>
+            <version>1.2.51</version>
+        </dependency>
+        <dependency>
+            <groupId>mysql</groupId>
+            <artifactId>mysql-connector-java</artifactId>
+            <version>5.1.34</version>
+        </dependency>
         <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
             <version>4.11</version>
             <scope>test</scope>
         </dependency>
     </dependencies>
     <build>
         <pluginManagement><!-- lock down plugin versions to avoid using Maven defaults (may be moved to parent pom) -->
             <plugins>
                 <!-- This plugin compiles Scala code into class files -->
                 <plugin>
                     <groupId>net.alchim31.maven</groupId>
                     <artifactId>scala-maven-plugin</artifactId>
                     <version>3.4.6</version>
                     <executions>
                         <execution>
                             <!-- Bound to Maven's compile phase -->
                             <goals>
                                 <goal>compile</goal>
                                 <goal>testCompile</goal>
                             </goals>
                         </execution>
                     </executions>
                 </plugin>
                 <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
                     <artifactId>maven-assembly-plugin</artifactId>
                     <version>3.0.0</version>
                     <configuration>
                         <descriptorRefs>
                             <descriptorRef>jar-with-dependencies</descriptorRef>
                         </descriptorRefs>
                     </configuration>
                     <executions>
                         <execution>
                             <id>make-assembly</id>
                             <phase>package</phase>
                             <goals>
                                 <goal>single</goal>
                             </goals>
                         </execution>
                     </executions>
                 </plugin>
             </plugins>
         </pluginManagement>
     </build>
 </project>
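
The commit pulls in slf4j-log4j12 and log4j at runtime, but no log4j configuration file is part of the change; without one, log4j only prints a warning about missing appenders. A minimal sketch of a console config, assuming it is saved as src/main/resources/log4j.properties (the file and its location are not part of this commit):

# Hypothetical minimal log4j 1.2 config: route everything at INFO and above to the console.
log4j.rootLogger=INFO, console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n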

src/main/java/com/thinker/Main.java

@@ -0,0 +1,39 @@
package com.thinker;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;

import java.util.Properties;

/**
 * @author zeekling [lingzhaohui@zeekling.cn]
 * @version 1.0
 * @apiNote Reads messages from the "metric" Kafka topic and prints them to the console
 * @since 2020-05-05
 */
public class Main {

    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("zookeeper.connect", "localhost:2181"); // legacy setting; the 0.11 consumer itself does not read it
        props.put("group.id", "metric-group");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");   // key deserializer
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); // value deserializer
        props.put("auto.offset.reset", "latest"); // start consuming from the latest offset

        DataStreamSource<String> dataStreamSource = env.addSource(new FlinkKafkaConsumer011<>(
                "metric",                 // Kafka topic
                new SimpleStringSchema(), // deserializes each record as a UTF-8 String
                props)).setParallelism(1);

        dataStreamSource.print(); // print the records read from Kafka to the console

        env.execute("Flink add data source");
    }
}
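
Main only prints the raw JSON strings so far. Since fastjson and the Metric POJO are now both on the classpath, a natural follow-up is to deserialize each record into a Metric; a minimal sketch (not part of this commit) that would go right after dataStreamSource is created:

// Hypothetical follow-up, assuming these imports are added:
// import com.alibaba.fastjson.JSON;
// import com.thinker.model.Metric;
// import org.apache.flink.streaming.api.datastream.DataStream;
DataStream<Metric> metrics = dataStreamSource
        .map(value -> JSON.parseObject(value, Metric.class)) // JSON string -> Metric POJO
        .returns(Metric.class); // the lambda erases the type, so tell Flink explicitly
metrics.print();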

src/main/java/com/thinker/model/Metric.java

@@ -0,0 +1,71 @@
package com.thinker.model;

import java.util.Map;

/**
 * @author zeekling [lingzhaohui@zeekling.cn]
 * @version 1.0
 * @apiNote Metric entity class
 * @since 2020-05-05
 */
public class Metric {

    private String name;
    private long timestamp;
    private Map<String, Object> fields;
    private Map<String, String> tags;

    public Metric() {
    }

    public Metric(String name, long timestamp, Map<String, Object> fields, Map<String, String> tags) {
        this.name = name;
        this.timestamp = timestamp;
        this.fields = fields;
        this.tags = tags;
    }

    @Override
    public String toString() {
        return "Metric{" +
                "name='" + name + '\'' +
                ", timestamp=" + timestamp +
                ", fields=" + fields +
                ", tags=" + tags +
                '}';
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public long getTimestamp() {
        return timestamp;
    }

    public void setTimestamp(long timestamp) {
        this.timestamp = timestamp;
    }

    public Map<String, Object> getFields() {
        return fields;
    }

    public void setFields(Map<String, Object> fields) {
        this.fields = fields;
    }

    public Map<String, String> getTags() {
        return tags;
    }

    public void setTags(Map<String, String> tags) {
        this.tags = tags;
    }
}
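
For reference, a Metric filled in the way KafkaUtils below does and serialized with JSON.toJSONString comes out roughly like this (fastjson sorts keys alphabetically by default; exact number formatting depends on its serializer):

{"fields":{"init":27244873.0,"max":27244873.0,"used":17244873.0,"used_percent":90.0},"name":"mem","tags":{"cluster":"zhisheng","host_ip":"101.147.022.106"},"timestamp":1588658834000}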

src/main/java/com/thinker/util/KafkaUtils.java

@@ -0,0 +1,61 @@
package com.thinker.util;

import com.thinker.model.Metric;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import com.alibaba.fastjson.JSON;

import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

/**
 * @author zeekling [lingzhaohui@zeekling.cn]
 * @version 1.0
 * @apiNote Utility class that writes data into Kafka
 * @since 2020-05-05
 */
public class KafkaUtils {

    private static final String broker_list = "localhost:9092";
    private static final String topic = "metric"; // Kafka topic; the Flink program must consume the same topic

    private static void writeToKafka() throws InterruptedException {
        Properties props = new Properties();
        props.put("bootstrap.servers", broker_list);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");   // key serializer
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); // value serializer

        Metric metric = new Metric();
        metric.setTimestamp(System.currentTimeMillis());
        metric.setName("mem");

        Map<String, String> tags = new HashMap<>();
        Map<String, Object> fields = new HashMap<>();
        tags.put("cluster", "zhisheng");
        tags.put("host_ip", "101.147.022.106");
        fields.put("used_percent", 90d);
        fields.put("max", 27244873d);
        fields.put("used", 17244873d);
        fields.put("init", 27244873d);
        metric.setTags(tags);
        metric.setFields(fields);

        // try-with-resources closes the producer after each record is sent
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            ProducerRecord<String, String> record =
                    new ProducerRecord<>(topic, null, null, JSON.toJSONString(metric));
            producer.send(record);
            System.out.println("Sending data: " + JSON.toJSONString(metric));
            producer.flush();
        }
    }

    public static void main(String[] args) throws InterruptedException {
        while (true) {
            Thread.sleep(300);
            writeToKafka();
        }
    }
}
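
To try the pipeline end to end: start ZooKeeper and a Kafka broker that the 0.11 client can talk to on localhost:9092, make sure the metric topic exists (or that the broker auto-creates topics), run KafkaUtils.main to produce one record every 300 ms, then run Main and watch the Flink job print each JSON message.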