Commit 96cf7ad1 authored by zhoushiguang

Real-time computing module

parent 8a1624b6
@@ -21,6 +21,7 @@
<modules>
<module>wj-datacenter-service</module>
<module>wj-realtime-computing</module>
</modules>
<!-- Dependency versions -->
<?xml version="1.0" encoding="UTF-8"?>
<module type="JAVA_MODULE" version="4" />
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<!--<parent>
<artifactId>wj-platform</artifactId>
<groupId>com.wanji</groupId>
<version>1.0</version>
</parent>-->
<modelVersion>4.0.0</modelVersion>
<groupId>com.wanji</groupId>
<artifactId>wj-realtime-computing</artifactId>
<version>1.1.0</version>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<flink.version>1.14.3</flink.version>
<scala.binary.version>2.11</scala.binary.version>
<jackson.version>2.13.2</jackson.version>
<elasticsearch.version>7.10.1</elasticsearch.version>
<es.client.version>7.10.1</es.client.version>
<!-- Suffix appended to the shaded jar file name -->
<jar.tail>all</jar.tail>
<!--<jar.tail>batch-track</jar.tail>-->
<!--<jar.tail>export</jar.tail>-->
<!--<jar.tail>monitor</jar.tail>-->
</properties>
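<!-- Note: jar.tail only selects the suffix of the shaded jar's file name via the
maven-shade-plugin <finalName> below; for example, building with
mvn package -Djar.tail=monitor
should produce wj-realtime-computing-monitor-1.1.0.jar. -->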
<dependencies>
<dependency>
<groupId>org.apache.bahir</groupId>
<artifactId>flink-connector-redis_2.11</artifactId>
<version>1.0</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-api-java</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-elasticsearch7_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
<version>${elasticsearch.version}</version>
</dependency>
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-high-level-client</artifactId>
<version>${es.client.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-tx</artifactId>
<version>4.1.4.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context</artifactId>
<version>4.1.4.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-context-support</artifactId>
<version>4.1.4.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
<version>4.1.4.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-beans</artifactId>
<version>4.1.4.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-aop</artifactId>
<version>4.1.4.RELEASE</version>
</dependency>
<dependency>
<groupId>javax.transaction</groupId>
<artifactId>jta</artifactId>
<version>1.1</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpasyncclient</artifactId>
<version>4.1.4</version>
</dependency>
<dependency>
<groupId>com.vividsolutions</groupId>
<artifactId>jts</artifactId>
<version>1.13</version>
</dependency>
<dependency>
<groupId>com.mapabc.coord</groupId>
<artifactId>coord-cvt</artifactId>
<version>1.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.google.protobuf/protobuf-java -->
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<version>3.19.4</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.orbitz.consul/consul-client -->
<dependency>
<groupId>com.orbitz.consul</groupId>
<artifactId>consul-client</artifactId>
<version>1.5.3</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.ecwid.consul/consul-api -->
<!--<dependency>
<groupId>com.ecwid.consul</groupId>
<artifactId>consul-api</artifactId>
<version>1.4.5</version>
</dependency>-->
<!-- <dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-runtime-web_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
<scope>compile</scope>
</dependency>-->
<!-- Flink Java API dependency -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>${flink.version}</version>
</dependency>
<!-- Flink streaming (real-time computing) dependency -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<!-- Official Flink Kafka connector -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>com.ververica</groupId>
<artifactId>flink-sql-connector-mysql-cdc</artifactId>
<version>2.3.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/com.getindata/flink-http-connector -->
<!--<dependency>
<groupId>com.getindata</groupId>
<artifactId>flink-http-connector</artifactId>
<version>0.9.0</version>
</dependency>-->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-streams</artifactId>
<version>3.0.0</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>3.0.0</version>
</dependency>
<dependency>
<groupId>redis.clients</groupId>
<artifactId>jedis</artifactId>
<version>2.9.0</version>
</dependency>
<!-- Alibaba fastjson dependency -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.79</version>
</dependency>
<!-- Redis client dependency -->
<!--<dependency>
<groupId>redis.clients</groupId>
<artifactId>jedis</artifactId>
<version>4.1.1</version>
</dependency>-->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.36</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.17.2</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
<version>2.17.2</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>42.2.23</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.yaml/snakeyaml -->
<dependency>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
<version>1.30</version>
</dependency>
<!--<dependency>
<groupId>com.github.ulisesbocchio</groupId>
<artifactId>jasypt-spring-boot-starter</artifactId>
<version>2.1.0</version>
</dependency>-->
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.6</version>
</dependency>
<!-- Pins jackson to fix the vulnerable jackson-databind 2.12.0 pulled in transitively by the consul client -->
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-jdk8</artifactId>
<version>${jackson.version}</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.datatype</groupId>
<artifactId>jackson-datatype-guava</artifactId>
<version>${jackson.version}</version>
</dependency>
<!-- MyBatisPlus -->
<!--<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-boot-starter</artifactId>
<version>3.1.2</version>
</dependency>-->
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-generator</artifactId>
<version>3.0.7.1</version>
</dependency>
<dependency>
<groupId>com.baomidou</groupId>
<artifactId>mybatis-plus-extension</artifactId>
<version>3.1.2</version>
</dependency>
<dependency>
<groupId>org.mybatis</groupId>
<artifactId>mybatis-typehandlers-jsr310</artifactId>
<version>1.0.2</version>
</dependency>
<!--<dependency>
<groupId>org.mybatis</groupId>
<artifactId>mybatis-spring</artifactId>
<version>2.0.6</version>
</dependency>
<dependency>
<groupId>org.mybatis</groupId>
<artifactId>mybatis</artifactId>
<version>3.5.7</version>
</dependency>-->
<!--<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-freemarker</artifactId>
<version>2.2.13.RELEASE</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-thymeleaf</artifactId>
<version>2.2.13.RELEASE</version>
</dependency>-->
<dependency>
<groupId>org.apache.velocity</groupId>
<artifactId>velocity</artifactId>
<version>1.7</version>
</dependency>
<dependency>
<groupId>org.freemarker</groupId>
<artifactId>freemarker</artifactId>
<version>2.3.28</version>
</dependency>
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<version>2.9.9</version>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>2.8.9</version>
</dependency>
<!-- Alibaba Druid Spring Boot Starter dependency -->
<!--<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
<version>1.1.1</version>
</dependency>-->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid</artifactId>
<version>1.1.2</version>
</dependency>
<!-- MySQL JDBC driver -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.48</version>
</dependency>
</dependencies>
<!--<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.4.2</version>
<configuration>
<skipTests>true</skipTests>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>2.6</version>
<configuration>
&lt;!&ndash;<skipTests>true</skipTests>&ndash;&gt;
<archive>
<manifest>
&lt;!&ndash;<mainClass>com.wanji.indicators.task.track.stream.TrackAndRouteMain</mainClass>&ndash;&gt;
<addClasspath>true</addClasspath>
<classpathPrefix>lib/</classpathPrefix>
</manifest>
<manifestEntries>
<Class-Path>./</Class-Path>
</manifestEntries>
</archive>
<excludes>
<exclude>bin/**</exclude>
<exclude>redis/**</exclude>
<exclude>logger/**</exclude>
&lt;!&ndash;<exclude>**/*.properties</exclude>&ndash;&gt;
<exclude>**/*.yml</exclude>
<exclude>**/*.txt</exclude>
</excludes>
</configuration>
</plugin>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<configuration>
&lt;!&ndash;<skipTests>true</skipTests>&ndash;&gt;
&lt;!&ndash; not append assembly id in release file name &ndash;&gt;
<appendAssemblyId>false</appendAssemblyId>
<descriptors>
<descriptor>src/main/assembly/package.xml</descriptor>
</descriptors>
</configuration>
<executions>
<execution>
<id>make-assembly</id>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>-->
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration combine.self="override">
<createDependencyReducedPom>false</createDependencyReducedPom>
<shadedArtifactAttached>false</shadedArtifactAttached>
<finalName>${project.artifactId}-${jar.tail}-${project.version}</finalName>
<filters>
<!-- Globally exclude log4j.properties from our JAR files. -->
<filter>
<artifact>*</artifact>
<excludes>
<!--<exclude>log4j.properties</exclude>-->
<exclude>log4j-test.properties</exclude>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
</excludes>
</filter>
</filters>
<artifactSet>
<excludes>
<exclude>org.slf4j:slf4j-log4j12</exclude>
<exclude>log4j:log4j</exclude>
</excludes>
</artifactSet>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
<resource>reference.conf</resource>
</transformer>
<!-- The service transformer is needed to merge META-INF/services files -->
<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
<transformer implementation="org.apache.maven.plugins.shade.resource.ApacheNoticeResourceTransformer"/>
<transformer
implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
<resource>META-INF/spring.handlers</resource>
</transformer>
<transformer
implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
<resource>META-INF/spring.schemas</resource>
</transformer>
<!--<transformer
implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>com.wanji.indicators.task.track.stream.TrackUnionMain</mainClass>
</transformer>-->
</transformers>
</configuration>
</execution>
<execution>
<!--
Disable inherited shade-flink to prevent the Shade plugin from changing the project.basedir. The basedir
is changed by the Shade plugin when dependencyReducedPomLocation is set to a different location than the
original basedir. We do that in the root pom.xml.
-->
<id>shade-flink</id>
<phase>none</phase>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
\ No newline at end of file
package com.wanji.indicators.config;
import com.wanji.indicators.util.PropertiesHelper;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisConfigBase;
import org.apache.flink.streaming.connectors.redis.common.config.FlinkJedisPoolConfig;
import java.util.Properties;
/**
* @author fengyi
* @date 2023/4/7
* @description
*/
public class RedisConfig {
private final PropertiesHelper instance = PropertiesHelper.getInstance();
private final Properties properties = instance.getProperties();
public FlinkJedisConfigBase getRedisConfig(){
FlinkJedisConfigBase config = new FlinkJedisPoolConfig.Builder()
.setHost(properties.getProperty("redis.host"))
.setPassword(properties.getProperty("redis.password"))
.setPort(Integer.parseInt(properties.getProperty("redis.port")))
.setDatabase(Integer.parseInt(properties.getProperty("redis.database")))
.build();
return config;
}
}
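A minimal usage sketch for this config, assuming the CustomRedisMapper and Constant classes that appear later in this commit and a DataStream<MaxSpeedResultModel> supplied by the caller; it mirrors the sink wiring in FreeFlowSpeedMain below:

import com.wanji.indicators.config.RedisConfig;
import com.wanji.indicators.constant.Constant;
import com.wanji.indicators.task.freeflow.service.model.MaxSpeedResultModel;
import com.wanji.indicators.task.freeflow.service.sink.CustomRedisMapper;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.connectors.redis.RedisSink;

class RedisSinkWiring {
    static void attach(DataStream<MaxSpeedResultModel> results) {
        // every element is written as one HSET into the AREA_REALTIME_INDICATOR hash
        results.addSink(new RedisSink<>(new RedisConfig().getRedisConfig(),
                new CustomRedisMapper(Constant.AREA_REALTIME_INDICATOR)));
    }
}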
package com.wanji.indicators.constant;
import java.util.HashMap;
import java.util.Map;
public class Constant {
// Motor vehicle type codes, comma separated
public static final String MOTOR_TYPES= "1,2,3,7,8,10,11,12,13,15";
public static final String AREA_REALTIME_INDICATOR= "AREA_REALTIME_INDICATOR";
public static final String HASH_KEY_TRAVEL_DATA = "TRAVEL_DATA";
public static final String HASH_KEY_AREA_FREE_FLOW_SPEED = "AREA_FREE_FLOW_SPEED";
public static final String HASH_KEY_AREA_MAX_QUEUE_LENGTH = "AREA_MAX_QUEUE_LENGTH";
}
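A short sketch of how MOTOR_TYPES is meant to be consumed: split on commas and match against CarTrackModel.originalType, as FreeFlowSpeedMain does later in this commit. The class name here is illustrative only:

import com.wanji.indicators.constant.Constant;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

class MotorTypeFilterExample {
    public static void main(String[] args) {
        Set<String> motorTypes = new HashSet<>(Arrays.asList(Constant.MOTOR_TYPES.split(",")));
        System.out.println(motorTypes.contains("3"));  // true: code 3 is a motor vehicle type
        System.out.println(motorTypes.contains("4"));  // false: not in MOTOR_TYPES
    }
}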
package com.wanji.indicators.constant;
import java.util.Objects;
public enum DirectionFromCnToEn {
North("北方向", "North direction"),
Northeast("东北方向", "Northeast direction"),
East("东方向", "East direction"),
Southeast("东南方向", "Southeast direction"),
South("南方向", "South direction"),
Southwest("西南方向", "Southwest direction"),
West("西方向", "West direction"),
Northwest("西北方向", "Northwest direction");
private String cndir;
private String endir;
DirectionFromCnToEn(String cndir, String endir) {
this.cndir = cndir;
this.endir = endir;
}
public String getCndir() {
return this.cndir;
}
public void setCndir(String cndir) {
this.cndir = cndir;
}
public String getEndir() {
return this.endir;
}
public void setEndir(String endir) {
this.endir = endir;
}
public static String getEnDir(String dirName) {
DirectionFromCnToEn[] values = values();
for (DirectionFromCnToEn dirEnum : values) {
String dircn = dirEnum.getCndir();
if (Objects.equals(dirName, dircn))
return dirEnum.getEndir();
}
return null;
}
}
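Usage sketch for the Chinese-to-English direction lookup above; the class name is illustrative:

import com.wanji.indicators.constant.DirectionFromCnToEn;

class DirectionLookupExample {
    public static void main(String[] args) {
        System.out.println(DirectionFromCnToEn.getEnDir("北方向"));  // "North direction"
        System.out.println(DirectionFromCnToEn.getEnDir("东方向"));  // "East direction"
        System.out.println(DirectionFromCnToEn.getEnDir("无效"));    // null: no matching constant
    }
}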
package com.wanji.indicators.constant;
public enum EventState {
GREEN(Integer.valueOf(1), "绿"),
YELLOW(Integer.valueOf(2), "黄"),
RED(Integer.valueOf(3), "红"),
DISSIPATE(Integer.valueOf(1), "已消散"),
NOT_DISSIPATE(Integer.valueOf(0), "未消散");
private Integer code;
private String name;
EventState(Integer code, String name) {
this.code = code;
this.name = name;
}
public Integer getCode() {
return this.code;
}
public void setCode(Integer code) {
this.code = code;
}
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
}
package com.wanji.indicators.constant;
public enum EventType {
EVENT(Integer.valueOf(0), "交通事件"),
CONFLICT_POINT(Integer.valueOf(1), "冲突点"),
RAPID_ACCELERATION(Integer.valueOf(2), "急加速"),
RAPID_DECELERATION(Integer.valueOf(3), "急减速"),
SHARP_TURN(Integer.valueOf(4), "急转弯"),
SPEEDING(Integer.valueOf(5), "违法超速"),
ACCIDENT(Integer.valueOf(6), "多车事故"),
ABNORMAL_PARKING(Integer.valueOf(7), "单车事故"),
RETROGRADE(Integer.valueOf(8), "逆向行驶"),
ILLEGAL_PARKING(Integer.valueOf(9), "违规停车"),
CROSSING_THE_ROAD(Integer.valueOf(10), "横穿路段"),
NOT_FOLLOWING_THE_LANE(Integer.valueOf(11), "不按车道行驶"),
CHANGE_LANE(Integer.valueOf(12), "违规变道"),
ABNORMAL_STOP(Integer.valueOf(13), "异常停车"),
OVERFLOW_CONGESTION(Integer.valueOf(14), "路口溢出"),
RUNNING_RED_LIGHT(Integer.valueOf(15), "闯红灯"),
EMERGENCY_LANE(Integer.valueOf(16), "占用应急车道"),
ROAD_CONGESTION(Integer.valueOf(17), "路段拥堵"),
PEDESTRIAN_RUNNING_RED_LIGHT(Integer.valueOf(18), "行人闯红灯"),
NON_VEHICLE_RUNNING_RED_LIGHT(Integer.valueOf(19), "非机动车闯红灯"),
PEDESTRIAN_INVADE(Integer.valueOf(20), "行人入侵机动车道"),
NON_VEHICLE_INVADE(Integer.valueOf(21), "非机动车入侵机动车道"),
RIGHT_TURN_NOT_SLOWING_DOWN(Integer.valueOf(22), "右转弯未减速"),
RIGHT_TURN_BEYOND_SAFE_SPEED(Integer.valueOf(23), "右转弯超出安全车速"),
WARNING_OF_TRUCK_TURNING_RIGHT(Integer.valueOf(24), "大型车右转预警"),
TURN_RIGHT_WITHOUT_STOPPING(Integer.valueOf(25), "大型车右转未停车"),
INTRUSION_RISK_ZONE(Integer.valueOf(26), "大型车入侵危险区"),
DANGEROUS_DRIVING_S(Integer.valueOf(27), "S型危险驾驶"),
FREQUENT_LANE_CHANGES(Integer.valueOf(28), "连续变道"),
CROSS_CONGESTION(Integer.valueOf(29), "路口拥堵"),
CROSS_DEADLOCK(Integer.valueOf(30), "路口死锁"),
CROSS_IMBALANCE(Integer.valueOf(31), "路口失衡"),
OUT_LANE(Integer.valueOf(32), "驶离车道");
private Integer code;
private String name;
EventType(Integer code, String name) {
this.code = code;
this.name = name;
}
public Integer getCode() {
return this.code;
}
public void setCode(Integer code) {
this.code = code;
}
public String getName() {
return this.name;
}
public void setName(String name) {
this.name = name;
}
public static String getNameByCode(int code) {
String result = "";
switch (code) {
case 1:
result = CONFLICT_POINT.getName();
break;
case 2:
result = RAPID_ACCELERATION.getName();
break;
case 3:
result = RAPID_DECELERATION.getName();
break;
case 4:
result = SHARP_TURN.getName();
break;
case 5:
result = SPEEDING.getName();
break;
case 6:
result = ACCIDENT.getName();
break;
case 7:
result = ABNORMAL_PARKING.getName();
break;
case 8:
result = RETROGRADE.getName();
break;
case 9:
result = ILLEGAL_PARKING.getName();
break;
case 10:
result = CROSSING_THE_ROAD.getName();
break;
case 11:
result = NOT_FOLLOWING_THE_LANE.getName();
break;
case 12:
result = CHANGE_LANE.getName();
break;
case 13:
result = ABNORMAL_STOP.getName();
break;
case 14:
result = OVERFLOW_CONGESTION.getName();
break;
case 15:
result = RUNNING_RED_LIGHT.getName();
break;
case 16:
result = EMERGENCY_LANE.getName();
break;
case 17:
result = ROAD_CONGESTION.getName();
break;
case 18:
result = PEDESTRIAN_RUNNING_RED_LIGHT.getName();
break;
case 19:
result = NON_VEHICLE_RUNNING_RED_LIGHT.getName();
break;
case 20:
result = PEDESTRIAN_INVADE.getName();
break;
case 21:
result = NON_VEHICLE_INVADE.getName();
break;
case 22:
result = RIGHT_TURN_NOT_SLOWING_DOWN.getName();
break;
case 23:
result = RIGHT_TURN_BEYOND_SAFE_SPEED.getName();
break;
case 24:
result = WARNING_OF_TRUCK_TURNING_RIGHT.getName();
break;
case 25:
result = TURN_RIGHT_WITHOUT_STOPPING.getName();
break;
case 26:
result = INTRUSION_RISK_ZONE.getName();
break;
case 27:
result = DANGEROUS_DRIVING_S.getName();
break;
case 28:
result = FREQUENT_LANE_CHANGES.getName();
break;
case 29:
result = CROSS_CONGESTION.getName();
break;
case 30:
result = CROSS_DEADLOCK.getName();
break;
case 31:
result = CROSS_IMBALANCE.getName();
break;
case 32:
result = OUT_LANE.getName();
break;
}
return result;
}
}
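The switch in getNameByCode duplicates the code/name pairs already carried by the enum constants and silently skips code 0 (EVENT). A table-driven lookup over values() would cover every constant automatically; a sketch only, the commit keeps the switch version:

import com.wanji.indicators.constant.EventType;

class EventTypeLookupSketch {
    static String nameByCode(int code) {
        for (EventType t : EventType.values()) {
            if (t.getCode() == code) {
                return t.getName();
            }
        }
        return "";
    }
}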
package com.wanji.indicators.entity;
import com.baomidou.mybatisplus.annotation.TableName;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.extension.activerecord.Model;
import java.time.LocalDateTime;
import com.baomidou.mybatisplus.annotation.TableId;
import java.io.Serializable;
import lombok.Data;
import lombok.EqualsAndHashCode;
@Data
@EqualsAndHashCode(callSuper=false)
@TableName("t_base_lane_info")
public class BaseLaneInfo extends Model<BaseLaneInfo> {
private static final long serialVersionUID = 1L;
/**
* Lane id (channelization id + lane sequence number)
*/
@TableId(value = "id", type = IdType.AUTO)
private String id;
/**
* Lane code
*/
private String code;
/**
* Lane sequence number, numbered 11, 12, 13... starting from the leftmost lane
*/
private Integer sort;
/**
* Lane type: 1 road-segment lane; 2 entrance lane; 3 exit lane; 4 left-turn waiting area; 6 straight-through waiting area
*/
private Integer type;
/**
* Lane direction: 1 north; 2 northeast; 3 east; 4 southeast; 5 south; 6 southwest; 7 west; 8 northwest
*/
private Integer dir;
/**
* Lane turn: 1 left; 2 straight; 3 right; 4 U-turn; 5 straight+left; 6 straight+right; 7 left+straight+right; 8 left+right; 9 left+U-turn; 10 straight+U-turn; 11 right+U-turn; 12 left+straight+U-turn; 13 straight+right+U-turn; 14 left+straight+right+U-turn; 15 left+right+U-turn
*/
private Integer turn;
/**
* Lane category: 1 motor vehicle; 2 non-motor vehicle; 3 bus only; 4 variable; 5 tidal
*/
private Integer category;
/**
* Intersection ID
*/
private String crossId;
/**
* Road segment (rid) id
*/
private String rid;
/**
* Channelization id
*/
private String segmentId;
/**
* Lane length
*/
private Double length;
/**
* Lane width
*/
private Double width;
/**
* Geometry (WKT)
*/
private String wkt;
/**
* Creation time
*/
private LocalDateTime gmtCreate;
/**
* Modification time
*/
private LocalDateTime gmtModified;
@Override
protected Serializable pkVal() {
return this.id;
}
private Integer ridLevel;
private String ridWkt;
private Double ridLength;
}
package com.wanji.indicators.entity;
import com.baomidou.mybatisplus.annotation.TableName;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.extension.activerecord.Model;
import java.time.LocalDateTime;
import com.baomidou.mybatisplus.annotation.TableId;
import java.io.Serializable;
import lombok.Data;
import lombok.EqualsAndHashCode;
@Data
@EqualsAndHashCode(callSuper=false)
@TableName("t_base_rid_info")
public class BaseRidInfo extends Model<BaseRidInfo> {
private static final long serialVersionUID = 1L;
/**
* Road segment id (start cross id + end cross id + main/side road sequence number)
*/
@TableId(value = "id", type = IdType.AUTO)
private String id;
/**
* Road segment name
*/
private String name;
/**
* Road id
*/
private String roadId;
/**
* Road name
*/
private String roadName;
/**
* Road direction id
*/
private String roadDirId;
/**
* Start intersection id
*/
private String startCrossId;
/**
* End intersection id
*/
private String endCrossId;
/**
* Exit direction: 1 north; 2 northeast; 3 east; 4 southeast; 5 south; 6 southwest; 7 west; 8 northwest
*/
private Integer outDir;
/**
* Entry direction: 1 north; 2 northeast; 3 east; 4 southeast; 5 south; 6 southwest; 7 west; 8 northwest
*/
private Integer inDir;
/**
* Exit angle, 0~359 clockwise from due north
*/
private Double startAngle;
/**
* Entry angle, 0~359 clockwise from due north
*/
private Double endAngle;
/**
* Travel direction: 0 up; 1 down
*/
private Integer direction;
/**
* Sequence number of the segment along the road direction
*/
private Integer sort;
/**
* Segment trend: 1 south to north; 2 west to east; 3 north to south; 4 east to west; 5 inner ring; 6 outer ring; 99 other (see the segment trend dictionary)
*/
private Integer trend;
/**
* Road grade:
41000 expressway;
42000 national highway;
43000 urban expressway;
44000 urban arterial road;
45000 urban sub-arterial road;
47000 ordinary urban road;
51000 provincial highway;
52000 county highway;
53000 township road;
54000 county/township/village internal road;
49 minor road (see the road grade dictionary)
*/
private Integer level;
/**
* Administrative area code; comma separated when the segment spans districts
*/
private String areaCode;
/**
* Segment length (meters)
*/
private Double length;
/**
* Segment width (meters)
*/
private Double width;
/**
* One-way flag: 0 no; 1 yes; 99 other
*/
private Integer isOneway;
/**
* Segment type: 1 road segment; 3 ramp; 4 tunnel; 5 bridge; 6 elevated; 99 other
*/
private Integer type;
/**
* Main/side flag: 1 main road; 2 side road; 99 other (see the main/side flag dictionary)
*/
private Integer mainFlag;
/**
* Geometry (WKT)
*/
private String wkt;
/**
* Signal-control segment id
*/
private String scId;
/**
* Signal-control segment name
*/
private String scName;
/**
* Signal-control segment sequence number
*/
private Integer scSort;
/**
* Creation time
*/
private LocalDateTime gmtCreate;
/**
* Modification time
*/
private LocalDateTime gmtModified;
@Override
protected Serializable pkVal() {
return this.id;
}
}
package com.wanji.indicators.mapper;
import com.wanji.indicators.entity.BaseLaneInfo;
import java.util.List;
import org.apache.ibatis.annotations.Param;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
public interface BaseLaneInfoMapper extends BaseMapper<BaseLaneInfo>{
/**
* Query all rows of t_base_lane_info
*/
List<BaseLaneInfo> findAllBaseLaneInfo();
/**
* Query t_base_lane_info by primary key id
* @param id
*/
BaseLaneInfo findBaseLaneInfoByid(@Param("id") String id);
/**
* Query t_base_lane_info by condition
* @param baseLaneInfo
*/
List<BaseLaneInfo> findBaseLaneInfoByCondition(BaseLaneInfo baseLaneInfo);
/**
* Delete from t_base_lane_info by primary key id
* @param id
*/
Integer deleteBaseLaneInfoByid(@Param("id") String id);
/**
* Update t_base_lane_info by primary key id
* @param baseLaneInfo
*/
Integer updateBaseLaneInfoByid(BaseLaneInfo baseLaneInfo);
/**
* Insert into t_base_lane_info
* @param baseLaneInfo
*/
Integer addBaseLaneInfo(BaseLaneInfo baseLaneInfo);
}
package com.wanji.indicators.mapper;
import com.wanji.indicators.entity.BaseRidInfo;
import java.util.List;
import org.apache.ibatis.annotations.Param;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
public interface BaseRidInfoMapper extends BaseMapper<BaseRidInfo>{
/**
* Query all rows of t_base_rid_info
*/
List<BaseRidInfo> findAllBaseRidInfo();
/**
* Query t_base_rid_info by primary key id
* @param id
*/
BaseRidInfo findBaseRidInfoByid(@Param("id") String id);
/**
* Query t_base_rid_info by condition
* @param baseRidInfo
*/
List<BaseRidInfo> findBaseRidInfoByCondition(BaseRidInfo baseRidInfo);
/**
* Delete from t_base_rid_info by primary key id
* @param id
*/
Integer deleteBaseRidInfoByid(@Param("id") String id);
/**
* Update t_base_rid_info by primary key id
* @param baseRidInfo
*/
Integer updateBaseRidInfoByid(BaseRidInfo baseRidInfo);
/**
* Insert into t_base_rid_info
* @param baseRidInfo
*/
Integer addBaseRidInfo(BaseRidInfo baseRidInfo);
}
package com.wanji.indicators.model;
import lombok.Data;
import java.io.Serializable;
import java.util.List;
/**
* @author fengyi
* @date 2023/3/1
* @description
*/
@Data
public class CarTrackModel implements Serializable {
/*******************************************/
/*global (domain-wide) ID*/
private String orgCode = "";
/*vehicle ID*/
private Integer id;
/*license plate number*/
private String picLicense;
/*plate color*/
private Integer licenseColor;
/*vehicle color*/
private Integer originalColor;
/*vehicle type*/
private Integer originalType;
/*vehicle owner*/
private String owner;
/*usage type*/
private Integer function;
/*ownership*/
private Integer ownership;
/*road network data*/
private RoadNet roadnet;
/*******************************************/
/*formatted timestamp: yyyy-MM-dd HH:mm:ss.SSS*/
private String timeStamp;
/*numeric timestamp*/
private Long globalTimeStamp;
/*longitude*/
private Double longitude;
/*latitude*/
private Double latitude;
/*speed*/
private Double speed;
/*heading angle*/
private Double courseAngle;
/*******************************************/
@Data
public static class RoadNet implements Serializable {
/*intersection ID*/
private String crossId;
/*road segment (rid) ID*/
private String rid;
/*lane ID*/
private String laneId;
/*channelization (segment) ID*/
private String segmentId;
}
}
package com.wanji.indicators.model;
import lombok.Data;
import java.io.Serializable;
import java.util.List;
/**
* @author fengyi
* @date 2023/3/1
* @description
*/
@Data
public class FrameModel implements Serializable {
private String timeStamp;
private Long globalTimeStamp;
private Integer participantNum;
private String orgCode ="";
private List<CarTrackModel> e1FrameParticipant;
}
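A sketch of how one frame of track JSON maps onto FrameModel via fastjson. The FrameFlatMap operator referenced later in this commit is not part of the diff, so this only illustrates the model shape; the sample payload and class name are invented:

import com.alibaba.fastjson.JSON;
import com.wanji.indicators.model.FrameModel;

class FrameParseExample {
    public static void main(String[] args) {
        String json = "{\"timeStamp\":\"2023-04-07 10:00:00.000\",\"globalTimeStamp\":1680832800000,"
                + "\"participantNum\":1,\"e1FrameParticipant\":[{\"id\":1,\"speed\":12.5}]}";
        FrameModel frame = JSON.parseObject(json, FrameModel.class);
        System.out.println(frame.getE1FrameParticipant().get(0).getSpeed()); // 12.5
    }
}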
package com.wanji.indicators.model;
import com.alibaba.fastjson.annotation.JSONField;
import com.google.gson.JsonObject;
import lombok.Data;
import java.io.Serializable;
import java.util.*;
/**
* @author fengyi
* @date 2023/3/1
* @description
*/
@Data
public class RoutePathCarTrackModel implements Serializable {
//set of id scopes; the domain ID and vehicle ID may change between intersection areas {"orgCode":"","id":""}
private Set<Map<String,Object>> targetId = new HashSet<>();
/*license plate number*/
private String picLicense;
/*vehicle time at the route start point*/
private String startTime;
/*lane at the start point*/
private String startLaneId;
/*rid at the start point*/
private String startRid;
/*channelization segment at the start point*/
private String startSegmentId;
/*intersection at the start point*/
private String startCrossId;
/*start longitude*/
private Double startLongitude;
/*start latitude*/
private Double startLatitude;
/*start timestamp*/
@JSONField(serialize = false)
private Long startGlobalTimeStamp;
/*vehicle time at the route end point*/
private String endTime;
/*lane at the end point*/
private String endLaneId;
/*rid at the end point*/
private String endRid;
/*channelization segment at the end point*/
private String endSegmentId;
/*intersection at the end point*/
private String endCrossId;
/*end longitude*/
private Double endLongitude;
/*end latitude*/
private Double endLatitude;
/*end timestamp*/
@JSONField(serialize = false)
private Long endGlobalTimeStamp;
/*route number*/
//@JSONField(serialize = false)
private Integer lineNo = 0;
/*route point sequence number*/
//@JSONField(serialize = false)
private Integer seqNo;
/*route length*/
//@JSONField(serialize = false)
private Double routeLength=0D;
//route coordinate list
@JSONField(serialize = false)
private List<Double[]> pointList = new ArrayList<>();
/*previous point longitude*/
@JSONField(serialize = false)
private Double preLongitude;
/*previous point latitude*/
@JSONField(serialize = false)
private Double preLatitude;
/*previous point timestamp*/
@JSONField(serialize = false)
private Long preTimestamp;
@JSONField(serialize = false)
private String preDateTime;
/*whether this route has finished computing*/
@JSONField(serialize = false)
private boolean isFinished;
@JSONField(serialize = false)
private Long createTime;
}
package com.wanji.indicators.model;
import com.alibaba.fastjson.annotation.JSONField;
import lombok.Data;
import lombok.ToString;
import java.io.Serializable;
import java.util.Date;
import java.util.List;
import java.util.Set;
/**
* @author fengyi
* @date 2023/3/1
* @description
*/
@Data
public class SingleCarTrackListModel implements Serializable {
/**
* Common properties start
*/
/*global (domain-wide) ID*/
private String orgCode;
/*vehicle ID*/
private Integer id;
/*license plate number*/
private String picLicense;
/*plate color*/
private Integer licenseColor;
/*vehicle color*/
private Integer originalColor;
/*vehicle type*/
private Integer originalType;
/*vehicle owner*/
private String owner;
/*usage type*/
private Integer function;
/*ownership*/
private Integer ownership;
/*window start time*/
@JSONField(format="yyyy-MM-dd HH:mm:ss.SSS")
private Date startTime;
/*window end time*/
@JSONField(format="yyyy-MM-dd HH:mm:ss.SSS")
private Date endTime;
/*batch domain-wide monitoring start time*/
@JSONField(format="yyyy-MM-dd HH:mm:ss.SSS")
private Date globalStartTime;
/*batch domain-wide monitoring end time*/
@JSONField(format="yyyy-MM-dd HH:mm:ss.SSS")
private Date globalEndTime;
//road network data
private Set<CarTrackModel.RoadNet> roadnets;
//private CarTrackCommonProperty staticProperty;
/**
* Dynamic track property list
*/
private List<CarTrackDynamicProperty> tracks;
// @Data
// @ToString
// public static class CarTrackCommonProperty implements Serializable {
// /*global (domain-wide) ID*/
// private Integer orgCode;
// /*vehicle ID*/
// private Integer id;
// /*license plate number*/
// private String picLicense;
// /*plate color*/
// private Integer licenseColor;
// /*vehicle color*/
// private Integer originalColor;
// /*vehicle type*/
// private Integer originalType;
// /*vehicle owner*/
// private String owner;
// /*usage type*/
// private String function;
// /*ownership*/
// private String ownership;
// /*window start time*/
// private String startTime;
// /*window end time*/
// private String endTime;
// /*batch domain-wide monitoring start time*/
// private String globalStartTime;
// /*batch domain-wide monitoring end time*/
// private String globalEndTime;
//
// }
@Data
public static class CarTrackDynamicProperty implements Serializable {
/*formatted timestamp: yyyy-MM-dd HH:mm:ss.SSS*/
private String timeStamp;
/*numeric timestamp*/
private Long globalTimeStamp;
/*longitude*/
private Double longitude;
/*latitude*/
private Double latitude;
/*speed*/
private Double speed;
/*heading angle*/
private Double courseAngle;
}
}
package com.wanji.indicators.model;
import lombok.Data;
import java.io.Serializable;
import java.util.List;
/**
* @author fengyi
* @date 2023/3/1
* @description
*/
@Data
public class SnapshotIndicatorModel implements Serializable {
private String timeStamp;
private Long laneNum;
private String orgCode ="";
private List<SnapshotInfo> eventList;
}
package com.wanji.indicators.model;
import lombok.Data;
import java.io.Serializable;
/**
* @author fengyi
* @date 2023/4/7
* @description
*/
@Data
public class SnapshotInfo implements Serializable {
private Long timestamp;
private String globalId;
private Integer carNums;
private double dynamicQueueLength;
private String laneId;
private double meanSpaceHeadway;
private double staticQueueLength;
private double vehicleLengthRatio;
private double vehicleNumsRatio;
}
package com.wanji.indicators.model;
import lombok.Data;
import java.io.Serializable;
import java.util.Date;
/**
* @author fengyi
* @date 2023/3/10
* @description
*/
@Data
public class TopicLastTimeModel implements Serializable {
private String topic;
private Long timestamp;
private Long offset;
private String bootstrapServers;
private long createTime;
}
package com.wanji.indicators.service;
import com.wanji.indicators.entity.BaseLaneInfo;
import com.baomidou.mybatisplus.extension.service.IService;
import com.baomidou.mybatisplus.core.metadata.IPage;
import java.util.List;
/**
* <p>
* Lane base info service
* </p>
*
* @author fengyi
* @since 2023-04-06
*/
public interface BaseLaneInfoService extends IService<BaseLaneInfo> {
public List<BaseLaneInfo> findLaneRidInfo();
}
package com.wanji.indicators.service;
import com.wanji.indicators.entity.BaseRidInfo;
import com.baomidou.mybatisplus.extension.service.IService;
import com.baomidou.mybatisplus.core.metadata.IPage;
import java.util.List;
/**
* <p>
* Road segment (rid) base info service
* </p>
*
* @author fengyi
* @since 2023-04-05
*/
public interface BaseRidInfoService extends IService<BaseRidInfo> {
public List<BaseRidInfo> findAll();
}
package com.wanji.indicators.service.impl;
import com.vividsolutions.jts.geom.LineString;
import com.wanji.indicators.entity.BaseLaneInfo;
import com.wanji.indicators.entity.BaseRidInfo;
import com.wanji.indicators.mapper.BaseLaneInfoMapper;
import com.wanji.indicators.service.BaseLaneInfoService;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.wanji.indicators.util.GeomsConvertUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import javax.annotation.Resource;
import java.util.List;
/**
* <p>
* Lane base info service implementation
* </p>
*
* @author fengyi
* @since 2023-04-06
*/
@Service
public class BaseLaneInfoServiceImpl extends ServiceImpl<BaseLaneInfoMapper, BaseLaneInfo> implements BaseLaneInfoService {
@Resource
private BaseLaneInfoMapper baseLaneInfoMapper;
@Override
public List<BaseLaneInfo> findLaneRidInfo() {
List<BaseLaneInfo> list = baseLaneInfoMapper.findAllBaseLaneInfo();
for (BaseLaneInfo model : list) {
String wkt = model.getRidWkt();
LineString lineString = GeomsConvertUtil.getLineString(wkt);
// note: getLength() over lon/lat coordinates yields a length in degrees, not meters
double ridLength = lineString.getLength();
model.setRidLength(ridLength);
}
return list;
}
}
package com.wanji.indicators.service.impl;
import com.vividsolutions.jts.geom.LineString;
import com.wanji.indicators.entity.BaseRidInfo;
import com.wanji.indicators.mapper.BaseRidInfoMapper;
import com.wanji.indicators.service.BaseRidInfoService;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.wanji.indicators.util.ArithOfBigDecmial;
import com.wanji.indicators.util.GeomsConvertUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import javax.annotation.Resource;
import java.util.List;
/**
* <p>
* Road segment (rid) base info service implementation
* </p>
*
* @author fengyi
* @since 2023-04-05
*/
@Service
public class BaseRidInfoServiceImpl extends ServiceImpl<BaseRidInfoMapper, BaseRidInfo> implements BaseRidInfoService {
@Resource
private BaseRidInfoMapper baseRidInfoMapper;
@Override
public List<BaseRidInfo> findAll() {
List<BaseRidInfo> list = baseRidInfoMapper.findAllBaseRidInfo();
for (BaseRidInfo model : list) {
String wkt = model.getWkt();
LineString lineString = GeomsConvertUtil.getLineString(wkt);
double ridLength = lineString.getLength();
model.setLength(ridLength);
}
return list;
}
public static void main(String[] args) {
String wkt = "112.96358875770298,28.188337641846843;112.96340547895414,28.18724784621854;112.96322220020323,28.18615803948525;112.96310925849781,28.184595086003952;112.96292227780222,28.18303210967499";
LineString lineString = GeomsConvertUtil.getLineString(wkt);
double ridLength = lineString.getLength();
// convert the degree-based length to meters (one degree of arc is roughly 6371000 * PI / 180 meters)
System.out.println(ArithOfBigDecmial.mul(ridLength, 6371000 * Math.PI / 180));
}
}
package com.wanji.indicators.source;
import com.wanji.indicators.entity.BaseLaneInfo;
import com.wanji.indicators.service.BaseLaneInfoService;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import javax.annotation.Resource;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* Loads, per entrance lane, the right-turn exit rid and the entrance RIDs reachable from that right-turn exit RID
*/
@Slf4j
public class LaneRidLengthSource extends RichParallelSourceFunction<Map<String,List<BaseLaneInfo>>> {
private volatile boolean running = true;
@Resource
BaseLaneInfoService baseLaneInfoService;
@Override
public void open(Configuration parameters) throws Exception {
super.open(parameters);
ApplicationContext beanConf = new ClassPathXmlApplicationContext("spring-container.xml");
baseLaneInfoService = beanConf.getBean(BaseLaneInfoService.class);
}
@Override
public void run(SourceContext<Map<String,List<BaseLaneInfo>>> sourceContext) throws Exception {
if (running) {
List<BaseLaneInfo> list = baseLaneInfoService.findLaneRidInfo();
if (!list.isEmpty()) {
Map<String,List<BaseLaneInfo>> group = list.stream().collect(Collectors.groupingBy(o->o.getId()));
sourceContext.collect(group);
}
}
}
@Override
public void cancel() {
running = false;
}
}
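One possible wiring for the source above (assumed, not part of this commit); it runs the source at parallelism 1 and prints the grouped map, and requires spring-container.xml on the classpath because open() builds the Spring context:

import com.wanji.indicators.source.LaneRidLengthSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

class LaneRidSourceWiring {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // in the real jobs this map would typically be broadcast to enrich track records
        env.addSource(new LaneRidLengthSource()).setParallelism(1).print();
        env.execute("lane-rid-source-smoke-test");
    }
}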
package com.wanji.indicators.task.export;
import com.alibaba.fastjson.JSONObject;
import com.google.gson.JsonObject;
import lombok.Data;
import org.apache.kafka.common.record.TimestampType;
import java.io.Serializable;
/**
* @author fengyi
* @date 2023/3/10
* @description
*/
@Data
public class KafkaRecordModel implements Serializable {
private TimestampType timestampType ;
private Long timestamp;
private JSONObject values;
private String topic;
}
package com.wanji.indicators.task.export;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.google.gson.JsonObject;
import com.wanji.indicators.model.TopicLastTimeModel;
import com.wanji.indicators.task.track.service.func.SplitRoutePathFunction;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ReadKafkaDeserialization implements KafkaDeserializationSchema<KafkaRecordModel> {
private static final Logger log = LoggerFactory.getLogger(ReadKafkaDeserialization.class);
private final String encoding = "UTF8";
public ReadKafkaDeserialization() {
}
@Override
public TypeInformation<KafkaRecordModel> getProducedType() {
return TypeInformation.of(KafkaRecordModel.class);
}
@Override
public void open(DeserializationSchema.InitializationContext context) throws Exception {
}
@Override
public boolean isEndOfStream(KafkaRecordModel nextElement) {
return false;
}
@Override
public KafkaRecordModel deserialize(ConsumerRecord<byte[], byte[]> consumerRecord) throws Exception {
if (consumerRecord != null) {
try {
String json = new String(consumerRecord.value(),"UTF-8");
JSONObject jsonObject = JSONObject.parseObject(json);
KafkaRecordModel model = new KafkaRecordModel();
model.setTimestamp(consumerRecord.timestamp());
model.setTimestampType(consumerRecord.timestampType());
model.setValues(jsonObject);
model.setTopic(consumerRecord.topic());
return model;
} catch (Exception e) {
log.error("deserialize failed : " + e.getMessage());
}
}
return null;
}
}
\ No newline at end of file
package com.wanji.indicators.task.export;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.wanji.indicators.model.RoutePathCarTrackModel;
import com.wanji.indicators.task.track.service.sink.FileSink;
import com.wanji.indicators.util.DateUtil;
import com.wanji.indicators.util.PropertiesHelper;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.core.execution.JobListener;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.ProcessingTimeSessionWindows;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.kafka.common.record.TimestampType;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
public class TrackExportToFileMain {
private static final Logger log = LoggerFactory.getLogger(TrackExportToFileMain.class);
public static void main(String[] args) {
// CLI args: bootstrapServers, topic, startTime, endTime (yyyyMMddHHmmss) and sinkPath
ParameterTool parameter = ParameterTool.fromArgs(args);
String bootstrapServers = parameter.get("bootstrapServers");
String topic = parameter.get("topic");
String startTime = parameter.get("startTime");
String endTime = parameter.get("endTime");
String sinkPath = parameter.get("sinkPath");// output directory
if (!sinkPath.endsWith("/")) {
sinkPath += "/";
}
sinkPath += startTime + "-" + endTime + ".json";
log.info("导出路径:" + sinkPath);
long startTimestamp = DateUtil.StringToMillis(startTime, "yyyyMMddHHmmss");
long endTimestamp = DateUtil.StringToMillis(endTime, "yyyyMMddHHmmss");
PropertiesHelper instance = PropertiesHelper.getInstance();
Properties properties = instance.getProperties();
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
KafkaSource<KafkaRecordModel> source = KafkaSource.<KafkaRecordModel>builder()
.setProperties(instance.getConsumerProperties())
.setProperty("auto.offset.commit", "true")
.setProperty("auto.commit.interval.ms", "1000")
.setProperty("commit.offsets.on.checkpoint", "true")
.setBootstrapServers(bootstrapServers)
.setTopics(topic)
.setGroupId(properties.getProperty("consumer.group.id") + "-Export")
// start from the first offset whose timestamp is >= startTimestamp
.setStartingOffsets(OffsetsInitializer.timestamp(startTimestamp))
.setDeserializer(KafkaRecordDeserializationSchema.of(new ReadKafkaDeserialization()))
.build();
DataStream<KafkaRecordModel> stream = env
.fromSource(source, WatermarkStrategy.noWatermarks(), "kafka-car-track-source");
SingleOutputStreamOperator<KafkaRecordModel> frameModelStream =
stream.filter(new FilterFunction<KafkaRecordModel>() {
@Override
public boolean filter(KafkaRecordModel value) throws Exception {
Long ts = value.getTimestamp();
//Long endTs = DateUtil.StringToMillis("2023-03-30 11:40:00:999","yyyy-MM-dd HH:mm:ss:SSS");
return ts <= endTimestamp;
}
}).name("Filter records within the export time range");
SingleOutputStreamOperator<KafkaRecordModel> timeOutStream = frameModelStream
.keyBy(KafkaRecordModel::getTopic)
.process(new KeyedProcessFunction<String, KafkaRecordModel, KafkaRecordModel>() {
private transient MapState<String, Long> dataTimeState;
// timeout between consecutive records, in minutes
private Integer timeOut = 10;
@Override
public void open(Configuration parameters) throws Exception {
StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.hours(12))// state time-to-live
.setUpdateType(StateTtlConfig.UpdateType.OnReadAndWrite)
.cleanupFullSnapshot()
.setStateVisibility(StateTtlConfig.StateVisibility.ReturnExpiredIfNotCleanedUp)// return expired entries if they have not been cleaned up yet
.build();
// keyed by topic name; value is the processing time of the last record seen for that topic
MapStateDescriptor<String, Long> mapStateDescriptor = new MapStateDescriptor<>("Kafka_Data_Export",
TypeInformation.of(new TypeHint<String>() {
}),
TypeInformation.of(new TypeHint<Long>() {
})
);
mapStateDescriptor.enableTimeToLive(ttlConfig);
dataTimeState = getRuntimeContext().getMapState(mapStateDescriptor);
}
@Override
public void processElement(KafkaRecordModel value, Context context, Collector<KafkaRecordModel> out) throws Exception {
out.collect(value);
int interval = 60 * 1000;
long fireTime = context.timerService().currentProcessingTime() + interval;
context.timerService().registerProcessingTimeTimer(fireTime);
dataTimeState.put(value.getTopic(), context.timerService().currentProcessingTime());
//log.info("register process time:" + DateUtil.toDateTime(fireTime, "yyyy-MM-dd HH:mm:ss"));
}
@Override
public void onTimer(long timestamp, OnTimerContext ctx, Collector<KafkaRecordModel> out) throws Exception {
Iterator<Map.Entry<String, Long>> iterator = dataTimeState.iterator();
while (iterator.hasNext()) {
Map.Entry<String, Long> entry = iterator.next();
long dataTime = entry.getValue();
//log.info("dataTime:"+DateUtil.toDateTime(dataTime,"yyyy-MM-dd HH:mm:ss")+" onTime:" + DateUtil.toDateTime(timestamp, "yyyy-MM-dd HH:mm:ss"));
if (timestamp - dataTime > 30 * 1000) {
// no new data arrived within the timeout; stop the export job
ctx.timerService().deleteProcessingTimeTimer(timestamp);
System.exit(0);
}
}
}
}
});
SingleOutputStreamOperator<String> toJsonStream =
timeOutStream.flatMap(
new FlatMapFunction<KafkaRecordModel, String>() {
@Override
public void flatMap(KafkaRecordModel value, Collector<String> out) throws Exception {
Long ts = value.getTimestamp();
//Long endTs = DateUtil.StringToMillis("2023-03-30 11:40:00:999","yyyy-MM-dd HH:mm:ss:SSS");
// if (ts > endTimestamp) {
// System.exit(0);
// }
log.info("记录时间:{},数据导出范围【{},{}】", DateUtil.toDateTime(ts, "yyyy-MM-dd HH:mm:ss.SSS"), startTime, endTime);
String json = JSON.toJSONString(value);
out.collect(json);
}
})
.name("转Json输出");
try {
toJsonStream.addSink(new FileSink(sinkPath));
env.execute("根据起始时间范围导出数据到文件");
env.executeAsync().cancel();
} catch (Exception e) {
e.printStackTrace();
//log.error("交通指标计算任务异常 : " + e);
}
}
}
package com.wanji.indicators.task.freeflow.service.func;
import com.wanji.indicators.task.freeflow.service.model.FrameMaxSpeedModel;
import com.wanji.indicators.task.freeflow.service.model.MaxSpeedResultModel;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author fengyi
* @date 2023/4/6
* @description
*/
public class SpeedAggregateFunction implements AggregateFunction<FrameMaxSpeedModel,MaxSpeedResultModel,MaxSpeedResultModel> {
private static final Logger log = LoggerFactory.getLogger(SpeedAggregateFunction.class);
@Override
public MaxSpeedResultModel createAccumulator() {
MaxSpeedResultModel initModel = new MaxSpeedResultModel();
initModel.setAvgSpeed(0D);
initModel.setMaxSpeed(0d);
initModel.setSumSpeed(0D);
initModel.setCount(0);
return initModel;
}
/**
* Invoked once for every input element.
* @param value the incoming element
* @param accumulator the intermediate result accumulated so far
* @return the updated accumulator
*/
@Override
public MaxSpeedResultModel add(FrameMaxSpeedModel value, MaxSpeedResultModel accumulator) {
accumulator.setMaxSpeed(Math.max(value.getSpeed(), accumulator.getMaxSpeed()));
accumulator.setSumSpeed(value.getSpeed()+accumulator.getSumSpeed());
accumulator.setCount(accumulator.getCount()+1);
// running mean derived from the exact sum and count
accumulator.setAvgSpeed(accumulator.getSumSpeed()/accumulator.getCount());
//System.out.println("avgSpeed:"+accumulator.getAvgSpeed()+":"+accumulator.getSumSpeed()/accumulator.getCount()+" maxSpeed:"+accumulator.getMaxSpeed());
return accumulator;
}
/*
Invoked when the window's end time is reached; returns the final result.
*/
@Override
public MaxSpeedResultModel getResult(MaxSpeedResultModel accumulator) {
//log.info("AggResult:"+accumulator);
return accumulator;
}
/**
* merge is only needed by session (merging) windows; implemented anyway so a
* merging assigner never receives a null accumulator.
* @param a first partial accumulator
* @param b second partial accumulator
* @return the combined accumulator
*/
@Override
public MaxSpeedResultModel merge(MaxSpeedResultModel a, MaxSpeedResultModel b) {
MaxSpeedResultModel merged = createAccumulator();
merged.setMaxSpeed(Math.max(a.getMaxSpeed(), b.getMaxSpeed()));
merged.setSumSpeed(a.getSumSpeed() + b.getSumSpeed());
merged.setCount(a.getCount() + b.getCount());
merged.setAvgSpeed(merged.getCount() == 0 ? 0D : merged.getSumSpeed() / merged.getCount());
return merged;
}
}
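A tiny driver checking the accumulator semantics above with made-up speeds; illustrative only, the class name is not part of the commit:

import com.wanji.indicators.task.freeflow.service.func.SpeedAggregateFunction;
import com.wanji.indicators.task.freeflow.service.model.FrameMaxSpeedModel;
import com.wanji.indicators.task.freeflow.service.model.MaxSpeedResultModel;

class SpeedAggregateExample {
    public static void main(String[] args) {
        SpeedAggregateFunction agg = new SpeedAggregateFunction();
        MaxSpeedResultModel acc = agg.createAccumulator();
        for (double speed : new double[]{10d, 20d, 30d}) {
            FrameMaxSpeedModel frame = new FrameMaxSpeedModel();
            frame.setSpeed(speed);
            acc = agg.add(frame, acc);
        }
        System.out.println(acc.getMaxSpeed()); // 30.0
        System.out.println(acc.getAvgSpeed()); // 20.0 = sumSpeed / count
    }
}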
package com.wanji.indicators.task.freeflow.service.func;
import com.wanji.indicators.task.freeflow.service.model.MaxSpeedResultModel;
import com.wanji.indicators.util.DateUtil;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author fengyi
* @date 2023/4/6
* @description
*/
public class SpeedProcessWindow extends ProcessWindowFunction<
MaxSpeedResultModel,
MaxSpeedResultModel,
String,
TimeWindow> {
private static final Logger log = LoggerFactory.getLogger(SpeedProcessWindow.class);
@Override
public void process(String s, Context context, Iterable<MaxSpeedResultModel> elements, Collector<MaxSpeedResultModel> out) throws Exception {
long windowStartTs = context.window().getStart();
long windowEndTs = context.window().getEnd();
if (elements.iterator().hasNext()) {
MaxSpeedResultModel result = elements.iterator().next();
result.setStartTime(DateUtil.toDateTime(windowStartTs,"yyyy-MM-dd HH:mm:ss.SSS"));
result.setEndTime(DateUtil.toDateTime(windowEndTs,"yyyy-MM-dd HH:mm:ss.SSS"));
result.setDate(DateUtil.toDateTime(windowEndTs,"yyyy-MM-dd"));
log.info("FreeFlowSpeed Result:" + result.toString());
out.collect(result);
}
}
}
package com.wanji.indicators.task.freeflow.service.model;
import com.wanji.indicators.constant.Constant;
import lombok.Data;
import java.io.Serializable;
/**
* @author fengyi
* @date 2023/4/6
* @description
*/
@Data
public class FrameMaxSpeedModel implements Serializable {
private String id= Constant.HASH_KEY_AREA_FREE_FLOW_SPEED;
private Double speed;
private long timestamp;
//yyyyMMdd
private String date;
}
package com.wanji.indicators.task.freeflow.service.model;
import com.wanji.indicators.constant.Constant;
import lombok.Data;
import java.io.Serializable;
/**
* @author fengyi
* @date 2023/4/6
* @description
*/
@Data
public class MaxSpeedResultModel implements Serializable {
private String id= Constant.HASH_KEY_AREA_FREE_FLOW_SPEED;
private String startTime;
private String endTime;
private Double maxSpeed;
private Double avgSpeed;
private Double sumSpeed;
private Integer count;
private long timestamp;
//yyyyMMdd
private String date;
}
package com.wanji.indicators.task.freeflow.service.sink;
import com.alibaba.fastjson.JSON;
import com.wanji.indicators.task.freeflow.service.model.MaxSpeedResultModel;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;
/**
* @author fengyi
* @date 2023/4/7
* @description
*/
public class CustomRedisMapper implements RedisMapper<MaxSpeedResultModel> {
private String redisKey ;
public CustomRedisMapper(String redisKey){
this.redisKey = redisKey;
}
// Redis command used to persist results: store into a hash, i.e. HSET <redisKey> <field> <value>
@Override
public RedisCommandDescription getCommandDescription() {
return new RedisCommandDescription(RedisCommand.HSET, this.redisKey);
}
@Override
public String getKeyFromData(MaxSpeedResultModel data) {
return data.getId();
}
@Override
public String getValueFromData(MaxSpeedResultModel data) {
return JSON.toJSONString(data);
}
}
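Read-back sketch: with the mapper above, each result is written as HSET AREA_REALTIME_INDICATOR AREA_FREE_FLOW_SPEED <json>, so it can be fetched with Jedis (already a project dependency; host, port and class name are invented for illustration):

import com.alibaba.fastjson.JSON;
import com.wanji.indicators.constant.Constant;
import com.wanji.indicators.task.freeflow.service.model.MaxSpeedResultModel;
import redis.clients.jedis.Jedis;

class RedisReadBackExample {
    public static void main(String[] args) {
        try (Jedis jedis = new Jedis("127.0.0.1", 6379)) {
            String json = jedis.hget(Constant.AREA_REALTIME_INDICATOR,
                    Constant.HASH_KEY_AREA_FREE_FLOW_SPEED);
            System.out.println(JSON.parseObject(json, MaxSpeedResultModel.class));
        }
    }
}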
package com.wanji.indicators.task.freeflow.stream;
import com.wanji.indicators.constant.Constant;
import com.wanji.indicators.model.CarTrackModel;
import com.wanji.indicators.model.FrameModel;
import com.wanji.indicators.task.freeflow.service.sink.CustomRedisMapper;
import com.wanji.indicators.config.RedisConfig;
import com.wanji.indicators.task.freeflow.service.model.FrameMaxSpeedModel;
import com.wanji.indicators.task.freeflow.service.model.MaxSpeedResultModel;
import com.wanji.indicators.task.freeflow.service.func.SpeedAggregateFunction;
import com.wanji.indicators.task.freeflow.service.func.SpeedProcessWindow;
import com.wanji.indicators.task.track.service.func.FrameFlatMap;
import com.wanji.indicators.util.DateUtil;
import com.wanji.indicators.util.PropertiesHelper;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.util.Collector;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.util.*;
/**
* Free-flow speed computation
*/
public class FreeFlowSpeedMain {
private static final Logger log = LoggerFactory.getLogger(FreeFlowSpeedMain.class);
public static void main(String[] args) {
PropertiesHelper instance = PropertiesHelper.getInstance();
Properties properties = instance.getProperties();
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1);
DateTime currentDateTime = new DateTime();
// midnight at the start of the previous day
DateTime dateTime = currentDateTime.withMillisOfDay(0).plusDays(-1);
KafkaSource<String> source = KafkaSource.<String>builder()
.setProperties(instance.getConsumerProperties())
.setProperty("auto.offset.commit", "true")
.setProperty("auto.commit.interval.ms", "1000")
.setProperty("commit.offsets.on.checkpoint", "true")
.setBootstrapServers(properties.getProperty("bootstrap.servers"))
.setTopics(properties.getProperty("consumer.topic"))
.setGroupId(properties.getProperty("consumer.group.id") + "_Free_Flow_Speed")
/* Ways to set the starting offsets:
1. consume from specified offsets: OffsetsInitializer.offsets(Map<TopicPartition, Long> offsets)
2. consume from the latest offset: OffsetsInitializer.latest()
3. consume from the earliest offset: OffsetsInitializer.earliest()
4. consume from the last committed offsets: OffsetsInitializer.committedOffsets()
5. brand-new group with no commits yet, plus a fallback strategy: OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST)
*/
// start from the first offset whose timestamp is >= the given timestamp
.setStartingOffsets(OffsetsInitializer.timestamp(dateTime.getMillis()))
.setDeserializer(KafkaRecordDeserializationSchema.valueOnly(StringDeserializer.class))
.build();
DataStream<String> stream = env
.fromSource(source, WatermarkStrategy.noWatermarks(), "kafka-car-track-source");
SingleOutputStreamOperator<FrameModel> frameModelStream =
stream
.flatMap(new FrameFlatMap())
.setParallelism(1)
.name("轨迹帧数据-JsonToObject");
SingleOutputStreamOperator<FrameMaxSpeedModel> carTrackModelStream =
frameModelStream.
flatMap(new FlatMapFunction<FrameModel, FrameMaxSpeedModel>() {
@Override
public void flatMap(FrameModel value, Collector<FrameMaxSpeedModel> out) throws Exception {
List<CarTrackModel> list = value.getE1FrameParticipant();
String motorObjectType = Constant.MOTOR_TYPES;
String[] sps = motorObjectType.split(",");
List<String> typeList = Arrays.asList(sps);
//take the maximum motor-vehicle speed in each frame
OptionalDouble doubleStream = list.stream().filter(o->typeList.contains(o.getOriginalType().toString())).mapToDouble(CarTrackModel::getSpeed).max();
if (doubleStream.isPresent()) {
double maxSpeed = doubleStream.getAsDouble();
FrameMaxSpeedModel model = new FrameMaxSpeedModel();
model.setSpeed(maxSpeed);
model.setTimestamp(value.getGlobalTimeStamp());
model.setDate(DateUtil.toDateTime(value.getGlobalTimeStamp(),"yyyyMMdd"));
//log.info("Frame MaxSpeed:"+maxSpeed+" "+DateUtil.toDateTime(value.getGlobalTimeStamp(),"yyyy-MM-dd HH:mm:ss.SSS"));
out.collect(model);
}
}
})
.setParallelism(1)
.name("计算每帧机动车最大速度");
SingleOutputStreamOperator<MaxSpeedResultModel> freeSpeedStream = carTrackModelStream
.assignTimestampsAndWatermarks(
WatermarkStrategy.<FrameMaxSpeedModel>forBoundedOutOfOrderness(Duration.ofSeconds(1))
.withTimestampAssigner((event, timestamp) -> event.getTimestamp()))
.keyBy(FrameMaxSpeedModel::getDate)
//.window(TumblingEventTimeWindows.of(Time.days(1), Time.hours(16)))
.window(TumblingEventTimeWindows.of(Time.days(1)))
.aggregate(new SpeedAggregateFunction(),new SpeedProcessWindow())
.name("按天取最大速度-作为自由流速度");
try {
//every 2 results, keep the larger one and update redis
//SingleOutputStreamOperator<MaxSpeedResultModel> maxStream = freeSpeedStream.keyBy(MaxSpeedResultModel::getDate).countWindow(2).maxBy("maxSpeed");
//maxStream.print();
freeSpeedStream.addSink( new RedisSink<>(new RedisConfig().getRedisConfig(), new CustomRedisMapper(Constant.AREA_REALTIME_INDICATOR)));
env.execute("自由流速度计算工作流");
} catch (Exception e) {
e.printStackTrace();
//log.error("交通指标计算任务异常 : " + e);
}
}
}
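SpeedAggregateFunction and SpeedProcessWindow are referenced above but are not part of this commit. A minimal sketch of the aggregate, assuming it mirrors QueueAggregateFunction further below and that MaxSpeedResultModel exposes maxSpeed/date accessors (inferred from the maxBy("maxSpeed") and keyBy(MaxSpeedResultModel::getDate) usages in the commented-out code above):
package com.wanji.indicators.task.freeflow.service.func;
import com.wanji.indicators.task.freeflow.service.model.FrameMaxSpeedModel;
import com.wanji.indicators.task.freeflow.service.model.MaxSpeedResultModel;
import org.apache.flink.api.common.functions.AggregateFunction;
//Sketch only; the real SpeedAggregateFunction in the repository may differ.
public class SpeedAggregateFunctionSketch implements AggregateFunction<FrameMaxSpeedModel, MaxSpeedResultModel, MaxSpeedResultModel> {
    @Override
    public MaxSpeedResultModel createAccumulator() {
        MaxSpeedResultModel init = new MaxSpeedResultModel();
        init.setMaxSpeed(0D);//assumed setter, by analogy with maxBy("maxSpeed") above
        return init;
    }
    @Override
    public MaxSpeedResultModel add(FrameMaxSpeedModel value, MaxSpeedResultModel acc) {
        //keep the largest per-frame maximum seen so far in the daily window
        acc.setMaxSpeed(Math.max(value.getSpeed(), acc.getMaxSpeed()));
        acc.setDate(value.getDate());
        return acc;
    }
    @Override
    public MaxSpeedResultModel getResult(MaxSpeedResultModel acc) { return acc; }
    @Override
    public MaxSpeedResultModel merge(MaxSpeedResultModel a, MaxSpeedResultModel b) {
        return a.getMaxSpeed() >= b.getMaxSpeed() ? a : b;
    }
}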
package com.wanji.indicators.task.monitor.service.func;
import com.wanji.indicators.model.TopicLastTimeModel;
import org.apache.flink.api.common.serialization.DeserializationSchema;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.streaming.connectors.kafka.KafkaDeserializationSchema;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class CustomKafkaDeserialization implements KafkaDeserializationSchema<TopicLastTimeModel> {
private static final Logger log = LoggerFactory.getLogger(CustomKafkaDeserialization.class);
private final String encoding = "UTF8";
public CustomKafkaDeserialization() {
}
@Override
public TypeInformation<TopicLastTimeModel> getProducedType() {
return TypeInformation.of(TopicLastTimeModel.class);
}
@Override
public void open(DeserializationSchema.InitializationContext context) throws Exception {
}
@Override
public boolean isEndOfStream(TopicLastTimeModel nextElement) {
return false;
}
@Override
public TopicLastTimeModel deserialize(ConsumerRecord<byte[], byte[]> consumerRecord) throws Exception {
if (consumerRecord != null) {
try {
String topic = consumerRecord.topic();
long ts = consumerRecord.timestamp();
long offset = consumerRecord.offset();
TopicLastTimeModel model = new TopicLastTimeModel();
model.setTimestamp(ts);
model.setTopic(topic);
model.setOffset(offset);
model.setCreateTime(System.currentTimeMillis());
return model;
} catch (Exception e) {
log.error("deserialize failed : " + e.getMessage());
}
}
return null;
}
}
\ No newline at end of file
package com.wanji.indicators.task.monitor.service.func;
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
import java.util.HashMap;
import java.util.Map;
/**
* @author fengyi
* @date 2023/3/13
 * @description Placeholder source intended to expose Kafka consumer offsets; run/cancel are not implemented yet.
*/
public class KafkaCustomerOffsetSource extends RichParallelSourceFunction<Map<String, Integer>> {
@Override
public void run(SourceContext<Map<String, Integer>> ctx) throws Exception {
}
@Override
public void cancel() {
}
}
package com.wanji.indicators.task.monitor.service.func;
import com.alibaba.fastjson.JSONObject;
import com.wanji.indicators.model.TopicLastTimeModel;
import com.wanji.indicators.util.DateUtil;
import com.wanji.indicators.util.HttpUtil;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;
import org.joda.time.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanUtils;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
/**
* @author fengyi
* @date 2023/3/12
 * @description Monitors per-topic data freshness and raises a Feishu alarm when a topic stops receiving data.
*/
public class TopicMonitorKeyProcessFunction extends KeyedProcessFunction<String, TopicLastTimeModel, TopicLastTimeModel> {
private static final Logger log = LoggerFactory.getLogger(TopicMonitorKeyProcessFunction.class);
private Properties properties;
private ValueState<TopicLastTimeModel> valueState;
private int delay;//interval without new data before the timer fires, in minutes
private int timeOut;//how long the data may be stale before alarming, in minutes
public TopicMonitorKeyProcessFunction(int delay, int timeOut, Properties properties) {
this.delay = delay;
this.timeOut = timeOut;
this.properties = properties;
}
@Override
public void open(Configuration parameters) throws Exception {
TypeInformation<TopicLastTimeModel> topicInfo = TypeInformation.of(new TypeHint<TopicLastTimeModel>() {
});
valueState = getRuntimeContext().getState(new ValueStateDescriptor<TopicLastTimeModel>("TOPIC_LAST_STATE", topicInfo));
}
@Override
public void processElement(TopicLastTimeModel value, Context ctx, Collector<TopicLastTimeModel> out) throws Exception {
TopicLastTimeModel currentTopicModel = valueState.value();
if (currentTopicModel == null) {
currentTopicModel = new TopicLastTimeModel();
BeanUtils.copyProperties(value, currentTopicModel);
currentTopicModel.setBootstrapServers(properties.getProperty("bootstrap.servers"));
//processing-time timer 'delay' minutes from now, truncated to whole seconds
long coalescedTime = ((ctx.timerService().currentProcessingTime() + delay * 60 * 1000) / 1000) * 1000;
log.info("Topic:"+currentTopicModel.getTopic()+" 数据时间:" + DateUtil.toDateTime(currentTopicModel.getTimestamp(), "yyyy-MM-dd HH:mm:ss.SSS") + ",当前时间:" + DateUtil.toDateTime(ctx.timerService().currentProcessingTime(), "yyyy-MM-dd HH:mm:ss.SSS") + " ,定时器下次触发时间:" + DateUtil.toDateTime(coalescedTime, "yyyy-MM-dd HH:mm:ss.SSS"));
//register the delay-minute processing-time timer
ctx.timerService().registerProcessingTimeTimer(coalescedTime);
} else {
currentTopicModel.setTimestamp(value.getTimestamp());
}
valueState.update(currentTopicModel);
out.collect(currentTopicModel);
}
@Override
public void onTimer(long timestamp, OnTimerContext ctx, Collector<TopicLastTimeModel> out) throws Exception {
TopicLastTimeModel current = valueState.value();
if (Objects.nonNull(current)) {
//log inside the null check: current may be null here and would otherwise throw an NPE
log.info(ctx.getCurrentKey() + "==>onTimer:" + DateUtil.toDateTime(ctx.timestamp(), "yyyy-MM-dd HH:mm:ss.SSS") + ",数据时间:" + DateUtil.toDateTime(current.getTimestamp(), "yyyy-MM-dd HH:mm:ss.SSS"));
DateTime nowTime = DateTime.now();
DateTime dataTime = new DateTime(current.getTimestamp());
long minute = 0;
if (dataTime.isBeforeNow()) {
//time gap between the latest data and now
Interval interval = new Interval(dataTime, nowTime);
Duration duration = interval.toDuration();
minute = duration.getStandardMinutes();
}
// StringBuilder stringBuilder = new StringBuilder();
// stringBuilder.append("Kafka全域感知数据检测:\r\n");
// stringBuilder.append("问题描述:全域感知数据于 " + dataTime.toString("yyyy-MM-dd HH:mm:ss") + " 开始无数据\r\n");
// stringBuilder.append("Kafka服务地址:" + current.getBootstrapServers() + "\r\n");
// stringBuilder.append("KafkaTopic:" + current.getTopic() + "\r\n");
// stringBuilder.append("检测时间:" + nowTime.toString("yyyy-MM-dd HH:mm:ss"));
JSONObject jsonObject = new JSONObject();
jsonObject.put("msg_type", "text");
JSONObject contentObject = new JSONObject();
contentObject.put("text", "Kafka Topic:" + current.getTopic() + "于" + dataTime.toString("yyyy-MM-dd HH:mm:ss") + "开始无数据");
jsonObject.put("content", contentObject);
log.info(jsonObject.toString());
out.collect(current);
String at = "<at user_id=\\\\\\\"ou_xxx\\\\\\\">Tom</at>";
if (minute > this.timeOut) {
Map<String, String> propertyMap = new HashMap<>();
propertyMap.put("Content-Type", "application/json");
String urlStr = properties.getProperty("alarm.feishu.url");
byte[] bytes = HttpUtil.httpRequest(urlStr, "POST", jsonObject.toJSONString(), "UTF-8", 10, propertyMap);
if (bytes != null && bytes.length > 0) {
String json = new String(bytes, "UTF-8");
log.info("飞书告警请求:{}\r\n" + "飞书告警响应:{}", jsonObject.toString(), json);
}
}
long coalescedTime = ((ctx.timerService().currentProcessingTime() + delay * 60 * 1000) / 1000) * 1000;
ctx.timerService().registerProcessingTimeTimer(coalescedTime);
log.info("Topic:"+current.getTopic()+" 数据时间:" + DateUtil.toDateTime(current.getTimestamp(), "yyyy-MM-dd HH:mm:ss.SSS") + "," + DateUtil.toDateTime(nowTime.getMillis(), "yyyy-MM-dd HH:mm:ss.SSS") + ",定时器下次触发时间: " + DateUtil.toDateTime(coalescedTime, "yyyy-MM-dd HH:mm:ss.SSS"));
}
}
}
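For reference, the alarm body assembled in onTimer follows the Feishu custom-bot text-message format the code builds above (the unused `at` string hints at planned @-mentions); serialized it looks like:
{"msg_type":"text","content":{"text":"Kafka Topic:<topic>于<yyyy-MM-dd HH:mm:ss>开始无数据"}}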
package com.wanji.indicators.task.monitor.stream;
import com.wanji.indicators.model.TopicLastTimeModel;
import com.wanji.indicators.task.monitor.service.func.CustomKafkaDeserialization;
import com.wanji.indicators.task.monitor.service.func.TopicMonitorKeyProcessFunction;
import com.wanji.indicators.util.PropertiesHelper;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.ProcessingTimeSessionWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.util.*;
public class KafkaMonitorMain {
private static final Logger log = LoggerFactory.getLogger(KafkaMonitorMain.class);
public static void main(String[] args) {
ParameterTool parameter = ParameterTool.fromArgs(args);
//topic list, comma-separated
String topicList = parameter.get("topicList");
//check interval and alarm timeout, in minutes
int freqTime = Integer.parseInt(parameter.get("freqTime","5"));
int timeOut = Integer.parseInt(parameter.get("timeOut","5"));
String[] topicArray = topicList.split(",");
PropertiesHelper instance = PropertiesHelper.getInstance();
Properties properties = instance.getProperties();
//build a one-off consumer to look up partition offsets
Consumer<String, String> consumer = createConsumer(properties);
//offsets pointing at the last record of each topic partition
Map<TopicPartition, Long> endingOffsetMap = getEndingOffsets(consumer,topicArray);
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
//env.setParallelism(2);
DateTime now = DateTime.now();
DateTime preDateTime = now.minusHours(24);
KafkaSource<TopicLastTimeModel> source = KafkaSource.<TopicLastTimeModel>builder()
.setProperties(instance.getConsumerProperties())
.setProperty("auto.offset.commit", "true")
.setProperty("auto.commit.interval.ms", "1000")
.setBootstrapServers(properties.getProperty("bootstrap.servers"))
.setTopics(topicArray)
.setGroupId(properties.getProperty("consumer.group.id") + "_DATA_MONITOR")
//.setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.EARLIEST))
.setStartingOffsets(OffsetsInitializer.offsets(endingOffsetMap))
.setDeserializer(KafkaRecordDeserializationSchema.of(new CustomKafkaDeserialization()))
.build();
DataStream<TopicLastTimeModel> stream = env
.fromSource(source, WatermarkStrategy.<TopicLastTimeModel>forBoundedOutOfOrderness(Duration.ofSeconds(1))
, "kafka-data-monitor-source");
SingleOutputStreamOperator<TopicLastTimeModel> frameModelStream =
stream
.keyBy(TopicLastTimeModel::getTopic)
.process(new TopicMonitorKeyProcessFunction(freqTime, timeOut, properties))
// .window(ProcessingTimeSessionWindows.withGap(Time.minutes(1)))
// .process(new ProcessWindowFunction<TopicLastTimeModel, TopicLastTimeModel, String, TimeWindow>() {
// @Override
// public void process(String s, Context context, Iterable<TopicLastTimeModel> elements, Collector<TopicLastTimeModel> out) throws Exception {
// System.out.println("***************************");
// }
// })
.name("Kafka-Topic 数据监控");
try {
//frameModelStream.print();
env.execute("Kafka-Topic数据监控");
} catch (Exception e) {
e.printStackTrace();
//log.error("交通指标计算任务异常 : " + e);
}
}
@SuppressWarnings("unchecked")
public static Map<TopicPartition, Long> getEndingOffsets(Consumer<String, String> consumer,String[] topicArray) {
Map<TopicPartition, Long> endingOffsetMap = new HashMap<>();
try {
//all topics visible on the broker
Map<String, List<PartitionInfo>> topics = consumer.listTopics();
for (String topic : topicArray) {
List<PartitionInfo> partitionInfos = topics.get(topic);
if (partitionInfos == null) {
log.warn("Partition information was not found for topic {}", topic);
} else {
Collection<TopicPartition> partitions = new ArrayList<>();
for (PartitionInfo partitionInfo : partitionInfos) {
partitions.add(new TopicPartition(topic, partitionInfo.partition()));
}
//latest end offsets; step back one record so the source re-reads the last
//message of each partition and immediately learns the topic's most recent event time
Map<TopicPartition, Long> endingOffsets = consumer.endOffsets(partitions);
Map<TopicPartition, Long> offsetMap = new HashMap<>();
for (Map.Entry<TopicPartition, Long> entry : endingOffsets.entrySet()){
//guard against empty partitions whose end offset is 0
offsetMap.put(entry.getKey(), Math.max(0L, entry.getValue()-1));
}
endingOffsetMap.putAll(offsetMap);
}
}
} finally {
consumer.close();
}
return endingOffsetMap;
}
private static Consumer<String, String> createConsumer(Properties properties) {
final Properties props = new Properties();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, properties.getProperty("bootstrap.servers"));
props.put(ConsumerConfig.GROUP_ID_CONFIG, properties.getProperty("consumer.group.id") + "_DATA_MONITOR");
props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
final Consumer<String, String> consumer = new KafkaConsumer<>(props);
return consumer;
}
}
package com.wanji.indicators.task.queuelength.service.func;
import com.wanji.indicators.model.SnapshotInfo;
import com.wanji.indicators.model.SnapshotIndicatorModel;
import com.wanji.indicators.util.DateUtil;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.List;
/**
 * Converts a snapshot frame into individual lane snapshot records
*/
public class ParseSnapshotInfoFlatMap implements FlatMapFunction<SnapshotIndicatorModel, SnapshotInfo> {
private static final Logger log = LoggerFactory.getLogger(ParseSnapshotInfoFlatMap.class);
@Override
public void flatMap(SnapshotIndicatorModel snapshotIndicatorModel, Collector<SnapshotInfo> collector) throws Exception {
String timeStamp = snapshotIndicatorModel.getTimeStamp();
long ts = DateUtil.StringToMillis(timeStamp, "yyyy-MM-dd HH:mm:ss:SSS");
String globalId = snapshotIndicatorModel.getOrgCode();
List<SnapshotInfo> list = snapshotIndicatorModel.getEventList();
list.forEach(o -> {
o.setTimestamp(ts);
o.setGlobalId(globalId);
collector.collect(o);
});
}
/**
 * Get a random integer in the range [min, max].
 *
 * @param min lower bound, inclusive
 * @param max upper bound, inclusive
 * @return a random int between min and max
*/
public static int getRandom(int min, int max) {
int floor = (int) Math.floor(Math.random() * (max - min + 1) + min);
return floor;
}
}
package com.wanji.indicators.task.queuelength.service.func;
import com.wanji.indicators.model.SnapshotInfo;
import com.wanji.indicators.task.queuelength.service.model.MaxQueueResultModel;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author fengyi
* @date 2023/4/6
 * @description Aggregates the maximum dynamic queue length within a window.
*/
public class QueueAggregateFunction implements AggregateFunction<SnapshotInfo,MaxQueueResultModel, MaxQueueResultModel> {
private static final Logger log = LoggerFactory.getLogger(QueueAggregateFunction.class);
@Override
public MaxQueueResultModel createAccumulator() {
MaxQueueResultModel initModel = new MaxQueueResultModel();
initModel.setMaxQueueLength(0D);
return initModel;
}
/**
 * Invoked once for every incoming element.
 * @param value the element just received
 * @param accumulator the intermediate result accumulated so far
 * @return the updated accumulator
*/
@Override
public MaxQueueResultModel add(SnapshotInfo value, MaxQueueResultModel accumulator) {
accumulator.setMaxQueueLength(Math.max(value.getDynamicQueueLength(), accumulator.getMaxQueueLength()));
//System.out.println("avgSpeed:"+accumulator.getAvgSpeed()+":"+accumulator.getSumSpeed()/accumulator.getCount()+" maxSpeed:"+accumulator.getMaxSpeed());
return accumulator;
}
/*
Triggered when the window end time is reached; returns the final result.
*/
@Override
public MaxQueueResultModel getResult(MaxQueueResultModel accumulator) {
//log.info("AggResult:"+accumulator);
return accumulator;
}
/**
 * merge is only needed for session windows; time windows never call it.
 * @param a first partial result
 * @param b second partial result
 * @return the merged partial result
*/
@Override
public MaxQueueResultModel merge(MaxQueueResultModel a, MaxQueueResultModel b) {
//return the larger partial maximum instead of null so that session windows would also work
return a.getMaxQueueLength() >= b.getMaxQueueLength() ? a : b;
}
}
package com.wanji.indicators.task.queuelength.service.func;
import com.wanji.indicators.task.queuelength.service.model.MaxQueueResultModel;
import com.wanji.indicators.util.DateUtil;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author fengyi
* @date 2023/4/6
 * @description Attaches the window start/end times to the aggregated max-queue result.
*/
public class QueueProcessWindow extends ProcessWindowFunction<
MaxQueueResultModel,
MaxQueueResultModel,
String,
TimeWindow> {
private static final Logger log = LoggerFactory.getLogger(QueueProcessWindow.class);
@Override
public void process(String s, Context context, Iterable<MaxQueueResultModel> elements, Collector<MaxQueueResultModel> out) throws Exception {
long windowStartTs = context.window().getStart();
long windowEndTs = context.window().getEnd();
//the AggregateFunction upstream emits exactly one element per window
if (elements.iterator().hasNext()) {
MaxQueueResultModel result = elements.iterator().next();
result.setStartTime(DateUtil.toDateTime(windowStartTs,"yyyy-MM-dd HH:mm:ss.SSS"));
result.setEndTime(DateUtil.toDateTime(windowEndTs,"yyyy-MM-dd HH:mm:ss.SSS"));
log.info("MaxQueueLength Result:" + result.toString());
out.collect(result);
}
}
}
package com.wanji.indicators.task.queuelength.service.func;
import com.alibaba.fastjson.JSONObject;
import com.wanji.indicators.model.SnapshotIndicatorModel;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Parses a snapshot JSON string into SnapshotIndicatorModel
*/
public class SnapshotFlatMap implements FlatMapFunction<String, SnapshotIndicatorModel> {
private static final Logger log = LoggerFactory.getLogger(SnapshotFlatMap.class);
@Override
public void flatMap(String trackJson, Collector<SnapshotIndicatorModel> collector) throws Exception {
SnapshotIndicatorModel frameModel = JSONObject.parseObject(trackJson, SnapshotIndicatorModel.class);
collector.collect(frameModel);
}
}
package com.wanji.indicators.task.queuelength.service.model;
import com.wanji.indicators.constant.Constant;
import lombok.Data;
import java.io.Serializable;
/**
* @author fengyi
* @date 2023/4/6
 * @description Result model for the maximum queue length within a window.
*/
@Data
public class MaxQueueResultModel implements Serializable {
private String id= Constant.HASH_KEY_AREA_MAX_QUEUE_LENGTH;
private String startTime;
private String endTime;
private Double maxQueueLength;
}
package com.wanji.indicators.task.queuelength.service.sink;
import com.alibaba.fastjson.JSON;
import com.wanji.indicators.task.freeflow.service.model.MaxSpeedResultModel;
import com.wanji.indicators.task.queuelength.service.model.MaxQueueResultModel;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;
/**
* @author fengyi
* @date 2023/4/7
 * @description Maps MaxQueueResultModel to a redis HSET command.
*/
public class QueueRedisMapper implements RedisMapper<MaxQueueResultModel> {
private String redisKey ;
public QueueRedisMapper(String redisKey){
this.redisKey = redisKey;
}
// redis command used to store the result: a hash, i.e. HSET <redisKey> <id> <json>
@Override
public RedisCommandDescription getCommandDescription() {
return new RedisCommandDescription(RedisCommand.HSET, this.redisKey);
}
@Override
public String getKeyFromData(MaxQueueResultModel data) {
return data.getId();
}
@Override
public String getValueFromData(MaxQueueResultModel data) {
return JSON.toJSONString(data);
}
}
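For reference, the hash entry written by this mapper can be read back with a plain Jedis client. A minimal sketch, assuming the same key/field constants that MaxQueueLengthMain passes in (host/port are placeholders; in the job they come from RedisConfig):
import com.wanji.indicators.constant.Constant;
import redis.clients.jedis.Jedis;
public class ReadMaxQueueIndicator {
    public static void main(String[] args) {
        //the mapper issues HSET <Constant.AREA_REALTIME_INDICATOR> <id> <json>,
        //so HGET with the same hash key and field returns the JSON payload
        try (Jedis jedis = new Jedis("127.0.0.1", 6379)) {
            String json = jedis.hget(Constant.AREA_REALTIME_INDICATOR, Constant.HASH_KEY_AREA_MAX_QUEUE_LENGTH);
            System.out.println(json);
        }
    }
}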
package com.wanji.indicators.task.queuelength.stream;
import com.wanji.indicators.constant.Constant;
import com.wanji.indicators.model.SnapshotIndicatorModel;
import com.wanji.indicators.model.SnapshotInfo;
import com.wanji.indicators.task.freeflow.service.sink.CustomRedisMapper;
import com.wanji.indicators.config.RedisConfig;
import com.wanji.indicators.task.queuelength.service.model.MaxQueueResultModel;
import com.wanji.indicators.task.queuelength.service.func.ParseSnapshotInfoFlatMap;
import com.wanji.indicators.task.queuelength.service.func.QueueAggregateFunction;
import com.wanji.indicators.task.queuelength.service.func.QueueProcessWindow;
import com.wanji.indicators.task.queuelength.service.func.SnapshotFlatMap;
import com.wanji.indicators.task.queuelength.service.sink.QueueRedisMapper;
import com.wanji.indicators.util.PropertiesHelper;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.util.Properties;
/**
 * Maximum queue length calculation
*/
public class MaxQueueLengthMain {
private static final Logger log = LoggerFactory.getLogger(MaxQueueLengthMain.class);
public static void main(String[] args) {
PropertiesHelper instance = PropertiesHelper.getInstance();
Properties properties = instance.getProperties();
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1);
DateTime currentDateTime = new DateTime();
//midnight (00:00) of the previous day (computed but unused here: the source below starts from committed offsets)
DateTime dateTime = currentDateTime.withMillisOfDay(0).plusDays(-1);
KafkaSource<String> source = KafkaSource.<String>builder()
.setProperties(instance.getConsumerProperties())
.setProperty("auto.offset.commit", "true")
.setProperty("auto.commit.interval.ms", "1000")
.setProperty("commit.offsets.on.checkpoint", "true")
.setBootstrapServers(properties.getProperty("bootstrap.servers"))
.setTopics(properties.getProperty("consumer.snapshot.indicator.topic"))
.setGroupId(properties.getProperty("consumer.group.id") + "_snapshot")
/* Ways to set the starting offsets:
1. consume from explicit positions: OffsetsInitializer.offsets(Map<TopicPartition, Long> offsets)
2. consume from the latest position: OffsetsInitializer.latest()
3. consume from the earliest position: OffsetsInitializer.earliest()
4. consume from the last committed position: OffsetsInitializer.committedOffsets()
5. a new group that has never committed, with a fallback strategy: OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST)
*/
// resume from the committed offsets; a new group falls back to the latest position
.setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST))
.setDeserializer(KafkaRecordDeserializationSchema.valueOnly(StringDeserializer.class))
.build();
DataStream<String> stream = env
.fromSource(source, WatermarkStrategy.noWatermarks(), "kafka-car-track-source");
SingleOutputStreamOperator<SnapshotIndicatorModel> frameModelStream =
stream
.flatMap(new SnapshotFlatMap())
.setParallelism(1)
.name("每秒快照数据-JsonToObject");
SingleOutputStreamOperator<SnapshotInfo> carTrackModelStream =
frameModelStream
.flatMap(new ParseSnapshotInfoFlatMap())
.setParallelism(1)
.name("快照车道数据-FlatMap");
SingleOutputStreamOperator<MaxQueueResultModel> freeSpeedStream = carTrackModelStream
.assignTimestampsAndWatermarks(
WatermarkStrategy.<SnapshotInfo>forBoundedOutOfOrderness(Duration.ofSeconds(1))
.withTimestampAssigner((event, timestamp) -> event.getTimestamp()))
.keyBy(SnapshotInfo::getGlobalId)
//.window(TumblingEventTimeWindows.of(Time.minutes(2)))
.window(SlidingEventTimeWindows.of(Time.minutes(2),Time.seconds(1)))
.aggregate(new QueueAggregateFunction(),new QueueProcessWindow())
.name("每5秒计算2分钟最大排队长度");
try {
freeSpeedStream.addSink( new RedisSink<MaxQueueResultModel>(new RedisConfig().getRedisConfig(), new QueueRedisMapper(Constant.AREA_REALTIME_INDICATOR)));
env.execute("2分钟最大排队长度计算工作流");
} catch (Exception e) {
e.printStackTrace();
//log.error("交通指标计算任务异常 : " + e);
}
}
}
package com.wanji.indicators.task.track.service;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.wanji.indicators.constant.Constant;
import com.wanji.indicators.model.CarTrackModel;
import com.wanji.indicators.model.SingleCarTrackListModel;
import com.wanji.indicators.task.track.service.func.ProcessCarTrackByKey;
import com.wanji.indicators.task.track.service.key.KeySelectorGlobalIdPlateNo;
import com.wanji.indicators.task.track.service.sink.BatchTrackElasticSearchSinkFunction;
import com.wanji.indicators.util.DateUtil;
import com.wanji.indicators.util.ElasticSearchSinkUtil;
import com.wanji.indicators.util.PropertiesHelper;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.apache.http.HttpHost;
import org.springframework.beans.factory.annotation.Value;
import java.io.Serializable;
import java.net.MalformedURLException;
import java.time.Duration;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
public class AreaRealTimeIndicatorMainNew implements Serializable {
private final StreamExecutionEnvironment env;
private final PropertiesHelper instance;
private Properties p = new Properties();
private String path;
private String indexName = "area.realtime.indicator";
private int windowTime = 2;
private AreaRealTimeIndicatorMainNew(StreamExecutionEnvironment env, String paramInfo) {
this.env = env;
this.instance = PropertiesHelper.getInstance();
this.p = this.instance.getProperties();
}
public static AreaRealTimeIndicatorMainNew init(StreamExecutionEnvironment env, String paramInfo) {
return new AreaRealTimeIndicatorMainNew(env,paramInfo);
}
public void run(SingleOutputStreamOperator<CarTrackModel> carTrackModelStream) {
//keep only motor-vehicle records
SingleOutputStreamOperator<CarTrackModel> filterStream =
carTrackModelStream.filter(new FilterFunction<CarTrackModel>() {
@Override
public boolean filter(CarTrackModel value) throws Exception {
String motorObjectType = Constant.MOTOR_TYPES;
String[] sps = motorObjectType.split(",");
List<String> typeList = Arrays.asList(sps);
String carType = value.getOriginalType().toString();
return typeList.contains(carType);
}
}).name("筛选机动车数据");
SingleOutputStreamOperator<JSONObject> groupByPlateStream =
filterStream.assignTimestampsAndWatermarks(
WatermarkStrategy.<CarTrackModel>forBoundedOutOfOrderness(Duration.ofSeconds(1))
.withTimestampAssigner((event, timestamp) -> event.getGlobalTimeStamp()))
.keyBy(CarTrackModel::getOrgCode)
.window(SlidingEventTimeWindows.of(Time.minutes(this.windowTime),Time.seconds(5)))
.process(new ProcessWindowFunction<CarTrackModel, JSONObject, String, TimeWindow>() {
@Override
public void process(String s, Context context, Iterable<CarTrackModel> elements, Collector<JSONObject> out) throws Exception {
Iterator<CarTrackModel> iterator = elements.iterator();
List<CarTrackModel> list = StreamSupport.stream(elements.spliterator(), false).collect(Collectors.toList());
//average speed of moving targets (speed > 0)
double avgSpeed = list.stream().filter(o->o.getSpeed()>0).collect(Collectors.averagingDouble(o->o.getSpeed())).doubleValue();
//number of distinct targets (grouped by id)
long count = list.stream().collect(Collectors.groupingBy(o->o.getId())).size();
JSONObject jsonObject = new JSONObject();
jsonObject.put("vehicleNum",count);
jsonObject.put("avgSpeed",avgSpeed);
out.collect(jsonObject);
long ws = context.window().getStart();
long we = context.window().getEnd();
System.out.println(DateUtil.toDateTime(ws,"yyyy-MM-dd HH:mm:ss.SSS")+" "+DateUtil.toDateTime(we,"yyyy-MM-dd HH:mm:ss.SSS"));
}
})
.setParallelism(2)
.name("车辆轨迹分组-(全域ID-车辆ID-车牌号)");
//read the Elasticsearch addresses from the config file
try {
groupByPlateStream.print();
List<HttpHost> esAddresses = ElasticSearchSinkUtil.getEsAddresses(p.getProperty("elasticsearch.server"));
BatchTrackElasticSearchSinkFunction elasticsearchSinkFunction = new BatchTrackElasticSearchSinkFunction(indexName);
//ElasticSearchSinkUtil.addSink(esAddresses, 2, groupByPlateStream, elasticsearchSinkFunction);
} catch (MalformedURLException e) {
e.printStackTrace();
}
}
}
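Each sliding window above emits one small JSON document per org code; with the two fields set in the process function, the payload looks like (values illustrative):
{"vehicleNum":42,"avgSpeed":35.6}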
package com.wanji.indicators.task.track.service;
import com.wanji.indicators.model.CarTrackModel;
import com.wanji.indicators.model.RoutePathCarTrackModel;
import com.wanji.indicators.task.track.service.func.SplitRoutePathFunction;
import com.wanji.indicators.task.track.service.key.KeySelectorGlobalIdPlateNo;
import com.wanji.indicators.task.track.service.sink.PartitionRoutePathElasticSearchSinkFunction;
import com.wanji.indicators.task.track.service.sink.RoutePathElasticSearchSinkFunction;
import com.wanji.indicators.util.ElasticSearchSinkUtil;
import com.wanji.indicators.util.PropertiesHelper;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.http.HttpHost;
import java.io.Serializable;
import java.net.MalformedURLException;
import java.time.Duration;
import java.util.List;
import java.util.Properties;
public class CarRoutePathMainNew {
private final StreamExecutionEnvironment env;
private final PropertiesHelper instance;
private Properties p = new Properties();
private String path;
private String indexName;
private CarRoutePathMainNew(StreamExecutionEnvironment env, String indexName) {
this.env = env;
this.indexName = indexName;
this.instance = PropertiesHelper.getInstance();
this.p = this.instance.getProperties();
}
public static CarRoutePathMainNew init(StreamExecutionEnvironment env, String indexName) {
return new CarRoutePathMainNew(env, indexName);
}
public void run(SingleOutputStreamOperator<CarTrackModel> carTrackModelStream) {
//split each vehicle's track points into route segments: when two consecutive points are too far apart in time (more than the timeOut minutes configured in SplitRoutePathFunction), a new route is started
SingleOutputStreamOperator<RoutePathCarTrackModel> splitLineStream = carTrackModelStream
.assignTimestampsAndWatermarks(
WatermarkStrategy.<CarTrackModel>forBoundedOutOfOrderness(Duration.ofSeconds(1))
.withTimestampAssigner((event, timestamp) -> event.getGlobalTimeStamp()))
.keyBy(new KeySelectorGlobalIdPlateNo())
.process(new SplitRoutePathFunction())
.setParallelism(2).name("提取车辆通行路径");
//read the Elasticsearch addresses from the config file
try {
List<HttpHost> esAddresses = ElasticSearchSinkUtil.getEsAddresses(p.getProperty("elasticsearch.server"));
RoutePathElasticSearchSinkFunction elasticsearchSinkFunction = new RoutePathElasticSearchSinkFunction(indexName);
ElasticSearchSinkUtil.addSink(esAddresses, 2, splitLineStream, elasticsearchSinkFunction);
PartitionRoutePathElasticSearchSinkFunction elasticsearchSinkFunction1 = new PartitionRoutePathElasticSearchSinkFunction(indexName);
ElasticSearchSinkUtil.addSink(esAddresses, 2, splitLineStream, elasticsearchSinkFunction1);
} catch (MalformedURLException e) {
e.printStackTrace();
}
}
}
package com.wanji.indicators.task.track.service;
import com.wanji.indicators.model.CarTrackModel;
import com.wanji.indicators.model.SingleCarTrackListModel;
import com.wanji.indicators.task.track.service.func.ProcessCarTrackByKey;
import com.wanji.indicators.task.track.service.key.KeySelectorGlobalIdPlateNo;
import com.wanji.indicators.task.track.service.sink.BatchTrackElasticSearchSinkFunction;
import com.wanji.indicators.task.track.service.sink.PartitionBatchTrackElasticSearchSinkFunction;
import com.wanji.indicators.util.ElasticSearchSinkUtil;
import com.wanji.indicators.util.PropertiesHelper;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.windowing.assigners.TumblingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.http.HttpHost;
import java.io.Serializable;
import java.net.MalformedURLException;
import java.time.Duration;
import java.util.List;
import java.util.Properties;
public class TrackStoreMainNew {
private final StreamExecutionEnvironment env;
private final PropertiesHelper instance;
private Properties p = new Properties();
private String path;
private String indexName ;
//window length, in seconds
private Integer windowTime=5;
private TrackStoreMainNew(StreamExecutionEnvironment env,String paramInfo) {
this.env = env;
String[] sps = paramInfo.split(",");
this.indexName = sps[0];
if (sps.length>1)
this.windowTime = Integer.parseInt(sps[1]);
this.instance = PropertiesHelper.getInstance();
this.p = this.instance.getProperties();
}
public static TrackStoreMainNew init(StreamExecutionEnvironment env,String paramInfo) {
return new TrackStoreMainNew(env,paramInfo);
}
public void run(SingleOutputStreamOperator<CarTrackModel> carTrackModelStream) {
//accumulate tracks per vehicle in event-time tumbling windows of windowTime seconds (default 5), based on the reported track time
SingleOutputStreamOperator<SingleCarTrackListModel> groupByPlateStream =
carTrackModelStream.assignTimestampsAndWatermarks(
WatermarkStrategy.<CarTrackModel>forBoundedOutOfOrderness(Duration.ofSeconds(1))
.withTimestampAssigner((event, timestamp) -> event.getGlobalTimeStamp()))
.keyBy(new KeySelectorGlobalIdPlateNo())
.window(TumblingEventTimeWindows.of(Time.seconds(this.windowTime)))
//.trigger(new CountTriggerWithTimeout<>(Integer.MAX_VALUE,TimeCharacteristic.EventTime))
.process(new ProcessCarTrackByKey())
.setParallelism(2)
.name("车辆轨迹分组-(全域ID-车辆ID-车牌号)");
//read the Elasticsearch addresses from the config file
try {
List<HttpHost> esAddresses = ElasticSearchSinkUtil.getEsAddresses(p.getProperty("elasticsearch.server"));
BatchTrackElasticSearchSinkFunction elasticsearchSinkFunction = new BatchTrackElasticSearchSinkFunction(indexName);
ElasticSearchSinkUtil.addSink(esAddresses, 2, groupByPlateStream, elasticsearchSinkFunction);
PartitionBatchTrackElasticSearchSinkFunction elasticsearchSinkFunction1 = new PartitionBatchTrackElasticSearchSinkFunction(indexName);
ElasticSearchSinkUtil.addSink(esAddresses, 2, groupByPlateStream, elasticsearchSinkFunction1);
} catch (MalformedURLException e) {
e.printStackTrace();
}
}
}
package com.wanji.indicators.task.track.service.func;
import com.wanji.indicators.model.CarTrackModel;
import com.wanji.indicators.model.FrameModel;
import com.wanji.indicators.model.RoutePathCarTrackModel;
import com.wanji.indicators.util.CarNumGenerator;
import com.wanji.indicators.util.DateUtil;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Flattens a frame into individual vehicle track records
*/
public class CarTrackFlatMap implements FlatMapFunction<FrameModel, CarTrackModel> {
private static final Logger log = LoggerFactory.getLogger(CarTrackFlatMap.class);
//twin-generated data has no plate number yet; map a random plate number by id for testing
static Map<Integer,String[]> randomCarNumMap = new ConcurrentHashMap<>();
static String motorObjectType = "1,2,3,7,8,10,11,12,13,15";
boolean isMockPlate = false;
@Override
public void flatMap(FrameModel frameModel, Collector<CarTrackModel> collector) throws Exception {
String timeStamp = frameModel.getTimeStamp();
Long globalTimeStamp = frameModel.getGlobalTimeStamp();
Integer participantNum = frameModel.getParticipantNum();
String globalId = frameModel.getOrgCode();
List<CarTrackModel> list = frameModel.getE1FrameParticipant();
String[] sps = motorObjectType.split(",");
List<String> motorTypeList = Arrays.asList(sps);
list.forEach(o->{
String type = o.getOriginalType().toString();
/***************** mock plate number, body color and plate color ******************************/
if (isMockPlate) {
if (motorTypeList.contains(type)) {
Integer id = o.getId();
String[] mockData = randomCarNumMap.get(id);
if (Objects.isNull(mockData)) {
String carNum = CarNumGenerator.getCarNum();
Integer picColor = getRandom(0, 7);
Integer bodyColor = getRandom(1, 9);
Integer function = getRandom(1, 5);
Integer ownership = getRandom(1, 5);
mockData = new String[]{carNum, picColor.toString(), bodyColor.toString(), function.toString(), ownership.toString()};
randomCarNumMap.put(id, mockData);
}
o.setPicLicense(mockData[0]);
o.setLicenseColor(Integer.valueOf(mockData[1]));
o.setOriginalColor(Integer.valueOf(mockData[2]));
o.setFunction(Integer.valueOf(mockData[3]));
o.setOwnership(Integer.valueOf(mockData[4]));
}
}
/***********************************************/
o.setTimeStamp(DateUtil.toDateTime(globalTimeStamp,"yyyy-MM-dd HH:mm:ss.SSS"));
o.setGlobalTimeStamp(globalTimeStamp);
o.setOrgCode(globalId);
collector.collect(o);
});
}
/**
 * Get a random integer in the range [min, max].
 *
 * @param min lower bound, inclusive
 * @param max upper bound, inclusive
 * @return a random int between min and max
*/
public static int getRandom(int min, int max) {
int floor = (int) Math.floor(Math.random() * (max - min + 1) + min);
return floor;
}
}
package com.wanji.indicators.task.track.service.func;
import com.alibaba.fastjson.JSONObject;
import com.wanji.indicators.model.FrameModel;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Parses a frame JSON string into FrameModel
*/
public class FrameFlatMap implements FlatMapFunction<String, FrameModel> {
private static final Logger log = LoggerFactory.getLogger(FrameFlatMap.class);
@Override
public void flatMap(String trackJson, Collector<FrameModel> collector) throws Exception {
FrameModel frameModel = JSONObject.parseObject(trackJson, FrameModel.class);
collector.collect(frameModel);
}
}
package com.wanji.indicators.task.track.service.func;
import com.wanji.indicators.model.CarTrackModel;
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
import org.apache.flink.streaming.api.watermark.Watermark;
/**
* @author fengyi
* @date 2023/3/3
 * @description Legacy periodic watermark assigner (see the WatermarkStrategy sketch below this class).
*/
public class MyWaterMark implements AssignerWithPeriodicWatermarks<CarTrackModel> {
private final long maxTimeLag = 3000; // 3 seconds
@Override
public long extractTimestamp(CarTrackModel element, long previousElementTimestamp) {
return element.getGlobalTimeStamp();
}
@Override
public Watermark getCurrentWatermark() {
// return the watermark as current time minus the maximum time lag
return new Watermark(System.currentTimeMillis() - maxTimeLag);
}
}
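AssignerWithPeriodicWatermarks is deprecated in Flink 1.14; the equivalent wiring with the WatermarkStrategy API already used elsewhere in this module would be (a sketch, using org.apache.flink.api.common.eventtime.WatermarkStrategy and java.time.Duration):
WatermarkStrategy<CarTrackModel> strategy =
        WatermarkStrategy.<CarTrackModel>forBoundedOutOfOrderness(Duration.ofSeconds(3))
                .withTimestampAssigner((event, previousTs) -> event.getGlobalTimeStamp());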
package com.wanji.indicators.task.track.service.func;
/**
* @author fengyi
* @date 2023/3/1
 * @description Collapses a window of track points into one SingleCarTrackListModel per vehicle.
*/
import com.wanji.indicators.model.CarTrackModel;
import com.wanji.indicators.model.SingleCarTrackListModel;
import com.wanji.indicators.util.DateUtil;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanUtils;
import java.util.*;
import java.util.stream.Collectors;
//@Slf4j
public class ProcessCarTrackByKey extends ProcessWindowFunction<CarTrackModel, SingleCarTrackListModel, Tuple3<String,Integer,String>, TimeWindow> {
private static final Logger log = LoggerFactory.getLogger(ProcessCarTrackByKey.class);
@Override
public void process(Tuple3<String, Integer, String> integerIntegerStringTuple3, Context context, Iterable<CarTrackModel> iterable, Collector<SingleCarTrackListModel> out) throws Exception {
Iterator<CarTrackModel> iterator = iterable.iterator();
SingleCarTrackListModel singleCarTrackListModel = new SingleCarTrackListModel();
List<SingleCarTrackListModel.CarTrackDynamicProperty> trackList = new ArrayList<>();
int i = 0;
CarTrackModel tmp = null;
Set<CarTrackModel.RoadNet> roadNets = new HashSet<>();
while (iterator.hasNext()) {
CarTrackModel carTrackModel = iterator.next();
// if (i==0) {
// //提取公共属性
// BeanUtils.copyProperties(carTrackModel,singleCarTrackListModel);
// //窗口的开始时间
// long winStartTimeStamp = context.window().getStart();
// //窗口的截止时间
// long winEndTimeStamp = context.window().getEnd();
// singleCarTrackListModel.setStartTime(new Date(winStartTimeStamp));
// singleCarTrackListModel.setEndTime(new Date(winEndTimeStamp));
// }
//extract the per-frame dynamic properties
SingleCarTrackListModel.CarTrackDynamicProperty dynamicProperty = new SingleCarTrackListModel.CarTrackDynamicProperty();
dynamicProperty.setCourseAngle(carTrackModel.getCourseAngle());
dynamicProperty.setLongitude(carTrackModel.getLongitude());
dynamicProperty.setLatitude(carTrackModel.getLatitude());
dynamicProperty.setSpeed(carTrackModel.getSpeed());
dynamicProperty.setTimeStamp(carTrackModel.getTimeStamp());
dynamicProperty.setGlobalTimeStamp(carTrackModel.getGlobalTimeStamp());
if (!trackList.contains(dynamicProperty)) {
trackList.add(dynamicProperty);
roadNets.add(carTrackModel.getRoadnet());
}
tmp = carTrackModel;
i++;
}
Date globalStartTime = null;
Date globalEndTime = null;
if (!trackList.isEmpty()) {
//sort by timestamp
trackList = trackList.stream().sorted(Comparator.comparing(SingleCarTrackListModel.CarTrackDynamicProperty::getGlobalTimeStamp)).collect(Collectors.toList());
//copy the shared static properties from the last track point
BeanUtils.copyProperties(tmp, singleCarTrackListModel);
//detection time of the first frame in the window
globalStartTime = new Date(trackList.get(0).getGlobalTimeStamp());
//detection time of the last frame in the window
globalEndTime = new Date(trackList.get(trackList.size()-1).getGlobalTimeStamp());
//window start time
long winStartTimeStamp = context.window().getStart();
//window end time
long winEndTimeStamp = context.window().getEnd();
singleCarTrackListModel.setStartTime(new Date(winStartTimeStamp));
singleCarTrackListModel.setEndTime(new Date(winEndTimeStamp));
singleCarTrackListModel.setGlobalStartTime(globalStartTime);
singleCarTrackListModel.setGlobalEndTime(globalEndTime);
}
singleCarTrackListModel.setTracks(trackList);
singleCarTrackListModel.setRoadnets(roadNets);
// log.info("plateNo:{},startTime:{},endTime:{},size:{}",singleCarTrackListModel.getStaticProperty().getPicLicense(),singleCarTrackListModel.getStaticProperty().getStartTime(),singleCarTrackListModel.getStaticProperty().getEndTime(),trackList.size());
out.collect(singleCarTrackListModel);
}
public static void main(String[] args) {
System.out.println(new Date());
}
}
package com.wanji.indicators.task.track.service.func;
import com.wanji.indicators.model.CarTrackModel;
import com.wanji.indicators.model.RoutePathCarTrackModel;
import com.wanji.indicators.util.ArithOfBigDecmial;
import com.wanji.indicators.util.DateUtil;
import com.wanji.indicators.util.GeomsConvertUtil;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.StateTtlConfig;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.common.typeinfo.TypeHint;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
public class SplitRoutePathFunction extends KeyedProcessFunction<Tuple3<String, Integer, String>, CarTrackModel, RoutePathCarTrackModel> {
private static final Logger log = LoggerFactory.getLogger(SplitRoutePathFunction.class);
//cache of the in-progress route per key
private transient MapState<Tuple3<String, Integer, String>, RoutePathCarTrackModel> mapStateOfLineSplit;
//cache of the current route number per key
private transient MapState<Tuple3<String, Integer, String>, Integer> lineNoState;
//timeout between consecutive points, in minutes
private Integer timeOut=10;
@Override
public void open(Configuration parameters) throws Exception {
StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.hours(12))//state time-to-live
.setUpdateType(StateTtlConfig.UpdateType.OnReadAndWrite)
.cleanupFullSnapshot()
.setStateVisibility(StateTtlConfig.StateVisibility.ReturnExpiredIfNotCleanedUp)//expired values may still be returned until they are cleaned up
.build();
MapStateDescriptor<Tuple3<String, Integer, String>, RoutePathCarTrackModel> mapStateDescriptor = new MapStateDescriptor<>("CAR_ROUTE_PATH_CACHE",
TypeInformation.of(new TypeHint<Tuple3<String, Integer, String>>() {
}),
TypeInformation.of(new TypeHint<RoutePathCarTrackModel>() {
})
);
MapStateDescriptor<Tuple3<String, Integer, String>, Integer> lineNoCacheDescriptor = new MapStateDescriptor<>("CAR_LINE_NO_CACHE",
TypeInformation.of(new TypeHint<Tuple3<String, Integer, String>>() {
}),
TypeInformation.of(new TypeHint<Integer>() {
})
);
mapStateDescriptor.enableTimeToLive(ttlConfig);
lineNoCacheDescriptor.enableTimeToLive(ttlConfig);
mapStateOfLineSplit = getRuntimeContext().getMapState(mapStateDescriptor);
lineNoState = getRuntimeContext().getMapState(lineNoCacheDescriptor);
}
@Override
public void processElement(CarTrackModel carTrackModel, Context context, Collector<RoutePathCarTrackModel> collector) throws Exception {
//key: orgCode_id_plateNo
Tuple3<String, Integer, String> currentKey = context.getCurrentKey();
//event timestamp of the point
long ts = carTrackModel.getGlobalTimeStamp();
Integer agoLineNo = lineNoState.get(currentKey);
RoutePathCarTrackModel agoTrack = mapStateOfLineSplit.get(currentKey);
if (agoTrack != null && agoLineNo != null) {
int agoSeqNo = agoTrack.getSeqNo();
long agoTs = agoTrack.getPreTimestamp();
double agoDistance = agoTrack.getRouteLength();
//distance between the two consecutive points
double distance = GeomsConvertUtil.getDistance(agoTrack.getPreLongitude(), agoTrack.getPreLatitude(), carTrackModel.getLongitude(), carTrackModel.getLatitude());
distance = ArithOfBigDecmial.round(distance, 2);
Double[] xy = new Double[]{carTrackModel.getLongitude(), carTrackModel.getLatitude()};
//agoTrack.getPointList().add(xy);
agoTrack.setRouteLength(distance + agoDistance);
//agoTrack.setCreateTime(System.currentTimeMillis());
long dif = ts - agoTs;
if (dif > 0) {
//close the route when the time gap exceeds timeOut minutes, or when the hop between consecutive points is longer than 2 km
if (dif > timeOut * 60 * 1000 || distance > 2000) {
//set the end-point info
agoTrack.setEndTime(DateUtil.toDateTime(agoTrack.getPreTimestamp(), "yyyy-MM-dd HH:mm:ss.SSS"));
//agoTrack.setEndTime(new Date(carTrackModel.getGlobalTimeStamp()));
agoTrack.setEndGlobalTimeStamp(agoTrack.getPreTimestamp());
agoTrack.setEndLongitude(agoTrack.getPreLongitude());
//fixed: the end latitude must come from the previous point, not from the (unset) end latitude itself
agoTrack.setEndLatitude(agoTrack.getPreLatitude());
//agoTrack.setCreateTime(System.currentTimeMillis());
agoTrack.getTargetId().add(getTargetId(carTrackModel));
agoTrack.setSeqNo(agoSeqNo + 1);
agoTrack.setLineNo(agoLineNo + 1);
agoTrack.setFinished(true);
collector.collect(agoTrack);
//route emitted; clear the cache
mapStateOfLineSplit.remove(currentKey);
lineNoState.put(currentKey,agoTrack.getLineNo());
log.info(currentKey+"==>路径生成信息:difTime:{},distance:{},路径属性:{}",dif,distance,agoTrack);
//the current point becomes the start of the next route
initCarPathStartInfo(currentKey,carTrackModel,agoTrack.getLineNo());
} else {
//intermediate point on the route
agoTrack.setPreLongitude(carTrackModel.getLongitude());
agoTrack.setPreLatitude(carTrackModel.getLatitude());
agoTrack.setPreTimestamp(carTrackModel.getGlobalTimeStamp());
agoTrack.setPreDateTime(DateUtil.toDateTime(carTrackModel.getGlobalTimeStamp(), "yyyy-MM-dd HH:mm:ss.SSS"));
agoTrack.setSeqNo(agoSeqNo + 1);
agoTrack.getTargetId().add(getTargetId(carTrackModel));
//agoTrack.setCreateTime(System.currentTimeMillis());
//update the cached state
mapStateOfLineSplit.put(currentKey, agoTrack);
//log.info(currentKey+"===>路径过程数据:"+agoTrack);
}
}
} else {
//first point: initialize the route start
initCarPathStartInfo(currentKey,carTrackModel,0);
}
//long currentProcessingTime = context.timerService().currentProcessingTime();
//long fireTime = currentProcessingTime - currentProcessingTime % 60000 + interval;
//log.info(currentKey + "==>" + DateUtil.toDateTime(currentProcessingTime, "yyyy-MM-dd HH:mm:ss.SSS") + "," + DateUtil.toDateTime(fireTime, "yyyy-MM-dd HH:mm:ss.SSS"));
//registering a timer for an identical timestamp overwrites the previous one, so the same timer fires only once
//context.timerService().registerProcessingTimeTimer(fireTime);
int interval = timeOut * 60 * 1000;
long fireTime = context.timestamp() - context.timestamp() % 60000 + interval;
context.timerService().registerEventTimeTimer(fireTime);
}
@Override
public void onTimer(long timestamp, OnTimerContext ctx, Collector<RoutePathCarTrackModel> out) throws Exception {
log.info(ctx.getCurrentKey() + "==>onTimer:" + DateUtil.toDateTime(ctx.timestamp(), "yyyy-MM-dd HH:mm:ss.SSS") + "," + DateUtil.toDateTime(ctx.timerService().currentProcessingTime(), "yyyy-MM-dd HH:mm:ss.SSS"));
Iterator<Map.Entry<Tuple3<String, Integer, String>, RoutePathCarTrackModel>> iterator = mapStateOfLineSplit.iterator();
Set<Tuple3<String, Integer, String>> expireKeys = new HashSet<>();
//no new data before the timeout: close the route using the last cached record as its end point
while (iterator.hasNext()) {
Map.Entry<Tuple3<String, Integer, String>, RoutePathCarTrackModel> entry = iterator.next();
RoutePathCarTrackModel model = entry.getValue();
//set the end-point info
model.setEndTime(DateUtil.toDateTime(model.getPreTimestamp(), "yyyy-MM-dd HH:mm:ss.SSS"));
//model.setEndTime(new Date(model.getPreTimestamp()));
model.setEndGlobalTimeStamp(model.getPreTimestamp());
model.setEndLongitude(model.getPreLongitude());
model.setEndLatitude(model.getPreLatitude());
model.setSeqNo(model.getSeqNo());
model.setLineNo(model.getLineNo() + 1);
model.setFinished(true);
//persist the advanced route number
lineNoState.put(ctx.getCurrentKey(),model.getLineNo());
expireKeys.add(entry.getKey());
if (model.getSeqNo() > 1) {//emit only when the route has at least two points
out.collect(model);
log.info(ctx.getCurrentKey()+"==>超时未收到路径轨迹点,此时以超时时间策略生成路径:timer:{},路径信息:{}",DateUtil.toDateTime(ctx.timestamp(), "yyyy-MM-dd HH:mm:ss.SSS"),model);
}
}
for (Tuple3<String, Integer, String> key : expireKeys) {
mapStateOfLineSplit.remove(key);
}
//the timers above are registered as event-time timers, so delete the matching event-time timer
ctx.timerService().deleteEventTimeTimer(timestamp);
}
private Map<String, Object> getTargetId(CarTrackModel carTrackModel) {
Map<String, Object> hashMap = new HashMap<>();
hashMap.put("orgCode", carTrackModel.getOrgCode());
hashMap.put("id", carTrackModel.getId());
return hashMap;
}
//initialize the start point of a new route
private void initCarPathStartInfo(Tuple3<String, Integer, String> currentKey,CarTrackModel carTrackModel,int lineNo) throws Exception {
//static road-network attributes
CarTrackModel.RoadNet roadNet = carTrackModel.getRoadnet();
RoutePathCarTrackModel initModel = new RoutePathCarTrackModel();
initModel.setLineNo(lineNo);
initModel.setSeqNo(1);
initModel.setPicLicense(carTrackModel.getPicLicense());
if (roadNet != null) {
initModel.setStartCrossId(roadNet.getCrossId());
initModel.setStartSegmentId(roadNet.getSegmentId());
initModel.setStartRid(roadNet.getRid());
initModel.setStartLaneId(roadNet.getLaneId());
}
initModel.setStartTime(DateUtil.toDateTime(carTrackModel.getGlobalTimeStamp(), "yyyy-MM-dd HH:mm:ss.SSS"));
//initModel.setStartTime(new Date(carTrackModel.getGlobalTimeStamp()));
initModel.setStartGlobalTimeStamp(carTrackModel.getGlobalTimeStamp());
initModel.setStartLongitude(carTrackModel.getLongitude());
initModel.setStartLatitude(carTrackModel.getLatitude());
initModel.setPreLongitude(carTrackModel.getLongitude());
initModel.setPreLatitude(carTrackModel.getLatitude());
initModel.setPreTimestamp(carTrackModel.getGlobalTimeStamp());
initModel.setPreDateTime(DateUtil.toDateTime(carTrackModel.getGlobalTimeStamp(), "yyyy-MM-dd HH:mm:ss.SSS"));
//initModel.setCreateTime(System.currentTimeMillis());
Double[] xy = new Double[]{carTrackModel.getLongitude(),carTrackModel.getLatitude()};
//initModel.getPointList().add(xy);
initModel.getTargetId().add(getTargetId(carTrackModel));
//cache the start-point state of the route
mapStateOfLineSplit.put(currentKey, initModel);
lineNoState.put(currentKey,initModel.getLineNo());
}
}
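A worked example of the split rule in processElement, under the defaults above (timeOut = 10 minutes, 2 km hop threshold):
// t0        point A                    -> route n starts at A
// t0+3min   point B, 300 m from A      -> same route (gap 3 min, hop < 2 km)
// t0+15min  point C                    -> gap 12 min > 10 min: route n ends at B, route n+1 starts at C
// t0+16min  point D, 2.5 km from C     -> hop > 2 km: route n+1 ends at C, route n+2 starts at D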
package com.wanji.indicators.task.track.service.func;//package com.wanji.indicators.project.event.track.service.func;
//
//import com.wanji.indicators.model.CarTrackModel;
//import org.apache.flink.api.common.eventtime.WatermarkGenerator;
//import org.apache.flink.api.common.eventtime.WatermarkGeneratorSupplier;
//import org.apache.flink.api.common.eventtime.WatermarkOutput;
//import org.apache.flink.api.common.eventtime.WatermarkStrategy;
//import org.apache.flink.api.java.tuple.Tuple2;
//
///**
// * @author fengyi
// * @date 2023/3/3
// * @description
// */
//public class WatermarkDemoFunction implements WatermarkStrategy<CarTrackModel> {
//
// private Tuple2<Long,Boolean> state = Tuple2.of(0L,true);
//
// @Override
// public WatermarkGenerator<CarTrackModel> createWatermarkGenerator(WatermarkGeneratorSupplier.Context context) {
// return new WatermarkGenerator<CarTrackModel>() {
// private long maxWatermark;
//
// @Override
// public void onEvent(CarTrackModel waterSensor, long l, WatermarkOutput watermarkOutput) {
// maxWatermark = Math.max(maxWatermark,waterSensor.getAmount());
// System.out.println("maxWatermark is " + maxWatermark);
// state.f0 = System.currentTimeMillis();
// state.f1 = false;
// }
// @Override
// public void onPeriodicEmit(WatermarkOutput watermarkOutput) {
// //乱序时间
// long outOfTime = 3000L;
// if (maxWatermark - outOfTime <=0){
// } else {
// //10s内没有数据则关闭当前窗口
// if (System.currentTimeMillis() - state.f0 >= 10000L && !state.f1){
// watermarkOutput.emitWatermark(new Watermark(maxWatermark + 5000L));
// state.f1 = true;
// System.out.println("触发窗口");
// } else {
// System.out.println("正常发送水印");
// watermarkOutput.emitWatermark(new Watermark(maxWatermark - outOfTime));
// }
// }
// }
// };
// }
//}
\ No newline at end of file
package com.wanji.indicators.task.track.service.key;
import com.wanji.indicators.model.CarTrackModel;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple3;
public class KeySelectorGlobalIdPlateNo implements KeySelector<CarTrackModel, Tuple3<String, Integer, String>> {
//key: combination of orgCode, target id and plate number
@Override
public Tuple3<String, Integer, String> getKey(CarTrackModel carTrackModel) throws Exception {
return new Tuple3<>(carTrackModel.getOrgCode(),carTrackModel.getId(),carTrackModel.getPicLicense());
}
}
package com.wanji.indicators.task.track.service.key;
import com.wanji.indicators.model.RoutePathCarTrackModel;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
public class KeySelectorRoutePath implements KeySelector<RoutePathCarTrackModel, Tuple2<String, Integer>> {
//以车牌号和颜色组合为唯一标识key
@Override
public Tuple2<String, Integer> getKey(RoutePathCarTrackModel carTrackModel) throws Exception {
return new Tuple2<>(carTrackModel.getPicLicense(),carTrackModel.getLineNo());
}
}
package com.wanji.indicators.task.track.service.sink;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.parser.Feature;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.wanji.indicators.model.SingleCarTrackListModel;
import com.wanji.indicators.util.DateUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Serializable;
import java.util.UUID;
/**
* @author fengyi
* @date 2023/3/2
* @description
*/
public class BatchTrackElasticSearchSinkFunction implements ElasticsearchSinkFunction<SingleCarTrackListModel>, Serializable {
private static Logger log = LoggerFactory.getLogger(BatchTrackElasticSearchSinkFunction.class);
private String index;
public BatchTrackElasticSearchSinkFunction(String index) {
this.index = index;
}
public IndexRequest createIndexRequest(SingleCarTrackListModel element) {
String jsonStr = JSON.toJSONString(element, SerializerFeature.WRITE_MAP_NULL_FEATURES);
JSONObject jsonObject = JSONObject.parseObject(jsonStr, Feature.OrderedField);
jsonObject.put("recordTime", DateUtil.getNowTime("yyyy-MM-dd HH:mm:ss.SSS"));
//String id = element.getOrgCode()+"_"+element.getId()+"_"+element.getPicLicense()+"_"+element.getG;
return Requests.indexRequest()
.index(index)
// .type(StringUtils.isEmpty(element.getPicLicense()) ?element.getId().toString():element.getPicLicense())
// .id(UUID.randomUUID().toString())
// .timeout(TimeValue.timeValueSeconds(60))
.source(jsonObject.toJSONString(), XContentType.JSON);
}
@Override
public void process(SingleCarTrackListModel row, RuntimeContext runtimeContext, RequestIndexer requestIndexer) {
try {
requestIndexer.add(createIndexRequest(row));
} catch (Exception e) {
log.error("failed to index track batch", e);
}
}
}
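/*
 * Usage sketch (an assumption for illustration, not part of this commit): wiring
 * this sink function into the Flink 1.14 elasticsearch7 connector. The host,
 * port, index name and stream variable below are hypothetical.
 *
 *   List<HttpHost> hosts = Collections.singletonList(new HttpHost("localhost", 9200));
 *   ElasticsearchSink.Builder<SingleCarTrackListModel> builder =
 *       new ElasticsearchSink.Builder<>(hosts, new BatchTrackElasticSearchSinkFunction("batch.track.index"));
 *   builder.setBulkFlushMaxActions(1000); // flush a bulk request every 1000 index actions
 *   trackStream.addSink(builder.build());
 */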
package com.wanji.indicators.task.track.service.sink;
import com.wanji.indicators.util.FileUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class FileSink implements SinkFunction<String> {
private static final Logger log = LoggerFactory.getLogger(FileSink.class);
private String path;
public FileSink(String path) {
this.path = path;
}
@Override
public void invoke(String json, Context context) {
if(StringUtils.isNotBlank(path)){
FileUtil.writeApend(path, json);
}
}
}
package com.wanji.indicators.task.track.service.sink;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.parser.Feature;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.wanji.indicators.model.SingleCarTrackListModel;
import com.wanji.indicators.util.DateUtil;
import com.wanji.indicators.util.ElasticSearchSinkUtil;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.Serializable;
/**
* @author fengyi
* @date 2023/3/2
* @description
*/
public class PartitionBatchTrackElasticSearchSinkFunction implements ElasticsearchSinkFunction<SingleCarTrackListModel>, Serializable {
private static Logger log = LoggerFactory.getLogger(PartitionBatchTrackElasticSearchSinkFunction.class);
private String index;
public PartitionBatchTrackElasticSearchSinkFunction(String index) {
this.index = index;
}
public IndexRequest createIndexRequest(SingleCarTrackListModel element) {
String jsonStr = JSON.toJSONString(element, SerializerFeature.WRITE_MAP_NULL_FEATURES);
JSONObject jsonObject = JSONObject.parseObject(jsonStr, Feature.OrderedField);
jsonObject.put("recordTime", DateUtil.getNowTime("yyyy-MM-dd HH:mm:ss.SSS"));
//Partition the index by day
String index = getIndexNameByDate(element);
ElasticSearchSinkUtil.createIndex(index,true,getMappingInfo());
//String id = element.getOrgCode()+"_"+element.getId()+"_"+element.getPicLicense()+"_"+element.getG;
IndexRequest indexRequest = Requests.indexRequest()
.index(index)
// .type(StringUtils.isEmpty(element.getPicLicense()) ?element.getId().toString():element.getPicLicense())
// .id(UUID.randomUUID().toString())
// .timeout(TimeValue.timeValueSeconds(60))
.source(jsonObject.toJSONString(), XContentType.JSON);
return indexRequest;
}
@Override
public void process(SingleCarTrackListModel row, RuntimeContext runtimeContext, RequestIndexer requestIndexer) {
try {
requestIndexer.add(createIndexRequest(row));
} catch (Exception e) {
log.error("failed to index partitioned track batch", e);
}
}
/**
* Builds the index mapping definition.
*/
public XContentBuilder getMappingInfo() {
//Define mapping field constraints
try {
XContentBuilder mapping = XContentFactory.jsonBuilder()
.startObject()
//.startObject(index)
//.startObject("mapping")
.startObject("properties")
.startObject("orgCode").field("type", "text").field("fielddata", true).endObject()
.startObject("endTime").field("type", "text").field("fielddata", true).endObject()
.endObject()
//.endObject()
//.endObject()
.endObject();
return mapping;
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
private String getIndexNameByDate(SingleCarTrackListModel row){
String globalDate = DateUtil.toDateTime(row.getGlobalEndTime().getTime(),"yyyyMMdd");
//Partition the index by day
String index = this.index+"_"+globalDate;
return index;
}
}
package com.wanji.indicators.task.track.service.sink;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.parser.Feature;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.wanji.indicators.model.RoutePathCarTrackModel;
import com.wanji.indicators.util.DateUtil;
import com.wanji.indicators.util.ElasticSearchSinkUtil;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.Serializable;
/**
* @author fengyi
* @date 2023/3/2
* @description
*/
public class PartitionRoutePathElasticSearchSinkFunction implements ElasticsearchSinkFunction<RoutePathCarTrackModel>, Serializable {
private static Logger log = LoggerFactory.getLogger(PartitionRoutePathElasticSearchSinkFunction.class);
private String index;
public PartitionRoutePathElasticSearchSinkFunction(String index) {
this.index = index;
}
public IndexRequest createIndexRequest(RoutePathCarTrackModel element) {
String id = element.getTargetId().toString()+"_"+element.getPicLicense()+element.getLineNo();
String jsonStr = JSON.toJSONString(element, SerializerFeature.WRITE_MAP_NULL_FEATURES);
JSONObject jsonObject = JSONObject.parseObject(jsonStr, Feature.OrderedField);
jsonObject.put("recordTime", DateUtil.getNowTime("yyyy-MM-dd HH:mm:ss.SSS"));
String globalDate = DateUtil.toDateTime(DateUtil.StringToMillis(element.getEndTime(),"yyyy-MM-dd HH:mm:ss.SSS"),"yyyyMMdd");
//Partition the index by day
String index = this.index+"_"+globalDate;
ElasticSearchSinkUtil.createIndex(index,true,getMappingInfo());
//log.info(jsonObject.toJSONString());
return Requests.indexRequest()
.index(index)
.source(jsonObject.toJSONString(), XContentType.JSON)
;
}
@Override
public void process(RoutePathCarTrackModel row, RuntimeContext runtimeContext, RequestIndexer requestIndexer) {
try {
requestIndexer.add(createIndexRequest(row));
} catch (Exception e) {
log.error("failed to index route path record", e);
}
}
/**
* Builds the index mapping definition.
*/
public XContentBuilder getMappingInfo() {
//Define mapping field constraints
try {
XContentBuilder mapping = XContentFactory.jsonBuilder()
.startObject()
//.startObject(indexName)
.startObject("properties")
//.startObject("orgCode").field("type", "text").field("fielddata", "true").endObject()
.startObject("endTime").field("type", "text").field("fielddata", "true").endObject()
//.endObject()
.endObject()
.endObject();
return mapping;
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
}
package com.wanji.indicators.task.track.service.sink;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.parser.Feature;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.wanji.indicators.model.RoutePathCarTrackModel;
import com.wanji.indicators.util.DateUtil;
import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.xcontent.XContentType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.Serializable;
/**
* @author fengyi
* @date 2023/3/2
* @description
*/
public class RoutePathElasticSearchSinkFunction implements ElasticsearchSinkFunction<RoutePathCarTrackModel>, Serializable {
private static Logger log = LoggerFactory.getLogger(RoutePathElasticSearchSinkFunction.class);
private String index;
public RoutePathElasticSearchSinkFunction(String index) {
this.index = index;
}
public IndexRequest createIndexRequest(RoutePathCarTrackModel element) {
String id = element.getTargetId().toString()+"_"+element.getPicLicense()+element.getLineNo();
String jsonStr = JSON.toJSONString(element, SerializerFeature.WRITE_MAP_NULL_FEATURES);
JSONObject jsonObject = JSONObject.parseObject(jsonStr, Feature.OrderedField);
jsonObject.put("recordTime", DateUtil.getNowTime("yyyy-MM-dd HH:mm:ss.SSS"));
//log.info(jsonObject.toJSONString());
return Requests.indexRequest()
.index(index)
.source(jsonObject.toJSONString(), XContentType.JSON)
;
}
@Override
public void process(RoutePathCarTrackModel row, RuntimeContext runtimeContext, RequestIndexer requestIndexer) {
try {
requestIndexer.add(createIndexRequest(row));
} catch (Exception e) {
log.error("failed to index route path record", e);
}
}
}
package com.wanji.indicators.task.track.stream;
import com.wanji.indicators.model.CarTrackModel;
import com.wanji.indicators.model.FrameModel;
import com.wanji.indicators.task.track.service.CarRoutePathMainNew;
import com.wanji.indicators.task.track.service.TrackStoreMainNew;
import com.wanji.indicators.task.track.service.func.CarTrackFlatMap;
import com.wanji.indicators.task.track.service.func.FrameFlatMap;
import com.wanji.indicators.util.PropertiesHelper;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.joda.time.DateTime;
import org.joda.time.DateTimeUtils;
import java.util.Calendar;
import java.util.Properties;
public class CarRoutePathMain {
//private static final Logger log = LoggerFactory.getLogger(TrafficEventMain.class);
public static void main(String[] args) {
//ES index for batched tracks; comma-separated args: esIndexName,windowTime
String indexName = args[0];
PropertiesHelper instance = PropertiesHelper.getInstance();
Properties properties = instance.getProperties();
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.getCheckpointConfig().setTolerableCheckpointFailureNumber(3);
DateTime currentDateTime = new DateTime();
DateTime dateTime = currentDateTime.withMillisOfDay(0).plusDays(-1);
KafkaSource<String> source = KafkaSource.<String>builder()
.setProperties(instance.getConsumerProperties())
.setProperty("auto.offset.commit", "true")
.setProperty("auto.commit.interval.ms","1000")
.setBootstrapServers(properties.getProperty("bootstrap.servers"))
.setTopics(properties.getProperty("consumer.topic"))
.setGroupId(properties.getProperty("consumer.group.id")+"-Car-Route-Path")
/* Options for the starting offset:
1. Consume from specified offsets: OffsetsInitializer.offsets(Map<TopicPartition, Long> offsets)
2. Consume from the latest position (after the last record): OffsetsInitializer.latest()
3. Consume from the earliest position (the first record): OffsetsInitializer.earliest()
4. Consume from the last committed position: OffsetsInitializer.committedOffsets()
5. For a new group that has never committed, specify a fallback: OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST)
*/
.setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.EARLIEST))//starting offset, i.e. where consumption begins
//.setStartingOffsets(OffsetsInitializer.timestamp(dateTime.getMillis()))
.setDeserializer(KafkaRecordDeserializationSchema.valueOnly(StringDeserializer.class))
.build();
DataStream<String> stream = env
.fromSource(source, WatermarkStrategy.noWatermarks(),"kafka-car-track-source");
SingleOutputStreamOperator<FrameModel> frameModelStream =
stream
.flatMap(new FrameFlatMap())
.setParallelism(1)
.name("轨迹帧数据-JsonToObject");
SingleOutputStreamOperator<CarTrackModel> carTrackModelStream =
frameModelStream.
flatMap(new CarTrackFlatMap())
.setParallelism(1)
.name("轨迹帧数据解析-ToCarTrackModel");
try {
//Route path computation
CarRoutePathMainNew.init(env,indexName).run(carTrackModelStream);
env.execute("路径计算工作流");
} catch (Exception e) {
e.printStackTrace();
//log.error("交通指标计算任务异常 : " + e);
}
}
}
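/*
 * Launch sketch (an assumption for illustration): the job reads the ES index
 * name from args[0]; the jar file name and index name below are hypothetical.
 *
 *   flink run -c com.wanji.indicators.task.track.stream.CarRoutePathMain \
 *       wj-realtime-computing-1.1.0-all.jar route.path.index
 */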
package com.wanji.indicators.task.track.stream;
import com.wanji.indicators.constant.Constant;
import com.wanji.indicators.model.CarTrackModel;
import com.wanji.indicators.model.FrameModel;
import com.wanji.indicators.task.track.service.CarRoutePathMainNew;
import com.wanji.indicators.task.track.service.TrackStoreMainNew;
import com.wanji.indicators.task.track.service.func.CarTrackFlatMap;
import com.wanji.indicators.task.track.service.func.FrameFlatMap;
import com.wanji.indicators.util.PropertiesHelper;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.runtime.state.memory.MemoryStateBackend;
import org.apache.flink.runtime.state.storage.FileSystemCheckpointStorage;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.joda.time.DateTime;
import java.util.*;
public class TrackAndRouteMain {
//private static final Logger log = LoggerFactory.getLogger(TrafficEventMain.class);
public static void main(String[] args) {
//ES index names for batched tracks and route paths: args[0]=track index, args[1]=route index
String indexName = args[0];
String indexName1 = args[1];
// ParameterTool parameter = ParameterTool.fromArgs(args);
// String indexName = parameter.get("Track_ES_IndexName");
// String indexName1 = parameter.get("Route_ES_IndexName");
PropertiesHelper instance = PropertiesHelper.getInstance();
Properties properties = instance.getProperties();
String topic = properties.getProperty("consumer.topic");
if (args.length>2){
topic = args[2];
}
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.enableCheckpointing(60*1000);
env.getCheckpointConfig().setTolerableCheckpointFailureNumber(3);
env.getCheckpointConfig().setCheckpointTimeout(10 * 60 * 1000);
env.getCheckpointConfig().setCheckpointStorage(new FileSystemCheckpointStorage(properties.getProperty("check.point.uri")));
//env.setStateBackend(new FsStateBackend(properties.getProperty("check.point.uri")));
env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.DELETE_ON_CANCELLATION);
// env.setStateBackend(
// new MemoryStateBackend(MemoryStateBackend.DEFAULT_MAX_STATE_SIZE * 100));
DateTime currentDateTime = new DateTime();
DateTime dateTime = currentDateTime.withMillisOfDay(0).minusHours(-4);
KafkaSource<String> source = KafkaSource.<String>builder()
.setProperties(instance.getConsumerProperties())
.setProperty("auto.offset.commit", "true")
.setProperty("auto.commit.interval.ms", "1000")
.setProperty("commit.offsets.on.checkpoint", "true")
.setBootstrapServers(properties.getProperty("bootstrap.servers"))
.setTopics(topic)
.setGroupId(properties.getProperty("consumer.group.id"))
/* Options for the starting offset:
1. Consume from specified offsets: OffsetsInitializer.offsets(Map<TopicPartition, Long> offsets)
2. Consume from the latest position (after the last record): OffsetsInitializer.latest()
3. Consume from the earliest position (the first record): OffsetsInitializer.earliest()
4. Consume from the last committed position: OffsetsInitializer.committedOffsets()
5. For a new group that has never committed, specify a fallback: OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST)
*/
.setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST))
// Start from the first offset whose timestamp is >= the given timestamp
//.setStartingOffsets(OffsetsInitializer.timestamp(dateTime.getMillis()))
.setDeserializer(KafkaRecordDeserializationSchema.valueOnly(StringDeserializer.class))
.build();
DataStream<String> stream = env
.fromSource(source, WatermarkStrategy.noWatermarks(), "kafka-car-track-source");
SingleOutputStreamOperator<FrameModel> frameModelStream =
stream
.flatMap(new FrameFlatMap())
.setParallelism(1)
.name("轨迹帧数据-JsonToObject");
SingleOutputStreamOperator<CarTrackModel> carTrackModelStream =
frameModelStream.
flatMap(new CarTrackFlatMap())
.setParallelism(1)
.name("轨迹帧数据解析-ToCarTrackModel");
//Filter motor-vehicle records
SingleOutputStreamOperator<CarTrackModel> filterStream =
carTrackModelStream.filter(new FilterFunction<CarTrackModel>() {
@Override
public boolean filter(CarTrackModel value) throws Exception {
List<String> typeList = Arrays.asList(Constant.MOTOR_TYPES.split(","));
String carType = value.getOriginalType().toString();
return typeList.contains(carType);
}
}).name("筛选机动车数据");
try {
//Batch track assembly
TrackStoreMainNew.init(env, indexName).run(carTrackModelStream);
CarRoutePathMainNew.init(env, indexName1).run(carTrackModelStream);
env.execute("轨迹处理工作流");
} catch (Exception e) {
e.printStackTrace();
//log.error("traffic indicator computation job failed: " + e);
}
}
}
package com.wanji.indicators.task.track.stream;
import com.wanji.indicators.model.CarTrackModel;
import com.wanji.indicators.model.FrameModel;
import com.wanji.indicators.task.track.service.AreaRealTimeIndicatorMainNew;
import com.wanji.indicators.task.track.service.CarRoutePathMainNew;
import com.wanji.indicators.task.track.service.TrackStoreMainNew;
import com.wanji.indicators.task.track.service.func.CarTrackFlatMap;
import com.wanji.indicators.task.track.service.func.FrameFlatMap;
import com.wanji.indicators.util.DateUtil;
import com.wanji.indicators.util.PropertiesHelper;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.joda.time.DateTime;
import java.util.Properties;
/**
* Packages each vehicle's track into a batch every 30 seconds.
*/
public class TrackUnionMain {
//private static final Logger log = LoggerFactory.getLogger(TrafficEventMain.class);
public static void main(String[] args) {
//ES index for batched tracks; comma-separated args: esIndexName,windowTime
String indexName = "";//args[0];
PropertiesHelper instance = PropertiesHelper.getInstance();
Properties properties = instance.getProperties();
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1);
DateTime currentDateTime = new DateTime();
//Midnight of the previous day
DateTime dateTime = currentDateTime.withMillisOfDay(0).plusDays(-1);
KafkaSource<String> source = KafkaSource.<String>builder()
.setProperties(instance.getConsumerProperties())
.setProperty("auto.offset.commit", "true")
.setProperty("auto.commit.interval.ms","1000")
.setProperty("commit.offsets.on.checkpoint","true")
.setBootstrapServers(properties.getProperty("bootstrap.servers"))
.setTopics(properties.getProperty("consumer.topic"))
.setGroupId(properties.getProperty("consumer.group.id")+"_BATCH_TRACK")
/* Options for the starting offset:
1. Consume from specified offsets: OffsetsInitializer.offsets(Map<TopicPartition, Long> offsets)
2. Consume from the latest position (after the last record): OffsetsInitializer.latest()
3. Consume from the earliest position (the first record): OffsetsInitializer.earliest()
4. Consume from the last committed position: OffsetsInitializer.committedOffsets()
5. For a new group that has never committed, specify a fallback: OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST)
*/
.setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST))//starting offset, i.e. where consumption begins
// Start from the first offset whose timestamp is >= the given timestamp
//.setStartingOffsets(OffsetsInitializer.timestamp(dateTime.getMillis()))
.setDeserializer(KafkaRecordDeserializationSchema.valueOnly(StringDeserializer.class))
.build();
DataStream<String> stream = env
.fromSource(source, WatermarkStrategy.noWatermarks(),"kafka-car-track-source");
SingleOutputStreamOperator<FrameModel> frameModelStream =
stream
.flatMap(new FrameFlatMap())
.setParallelism(1)
.name("轨迹帧数据-JsonToObject");
SingleOutputStreamOperator<CarTrackModel> carTrackModelStream =
frameModelStream.
flatMap(new CarTrackFlatMap())
.setParallelism(1)
.name("轨迹帧数据解析-ToCarTrackModel");
try {
//Batch track assembly
TrackStoreMainNew.init(env,indexName).run(carTrackModelStream);
//AreaRealTimeIndicatorMainNew.init(env,indexName).run(carTrackModelStream);
env.execute("轨迹处理工作流");
} catch (Exception e) {
e.printStackTrace();
//log.error("traffic indicator computation job failed: " + e);
}
}
}
package com.wanji.indicators.task.travelTime.service;
import com.alibaba.fastjson.JSONObject;
import com.wanji.indicators.config.RedisConfig;
import com.wanji.indicators.constant.Constant;
import com.wanji.indicators.model.CarTrackModel;
import com.wanji.indicators.task.queuelength.service.func.QueueAggregateFunction;
import com.wanji.indicators.task.queuelength.service.func.QueueProcessWindow;
import com.wanji.indicators.task.queuelength.service.model.MaxQueueResultModel;
import com.wanji.indicators.task.queuelength.service.sink.QueueRedisMapper;
import com.wanji.indicators.task.track.service.key.KeySelectorGlobalIdPlateNo;
import com.wanji.indicators.task.track.service.sink.BatchTrackElasticSearchSinkFunction;
import com.wanji.indicators.task.travelTime.service.func.TravelAggregateFunction;
import com.wanji.indicators.task.travelTime.service.func.TravelProcessWindow;
import com.wanji.indicators.task.travelTime.service.model.TravelTimeResultModel;
import com.wanji.indicators.task.travelTime.service.sink.TravelRedisMapper;
import com.wanji.indicators.util.ElasticSearchSinkUtil;
import com.wanji.indicators.util.PropertiesHelper;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.KeyedProcessFunction;
import org.apache.flink.streaming.api.windowing.assigners.EventTimeSessionWindows;
import org.apache.flink.streaming.api.windowing.assigners.SlidingEventTimeWindows;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.connectors.redis.RedisSink;
import org.apache.flink.util.Collector;
import org.apache.http.HttpHost;
import java.io.Serializable;
import java.net.MalformedURLException;
import java.time.Duration;
import java.util.List;
import java.util.Properties;
public class MaxTravelTimeMainNew implements Serializable {
private final StreamExecutionEnvironment env;
private final PropertiesHelper instance;
private Properties p = new Properties();
private String path;
private String indexName = "area.realtime.indicator";
private int windowTime = 2;
private MaxTravelTimeMainNew(StreamExecutionEnvironment env, String paramInfo) {
this.env = env;
this.instance = PropertiesHelper.getInstance();
this.p = this.instance.getProperties();
}
public static MaxTravelTimeMainNew init(StreamExecutionEnvironment env, String paramInfo) {
return new MaxTravelTimeMainNew(env,paramInfo);
}
public void run(SingleOutputStreamOperator<CarTrackModel> filterStream) {
SingleOutputStreamOperator<TravelTimeResultModel> groupByPlateStream =
filterStream.assignTimestampsAndWatermarks(
WatermarkStrategy.<CarTrackModel>forBoundedOutOfOrderness(Duration.ofSeconds(5))
.withTimestampAssigner((event, timestamp) -> event.getGlobalTimeStamp()))
.keyBy(new KeySelectorGlobalIdPlateNo())
.window(EventTimeSessionWindows.withGap(Time.minutes(2)))
.aggregate(new TravelAggregateFunction(),new TravelProcessWindow())
.name("旅行时间、速度计算");
try {
SingleOutputStreamOperator<TravelTimeResultModel> travelTimeStream = groupByPlateStream.keyBy(TravelTimeResultModel::getId).countWindow(2).maxBy("travelTime");
travelTimeStream.print();
travelTimeStream.addSink( new RedisSink<TravelTimeResultModel>(new RedisConfig().getRedisConfig(), new TravelRedisMapper(Constant.AREA_REALTIME_INDICATOR)));
// SingleOutputStreamOperator<TravelTimeResultModel> travelSpeedStream = groupByPlateStream.keyBy(TravelTimeResultModel::getId).countWindow(2).maxBy("travelSpeed");
// travelTimeStream.addSink( new RedisSink<TravelTimeResultModel>(new RedisConfig().getRedisConfig(), new TravelRedisMapper(Constant.AREA_REALTIME_INDICATOR)));
} catch (Exception e) {
e.printStackTrace();
}
}
}
package com.wanji.indicators.task.travelTime.service.func;
import com.wanji.indicators.model.CarTrackModel;
import com.wanji.indicators.task.travelTime.service.model.TravelTimeResultModel;
import com.wanji.indicators.util.ArithOfBigDecmial;
import com.wanji.indicators.util.DateUtil;
import com.wanji.indicators.util.GeomsConvertUtil;
import org.apache.flink.api.common.functions.AggregateFunction;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author fengyi
* @date 2023/4/6
* @description
*/
public class TravelAggregateFunction implements AggregateFunction<CarTrackModel, TravelTimeResultModel, TravelTimeResultModel> {
private static final Logger log = LoggerFactory.getLogger(TravelAggregateFunction.class);
@Override
public TravelTimeResultModel createAccumulator() {
TravelTimeResultModel initModel = new TravelTimeResultModel();
initModel.setTravelSpeed(0D);
initModel.setTravelTime(0D);
initModel.setDistance(0D);
return initModel;
}
/**
* Invoked once for every incoming element.
*
* @param value the element that just arrived
* @param accumulator the intermediate result accumulated so far
* @return
*/
@Override
public TravelTimeResultModel add(CarTrackModel value, TravelTimeResultModel accumulator) {
//Record the first-seen position and timestamp
if (accumulator.getTimestamp() == null) {
accumulator.setTimestamp(value.getGlobalTimeStamp());
accumulator.setStartTimestamp(value.getGlobalTimeStamp());
accumulator.setLng(value.getLongitude());
accumulator.setLat(value.getLatitude());
accumulator.setGlobalId(value.getOrgCode()+"_"+value.getId()+"_"+value.getPicLicense());
} else {
//Elapsed time from the previous frame to the current one (ms)
long travelTime = value.getGlobalTimeStamp() - accumulator.getTimestamp();
if (travelTime > 0) {
//Distance travelled since the previous frame (metres)
double distance = GeomsConvertUtil.getDistance(accumulator.getLng(), accumulator.getLat(), value.getLongitude(), value.getLatitude());
//Accumulated travel time in seconds
accumulator.setTravelTime(accumulator.getTravelTime() + travelTime / 1000.0);
accumulator.setDistance(accumulator.getDistance() + distance);
//Speed over the last segment in m/s
double speed = ArithOfBigDecmial.div(distance, travelTime / 1000.0);
accumulator.setTravelSpeed(speed);
//Advance the reference point so the next segment is measured incrementally
accumulator.setLng(value.getLongitude());
accumulator.setLat(value.getLatitude());
accumulator.setTimestamp(value.getGlobalTimeStamp());
}
log.info("[carId:{},midId:{},travelTime:{},nowTime:{},preTime:{}]",value.getOrgCode()+"_"+value.getId()+"_"+value.getPicLicense(),accumulator.getGlobalId(),accumulator.getTravelTime(), DateUtil.toDateTime(value.getGlobalTimeStamp(),"yyyy-MM-dd HH:mm:ss.SSS"),DateUtil.toDateTime(accumulator.getTimestamp(),"yyyy-MM-dd HH:mm:ss.SSS"));
}
//System.out.println("avgSpeed:"+accumulator.getAvgSpeed()+":"+accumulator.getSumSpeed()/accumulator.getCount()+" maxSpeed:"+accumulator.getMaxSpeed());
return accumulator;
}
/*
Invoked when the window's end time is reached; returns the final result.
*/
@Override
public TravelTimeResultModel getResult(TravelTimeResultModel accumulator) {
//log.info("AggResult:"+accumulator);
return accumulator;
}
/**
* merge is required for session windows (as used in MaxTravelTimeMainNew);
* plain time windows never call it.
*
* @param a
* @param b
* @return
*/
@Override
public TravelTimeResultModel merge(TravelTimeResultModel a, TravelTimeResultModel b) {
//Returning null would break session-window merging: combine both accumulators instead
if (a.getTimestamp() == null) return b;
if (b.getTimestamp() == null) return a;
TravelTimeResultModel first = a.getStartTimestamp() <= b.getStartTimestamp() ? a : b;
TravelTimeResultModel last = (first == a) ? b : a;
first.setTravelTime(a.getTravelTime() + b.getTravelTime());
first.setDistance(a.getDistance() + b.getDistance());
first.setTimestamp(last.getTimestamp());
first.setLng(last.getLng());
first.setLat(last.getLat());
return first;
}
}
package com.wanji.indicators.task.travelTime.service.func;
import com.wanji.indicators.task.travelTime.service.model.TravelTimeResultModel;
import com.wanji.indicators.util.DateUtil;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.streaming.api.functions.windowing.ProcessWindowFunction;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.apache.flink.util.Collector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author fengyi
* @date 2023/4/6
* @description
*/
public class TravelProcessWindow extends ProcessWindowFunction<
TravelTimeResultModel,
TravelTimeResultModel,
Tuple3<String, Integer, String>,
TimeWindow> {
private static final Logger log = LoggerFactory.getLogger(TravelProcessWindow.class);
@Override
public void process(Tuple3<String, Integer, String> s, Context context, Iterable<TravelTimeResultModel> elements, Collector<TravelTimeResultModel> out) throws Exception {
long windowStartTs = context.window().getStart();
long windowEndTs = context.window().getEnd();
if (elements.iterator().hasNext()) {
TravelTimeResultModel result = elements.iterator().next();
result.setStartTime(DateUtil.toDateTime(windowStartTs,"yyyy-MM-dd HH:mm:ss.SSS"));
result.setEndTime(DateUtil.toDateTime(windowEndTs,"yyyy-MM-dd HH:mm:ss.SSS"));
log.info("Travel Data Result:" + result.toString());
out.collect(result);
}
}
}
package com.wanji.indicators.task.travelTime.service.model;
import com.wanji.indicators.constant.Constant;
import lombok.Data;
import java.io.Serializable;
/**
* @author fengyi
* @date 2023/4/6
* @description
*/
@Data
public class TravelTimeResultModel implements Serializable {
private String id = Constant.HASH_KEY_TRAVEL_DATA;
// private String plateNo;
// private String globalCode;
private String globalId;
private String startTime;
private Long startTimestamp;
private String endTime;
private Double lng;//longitude
private Double lat;//latitude
private Double travelTime;//travel time (s)
private Double travelSpeed;//travel speed (m/s)
private Double distance;//distance travelled (m)
private Long timestamp;
}
package com.wanji.indicators.task.travelTime.service.sink;
import com.alibaba.fastjson.JSON;
import com.wanji.indicators.task.freeflow.service.model.MaxSpeedResultModel;
import com.wanji.indicators.task.queuelength.service.model.MaxQueueResultModel;
import com.wanji.indicators.task.travelTime.service.model.TravelTimeResultModel;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommand;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisCommandDescription;
import org.apache.flink.streaming.connectors.redis.common.mapper.RedisMapper;
/**
* @author fengyi
* @date 2023/4/7
* @description
*/
public class TravelRedisMapper implements RedisMapper<TravelTimeResultModel> {
private String redisKey ;
public TravelRedisMapper(String redisKey){
this.redisKey = redisKey;
}
// Redis command used to persist results: store as a hash, i.e. HSET <redisKey> <id> <json>
@Override
public RedisCommandDescription getCommandDescription() {
return new RedisCommandDescription(RedisCommand.HSET, this.redisKey);
}
@Override
public String getKeyFromData(TravelTimeResultModel data) {
return data.getId();
}
@Override
public String getValueFromData(TravelTimeResultModel data) {
return JSON.toJSONString(data);
}
}
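/*
 * Resulting Redis layout (sketch): with RedisCommand.HSET this mapper writes
 * HSET <redisKey> <data.id> <json>, so the latest travel-time result per id
 * can be inspected with, e.g.:
 *
 *   redis-cli> HGETALL <redisKey>
 */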
package com.wanji.indicators.task.travelTime.stream;
import com.wanji.indicators.constant.Constant;
import com.wanji.indicators.model.CarTrackModel;
import com.wanji.indicators.model.FrameModel;
import com.wanji.indicators.task.track.service.CarRoutePathMainNew;
import com.wanji.indicators.task.track.service.TrackStoreMainNew;
import com.wanji.indicators.task.track.service.func.CarTrackFlatMap;
import com.wanji.indicators.task.track.service.func.FrameFlatMap;
import com.wanji.indicators.task.travelTime.service.MaxTravelTimeMainNew;
import com.wanji.indicators.util.PropertiesHelper;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.runtime.state.storage.FileSystemCheckpointStorage;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.joda.time.DateTime;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.UUID;
public class CalculateTravelTimeMain {
//private static final Logger log = LoggerFactory.getLogger(TrafficEventMain.class);
public static void main(String[] args) {
PropertiesHelper instance = PropertiesHelper.getInstance();
Properties properties = instance.getProperties();
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.enableCheckpointing(60*1000);
env.getCheckpointConfig().setTolerableCheckpointFailureNumber(3);
env.getCheckpointConfig().setCheckpointTimeout(10 * 60 * 1000);
env.getCheckpointConfig().setCheckpointStorage(new FileSystemCheckpointStorage(properties.getProperty("check.point.uri")));
//env.setStateBackend(new FsStateBackend(properties.getProperty("check.point.uri")));
env.getCheckpointConfig().setExternalizedCheckpointCleanup(CheckpointConfig.ExternalizedCheckpointCleanup.DELETE_ON_CANCELLATION);
// env.setStateBackend(
// new MemoryStateBackend(MemoryStateBackend.DEFAULT_MAX_STATE_SIZE * 100));
DateTime currentDateTime = new DateTime();
DateTime dateTime = currentDateTime.withMillisOfDay(0).minusHours(-4);
String topic = properties.getProperty("consumer.topic");
KafkaSource<String> source = KafkaSource.<String>builder()
.setProperties(instance.getConsumerProperties())
.setProperty("auto.offset.commit", "true")
.setProperty("auto.commit.interval.ms", "1000")
.setProperty("commit.offsets.on.checkpoint", "true")
.setBootstrapServers(properties.getProperty("bootstrap.servers"))
.setTopics(topic)
.setGroupId(properties.getProperty("consumer.group.id")+"_TRAVEL"+ UUID.randomUUID().toString())
/* Options for the starting offset:
1. Consume from specified offsets: OffsetsInitializer.offsets(Map<TopicPartition, Long> offsets)
2. Consume from the latest position (after the last record): OffsetsInitializer.latest()
3. Consume from the earliest position (the first record): OffsetsInitializer.earliest()
4. Consume from the last committed position: OffsetsInitializer.committedOffsets()
5. For a new group that has never committed, specify a fallback: OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST)
*/
.setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST))
// Start from the first offset whose timestamp is >= the given timestamp
//.setStartingOffsets(OffsetsInitializer.timestamp(dateTime.getMillis()))
.setDeserializer(KafkaRecordDeserializationSchema.valueOnly(StringDeserializer.class))
.build();
DataStream<String> stream = env
.fromSource(source, WatermarkStrategy.noWatermarks(), "kafka-car-track-source");
SingleOutputStreamOperator<FrameModel> frameModelStream =
stream
.flatMap(new FrameFlatMap())
.setParallelism(1)
.name("轨迹帧数据-JsonToObject");
SingleOutputStreamOperator<CarTrackModel> carTrackModelStream =
frameModelStream.
flatMap(new CarTrackFlatMap())
.setParallelism(1)
.name("轨迹帧数据解析-ToCarTrackModel");
//Filter motor-vehicle records
SingleOutputStreamOperator<CarTrackModel> filterStream =
carTrackModelStream.filter(new FilterFunction<CarTrackModel>() {
@Override
public boolean filter(CarTrackModel value) throws Exception {
List<String> typeList = Arrays.asList(Constant.MOTOR_TYPES.split(","));
String carType = value.getOriginalType().toString();
return typeList.contains(carType);
}
}).name("筛选机动车数据");
try {
//Travel time/speed computation
MaxTravelTimeMainNew.init(env, "indexName").run(filterStream);
env.execute("旅行时间速度计算工作流");
} catch (Exception e) {
e.printStackTrace();
//log.error("traffic indicator computation job failed: " + e);
}
}
}
package com.wanji.indicators.thread;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
public class FixedThreadPool {
// volatile is required for safe publication with double-checked locking
private static volatile ExecutorService newFixedThreadPool = null;
public static ExecutorService getInstance() {
if (newFixedThreadPool == null)
synchronized (FixedThreadPool.class) {
if (newFixedThreadPool == null)
newFixedThreadPool = Executors.newFixedThreadPool(5);
}
return newFixedThreadPool;
}
}
package com.wanji.indicators.trigger;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.ReducingState;
import org.apache.flink.api.common.state.ReducingStateDescriptor;
import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.streaming.api.windowing.time.Time;
import org.apache.flink.streaming.api.windowing.triggers.Trigger;
import org.apache.flink.streaming.api.windowing.triggers.TriggerResult;
import org.apache.flink.streaming.api.windowing.windows.Window;
/**
* CountAndTimeTrigger: fires once a given element count or a given time interval is reached.
* The count condition is tracked with a reducing-state counter;
* the time condition registers timers with Flink's timer service.
*
* @param <W>
*/
@Slf4j
public class CountAndTimeTrigger<W extends Window> extends Trigger<Object, W> {
// Count threshold
private final long size;
// Fire interval (ms)
private final long interval;
private static final long serialVersionUID = 1L;
// Element counter
private final ReducingStateDescriptor<Long> countStateDesc =
new ReducingStateDescriptor<>("count", new ReduceSum(), LongSerializer.INSTANCE);
// Time state holding the next fire timestamp
private final ReducingStateDescriptor<Long> timeStateDesc =
new ReducingStateDescriptor<>("fire-interval", new ReduceMin(), LongSerializer.INSTANCE);
public CountAndTimeTrigger(long size, long interval) {
this.size = size;
this.interval = interval;
}
@Override
public TriggerResult onElement(Object element, long timestamp, W window, TriggerContext ctx) throws Exception {
// No need to register a window-end timer here; the window operator fires it automatically
// ctx.registerProcessingTimeTimer(window.maxTimestamp());
// count
ReducingState<Long> count = ctx.getPartitionedState(countStateDesc);
//interval
ReducingState<Long> fireTimestamp = ctx.getPartitionedState(timeStateDesc);
// Increment the counter for every element
count.add(1L);
if (count.get() >= size) {
log.info("countTrigger triggered, count : {}", count.get());
// Count threshold reached: reset the counter first
count.clear();
// Also remove the pending interval timer, unless it is the window-end timer
if (fireTimestamp.get() != null && fireTimestamp.get() != window.maxTimestamp()) {
// log.info("delete trigger : {}, {}", sdf.format(fireTimestamp.get()), fireTimestamp.get());
ctx.deleteProcessingTimeTimer(fireTimestamp.get());
}
fireTimestamp.clear();
// Fire the window computation
return TriggerResult.FIRE;
}
// After a fire, the next incoming element registers the timer for the next interval
timestamp = ctx.getCurrentProcessingTime();
if (fireTimestamp.get() == null) {
// long start = timestamp - (timestamp % interval);
long nextFireTimestamp = timestamp + interval;
// log.info("register trigger : {}, {}", sdf.format(nextFireTimestamp), nextFireTimestamp);
ctx.registerProcessingTimeTimer(nextFireTimestamp);
fireTimestamp.add(nextFireTimestamp);
}
return TriggerResult.CONTINUE;
}
@Override
public TriggerResult onProcessingTime(long time, W window, TriggerContext ctx) throws Exception {
// count
ReducingState<Long> count = ctx.getPartitionedState(countStateDesc);
//interval
ReducingState<Long> fireTimestamp = ctx.getPartitionedState(timeStateDesc);
// time trigger and window end
if (time == window.maxTimestamp()) {
log.info("window close : {}", time);
// Window closed: clear both the count and the timer state
count.clear();
ctx.deleteProcessingTimeTimer(fireTimestamp.get());
fireTimestamp.clear();
return TriggerResult.FIRE_AND_PURGE;
} else if (fireTimestamp.get() != null && fireTimestamp.get().equals(time)) {
log.info("timeTrigger trigger, time : {}", time);
// Interval timer fired: clear both the count and the timer state
count.clear();
fireTimestamp.clear();
return TriggerResult.FIRE;
}
return TriggerResult.CONTINUE;
}
@Override
public TriggerResult onEventTime(long time, W window, TriggerContext ctx) throws Exception {
return TriggerResult.CONTINUE;
}
@Override
public boolean canMerge() {
return true;
}
@Override
public void clear(W window, TriggerContext ctx) throws Exception {
}
@Override
public void onMerge(Window window, OnMergeContext ctx) {
ctx.mergePartitionedState(countStateDesc);
ctx.mergePartitionedState(timeStateDesc);
}
@Override
public String toString() {
return "CountAndContinuousProcessingTimeTrigger( maxCount:" + size + ",interval:" + interval + ")";
}
public static <W extends Window> CountAndTimeTrigger<W> of(long maxCount, Time interval) {
return new CountAndTimeTrigger(maxCount, interval.toMilliseconds());
}
/**
* Sum reducer used for state merging.
*/
private static class ReduceSum implements ReduceFunction<Long> {
private static final long serialVersionUID = 1L;
@Override
public Long reduce(Long value1, Long value2) {
return value1 + value2;
}
}
/**
* Min reducer used for state merging.
*/
private static class ReduceMin implements ReduceFunction<Long> {
private static final long serialVersionUID = 1L;
@Override
public Long reduce(Long value1, Long value2) {
return Math.min(value1, value2);
}
}
}
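/*
 * Usage sketch (an assumption for illustration): firing a keyed processing-time
 * window on 100 elements or every 10 seconds, whichever comes first. The stream
 * and aggregate function below are hypothetical.
 *
 *   stream.keyBy(CarTrackModel::getPicLicense)
 *         .window(TumblingProcessingTimeWindows.of(Time.minutes(1)))
 *         .trigger(CountAndTimeTrigger.of(100, Time.seconds(10)))
 *         .aggregate(new MyAggregateFunction());
 */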
package com.wanji.indicators.trigger;
import com.wanji.indicators.util.DateUtil;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.ReducingState;
import org.apache.flink.api.common.state.ReducingStateDescriptor;
import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.windowing.triggers.Trigger;
import org.apache.flink.streaming.api.windowing.triggers.TriggerResult;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Count window trigger with a timeout.
* Time and element count together form the firing condition:
* the window fires when its time is reached, fires early if enough elements have
* already accumulated, and still fires on time even if the count was never reached.
*/
public class CountTriggerWithTimeout<T> extends Trigger<T, TimeWindow> {
private static Logger LOG = LoggerFactory.getLogger(CountTriggerWithTimeout.class);
/**
* Maximum number of elements per window
*/
private int maxCount;
/**
* event time / processing time
*/
private TimeCharacteristic timeType;
/**
* State object holding the window's current element count
*/
private ReducingStateDescriptor<Long> countStateDescriptor =
new ReducingStateDescriptor("counter", new Sum(), LongSerializer.INSTANCE);
public CountTriggerWithTimeout(int maxCount, TimeCharacteristic timeType) {
this.maxCount = maxCount;
this.timeType = timeType;
}
private TriggerResult fireAndPurge(TimeWindow window, TriggerContext ctx) throws Exception {
clear(window, ctx);
return TriggerResult.FIRE_AND_PURGE;
}
@Override
public TriggerResult onElement(T element, long timestamp, TimeWindow window, TriggerContext ctx) throws Exception {
ReducingState<Long> countState = ctx.getPartitionedState(countStateDescriptor);
countState.add(1L);
if (countState.get() >= maxCount) {
//LOG.info("fire with count: " + countState.get());
return fireAndPurge(window, ctx);
}
if (timestamp >= window.getEnd()) {
//LOG.info("fire with time:{},count:{}, window End:{},window Max:{} " , DateUtil.toDateTime(timestamp,"yyyy-MM-dd HH:mm:ss.SSS"),countState.get(), DateUtil.toDateTime(window.getEnd(),"yyyy-MM-dd HH:mm:ss.SSS"), DateUtil.toDateTime(window.maxTimestamp(),"yyyy-MM-dd HH:mm:ss.SSS"));
return fireAndPurge(window, ctx);
} else {
return TriggerResult.CONTINUE;
}
}
@Override
public TriggerResult onProcessingTime(long time, TimeWindow window, TriggerContext ctx) throws Exception {
if (timeType != TimeCharacteristic.ProcessingTime) {
return TriggerResult.CONTINUE;
}
if (time >= window.getEnd()) {
return TriggerResult.CONTINUE;
} else {
ReducingState<Long> countState = ctx.getPartitionedState(countStateDescriptor);
//LOG.info("fire with process time: " + DateUtil.toDateTime(time,"yyyy-MM-dd HH:mm:ss.SSS")+",trigger count:"+countState.get());
return fireAndPurge(window, ctx);
}
}
@Override
public TriggerResult onEventTime(long time, TimeWindow window, TriggerContext ctx) throws Exception {
//LOG.info("fire with event time: {},end time:{},win max time:{}" , DateUtil.toDateTime(time,"yyyy-MM-dd HH:mm:ss.SSS"), DateUtil.toDateTime(window.getEnd(),"yyyy-MM-dd HH:mm:ss.SSS"), DateUtil.toDateTime(window.maxTimestamp(),"yyyy-MM-dd HH:mm:ss.SSS"));
if (timeType != TimeCharacteristic.EventTime) {
return TriggerResult.CONTINUE;
}
if (time >= window.getEnd()) {
return TriggerResult.CONTINUE;
} else {
ReducingState<Long> countState = ctx.getPartitionedState(countStateDescriptor);
LOG.info("fire with event time: {},count:{},end time:{},win max time:{}", DateUtil.toDateTime(time, "yyyy-MM-dd HH:mm:ss.SSS"), countState.get(), DateUtil.toDateTime(window.getEnd(), "yyyy-MM-dd HH:mm:ss.SSS"), DateUtil.toDateTime(window.maxTimestamp(), "yyyy-MM-dd HH:mm:ss.SSS"));
return fireAndPurge(window, ctx);
}
}
@Override
public void clear(TimeWindow window, TriggerContext ctx) throws Exception {
ReducingState<Long> countState = ctx.getPartitionedState(countStateDescriptor);
countState.clear();
}
/**
* Counting reducer.
*/
class Sum implements ReduceFunction<Long> {
@Override
public Long reduce(Long value1, Long value2) throws Exception {
return value1 + value2;
}
}
}
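/*
 * Usage sketch (an assumption for illustration): an event-time window that
 * fires early once 500 elements arrive, and on time otherwise. The stream and
 * process function below are hypothetical.
 *
 *   stream.keyBy(CarTrackModel::getPicLicense)
 *         .window(TumblingEventTimeWindows.of(Time.seconds(30)))
 *         .trigger(new CountTriggerWithTimeout<>(500, TimeCharacteristic.EventTime))
 *         .process(new MyProcessWindowFunction());
 */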
package com.wanji.indicators.trigger;
import com.wanji.indicators.util.DateUtil;
import org.apache.flink.api.common.functions.ReduceFunction;
import org.apache.flink.api.common.state.ReducingState;
import org.apache.flink.api.common.state.ReducingStateDescriptor;
import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.streaming.api.windowing.triggers.Trigger;
import org.apache.flink.streaming.api.windowing.triggers.TriggerResult;
import org.apache.flink.streaming.api.windowing.windows.TimeWindow;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class CustomCountTriggerWithEventTime<T> extends Trigger<T, TimeWindow> {
/**
*
*/
private static final long serialVersionUID = 6021946857731563476L;
private static final Logger LOG = LoggerFactory.getLogger(CustomCountTriggerWithEventTime.class);
private final long maxCount;
private final ReducingStateDescriptor<Long> countStateDescriptor;
public CustomCountTriggerWithEventTime(long maxCount) {
this.maxCount = maxCount;
countStateDescriptor = new ReducingStateDescriptor<>("countState", new ReduceSum(), LongSerializer.INSTANCE);
}
private TriggerResult fireAndPurge(long timestamp, TimeWindow window, TriggerContext ctx) throws Exception {
clear(window, ctx);
return TriggerResult.FIRE_AND_PURGE;
}
@Override
public TriggerResult onElement(T element, long timestamp, TimeWindow window, TriggerContext ctx) throws Exception {
if (window.maxTimestamp() > ctx.getCurrentWatermark()) {
ctx.registerEventTimeTimer(window.maxTimestamp());
}
ReducingState<Long> countState = ctx.getPartitionedState(countStateDescriptor);
// A new element has arrived: increment the total count
countState.add(1L);
if (countState.get() >= maxCount) {
LOG.info("Count Trigger triggered on count exceed. count {}", countState.get());
return fireAndPurge(timestamp, window, ctx);
}
return TriggerResult.CONTINUE;
}
@Override
public TriggerResult onProcessingTime(long time, TimeWindow window, TriggerContext ctx) throws Exception {
// Window-end firing is handled by the event-time timer; processing time is unused here
return TriggerResult.CONTINUE;
}
@Override
public TriggerResult onEventTime(long time, TimeWindow window, TriggerContext ctx) throws Exception {
ReducingState<Long> countState = ctx.getPartitionedState(countStateDescriptor);
LOG.info("Count Trigger triggered on time reached time {} count:{} window end {} window max {}", DateUtil.toDateTime(time,"yyyy-MM-dd HH:mm:ss"),countState.get(), DateUtil.toDateTime(window.getEnd(),"yyyy-MM-dd HH:mm:ss"), DateUtil.toDateTime(window.maxTimestamp(),"yyyy-MM-dd HH:mm:ss"));
if (time == window.maxTimestamp()) {
return fireAndPurge(time, window, ctx);
} else {
return TriggerResult.CONTINUE;
}
}
@Override
public void clear(TimeWindow window, TriggerContext ctx) throws Exception {
ctx.deleteEventTimeTimer(window.maxTimestamp());
ReducingState<Long> countState = ctx.getPartitionedState(countStateDescriptor);
countState.clear();
}
@Override
public boolean canMerge() {
return true;
}
@Override
public void onMerge(TimeWindow window,
OnMergeContext ctx) {
// only register a timer if the watermark is not yet past the end of the merged window
// this is in line with the logic in onElement(). If the watermark is past the end of
// the window onElement() will fire and setting a timer here would fire the window twice.
long windowMaxTimestamp = window.maxTimestamp();
if (windowMaxTimestamp > ctx.getCurrentWatermark()) {
ctx.registerEventTimeTimer(windowMaxTimestamp);
}
}
/**
* Counting reducer.
*/
class ReduceSum implements ReduceFunction<Long> {
@Override
public Long reduce(Long value1, Long value2) throws Exception {
return value1 + value2;
}
}
}
package com.wanji.indicators.trigger;
/**
* @author fengyi
* @date 2023/3/3
* @description
*/
public class EventTimeTrigger {
}
package com.wanji.indicators.trigger;
import org.apache.flink.api.common.eventtime.WatermarkGenerator;
import org.apache.flink.api.common.eventtime.WatermarkOutput;
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.util.Preconditions;
public class MyAssignerWithPeriodicWatermarksAdapter<T> implements WatermarkGenerator<T> {
private final AssignerWithPeriodicWatermarks<T> wms;
private final Long autoWatermarkInterval;
public MyAssignerWithPeriodicWatermarksAdapter(AssignerWithPeriodicWatermarks<T> wms, Long autoWatermarkInterval) {
this.wms = (AssignerWithPeriodicWatermarks) Preconditions.checkNotNull(wms);
this.autoWatermarkInterval = autoWatermarkInterval;
}
@Override
public void onEvent(T event, long eventTimestamp, WatermarkOutput output) {
}
@Override
public void onPeriodicEmit(WatermarkOutput output) {
Watermark next = this.wms.getCurrentWatermark();
if (next != null) {
Long emitWatermarkTimestamp = (autoWatermarkInterval != null && autoWatermarkInterval.longValue() != 0L && (System.currentTimeMillis() - next.getTimestamp() < autoWatermarkInterval)) ? next.getTimestamp() : System.currentTimeMillis();
output.emitWatermark(new org.apache.flink.api.common.eventtime.Watermark(emitWatermarkTimestamp));
}
}
}
package com.wanji.indicators.trigger;
import org.apache.flink.api.common.eventtime.TimestampAssigner;
import org.apache.flink.api.common.eventtime.TimestampAssignerSupplier;
import org.apache.flink.api.common.eventtime.WatermarkGenerator;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.functions.AssignerWithPeriodicWatermarks;
import org.apache.flink.util.Preconditions;
public class MyWatermarkStrategy<T> implements WatermarkStrategy<T> {
private static final long serialVersionUID = 1L;
private final AssignerWithPeriodicWatermarks<T> wms;
private final Long autoWatermarkInterval;
public MyWatermarkStrategy(AssignerWithPeriodicWatermarks<T> wms, Long autoWatermarkInterval) {
this.wms = (AssignerWithPeriodicWatermarks) Preconditions.checkNotNull(wms);
this.autoWatermarkInterval = autoWatermarkInterval;
}
@Override
public TimestampAssigner<T> createTimestampAssigner(TimestampAssignerSupplier.Context context) {
return this.wms;
}
@Override
public WatermarkGenerator<T> createWatermarkGenerator(org.apache.flink.api.common.eventtime.WatermarkGeneratorSupplier.Context context) {
return new MyAssignerWithPeriodicWatermarksAdapter(this.wms, this.autoWatermarkInterval);
}
}
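/*
 * Usage sketch (an assumption for illustration): adapting a legacy periodic
 * assigner to the new WatermarkStrategy API. The extractor and stream below
 * are hypothetical.
 *
 *   AssignerWithPeriodicWatermarks<CarTrackModel> legacy =
 *       new BoundedOutOfOrdernessTimestampExtractor<CarTrackModel>(Time.seconds(5)) {
 *           @Override
 *           public long extractTimestamp(CarTrackModel e) { return e.getGlobalTimeStamp(); }
 *       };
 *   stream.assignTimestampsAndWatermarks(new MyWatermarkStrategy<>(legacy, 200L));
 */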
package com.wanji.indicators.udf;
/**
* Computes the length of a polyline in metres.
* @author fengyi
* @date 2023/7/25
* @description
*/
import com.vividsolutions.jts.geom.LineString;
import com.wanji.indicators.util.ArithOfBigDecmial;
import com.wanji.indicators.util.GeomsConvertUtil;
import org.apache.flink.table.functions.FunctionContext;
import org.apache.flink.table.functions.ScalarFunction;
public class LineLengthUdf extends ScalarFunction {
// Optional: the open method may be omitted.
// If open is implemented, 'import org.apache.flink.table.functions.FunctionContext;' must be declared.
@Override
public void open(FunctionContext context) {
}
public double eval(String points) {
LineString lineString = GeomsConvertUtil.getLineString(points);
//Convert from degree units to metres
double length = ArithOfBigDecmial.mul(lineString.getLength(), 6371000 * Math.PI / 180);
length = ArithOfBigDecmial.round(length, 2);
return length;
}
//Optional: the close method may be omitted.
@Override
public void close() {
}
}
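/*
 * Registration sketch (an assumption for illustration): exposing the UDF to
 * Flink SQL; the table and column names below are hypothetical.
 *
 *   StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
 *   tEnv.createTemporarySystemFunction("LINE_LENGTH", new LineLengthUdf());
 *   tEnv.executeSql("SELECT LINE_LENGTH(points) AS length_m FROM road_lines");
 */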
package com.wanji.indicators.util;
/*
* @author 周士广
* @date 2019-05-29
*/
import java.math.BigDecimal;
/**
* Because Java's primitive types cannot perform exact floating-point arithmetic,
* this utility provides precise decimal operations: addition, subtraction,
* multiplication, division and rounding.
*/
public class ArithOfBigDecmial {
// Default scale for division
private static final int DEF_DIV_SCALE = 10;
// This class must not be instantiated
private ArithOfBigDecmial() {
}
/**
* Exact addition.
*
* @param v1 augend
* @param v2 addend
* @return the sum of the two parameters
*/
public static double add(double v1, double v2) {
BigDecimal b1 = new BigDecimal(Double.toString(v1));
BigDecimal b2 = new BigDecimal(Double.toString(v2));
return b1.add(b2).doubleValue();
}
/**
* Exact subtraction.
*
* @param v1 minuend
* @param v2 subtrahend
* @return the difference of the two parameters
*/
public static double sub(double v1, double v2) {
BigDecimal b1 = new BigDecimal(Double.toString(v1));
BigDecimal b2 = new BigDecimal(Double.toString(v2));
return b1.subtract(b2).doubleValue();
}
/**
* Exact multiplication.
*
* @param v1 multiplicand
* @param v2 multiplier
* @return the product of the two parameters
*/
public static double mul(double v1, double v2) {
BigDecimal b1 = new BigDecimal(Double.toString(v1));
BigDecimal b2 = new BigDecimal(Double.toString(v2));
return b1.multiply(b2).doubleValue();
}
/**
* (Relatively) exact division. When the result is non-terminating it is
* rounded half-up to 10 decimal places.
*
* @param v1 dividend
* @param v2 divisor
* @return the quotient of the two parameters
*/
public static double div(double v1, double v2) {
return div(v1, v2, DEF_DIV_SCALE);
}
/**
 * (Relatively) exact division. When the division does not terminate, the scale
 * parameter sets the precision; further digits are rounded half-up.
 *
 * @param v1 dividend
 * @param v2 divisor
 * @param scale number of decimal places to keep
 * @return the quotient of the two arguments
 */
public static double div(double v1, double v2, int scale) {
if (scale < 0) {
throw new IllegalArgumentException("The scale must be a positive integer or zero");
}
BigDecimal b1 = new BigDecimal(Double.toString(v1));
BigDecimal b2 = new BigDecimal(Double.toString(v2));
return b1.divide(b2, scale, BigDecimal.ROUND_HALF_UP).doubleValue();
}
/**
 * (Relatively) exact division with an explicit rounding mode. When the division
 * does not terminate, the scale parameter sets the precision.
 *
 * @param v1 dividend
 * @param v2 divisor
 * @param scale number of decimal places to keep
 * @param roundingMode one of the BigDecimal.ROUND_* modes
 * @return the quotient of the two arguments
 */
public static double div(double v1, double v2, int scale, int roundingMode) {
if (scale < 0) {
throw new IllegalArgumentException("The scale must be a positive integer or zero");
}
BigDecimal b1 = new BigDecimal(Double.toString(v1));
BigDecimal b2 = new BigDecimal(Double.toString(v2));
return b1.divide(b2, scale, roundingMode).doubleValue();
}
/**
 * Exact rounding of a decimal value.
 *
 * @param v the value to round
 * @param scale number of decimal places to keep
 * @return the half-up rounded result
 */
public static double round(double v, int scale) {
if (scale < 0) {
throw new IllegalArgumentException("The scale must be a positive integer or zero");
}
BigDecimal b = new BigDecimal(Double.toString(v));
BigDecimal one = new BigDecimal("1");
return b.divide(one, scale, BigDecimal.ROUND_HALF_UP).doubleValue();
}
public static void main(String[] args) {
BigDecimal bd = new BigDecimal("11.00001");
bd = bd.setScale(0, BigDecimal.ROUND_HALF_UP);
bd = bd.setScale(0, BigDecimal.ROUND_UP);
System.out.println(bd.intValue());
}
}
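/*
 * Worked example (illustrative): why this utility exists. Plain double
 * arithmetic accumulates binary rounding error; routing through BigDecimal
 * via Double.toString does not.
 *
 *   System.out.println(0.1 + 0.2);                       // 0.30000000000000004
 *   System.out.println(ArithOfBigDecmial.add(0.1, 0.2)); // 0.3
 *   System.out.println(ArithOfBigDecmial.div(10, 3, 2)); // 3.33
 */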
package com.wanji.indicators.util;
/**
 * Random generator for Chinese license-plate numbers.
 * @author fengyi
 * @date 2023/1/12
 */
public class CarNumGenerator {
/**
 * Region prefixes (province abbreviations) for Chinese plates
 */
private static final String[] CAR_AREA_ARRAY = {"京"};
/**
 * City codes; the letters I and O are not used
 */
private static final String[] CAR_LETTER_ARRAY = {"A","K","L","B","M","C","N","P","D","Q","E","R","F","S","H","U","V"};
/**
 * Plate serial characters; the letters I and O are not used
 */
private static final String[] CAR_CHAR_ARRAY = {"0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "A", "B", "C", "D", "E", "F", "G", "H", "J", "K",
"L", "M", "N", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"};
/**
 * Length of the plate serial
 */
private static final int CAR_NUM_LENGTH = 5;
public static String getCarNum() {
// pick a random region prefix
String area = CAR_AREA_ARRAY[(int)(Math.random() * CAR_AREA_ARRAY.length)];
// pick a random city code
String cityCode = CAR_LETTER_ARRAY[(int)(Math.random() * CAR_LETTER_ARRAY.length)];
// loop 5 times to generate the plate serial
StringBuilder sb = new StringBuilder();
for(int i=0; i<CAR_NUM_LENGTH; i++) {
sb.append(CAR_CHAR_ARRAY[(int)(Math.random()*CAR_CHAR_ARRAY.length)]);
}
// concatenate the parts
return area + cityCode + sb;
}
public static void main(String[] args) {
String carNum = getCarNum();
System.out.println(carNum);
}
}
package com.wanji.indicators.util;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.TimeZone;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class DateUtil {
private static final Logger log = LoggerFactory.getLogger(DateUtil.class);
private static final String FORMAT = "yyyy-MM-dd HH:mm:ss";
public static String YYYY_MM_DD_HH_MM_SS = "yyyy-MM-dd HH:mm:ss";
public static String YYYY_MM_DD_HH_MM = "yyyy-MM-dd HH:mm";
public static String YYYY_MM_DD = "yyyy-MM-dd";
public static String YYYYMMDD = "yyyyMMdd";
public static int currentTimeSlot(long time) {
    Date date = new Date(time);
    int hh = date.getHours();
    String id = TimeZone.getDefault().getID();
    // containers running in Etc/UTC: shift to UTC+8 (Beijing time)
    if (StringUtils.isNotBlank(id) &&
      id.contains("Etc/UTC"))
      hh += 8;
    // slots: 1 = morning peak (07-09), 2 = off-peak, 3 = evening peak (17-19), 4 = night (20-05)
    if (hh > 6 && hh < 10)
      return 1;
    if (hh > 16 && hh < 20)
      return 3;
    if (hh >= 20 || hh < 6)
      return 4;
    return 2;
  }
public static Date stringToDate(String time, String fmt) {
SimpleDateFormat sdf = new SimpleDateFormat(fmt);
try {
Date parse = sdf.parse(time);
return parse;
} catch (ParseException e) {
e.printStackTrace();
return null;
}
}
public static String dateFormat(String dateTime, String format) throws Exception {
Date date = (new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")).parse(dateTime);
return (new SimpleDateFormat(format)).format(date);
}
public static String toDateTime(long time, String format) {
SimpleDateFormat df = new SimpleDateFormat(format);
Date date = new Date(time);
return df.format(date);
}
public static long StringToMillis(String time, String fmt) {
if (StringUtils.isBlank(fmt))
fmt = "yyyy-MM-dd HH:mm:ss";
SimpleDateFormat sdf = new SimpleDateFormat(fmt);
Date date = null;
try {
date = sdf.parse(time);
return date.getTime();
} catch (ParseException e) {
e.printStackTrace();
return 0L;
}
}
public static long StringToMillis(String time) {
return StringToMillis(time, null);
}
public static long getNowTime() {
return System.currentTimeMillis();
}
public static String getNowTime(String format) {
if (StringUtils.isBlank(format))
format = "yyyy-MM-dd HH:mm:ss";
return toDateTime(getNowTime(), format);
}
public static String getAgoTime(String format, String unit, int time) {
return getTime(format, unit, time, "ago", new Date());
}
public static String getAfterTime(String format, String unit, int time) {
return getTime(format, unit, time, "after", new Date());
}
public static String getAgoTime(String format, String unit, int time, Date date) {
return getTime(format, unit, time, "ago", date);
}
public static String getAfterTime(String format, String unit, int time, Date date) {
return getTime(format, unit, time, "after", date);
}
private static String getTime(String format, String unit, int time, String type, Date now) {
Date now_Time;
long ss = 1000L;
long mm = 60L * ss;
long HH = 60L * mm;
long dd = 24L * HH;
long milliseconds = 0L;
if ("ss".equals(unit)) {
milliseconds = ss;
} else if ("mm".equals(unit)) {
milliseconds = mm;
} else if ("HH".equals(unit)) {
milliseconds = HH;
} else {
milliseconds = dd;
}
if ("ago".equals(type)) {
now_Time = new Date(now.getTime() - time * milliseconds);
} else {
now_Time = new Date(now.getTime() + time * milliseconds);
}
SimpleDateFormat dateFormat = new SimpleDateFormat(format);
String agoTime = dateFormat.format(now_Time);
return agoTime;
}
public static int getDifferenceBetweenTheTime(String startTime, String endTime, String fmt) {
long start = StringToMillis(startTime, fmt);
long end = StringToMillis(endTime, fmt);
long ms = end - start;
int ss = 1000;
int mi = ss * 60;
int hh = mi * 60;
int dd = hh * 24;
long day = ms / dd;
return Math.abs((int)day);
}
public static int getDifferenceBetweenTheMi(String startTime, String endTime, String fmt) {
long start = StringToMillis(startTime, fmt);
long end = StringToMillis(endTime, fmt);
long ms = end - start;
int ss = 1000;
int mm = ss * 60;
long mi = ms / mm;
return Math.abs((int)mi);
}
public static int getDifferenceBetweenTheS(String startTime, String endTime, String fmt) {
long start = StringToMillis(startTime, fmt);
long end = StringToMillis(endTime, fmt);
long ms = end - start;
int ss = 1000;
long si = ms / ss;
return Math.abs((int)si);
}
public static String getDamaiDay(String date) {
return getDamaiDay(stringToDate(date, YYYY_MM_DD));
}
private static String getDamaiDay(Date date) {
Calendar calendar = Calendar.getInstance();
calendar.setTime(date);
switch (calendar.get(Calendar.DAY_OF_WEEK)) {
case 2:
return "星期一";
case 3:
return "星期二";
case 4:
return "星期三";
case 5:
return "星期四";
case 6:
return "星期五";
case 7:
return "星期六";
case 1:
return "星期日";
}
return "";
}
/**
 * Align a timestamp down to the nearest 5-minute boundary.
 */
public static long datecheck(long time) {
    String s = DateFormatUtils.format(time, YYYY_MM_DD_HH_MM_SS);
    String dateStr = s.substring(0, 15);
    int m = Integer.valueOf(s.substring(15, 16)).intValue();
    if (m < 5) {
      dateStr = dateStr + "0:00";
    } else {
      dateStr = dateStr + "5:00";
    }
    return StringToMillis(dateStr);
  }
}
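/*
 * Usage sketch (illustrative):
 *
 *   long ms = DateUtil.StringToMillis("2023-07-25 10:00:00");
 *   String twoHoursAgo = DateUtil.getAgoTime(DateUtil.YYYY_MM_DD_HH_MM_SS, "HH", 2);
 *   int minutes = DateUtil.getDifferenceBetweenTheMi(
 *       "2023-07-25 10:00:00", "2023-07-25 10:30:00", DateUtil.YYYY_MM_DD_HH_MM_SS); // 30
 */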
package com.wanji.indicators.util;
import java.text.DecimalFormat;
public class DoubleUtil {
private static DecimalFormat FMT5 = new DecimalFormat("#0.00000");
private static DecimalFormat FMT2 = new DecimalFormat("#0.00");
private static DecimalFormat FMT1 = new DecimalFormat("#0.0");
@Deprecated
public static DecimalFormat fmt1() {
return FMT1;
}
@Deprecated
public static DecimalFormat fmt2() {
return FMT2;
}
@Deprecated
public static DecimalFormat fmt5() {
return FMT5;
}
public static double fmt1(double x) {
return Double.parseDouble(FMT1.format(x));
}
public static double fmt2(double x) {
return Double.parseDouble(FMT2.format(x).trim());
}
public static double fmt5(double x) {
return Double.parseDouble(FMT5.format(x));
}
}
package com.wanji.indicators.util;
import com.google.gson.Gson;
import com.wanji.indicators.model.SingleCarTrackListModel;
import org.apache.commons.codec.binary.Base64;
import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkBase;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.util.RetryRejectedExecutionFailureHandler;
import org.apache.flink.streaming.connectors.elasticsearch7.ElasticsearchSink;
import org.apache.flink.streaming.connectors.elasticsearch7.RestClientFactory;
import org.apache.http.HttpHost;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.message.BasicHeader;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.client.*;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.GetIndexRequest;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.UUID;
/**
 * Helper for creating Elasticsearch clients and Flink ES sinks.
 * @author fengyi
 * @date 2023/3/2
 */
public class ElasticSearchSinkUtil {
private static RestHighLevelClient client = null;
private static Gson gson = new Gson().newBuilder().create();
static {
Properties p = PropertiesHelper.getInstance().getProperties();
List<HttpHost> hostLit = null;
try {
hostLit = getEsAddresses(p.getProperty("elasticsearch.server"));
HttpHost[] hostArray = hostLit.toArray(new HttpHost[hostLit.size()]);
RestClientBuilder builder = RestClient.builder(hostArray);
//configure basic-auth credentials
CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(p.getProperty("elasticsearch.username"), p.getProperty("elasticsearch.password")));
builder.setHttpClientConfigCallback(f -> f.setDefaultCredentialsProvider(credentialsProvider));
ElasticSearchSinkUtil.client = new RestHighLevelClient(builder);
} catch (MalformedURLException e) {
e.printStackTrace();
}
}
public static RestHighLevelClient getClient() {
return client;
}
public static void close() {
try {
ElasticSearchSinkUtil.client.close();
} catch (IOException e) {
e.printStackTrace();
}
}
public static boolean isIndexExists(String indexName) {
try {
return ElasticSearchSinkUtil.client.indices().exists(new GetIndexRequest(indexName), RequestOptions.DEFAULT);
} catch (IOException e) {
e.printStackTrace();
}
return false;
}
public static void createIndex(String indexName,boolean isCreateMapping,XContentBuilder mappingBuilder) {
if (isIndexExists(indexName)) {
return;
}
try {
CreateIndexRequest index = new CreateIndexRequest(indexName);
if (isCreateMapping) {
index.mapping(mappingBuilder);
}
ElasticSearchSinkUtil.client.indices().create(index, RequestOptions.DEFAULT);
} catch (IOException e) {
e.printStackTrace();
}
}
public static <T> IndexResponse add(T obj, String indexName) {
IndexRequest indexRequest = new IndexRequest(indexName).id(UUID.randomUUID().toString());
indexRequest.source(new Gson().toJson(obj), XContentType.JSON);
try {
return ElasticSearchSinkUtil.client.index(indexRequest, RequestOptions.DEFAULT);
} catch (IOException e) {
e.printStackTrace();
}
return null;
}
/**
 * Parse the configured Elasticsearch hosts.
 *
 * @param hosts comma-separated host list, e.g. "host1:9200,host2:9200"
 * @return
 * @throws MalformedURLException
 */
public static List<HttpHost> getEsAddresses(String hosts) throws MalformedURLException {
String[] hostList = hosts.split(",");
List<HttpHost> addresses = new ArrayList<>();
for (String host : hostList) {
if (host.startsWith("http")) {
URL url = new URL(host);
addresses.add(new HttpHost(url.getHost(), url.getPort()));
} else {
String[] parts = host.split(":", 2);
if (parts.length > 1) {
addresses.add(new HttpHost(parts[0], Integer.parseInt(parts[1])));
} else {
throw new MalformedURLException("invalid elasticsearch hosts format");
}
}
}
return addresses;
}
/**
 * Build and attach an Elasticsearch sink.
 *
 * @param hosts es hosts
 * @param parallelism sink parallelism
 * @param data the input stream
 * @param func the indexing function
 * @param <T>
 */
public static <T> void addSink(List<HttpHost> hosts, int parallelism,
SingleOutputStreamOperator<T> data, ElasticsearchSinkFunction<T> func) {
ElasticsearchSink.Builder<T> esBuilder = new ElasticsearchSink.Builder<>(hosts, func);
//set username/password on the REST client
esBuilder.setRestClientFactory(restClientFactory);
// flush the bulk request every 100 records or every 5 seconds
esBuilder.setBulkFlushMaxActions(100);
esBuilder.setBulkFlushInterval(5000);
// number of retries on failure
esBuilder.setBulkFlushBackoffRetries(3);
// delay between retries
esBuilder.setBulkFlushBackoffDelay(2);
// exponential backoff strategy
esBuilder.setBulkFlushBackoffType(ElasticsearchSinkBase.FlushBackoffType.EXPONENTIAL);
// failure handler: retry executions rejected by a full bulk queue
esBuilder.setFailureHandler(new RetryRejectedExecutionFailureHandler());
data.addSink(esBuilder.build()).setParallelism(parallelism).name("BatchTrack Sink To ES");
}
public static RestClientFactory restClientFactory = new RestClientFactory() {
@Override
public void configureRestClientBuilder(RestClientBuilder restClientBuilder) {
PropertiesHelper instance = PropertiesHelper.getInstance();
String user = instance.getElasticProperties().getProperty("elasticsearch.username");
String pwd = instance.getElasticProperties().getProperty("elasticsearch.password");
String auth = Base64.encodeBase64String((user + ":" + pwd).getBytes());
restClientBuilder.setDefaultHeaders(new BasicHeader[]{new BasicHeader("Authorization", "Basic " + auth)});
}
};
}
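/*
 * Usage sketch (illustrative): wiring the sink onto a stream. The stream
 * "trackStream" and the index name "track.index" are assumptions.
 *
 *   List<HttpHost> hosts = ElasticSearchSinkUtil.getEsAddresses(
 *       PropertiesHelper.getInstance().getProperties().getProperty("elasticsearch.server"));
 *   ElasticSearchSinkUtil.addSink(hosts, 1, trackStream,
 *       (element, ctx, indexer) -> indexer.add(org.elasticsearch.client.Requests.indexRequest()
 *           .index("track.index")
 *           .source(new Gson().toJson(element), XContentType.JSON)));
 */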
package com.wanji.indicators.util;
import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
public class FileUtil {
public static void writerAndDel(String file, String content) {
BufferedWriter out = null;
try {
out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file, false)));
out.write(content);
out.newLine();
out.flush();
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (out != null)
out.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
public static void writeApend(String file, String content) {
BufferedWriter out = null;
try {
out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file, true)));
out.write(content);
out.newLine();
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (out != null)
out.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
package com.wanji.indicators.util;
import com.alibaba.fastjson.JSONArray;
import com.vividsolutions.jts.geom.*;
import com.vividsolutions.jts.io.ParseException;
import com.vividsolutions.jts.io.WKTReader;
import com.vividsolutions.jts.linearref.LengthLocationMap;
import com.vividsolutions.jts.linearref.LinearLocation;
import com.vividsolutions.jts.linearref.LocationIndexedLine;
import com.vividsolutions.jts.operation.buffer.BufferParameters;
import com.vividsolutions.jts.operation.buffer.OffsetCurveBuilder;
import com.vividsolutions.jts.operation.distance.DistanceOp;
import com.vividsolutions.jts.operation.linemerge.LineMerger;
import org.apache.commons.lang.StringUtils;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
public class GeomsConvertUtil {
private static GeometryFactory geometryFactory = new GeometryFactory();
private static int pointCxNum = 10;// thin out every pointCxNum consecutive points
private static int distance = 2;// thinning distance, in meters
// Earth's equatorial radius: 6378137 m
public final static double Rc = 6378137;
// Earth's polar radius: 6356725 m
public final static double Rj = 6356725;
/**
 * Offset a polyline by a buffered distance.
 * @param xys coordinate list, format: x,y;x1,y1;...,xn,yn
 * @param distance offset distance in meters; positive shifts left, negative shifts right
 * @return the offset coordinate list, format: x,y;x1,y1;...,xn,yn
 */
public static String offsetCure(String xys,double distance) {
String offsetxy = "";
/*============ define the buffer style ========================*/
BufferParameters bufferParameters = new BufferParameters();
bufferParameters.setEndCapStyle(BufferParameters.CAP_FLAT);
bufferParameters.setJoinStyle(BufferParameters.JOIN_MITRE);
bufferParameters.setQuadrantSegments(18);
/*=============================================*/
String[] coords = xys.split(";");
Coordinate[] inputPts = new Coordinate[coords.length];
int i=0;
for (String xy :coords) {
String[] pointArr = xy.split(",");
Coordinate point = new Coordinate(Double.parseDouble(pointArr[0]), Double.parseDouble(pointArr[1]));
inputPts[i] = point;
i++;
}
//convert meters to degrees
distance *= 0.00001141 ;
/*=============== perform the offset ======================*/
OffsetCurveBuilder offsetCurveBuilder = new OffsetCurveBuilder(geometryFactory.getPrecisionModel(), bufferParameters);
Coordinate[] outPts = offsetCurveBuilder.getOffsetCurve(inputPts, distance);
for (Coordinate pt : outPts) {
if ((""+pt.x).equals("NaN") || (""+pt.y).equals("NaN")) {
continue;
}
offsetxy += pt.x+","+pt.y+";";
}
return offsetxy.substring(0, offsetxy.length()-1);
}
/**
 * Truncate a value to the given number of decimal places.
 * @param value input value
 * @param num number of decimal places
 */
public static double getScale(double value,int num) {
BigDecimal bg = new BigDecimal(value);
BigDecimal nbg = bg.setScale(num,BigDecimal.ROUND_DOWN);
double rv = nbg.doubleValue();
return rv;
}
public static Geometry wktToGeometry(String wktPoint) {
WKTReader fromText = new WKTReader();
Geometry geom = null;
try {
geom = fromText.read(wktPoint);
} catch (ParseException e) {
throw new RuntimeException("Not a WKT string:" + wktPoint);
}
return geom;
}
/**
 * Build a Geometry from an area descriptor.
 * @param areaType 01 rectangle, 02 polygon, 03 circle
 * @param coords for 01: top-left;bottom-right, for 02: x,y;x1,y1;..., for 03: center[;boundary point]|[radius]
 * @return
 * @throws Exception
 */
public static Geometry getGeomtry(String areaType,String coords) throws Exception{
Geometry geom = null;
// String sql = "select e from "+poName+" e where within(e.POINT, GeomFromText(:filter)) = true";
if (StringUtils.equals(areaType, "03")){
String[] sps = coords.split("\\|");
double dis = 0;
if (sps.length==1){
String[] psps = sps[0].split(";");
String cp = psps[0];
String ep = psps[1];
double cpx = Double.parseDouble(cp.split(",")[0]);
double cpy = Double.parseDouble(cp.split(",")[1]);
// geom = GeomsConvertUtil.createPoint(cp);
double epx = Double.parseDouble(ep.split(",")[0]);
double epy = Double.parseDouble(ep.split(",")[1]);
dis =getDistance(cpx,cpy, epx,epy);
// geom = GeomsConvertUtil.createCircle(cpx, cpy, dis);
}else{
dis = Double.parseDouble(sps[1]);
geom = GeomsConvertUtil.createPoint(sps[0]);
}
//(st_distance returns degrees; multiply by 111195 (Earth radius 6371000*PI/180) to convert to meters);
// sql ="select e from VehiclePo e where st_distance(e.POINT, GeomFromText(:filter)) <= "+dis/111195;
}else if (StringUtils.equals(areaType, "01")){
String[] points = coords.split(";");
String[] leftUp = points[0].split(",");
String[] rightDown = points[1].split(",");
String xys = points[0]+";"+rightDown[0]+","+leftUp[1]+";";
xys += points[1]+";"+leftUp[0]+","+rightDown[1];
geom = GeomsConvertUtil.genGeometry(xys);
System.out.println("rectangle coords:"+GeomsConvertUtil.getPolygonString(geom));
}else if (StringUtils.equals(areaType, "02")){
geom = GeomsConvertUtil.genGeometry(coords);
System.out.println("multi poly coords:"+GeomsConvertUtil.getPolygonString(geom));
}
return geom;
}
public static String getMultiLinearString(Geometry geomBuf) {
String multis = "";
int num = geomBuf.getNumGeometries();
for (int i = 0; i < num; i++) {
Geometry polygon = geomBuf.getGeometryN(i);
String ps = getLinearRingString(polygon);
if (i != num - 1)
multis += ps + "|";
else
multis += ps;
}
return multis;
}
/**
 * Convert a point geometry to an "x,y" string.
 * @param geomBuf
 * @return
 */
public static String getPointGeomString(Point geomBuf) {
String xys = "";
Coordinate coord = geomBuf.getCoordinate();
xys = coord.x+","+coord.y;
return xys;
}
/**
 * Format boundary points as x,y;x1,y1;...
 *
 * @param geomBuf
 * @return
 */
public static String getLinearRingString(Geometry geomBuf) {
String geomstring = geomBuf.toString();
geomstring = geomstring.substring(12, geomstring.length() - 1);
geomstring = geomstring.replaceAll(", ", ";");
geomstring = geomstring.replaceAll(" ", ",");
return geomstring;
}
/**
 * Format boundary points as x,y;x1,y1;...
 *
 * @param geomBuf
 * @return
 */
public static String getPolygonString(Geometry geomBuf) {
String geomstring = geomBuf.toString();
geomstring = geomstring.substring(10, geomstring.length() - 2);
geomstring = geomstring.replaceAll(", ", ";");
geomstring = geomstring.replaceAll(" ", ",");
return geomstring;
}
/**
 * Create a circle with center (x,y) and radius RADIUS.
 *
 * @param x
 * @param y
 * @param RADIUS
 * @return
 */
public static LineString createCircle(double x, double y, final double RADIUS) {
final int SIDES = 32;// number of points on the circle
Coordinate coords[] = new Coordinate[SIDES + 1];
for (int i = 0; i < SIDES; i++) {
double angle = i * Math.PI * 2.0 /SIDES ;
double dx = Math.cos(angle) * RADIUS;
double dy = Math.sin(angle) * RADIUS;
double dlng = dx / (Rc * Math.cos(y * Math.PI / 180) * Math.PI / 180);
double dlat = dy / (Rc * Math.PI / 180);
double newlng = x + dlng;
double newlat = y + dlat;
coords[i] = new Coordinate(newlng, newlat);
}
coords[SIDES] = coords[0];
//coords[SIDES+1] = new Coordinate(x, y);
LineString ring = geometryFactory.createLineString(coords);
return ring;
}
// Convert lon/lat in milliseconds of arc (1/3600000 degree) to decimal degrees, format: x,y;x,y
public static String millsToDu(String coords) {
String cs = "";
String cdsplit[] = coords.split(" ");
int i = 0;
for (String point : cdsplit) {
String[] s = point.split(";");
if (i == cdsplit.length - 1)
cs += Long.parseLong(s[0]) / 3600000d + ","
+ Long.parseLong(s[1]) / 3600000d;
else
cs += Long.parseLong(s[0]) / 3600000d + ","
+ Long.parseLong(s[1]) / 3600000d + ";";
i++;
}
return cs;
}
/**
 * Build a LineString geometry for a road segment.
 *
 * @param points
 *            format: x,y;x,y
 * @return
 */
public static LineString getLineString(String points) {
String[] linepoints = points.split(";");
Coordinate[] coordinates1 = new Coordinate[linepoints.length];
int i = 0;
for (String lp : linepoints) {
String[] sp = lp.split(",");
Coordinate cd = new Coordinate(Double.parseDouble(sp[0]),
Double.parseDouble(sp[1]));
coordinates1[i] = cd;
i++;
}
LineString lineString1 = geometryFactory.createLineString(coordinates1);
return lineString1;
}
/**
 * Combine polygons into a geometry collection.
 *
 * @param geoms
 * @return
 */
public static GeometryCollection createGeomCollection(Geometry[] geoms) {
GeometryCollection polygonCollection = geometryFactory
.createGeometryCollection(geoms);
return polygonCollection;
}
public static MultiLineString createMultiLineString(LineString[] geoms) {
return geometryFactory.createMultiLineString(geoms);
}
private static WKTReader reader = new WKTReader(geometryFactory);
/**
 * Create a point geometry.
 * @param xy format: x,y
 * @author 周士广
 * @date 2019-10-12
 */
public static Point createPoint(String xy) {
if (StringUtils.isNotBlank(xy)) {
String[] xys = StringUtils.split(StringUtils.trim(xy), ",");
if (xys.length == 2) {
Coordinate coord = new Coordinate(Double.parseDouble(xys[0]),
Double.parseDouble(xys[1]));
Point point = geometryFactory.createPoint(coord);
return point;
}
}
return null;
}
/**************************************************************************
 * Starting from the given origin, rotate by the given azimuth and extend
 * by the given distance; returns the resulting position.
 * origin_lon: origin longitude
 * origin_lat: origin latitude
 * azimuth: offset angle
 * distance: extension distance
 * returns [longitude, latitude] of the resulting position
 **************************************************************************/
public static Double[] azimuth_offset(double origin_lon, double origin_lat, Integer azimuth,double distance)
{
Double[] lonlat=new Double[2];
if(azimuth!=null && distance>0){
lonlat[0] = origin_lon + distance * Math.sin(azimuth* Math.PI / 180) * 180 / ( Math.PI * 6371229 * Math.cos(origin_lat * Math.PI / 180));
lonlat[1] = origin_lat + distance * Math.cos(azimuth* Math.PI / 180) / ( Math.PI * 6371229 / 180);
}else{
lonlat[0]=origin_lon;
lonlat[1]=origin_lat;
}
return lonlat;
}
/**
 * Great-circle distance (in meters) between two lon/lat points.
 * @param long1 longitude 1
 * @param lat1 latitude 1
 * @param long2 longitude 2
 * @param lat2 latitude 2
 * @return
 */
public static double getDistance(double long1, double lat1, double long2, double lat2) {
double a, b, R;
R = 6378137; // Earth radius in meters
lat1 = lat1 * Math.PI / 180.0;
lat2 = lat2 * Math.PI / 180.0;
a = lat1 - lat2;
b = (long1 - long2) * Math.PI / 180.0;
double d;
double sa2, sb2;
sa2 = Math.sin(a / 2.0);
sb2 = Math.sin(b / 2.0);
d = 2 * R * Math.asin(Math.sqrt(sa2 * sa2 + Math.cos(lat1) * Math.cos(lat2) * sb2 * sb2));
return d;
}
/**
 * Convert WKT to the x,y;x,y#x,y;x,y format.
 * @param wkt
 * @return
 * @throws ParseException
 */
//MULTILINESTRING((113.715396 30.398834,113.715199 30.398557,113.714889 30.398129,113.714434 30.397481,113.71393 30.396791,113.713338 30.395962,113.71306 30.395573,113.712279 30.394459,113.711772 30.393743,113.71149 30.393354,113.711324 30.393113))
public static String formatWkt(String wkt) throws ParseException{
String xys = "";
WKTReader wktReader = new WKTReader();
Geometry geoms = wktReader.read(wkt);
int size = geoms.getNumGeometries();
for (int i=0; i<size;i++){
Geometry geom = geoms.getGeometryN(i);
String text = geom.toText();
text = text.substring(text.indexOf("(")+1,text.length()-1);
text = text.replaceAll(", ", ";");
text = text.replaceAll(" ", ",");
if (i != size-1){
xys += text+ "#";
}else{
xys += text;
}
}
return xys;
}
public static JSONArray formatWktToJsonArray(String wkt) throws ParseException{
JSONArray list = new JSONArray();
WKTReader wktReader = new WKTReader();
Geometry geoms = wktReader.read(wkt);
int size = geoms.getNumGeometries();
for (int i=0; i<size;i++){
Geometry geom = geoms.getGeometryN(i);
String text = geom.toText();
text = text.substring(text.indexOf("(")+1,text.length()-1);
text = text.replaceAll(", ", ";");
text = text.replaceAll(" ", ",");
list.add(text);
}
return list;
}
/**
 * Road line collection as a JSON array.
 * @param geomBuf
 * @return
 */
public static JSONArray getMultiLineString(Geometry geomBuf) {
JSONArray list = new JSONArray();
int num = geomBuf.getNumGeometries();
for (int i = 0; i < num; i++) {
Geometry polygon = geomBuf.getGeometryN(i);
String ps = getLinearRingString(polygon);
list.add(ps);
}
return list;
}
public static JSONArray getMultiPolyString(Geometry geomBuf) {
JSONArray list = new JSONArray();
int num = geomBuf.getNumGeometries();
for (int i = 0; i < num; i++) {
Geometry polygon = geomBuf.getGeometryN(i);
String ps = getPolygonString(polygon);
ps = ps.replaceAll("\\)", "").replaceAll("\\(", "");
list.add(ps);
}
return list;
}
public static String getMaxAreaPolygon(Geometry geomBuf) {
int num = geomBuf.getNumGeometries();
double maxarea = 0;
Geometry maxPolygon = null;
for (int i = 0; i < num; i++) {
Geometry polygon = geomBuf.getGeometryN(i);
double area = polygon.getArea();
if (i==0){
maxarea=area;
maxPolygon = polygon;
}else{
if (area > maxarea){
maxarea = area;
maxPolygon = polygon;
}
}
}
if (maxPolygon != null){
String ps = getPolygonString(maxPolygon);
ps = ps.replaceAll("\\)", "").replaceAll("\\(", "");
return ps;
}
return null;
}
/**
 * Create a closed polygon geometry.
 * @param points e.g. x,y;x,y;...
 * @return
 */
public static Geometry genGeometry(String points) throws Exception{
String[] linepoints = points.split(";");
Coordinate[] coordinates1 = new Coordinate[linepoints.length+1];
int i = 0;
for (String lp : linepoints) {
String[] sp = lp.split(",");
Coordinate cd = new Coordinate(Double.parseDouble(sp[0]),
Double.parseDouble(sp[1]));
coordinates1[i] = cd;
i++;
}
coordinates1[coordinates1.length-1]=coordinates1[0];
Polygon geom = geometryFactory.createPolygon(coordinates1);
return geom;
}
/**
 * Check whether the point (x,y) lies within the Polygon described by geometry.
 * @param pointWkt point in WKT format
 * @param geometry polygon in WKT format
 * @return
 */
public static boolean withinGeo(String pointWkt,String geometry) throws ParseException {
    WKTReader reader = new WKTReader( geometryFactory );
    Point point = (Point)reader.read(pointWkt);
Polygon polygon = (Polygon) reader.read(geometry);
return point.within(polygon);
}
/**
 * Single-sided buffer curve.
 */
public static Geometry singleSidedBufferCurve(Geometry geom, double distance) {
BufferParameters bufParam = new BufferParameters();
bufParam.setSingleSided(true);
OffsetCurveBuilder ocb = new OffsetCurveBuilder(
geom.getFactory().getPrecisionModel(), bufParam
);
Coordinate[] pts = ocb.getLineCurve(geom.getCoordinates(), distance * 0.00001141 );
Geometry curve = geom.getFactory().createLineString(pts);
return curve;
}
/**
 * Miller projection: convert lon/lat to planar coordinates.
 * @param lat latitude
 * @param lon longitude
 * @return
 * @author liyun
 * @date 2019-08-19
 */
public static double[] MillierConvertion(double lat, double lon)
{
//double L = 6381372 * Math.PI * 2;//Earth circumference
double L = 6378137 * Math.PI * 2;//Earth circumference
double W=L;// when flattened, the x-axis spans the full circumference
double H=L/2;// the y-axis spans roughly half the circumference
double mill=2.3;// Miller projection constant, roughly within ±2.3
double x = lon * Math.PI / 180;// longitude: degrees to radians
double y = lat * Math.PI / 180;// latitude: degrees to radians
y=1.25 * Math.log( Math.tan( 0.25 * Math.PI + 0.4 * y ) );// the Miller projection transform
// radians to actual distance
x = ( W / 2 ) + ( W / (2 * Math.PI) ) * x;
y = ( H / 2 ) - ( H / ( 2 * mill ) ) * y;
double[] result=new double[2];
result[0]=x;
result[1]=y;
return result;
}
/**
 * Miller projection: convert planar coordinates back to lon/lat.
 * @param x coordinate
 * @param y coordinate
 * @return
 * @author liyun
 * @date 2019-08-19
 */
public static double[] lonLatConvertion(double x, double y){
//double L = 6381372 * Math.PI * 2;//Earth circumference
double L = 6378137 * Math.PI * 2;//Earth circumference
double W = L;// when flattened, the x-axis spans the full circumference
double H = L / 2;// the y-axis spans roughly half the circumference
double mill = 2.3;// Miller projection constant, roughly within ±2.3
double lat;
lat = ((H / 2 - y) * 2 * mill) / (1.25 * H);
lat = ((Math.atan(Math.exp(lat)) - 0.25 * Math.PI) * 180) / (0.4 * Math.PI);
double lon;
lon = (x - W / 2) * 360 / W;
double[] result = new double[2];
result[0] = (double) Math.round(lon * 100000000) / 100000000;
result[1] = (double) Math.round(lat * 100000000) / 100000000;
return result;
}
/**
 * Find the coordinate of the nearest point between two geometries.
 * @param geom1
 * @param geom2
 * @author 周士广
 * @date 2019-10-12
 * return x,y
 */
public static String getNearestDistancePoint(Geometry geom1,Geometry geom2) {
DistanceOp t = new DistanceOp(geom1,geom2);
Coordinate[] a = t.nearestPoints();
String arr = a[0].x+","+a[0].y;
return arr;
}
/**
 * Nearest distance (in meters) from geom2 to geom1.
 *
 * @author shiguang.zhou
 * @date 2021-01-14
 */
public static double getNearestDistance(String linewkt,String point) {
Geometry geom1 = getLineString(linewkt);
Geometry geom2 = createPoint(point);
DistanceOp t = new DistanceOp(geom1,geom2);
double dis = t.distance();
return ArithOfBigDecmial.mul(dis, 6371000 * Math.PI / 180);
}
/**
 * Build a circle polyline.
 * @param lng center longitude
 * @param lat center latitude
 * @param radius radius in meters
 * @param numpoints number of arc points
 */
public static String addPolylinescircle(double lng,double lat, double radius,int numpoints) {
double r = 6371000.79;
double phase = 2 * Math.PI / numpoints;
StringBuilder xys = new StringBuilder();
//trace the arc
for (int i = 0; i < numpoints; i++) {
// compute the planar offset of this arc point
double dx = (radius * Math.cos(i * phase));
double dy = (radius * Math.sin(i * phase));// multiply by 1.6 for an elliptical ratio
// convert the offset to lon/lat
double dlng = dx / (r * Math.cos(lat * Math.PI / 180) * Math.PI / 180);
double dlat = dy / (r * Math.PI / 180);
double newlng = lng + dlng;
xys.append(newlng);
xys.append(",");
xys.append(lat + dlat);
if (i != numpoints-1)
xys.append(";");
}
return xys.toString();
}
/**
 * Get the point at a fractional position along a curve.
 *
 * @param coords coordinate list, format: x1,y1;x2,y2;...;xn,yn
 * @param segIndex segment index
 * @param fraction fraction within the segment, range [0,1]
 * @author 周士广
 * @date 2019-11-25
 */
public static String getCureLinePointByFraction(String coords, int segIndex, double fraction) {
// extract the point at the given location along the curve
LineString ls = GeomsConvertUtil.getLineString(coords);
LocationIndexedLine indexedLine = new LocationIndexedLine(ls);
Coordinate point = indexedLine.extractPoint(new LinearLocation(segIndex, fraction));
String ptxy = point.x + "," + point.y;
return ptxy;
}
/**
 * Given a travel distance, find the coordinate at that position along the line.
 *
 * @param coords coordinate list, format: x1,y1;x2,y2;...;xn,yn
 * @param dist distance in meters
 * @param loc which segment the position at dist falls on, and the fraction within that segment
 *
 */
public static Coordinate getLocationByDistance(String coords, Double dist, LinearLocation loc) {
LineString ls = GeomsConvertUtil.getLineString(coords);
LocationIndexedLine indexedLine = new LocationIndexedLine(ls);
// extract the coordinate at the located position
Coordinate point = indexedLine.extractPoint(loc);
return point;
}
/**
 * Extract a sub-line.
 * @param coords x,y;x1,y1;...,xn,yn
 * @param inputPt
 * @param posFlg 1: all segments up to inputPt (inputPt as end point); 2: all segments from inputPt to the end; 3: truncate to the end of the segment containing inputPt
 * @author shiguang.zhou
 * @date 2020-07-15
 */
public static Geometry getSubLineString(String coords,Coordinate inputPt,int posFlg) {
LineString ls = GeomsConvertUtil.getLineString(coords);
LocationIndexedLine indexedLine = new LocationIndexedLine(ls);
//start index of the line
LinearLocation startLoc = indexedLine.getStartIndex();
//end index of the line
LinearLocation endLoc = indexedLine.getEndIndex();
//find which segment contains the input point
LinearLocation indexLoc = indexedLine.indexOf(inputPt);
if (posFlg==2) {
startLoc = indexLoc;
indexLoc = endLoc;
}else if (posFlg==3) {
startLoc = indexLoc;
//truncate to the end of the segment containing inputPt
indexLoc = new LinearLocation(indexLoc.getSegmentIndex(), 1);
}
//extract the sub-line
Geometry geom = indexedLine.extractLine(startLoc, indexLoc);
return geom;
}
/**
 * Get the line coordinates from the start point to the end point.
 * @param coords x,y;x1,y1;...,xn,yn
 * @param indexLoc the anchor location
 * @param type 1: take from indexLoc forward along the line (indexLoc as start); 2: take from the start up to indexLoc (indexLoc as end)
 * @author shiguang.zhou
 * @date 2020-07-15
 */
public static String getSubLineStringByLoc(String coords,LinearLocation indexLoc,int type) {
String retwkt = null;
LineString ls = GeomsConvertUtil.getLineString(coords);
LocationIndexedLine indexedLine = new LocationIndexedLine(ls);
LinearLocation startLoc = indexedLine.getStartIndex();
LinearLocation endLoc = indexedLine.getEndIndex();
try {
Geometry subgeom = null;
if (type==1) {
subgeom = indexedLine.extractLine(indexLoc, endLoc);
} else if (type==2) {
subgeom = indexedLine.extractLine(startLoc, indexLoc);
}
retwkt = formatWkt(subgeom.toText());
} catch (ParseException e) {
e.printStackTrace();
}
return retwkt;
}
/**
 * Find which segment a point lies on and its fraction within that segment.
 *
 * @author shiguang.zhou
 * @date 2020-09-29
 */
public static LinearLocation getFraction(String coords,String xy) {
String[] sps = xy.split(",");
Coordinate pt = new Coordinate(Double.valueOf(sps[0]), Double.valueOf(sps[1]));
LineString ls = GeomsConvertUtil.getLineString(coords);
LocationIndexedLine indexedLine = new LocationIndexedLine(ls);
//find which segment contains the point
LinearLocation loc = indexedLine.indexOf(pt);
return loc;
}
public static Geometry getSubLineString(String coords,Coordinate startPt,Coordinate endPt) {
LineString ls = GeomsConvertUtil.getLineString(coords);
LocationIndexedLine indexedLine = new LocationIndexedLine(ls);
LinearLocation indexLocTmp = null;
//find which segment contains the start point
LinearLocation startindexLoc = indexedLine.indexOf(startPt);
//find which segment contains the end point
LinearLocation endindexLoc = indexedLine.indexOf(endPt);
if (endindexLoc.getSegmentIndex()<startindexLoc.getSegmentIndex()) {
indexLocTmp = startindexLoc;
startindexLoc = endindexLoc;
endindexLoc = indexLocTmp;
}
//extract the sub-line
Geometry geom = indexedLine.extractLine(startindexLoc, endindexLoc);
return geom;
}
/**
 * Segment length in meters.
 * @param coords segment coordinates, format: x1,y1;x2,y2;...
 * @author shiguang.zhou
 * @date 2020-04-14
 */
public static double getSegmentTotalLeng(String coords) {
double totalLen = 0;
LineString ls = GeomsConvertUtil.getLineString(coords);
// getLength() returns degrees; convert to meters
totalLen = ArithOfBigDecmial.mul(ls.getLength(), 6371000 * Math.PI / 180);
totalLen = ArithOfBigDecmial.round(totalLen, 2);
return totalLen;
}
/**
 * Locate which segment the position at distance dist falls on, and the
 * fraction of that segment covered.
 *
 * @author shiguang.zhou
 * @date 2020-04-14
 */
public static LinearLocation getLinearLocation(String coords, Double dist) {
LineString ls = GeomsConvertUtil.getLineString(coords);
double degree = dist / (6371000 * Math.PI / 180);// meters to degrees
// locate the segment containing the position at distance dist, and the fraction within it
LinearLocation loc = LengthLocationMap.getLocation(ls, degree);
return loc;
}
/**
 * Distance between two geometries.
 *
 * @author shiguang.zhou
 * @date 2020-04-14
 *
 * @return distance in meters
 */
public static double getDistance(Geometry a, Geometry b) {
    double dist = a.distance(b);// in degrees
    dist = ArithOfBigDecmial.mul(dist * 6371000, Math.PI);// degrees to meters: 6371000*Math.PI/180
    dist = ArithOfBigDecmial.div(dist, 180);
    return dist;
  }
/**
 * Convert degrees to meters.
 *
 * @author shiguang.zhou
 * @date 2021-01-20
 */
public static double duToMeter(double du) {
    return ArithOfBigDecmial.div(ArithOfBigDecmial.mul(du * 6371000, Math.PI), 180);
  }
/**
 * Convert meters to degrees.
 *
 * @author shiguang.zhou
 * @date 2021-03-09
 */
public static double meterToDu(double dist) {
    double degree = dist / (6371000 * Math.PI / 180);// meters to degrees
    return degree;
  }
public static String getCureLineSegmentByFraction(String coords, int startSegIndex, int endSegIndex,
double fraction) {
String subline = null;
LineString ls = GeomsConvertUtil.getLineString(coords);
LocationIndexedLine indexedLine = new LocationIndexedLine(ls);
Geometry eline = indexedLine.extractLine(new LinearLocation(startSegIndex, fraction), new LinearLocation());
try {
subline = GeomsConvertUtil.formatWkt(eline.toString());
int index = subline.indexOf("#");
if (index > 0)
subline = subline.substring(0, index);
} catch (ParseException e) {
e.printStackTrace();
}
return subline;
}
/**
 * Check whether a point lies inside an area.
 * @param areaWkt format: x1,y1;x2,y2;...;xn,yn
 * @param point format: x,y
 * @author shiguang.zhou
 * @date 2021-01-14
 */
public static boolean isInArea(String areaWkt,String point) {
try {
Geometry areaGeom = genGeometry(areaWkt);
Point ptgeom = createPoint(point);
return ptgeom.within(areaGeom);
} catch (Exception e) {
e.printStackTrace();
}
return false;
}
/**
 * Compute the vehicle position after traveling a given distance.
 *
 * @author shiguang.zhou
 * @date 2020-08-25
 */
public static Double[] getVhcPosPoint(String lanecoord,double distance) {
LinearLocation lineLoc = GeomsConvertUtil.getLinearLocation(lanecoord, distance);
Coordinate coord = GeomsConvertUtil.getLocationByDistance(lanecoord, distance,
lineLoc);
double x = coord.x;
double y = coord.y;
return new Double[] {x , y};
}
public static boolean isCanPassCloseArea(String areaWkt,String linexy) {
try {
Geometry areaGeom = genGeometry(areaWkt);
LineString ls = getLineString(linexy);
return areaGeom.intersects(ls);
} catch (Exception e) {
e.printStackTrace();
}
return false;
}
public static Geometry unionGeo(Geometry a,Geometry b){
return a.union(b);
}
/**
 * Cut n polygons between two edge lines at a fixed spacing.
 *
 * @param lineWkt1 edge line 1
 * @param lineWkt2 edge line 2
 * @param distance length of each cut polygon
 * @param num number of polygons to cut
 * @param type entrances must be reversed (1 = entrance)
 * @return
 */
public static List<String> getSubPolygonList(String lineWkt1, String lineWkt2, double distance, int num, int type) {
if(type == 1) {
lineWkt1 = reWkt(lineWkt1);
lineWkt2 = reWkt(lineWkt2);
}
String apoint = lineWkt1.split(";")[0];
String bpoint = lineWkt2.split(";")[0];
List<String> pList = new ArrayList<>();
String bpoint1 = "";
for (int i = 1; i < num; i++) {
Double[] point = getVhcPosPoint(lineWkt1, distance * i);
Double[] point2 = getVhcPosPoint(lineWkt2, distance * i);
String apoint1 = point[0] + "," + point[1];
if ((point2[0] + "," + point2[1]).equals(bpoint1)) {
break;
}
bpoint1 = point2[0] + "," + point2[1];
String po = apoint + ";" + apoint1 + ";" + bpoint1 + ";" + bpoint + ";" + apoint;
pList.add(po);
apoint = apoint1;
bpoint = bpoint1;
}
return pList;
}
public static String reWkt(String wkt) {
String[] aktArr = wkt.split(";");
StringBuilder wks = new StringBuilder();
for(int i = aktArr.length - 1; i >= 0; i--) {
wks.append(";").append(aktArr[i]);
}
if(wks.length() == 0) {
return "";
}else {
return wks.substring(1);
}
}
/**
 * Merge multiple lines.
 * @return
 */
public List<String> lineMerger(List<Geometry> list) {
LineMerger lineMerger = new LineMerger();
lineMerger.add(list);
Collection<Geometry> mergerLineStrings = lineMerger.getMergedLineStrings();
List<String> lineList = new ArrayList<>();
for (Geometry g : mergerLineStrings) {
lineList.add(g.toText());
}
return lineList;
}
public static void main(String[] args) throws Exception {
String wkt = "121.424661,31.39226;121.424881,31.39151;121.427819,31.381745;121.427953,31.381296;121.428082,31.380837;121.429196,31.376526;121.429994,31.373874;121.430299,31.372705;121.430537,31.371323;121.430723,31.370427;121.430914,31.369555;121.431103,31.368691;121.43127,31.367942;121.432941,31.362217;121.433261,31.360896;121.433515,31.35981;121.433691,31.358866;121.43398,31.35595;121.434063,31.355082;121.434143,31.354235;121.434701,31.348262;121.434975,31.347014;121.435106,31.346518;121.435187,31.346232;121.435625,31.345065;121.435902,31.344285;121.436606,31.342377;121.437375,31.340518;121.437711,31.339703;121.438044,31.338884;121.439232,31.33604;121.441546,31.33113;121.443878,31.326024;121.445757,31.321816;121.446752,31.31966;121.447063,31.318936;121.447369,31.318237;121.447905,31.316858;121.448035,31.316267;121.448187,31.315546;121.448293,31.314782;121.448497,31.313164;121.448623,31.310281;121.448696,31.307901;121.44866,31.307303;121.448642,31.306604;121.448625,31.305913;121.448931,31.300276;121.448984,31.299198;121.449126,31.298117;121.449373,31.296527;121.450076,31.293463;121.450251,31.292556;121.450429,31.291631;121.450841,31.289895;121.451269,31.287919;121.451429,31.286805;121.451543,31.285801;121.451693,31.284324;121.451924,31.28113;121.452023,31.279895;121.452124,31.278661;121.45231,31.277347;121.452461,31.276862;121.452637,31.276333;121.453007,31.275443;121.453265,31.274885;121.454004,31.273619;121.455329,31.271675;121.45583,31.27095;121.457568,31.268372;121.457939,31.267754;121.458115,31.267362;121.458265,31.26697;121.458421,31.265833;121.458557,31.263704;121.45866,31.26213;121.458817,31.260169;121.45895,31.259749;121.459204,31.258891;121.45946,31.258028;121.459485,31.257836;121.459485,31.257535;121.459496,31.256501;121.459479,31.255648;121.459418,31.254958;121.459025,31.252636;121.458995,31.252406;121.459004,31.252209;121.459112,31.250582;121.459083,31.250372;121.459011,31.250177;121.458846,31.25002;121.458636,31.249863;121.457204,31.249166;121.456868,31.248956;121.456573,31.248734;121.456389,31.248532;121.456215,31.248302;121.456047,31.247995;121.455684,31.247255;121.455313,31.2465;121.455125,31.246055;121.455018,31.245642;121.454982,31.245213;121.454998,31.244865;121.455065,31.244482;121.455181,31.244115;121.455346,31.243768;121.455529,31.243495;121.455743,31.243257;121.456017,31.242991;121.456234,31.242817;121.456465,31.242656;121.456712,31.242519;121.456953,31.242422;121.457267,31.242317;121.457648,31.242207;121.458699,31.241883;121.459971,31.241503;121.460583,31.241306;121.461344,31.24104;121.467182,31.23874;121.468151,31.238373;121.469041,31.238051;121.470426,31.237687;121.471569,31.237297;121.471853,31.237137;121.472116,31.236953;121.472481,31.236665;121.472974,31.236165;121.474106,31.234365;121.475137,31.232781;121.476134,31.231231;121.477547,31.22947;121.478086,31.228782;121.478159,31.228679;121.478189,31.228622;121.478233,31.22853;121.478277,31.228423;121.478386,31.228174;121.47843,31.227951;121.478461,31.227758;121.478468,31.227702;121.478473,31.22761;121.478472,31.22719;121.478448,31.226138;121.478445,31.225997;121.478438,31.225904;121.478417,31.225777;121.478378,31.22561;121.478332,31.225509;121.478246,31.225362;121.478035,31.225136;121.47761,31.224797;121.477185,31.224532;121.476407,31.224124;121.475127,31.223526;121.474212,31.223128;121.473306,31.222745;121.472383,31.222356;121.466695,31.22004;121.461164,31.217746;121.459957,31.217231;121.458745,31.216717;121.452162,31.213851;121.451048,31.213382;121.44972,31.212813;121.449563,31.212682;121.449412,31.212538;121.
449314,31.212327;121.449314,31.212087;121.449296,31.211233;121.449211,31.210127;121.448814,31.208214;121.448792,31.208145;121.448652,31.207754;121.448576,31.207569;121.448494,31.207395;121.448466,31.207353;121.447669,31.206248;121.446883,31.205164;121.446424,31.204528;121.445965,31.203891;121.444296,31.201692;121.443138,31.200113;121.442022,31.198663;121.440277,31.196306;121.439249,31.194984;121.439126,31.194794;121.439068,31.194684;121.439012,31.194564;121.438969,31.194472;121.438133,31.191698;121.437404,31.189259;121.437235,31.188728;121.437119,31.188241;121.436961,31.187457;121.436745,31.186035;121.436528,31.184209;121.436272,31.182001;121.436151,31.180969;121.435985,31.179869;121.435917,31.179221;121.435513,31.175943;121.435218,31.173487;121.435111,31.17245;121.435031,31.171169;121.434848,31.169209;121.434702,31.168228;121.434551,31.167266;121.434476,31.166688;121.43411,31.163133;121.434067,31.162858;121.434014,31.162583;121.433201,31.159207;121.432658,31.157705;121.432278,31.157017;121.431802,31.156247;121.430796,31.155095;121.430041,31.154579;121.429217,31.154016;121.428206,31.15342;121.424098,31.151061;121.420987,31.148834;121.417147,31.145353;121.416352,31.144585;121.415073,31.143098;121.414604,31.142675;121.414107,31.142217;121.413622,31.141771;121.409971,31.138218;121.405856,31.133814;121.403605,31.131565;121.40291,31.1309;121.402241,31.130274;121.401049,31.129156;121.400066,31.128214;121.398684,31.126855;121.396102,31.124412;121.395084,31.123459;121.3947,31.123076;121.394223,31.122539;121.393845,31.122059;121.393524,31.121616;121.39302,31.120899;121.392492,31.120155;121.39125,31.118396;121.390298,31.117133;121.38802,31.114151;121.387549,31.113551;121.38586,31.11171;121.385373,31.111152";
/* LineString ls = GeomsConvertUtil.getLineString(wkt);
LocationIndexedLine lil = new LocationIndexedLine(ls);
LinearLocation start = lil.indexOf(new Coordinate(8, 5));
LinearLocation end = lil.indexOf(new Coordinate(17, 10));
Geometry result = lil.extractLine(start, end);
System.out.println(result.toText());*/
Geometry result = GeomsConvertUtil.getSubLineString(wkt, new Coordinate(121.385373,31.111152), new Coordinate(121.393020,31.120899));
System.out.println(result.toText());
Geometry result1 = GeomsConvertUtil.getSubLineString(wkt, new Coordinate(121.393025,31.120899), new Coordinate(121.385373,31.111152));
System.out.println(result1.toText());
}
}
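/*
 * Usage sketch (illustrative): typical conversions from this class. All
 * degree<->meter helpers assume WGS-84 lon/lat coordinates.
 *
 *   double lenM = GeomsConvertUtil.getSegmentTotalLeng("116.39,39.90;116.40,39.91"); // meters
 *   boolean inside = GeomsConvertUtil.isInArea(
 *       "116.0,39.0;117.0,39.0;117.0,40.0;116.0,40.0", "116.5,39.5");               // true
 *   double meters = GeomsConvertUtil.duToMeter(0.001);                              // ~111.19 m
 */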
package com.wanji.indicators.util;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class HttpUtil {
private static final Logger logger = LoggerFactory.getLogger(HttpUtil.class);
public static byte[] httpRequest(String urlStr, String method, String content, String encoding, int timeOut, Map<String, String> headProperty) {
HttpURLConnection conn = null;
byte[] btemp = null;
OutputStreamWriter out = null;
InputStream input = null;
ByteArrayOutputStream bout = new ByteArrayOutputStream();
try {
URL url = new URL(urlStr);
conn = (HttpURLConnection)url.openConnection();
conn.setRequestMethod(method.toUpperCase());
conn.setConnectTimeout(timeOut * 1000);
conn.setReadTimeout(timeOut * 1000);
conn.setDoInput(true);
conn.setDoOutput(true);
if (headProperty != null)
for (String key : headProperty.keySet())
conn.addRequestProperty(key, headProperty.get(key));
conn.connect();
if (conn.getRequestMethod() == null ||
!conn.getRequestMethod().equalsIgnoreCase("get")) {
out = new OutputStreamWriter(conn.getOutputStream(), encoding);
out.write(content);
out.close();
}
input = conn.getInputStream();
byte[] bufferByte = new byte[256];
int l = -1;
while ((l = input.read(bufferByte)) > -1) {
bout.write(bufferByte, 0, l);
bout.flush();
}
btemp = bout.toByteArray();
int code = conn.getResponseCode();
String msg = conn.getResponseMessage();
Map<String, List<String>> maps = conn.getHeaderFields();
bout.close();
input.close();
} catch (Exception e) {
logger.error("", e);
} finally {
try {
if (input != null)
input.close();
if (out != null)
out.close();
if (conn != null)
conn.disconnect();
} catch (IOException iOException) {}
}
return btemp;
}
public static void main(String[] args) {
Base64.Encoder encoder = Base64.getEncoder();
String cnt = "elastic:elastic";
String authorization = encoder.encodeToString(cnt.getBytes(StandardCharsets.UTF_8));
Map<String, String> headerMap = new HashMap<>();
headerMap.put("Authorization", "Basic " + authorization);
headerMap.put("Content-Type", "application/json");
String url = "http://192.168.11.161:9200/cross.event.state-2022-02/_search?pretty";
String postParams = "{\n \"query\": {\n \"bool\": {\n \"must\": [\n {\n \"match\": {\n \"typeCode\": \"30\"\n } \n },\n {\n \"match\": {\n \"type\": \"cross.event.state\"\n }\n }\n ]\n }\n }\n}";
byte[] ret = httpRequest(url, "post", postParams, "utf-8", 10, headerMap);
System.out.println(new String(ret));
}
}
package com.wanji.indicators.util;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.*;
import java.util.*;
public class JdbcUtil {
private static final Logger log = LoggerFactory.getLogger(JdbcUtil.class);
private Connection conn;
private PreparedStatement stmt;
private ResultSet rs;
private String driver;
private String url;
private String userName;
private String passWord;
private static volatile JdbcUtil instance = null;
public static JdbcUtil getInstance() {
if (instance == null)
synchronized (JdbcUtil.class) {
if (instance == null)
instance = new JdbcUtil();
}
return instance;
}
public JdbcUtil() {
Properties prop = new Properties();
try {
prop.load(JdbcUtil.class.getClassLoader().getResourceAsStream("config.properties"));
this.driver = prop.getProperty("jdbc.driver");
String postgres_host = System.getenv("POSTGRES_HOST");
String postgres_port = System.getenv("POSTGRES_PORT");
String postgres_db = System.getenv("POSTGRES_DB");
String postgres_user = System.getenv("POSTGRES_USER");
String postgres_password = System.getenv("POSTGRES_PASSWORD");
String postgres_schema = System.getenv("POSTGRES_SCHEMA");
if (StringUtils.isNotBlank(postgres_host) &&
StringUtils.isNotBlank(postgres_port) &&
StringUtils.isNotBlank(postgres_db) &&
StringUtils.isNotBlank(postgres_user) &&
StringUtils.isNotBlank(postgres_password)) {
if (StringUtils.isBlank(postgres_schema))
postgres_schema = "public";
this.url = "jdbc:postgresql://" + postgres_host + ":" + postgres_port + "/" + postgres_db + "?currentSchema=" + postgres_schema;
this.userName = postgres_user;
this.passWord = postgres_password;
} else {
this.url = prop.getProperty("jdbc.url");
this.userName = prop.getProperty("jdbc.username");
this.passWord = prop.getProperty("jdbc.password");
}
} catch (Exception e) {
log.error("数据库链接异常: " + e.getMessage());
}
}
public Connection getConn() {
try {
Class.forName(this.driver);
this.conn = DriverManager.getConnection(this.url, this.userName, this.passWord);
} catch (ClassNotFoundException e) {
e.printStackTrace();
return null;
} catch (SQLException e) {
e.printStackTrace();
return null;
}
return this.conn;
}
public void close() {
try {
if (this.rs != null) {
this.rs.close();
this.rs = null;
}
if (this.stmt != null) {
this.stmt.close();
this.stmt = null;
}
if (this.conn != null) {
this.conn.close();
this.conn = null;
}
log.info("数据库连接资源释放 ==>> rs:" + this.rs + " stmt:" + this.stmt + " conn:" + this.conn);
} catch (SQLException e) {
log.error("资源释放发生异常: " + e.getMessage());
}
}
public ResultSet executeSql(String sql, Object... args) {
try {
getConn();
this.stmt = this.conn.prepareStatement(sql);
if (null != args && args.length != 0)
for (int i = 0; i < args.length; i++)
this.stmt.setObject(i + 1, args[i]);
this.rs = this.stmt.executeQuery();
} catch (SQLException e) {
log.error("数据查询异常: " + e.getMessage());
return null;
}
return this.rs;
}
private <T> T excuteQuery(Class<T> klass, String sql, Object... args) {
try {
this.rs = executeSql(sql, args);
ResultSetMetaData metaData = this.rs.getMetaData();
Map<String, Object> resultMap = new HashMap<>();
if (this.rs.next())
for (int i = 1; i <= metaData.getColumnCount(); i++) {
String columnname = metaData.getColumnLabel(i);
Object obj = this.rs.getObject(i);
resultMap.put(columnname, obj);
}
return (T)JSON.parseObject(JSON.toJSONString(resultMap), klass);
} catch (Exception e) {
log.error("数据查询异常" + e.getMessage());
} finally {
close();
}
return (T)JSON.toJavaObject((JSON)new JSONObject(), klass);
}
public <T> List<T> excuteQueryToList(Class<T> klass, String sql, Object... args) {
try {
this.rs = executeSql(sql, args);
List<Map<String, String>> resultList = new ArrayList<>();
while (this.rs != null && this.rs.next()) {
Map<String, String> resultMap = new HashMap<>();
ResultSetMetaData metaData = this.rs.getMetaData();
int columnCount = metaData.getColumnCount();
for (int i = 1; i <= columnCount; i++)
resultMap.put(metaData.getColumnName(i), this.rs.getString(i));
resultList.add(resultMap);
}
log.info("\n数据库查询成功: " + sql + " \n查询到数据: " + resultList.size() + "条 ");
return JSON.parseArray(JSON.toJSONString(resultList), klass);
} catch (Exception e) {
log.error("数据查询异常 " + e.getMessage() + " time: " + System.currentTimeMillis());
} finally {
close();
}
return null;
}
}
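/*
 * Usage sketch (illustrative): mapping query rows onto a model class via
 * fastjson. The table and model names are assumptions.
 *
 *   List<CrossInfoModel> rows = JdbcUtil.getInstance().excuteQueryToList(
 *       CrossInfoModel.class, "SELECT * FROM t_base_cross_info WHERE status = ?", 1);
 */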
package com.wanji.indicators.util;
import org.apache.commons.lang3.StringUtils;
import java.io.*;
import java.nio.charset.Charset;
import java.util.Properties;
import java.util.UUID;
public class PropertiesHelper implements Serializable {
private Properties properties;
private String bootstrapServers;
private PropertiesHelper() {
this.properties = getProperties();
}
private static class PropertiesHelperHolder {
private static final PropertiesHelper propertiesHelper = new PropertiesHelper();
}
public static final PropertiesHelper getInstance() {
return PropertiesHelperHolder.propertiesHelper;
}
public Properties getProperties() {
if (this.properties == null) {
this.properties = new Properties();
try {
this.bootstrapServers = System.getenv("KAFKA_BOOTSTRAP_SERVERS");
//this.properties.load(PropertiesHelper.class.getResourceAsStream("/config.properties"));
InputStream inputStream = PropertiesHelper.class.getResourceAsStream("/config.properties");
BufferedReader bf = new BufferedReader(new InputStreamReader(inputStream, Charset.defaultCharset()));
properties.load(bf);
} catch (IOException e) {
e.printStackTrace();
}
}
return this.properties;
}
public synchronized Properties getConsumerProperties() {
Properties consumerProps = new Properties();
if (StringUtils.isNotBlank(this.bootstrapServers)) {
consumerProps.setProperty("bootstrap.servers", this.bootstrapServers);
} else {
String kafka_bootstrap_servers = System.getenv("KAFKA_BOOTSTRAP_SERVERS");
if (StringUtils.isNotBlank(kafka_bootstrap_servers)) {
consumerProps.setProperty("bootstrap.servers", kafka_bootstrap_servers);
} else {
consumerProps.setProperty("bootstrap.servers", this.properties.getProperty("bootstrap.servers"));
}
}
consumerProps.setProperty("topic", this.properties.getProperty("consumer.topic"));
consumerProps.setProperty("group.id", this.properties.getProperty("consumer.group.id"));
consumerProps.setProperty("key-deserializer", this.properties.getProperty("consumer.key-deserializer"));
consumerProps.setProperty("value-deserializer", this.properties.getProperty("consumer.value-deserializer"));
return consumerProps;
}
public static Properties getConsumerProperties(Properties configProp) {
Properties consumerProps = new Properties();
consumerProps.setProperty("bootstrap.servers", configProp.getProperty("bootstrap.servers"));
consumerProps.setProperty("topic", configProp.getProperty("consumer.topic"));
consumerProps.setProperty("group.id", configProp.getProperty("consumer.group.id"));
consumerProps.setProperty("key-deserializer", configProp.getProperty("consumer.key-deserializer"));
consumerProps.setProperty("value-deserializer", configProp.getProperty("consumer.value-deserializer"));
return consumerProps;
}
public synchronized Properties getProducerProperties() {
Properties producerProps = new Properties();
if (StringUtils.isNotBlank(this.bootstrapServers)) {
producerProps.setProperty("bootstrap.servers", this.bootstrapServers);
} else {
String kafka_bootstrap_servers = System.getenv("KAFKA_BOOTSTRAP_SERVERS");
if (StringUtils.isNotBlank(kafka_bootstrap_servers)) {
producerProps.setProperty("bootstrap.servers", kafka_bootstrap_servers);
} else {
producerProps.setProperty("bootstrap.servers", this.properties.getProperty("bootstrap.servers"));
}
}
producerProps.setProperty("key-serializer", this.properties.getProperty("producer.key-serializer"));
producerProps.setProperty("value-serializer", this.properties.getProperty("producer.value-serializer"));
return producerProps;
}
public synchronized Properties getElasticProperties() {
Properties esProps = new Properties();
esProps.setProperty("elasticsearch.server", this.properties.getProperty("elasticsearch.server"));
esProps.setProperty("elasticsearch.username", this.properties.getProperty("elasticsearch.username"));
esProps.setProperty("elasticsearch.password", this.properties.getProperty("elasticsearch.password"));
return esProps;
}
}
package com.wanji.indicators.util;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
public class PtInPolyUtil {
private static double EARTH_RADIUS = 6378.137D;
private static final double R = 6371000.0D;
private static double rad(double d) {
return d * Math.PI / 180.0D;
}
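/*
 * Haversine great-circle distance between two lng/lat points, in meters
 * (EARTH_RADIUS is in kilometers, hence the final *1000).
 */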
public static double getDistance(double lng1, double lat1, double lng2, double lat2) {
double radLat1 = rad(lat1);
double radLat2 = rad(lat2);
double difference = radLat1 - radLat2;
double mdifference = rad(lng1) - rad(lng2);
double distance = 2.0D * Math.asin(Math.sqrt(Math.pow(Math.sin(difference / 2.0D), 2.0D) +
Math.cos(radLat1) * Math.cos(radLat2) *
Math.pow(Math.sin(mdifference / 2.0D), 2.0D)));
distance *= EARTH_RADIUS;
distance *= 1000.0D;
return distance;
}
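/* Initial great-circle bearing from point a to point b, in degrees [0, 360). */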
public static double getAngle(double lng_a, double lat_a, double lng_b, double lat_b) {
double longitude1 = lng_a;
double longitude2 = lng_b;
double latitude1 = Math.toRadians(lat_a);
double latitude2 = Math.toRadians(lat_b);
double longDiff = Math.toRadians(longitude2 - longitude1);
double y = Math.sin(longDiff) * Math.cos(latitude2);
double x = Math.cos(latitude1) * Math.sin(latitude2) - Math.sin(latitude1) * Math.cos(latitude2) * Math.cos(longDiff);
return (Math.toDegrees(Math.atan2(y, x)) + 360.0D) % 360.0D;
}
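/*
 * Distance in meters from point (x0,y0) to segment (x1,y1)-(x2,y2): the
 * on-segment, degenerate and obtuse cases are handled first, then Heron's
 * formula gives the triangle area and 2*area/base is the height.
 */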
public static double pointToLine(double x1, double y1, double x2, double y2, double x0, double y0) {
double space = 0.0D;
double a = getDistance(x1, y1, x2, y2);
double b = getDistance(x1, y1, x0, y0);
double c = getDistance(x2, y2, x0, y0);
if (c + b == a) {
space = 0.0D;
return space;
}
if (a <= 1.0E-6D) {
space = b;
return space;
}
if (c * c >= a * a + b * b) {
space = b;
return space;
}
if (b * b >= a * a + c * c) {
space = c;
return space;
}
double p = (a + b + c) / 2.0D;
double s = Math.sqrt(p * (p - a) * (p - b) * (p - c));
space = 2.0D * s / a;
return space;
}
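/* Arithmetic centroid of a "lng,lat;lng,lat;..." point string. */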
public static String getCenterOfGravityPoint(String mPoints) {
String[] arr = mPoints.split(";");
double x = 0.0D;
double y = 0.0D;
int len = arr.length;
for (int i = 0; i < len; i++) {
String[] xys = arr[i].split(",");
x += Double.valueOf(xys[0]).doubleValue();
y += Double.valueOf(xys[1]).doubleValue();
}
x /= len;
y /= len;
return x + "," + y;
}
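/*
 * Ray-casting point-in-polygon test: cast a vertical ray upward from the point
 * and count edge crossings; an odd count means inside. Points that fall on a
 * vertex or an edge return true (boundOrVertex).
 */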
public static boolean isPtInPoly(Point point, List<Point> pts) {
int N = pts.size();
boolean boundOrVertex = true;
int intersectCount = 0;
double precision = 2.0E-10D;
Point p = point;
Point p1 = pts.get(0);
for (int i = 1; i <= N; i++) {
if (p.equals(p1))
return boundOrVertex;
Point p2 = pts.get(i % N);
if (p.x < Math.min(p1.x, p2.x) || p.x > Math.max(p1.x, p2.x)) {
p1 = p2;
} else {
if (p.x > Math.min(p1.x, p2.x) && p.x < Math.max(p1.x, p2.x)) {
if (p.y <= Math.max(p1.y, p2.y)) {
if (p1.x == p2.x && p.y >= Math.min(p1.y, p2.y))
return boundOrVertex;
if (p1.y == p2.y) {
if (p1.y == p.y)
return boundOrVertex;
intersectCount++;
} else {
double xinters = (p.x - p1.x) * (p2.y - p1.y) / (p2.x - p1.x) + p1.y;
if (Math.abs(p.y - xinters) < precision)
return boundOrVertex;
if (p.y < xinters)
intersectCount++;
}
}
} else if (p.x == p2.x && p.y <= p2.y) {
Point p3 = pts.get((i + 1) % N);
if (p.x >= Math.min(p1.x, p3.x) && p.x <= Math.max(p1.x, p3.x)) {
intersectCount++;
} else {
intersectCount += 2;
}
}
p1 = p2;
}
}
if (intersectCount % 2 == 0)
return false;
return true;
}
public static Map<String, Object> findNeighPosition(double longitude, double latitude) {
double r = 6371.0D;
double dis = 5.0D;
double dlng = 2.0D * Math.asin(Math.sin(dis / (2.0D * r)) / Math.cos(latitude * Math.PI / 180.0D));
dlng = dlng * 180.0D / Math.PI;
double dlat = dis / r;
dlat = dlat * 180.0D / Math.PI;
double minlat = latitude - dlat;
double maxlat = latitude + dlat;
double minlng = longitude - dlng;
double maxlng = longitude + dlng;
Map<String, Object> map = new HashMap<>();
map.put("minlat", Double.valueOf(minlat));
map.put("maxlat", Double.valueOf(maxlat));
map.put("minlng", Double.valueOf(minlng));
map.put("maxlng", Double.valueOf(maxlng));
return map;
}
public static void main(String[] args) {
System.out.println(getDir(184.0D));
}
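/* Compass direction label in Chinese for a bearing in degrees; getDirEn below is the English variant. */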
public static String getDir(double angle) {
if (angle > 337.5D || angle <= 22.5D)
return "北方向";
if (angle > 22.5D && angle <= 67.5D)
return "东北方向";
if (angle > 67.5D && angle <= 112.5D)
return "东方向";
if (angle > 112.5D && angle <= 157.5D)
return "东南方向";
if (angle > 157.5D && angle <= 202.5D)
return "南方向";
if (angle > 202.5D && angle <= 247.5D)
return "西南方向";
if (angle > 247.5D && angle <= 292.5D)
return "西方向";
if (angle > 292.5D && angle <= 337.5D)
return "西北方向";
return "";
}
public static String getDirEn(double angle) {
if (angle > 337.5D || angle <= 22.5D)
return "North direction";
if (angle > 22.5D && angle <= 67.5D)
return "Northeast direction";
if (angle > 67.5D && angle <= 112.5D)
return "East direction";
if (angle > 112.5D && angle <= 157.5D)
return "Southeast direction";
if (angle > 157.5D && angle <= 202.5D)
return "South direction";
if (angle > 202.5D && angle <= 247.5D)
return "Southwest direction";
if (angle > 247.5D && angle <= 292.5D)
return "West direction";
if (angle > 292.5D && angle <= 337.5D)
return "Northwest direction";
return "";
}
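/*
 * Destination point given a start (lng/lat, degrees), an initial bearing
 * (degrees) and a distance (meters), using the standard spherical destination
 * formula with an Earth radius of 6371000 m. Returns {lon, lat}.
 */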
public static double[] calLocationByDistanceAndLocationAndDirection(double angle, double startLong, double startLat, double distance) {
double[] result = new double[2];
double x = distance / 6371000.0D;
angle = Math.toRadians(angle);
startLong = Math.toRadians(startLong);
startLat = Math.toRadians(startLat);
double lat = Math.asin(Math.sin(startLat) * Math.cos(x) + Math.cos(startLat) * Math.sin(x) * Math.cos(angle));
double lon = startLong + Math.atan2(Math.sin(angle) * Math.sin(x) * Math.cos(startLat), Math.cos(x) - Math.sin(startLat) * Math.sin(lat));
lon = Math.toDegrees(lon);
lat = Math.toDegrees(lat);
result[0] = lon;
result[1] = lat;
return result;
}
public static int segIntersect(double ax, double ay, double bx, double by, double cx, double cy, double dx, double dy) {
return segIntersect(new Point(ax, ay), new Point(bx, by), new Point(cx, cy), new Point(dx, dy));
}
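/*
 * Segment intersection test: returns 0 for degenerate or parallel input,
 * 2 when the segments intersect at a shared endpoint, 1 for a proper
 * intersection, and -1 when the computed crossing lies outside either segment.
 */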
private static int segIntersect(Point A, Point B, Point C, Point D) {
Point intersection = new Point();
if (Math.abs(B.getY() - A.getY()) + Math.abs(B.getX() - A.getX()) + Math.abs(D.getY() - C.getY()) +
Math.abs(D.getX() - C.getX()) == 0.0D)
return 0;
if (Math.abs(B.getY() - A.getY()) + Math.abs(B.getX() - A.getX()) == 0.0D)
return 0;
if (Math.abs(D.getY() - C.getY()) + Math.abs(D.getX() - C.getX()) == 0.0D)
return 0;
if ((B.getY() - A.getY()) * (C.getX() - D.getX()) - (B.getX() - A.getX()) * (C.getY() - D.getY()) == 0.0D)
return 0;
intersection
.setX(((B.getX() - A.getX()) * (C.getX() - D.getX()) * (C
.getY() - A.getY()) - C.getX() * (B
.getX() - A.getX()) * (C.getY() - D.getY()) + A
.getX() * (B.getY() - A.getY()) * (C.getX() - D.getX())) / ((B
.getY() - A.getY()) * (C.getX() - D.getX()) - (B
.getX() - A.getX()) * (C.getY() - D.getY())));
intersection
.setY(((B.getY() - A.getY()) * (C.getY() - D.getY()) * (C
.getX() - A.getX()) - C.getY() * (B
.getY() - A.getY()) * (C.getX() - D.getX()) + A
.getY() * (B.getX() - A.getX()) * (C.getY() - D.getY())) / ((B
.getX() - A.getX()) * (C.getY() - D.getY()) - (B
.getY() - A.getY()) * (C.getX() - D.getX())));
if ((intersection.getX() - A.getX()) * (intersection.getX() - B.getX()) <= 0.0D && (intersection
.getX() - C.getX()) * (intersection
.getX() - D.getX()) <= 0.0D && (intersection
.getY() - A.getY()) * (intersection
.getY() - B.getY()) <= 0.0D && (intersection
.getY() - C.getY()) * (intersection
.getY() - D.getY()) <= 0.0D) {
if ((A.getX() == C.getX() && A.getY() == C.getY()) || (A.getX() == D.getX() && A.getY() == D.getY()) || (B
.getX() == C.getX() && B.getY() == C.getY()) || (B.getX() == D.getX() && B.getY() == D.getY()))
return 2;
return 1;
}
return -1;
}
public static class Point {
public double x;
public double y;
public Point() {}
public Point(double x, double y) {
this.x = x;
this.y = y;
}
public double getX() {
return this.x;
}
public void setX(double x) {
this.x = x;
}
public double getY() {
return this.y;
}
public void setY(double y) {
this.y = y;
}
public boolean equals(Object o) {
if (this == o)
return true;
if (o == null || getClass() != o.getClass())
return false;
Point point = (Point)o;
return (Double.compare(point.x, this.x) == 0 &&
Double.compare(point.y, this.y) == 0);
}
public int hashCode() {
return Objects.hash(new Object[] { Double.valueOf(this.x), Double.valueOf(this.y) });
}
public String toString() {
return "Point [x=" + this.x + ", y=" + this.y + "]";
}
}
}
package com.wanji.indicators.util;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.shaded.jackson2.org.yaml.snakeyaml.Yaml;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class YmlUtils {
private static final Logger log = LoggerFactory.getLogger(YmlUtils.class);
public static String bootstrap_file = "C:\\Work\\IDEA\\holo-rsu\\event-compute\\branch-3\\flink-cross-event-compute\\config\\application-prd.yml";
private static Map<String, String> result = new HashMap<>();
public static Map<String, String> getYmlByFileName(String filePath, String... keys) {
result.clear();
if (StringUtils.isBlank(filePath) || "null".equals(filePath))
filePath = bootstrap_file;
InputStream in = null;
try {
File file = new File(filePath);
try {
in = new BufferedInputStream(new FileInputStream(file));
log.info("读取外部配置文件,文件路径: " + filePath);
} catch (FileNotFoundException e) {
in = YmlUtils.class.getResourceAsStream("/application-prd.yml");
log.info("配置文件不存在,读取默认配置文件: " + e.getMessage());
}
Yaml props = new Yaml();
Object obj = props.loadAs(in, Map.class);
Map<String, Object> param = (Map<String, Object>)obj;
for (Map.Entry<String, Object> entry : param.entrySet()) {
String key = entry.getKey();
Object val = entry.getValue();
if (keys.length != 0 && !keys[0].equals(key))
continue;
if (val instanceof Map) {
forEachYaml(key, (Map<String, Object>)val, 1, keys);
continue;
}
result.put(key, val.toString());
}
return result;
} catch (Exception e) {
log.error(e.getMessage(), e);
} finally {
if (in != null)
try {
in.close();
} catch (IOException e) {
log.error(e.getMessage(), e);
}
}
return null;
}
public static String getValue(String key) throws FileNotFoundException {
Map<String, String> map = getYmlByFileName(null, new String[0]);
if (map == null)
return null;
return map.get(key);
}
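/*
 * Recursively flattens a nested YAML map into dot-separated keys
 * ("a.b.c" -> value), optionally filtered by the key path given in keys.
 */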
public static Map<String, String> forEachYaml(String key_str, Map<String, Object> obj, int i, String... keys) {
for (Map.Entry<String, Object> entry : obj.entrySet()) {
String key = entry.getKey();
Object val = entry.getValue();
if (keys.length > i && !keys[i].equals(key))
continue;
String str_new = "";
if (StringUtils.isNotEmpty(key_str)) {
str_new = key_str + "." + key;
} else {
str_new = key;
}
if (val instanceof Map) {
forEachYaml(str_new, (Map<String, Object>)val, ++i, keys);
i--;
continue;
}
result.put(str_new, val.toString());
}
return result;
}
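/*
 * Loads /application-prd.yml via java.util.Properties; note this only yields
 * sensible pairs for flat "key: value" lines, since Properties knows nothing
 * of YAML nesting.
 */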
public Map<String, String> getLocalYml() {
Properties properties = new Properties();
Map<String, String> map = new HashMap<>();
try {
InputStream in = YmlUtils.class.getResourceAsStream("/application-prd.yml");
properties.load(in);
for (Object k : properties.keySet())
map.put(k.toString(), properties.getProperty(k.toString()));
} catch (IOException e) {
e.printStackTrace();
}
return map;
}
public static void main(String[] args) throws FileNotFoundException {
Map<String, String> ymlByFileName = getYmlByFileName(bootstrap_file, new String[0]);
Set<Map.Entry<String, String>> entries = ymlByFileName.entrySet();
for (Map.Entry<String, String> entry : entries)
System.out.println((String)entry.getKey() + ":" + (String)entry.getValue());
Map<String, String> localYml = (new YmlUtils()).getLocalYml();
Set<Map.Entry<String, String>> entriess = localYml.entrySet();
for (Map.Entry<String, String> entry : entriess)
System.out.println((String)entry.getKey() + ":" + (String)entry.getValue());
}
}
#kafka settings kafka1:9092,kafka2:9092,kafka3:9092 192.168.11.102:9092
bootstrap.servers=10.102.1.182:9092
#bootstrap.servers=192.168.2.103:9092
#consumer-settings
consumer.topic=JNMatchResultMiniData
consumer.group.id=FlinkConsumerGroup-V56
consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
#producer-settings
producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
#elasticsearch.server=10.102.1.182:9200
elasticsearch.server=192.168.2.103:9200
elasticsearch.username=elastic
elasticsearch.password=Wanji300552
track_index_name=car-track-30s
route_index_name=car-route-path
check.point.uri=file:///data/projects/savepoint
alarm.feishu.url=https://open.feishu.cn/open-apis/bot/v2/hook/0840f036-299e-4595-8a34-dea1c4ba6942
\ No newline at end of file
#kafka settings kafka1:9092,kafka2:9092,kafka3:9092 192.168.11.102:9092
#bootstrap.servers=10.102.1.182:9092
bootstrap.servers=192.168.2.80:9092
#consumer-settings
consumer.topic=CSMatchResultMiniData
consumer.group.id=FlinkConsumerGroup-V56
consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
consumer.value-deserializer=org.apache.kafka.common.serialization.ByteArrayDeserializer
#producer-settings
producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
#elasticsearch.server=10.102.1.182:9200
elasticsearch.server=192.168.2.80:9200
elasticsearch.username=elastic
elasticsearch.password=Wanji300552
check.point.uri=file:///data/soft/flink/flink-1.14.3/bin/savepoint
alarm.feishu.url=https://open.feishu.cn/open-apis/bot/v2/hook/0840f036-299e-4595-8a34-dea1c4ba6942
\ No newline at end of file
jdbc.driver=com.mysql.jdbc.Driver
jdbc.url=jdbc:mysql://192.168.2.78:3306/t_roadnetwork_changsha?useUnicode=true&characterEncoding=utf-8
jdbc.username=root
jdbc.password=Wanji300552
#kafka settings kafka1:9092,kafka2:9092,kafka3:9092 192.168.11.102:9092
#bootstrap.servers=10.102.1.182:9092
#bootstrap.servers=106.120.201.126:14576
bootstrap.servers=192.168.2.80:9092
#consumer-settings
consumer.topic=CSMatchResultMiniData
consumer.group.id=FlinkConsumerGroup-V56
consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
consumer.value-deserializer=org.apache.kafka.common.serialization.ByteArrayDeserializer
consumer.period.indicator.topic=cs_JN040001LanePeriodicData
consumer.snapshot.indicator.topic=cs_JN040001LaneSnapshotData
#producer-settings
producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
#elasticsearch.server=10.102.1.182:9200
elasticsearch.server=192.168.2.80:9200
elasticsearch.username=elastic
elasticsearch.password=Wanji300552
#redis.host=10.102.1.182
redis.host=192.168.2.78
redis.port=6379
redis.password=Wanji300552
redis.database=14
check.point.uri=file:///data/soft/flink/flink-1.14.3/bin/savepoint
alarm.feishu.url=https://open.feishu.cn/open-apis/bot/v2/hook/0840f036-299e-4595-8a34-dea1c4ba6942
\ No newline at end of file
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
log4j.rootLogger=info, console,R
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n
log4j.appender.R.encoding=UTF-8
#DailyRollingFileAppender creates one log file per day
log4j.appender.R=org.apache.log4j.DailyRollingFileAppender
#Log file path
log4j.appender.R.File=/data/soft/flink/logs/flink-traffic-indicators-compute.log
#log4j.appender.R.File=/data/flink-traffic-indicators-compute.log
#Log output pattern
log4j.appender.R.layout.ConversionPattern=%-d{yyyy-MM-dd HH\:mm\:ss} [%c]-[%p] %m%n
#Log file date suffix, which determines how often a new file is created: yyyyMMdd daily, yyyyMMddHH hourly, ...
log4j.appender.R.DatePattern='.'yyyy-MM-dd
#Log layout
log4j.appender.R.layout=org.apache.log4j.PatternLayout
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.wanji.indicators.mapper.BaseLaneInfoMapper">
<!-- 通用设置 -->
<!-- 通用查询列 -->
<sql id="Base_Column_List">
id, code, sort, type, dir, turn, category, cross_id, rid, segment_id, length, width, wkt, gmt_create, gmt_modified
</sql>
<!-- 通用条件列 -->
<sql id="BaseLaneInfoByCondition">
<if test="id!=null and id!=''">
AND id = #{id}
</if>
<if test="code!=null and code!=''">
AND code = #{code}
</if>
<if test="sort!=null and sort!=''">
AND sort = #{sort}
</if>
<if test="type!=null and type!=''">
AND type = #{type}
</if>
<if test="dir!=null and dir!=''">
AND dir = #{dir}
</if>
<if test="turn!=null and turn!=''">
AND turn = #{turn}
</if>
<if test="category!=null and category!=''">
AND category = #{category}
</if>
<if test="crossId!=null and crossId!=''">
AND cross_id = #{crossId}
</if>
<if test="rid!=null and rid!=''">
AND rid = #{rid}
</if>
<if test="segmentId!=null and segmentId!=''">
AND segment_id = #{segmentId}
</if>
<if test="length!=null and length!=''">
AND length = #{length}
</if>
<if test="width!=null and width!=''">
AND width = #{width}
</if>
<if test="wkt!=null and wkt!=''">
AND wkt = #{wkt}
</if>
<if test="gmtCreate!=null">
AND gmt_create = #{gmtCreate}
</if>
<if test="gmtModified!=null">
AND gmt_modified = #{gmtModified}
</if>
</sql>
<!-- 通用设置列 -->
<sql id="BaseLaneInfoSetColumns">
<if test="code!=null and code!=''">
code = #{code},
</if>
<if test="sort!=null and sort!=''">
sort = #{sort},
</if>
<if test="type!=null and type!=''">
type = #{type},
</if>
<if test="dir!=null and dir!=''">
dir = #{dir},
</if>
<if test="turn!=null and turn!=''">
turn = #{turn},
</if>
<if test="category!=null and category!=''">
category = #{category},
</if>
<if test="crossId!=null and crossId!=''">
cross_id = #{crossId},
</if>
<if test="rid!=null and rid!=''">
rid = #{rid},
</if>
<if test="segmentId!=null and segmentId!=''">
segment_id = #{segmentId},
</if>
<if test="length!=null and length!=''">
length = #{length},
</if>
<if test="width!=null and width!=''">
width = #{width},
</if>
<if test="wkt!=null and wkt!=''">
wkt = #{wkt},
</if>
<if test="gmtCreate!=null">
gmt_create = #{gmtCreate},
</if>
<if test="gmtModified!=null">
gmt_modified = #{gmtModified},
</if>
</sql>
<!-- 通用查询映射结果 -->
<resultMap id="BaseLaneInfoMap" type="com.wanji.indicators.entity.BaseLaneInfo">
<id column="id" property="id"/>
<result column="code" property="code"/>
<result column="sort" property="sort"/>
<result column="type" property="type"/>
<result column="dir" property="dir"/>
<result column="turn" property="turn"/>
<result column="category" property="category"/>
<result column="cross_id" property="crossId"/>
<result column="rid" property="rid"/>
<result column="segment_id" property="segmentId"/>
<result column="length" property="length"/>
<result column="width" property="width"/>
<result column="wkt" property="wkt"/>
<result column="gmt_create" property="gmtCreate"/>
<result column="gmt_modified" property="gmtModified"/>
<result column="rid_level" property="ridLevel"/>
<result column="rid_wkt" property="ridWkt"/>
<result column="rid_length" property="ridLength"/>
</resultMap>
<!-- 查询表t_base_lane_info所有信息 -->
<select id="findAllBaseLaneInfo" resultMap="BaseLaneInfoMap">
SELECT a.id,b.`level` rid_level,b.wkt rid_wkt
from t_base_lane_info a
JOIN t_base_rid_info b
on a.rid=b.id
</select>
<!-- 根据主键id查询表t_base_lane_info信息 -->
<select id="findBaseLaneInfoByid" resultMap="BaseLaneInfoMap">
SELECT
<include refid="Base_Column_List"/>
FROM t_base_lane_info
WHERE id=#{id}
</select>
<!-- 根据条件查询表t_base_lane_info信息 -->
<select id="findBaseLaneInfoByCondition" resultMap="BaseLaneInfoMap">
SELECT
<include refid="Base_Column_List"/>
FROM t_base_lane_info
WHERE 1=1
<include refid="BaseLaneInfoByCondition" />
</select>
<!-- 根据主键id删除表t_base_lane_info信息 -->
<delete id="deleteBaseLaneInfoByid">
DELETE FROM
t_base_lane_info
WHERE id=#{id}
</delete>
<!-- 根据主键id更新表t_base_lane_info信息 -->
<update id="updateBaseLaneInfoByid" parameterType="com.wanji.indicators.entity.BaseLaneInfo">
UPDATE t_base_lane_info
<set>
<include refid="BaseLaneInfoSetColumns"/>
</set>
WHERE
id=#{id}
</update>
<!-- 新增表t_base_lane_info信息 -->
<insert id="addBaseLaneInfo">
INSERT INTO t_base_lane_info (
id
,code
,sort
,type
,dir
,turn
,category
,cross_id
,rid
,segment_id
,length
,width
,wkt
,gmt_create
,gmt_modified
) VALUES (
#{id}
,#{code}
,#{sort}
,#{type}
,#{dir}
,#{turn}
,#{category}
,#{crossId}
,#{rid}
,#{segmentId}
,#{length}
,#{width}
,#{wkt}
,#{gmtCreate}
,#{gmtModified}
)
</insert>
</mapper>
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="com.wanji.indicators.mapper.BaseRidInfoMapper">
<!-- 通用设置 -->
<!-- 通用查询列 -->
<sql id="Base_Column_List">
id, name, road_id, road_name, road_dir_id, start_cross_id, end_cross_id, out_dir, in_dir, start_angle, end_angle, direction, sort, trend, level, area_code, length, width, is_oneway, type, main_flag, wkt, sc_id, sc_name, sc_sort, gmt_create, gmt_modified
</sql>
<!-- 通用条件列 -->
<sql id="BaseRidInfoByCondition">
<if test="id!=null and id!=''">
AND id = #{id}
</if>
<if test="name!=null and name!=''">
AND name = #{name}
</if>
<if test="roadId!=null and roadId!=''">
AND road_id = #{roadId}
</if>
<if test="roadName!=null and roadName!=''">
AND road_name = #{roadName}
</if>
<if test="roadDirId!=null and roadDirId!=''">
AND road_dir_id = #{roadDirId}
</if>
<if test="startCrossId!=null and startCrossId!=''">
AND start_cross_id = #{startCrossId}
</if>
<if test="endCrossId!=null and endCrossId!=''">
AND end_cross_id = #{endCrossId}
</if>
<if test="outDir!=null and outDir!=''">
AND out_dir = #{outDir}
</if>
<if test="inDir!=null and inDir!=''">
AND in_dir = #{inDir}
</if>
<if test="startAngle!=null and startAngle!=''">
AND start_angle = #{startAngle}
</if>
<if test="endAngle!=null and endAngle!=''">
AND end_angle = #{endAngle}
</if>
<if test="direction!=null and direction!=''">
AND direction = #{direction}
</if>
<if test="sort!=null and sort!=''">
AND sort = #{sort}
</if>
<if test="trend!=null and trend!=''">
AND trend = #{trend}
</if>
<if test="level!=null and level!=''">
AND level = #{level}
</if>
<if test="areaCode!=null and areaCode!=''">
AND area_code = #{areaCode}
</if>
<if test="length!=null and length!=''">
AND length = #{length}
</if>
<if test="width!=null and width!=''">
AND width = #{width}
</if>
<if test="isOneway!=null and isOneway!=''">
AND is_oneway = #{isOneway}
</if>
<if test="type!=null and type!=''">
AND type = #{type}
</if>
<if test="mainFlag!=null and mainFlag!=''">
AND main_flag = #{mainFlag}
</if>
<if test="wkt!=null and wkt!=''">
AND wkt = #{wkt}
</if>
<if test="scId!=null and scId!=''">
AND sc_id = #{scId}
</if>
<if test="scName!=null and scName!=''">
AND sc_name = #{scName}
</if>
<if test="scSort!=null and scSort!=''">
AND sc_sort = #{scSort}
</if>
<if test="gmtCreate!=null">
AND gmt_create = #{gmtCreate}
</if>
<if test="gmtModified!=null">
AND gmt_modified = #{gmtModified}
</if>
</sql>
<!-- 通用设置列 -->
<sql id="BaseRidInfoSetColumns">
<if test="name!=null and name!=''">
name = #{name},
</if>
<if test="roadId!=null and roadId!=''">
road_id = #{roadId},
</if>
<if test="roadName!=null and roadName!=''">
road_name = #{roadName},
</if>
<if test="roadDirId!=null and roadDirId!=''">
road_dir_id = #{roadDirId},
</if>
<if test="startCrossId!=null and startCrossId!=''">
start_cross_id = #{startCrossId},
</if>
<if test="endCrossId!=null and endCrossId!=''">
end_cross_id = #{endCrossId},
</if>
<if test="outDir!=null and outDir!=''">
out_dir = #{outDir},
</if>
<if test="inDir!=null and inDir!=''">
in_dir = #{inDir},
</if>
<if test="startAngle!=null and startAngle!=''">
start_angle = #{startAngle},
</if>
<if test="endAngle!=null and endAngle!=''">
end_angle = #{endAngle},
</if>
<if test="direction!=null and direction!=''">
direction = #{direction},
</if>
<if test="sort!=null and sort!=''">
sort = #{sort},
</if>
<if test="trend!=null and trend!=''">
trend = #{trend},
</if>
<if test="level!=null and level!=''">
level = #{level},
</if>
<if test="areaCode!=null and areaCode!=''">
area_code = #{areaCode},
</if>
<if test="length!=null and length!=''">
length = #{length},
</if>
<if test="width!=null and width!=''">
width = #{width},
</if>
<if test="isOneway!=null and isOneway!=''">
is_oneway = #{isOneway},
</if>
<if test="type!=null and type!=''">
type = #{type},
</if>
<if test="mainFlag!=null and mainFlag!=''">
main_flag = #{mainFlag},
</if>
<if test="wkt!=null and wkt!=''">
wkt = #{wkt},
</if>
<if test="scId!=null and scId!=''">
sc_id = #{scId},
</if>
<if test="scName!=null and scName!=''">
sc_name = #{scName},
</if>
<if test="scSort!=null and scSort!=''">
sc_sort = #{scSort},
</if>
<if test="gmtCreate!=null">
gmt_create = #{gmtCreate},
</if>
<if test="gmtModified!=null">
gmt_modified = #{gmtModified},
</if>
</sql>
<!-- 通用查询映射结果 -->
<resultMap id="BaseRidInfoMap" type="com.wanji.indicators.entity.BaseRidInfo">
<id column="id" property="id"/>
<result column="name" property="name"/>
<result column="road_id" property="roadId"/>
<result column="road_name" property="roadName"/>
<result column="road_dir_id" property="roadDirId"/>
<result column="start_cross_id" property="startCrossId"/>
<result column="end_cross_id" property="endCrossId"/>
<result column="out_dir" property="outDir"/>
<result column="in_dir" property="inDir"/>
<result column="start_angle" property="startAngle"/>
<result column="end_angle" property="endAngle"/>
<result column="direction" property="direction"/>
<result column="sort" property="sort"/>
<result column="trend" property="trend"/>
<result column="level" property="level"/>
<result column="area_code" property="areaCode"/>
<result column="length" property="length"/>
<result column="width" property="width"/>
<result column="is_oneway" property="isOneway"/>
<result column="type" property="type"/>
<result column="main_flag" property="mainFlag"/>
<result column="wkt" property="wkt"/>
<result column="sc_id" property="scId"/>
<result column="sc_name" property="scName"/>
<result column="sc_sort" property="scSort"/>
<result column="gmt_create" property="gmtCreate"/>
<result column="gmt_modified" property="gmtModified"/>
</resultMap>
<!-- 查询表t_base_rid_info所有信息 -->
<select id="findAllBaseRidInfo" resultMap="BaseRidInfoMap">
SELECT
<include refid="Base_Column_List"/>
FROM t_base_rid_info
</select>
<!-- 根据主键id查询表t_base_rid_info信息 -->
<select id="findBaseRidInfoByid" resultMap="BaseRidInfoMap">
SELECT
<include refid="Base_Column_List"/>
FROM t_base_rid_info
WHERE id=#{id}
</select>
<!-- 根据条件查询表t_base_rid_info信息 -->
<select id="findBaseRidInfoByCondition" resultMap="BaseRidInfoMap">
SELECT
<include refid="Base_Column_List"/>
FROM t_base_rid_info
WHERE 1=1
<include refid="BaseRidInfoByCondition" />
</select>
<!-- 根据主键id删除表t_base_rid_info信息 -->
<delete id="deleteBaseRidInfoByid">
DELETE FROM
t_base_rid_info
WHERE id=#{id}
</delete>
<!-- 根据主键id更新表t_base_rid_info信息 -->
<update id="updateBaseRidInfoByid" parameterType="com.wanji.indicators.entity.BaseRidInfo">
UPDATE t_base_rid_info
<set>
<include refid="BaseRidInfoSetColumns"/>
</set>
WHERE
id=#{id}
</update>
<!-- 新增表t_base_rid_info信息 -->
<insert id="addBaseRidInfo">
INSERT INTO t_base_rid_info (
id
,name
,road_id
,road_name
,road_dir_id
,start_cross_id
,end_cross_id
,out_dir
,in_dir
,start_angle
,end_angle
,direction
,sort
,trend
,level
,area_code
,length
,width
,is_oneway
,type
,main_flag
,wkt
,sc_id
,sc_name
,sc_sort
,gmt_create
,gmt_modified
) VALUES (
#{id}
,#{name}
,#{roadId}
,#{roadName}
,#{roadDirId}
,#{startCrossId}
,#{endCrossId}
,#{outDir}
,#{inDir}
,#{startAngle}
,#{endAngle}
,#{direction}
,#{sort}
,#{trend}
,#{level}
,#{areaCode}
,#{length}
,#{width}
,#{isOneway}
,#{type}
,#{mainFlag}
,#{wkt}
,#{scId}
,#{scName}
,#{scSort}
,#{gmtCreate}
,#{gmtModified}
)
</insert>
</mapper>
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context"
xsi:schemaLocation="
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-4.1.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-4.1.xsd
">
<import resource="classpath:spring-mybatis.xml" />
<!-- 包自动扫描 -->
<context:component-scan base-package="com.wanji.indicators"/>
</beans>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns:context="http://www.springframework.org/schema/context"
xmlns:mybatis="http://mybatis.org/schema/mybatis-spring"
xsi:schemaLocation="
http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-4.1.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-4.1.xsd
http://mybatis.org/schema/mybatis-spring http://mybatis.org/schema/mybatis-spring.xsd">
<!-- 加载配置文件 -->
<context:property-placeholder location="classpath:db.properties"/>
<!-- 配置数据源 -->
<bean id="dataSource" class="com.alibaba.druid.pool.DruidDataSource" init-method="init" destroy-method="close">
<property name="driverClassName" value="${jdbc.driver}"/>
<property name="url" value="${jdbc.url}"/>
<property name="username" value="${jdbc.username}"/>
<property name="password" value="${jdbc.password}"/>
<property name="initialSize" value="1"/>
<property name="minIdle" value="1"/>
<property name="maxActive" value="20"/>
</bean>
<bean id="sqlSessionFactory" class="org.mybatis.spring.SqlSessionFactoryBean">
<property name="dataSource" ref="dataSource" />
<property name="mapperLocations" value="classpath*:mapper/*.xml" />
<property name="transactionFactory">
<bean class="org.apache.ibatis.transaction.managed.ManagedTransactionFactory" />
</property>
</bean>
<mybatis:scan base-package="com.wanji.mapper" />
</beans>
\ No newline at end of file
package test;
public class ApplicationContextTest {
public static void main(String[] args) {
// ApplicationContext beanConf = new ClassPathXmlApplicationContext("spring-container.xml");
// FlowService flowService = beanConf.getBean(FlowServiceImpl.class);
// List<FlowData> users = flowService.findAll();
// System.out.println(users);
}
}
\ No newline at end of file
package test;
import com.baomidou.mybatisplus.annotation.DbType;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.core.toolkit.StringPool;
import com.baomidou.mybatisplus.generator.AutoGenerator;
import com.baomidou.mybatisplus.generator.InjectionConfig;
import com.baomidou.mybatisplus.generator.config.*;
import com.baomidou.mybatisplus.generator.config.po.TableInfo;
import com.baomidou.mybatisplus.generator.config.rules.NamingStrategy;
import com.baomidou.mybatisplus.generator.engine.FreemarkerTemplateEngine;
import java.util.ArrayList;
import java.util.List;
/**
* @ClassName:Generator
* @Description: Code auto-generator
*/
public class CodeGenerator {
static String baseOutputDir = "D:\\workspace\\flink-traffic-indicators-compute";
public static void main(String[] args) {
// Output location: System.getProperty("user.dir") resolves to the current project's real path
// String outputDir = System.getProperty("user.dir") + "/src/main/java";
String outputDir = baseOutputDir + "/src/main/java";
// Table names, case-sensitive
String[] tableNames = new String[] {"t_base_lane_info"};
// Database URL
String url = "jdbc:mysql://10.102.1.182:3306/t_roadnetwork_changsha?useUnicode=true&characterEncoding=utf-8";
// Username
String userName = "root";
// Password
String password = "Wanji300552";
// Parent package
String parentPackage = "com.wanji.indicators";
// Table-name prefix to strip
String prefixTable = "t_";
generate(outputDir, tableNames, url, userName, password, parentPackage, prefixTable);
}
/**
* @param outputDir output directory
* @param tableNames table names
* @param url database URL
* @param userName username
* @param password password
* @param parentPackage parent package
* @param prefixTable table-name prefix to strip
*/
public static void generate(String outputDir, String[] tableNames, String url, String userName,
String password, String parentPackage, String prefixTable) {
// =============== Global config ==================
GlobalConfig gc = new GlobalConfig();
gc.setOutputDir(outputDir)
.setActiveRecord(true) // ActiveRecord support: entities just extend Model to get rich CRUD
.setAuthor("fengyi") // author name
.setFileOverride(true) // overwrite existing files
.setIdType(IdType.AUTO) // primary-key strategy
.setBaseResultMap(true) // generate BaseResultMap in the SQL mapping file
.setBaseColumnList(true) // generate the Base_Column_List SQL fragment
.setServiceName("%sService") // service interface name pattern
.setMapperName("%sMapper")
.setOpen(false)
;
// ================= Data source config ===============
DataSourceConfig dsc = new DataSourceConfig();
dsc.setDbType(DbType.MYSQL)
.setDriverName("com.mysql.cj.jdbc.Driver");
dsc.setUrl(url)
.setUsername(userName)
.setPassword(password);
// ================= Package config ===================
PackageConfig pc = new PackageConfig();
pc.setParent(parentPackage) // parent package path
// .setModuleName("base") // business module sub-package
.setMapper("mapper")
.setEntity("entity")
.setService("service")
//.setServiceImpl("service.impl"); // impl package is generated automatically; optional
//.setController("web.controller")
;
// ================== Custom config =================
InjectionConfig cfg = new InjectionConfig() {
@Override
public void initMap() {
// to do nothing
}
};
List<FileOutConfig> focList = new ArrayList<>();
// Redirect generated XML files into src/main/resources/mapper
focList.add(new FileOutConfig("/templates/mapper.xml.ftl") {
@Override
public String outputFile(TableInfo tableInfo) {
// Custom output file name
return baseOutputDir + "/src/main/resources/mapper/"
+ tableInfo.getEntityName() + "Mapper" + StringPool.DOT_XML;
}
});
cfg.setFileOutConfigList(focList);
// =================== Strategy config ==================
StrategyConfig strategy = new StrategyConfig();
strategy.setNaming(NamingStrategy.underline_to_camel) // table naming: underscore to camelCase
.setColumnNaming(NamingStrategy.underline_to_camel) // column naming: underscore to camelCase
.setInclude(tableNames) // tables to generate
.setCapitalMode(true) // global uppercase naming (mind ORACLE)
.setTablePrefix(prefixTable) // strip the table-name prefix
// .setFieldPrefix(pc.getModuleName() + "_") // strip a field prefix
// .setSuperEntityClass("com.maoxs.pojo") // entity superclass
// .setSuperControllerClass("com.maoxs.controller") // controller superclass
// .setSuperEntityColumns("id") // columns owned by the superclass
.setEntityLombokModel(true) // use Lombok in entities
.setControllerMappingHyphenStyle(true) // hyphenated controller request mappings
.setRestControllerStyle(true)
;
// Auto-fill fields: instead of hand-setting e.g. create/update time on every insert, configure fill rules once
// List<TableFill> list = new ArrayList<TableFill>();
// TableFill tableFill1 = new TableFill("register_date",FieldFill.INSERT);
// TableFill tableFill2 = new TableFill("update_time",FieldFill.INSERT_UPDATE);
// list.add(tableFill1);
// list.add(tableFill2);
// strategy.setTableFillList(list);
// ================== Custom template config: defaults ship in mybatis-plus/src/main/resources/templates ======================
// Place overrides under this project's src/main/resources/templates; default names can be left unset or customized
TemplateConfig tc = new TemplateConfig();
tc.setXml(null) // xml template
.setEntity("/templates/entity.java") // entity template
.setMapper("/templates/mapper.java") // mapper template
.setController(null) // controller template
.setService("/templates/service.java") // service template
.setServiceImpl("/templates/serviceImpl.java"); // serviceImpl template
// ==================== Generator config ===================
AutoGenerator mpg = new AutoGenerator();
mpg.setCfg(cfg)
.setTemplate(tc)
.setGlobalConfig(gc)
.setDataSource(dsc)
.setPackageInfo(pc)
.setStrategy(strategy)
.setTemplateEngine(new FreemarkerTemplateEngine()); // use the Freemarker engine; the pom must include its dependency!
mpg.execute();
System.out.println("MyBatis Plus Code Generator Finished!!");
}
}
/*
Navicat MySQL Data Transfer
Source Server : Localhost
Source Server Version : 50713
Source Host : localhost:3306
Source Database : test
Target Server Type : MYSQL
Target Server Version : 50713
File Encoding : 65001
Date: 2017-06-23 14:25:27
*/
SET FOREIGN_KEY_CHECKS=0;
-- ----------------------------
-- Table structure for user
-- ----------------------------
DROP TABLE IF EXISTS `user`;
CREATE TABLE `user` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`username` varchar(255) NOT NULL,
`password` varchar(255) NOT NULL,
`nick_name` varchar(255) DEFAULT NULL,
`sex` int(1) DEFAULT NULL,
`register_date` datetime DEFAULT NULL ON UPDATE CURRENT_TIMESTAMP,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8;
-- ----------------------------
-- Records of user
-- ----------------------------
INSERT INTO `user` VALUES ('1', '89921218@qq.com', '1ee04e0b1cb5af7367c80c22e42efd8b', '土豆', '1', '2017-06-23 14:24:23');
INSERT INTO `user` VALUES ('2', '2@qq.com', '1ee04e0b1cb5af7367c80c22e42efd8b', '土豆-2', '1', '2017-06-23 14:24:23');
INSERT INTO `user` VALUES ('3', '3@qq.com', '1ee04e0b1cb5af7367c80c22e42efd8b', '土豆-3', '1', '2017-06-23 14:24:23');
INSERT INTO `user` VALUES ('4', '4@qq.com', '1ee04e0b1cb5af7367c80c22e42efd8b', '土豆-4', '1', '2017-06-23 14:24:23');
INSERT INTO `user` VALUES ('5', '5@qq.com', '1ee04e0b1cb5af7367c80c22e42efd8b', '土豆-5', '1', '2017-06-23 14:24:23');
INSERT INTO `user` VALUES ('6', '6@qq.com', '1ee04e0b1cb5af7367c80c22e42efd8b', '土豆-6', '1', '2017-06-23 14:24:23');
INSERT INTO `user` VALUES ('7', '7@qq.com', '1ee04e0b1cb5af7367c80c22e42efd8b', '土豆-7', '1', '2017-06-23 14:24:23');
INSERT INTO `user` VALUES ('8', '8@qq.com', '1ee04e0b1cb5af7367c80c22e42efd8b', '土豆-8', '1', '2017-06-23 14:24:23');
INSERT INTO `user` VALUES ('9', '9@qq.com', '1ee04e0b1cb5af7367c80c22e42efd8b', '土豆-9', '1', '2017-06-23 14:24:23');
INSERT INTO `user` VALUES ('10', '10@qq.com', '1ee04e0b1cb5af7367c80c22e42efd8b', '土豆-10', '1', '2017-06-23 14:24:23');
SET FOREIGN_KEY_CHECKS=1;
package ${package.Entity};
<#list table.importPackages as pkg>
import ${pkg};
</#list>
<#if swagger2>
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
</#if>
<#if entityLombokModel>
import lombok.Data;
import lombok.EqualsAndHashCode;
</#if>
import com.baomidou.mybatisplus.annotation.TableName;
<#if entityLombokModel>
@Data
@EqualsAndHashCode(callSuper=false)
</#if>
<#if !swagger2>
@TableName("${table.name}")
</#if>
<#if swagger2>
@ApiModel(value="${entity}", description="${table.comment!}")
</#if>
<#if superEntityClass??>
public class ${entity} extends ${superEntityClass}<#if activeRecord><${entity}></#if> {
<#elseif activeRecord>
public class ${entity} extends Model<${entity}> {
<#else>
public class ${entity} implements Serializable {
</#if>
private static final long serialVersionUID = 1L;
<#-- ---------- BEGIN 字段循环遍历 ---------->
<#list table.fields as field>
<#if field.keyFlag>
<#assign keyPropertyName="${field.propertyName}"/>
</#if>
<#if field.comment!?length gt 0>
<#if swagger2>
@ApiModelProperty(value = "${field.comment}")
<#else>
/**
* ${field.comment}
*/
</#if>
</#if>
<#if !swagger2>
<#if field.keyFlag>
<#-- 主键 -->
<#if field.keyIdentityFlag>
@TableId(value = "${field.name}", type = IdType.AUTO)
<#elseif idType??>
@TableId(value = "${field.name}", type = IdType.${idType})
<#elseif field.convert>
@TableId("${field.name}")
</#if>
<#-- 普通字段 -->
<#elseif field.fill??>
<#-- ----- 存在字段填充设置 ----->
<#if field.convert>
@TableField(value = "${field.name}", fill = FieldFill.${field.fill})
<#else>
@TableField(fill = FieldFill.${field.fill})
</#if>
<#elseif field.convert>
@TableField("${field.name}")
</#if>
</#if>
<#-- 乐观锁注解 -->
<#if (versionFieldName!"") == field.name>
@Version
</#if>
<#-- 逻辑删除注解 -->
<#if (logicDeleteFieldName!"") == field.name>
@TableLogic
</#if>
private ${field.propertyType} ${field.propertyName};
</#list>
<#------------ END 字段循环遍历 ---------->
<#if !entityLombokModel>
<#list table.fields as field>
<#if field.propertyType == "boolean">
<#assign getprefix="is"/>
<#else>
<#assign getprefix="get"/>
</#if>
public ${field.propertyType} ${getprefix}${field.capitalName}() {
return ${field.propertyName};
}
<#if entityBuilderModel>
public ${entity} set${field.capitalName}(${field.propertyType} ${field.propertyName}) {
<#else>
public void set${field.capitalName}(${field.propertyType} ${field.propertyName}) {
</#if>
this.${field.propertyName} = ${field.propertyName};
<#if entityBuilderModel>
return this;
</#if>
}
</#list>
</#if>
<#if entityColumnConstant>
<#list table.fields as field>
public static final String ${field.name?upper_case} = "${field.name}";
</#list>
</#if>
<#if activeRecord>
@Override
protected Serializable pkVal() {
<#if keyPropertyName??>
return this.${keyPropertyName};
<#else>
return null;
</#if>
}
</#if>
<#if !entityLombokModel>
@Override
public String toString() {
return "${entity}{" +
<#list table.fields as field>
<#if field_index==0>
"${field.propertyName}=" + ${field.propertyName} +
<#else>
", ${field.propertyName}=" + ${field.propertyName} +
</#if>
</#list>
"}";
}
</#if>
}
package ${package.Entity};
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.Serializable;
import java.util.Date;
import javax.persistence.Id;
import javax.persistence.Entity;
import javax.persistence.Table;
<#list table.importPackages as pkg>
<#if pkg == "java.util.Date">
import ${pkg};
</#if>
</#list>
/**
* ${table.name} : ${table.comment!}
*/
@Data
@Entity
@Table(name = "${table.name}")
@ApiModel("${table.comment!}")
@JsonIgnoreProperties(value = {"hibernateLazyInitializer"})
public class ${entity} implements Serializable {
private static final long serialVersionUID = 1L;
<#-- ---------- 属性私有化 ---------->
<#list table.fields as field>
<#if field.keyFlag>
<#assign keyPropertyName="${field.propertyName}"/>
</#if>
<#if field.keyFlag>
<#-- 主键 -->
/**
* Primary key : ${field.name}, ${field.comment!}
*/
<#-- 乐观锁注解 -->
<#if (versionFieldName!"") == field.name>
@Version
</#if>
<#-- 逻辑删除注解 -->
<#if (logicDeleteFieldName!"") == field.name>
@TableLogic
</#if>
<#if field.propertyType == "LocalDateTime" || field.propertyType == "LocalDate">
@Column(name = "${field.name}")
@ApiModelProperty("${field.comment!}")
@JsonFormat(locale="zh", timezone="GMT+8", pattern="yyyy-MM-dd HH:mm:ss")
private Date ${field.propertyName};
</#if>
<#if field.propertyType != "LocalDateTime" && field.propertyType != "LocalDate">
@Id
@Column(name = "${field.name}")
@ApiModelProperty("${field.comment!}")
private ${field.propertyType} ${field.propertyName};
</#if>
<#-- 普通字段 -->
<#elseif !field.keyFlag>
/**
* ${field.name}, ${field.comment!}
*/
<#-- 乐观锁注解 -->
<#if (versionFieldName!"") == field.name>
@Version
</#if>
<#-- 逻辑删除注解 -->
<#if (logicDeleteFieldName!"") == field.name>
@TableLogic
</#if>
<#if field.propertyType == "LocalDateTime" || field.propertyType == "LocalDate">
@Column(name = "${field.name}")
@ApiModelProperty("${field.comment!}")
@JsonFormat(locale="zh", timezone="GMT+8", pattern="yyyy-MM-dd HH:mm:ss")
private Date ${field.propertyName};
</#if>
<#if field.propertyType != "LocalDateTime" && field.propertyType != "LocalDate">
@Column(name = "${field.name}")
@ApiModelProperty("${field.comment!}")
private ${field.propertyType} ${field.propertyName};
</#if>
</#if>
<#--&lt;#&ndash; 乐观锁注解 &ndash;&gt;-->
<#-- <#if (versionFieldName!"") == field.name>-->
<#-- @Version-->
<#-- </#if>-->
<#--&lt;#&ndash; 逻辑删除注解 &ndash;&gt;-->
<#-- <#if (logicDeleteFieldName!"") == field.name>-->
<#-- @TableLogic-->
<#-- </#if>-->
<#-- <#if field.propertyType == "LocalDateTime" || field.propertyType == "LocalDate">-->
<#-- @Column(name = "${field.name}")-->
<#-- @ApiModelProperty("${field.comment!}")-->
<#-- @JsonFormat(locale="zh", timezone="GMT+8", pattern="yyyy-MM-dd HH:mm:ss")-->
<#-- private Date ${field.propertyName};-->
<#-- </#if>-->
<#-- <#if field.propertyType != "LocalDateTime" && field.propertyType != "LocalDate">-->
<#-- @Column(name = "${field.name}")-->
<#-- @ApiModelProperty("${field.comment!}")-->
<#-- private ${field.propertyType} ${field.propertyName};-->
<#-- </#if>-->
</#list>
<#------------ 构造函数 ----------- -->
<#-- public ${entity}(<#list table.fields as field><#if field.propertyType == "LocalDateTime" || field.propertyType == "LocalDate">Date ${field.propertyName}</#if><#if field.propertyType != "LocalDateTime" && field.propertyType != "LocalDate">${field.propertyType} ${field.propertyName}</#if><#sep>,</#list>){-->
<#-- <#list table.fields as field>-->
<#-- this.${field.propertyName} = ${field.propertyName};-->
<#-- </#list>-->
<#-- }-->
<#-- -->
<#-- public ${entity}(){-->
<#-- }-->
<#------------ getter.setter封装 ---------->
<#--<#if !entityLombokModel>-->
<#-- <#list table.fields as field>-->
<#-- <#if field.propertyType == "boolean">-->
<#-- <#assign getprefix="is"/>-->
<#-- <#else>-->
<#-- <#assign getprefix="get"/>-->
<#-- </#if>-->
<#-- public <#if field.propertyType == "LocalDateTime" || field.propertyType == "LocalDate">Date</#if><#if field.propertyType != "LocalDateTime" && field.propertyType != "LocalDate">${field.propertyType}</#if> ${getprefix}${field.capitalName}() {-->
<#-- return ${field.propertyName};-->
<#-- }-->
<#-- <#if entityBuilderModel>-->
<#-- public ${entity} set${field.capitalName}(${field.propertyType} ${field.propertyName}) {-->
<#-- <#else>-->
<#-- public void set${field.capitalName}(<#if field.propertyType == "LocalDateTime" || field.propertyType == "LocalDate">Date</#if><#if field.propertyType != "LocalDateTime" && field.propertyType != "LocalDate">${field.propertyType}</#if> ${field.propertyName}) {-->
<#-- </#if>-->
<#-- this.${field.propertyName} = ${field.propertyName};-->
<#-- <#if entityBuilderModel>-->
<#-- return this;-->
<#-- </#if>-->
<#-- }-->
<#-- </#list>-->
<#--</#if>-->
<#------------- 重写toString() ----------------->
<#--<#if !entityLombokModel>-->
<#-- @Override-->
<#-- public String toString() {-->
<#-- return "${entity}{" +-->
<#-- <#list table.fields as field>-->
<#-- <#if field_index==0>-->
<#-- "${field.propertyName}=" + ${field.propertyName} +-->
<#-- <#else>-->
<#-- ", ${field.propertyName}=" + ${field.propertyName} +-->
<#-- </#if>-->
<#-- </#list>-->
<#-- "}";-->
<#-- }-->
<#--</#if>-->
}
package ${package.Entity};
import java.io.Serializable;
import java.util.Date;
<#list table.importPackages as pkg>
<#if pkg == "java.util.Date">
import ${pkg};
</#if>
</#list>
/**
* ${table.name} : ${table.comment!}
*/
@TableName("${table.name}")
@ApiModel("${table.comment!}")
public class ${entity} implements Serializable {
private static final long serialVersionUID = 1L;
<#-- ---------- 属性私有化 ---------->
<#list table.fields as field>
<#if field.keyFlag>
<#assign keyPropertyName="${field.propertyName}"/>
</#if>
<#if field.keyFlag>
<#-- 主键 -->
/**
* Primary key : ${field.name}, ${field.comment!}
*/
<#-- 普通字段 -->
<#elseif !field.keyFlag>
/**
* ${field.name}, ${field.comment!}
*/
</#if>
<#-- 乐观锁注解 -->
<#if (versionFieldName!"") == field.name>
@Version
</#if>
<#-- 逻辑删除注解 -->
<#if (logicDeleteFieldName!"") == field.name>
@TableLogic
</#if>
<#if field.propertyType == "LocalDateTime">
@TableField("${field.name}")
@ApiModelProperty("${field.comment!}")
private Date ${field.propertyName};
</#if>
<#if field.propertyType != "LocalDateTime">
@TableField("${field.name}")
@ApiModelProperty("${field.comment!}")
private ${field.propertyType} ${field.propertyName};
</#if>
</#list>
<#------------ 构造函数 ----------- -->
public ${entity}(<#list table.fields as field><#if field.propertyType == "LocalDateTime">Date ${field.propertyName}</#if><#if field.propertyType != "LocalDateTime">${field.propertyType} ${field.propertyName}</#if><#sep>,</#list>){
<#list table.fields as field>
this.${field.propertyName} = ${field.propertyName};
</#list>
}
public ${entity}(){
}
<#------------ getter.setter封装 ---------->
<#if !entityLombokModel>
<#list table.fields as field>
<#if field.propertyType == "boolean">
<#assign getprefix="is"/>
<#else>
<#assign getprefix="get"/>
</#if>
public <#if field.propertyType == "LocalDateTime">Date</#if><#if field.propertyType != "LocalDateTime">${field.propertyType}</#if> ${getprefix}${field.capitalName}() {
return ${field.propertyName};
}
<#if entityBuilderModel>
public ${entity} set${field.capitalName}(${field.propertyType} ${field.propertyName}) {
<#else>
public void set${field.capitalName}(<#if field.propertyType == "LocalDateTime">Date</#if><#if field.propertyType != "LocalDateTime">${field.propertyType}</#if> ${field.propertyName}) {
</#if>
this.${field.propertyName} = ${field.propertyName};
<#if entityBuilderModel>
return this;
</#if>
}
</#list>
</#if>
<#------------- 重写toString() ----------------->
<#if !entityLombokModel>
@Override
public String toString() {
return "${entity}{" +
<#list table.fields as field>
<#if field_index==0>
"${field.propertyName}=" + ${field.propertyName} +
<#else>
", ${field.propertyName}=" + ${field.propertyName} +
</#if>
</#list>
"}";
}
</#if>
}
package ${package.Mapper};
import ${package.Entity}.${entity};
import java.util.List;
import org.apache.ibatis.annotations.Param;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
public interface ${table.mapperName} extends BaseMapper<${entity}>{
/**
* Query all rows of table ${table.name}
*/
List<${entity}> findAll${entity}();
<#list table.fields as field>
<#if field.keyFlag>
/**
* Query table ${table.name} by primary key ${field.propertyName}
* @param ${field.propertyName}
*/
${entity} find${entity}By${field.propertyName}(@Param("${field.propertyName}") ${field.propertyType} ${field.propertyName});
</#if>
</#list>
/**
* Query table ${table.name} by condition
* @param ${table.entityPath}
*/
List<${entity}> find${entity}ByCondition(${entity} ${table.entityPath});
<#list table.fields as field>
<#if field.keyFlag>
/**
* Delete from table ${table.name} by primary key ${field.propertyName}
* @param ${field.propertyName}
*/
Integer delete${entity}By${field.propertyName}(@Param("${field.propertyName}") ${field.propertyType} ${field.propertyName});
</#if>
</#list>
<#list table.fields as field>
<#if field.keyFlag>
/**
* Update table ${table.name} by primary key ${field.propertyName}
* @param ${table.entityPath}
*/
Integer update${entity}By${field.propertyName}(${entity} ${table.entityPath});
</#if>
</#list>
<#list table.fields as field>
<#if field.keyFlag>
/**
* Insert a row into table ${table.name}
* @param ${table.entityPath}
*/
Integer add${entity}(${entity} ${table.entityPath});
</#if>
</#list>
}
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="${package.Mapper}.${table.mapperName}">
<!-- 通用设置 -->
<#if baseColumnList>
<!-- 通用查询列 -->
<sql id="Base_Column_List">
<#list table.commonFields as field>
${field.name},
</#list>
${table.fieldNames}
</sql>
<!-- 通用条件列 -->
<sql id="${entity}ByCondition">
<#list table.fields as field>
<#if field.propertyType == "LocalDateTime" || field.propertyType == "LocalDate"><#--生成普通字段 -->
<if test="${field.propertyName}!=null">
AND ${field.name} = ${r"#{"}${field.propertyName}${r"}"}
</if>
</#if>
<#if field.propertyType != "LocalDateTime" && field.propertyType != "LocalDate"><#--生成普通字段 -->
<if test="${field.propertyName}!=null and ${field.propertyName}!=''">
AND ${field.name} = ${r"#{"}${field.propertyName}${r"}"}
</if>
</#if>
</#list>
</sql>
<!-- 通用设置列 -->
<sql id="${entity}SetColumns">
<#list table.fields as field>
<#if !field.keyFlag><#--生成普通字段 -->
<#if field.propertyType == "LocalDateTime" || field.propertyType == "LocalDate">
<if test="${field.propertyName}!=null">
${field.name} = ${r"#{"}${field.propertyName}${r"}"},
</if>
</#if>
<#if field.propertyType != "LocalDateTime" && field.propertyType != "LocalDate">
<if test="${field.propertyName}!=null and ${field.propertyName}!=''">
${field.name} = ${r"#{"}${field.propertyName}${r"}"},
</if>
</#if>
</#if>
</#list>
</sql>
</#if>
<#if baseResultMap>
<!-- 通用查询映射结果 -->
<resultMap id="${entity}Map" type="${package.Entity}.${entity}">
<#list table.fields as field>
<#if field.keyFlag><#--生成主键排在第一位-->
<id column="${field.name}" property="${field.propertyName}"/>
</#if>
</#list>
<#list table.commonFields as field><#--生成公共字段 -->
<result column="${field.name}" property="${field.propertyName}"/>
</#list>
<#list table.fields as field>
<#if !field.keyFlag><#--生成普通字段 -->
<result column="${field.name}" property="${field.propertyName}"/>
</#if>
</#list>
</resultMap>
</#if>
<!-- 查询表${table.name}所有信息 -->
<select id="findAll${entity}" resultMap="${entity}Map">
SELECT
<include refid="Base_Column_List"/>
FROM ${table.name}
</select>
<#list table.fields as field>
<#if field.keyFlag>
<!-- 根据主键${field.propertyName}查询表${table.name}信息 -->
<select id="find${entity}By${field.propertyName}" resultMap="${entity}Map">
SELECT
<include refid="Base_Column_List"/>
FROM ${table.name}
WHERE ${field.name}=${r"#{"}${field.propertyName}${r"}"}
</select>
</#if>
</#list>
<!-- 根据条件查询表${table.name}信息 -->
<select id="find${entity}ByCondition" resultMap="${entity}Map">
SELECT
<include refid="Base_Column_List"/>
FROM ${table.name}
WHERE 1=1
<include refid="${entity}ByCondition" />
</select>
<#list table.fields as field>
<#if field.keyFlag>
<!-- 根据主键${field.propertyName}删除表${table.name}信息 -->
<delete id="delete${entity}By${field.propertyName}">
DELETE FROM
${table.name}
WHERE ${field.name}=${r"#{"}${field.propertyName}${r"}"}
</delete>
</#if>
</#list>
<#list table.fields as field>
<#if field.keyFlag>
<!-- 根据主键${field.propertyName}更新表${table.name}信息 -->
<update id="update${entity}By${field.propertyName}" parameterType="${package.Entity}.${entity}">
UPDATE ${table.name}
<set>
<include refid="${entity}SetColumns"/>
</set>
WHERE
<#list table.fields as field><#if field.keyFlag>${field.name}=${r"#{"}${field.propertyName}${r"}"}</#if></#list>
</update>
</#if>
</#list>
<#list table.fields as field>
<#if field.keyFlag>
<!-- 新增表${table.name}信息 -->
<insert id="add${entity}">
INSERT INTO ${table.name} (
<#list table.fields as field>
<#if field_index gt 0>,</#if>${field.name}
</#list>
) VALUES (
<#list table.fields as field>
<#if field_index gt 0>,</#if>${r"#{"}${field.propertyName}${r"}"}
</#list>
)
</insert>
</#if>
</#list>
</mapper>
package ${package.Service};
import ${package.Entity}.${entity};
import ${superServiceClassPackage};
import com.baomidou.mybatisplus.core.metadata.IPage;
import java.util.List;
/**
* <p>
* ${table.comment!} service interface
* </p>
*
* @author ${author}
* @since ${date}
*/
<#if kotlin>
interface ${table.serviceName} : ${superServiceClass}<${entity}>
<#else>
public interface ${table.serviceName} extends ${superServiceClass}<${entity}> {
}
</#if>
\ No newline at end of file
package ${package.ServiceImpl};
import ${package.Entity}.${entity};
import ${package.Mapper}.${table.mapperName};
import ${package.Service}.${table.serviceName};
import ${superServiceImplClassPackage};
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.StringUtils;
import javax.annotation.Resource;
import java.util.List;
/**
* <p>
* ${table.comment!} service implementation
* </p>
*
* @author ${author}
* @since ${date}
*/
@Service
<#if kotlin>
open class ${table.serviceImplName} : ${superServiceImplClass}<${table.mapperName}, ${entity}>(), ${table.serviceName} {
}
<#else>
public class ${table.serviceImplName} extends ${superServiceImplClass}<${table.mapperName}, ${entity}> implements ${table.serviceName} {
@Resource
private ${table.mapperName} ${table.entityPath}Mapper;
}
</#if>
\ No newline at end of file