Commit e19121e3 authored by zhoushiguang's avatar zhoushiguang

行人指标区域加载问题修改

parent ede5bef5
...@@ -46,10 +46,8 @@ public class PedestrianDataCache { ...@@ -46,10 +46,8 @@ public class PedestrianDataCache {
private Map<String, String> loadData(String file, int type) { private Map<String, String> loadData(String file, int type) {
Map<String, String> retMap = new HashMap<>(); Map<String, String> retMap = new HashMap<>();
String filePath = PedestrianDataCache.class.getResource(file).getPath(); try (InputStream inputStream = PropertiesHelper.class.getResourceAsStream(file);
BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, Charset.defaultCharset()));
try (FileReader fr = new FileReader(filePath);
BufferedReader br = new BufferedReader(fr);
) { ) {
StringBuilder jsonBuilder = new StringBuilder(); StringBuilder jsonBuilder = new StringBuilder();
String str = null; String str = null;
...@@ -57,6 +55,7 @@ public class PedestrianDataCache { ...@@ -57,6 +55,7 @@ public class PedestrianDataCache {
while ((str = br.readLine()) != null) { while ((str = br.readLine()) != null) {
jsonBuilder.append(str); jsonBuilder.append(str);
} }
log.info("加载行人区域信息,路径:{},数据:{}",file,jsonBuilder.toString());
if (jsonBuilder.length() > 0) { if (jsonBuilder.length() > 0) {
JSONObject jsonObject = JSON.parseObject(jsonBuilder.toString()); JSONObject jsonObject = JSON.parseObject(jsonBuilder.toString());
JSONArray jsonArray = jsonObject.getJSONArray("features"); JSONArray jsonArray = jsonObject.getJSONArray("features");
......
...@@ -8,6 +8,7 @@ import com.wanji.indicators.model.event.accident.AccidentShowModel; ...@@ -8,6 +8,7 @@ import com.wanji.indicators.model.event.accident.AccidentShowModel;
import com.wanji.indicators.sink.AccidentEventFileSink; import com.wanji.indicators.sink.AccidentEventFileSink;
import com.wanji.indicators.sink.EventFileSink; import com.wanji.indicators.sink.EventFileSink;
import com.wanji.indicators.source.ConflictTurnSource; import com.wanji.indicators.source.ConflictTurnSource;
import com.wanji.indicators.source.CongestionSource;
import com.wanji.indicators.source.ConsulAreaSource; import com.wanji.indicators.source.ConsulAreaSource;
import com.wanji.indicators.util.PropertiesHelper; import com.wanji.indicators.util.PropertiesHelper;
import org.apache.flink.api.common.functions.FlatMapFunction; import org.apache.flink.api.common.functions.FlatMapFunction;
...@@ -84,7 +85,7 @@ public class AccidentEventMainNew implements Serializable { ...@@ -84,7 +85,7 @@ public class AccidentEventMainNew implements Serializable {
//匹配拥堵检测区域 //匹配拥堵检测区域
// SingleOutputStreamOperator<AccidentFrameModel> checkCongestionArea = accidentFrameModelFilter // SingleOutputStreamOperator<AccidentFrameModel> checkCongestionArea = accidentFrameModelFilter
// .connect(env.addSource(new ConsulAreaSource()).broadcast()) // .connect(env.addSource(new CongestionSource()).broadcast())
// .flatMap(new AccidentCheckCongestionAreaCoFlatMap()) // .flatMap(new AccidentCheckCongestionAreaCoFlatMap())
// .setParallelism(1) // .setParallelism(1)
// .name("事故检测-匹配拥堵检测区域"); // .name("事故检测-匹配拥堵检测区域");
......
package com.wanji.indicators.source;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.wanji.indicators.service.BaseLaneInfoService;
import com.wanji.indicators.service.impl.BaseLaneInfoServiceImpl;
import com.wanji.indicators.util.PropertiesHelper;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.asynchttpclient.AsyncHttpClient;
import org.asynchttpclient.BoundRequestBuilder;
import org.asynchttpclient.Dsl;
import org.asynchttpclient.ws.WebSocket;
import org.asynchttpclient.ws.WebSocketListener;
import org.asynchttpclient.ws.WebSocketUpgradeHandler;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import javax.xml.bind.DatatypeConverter;
import java.io.IOException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
/**
 * @author fengyi
 * @date 2023/10/22
 * @description Flink source intended to read traffic-light status messages from the
 * {@code cross_lights_status} Kafka topic.
 *
 * <p>NOTE(review): the {@code KafkaSource} built in {@link #open} is never polled in
 * {@link #run}, so this source currently emits nothing. Confirm whether this class is
 * still in use or has been superseded by the direct KafkaSource wiring elsewhere.
 */
@Slf4j
public class LightStatusKafkaSource extends RichSourceFunction<String> {

    // Cooperative-cancellation flag toggled by cancel(); run() currently does no
    // work, so the flag is set but never read.
    private volatile boolean running = true;

    // Kafka source configured in open(); see class note — currently unused by run().
    KafkaSource<String> lightStatusSource = null;

    /**
     * Builds the Kafka source for the {@code cross_lights_status} topic using the
     * consumer properties provided by {@code PropertiesHelper}.
     *
     * @param parameters Flink runtime configuration passed by the framework
     * @throws Exception if loading properties or building the source fails
     */
    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        PropertiesHelper instance = PropertiesHelper.getInstance();
        lightStatusSource = KafkaSource.<String>builder()
                .setProperties(instance.getConsumerProperties())
                .setProperty("auto.offset.commit", "true")
                .setProperty("auto.commit.interval.ms", "1000")
                .setProperty("commit.offsets.on.checkpoint", "false")
                .setBootstrapServers(instance.getProperties().getProperty("bootstrap.servers"))
                .setTopics("cross_lights_status")
                // Random group id: each restart joins a fresh consumer group, so offsets
                // are never shared across runs and consumption starts from LATEST.
                .setGroupId(UUID.randomUUID().toString())
                .setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST))
                .setDeserializer(KafkaRecordDeserializationSchema.valueOnly(StringDeserializer.class))
                .build();
    }

    /**
     * Intentionally empty in the original implementation: the configured
     * {@code lightStatusSource} is never consumed here, so no records are emitted.
     */
    @Override
    public void run(SourceContext<String> ctx) throws Exception {
    }

    /** Requests cancellation; run() currently ignores the flag since it does no work. */
    @Override
    public void cancel() {
        log.info("cancel function called");
        running = false;
    }
}
...@@ -631,7 +631,7 @@ FROM ...@@ -631,7 +631,7 @@ FROM
tb.rid, tb.rid,
tb.in_dir, tb.in_dir,
tb.lane_turn, tb.lane_turn,
CAST(ROUND(SUM( tb.traffic_flow * tb.flow_coefficient )) AS INT) traffic_flow, CAST(ROUND(SUM( tb.traffic_flow * tb.flow_coefficient ),2) AS INT) traffic_flow,
AVG( tb.mean_v * tb.flow_coefficient) mean_v, AVG( tb.mean_v * tb.flow_coefficient) mean_v,
MAX( tb.queue_length ) queue_length, MAX( tb.queue_length ) queue_length,
AVG( tb.green_light_efficiency ) green_light_efficiency, AVG( tb.green_light_efficiency ) green_light_efficiency,
......
...@@ -13,7 +13,10 @@ import java.io.Serializable; ...@@ -13,7 +13,10 @@ import java.io.Serializable;
public class PedCrossingWalkIndicatorResult implements Serializable { public class PedCrossingWalkIndicatorResult implements Serializable {
private String crossId; private String crossId;
private String rid;
private Integer ridDir ;
//行人绿灯时长
Integer pedGreenTime;
//绿灯开始时间 //绿灯开始时间
long startTime ; long startTime ;
String startTimeStr; String startTimeStr;
...@@ -28,7 +31,6 @@ public class PedCrossingWalkIndicatorResult implements Serializable { ...@@ -28,7 +31,6 @@ public class PedCrossingWalkIndicatorResult implements Serializable {
double accRate ; double accRate ;
//人数 //人数
private int pedCount; private int pedCount;
//平均等待时长 //平均等待时长
double avgWaitTime ; double avgWaitTime ;
//平均等待人数 //平均等待人数
......
...@@ -19,12 +19,15 @@ public class PedCrossingWalkModel implements Serializable { ...@@ -19,12 +19,15 @@ public class PedCrossingWalkModel implements Serializable {
private double passTime; private double passTime;
//第一次进入时间 //第一次进入时间
private long arriveTime; private long arriveTime;
private String arriveTimeStr;
//离开时间 //离开时间
private long leaveTime; private long leaveTime;
private String leaveTimeStr;
//是否离开斑马线区域标记 //是否离开斑马线区域标记
private boolean isLeave; private boolean isLeave;
//当前时间 //当前时间
private long nowTime; private long nowTime;
private String nowTimeStr;
//实时速度 //实时速度
private double speed; private double speed;
//初始进入时速度 //初始进入时速度
...@@ -35,6 +38,8 @@ public class PedCrossingWalkModel implements Serializable { ...@@ -35,6 +38,8 @@ public class PedCrossingWalkModel implements Serializable {
private long acceleratedPassDuration; private long acceleratedPassDuration;
//加速通过开始时间 //加速通过开始时间
private long acceleratedPassStartTime; private long acceleratedPassStartTime;
//加速开始速度
private double accelerateStartSpeed;
//经度 //经度
private double lng; private double lng;
//纬度 //纬度
......
package com.wanji.indicators.task.pedestrian.stream; package com.wanji.indicators.task.pedestrian.stream;
import com.alibaba.fastjson.JSONObject; import com.alibaba.fastjson.JSONObject;
import com.wanji.indicators.event.cross.conflictpoint.func.ConflictLaneInfoCoFlatMap;
import com.wanji.indicators.event.stream.func.CrossFrameFlatMap; import com.wanji.indicators.event.stream.func.CrossFrameFlatMap;
import com.wanji.indicators.model.CrossFrameModel; import com.wanji.indicators.model.CrossFrameModel;
import com.wanji.indicators.model.CrossRidTurnLampStatusModel; import com.wanji.indicators.model.CrossRidTurnLampStatusModel;
import com.wanji.indicators.source.LaneInfoSource; import com.wanji.indicators.source.LaneInfoSource;
import com.wanji.indicators.source.websocket.LightStatusWebSocketSource; import com.wanji.indicators.source.websocket.LightStatusWebSocketSource;
import com.wanji.indicators.task.pedestrian.service.func.PedIndicationProcessFunction; import com.wanji.indicators.task.pedestrian.service.func.*;
import com.wanji.indicators.task.pedestrian.service.func.RefPedRidInfoCoFlatMap;
import com.wanji.indicators.task.pedestrian.service.func.TrafficLightStatusFlatMap;
import com.wanji.indicators.task.pedestrian.service.func.TrafficLightStatusInfoCoFlatMap;
import com.wanji.indicators.task.pedestrian.service.model.PedCrossingWalkIndicatorResult; import com.wanji.indicators.task.pedestrian.service.model.PedCrossingWalkIndicatorResult;
import com.wanji.indicators.util.PropertiesHelper; import com.wanji.indicators.util.PropertiesHelper;
import org.apache.flink.api.common.eventtime.WatermarkStrategy; import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction; import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction; import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema; import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.connector.kafka.source.KafkaSource; import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer; import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema; import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
...@@ -46,6 +47,7 @@ public class PedestrianMain { ...@@ -46,6 +47,7 @@ public class PedestrianMain {
PropertiesHelper instance = PropertiesHelper.getInstance(); PropertiesHelper instance = PropertiesHelper.getInstance();
Properties properties = instance.getProperties(); Properties properties = instance.getProperties();
String topic = properties.getProperty("consumer.topic"); String topic = properties.getProperty("consumer.topic");
String kafkaServer = properties.getProperty("bootstrap.servers");
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1); env.setParallelism(1);
...@@ -62,7 +64,7 @@ public class PedestrianMain { ...@@ -62,7 +64,7 @@ public class PedestrianMain {
.setProperty("auto.offset.commit", "true") .setProperty("auto.offset.commit", "true")
.setProperty("auto.commit.interval.ms", "1000") .setProperty("auto.commit.interval.ms", "1000")
.setProperty("commit.offsets.on.checkpoint", "false") .setProperty("commit.offsets.on.checkpoint", "false")
.setBootstrapServers(properties.getProperty("bootstrap.servers")) .setBootstrapServers(kafkaServer)
.setTopics(topic) .setTopics(topic)
.setGroupId(GROUP_ID + UUID.randomUUID().toString()) .setGroupId(GROUP_ID + UUID.randomUUID().toString())
.setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST)) .setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST))
...@@ -76,12 +78,13 @@ public class PedestrianMain { ...@@ -76,12 +78,13 @@ public class PedestrianMain {
.setProperty("auto.offset.commit", "true") .setProperty("auto.offset.commit", "true")
.setProperty("auto.commit.interval.ms", "1000") .setProperty("auto.commit.interval.ms", "1000")
.setProperty("commit.offsets.on.checkpoint", "false") .setProperty("commit.offsets.on.checkpoint", "false")
.setBootstrapServers(properties.getProperty("bootstrap.servers")) .setBootstrapServers(kafkaServer)
.setTopics("cross_lights_status") .setTopics("cross_lights_status")
.setGroupId(GROUP_ID + UUID.randomUUID().toString()) .setGroupId(GROUP_ID + UUID.randomUUID().toString())
.setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST)) .setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST))
.setDeserializer(KafkaRecordDeserializationSchema.valueOnly(StringDeserializer.class)) .setDeserializer(KafkaRecordDeserializationSchema.valueOnly(StringDeserializer.class))
.build(); .build();
DataStream<String> lightStatusStream = env DataStream<String> lightStatusStream = env
.fromSource(lightStatusSource, WatermarkStrategy.noWatermarks(), "cross_lights_status-data-source"); .fromSource(lightStatusSource, WatermarkStrategy.noWatermarks(), "cross_lights_status-data-source");
...@@ -103,13 +106,18 @@ public class PedestrianMain { ...@@ -103,13 +106,18 @@ public class PedestrianMain {
// }); // });
//连接灯态数据源 //连接灯态数据源
ConnectedStreams<CrossFrameModel, Map<String,List<CrossRidTurnLampStatusModel>>> connStream = thinningDataStream.connect(turnLightStatusStream); ConnectedStreams<CrossFrameModel, Map<String,List<CrossRidTurnLampStatusModel>>> connStream =
thinningDataStream.connect(turnLightStatusStream);
SingleOutputStreamOperator<CrossFrameModel> bindLightStatusStream = SingleOutputStreamOperator<CrossFrameModel> bindLightStatusStream =
connStream.flatMap(new TrafficLightStatusInfoCoFlatMap()).setParallelism(1).name("全域轨迹帧数据-路口灯态绑定"); connStream
.flatMap(new TrafficLightStatusInfoCoFlatMap())
.setParallelism(1)
.name("全域轨迹帧数据-路口灯态绑定");
SingleOutputStreamOperator<CrossFrameModel> joinLaneStream = bindLightStatusStream SingleOutputStreamOperator<CrossFrameModel> joinLaneStream = bindLightStatusStream
.flatMap(new RefPedRidInfoCoFlatMap()) .connect(env.addSource(new LaneInfoSource()).broadcast())
.flatMap(new BindPedRidInfoCoFlatMap())
.setParallelism(1) .setParallelism(1)
.name("全域轨迹帧数据-车道属性数据绑定"); .name("全域轨迹帧数据-车道属性数据绑定");
...@@ -129,11 +137,17 @@ public class PedestrianMain { ...@@ -129,11 +137,17 @@ public class PedestrianMain {
try { try {
FlinkKafkaProducer producer = new FlinkKafkaProducer<>("analysis.pedestrian.indicators", KafkaSink<String> sink = KafkaSink.<String>builder()
new KeyedSerializationSchemaWrapper<>(new SimpleStringSchema()), .setBootstrapServers(kafkaServer)
instance.getProducerProperties(), FlinkKafkaProducer.Semantic.NONE); .setRecordSerializer(KafkaRecordSerializationSchema.builder()
//存储到kafka .setTopic("analysis.pedestrian.indicators")
toJsonStream.addSink(producer).name("行人指标计算-数据发送至kafka").setParallelism(1); .setValueSerializationSchema(new SimpleStringSchema())
.build()
)
.setDeliverGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
.build();
toJsonStream.sinkTo(sink).name("行人指标计算-数据发送至kafka").setParallelism(1);
env.execute("行人指标计算"); env.execute("行人指标计算");
} catch (Exception e) { } catch (Exception e) {
......
package com.wanji.indicators.util; package com.wanji.indicators.util;
import org.joda.time.DateTime;
/** /**
* @author fengyi * @author fengyi
* @date 2023/1/12 * @date 2023/1/12
...@@ -7,6 +9,14 @@ package com.wanji.indicators.util; ...@@ -7,6 +9,14 @@ package com.wanji.indicators.util;
*/ */
public class CarNumGenerator { public class CarNumGenerator {
public static void main(String[] args) {
DateTime currentDateTime = new DateTime();
DateTime dateTime = currentDateTime.withMillisOfDay(0).minusHours(-4);
dateTime = currentDateTime.withTimeAtStartOfDay();
System.out.println(DateUtil.toDateTime(dateTime.getMillis(),DateUtil.YYYY_MM_DD_HH_MM_SS));
}
/** /**
* 中国各个地区的数组 * 中国各个地区的数组
*/ */
...@@ -45,8 +55,4 @@ public class CarNumGenerator { ...@@ -45,8 +55,4 @@ public class CarNumGenerator {
return area + cityCode + sb; return area + cityCode + sb;
} }
public static void main(String[] args) {
String carNum = getCarNum();
System.out.println(carNum);
}
} }
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment