Commit e19121e3 authored by zhoushiguang's avatar zhoushiguang

行人指标区域加载问题修改

parent ede5bef5
......@@ -46,10 +46,8 @@ public class PedestrianDataCache {
private Map<String, String> loadData(String file, int type) {
Map<String, String> retMap = new HashMap<>();
String filePath = PedestrianDataCache.class.getResource(file).getPath();
try (FileReader fr = new FileReader(filePath);
BufferedReader br = new BufferedReader(fr);
try (InputStream inputStream = PropertiesHelper.class.getResourceAsStream(file);
BufferedReader br = new BufferedReader(new InputStreamReader(inputStream, Charset.defaultCharset()));
) {
StringBuilder jsonBuilder = new StringBuilder();
String str = null;
......@@ -57,6 +55,7 @@ public class PedestrianDataCache {
while ((str = br.readLine()) != null) {
jsonBuilder.append(str);
}
log.info("加载行人区域信息,路径:{},数据:{}",file,jsonBuilder.toString());
if (jsonBuilder.length() > 0) {
JSONObject jsonObject = JSON.parseObject(jsonBuilder.toString());
JSONArray jsonArray = jsonObject.getJSONArray("features");
......
......@@ -8,6 +8,7 @@ import com.wanji.indicators.model.event.accident.AccidentShowModel;
import com.wanji.indicators.sink.AccidentEventFileSink;
import com.wanji.indicators.sink.EventFileSink;
import com.wanji.indicators.source.ConflictTurnSource;
import com.wanji.indicators.source.CongestionSource;
import com.wanji.indicators.source.ConsulAreaSource;
import com.wanji.indicators.util.PropertiesHelper;
import org.apache.flink.api.common.functions.FlatMapFunction;
......@@ -84,7 +85,7 @@ public class AccidentEventMainNew implements Serializable {
//匹配拥堵检测区域
// SingleOutputStreamOperator<AccidentFrameModel> checkCongestionArea = accidentFrameModelFilter
// .connect(env.addSource(new ConsulAreaSource()).broadcast())
// .connect(env.addSource(new CongestionSource()).broadcast())
// .flatMap(new AccidentCheckCongestionAreaCoFlatMap())
// .setParallelism(1)
// .name("事故检测-匹配拥堵检测区域");
......
package com.wanji.indicators.source;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.wanji.indicators.service.BaseLaneInfoService;
import com.wanji.indicators.service.impl.BaseLaneInfoServiceImpl;
import com.wanji.indicators.util.PropertiesHelper;
import lombok.extern.slf4j.Slf4j;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.kafka.clients.consumer.OffsetResetStrategy;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.asynchttpclient.AsyncHttpClient;
import org.asynchttpclient.BoundRequestBuilder;
import org.asynchttpclient.Dsl;
import org.asynchttpclient.ws.WebSocket;
import org.asynchttpclient.ws.WebSocketListener;
import org.asynchttpclient.ws.WebSocketUpgradeHandler;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import javax.xml.bind.DatatypeConverter;
import java.io.IOException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
/**
* @author fengyi
* @date 2023/10/22
* @description
*/
@Slf4j
public class LightStatusKafkaSource extends RichSourceFunction<String> {

    /** Cooperative cancellation flag; set to false by {@link #cancel()}. */
    private volatile boolean running = true;

    /** Kafka source for the traffic-light status topic, built in {@link #open(Configuration)}. */
    KafkaSource<String> lightStatusSource = null;

    /**
     * Builds the Kafka source for topic {@code cross_lights_status} using the
     * shared consumer properties from {@link PropertiesHelper}.
     *
     * <p>A random UUID is used as the consumer group id, so every task instance
     * reads independently from the latest offsets (no shared group progress).
     *
     * @param parameters Flink runtime configuration
     * @throws Exception if the parent open or property loading fails
     */
    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        PropertiesHelper instance = PropertiesHelper.getInstance();
        lightStatusSource = KafkaSource.<String>builder()
                .setProperties(instance.getConsumerProperties())
                .setProperty("auto.offset.commit", "true")
                .setProperty("auto.commit.interval.ms", "1000")
                .setProperty("commit.offsets.on.checkpoint", "false")
                .setBootstrapServers(instance.getProperties().getProperty("bootstrap.servers"))
                .setTopics("cross_lights_status")
                // random group id: each run starts fresh, no committed group offsets reused
                .setGroupId(UUID.randomUUID().toString())
                .setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST))
                .setDeserializer(KafkaRecordDeserializationSchema.valueOnly(StringDeserializer.class))
                .build();
    }

    /**
     * NOTE(review): this body is empty — {@code lightStatusSource} is built in
     * {@code open()} but never consumed here, so this source emits nothing.
     * A {@code KafkaSource} is normally attached via {@code env.fromSource(...)},
     * not wrapped in a {@code RichSourceFunction}; confirm whether this class is
     * still needed or should be removed in favor of a direct {@code fromSource} call.
     */
    @Override
    public void run(SourceContext<String> ctx) throws Exception {
    }

    /** Signals {@link #run(SourceContext)} to stop (no-op while run() is empty). */
    @Override
    public void cancel() {
        log.info("cancel function called");
        running = false;
    }
}
......@@ -631,7 +631,7 @@ FROM
tb.rid,
tb.in_dir,
tb.lane_turn,
CAST(ROUND(SUM( tb.traffic_flow * tb.flow_coefficient )) AS INT) traffic_flow,
CAST(ROUND(SUM( tb.traffic_flow * tb.flow_coefficient ),2) AS INT) traffic_flow,
AVG( tb.mean_v * tb.flow_coefficient) mean_v,
MAX( tb.queue_length ) queue_length,
AVG( tb.green_light_efficiency ) green_light_efficiency,
......
......@@ -13,7 +13,10 @@ import java.io.Serializable;
public class PedCrossingWalkIndicatorResult implements Serializable {
private String crossId;
private String rid;
private Integer ridDir ;
//行人绿灯时长
Integer pedGreenTime;
//绿灯开始时间
long startTime ;
String startTimeStr;
......@@ -28,7 +31,6 @@ public class PedCrossingWalkIndicatorResult implements Serializable {
double accRate ;
//人数
private int pedCount;
//平均等待时长
double avgWaitTime ;
//平均等待人数
......
......@@ -19,12 +19,15 @@ public class PedCrossingWalkModel implements Serializable {
private double passTime;
//第一次进入时间
private long arriveTime;
private String arriveTimeStr;
//离开时间
private long leaveTime;
private String leaveTimeStr;
//是否离开斑马线区域标记
private boolean isLeave;
//当前时间
private long nowTime;
private String nowTimeStr;
//实时速度
private double speed;
//初始进入时速度
......@@ -35,6 +38,8 @@ public class PedCrossingWalkModel implements Serializable {
private long acceleratedPassDuration;
//加速通过开始时间
private long acceleratedPassStartTime;
//加速开始速度
private double accelerateStartSpeed;
//经度
private double lng;
//纬度
......
package com.wanji.indicators.task.pedestrian.stream;
import com.alibaba.fastjson.JSONObject;
import com.wanji.indicators.event.cross.conflictpoint.func.ConflictLaneInfoCoFlatMap;
import com.wanji.indicators.event.stream.func.CrossFrameFlatMap;
import com.wanji.indicators.model.CrossFrameModel;
import com.wanji.indicators.model.CrossRidTurnLampStatusModel;
import com.wanji.indicators.source.LaneInfoSource;
import com.wanji.indicators.source.websocket.LightStatusWebSocketSource;
import com.wanji.indicators.task.pedestrian.service.func.PedIndicationProcessFunction;
import com.wanji.indicators.task.pedestrian.service.func.RefPedRidInfoCoFlatMap;
import com.wanji.indicators.task.pedestrian.service.func.TrafficLightStatusFlatMap;
import com.wanji.indicators.task.pedestrian.service.func.TrafficLightStatusInfoCoFlatMap;
import com.wanji.indicators.task.pedestrian.service.func.*;
import com.wanji.indicators.task.pedestrian.service.model.PedCrossingWalkIndicatorResult;
import com.wanji.indicators.util.PropertiesHelper;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.functions.FilterFunction;
import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.connector.base.DeliveryGuarantee;
import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
import org.apache.flink.connector.kafka.sink.KafkaSink;
import org.apache.flink.connector.kafka.source.KafkaSource;
import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
import org.apache.flink.connector.kafka.source.reader.deserializer.KafkaRecordDeserializationSchema;
......@@ -46,6 +47,7 @@ public class PedestrianMain {
PropertiesHelper instance = PropertiesHelper.getInstance();
Properties properties = instance.getProperties();
String topic = properties.getProperty("consumer.topic");
String kafkaServer = properties.getProperty("bootstrap.servers");
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
env.setParallelism(1);
......@@ -62,7 +64,7 @@ public class PedestrianMain {
.setProperty("auto.offset.commit", "true")
.setProperty("auto.commit.interval.ms", "1000")
.setProperty("commit.offsets.on.checkpoint", "false")
.setBootstrapServers(properties.getProperty("bootstrap.servers"))
.setBootstrapServers(kafkaServer)
.setTopics(topic)
.setGroupId(GROUP_ID + UUID.randomUUID().toString())
.setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST))
......@@ -76,12 +78,13 @@ public class PedestrianMain {
.setProperty("auto.offset.commit", "true")
.setProperty("auto.commit.interval.ms", "1000")
.setProperty("commit.offsets.on.checkpoint", "false")
.setBootstrapServers(properties.getProperty("bootstrap.servers"))
.setBootstrapServers(kafkaServer)
.setTopics("cross_lights_status")
.setGroupId(GROUP_ID + UUID.randomUUID().toString())
.setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST))
.setDeserializer(KafkaRecordDeserializationSchema.valueOnly(StringDeserializer.class))
.build();
DataStream<String> lightStatusStream = env
.fromSource(lightStatusSource, WatermarkStrategy.noWatermarks(), "cross_lights_status-data-source");
......@@ -103,13 +106,18 @@ public class PedestrianMain {
// });
//连接灯态数据源
ConnectedStreams<CrossFrameModel, Map<String,List<CrossRidTurnLampStatusModel>>> connStream = thinningDataStream.connect(turnLightStatusStream);
ConnectedStreams<CrossFrameModel, Map<String,List<CrossRidTurnLampStatusModel>>> connStream =
thinningDataStream.connect(turnLightStatusStream);
SingleOutputStreamOperator<CrossFrameModel> bindLightStatusStream =
connStream.flatMap(new TrafficLightStatusInfoCoFlatMap()).setParallelism(1).name("全域轨迹帧数据-路口灯态绑定");
connStream
.flatMap(new TrafficLightStatusInfoCoFlatMap())
.setParallelism(1)
.name("全域轨迹帧数据-路口灯态绑定");
SingleOutputStreamOperator<CrossFrameModel> joinLaneStream = bindLightStatusStream
.flatMap(new RefPedRidInfoCoFlatMap())
.connect(env.addSource(new LaneInfoSource()).broadcast())
.flatMap(new BindPedRidInfoCoFlatMap())
.setParallelism(1)
.name("全域轨迹帧数据-车道属性数据绑定");
......@@ -129,11 +137,17 @@ public class PedestrianMain {
try {
FlinkKafkaProducer producer = new FlinkKafkaProducer<>("analysis.pedestrian.indicators",
new KeyedSerializationSchemaWrapper<>(new SimpleStringSchema()),
instance.getProducerProperties(), FlinkKafkaProducer.Semantic.NONE);
//存储到kafka
toJsonStream.addSink(producer).name("行人指标计算-数据发送至kafka").setParallelism(1);
KafkaSink<String> sink = KafkaSink.<String>builder()
.setBootstrapServers(kafkaServer)
.setRecordSerializer(KafkaRecordSerializationSchema.builder()
.setTopic("analysis.pedestrian.indicators")
.setValueSerializationSchema(new SimpleStringSchema())
.build()
)
.setDeliverGuarantee(DeliveryGuarantee.AT_LEAST_ONCE)
.build();
toJsonStream.sinkTo(sink).name("行人指标计算-数据发送至kafka").setParallelism(1);
env.execute("行人指标计算");
} catch (Exception e) {
......
package com.wanji.indicators.util;
import org.joda.time.DateTime;
/**
* @author fengyi
* @date 2023/1/12
......@@ -7,6 +9,14 @@ package com.wanji.indicators.util;
*/
public class CarNumGenerator {
/**
 * Manual check: prints the start of the current day formatted as
 * {@code yyyy-MM-dd HH:mm:ss} via the project's DateUtil helper.
 */
public static void main(String[] args) {
    // The previous withMillisOfDay(0).minusHours(-4) result was dead code —
    // it was immediately overwritten by withTimeAtStartOfDay(), so it is removed.
    DateTime dateTime = new DateTime().withTimeAtStartOfDay();
    System.out.println(DateUtil.toDateTime(dateTime.getMillis(), DateUtil.YYYY_MM_DD_HH_MM_SS));
}
/**
* 中国各个地区的数组
*/
......@@ -45,8 +55,4 @@ public class CarNumGenerator {
return area + cityCode + sb;
}
public static void main(String[] args) {
String carNum = getCarNum();
System.out.println(carNum);
}
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment