Commit 13b53835 authored by duanruiming

Merge remote-tracking branch 'origin/master'

parents 05a6cf32 37b4ac10
package net.wanji.opt.common;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import net.wanji.opt.dto.PhaseEmptyResult;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndTimestamp;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.time.Duration;
import java.util.*;
/**
* Consumes data from Kafka within a given time range
*
* @author Kent HAN
* @date 2024/4/16 8:45
*/
public class KafkaConsumerUtil {
private final KafkaConsumer<String, String> consumer;
private final ObjectMapper objectMapper;
public KafkaConsumerUtil(String bootstrapServers, String groupId) {
Properties props = new Properties();
props.put("bootstrap.servers", bootstrapServers);
props.put("group.id", groupId);
props.put("enable.auto.commit", "true");
props.put("auto.commit.interval.ms", "1000");
props.put("key.deserializer", StringDeserializer.class.getName());
props.put("value.deserializer", StringDeserializer.class.getName());
this.consumer = new KafkaConsumer<>(props);
this.objectMapper = new ObjectMapper();
}
public List<PhaseEmptyResult> consumeEmptyPhaseForTimeRange(
String topic, int partition, long startTime, long endTime) {
List<PhaseEmptyResult> results = new ArrayList<>();
TopicPartition topicPartition = new TopicPartition(topic, partition);
consumer.assign(Collections.singletonList(topicPartition));
HashMap<TopicPartition, Long> timestampToSearch = new HashMap<>();
timestampToSearch.put(topicPartition, startTime);
OffsetAndTimestamp offsetAndTimestamp = consumer.offsetsForTimes(timestampToSearch).get(topicPartition);
if (offsetAndTimestamp == null) {
// No offset exists at or after startTime: close the consumer and return an empty list
consumer.close();
return results;
}
long startOffset = offsetAndTimestamp.offset();
// Start consuming from the resolved offset
consumer.seek(topicPartition, startOffset);
int emptyPollCount = 0; // counter of consecutive empty polls
int maxEmptyPolls = 10; // maximum number of consecutive empty polls before giving up
try {
boolean keepConsuming = true;
while (keepConsuming) {
ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
if (records.isEmpty()) {
emptyPollCount++; // no records returned: increment the empty-poll counter
if (emptyPollCount >= maxEmptyPolls) {
// reached the maximum number of empty polls, stop consuming
break;
}
} else {
emptyPollCount = 0; // records returned: reset the empty-poll counter
for (ConsumerRecord<String, String> record : records) {
long recordTime = record.timestamp();
if (recordTime >= startTime && recordTime <= endTime) {
PhaseEmptyResult phaseEmptyResult =
objectMapper.readValue(record.value(), PhaseEmptyResult.class);
results.add(phaseEmptyResult);
} else if (recordTime > endTime) {
keepConsuming = false;
break;
}
}
}
}
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
} finally {
consumer.close();
}
return results;
}
}
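// Hedged usage sketch (not part of the commit): reads PhaseEmptyResult messages from the last hour.
// The broker address and topic name are taken from the application.yml change in this merge; the
// group id and time range are invented. consumeEmptyPhaseForTimeRange closes its consumer, so each
// KafkaConsumerUtil instance is single-use.
package net.wanji.opt.common;
import net.wanji.opt.dto.PhaseEmptyResult;
import java.util.List;
public class KafkaConsumerUtilExample {
    public static void main(String[] args) {
        KafkaConsumerUtil util = new KafkaConsumerUtil("10.102.1.182:9092", "example-group");
        long end = System.currentTimeMillis();
        long start = end - 60 * 60 * 1000L; // query the last hour
        List<PhaseEmptyResult> results =
                util.consumeEmptyPhaseForTimeRange("phase.emptiness.analysis", 0, start, end);
        results.forEach(r -> System.out.println(r.getCrossId() + " duration=" + r.getDuration()));
    }
}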
package net.wanji.opt.dto;
import lombok.Data;
/**
* Kafka entity for phase empty-release (wasted green) results
*/
@Data
public class PhaseEmptyResult {
private String crossId;
private String rid;
//approach lane direction
private String direction;
//green-light turn movement
private String turn;
//empty-release time of the turn / total green duration
private Double index;
private Long globalTimeStamp;
private Long startTime;
private Long endTime;
private Integer duration;
// detection time
private Long detectTime;
}
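// Illustration (not part of the commit): a hypothetical JSON payload that maps onto PhaseEmptyResult,
// deserialized with Jackson exactly as KafkaConsumerUtil does. Field values are invented; real
// messages on phase.emptiness.analysis may carry additional fields.
package net.wanji.opt.dto;
import com.fasterxml.jackson.databind.ObjectMapper;
public class PhaseEmptyResultJsonExample {
    public static void main(String[] args) throws Exception {
        String json = "{\"crossId\":\"0VHGU0745B0\",\"rid\":\"R1\",\"direction\":\"1\",\"turn\":\"left\","
                + "\"index\":0.35,\"startTime\":1713225600,\"endTime\":1713225660,"
                + "\"duration\":60,\"detectTime\":1713225660}";
        PhaseEmptyResult sample = new ObjectMapper().readValue(json, PhaseEmptyResult.class);
        System.out.println(sample.getCrossId() + " duration=" + sample.getDuration());
    }
}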
......@@ -4,6 +4,7 @@ import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.util.ObjectUtil;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.extern.slf4j.Slf4j;
import net.wanji.common.enums.CrossStatusEnum;
import net.wanji.common.enums.TurnConvertEnum;
......@@ -23,10 +24,13 @@ import net.wanji.databus.po.LaneInfoPO;
import net.wanji.databus.vo.RunningEvaluateCrossListVO;
import net.wanji.opt.bo.CrossNameBO;
import net.wanji.opt.bo.MetricsDetailBO;
import net.wanji.opt.common.KafkaConsumerUtil;
import net.wanji.opt.dto.PhaseEmptyResult;
import net.wanji.opt.service.RunningEvaluateService;
import net.wanji.opt.vo.*;
import org.jetbrains.annotations.NotNull;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.text.DecimalFormat;
......@@ -46,6 +50,13 @@ import java.util.stream.Stream;
@Slf4j
@Service
public class RunningEvaluateServiceImpl implements RunningEvaluateService {
@Value("${spring.kafka.bootstrap-servers}")
private String bootstrapServers;
@Value("${spring.kafka.empty-phase-topic}")
private String emptyPhaseTopic;
private final CrossDataRealtimeMapper crossDataRealtimeMapper;
private final CrossDataHistMapper crossDataHistMapper;
private final BaseCrossSectionMapper baseCrossSectionMapper;
......@@ -64,6 +75,8 @@ public class RunningEvaluateServiceImpl implements RunningEvaluateService {
SimpleDateFormat HOUR_SDF = new SimpleDateFormat("HH:mm");
SimpleDateFormat DAY_SDF = new SimpleDateFormat("yyyy-MM-dd");
ObjectMapper objectMapper = new ObjectMapper();
public RunningEvaluateServiceImpl(CrossDataRealtimeMapper crossDataRealtimeMapper,
CrossDataHistMapper crossDataHistMapper,
@Qualifier("baseCrossSectionMapper") BaseCrossSectionMapper baseCrossSectionMapper,
......@@ -299,6 +312,13 @@ public class RunningEvaluateServiceImpl implements RunningEvaluateService {
} else { // 路口级别
metricHistDTOList = crossDataHistMapper.selectMetricHistDTO(
crossId, startStamp, endStamp);
// Phase empty-release (wasted green) metric
long startTime = startDate.getTime();
long endTime = endDate.getTime();
KafkaConsumerUtil kafkaConsumerUtil = new KafkaConsumerUtil(bootstrapServers, "empty-phase-consumer");
List<PhaseEmptyResult> phaseEmptyResults =
kafkaConsumerUtil.consumeEmptyPhaseForTimeRange(emptyPhaseTopic, 0, startTime, endTime);
fillPhaseEmpty(metricHistDTOList, crossId, phaseEmptyResults);
}
RunningEvaluateMetricsDetailVO res = new RunningEvaluateMetricsDetailVO();
......@@ -321,6 +341,29 @@ public class RunningEvaluateServiceImpl implements RunningEvaluateService {
return res;
}
private void fillPhaseEmpty(List<MetricHistDTO> metricHistDTOList, String crossId,
List<PhaseEmptyResult> phaseEmptyResults) {
List<PhaseEmptyResult> crossResults = phaseEmptyResults.stream()
.filter(result -> crossId.equals(result.getCrossId()))
.collect(Collectors.toList());
for (MetricHistDTO metricHistDTO : metricHistDTOList) {
// Compute the end of the 5-minute aggregation window
Date endTime = new Date(metricHistDTO.getStartTime().getTime() + 5 * 60 * 1000); // plus 5 minutes
// Scan every PhaseEmptyResult and accumulate its duration into the matching window
for (PhaseEmptyResult phaseEmptyResult : crossResults) {
// detectTime is in epoch seconds, so convert to milliseconds before comparing
if (phaseEmptyResult.getDetectTime() * 1000 >= metricHistDTO.getStartTime().getTime()
&& phaseEmptyResult.getDetectTime() * 1000 <= endTime.getTime()) {
// accumulate the wasted-green duration
metricHistDTO.setEmptyPhase(metricHistDTO.getEmptyPhase() + phaseEmptyResult.getDuration());
}
}
}
}
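// Worked illustration of the window test above (values invented): MetricHistDTO start times are in
// milliseconds while PhaseEmptyResult.detectTime is in epoch seconds, hence the * 1000 conversion.
//   long windowStartMs = 1_712_016_000_000L;             // 5-minute window start (ms)
//   long windowEndMs   = windowStartMs + 5 * 60 * 1000;  // window end = start + 300 000 ms
//   long detectTimeSec = 1_712_016_120L;                 // detection 120 s into the window
//   boolean inWindow = detectTimeSec * 1000 >= windowStartMs && detectTimeSec * 1000 <= windowEndMs;
//   // inWindow is true, so this result's duration is added to the window's emptyPhase total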
private List<RunningEvaluateMetricsDetailVO.CrossMetrics> buildMetricsList(
List<MetricHistDTO> metricHistDTOList, Integer minutes) {
List<RunningEvaluateMetricsDetailVO.CrossMetrics> res = new ArrayList<>();
......@@ -352,6 +395,8 @@ public class RunningEvaluateServiceImpl implements RunningEvaluateService {
double sturationSum = 0.0;
double stopTimesSum = 0.0;
int delayTimeSum = 0;
double effusionRateSum = 0.0;
int emptyPhaseSum = 0;
for (MetricHistDTO metricHistDTO : dtoList) {
Integer flow = metricHistDTO.getFlow();
Double speed = metricHistDTO.getSpeed();
......@@ -359,12 +404,16 @@ public class RunningEvaluateServiceImpl implements RunningEvaluateService {
Double sturation = metricHistDTO.getSturation();
Double stopTimes = metricHistDTO.getStopTimes();
Integer delayTime = metricHistDTO.getDelayTime();
Double effusionRate = metricHistDTO.getEffusionRate();
Integer emptyPhase = metricHistDTO.getEmptyPhase();
if (flow != null) flowSum += flow;
if (speed != null) speedSum += speed;
if (capacity != null) capacitySum += capacity;
if (sturation != null) sturationSum += sturation;
if (stopTimes != null) stopTimesSum += stopTimes;
if (delayTime != null) delayTimeSum += delayTime;
if (effusionRate != null) effusionRateSum += effusionRate;
if (emptyPhase != null) emptyPhaseSum += emptyPhase;
}
int size = dtoList.size();
crossMetrics.setFlow(flowSum);
......@@ -373,6 +422,8 @@ public class RunningEvaluateServiceImpl implements RunningEvaluateService {
crossMetrics.setSturation(sturationSum / size);
crossMetrics.setStopTimes(stopTimesSum / size);
crossMetrics.setDelayTime(delayTimeSum / size);
crossMetrics.setEffusionRate(effusionRateSum / size);
crossMetrics.setEmptyPhase(emptyPhaseSum);
}
res.add(crossMetrics);
}
......
......@@ -1286,6 +1286,16 @@ public class TrendServiceImpl implements TrendService {
Double vehheadTime = po.getVehheadTime();
vo.setVehheadTime((int) Math.round(vehheadTime));
Double timeOccupancyDouble = po.getTimeOccupancy();
if (timeOccupancyDouble != null) {
vo.setTimeOccupancy((int) Math.round(timeOccupancyDouble * 100));
}
Double vehicleNumsRatioMeanDouble = po.getVehicleNumsRatioMean();
if (vehicleNumsRatioMeanDouble != null) {
vo.setVehicleNumsRatioMean((int) Math.round(vehicleNumsRatioMeanDouble * 100));
}
res.add(vo);
}
......
......@@ -72,6 +72,9 @@ public class MainlineSchemeAnalysisVO {
private String metricName;
@ApiModelProperty(value = "是否在干线路口评价下拉列表和底部曲线图下拉列表展示,0否 1是")
private Integer isShown;
private Integer isShownRight;
@ApiModelProperty(value = "是否在干线方案评价下拉列表展示,0否 1是")
private Integer isShownLeft = 1;
}
}
......@@ -37,7 +37,7 @@ public class MainlineSchemeEvaluateVO {
private String metricTime;
@ApiModelProperty(value = "数值")
private Integer value;
private Double value;
}
}
......@@ -97,5 +97,12 @@ public class RunningEvaluateMetricsDetailVO {
@ApiModelProperty(value = "平均延误(秒)")
private Integer delayTime ;
@ApiModelProperty(value = "溢流率")
private Double effusionRate ;
@ApiModelProperty(value = "绿灯空放时长")
private Integer emptyPhase ;
}
}
......@@ -77,5 +77,12 @@ public class TableQueryVO {
@ApiModelProperty(value = "平均车头时距")
private Integer vehheadTime;
@ApiModelProperty(value = "时间占有率")
private Integer timeOccupancy;
@ApiModelProperty(value = "空间占有率")
// spatial occupancy, i.e. the vehicle load ratio
private Integer vehicleNumsRatioMean;
}
}
......@@ -2,7 +2,7 @@ spring:
elasticsearch:
username: elastic
password: Wanji300552
uris: http://37.12.182.31:9200
uris: http://10.102.1.182:9200
application:
name: opt
datasource:
......@@ -23,13 +23,13 @@ spring:
driverClassName: com.mysql.cj.jdbc.Driver
holo:
type: com.alibaba.druid.pool.DruidDataSource
url: jdbc:mysql://10.102.1.182:3306/holo_roadnet?useUnicode=true&characterEncoding=UTF-8&zeroDateTimeBehavior=convertToNull&allowMultiQueries=true&sessionVariables=sql_mode='STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION'&useSSL=false&useCursorFetch=true
url: jdbc:mysql://10.102.1.182:3306/holo_roadnet_jn?useUnicode=true&characterEncoding=UTF-8&zeroDateTimeBehavior=convertToNull&allowMultiQueries=true&sessionVariables=sql_mode='STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION'&useSSL=false&useCursorFetch=true
username: root
password: Wanji300552
driverClassName: com.mysql.cj.jdbc.Driver
redis:
host: 10.102.1.182
port: 14728
port: 6379
password: Wanji300552
jedis:
pool:
......@@ -41,7 +41,8 @@ spring:
database: 3
kafka:
bootstrap-servers: 37.12.182.31:9092
bootstrap-servers: 10.102.1.182:9092
empty-phase-topic: phase.emptiness.analysis
#消费者配置
consumer:
max-poll-records: 1
......
DELETE FROM t_cross_dir_data_hist
WHERE start_time >= '2024-04-02 00:00:00' AND start_time <= '2024-04-03 00:00:00';
DELIMITER $$
CREATE PROCEDURE InsertRandomData()
BEGIN
DECLARE startDateTime DATETIME;
DECLARE endDateTime DATETIME;
DECLARE currentDateTime DATETIME;
DECLARE randomChar1 CHAR(1);
DECLARE randomChar2 CHAR(1);
DECLARE allChars VARCHAR(36);
SET startDateTime = '2024-04-02 00:00:00';
SET endDateTime = '2024-04-03 00:00:00';
SET currentDateTime = startDateTime;
SET allChars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789';
WHILE currentDateTime < endDateTime DO
SET randomChar1 = SUBSTRING(allChars, FLOOR(RAND() * 36) + 1, 1);
SET randomChar2 = SUBSTRING(allChars, FLOOR(RAND() * 36) + 1, 1);
INSERT INTO t_cross_dir_data_hist (id, dir_type, in_out_type, cross_id, length, status, traffic_index, start_time, capacity, duration, flow, speed, queue_length, stop_times, delay_time, sturation, no_stop_rate, one_stop_rate, two_stop_rate, three_stop_rate, batch_time, effusion_rate, green_light_efficiency)
VALUES
(CONCAT('0VHGU0745B0_1_', randomChar1, '_', randomChar2), 1, 1, '0VHGU0745B0', RAND() * 1000, FLOOR(RAND() * 5) + 1, 0.4 + (RAND() * 0.2), currentDateTime, FLOOR(5 + (RAND() * 16)), FLOOR(RAND() * 60) + 10, FLOOR(5 + (RAND() * 11)), RAND() * 120, FLOOR(10 + (RAND() * 21)), RAND() * 10, FLOOR(RAND() * 300) + 10, RAND(), RAND(), RAND(), RAND(), RAND(), UNIX_TIMESTAMP(currentDateTime), RAND(), RAND()),
(CONCAT('0VHGU0745B0_3_', randomChar1, '_', randomChar2), 3, 1, '0VHGU0745B0', RAND() * 1000, FLOOR(RAND() * 5) + 1, 0.4 + (RAND() * 0.2), currentDateTime, FLOOR(5 + (RAND() * 16)), FLOOR(RAND() * 60) + 10, FLOOR(5 + (RAND() * 11)), RAND() * 120, FLOOR(10 + (RAND() * 21)), RAND() * 10, FLOOR(RAND() * 300) + 10, RAND(), RAND(), RAND(), RAND(), RAND(), UNIX_TIMESTAMP(currentDateTime), RAND(), RAND()),
(CONCAT('0VHGU0745B0_5_', randomChar1, '_', randomChar2), 5, 1, '0VHGU0745B0', RAND() * 1000, FLOOR(RAND() * 5) + 1, 0.4 + (RAND() * 0.2), currentDateTime, FLOOR(5 + (RAND() * 16)), FLOOR(RAND() * 60) + 10, FLOOR(5 + (RAND() * 11)), RAND() * 120, FLOOR(10 + (RAND() * 21)), RAND() * 10, FLOOR(RAND() * 300) + 10, RAND(), RAND(), RAND(), RAND(), RAND(), UNIX_TIMESTAMP(currentDateTime), RAND(), RAND());
SET currentDateTime = ADDTIME(currentDateTime, '0:5:0');
END WHILE;
END$$
DELIMITER ;
CALL InsertRandomData();
DROP PROCEDURE IF EXISTS InsertRandomData;
DELETE FROM t_cross_data_hist
WHERE start_time >= '2024-04-02 00:00:00' AND start_time <= '2024-04-03 00:00:00';
DELIMITER $$
CREATE PROCEDURE InsertRandomData()
BEGIN
DECLARE startTime TIMESTAMP;
DECLARE endTime TIMESTAMP;
DECLARE currentTime TIMESTAMP;
SET startTime = '2024-04-02 00:00:00';
SET endTime = '2024-04-03 00:00:00';
SET currentTime = startTime;
WHILE currentTime <= endTime DO
INSERT INTO t_cross_data_hist (
cross_id, status, type, traffic_index, start_time, duration,
is_unbalance, is_spillover, is_congestion, unbalance_index,
spillover_index, congestion_index, unbalance_dirs, spillover_dirs,
congestion_dirs, flow, flow_rate, speed, queue_length, stop_times,
delay_time, sturation, batch_time, gmt_create, gmt_modified, clear_rate,
not_clear_car_nums, load_balance, green_light_efficiency, effusion_rate,
no_stop_rate, one_stop_rate, two_stop_rate, three_stop_rate, end_time
) VALUES (
'0VHGU0745B0', FLOOR(RAND() * 5), FLOOR(RAND() * 3), RAND() * 10,
currentTime, FLOOR(RAND() * 121), FLOOR(RAND() * 2), FLOOR(RAND() * 2),
FLOOR(RAND() * 2), RAND() * 100, RAND() * 100, RAND() * 100,
LPAD(FLOOR(RAND() * 10000), 4, '0'), LPAD(FLOOR(RAND() * 10000), 4, '0'),
LPAD(FLOOR(RAND() * 10000), 4, '0'), FLOOR(80 + RAND() * 31), RAND() * 100,
FLOOR(40 + RAND() * 21), FLOOR(90 + RAND() * 61), RAND() * 10, FLOOR(4 + RAND() * 7),
RAND() * 100, UNIX_TIMESTAMP(currentTime), NOW(), NOW(), RAND(),
FLOOR(RAND() * 501), RAND() * 100, RAND(), RAND(), RAND(), RAND(),
RAND(), RAND(), DATE_ADD(currentTime, INTERVAL FLOOR(RAND() * 121) MINUTE)
);
SET currentTime = ADDTIME(currentTime, '0:05:00');
END WHILE;
END$$
DELIMITER ;
CALL InsertRandomData();
DROP PROCEDURE IF EXISTS InsertRandomData;
......@@ -51,7 +51,9 @@ public class StrategyAndMetricsEnum {
CORD_RELIABILITY("15", "协调方案可靠性", "%", "cordReliability"),
CORD_QUEUE_RATIO("16", "协调路段排队空间占比", "%", "cordQueueRatio"),
UNCOORDINATE_PHASE_QUEUE("17", "非协调相位二次排队", "%", "uncoordinatePhaseQueue"),
TRVAL_TIME("18", "干线行程时间", "s", "trvalTime");
TRVAL_TIME("18", "干线行程时间", "s", "trvalTime"),
TRAFFIC_INDEX("19", "拥堵指数", "", "trafficIndex"),
EMPTY_PHASE("20", "绿灯空放时长", "m", "emptyPhase");
private final String code;
private final String description;
......@@ -122,7 +124,10 @@ public class StrategyAndMetricsEnum {
Metrics.AVERAGE_DELAY,
Metrics.MAX_QUEUE_LENGTH,
Metrics.STOP_TIMES,
Metrics.AVERAGE_SPEED
Metrics.AVERAGE_SPEED,
Metrics.TRAFFIC_INDEX,
Metrics.EMPTY_PHASE,
Metrics.EFFUSION_RATE
));
map.put(Strategy.BALANCE, Arrays.asList(
Metrics.GREEN_LIGHT_EFFICIENCY,
......
package net.wanji.common.utils.geo;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.StatusLine;
import org.apache.http.util.EntityUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.toilelibre.libe.curl.Curl.curl;
import static org.toilelibre.libe.curl.Curl.$;
/**
* GeoWebCache helper class
* @author guoliang.dong@wanji.com
*/
@Slf4j
public class GeoWebCacheUtils {
private String geoUrl;
private String geoUsername;
private String geoPassword;
private static GeoWebCacheUtils geoWebCacheUtils = null;
public static GeoWebCacheUtils getGeoServerRESTManager(String url,String username,String password) {
if (geoWebCacheUtils == null) {
geoWebCacheUtils = new GeoWebCacheUtils(url,username,password);
}
return geoWebCacheUtils;
}
public GeoWebCacheUtils(String url,String username,String password){
geoUrl=url;
geoUsername=username;
geoPassword=password;
}
/**
* Adds a workspace
*
* @param workspace workspace name
* @return boolean
*/
public boolean addWorkspace(String workspace) {
String cmd = "curl -u " + geoUsername + ":" + geoPassword + " -XPOST -H \"Content-type: text/xml\"\n" +
" -d \"<workspace><name>" + workspace + "</name></workspace>\"\n" +
" " + geoUrl + "/rest/workspaces";
HttpResponse curl = curl(cmd);
StatusLine statusLine = curl.getStatusLine();
return statusLine.getStatusCode()==201;
}
/**
* Creates an externally-stored GeoTIFF coverage store
*
* @param workspace workspace name
* @param store coverage store name
* @param fileurl path to the GeoTIFF file on the GeoServer host
* @return boolean
*/
public boolean addExternalGeotiffStore(String workspace ,String store,String fileurl){
String cmd = "curl -u " + geoUsername + ":" + geoPassword + " -XPUT -H \"Content-type: text/plain\"\n" +
" -d \"file://"+fileurl+"\"\n" +
" " + geoUrl + "/rest/workspaces/" + workspace + "/coveragestores/"+store+"/external.geotiff?configure=first&coverageName="+store;
HttpResponse curl = curl(cmd);
StatusLine statusLine = curl.getStatusLine();
return statusLine.getStatusCode()==201;
}
/**
* Gets the layers registered in GeoWebCache
*
* @return Map
*/
public Map<String, Object> getLayers() {
Map<String, Object> map = new HashMap();
String cmd = "curl -u " + geoUsername + ":" + geoPassword + " \"" + geoUrl + "/gwc/rest/layers\"";
List<String> shp = new ArrayList<>();
List<String> image = new ArrayList<>();
HttpResponse curl = curl(cmd);
HttpEntity entity = curl.getEntity();
if (entity != null) {
String result = null;
try {
result = EntityUtils.toString(entity, "UTF-8");
JSONArray jsonArray = JSONArray.parseArray(result);
for (Object o : jsonArray) {
String str = o.toString();
map.put(str,o);
}
} catch (IOException e) {
e.printStackTrace();
}
}
return map;
}
/**
* Seeds (tiles) the specified layer
* @param layer target layer, e.g. shp:test
* @param type seed (add tiles) / reseed (replace tiles) / truncate (remove tiles)
* @param srs SRID of the coordinate system
* @param zoomStart start zoom level, e.g. 1
* @param zoomStop stop zoom level, e.g. 15
* @param gridSetId grid set id
* @return boolean
*/
public boolean slice(String layer, String type,int srs,int zoomStart, int zoomStop,String gridSetId) {
int threadCount = 2;
String cmd = "curl -u " + geoUsername + ":" + geoPassword + " \"" + geoUrl + "/gwc/rest/seed/"+layer+".xml\""+ " -XPOST -H \"Content-type: text/xml\" -d '<seedRequest><name>" + layer +
"</name><srs><number>"+srs+"</number></srs><zoomStart>" + zoomStart + "</zoomStart><zoomStop>" + zoomStop + "</zoomStop><format>image/png</format><type>"+type+"</type><threadCount>" + threadCount + "</threadCount><gridSetId>"+gridSetId+"</gridSetId></seedRequest>' \""
+ geoUrl + "/gwc/rest/seed/" + layer + ".xml\"";
HttpResponse curl = curl(cmd);
StatusLine statusLine = curl.getStatusLine();
return statusLine.getStatusCode()==200;
}
/**
* Gets the seeding status for a layer
*
* @param layer target layer
* @return Map
*/
public Map getSliceType(String layer) {
Map map = new HashMap();
//All layers' seeding status: curl -u <user>:<password> -XGET http://localhost:8080/geoserver/gwc/rest/seed.json
//Seeding status of the specified layer:
String cmd = "curl -u " + geoUsername + ":" + geoPassword + " -XGET " + geoUrl + "/gwc/rest/seed/" + layer + ".json";
HttpResponse curl = curl(cmd);
StatusLine statusLine = curl.getStatusLine();
if (statusLine.getStatusCode()==200) {
HttpEntity entity = curl.getEntity();
try {
String result = EntityUtils.toString(entity, "UTF-8");
JSONObject jsonArray = JSONObject.parseObject(result);
map.put("res", jsonArray.getJSONArray("long-array-array"));
} catch (IOException e) {
e.printStackTrace();
}
}
return map;
}
/**
* Stops all running seed (tiling) tasks
*
* @return boolean
*/
public boolean stopAllSlice() {
String cmd = "curl -u " + geoUsername + ":" + geoPassword + " -d \"kill_all=all\" \"" + geoUrl + "/gwc/rest/seed\"";
HttpResponse curl = curl(cmd);
StatusLine statusLine = curl.getStatusLine();
return statusLine.getStatusCode()==200;
}
/**
* Stops the seed (tiling) tasks for the specified layer
*
* @return boolean
*/
public boolean stopSliceByLayer(String layer) {
String cmd = "curl -u " + geoUsername + ":" + geoPassword + " -d \"kill_all=all\" \"" + geoUrl + "/gwc/rest/seed/" + layer + "\"";
HttpResponse curl = curl(cmd);
StatusLine statusLine = curl.getStatusLine();
return statusLine.getStatusCode()==200;
}
public static void main(String[]args){
GeoWebCacheUtils geoServerRESTManager=GeoWebCacheUtils.getGeoServerRESTManager("http://127.0.0.1:8080/geoserver","admin","geoserver");
//System.out.println(geoServerRESTManager.addWorkspace("zhangwei"));
//Map<String, Object> layers=geoServerRESTManager.getLayers();
//System.out.println("图层数:"+layers.size());
//System.out.println(layers);
System.out.println(geoServerRESTManager.slice("sf:result8","seed",4326,14,14,"My_EPSG:4326"));
//System.out.println(geoServerRESTManager.stopSliceByLayer("gisc_3f0786e36794%3Apoi"));
//System.out.println(geoServerRESTManager.getSliceType("gisc_3f0786e36794%3Apoi"));
//System.out.println(geoServerRESTManager.getSliceType("gisc_3f0786e36794%3Apoi"));
//System.out.println(geoServerRESTManager.stopAllSlice());
//System.out.println(geoServerRESTManager.addExternalGeotiffStore("sf","result8","D:/ProgramData/GeoServer/data/sf/result8/result.tif"));
}
}
......@@ -4,6 +4,7 @@ package net.wanji.databus.dao.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import net.wanji.databus.dto.MetricHistDTO;
import net.wanji.databus.po.CrossDataHistPO;
import net.wanji.databus.po.CrossDataHistPOExt;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
......@@ -31,4 +32,5 @@ public interface CrossDataHistMapper extends BaseMapper<CrossDataHistPO> {
List<CrossDataHistPO> selectByStartEnd(int startStamp, int endStamp);
List<CrossDataHistPOExt> selectExtByCrossIdAndStartEnd(String crossId, int startStamp, int endStamp);
}
......@@ -73,4 +73,6 @@ public interface CrossDirDataHistMapper extends BaseMapper<CrossDirDataHistPO> {
);
List<CrossDirDataHistPO> selectByTimeSection(String boStartDayStr, String boEndDayStr, String startHourMinuteStr, String endHourMinuteStr);
List<CrossDirDataHistPOExt> selectExtByTimeSection(String boStartDayStr, String boEndDayStr, String startHourMinuteStr, String endHourMinuteStr);
}
......@@ -45,5 +45,7 @@ public class MetricHistDTO {
private Double greenLightEfficiency = 0.0;
@ApiModelProperty(value = "负载均衡度", notes = "")
private Double loadBalance = 0.0;
@ApiModelProperty(value = "绿灯空放时长", notes = "")
private Integer emptyPhase = 0;
}
......@@ -2,13 +2,17 @@ package net.wanji.databus.po;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import lombok.EqualsAndHashCode;
/**
* @author duanruiming
* @date 2023/03/12 20:49
*/
@EqualsAndHashCode(callSuper = true)
@Data
public class CrossDataHistPOExt extends CrossDataRealtimePO {
@ApiModelProperty(value = "三急一速数量", notes = "")
private Integer emergencyCount;
@ApiModelProperty(value = "绿灯空放时长", notes = "")
private Integer emptyPhase = 0;
}
......@@ -11,5 +11,8 @@ import lombok.Data;
public class CrossDirDataHistPOExt extends CrossDirDataRealtimePO {
@ApiModelProperty(value = "三急一速数量", notes = "")
public Integer emergencyCount;
@ApiModelProperty(value = "绿灯空放时长")
private Integer emptyPhase = 0;
}
package net.wanji.databus.po;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
/**
......@@ -15,5 +12,9 @@ public class CrossLaneDataHistPOExt extends CrossLaneDataRealTimePO{
private Integer dir;
// lane number, counted from the leftmost lane: 11, 12, 13...
private Integer sort;
// time occupancy
private Double timeOccupancy;
// spatial occupancy, i.e. the vehicle load ratio
private Double vehicleNumsRatioMean;
}
......@@ -123,4 +123,13 @@
order by batch_time
</select>
<select id="selectExtByCrossIdAndStartEnd" resultType="net.wanji.databus.po.CrossDataHistPOExt">
select <include refid="Base_Column_List"></include>
from t_cross_data_hist
where cross_id = #{crossId}
and batch_time <![CDATA[ >= ]]> #{startStamp}
and batch_time <![CDATA[ <= ]]> #{endStamp}
order by batch_time
</select>
</mapper>
\ No newline at end of file
......@@ -240,4 +240,12 @@
AND in_out_type = 1
</select>
<select id="selectExtByTimeSection" resultType="net.wanji.databus.po.CrossDirDataHistPOExt">
SELECT <include refid="Base_Column_List"></include>
FROM t_cross_dir_data_hist
WHERE batch_time BETWEEN UNIX_TIMESTAMP(STR_TO_DATE(concat(#{boStartDayStr}, #{startHourMinuteStr}), '%Y-%m-%d%H:%i'))
AND UNIX_TIMESTAMP(STR_TO_DATE(concat(#{boEndDayStr}, #{endHourMinuteStr}), '%Y-%m-%d%H:%i'))
AND in_out_type = 1
</select>
</mapper>
\ No newline at end of file
......@@ -95,7 +95,7 @@
<select id="selectByCrossIdAndTimeSpan" resultType="net.wanji.databus.po.CrossLaneDataHistPOExt">
SELECT t2.dir, t2.sort, t1.flow, t1.speed, t1.queue_length, t1.delay_time, t1.stop_times,
t1.vehhead_time, t1.batch_time
t1.vehhead_time, t1.batch_time, t1.time_occupancy, t1.vehicle_nums_ratio_mean
FROM t_lane_data_hist t1 JOIN t_base_lane_info t2 ON t1.id = t2.id
where t1.cross_id = #{crossId}
and batch_time <![CDATA[ >= ]]> #{startTimeStamp}
......