fix(test plan): resolve scheduled task execution issue
parent 490e656a40
commit 0401ec24fa
@@ -1,9 +1,8 @@
package io.metersphere.api.cache;

import io.metersphere.api.dto.automation.APIScenarioReportResult;
import io.metersphere.api.jmeter.JmeterThreadUtils;
import io.metersphere.api.jmeter.MessageCache;
import io.metersphere.base.domain.ApiDefinitionExecResult;
import io.metersphere.commons.constants.TestPlanApiExecuteStatus;
import io.metersphere.commons.constants.TestPlanResourceType;
import lombok.Getter;
@@ -29,7 +28,7 @@ public class TestPlanExecuteInfo {

private Map<String, String> apiCaseExecuteReportMap = new HashMap<>();
private Map<String, String> apiScenarioReportReportMap = new HashMap<>();
private Map<String,String> loadCaseReportIdMap = new HashMap<>();
private Map<String, String> loadCaseReportIdMap = new HashMap<>();

private boolean reportDataInDataBase;
@@ -40,7 +39,7 @@ public class TestPlanExecuteInfo {
private boolean isScenarioAllExecuted;
private boolean isLoadCaseAllExecuted;

public TestPlanExecuteInfo(String reportId,String creator){
public TestPlanExecuteInfo(String reportId, String creator) {
this.reportId = reportId;
this.creator = creator;
}
@@ -165,9 +164,12 @@ public class TestPlanExecuteInfo {
String executeResult = entry.getValue();
if (StringUtils.equalsIgnoreCase(executeResult, TestPlanApiExecuteStatus.RUNNING.name())) {
apiCaseExecInfo.put(resourceId, TestPlanApiExecuteStatus.FAILD.name());
if (StringUtils.isNotEmpty(apiCaseExecuteReportMap.get(resourceId))) {
JmeterThreadUtils.stop(apiCaseExecuteReportMap.get(resourceId));
}
}

if(apiCaseExecuteReportMap.containsKey(resourceId)){
if (apiCaseExecuteReportMap.containsKey(resourceId)) {
MessageCache.executionQueue.remove(apiCaseExecuteReportMap.get(resourceId));
}
}
@@ -176,9 +178,12 @@ public class TestPlanExecuteInfo {
String executeResult = entry.getValue();
if (StringUtils.equalsIgnoreCase(executeResult, TestPlanApiExecuteStatus.RUNNING.name())) {
apiScenarioCaseExecInfo.put(resourceId, TestPlanApiExecuteStatus.FAILD.name());
if (StringUtils.isNotEmpty(apiScenarioReportReportMap.get(resourceId))) {
JmeterThreadUtils.stop(apiScenarioReportReportMap.get(resourceId));
}
}

if(apiScenarioReportReportMap.containsKey(resourceId)){
if (apiScenarioReportReportMap.containsKey(resourceId)) {
MessageCache.executionQueue.remove(apiScenarioReportReportMap.get(resourceId));
}
}
@@ -186,10 +191,13 @@ public class TestPlanExecuteInfo {
String resourceId = entry.getKey();
String executeResult = entry.getValue();
if (StringUtils.equalsIgnoreCase(executeResult, TestPlanApiExecuteStatus.RUNNING.name())) {
if (StringUtils.isNotEmpty(loadCaseReportIdMap.get(resourceId))) {
JmeterThreadUtils.stop(loadCaseReportIdMap.get(resourceId));
}
loadCaseExecInfo.put(resourceId, TestPlanApiExecuteStatus.FAILD.name());
}

if(loadCaseReportIdMap.containsKey(resourceId)){
if (loadCaseReportIdMap.containsKey(resourceId)) {
MessageCache.executionQueue.remove(loadCaseReportIdMap.get(resourceId));
}
}

@@ -1,6 +1,7 @@
package io.metersphere.api.controller;

import io.metersphere.api.dto.scenario.request.BodyFile;
import io.metersphere.api.jmeter.JmeterThreadUtils;
import io.metersphere.api.service.ApiJmeterFileService;
import org.apache.dubbo.common.utils.CollectionUtils;
import org.springframework.http.HttpHeaders;
@@ -19,6 +20,11 @@ public class ApiJmeterFileController {
@Resource
private ApiJmeterFileService apiJmeterFileService;

@GetMapping("stop/{name}")
public String stop(@PathVariable String name) {
return JmeterThreadUtils.stop(name);
}

@PostMapping("download/files")
public ResponseEntity<byte[]> downloadBodyFiles(@RequestBody List<BodyFile> bodyFileList) {
byte[] bytes = new byte[10];
@@ -42,7 +48,7 @@ public class ApiJmeterFileController {

@GetMapping("download")
public ResponseEntity<byte[]> downloadJmeterFiles(@RequestParam("testId") String testId, @RequestParam("reportId") String reportId, @RequestParam("runMode") String runMode, @RequestParam("testPlanScenarioId") String testPlanScenarioId) {
byte[] bytes = apiJmeterFileService.downloadJmeterFiles(runMode,testId, reportId, testPlanScenarioId);
byte[] bytes = apiJmeterFileService.downloadJmeterFiles(runMode, testId, reportId, testPlanScenarioId);
return ResponseEntity.ok()
.contentType(MediaType.parseMediaType("application/octet-stream"))
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + testId + ".zip\"")

@@ -0,0 +1,31 @@
package io.metersphere.api.jmeter;

import io.metersphere.commons.utils.LogUtil;
import org.apache.commons.lang3.StringUtils;

public class JmeterThreadUtils {
    private final static String THREAD_SPLIT = " ";

    public static String stop(String name) {
        ThreadGroup currentGroup = Thread.currentThread().getThreadGroup();
        int noThreads = currentGroup.activeCount();
        Thread[] lstThreads = new Thread[noThreads];
        currentGroup.enumerate(lstThreads);
        StringBuilder threadNames = new StringBuilder();
        for (int i = 0; i < noThreads; i++) {
            if (StringUtils.isNotEmpty(lstThreads[i].getName()) && lstThreads[i].getName().startsWith(name)) {
                String threadName = StringUtils.substringBeforeLast(lstThreads[i].getName(), THREAD_SPLIT);
                if (StringUtils.isNotEmpty(threadName)) {
                    MessageCache.executionQueue.remove(threadName);
                }
                System.out.println("异常强制处理线程编号:" + i + " = " + lstThreads[i].getName());
                LogUtil.error("异常强制处理线程编号:" + i + " = " + lstThreads[i].getName());
                threadNames.append(lstThreads[i].getName()).append(";");
                lstThreads[i].interrupt();
            }
        }
        return threadNames.toString();
    }
}
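
Note on the new utility above: JmeterThreadUtils.stop(name) walks the current thread group, interrupts every live thread whose name starts with the given prefix, drops the matching entry from MessageCache.executionQueue, and returns the interrupted thread names joined by ";". Below is a minimal sketch of a call site; the class name, the reportId value and the cancellation context are illustrative assumptions, not code from this commit.

    package io.metersphere.api.jmeter;

    // Illustrative sketch only: how a caller (e.g. a scheduled test-plan run
    // that detects a hung execution) might force-stop matching JMeter threads.
    public class JmeterThreadUtilsUsageSketch {
        public static void cancelHungRun(String reportId) { // reportId is an assumed example value
            // Interrupts every live thread whose name starts with reportId and
            // removes the matching key from MessageCache.executionQueue.
            String stoppedNames = JmeterThreadUtils.stop(reportId);
            System.out.println("Interrupted threads: " + stoppedNames);
        }
    }
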
@@ -25,6 +25,7 @@ import io.metersphere.api.jmeter.MessageCache;
import io.metersphere.api.jmeter.ReportCounter;
import io.metersphere.api.jmeter.ResourcePoolCalculation;
import io.metersphere.api.parse.ApiImportParser;
import io.metersphere.api.service.task.NamedThreadFactory;
import io.metersphere.api.service.task.SerialScenarioExecTask;
import io.metersphere.base.domain.*;
import io.metersphere.base.mapper.*;
@@ -1167,9 +1168,9 @@ public class ApiAutomationService {
report = createScenarioReport(reportId, testPlanScenarioId, item.getName(), request.getTriggerMode(),
request.getExecuteType(), projectId, request.getReportUserID(), request.getConfig(), item.getId());
}
if(report != null && StringUtils.isNotEmpty(request.getTestPlanReportId())){
Map<String,String> scenarioReportIdMap = new HashMap<>();
scenarioReportIdMap.put(item.getId(),report.getId());
if (report != null && StringUtils.isNotEmpty(request.getTestPlanReportId())) {
Map<String, String> scenarioReportIdMap = new HashMap<>();
scenarioReportIdMap.put(item.getId(), report.getId());
TestPlanReportExecuteCatch.updateTestPlanExecuteResultInfo(request.getTestPlanReportId(), null, scenarioReportIdMap, null);
}
} else {
@@ -1241,7 +1242,6 @@ public class ApiAutomationService {
* @param serialReportId
*/
private void serial(Map<String, RunModeDataDTO> executeQueue, RunScenarioRequest request, String serialReportId) {
ExecutorService executorService = Executors.newFixedThreadPool(executeQueue.size());
SqlSession sqlSession = sqlSessionFactory.openSession(ExecutorType.BATCH);
ApiScenarioReportMapper batchMapper = sqlSession.getMapper(ApiScenarioReportMapper.class);
// For non-consolidated reports, build the execution queue first
@@ -1251,73 +1251,82 @@ public class ApiAutomationService {
report.setStatus(APITestStatus.Waiting.name());
batchMapper.insert(report);
}
sqlSession.flushStatements();
sqlSession.commit();
}
// Start serial execution
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
List<String> reportIds = new LinkedList<>();
// Record the environment parameters of the serial execution for the next scenario to use. <envId,<key,data>>
Map<String, Map<String, String>> executeEnvParams = new LinkedHashMap<>();
ApiTestEnvironmentService apiTestEnvironmentService = CommonBeanFactory.getBean(ApiTestEnvironmentService.class);
HashTreeUtil hashTreeUtil = new HashTreeUtil();
for (String key : executeQueue.keySet()) {
// Terminate execution
if (MessageCache.terminationOrderDeque.contains(key)) {
MessageCache.terminationOrderDeque.remove(key);
break;
}
MessageCache.executionQueue.put(key, System.currentTimeMillis());
reportIds.add(key);
APIScenarioReportResult report = executeQueue.get(key).getReport();
if (StringUtils.isNotEmpty(serialReportId)) {
report.setExecuteType(ExecuteType.Marge.name());
apiScenarioReportMapper.insert(report);
} else {
report.setStatus(APITestStatus.Running.name());
report.setCreateTime(System.currentTimeMillis());
report.setUpdateTime(System.currentTimeMillis());
apiScenarioReportMapper.updateByPrimaryKey(report);
}
try {
if (!executeEnvParams.isEmpty()) {
HashTree hashTree = executeQueue.get(key).getHashTree();
hashTreeUtil.setEnvParamsMapToHashTree(hashTree, executeEnvParams);
executeQueue.get(key).setHashTree(hashTree);
Thread.currentThread().setName("Scenario串行执行线程");
ExecutorService executorService = Executors.newFixedThreadPool(1, new NamedThreadFactory("串行执行等待线程池"));
try {
List<String> reportIds = new LinkedList<>();
// Record the environment parameters of the serial execution for the next scenario to use. <envId,<key,data>>
Map<String, Map<String, String>> executeEnvParams = new LinkedHashMap<>();
ApiTestEnvironmentService apiTestEnvironmentService = CommonBeanFactory.getBean(ApiTestEnvironmentService.class);
HashTreeUtil hashTreeUtil = new HashTreeUtil();
for (String key : executeQueue.keySet()) {
// Terminate execution
if (MessageCache.terminationOrderDeque.contains(key)) {
MessageCache.terminationOrderDeque.remove(key);
break;
}
Future<ApiScenarioReport> future = executorService.submit(new SerialScenarioExecTask(jMeterService, apiScenarioReportMapper, executeQueue.get(key), request));
future.get();
// If "stop on failure" is enabled, check the returned result status
if (request.getConfig().isOnSampleError()) {
ApiScenarioReport scenarioReport = apiScenarioReportMapper.selectByPrimaryKey(key);
if (scenarioReport == null || !scenarioReport.getStatus().equals("Success")) {
reportIds.remove(key);
break;
MessageCache.executionQueue.put(key, System.currentTimeMillis());
reportIds.add(key);
APIScenarioReportResult report = executeQueue.get(key).getReport();
if (StringUtils.isNotEmpty(serialReportId)) {
report.setExecuteType(ExecuteType.Marge.name());
apiScenarioReportMapper.insert(report);
} else {
report.setStatus(APITestStatus.Running.name());
report.setCreateTime(System.currentTimeMillis());
report.setUpdateTime(System.currentTimeMillis());
apiScenarioReportMapper.updateByPrimaryKey(report);
}
try {
if (!executeEnvParams.isEmpty()) {
HashTree hashTree = executeQueue.get(key).getHashTree();
hashTreeUtil.setEnvParamsMapToHashTree(hashTree, executeEnvParams);
executeQueue.get(key).setHashTree(hashTree);
}
Future<ApiScenarioReport> future = executorService.submit(new SerialScenarioExecTask(jMeterService, apiScenarioReportMapper, executeQueue.get(key), request));
future.get();
// If "stop on failure" is enabled, check the returned result status
if (request.getConfig().isOnSampleError()) {
ApiScenarioReport scenarioReport = apiScenarioReportMapper.selectByPrimaryKey(key);
if (scenarioReport == null || !scenarioReport.getStatus().equals("Success")) {
reportIds.remove(key);
break;
}
}
}

Map<String, Map<String, String>> envParamsMap = hashTreeUtil.getEnvParamsDataByHashTree(executeQueue.get(key).getHashTree(), apiTestEnvironmentService);
executeEnvParams = hashTreeUtil.mergeParamDataMap(executeEnvParams, envParamsMap);
} catch (Exception e) {
reportIds.remove(key);
MessageCache.executionQueue.remove(key);
LogUtil.error("执行终止:" + e.getMessage());
break;
Map<String, Map<String, String>> envParamsMap = hashTreeUtil.getEnvParamsDataByHashTree(executeQueue.get(key).getHashTree(), apiTestEnvironmentService);
executeEnvParams = hashTreeUtil.mergeParamDataMap(executeEnvParams, envParamsMap);
} catch (Exception e) {
reportIds.remove(key);
MessageCache.executionQueue.remove(key);
LogUtil.error("执行终止:" + e.getMessage());
break;
}
}
}
// Clean up the unexecuted queue
if (reportIds.size() < executeQueue.size()) {
List<String> removeList = executeQueue.entrySet().stream().filter(map -> !reportIds.contains(map.getKey()))
.map(map -> map.getKey()).collect(Collectors.toList());
ApiScenarioReportExample example = new ApiScenarioReportExample();
example.createCriteria().andIdIn(removeList);
apiScenarioReportMapper.deleteByExample(example);
}
// Update the consolidated report
if (StringUtils.isNotEmpty(serialReportId)) {
apiScenarioReportService.margeReport(serialReportId, reportIds);
executeQueue.clear();
// Clean up the unexecuted queue
if (reportIds.size() < executeQueue.size()) {
List<String> removeList = executeQueue.entrySet().stream().filter(map -> !reportIds.contains(map.getKey()))
.map(map -> map.getKey()).collect(Collectors.toList());
ApiScenarioReportExample example = new ApiScenarioReportExample();
example.createCriteria().andIdIn(removeList);
apiScenarioReportMapper.deleteByExample(example);
}
// Update the consolidated report
if (StringUtils.isNotEmpty(serialReportId)) {
apiScenarioReportService.margeReport(serialReportId, reportIds);
executeQueue.clear();
}

} catch (Exception e) {
LogUtil.error(e);
} finally {
executorService.shutdownNow();
}
}
});
@@ -1334,18 +1343,12 @@ public class ApiAutomationService {
SqlSession sqlSession = sqlSessionFactory.openSession(ExecutorType.BATCH);
ApiScenarioReportMapper batchMapper = sqlSession.getMapper(ApiScenarioReportMapper.class);
// Start parallel execution
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
for (String reportId : executeQueue.keySet()) {
// Save the report
APIScenarioReportResult report = executeQueue.get(reportId).getReport();
batchMapper.insert(report);
}
sqlSession.flushStatements();
}
});
thread.start();
for (String reportId : executeQueue.keySet()) {
// Save the report
APIScenarioReportResult report = executeQueue.get(reportId).getReport();
batchMapper.insert(report);
}
sqlSession.commit();

for (String reportId : executeQueue.keySet()) {
if (request.getConfig() != null && StringUtils.isNotEmpty(request.getConfig().getResourcePoolId())) {
@@ -1568,9 +1571,9 @@ public class ApiAutomationService {
HashTree hashTree = generateHashTree(apiScenarios, request, reportIds);
jMeterService.runLocal(reportIds.size() == 1 ? reportIds.get(0) : JSON.toJSONString(reportIds), hashTree, request.getReportId(), runMode);

Map<String,String> scenarioReportIdMap = new HashMap<>();
Map<String, String> scenarioReportIdMap = new HashMap<>();
for (String id : ids) {
scenarioReportIdMap.put(id,request.getReportId());
scenarioReportIdMap.put(id, request.getReportId());
}
TestPlanReportExecuteCatch.updateTestPlanExecuteResultInfo(request.getTestPlanReportId(), null, scenarioReportIdMap, null);

@@ -0,0 +1,20 @@
package io.metersphere.api.service.task;

import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;

public class NamedThreadFactory implements ThreadFactory {
    private static AtomicInteger tag = new AtomicInteger(0);
    private String name;

    public NamedThreadFactory(String name) {
        this.name = name;
    }

    @Override
    public Thread newThread(Runnable r) {
        Thread thread = new Thread(r);
        thread.setName(this.name + ":" + tag.getAndIncrement());
        return thread;
    }
}
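
NamedThreadFactory is what the services below hand to Executors.newFixedThreadPool so that pool threads carry a recognizable name prefix (which also lets JmeterThreadUtils.stop match them). A minimal usage sketch; the pool size, prefix, and task here are chosen only for illustration:

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;

    import io.metersphere.api.service.task.NamedThreadFactory;

    public class NamedThreadFactoryUsageSketch {
        public static void main(String[] args) {
            // Pool threads are named "TestPlanReportService:<n>", where <n> comes from a shared counter.
            ExecutorService pool = Executors.newFixedThreadPool(2, new NamedThreadFactory("TestPlanReportService"));
            pool.submit(() -> System.out.println("running on " + Thread.currentThread().getName()));
            pool.shutdown();
        }
    }
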
@@ -27,7 +27,7 @@ import io.metersphere.api.jmeter.MessageCache;
import io.metersphere.api.jmeter.ResourcePoolCalculation;
import io.metersphere.api.service.ApiDefinitionExecResultService;
import io.metersphere.api.service.ApiTestCaseService;
import io.metersphere.api.service.NodeKafkaService;
import io.metersphere.api.service.task.NamedThreadFactory;
import io.metersphere.base.domain.*;
import io.metersphere.base.mapper.*;
import io.metersphere.base.mapper.ext.ExtTestPlanApiCaseMapper;
@@ -408,7 +408,6 @@ public class TestPlanApiCaseService {
}
}
// Choose the execution mode
ExecutorService executorService = Executors.newFixedThreadPool(planApiCases.size());
if (request.getConfig() != null && request.getConfig().getMode().equals(RunModeConstants.SERIAL.toString())) {
Map<TestPlanApiCase, ApiDefinitionExecResult> executeQueue = new HashMap<>();
planApiCases.forEach(testPlanApiCase -> {
@@ -421,49 +420,57 @@ public class TestPlanApiCaseService {
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
for (TestPlanApiCase testPlanApiCase : executeQueue.keySet()) {
try {
if (executeQueue.get(testPlanApiCase) != null && MessageCache.terminationOrderDeque.contains(executeQueue.get(testPlanApiCase).getId())) {
MessageCache.terminationOrderDeque.remove(executeQueue.get(testPlanApiCase).getId());
break;
}
ApiDefinitionExecResult execResult = executeQueue.get(testPlanApiCase);
execResult.setId(executeQueue.get(testPlanApiCase).getId());
execResult.setStatus(APITestStatus.Running.name());
mapper.updateByPrimaryKey(execResult);
reportIds.add(execResult.getId());
RunModeDataDTO modeDataDTO;
if (request.getConfig() != null && StringUtils.isNotBlank(request.getConfig().getResourcePoolId())) {
modeDataDTO = new RunModeDataDTO(testPlanApiCase.getId(), UUID.randomUUID().toString());
} else {
// Generate the report and HashTree
HashTree hashTree = generateHashTree(testPlanApiCase.getId());
modeDataDTO = new RunModeDataDTO(hashTree, UUID.randomUUID().toString());
}
modeDataDTO.setApiCaseId(execResult.getId());
Future<ApiDefinitionExecResult> future = executorService.submit(new SerialApiExecTask(jMeterService, mapper, modeDataDTO, request.getConfig(), ApiRunMode.API_PLAN.name()));
ApiDefinitionExecResult report = future.get();
// If "stop on failure" is enabled, check the returned result status
if (request.getConfig().isOnSampleError()) {
if (report == null || !report.getStatus().equals("Success")) {
reportIds.remove(execResult.getId());
ExecutorService executorService = Executors.newFixedThreadPool(1, new NamedThreadFactory("TestPlanApiCaseService"));
try {
Thread.currentThread().setName("TestPlanCase串行执行线程");
for (TestPlanApiCase testPlanApiCase : executeQueue.keySet()) {
try {
if (executeQueue.get(testPlanApiCase) != null && MessageCache.terminationOrderDeque.contains(executeQueue.get(testPlanApiCase).getId())) {
MessageCache.terminationOrderDeque.remove(executeQueue.get(testPlanApiCase).getId());
break;
}
ApiDefinitionExecResult execResult = executeQueue.get(testPlanApiCase);
execResult.setId(executeQueue.get(testPlanApiCase).getId());
execResult.setStatus(APITestStatus.Running.name());
mapper.updateByPrimaryKey(execResult);
reportIds.add(execResult.getId());
RunModeDataDTO modeDataDTO;
if (request.getConfig() != null && StringUtils.isNotBlank(request.getConfig().getResourcePoolId())) {
modeDataDTO = new RunModeDataDTO(testPlanApiCase.getId(), UUID.randomUUID().toString());
} else {
// Generate the report and HashTree
HashTree hashTree = generateHashTree(testPlanApiCase.getId());
modeDataDTO = new RunModeDataDTO(hashTree, UUID.randomUUID().toString());
}
modeDataDTO.setApiCaseId(execResult.getId());
Future<ApiDefinitionExecResult> future = executorService.submit(new SerialApiExecTask(jMeterService, mapper, modeDataDTO, request.getConfig(), ApiRunMode.API_PLAN.name()));
ApiDefinitionExecResult report = future.get();
// If "stop on failure" is enabled, check the returned result status
if (request.getConfig().isOnSampleError()) {
if (report == null || !report.getStatus().equals("Success")) {
reportIds.remove(execResult.getId());
break;
}
}
} catch (Exception e) {
reportIds.remove(executeQueue.get(testPlanApiCase).getId());
LogUtil.error("执行终止:" + e.getMessage());
break;
}
} catch (Exception e) {
reportIds.remove(executeQueue.get(testPlanApiCase).getId());
LogUtil.error("执行终止:" + e.getMessage());
break;
}
}
// Clean up the unexecuted queue
if (reportIds.size() < executeQueue.size()) {
List<String> removeList = executeQueue.entrySet().stream()
.filter(map -> !reportIds.contains(map.getValue().getId()))
.map(map -> map.getValue().getId()).collect(Collectors.toList());
ApiDefinitionExecResultExample example = new ApiDefinitionExecResultExample();
example.createCriteria().andIdIn(removeList);
mapper.deleteByExample(example);
// Clean up the unexecuted queue
if (reportIds.size() < executeQueue.size()) {
List<String> removeList = executeQueue.entrySet().stream()
.filter(map -> !reportIds.contains(map.getValue().getId()))
.map(map -> map.getValue().getId()).collect(Collectors.toList());
ApiDefinitionExecResultExample example = new ApiDefinitionExecResultExample();
example.createCriteria().andIdIn(removeList);
mapper.deleteByExample(example);
}
} catch (Exception e) {
LogUtil.error(e);
} finally {
executorService.shutdownNow();
}
}
});

@@ -3,6 +3,7 @@ package io.metersphere.track.service;
import com.alibaba.fastjson.JSON;
import com.github.pagehelper.Page;
import com.github.pagehelper.PageHelper;
import io.metersphere.api.service.task.NamedThreadFactory;
import io.metersphere.base.domain.*;
import io.metersphere.base.mapper.LoadTestMapper;
import io.metersphere.base.mapper.LoadTestReportMapper;
@@ -153,7 +154,7 @@ public class TestPlanLoadCaseService {
if (request.getConfig() != null && request.getConfig().getMode().equals(RunModeConstants.SERIAL.toString())) {
serialRun(request);
} else {
ExecutorService executorService = Executors.newFixedThreadPool(request.getRequests().size());
ExecutorService executorService = Executors.newFixedThreadPool(request.getRequests().size(),new NamedThreadFactory("TestPlanLoadCaseService"));
request.getRequests().forEach(item -> {
executorService.submit(new ParallelExecTask(performanceTestService, testPlanLoadCaseMapper, item));
});
@@ -168,7 +169,7 @@ public class TestPlanLoadCaseService {
}

private void serialRun(RunBatchTestPlanRequest request) throws Exception {
ExecutorService executorService = Executors.newFixedThreadPool(request.getRequests().size());
ExecutorService executorService = Executors.newFixedThreadPool(request.getRequests().size(),new NamedThreadFactory("TestPlanLoadCaseService-serial"));
for (RunTestPlanRequest runTestPlanRequest : request.getRequests()) {
Future<LoadTestReportWithBLOBs> future = executorService.submit(new SerialExecTask(performanceTestService, testPlanLoadCaseMapper, loadTestReportMapper, runTestPlanRequest, request.getConfig()));
LoadTestReportWithBLOBs report = future.get();

@@ -12,6 +12,7 @@ import io.metersphere.api.dto.automation.TestPlanScenarioRequest;
import io.metersphere.api.dto.definition.ApiTestCaseRequest;
import io.metersphere.api.dto.definition.TestPlanApiCaseDTO;
import io.metersphere.api.service.ShareInfoService;
import io.metersphere.api.service.task.NamedThreadFactory;
import io.metersphere.base.domain.*;
import io.metersphere.base.mapper.*;
import io.metersphere.base.mapper.ext.*;
@@ -96,7 +97,7 @@ public class TestPlanReportService {
@Resource
private ProjectService projectService;

private final ExecutorService executorService = Executors.newFixedThreadPool(20);
private final ExecutorService executorService = Executors.newFixedThreadPool(20,new NamedThreadFactory("TestPlanReportService"));

public List<TestPlanReportDTO> list(QueryTestPlanReportRequest request) {
List<TestPlanReportDTO> list = new ArrayList<>();
@@ -1125,6 +1126,7 @@ public class TestPlanReportService {
TestPlanReportExecuteCatch.finishAllTask(planReportId);
}
}

this.updateExecuteApis(planReportId);
}

@@ -21,6 +21,7 @@ import io.metersphere.api.service.ApiAutomationService;
import io.metersphere.api.service.ApiDefinitionService;
import io.metersphere.api.service.ApiScenarioReportService;
import io.metersphere.api.service.ApiTestCaseService;
import io.metersphere.api.service.task.NamedThreadFactory;
import io.metersphere.base.domain.*;
import io.metersphere.base.mapper.*;
import io.metersphere.base.mapper.ext.*;
@@ -189,7 +190,7 @@ public class TestPlanService {
@Resource
private TestPlanFollowMapper testPlanFollowMapper;

private final ExecutorService executorService = Executors.newFixedThreadPool(20);
private final ExecutorService executorService = Executors.newFixedThreadPool(20,new NamedThreadFactory("TestPlanService"));

public synchronized TestPlan addTestPlan(AddTestPlanRequest testPlan) {
if (getTestPlanByName(testPlan.getName()).size() > 0) {