fix(test plan): optimize test plan execution

Optimize test plan execution.
song-tianyang 2021-12-20 13:55:18 +08:00 committed by BugKing
parent 54e497efb8
commit 73577037ff
12 changed files with 328 additions and 130 deletions

View File

@ -285,7 +285,7 @@
<dependency>
<groupId>com.fit2cloud</groupId>
<artifactId>quartz-spring-boot-starter</artifactId>
<version>0.0.7</version>
<version>0.0.9</version>
</dependency>
<dependency>

View File

@ -9,6 +9,7 @@ import io.metersphere.base.mapper.ApiScenarioReportMapper;
import io.metersphere.commons.constants.TestPlanApiExecuteStatus;
import io.metersphere.commons.constants.TestPlanResourceType;
import io.metersphere.commons.utils.CommonBeanFactory;
import io.metersphere.track.dto.TestPlanReportExecuteCheckResultDTO;
import lombok.Getter;
import lombok.Setter;
import org.apache.commons.collections.CollectionUtils;
@ -19,6 +20,7 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* @author song.tianyang
@ -29,13 +31,14 @@ import java.util.Map;
public class TestPlanExecuteInfo {
private String reportId;
private String creator;
private Map<String, String> apiCaseExecInfo = new HashMap<>();
private Map<String, String> apiScenarioCaseExecInfo = new HashMap<>();
private Map<String, String> loadCaseExecInfo = new HashMap<>();
private Map<String, String> apiCaseExecInfo = new ConcurrentHashMap<>();
private Map<String, String> apiScenarioCaseExecInfo = new ConcurrentHashMap<>();
private Map<String, String> loadCaseExecInfo = new ConcurrentHashMap<>();
private Map<String, String> apiCaseExecuteThreadMap = new HashMap<>();
private Map<String, String> apiScenarioThreadMap = new HashMap<>();
private Map<String, String> loadCaseReportIdMap = new HashMap<>();
// Case threads are keyed by report ID. key: relation table ID, value: report ID
private Map<String, String> apiCaseExecuteThreadMap = new ConcurrentHashMap<>();
private Map<String, String> apiScenarioThreadMap = new ConcurrentHashMap<>();
private Map<String, String> loadCaseReportIdMap = new ConcurrentHashMap<>();
private Map<String, String> apiCaseReportMap = new HashMap<>();
private Map<String, String> apiScenarioReportMap = new HashMap<>();
@ -82,7 +85,8 @@ public class TestPlanExecuteInfo {
}
}
public synchronized int countUnFinishedNum() {
public synchronized TestPlanReportExecuteCheckResultDTO countUnFinishedNum() {
TestPlanReportExecuteCheckResultDTO executeCheck = new TestPlanReportExecuteCheckResultDTO();
int unFinishedCount = 0;
this.isApiCaseAllExecuted = true;
@ -116,8 +120,21 @@ public class TestPlanExecuteInfo {
if (lastUnFinishedNumCount != unFinishedCount) {
lastUnFinishedNumCount = unFinishedCount;
lastFinishedNumCountTime = System.currentTimeMillis();
executeCheck.setFinishedCaseChanged(true);
} else if (unFinishedCount == 0) {
executeCheck.setFinishedCaseChanged(true);
} else {
executeCheck.setFinishedCaseChanged(false);
}
return unFinishedCount;
executeCheck.setTimeOut(false);
if (unFinishedCount > 0) {
// If no case execution result has been updated for 20 minutes, treat the run as timed out
long nowTime = System.currentTimeMillis();
if (nowTime - lastFinishedNumCountTime > 1200000) {
executeCheck.setTimeOut(true);
}
}
return executeCheck;
}
public Map<String, Map<String, String>> getExecutedResult() {
@ -200,13 +217,13 @@ public class TestPlanExecuteInfo {
MessageCache.executionQueue.remove(apiScenarioThreadMap.get(resourceId));
}
}
if(CollectionUtils.isNotEmpty(updateScenarioReportList)){
if (CollectionUtils.isNotEmpty(updateScenarioReportList)) {
ApiScenarioReportMapper apiScenarioReportMapper = CommonBeanFactory.getBean(ApiScenarioReportMapper.class);
ApiScenarioReportExample example = new ApiScenarioReportExample();
example.createCriteria().andIdIn(updateScenarioReportList).andStatusEqualTo("Running");
ApiScenarioReport report = new ApiScenarioReport();
report.setStatus("Error");
apiScenarioReportMapper.updateByExampleSelective(report,example);
apiScenarioReportMapper.updateByExampleSelective(report, example);
}
for (Map.Entry<String, String> entry : loadCaseExecInfo.entrySet()) {
@ -227,7 +244,7 @@ public class TestPlanExecuteInfo {
this.countUnFinishedNum();
}
public void updateReport(Map<String, String> apiCaseExecResultInfo, Map<String, String> apiScenarioCaseExecResultInfo) {
public synchronized void updateReport(Map<String, String> apiCaseExecResultInfo, Map<String, String> apiScenarioCaseExecResultInfo) {
if (MapUtils.isNotEmpty(apiCaseExecResultInfo)) {
this.apiCaseReportMap.putAll(apiCaseExecResultInfo);
}
@ -237,4 +254,34 @@ public class TestPlanExecuteInfo {
}
}
public Map<String, String> getRunningApiCaseReportMap() {
//key: reportId, value: testPlanApiCaseId
Map<String, String> returnMap = new HashMap<>();
for (Map.Entry<String,String> entry : apiCaseExecInfo.entrySet()) {
String planCaseId = entry.getKey();
String status = entry.getValue();
if (StringUtils.equalsIgnoreCase(status, TestPlanApiExecuteStatus.RUNNING.name())) {
if (apiCaseExecuteThreadMap.containsKey(planCaseId)) {
returnMap.put(apiCaseExecuteThreadMap.get(planCaseId), planCaseId);
}
}
}
return returnMap;
}
public Map<String, String> getRunningScenarioReportMap() {
//key: reportId, value: testPlanApiScenarioId
Map<String, String> returnMap = new HashMap<>();
for (Map.Entry<String,String> entry : apiScenarioCaseExecInfo.entrySet()) {
String planScenarioId = entry.getKey();
String status = entry.getValue();
if (StringUtils.equalsIgnoreCase(status, TestPlanApiExecuteStatus.RUNNING.name())) {
if (apiScenarioThreadMap.containsKey(planScenarioId)) {
returnMap.put(apiScenarioThreadMap.get(planScenarioId), planScenarioId);
}
}
}
return returnMap;
}
}
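countUnFinishedNum() above no longer returns a bare count: it now reports whether the unfinished-case count changed since the last check and whether the 20-minute no-progress window has elapsed, and the shared status maps move to ConcurrentHashMap because several executor threads touch them. A minimal, self-contained sketch of that progress-or-timeout check; the class and field names below are illustrative, not the project's own:

import java.util.concurrent.TimeUnit;

// Illustrative stand-in for the progress tracking done by TestPlanExecuteInfo.
class ProgressCheck {
    private static final long TIMEOUT_MS = TimeUnit.MINUTES.toMillis(20);

    private int lastUnfinishedCount = -1;
    private long lastProgressTime = System.currentTimeMillis();

    // Returns true when nothing has finished for 20 minutes while cases are still pending.
    synchronized boolean isTimedOut(int unfinishedCount) {
        long now = System.currentTimeMillis();
        if (unfinishedCount != lastUnfinishedCount) {
            // Progress was made: remember the new count and reset the timeout window.
            lastUnfinishedCount = unfinishedCount;
            lastProgressTime = now;
            return false;
        }
        // No progress: time out only if work is still pending and the window has elapsed.
        return unfinishedCount > 0 && now - lastProgressTime > TIMEOUT_MS;
    }
}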

View File

@ -1,10 +1,13 @@
package io.metersphere.api.cache;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* @author song.tianyang
@ -48,8 +51,12 @@ public class TestPlanReportExecuteCatch {
}
public synchronized static boolean containsReport(String reportId){
if(StringUtils.isEmpty(reportId)){
return false;
}else {
return testPlanReportMap != null && testPlanReportMap.containsKey(reportId);
}
}
public synchronized static void updateApiTestPlanExecuteInfo(String reportId,
Map<String, String> apiCaseExecInfo, Map<String, String> apiScenarioCaseExecInfo, Map<String, String> loadCaseExecInfo) {
@ -97,4 +104,12 @@ public class TestPlanReportExecuteCatch {
testPlanReportMap.get(planReportId).finishAllTask();
}
}
public static Set<String> getAllReportId(){
if (testPlanReportMap != null) {
return testPlanReportMap.keySet();
}else {
return new HashSet<>();
}
}
}

View File

@ -97,7 +97,7 @@ public class ApiJmeterFileService {
}
hashTree = apiAutomationService.generateHashTree(item, reportId, planEnvMap);
}
return zipFilesToByteArray(remoteTestId, hashTree);
return zipFilesToByteArray(reportId, hashTree);
}
public byte[] downloadJmx(String runMode, String testId, String reportId, String testPlanScenarioId) {

View File

@ -161,6 +161,8 @@ public class ApiScenarioReportService {
LogUtil.info("从缓存中获取场景报告:【" + test.getName() + "");
report = MessageCache.scenarioExecResourceLock.get(test.getName());
LogUtil.info("从缓存中获取场景报告:【" + test.getName() + "】是否为空:" + (report == null));
} else {
LogUtil.info("数据库中获取场景报告结束:" + report.getId());
}
if (report != null) {
report.setName(report.getScenarioName() + "-" + DateUtils.getTimeStr(System.currentTimeMillis()));
@ -320,6 +322,7 @@ public class ApiScenarioReportService {
if (CollectionUtils.isNotEmpty(scenarioResult.getRequestResults())) {
startTime = scenarioResult.getRequestResults().get(0).getStartTime();
}
String resultReportId = scenarioResult.getName();
ApiScenarioReport report = editReport(scenarioResult, startTime);
if (report != null) {
TestResult newResult = createTestResult(result.getTestId(), scenarioResult);
@ -366,7 +369,8 @@ public class ApiScenarioReportService {
testPlanApiScenarioMapper.updateByPrimaryKeySelective(testPlanApiScenario);
scenarioIdList.add(testPlanApiScenario.getApiScenarioId());
} else {
LogUtil.info("TestPlanReport_Id is null. scenario report id : [" + report.getId() + "]; planScenarioIdArr:[" + report.getScenarioId() + "] DATA:" + JSON.toJSONString(scenarioResult));
LogUtil.info("Cannot find TestPlanApiScenario!");
LogUtil.error("TestPlanReport_Id is null. scenario report id : [" + report.getId() + "]; planScenarioIdArr:[" + report.getScenarioId() + "]. plan_scenario_id:" + planScenarioId + ". DATA:" + JSON.toJSONString(scenarioResult));
}
report.setEndTime(System.currentTimeMillis());
@ -376,17 +380,27 @@ public class ApiScenarioReportService {
// Report detail content
ApiScenarioReportDetail detail = new ApiScenarioReportDetail();
detail.setContent(JSON.toJSONString(newResult).getBytes(StandardCharsets.UTF_8));
detail.setReportId(report.getId());
detail.setProjectId(report.getProjectId());
if (StringUtils.isNotEmpty(report.getTriggerMode()) && report.getTriggerMode().equals("CASE")) {
report.setTriggerMode(TriggerMode.MANUAL.name());
}
if (StringUtils.equalsIgnoreCase(report.getId(), resultReportId)) {
detail.setReportId(report.getId());
} else {
detail.setReportId(resultReportId);
LogUtil.info("ReportId" + resultReportId + " has changed!");
LogUtil.error("ReportId was changed. ScenarioResultData:" + JSON.toJSONString(scenarioResult) + ";\r\n " +
"ApiScenarioReport:" + JSON.toJSONString(report));
}
try {
apiScenarioReportDetailMapper.insert(detail);
} catch (Exception e) {
LogUtil.error("存储场景报告出错:" + e.getMessage() + "; 步骤信息:" + JSON.toJSONString(scenarioResult));
LogUtil.error("Save scenario report error! errorInfo:" + e.getMessage() + "; ScenarioResultData:" + JSON.toJSONString(scenarioResult));
LogUtil.error(e);
}
scenarioNames.append(report.getName()).append(",");
// Update the scenario status
ApiScenario scenario = apiScenarioMapper.selectByPrimaryKey(report.getScenarioId());
@ -410,7 +424,7 @@ public class ApiScenarioReportService {
MessageCache.executionQueue.remove(report.getId());
reportIds.add(report.getId());
} else {
LogUtil.error("测试计划场景[" + result.getTestId() + "]的场景报告未找到。报告ID:" + scenarioResult.getName() + "。 步骤信息:" + JSON.toJSONString(scenarioResult));
LogUtil.error("未获取到场景报告。 报告ID" + scenarioResult.getName() + "。 步骤信息:" + JSON.toJSONString(scenarioResult));
}
}
testPlanLog.info("TestPlanReportId" + JSONArray.toJSONString(testPlanReportIdList) + " EXECUTE OVER. SCENARIO STATUS : " + JSONObject.toJSONString(scenarioAndErrorMap));

View File

@ -2,7 +2,6 @@ package io.metersphere.controller;
import com.github.pagehelper.Page;
import com.github.pagehelper.PageHelper;
import io.metersphere.api.service.ApiAutomationService;
import io.metersphere.base.domain.Schedule;
import io.metersphere.controller.request.QueryScheduleRequest;
import io.metersphere.controller.request.ScheduleRequest;
@ -18,8 +17,6 @@ import java.util.List;
public class ScheduleController {
@Resource
private ScheduleService scheduleService;
@Resource
private ApiAutomationService apiAutomationService;
@PostMapping("/list/{goPage}/{pageSize}")
public List<ScheduleDao> list(@PathVariable int goPage, @PathVariable int pageSize, @RequestBody QueryScheduleRequest request) {

View File

@ -3,6 +3,7 @@ package io.metersphere.job.sechedule;
import io.metersphere.commons.constants.ReportTriggerMode;
import io.metersphere.commons.constants.ScheduleGroup;
import io.metersphere.commons.utils.CommonBeanFactory;
import io.metersphere.commons.utils.LogUtil;
import io.metersphere.track.service.TestPlanService;
import org.quartz.*;
@ -16,22 +17,9 @@ import org.quartz.*;
public class TestPlanTestJob extends MsScheduleJob {
private String projectID;
// private PerformanceTestService performanceTestService;
// private TestPlanScenarioCaseService testPlanScenarioCaseService;
// private TestPlanApiCaseService testPlanApiCaseService;
// private ApiTestCaseService apiTestCaseService;
// private TestPlanReportService testPlanReportService;
// private TestPlanLoadCaseService testPlanLoadCaseService;
private TestPlanService testPlanService;
public TestPlanTestJob() {
// this.performanceTestService = CommonBeanFactory.getBean(PerformanceTestService.class);
// this.testPlanScenarioCaseService = CommonBeanFactory.getBean(TestPlanScenarioCaseService.class);
// this.testPlanApiCaseService = CommonBeanFactory.getBean(TestPlanApiCaseService.class);
// this.apiTestCaseService = CommonBeanFactory.getBean(ApiTestCaseService.class);
// this.testPlanReportService = CommonBeanFactory.getBean(TestPlanReportService.class);
// this.testPlanLoadCaseService = CommonBeanFactory.getBean(TestPlanLoadCaseService.class);
this.testPlanService = CommonBeanFactory.getBean(TestPlanService.class);
@ -63,7 +51,17 @@ public class TestPlanTestJob extends MsScheduleJob {
JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
String config = jobDataMap.getString("config");
testPlanService.run(this.resourceId, this.projectID, this.userId, ReportTriggerMode.SCHEDULE.name(),config);
String runResourceId = this.resourceId;
String runProjectId = this.projectID;
String runUserId = this.userId;
LogUtil.info("Start test_plan_scehdule. test_plan_id:" + runProjectId);
Thread thread = new Thread(new Runnable() {
@Override
public void run() {
testPlanService.run(runResourceId, runProjectId, runUserId, ReportTriggerMode.SCHEDULE.name(),config);
}
});
thread.start();
}
public static JobKey getJobKey(String testId) {
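The scheduled job above now hands the long-running testPlanService.run(...) call to a new thread so the Quartz worker returns immediately instead of being held for the whole plan. A hedged sketch of the same hand-off using a shared executor instead of a raw Thread per trigger; the pool and its size are assumptions, not project code:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

// Sketch only: dispatch plan execution off the Quartz scheduler thread.
class PlanScheduleDispatch {
    // Assumed shared pool; the commit itself starts a plain new Thread per trigger.
    private static final ExecutorService PLAN_POOL = Executors.newFixedThreadPool(4);

    static void dispatch(Runnable planRun) {
        // Submit and return at once so the scheduler thread is not blocked
        // for the duration of the test plan execution.
        PLAN_POOL.submit(planRun);
    }
}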

View File

@ -66,9 +66,23 @@ public class AppStartListener implements ApplicationListener<ApplicationReadyEve
private TestReviewTestCaseService testReviewTestCaseService;
@Resource
private MockConfigService mockConfigService;
@Resource
private TestPlanService testPlanService;
@Resource
private TestPlanReportService testPlanReportService;
@Value("${jmeter.home}")
private String jmeterHome;
@Value("${quartz.properties.org.quartz.jobStore.acquireTriggersWithinLock}")
private String acquireTriggersWithinLock;
@Value("${quartz.enabled}")
private boolean quartzEnable;
@Value("${quartz.scheduler-name}")
private String quartzScheduleName;
@Value("${quartz.thread-count}")
private int quartzThreadCount;
@Value("${testplan.thread-count}")
private int testplanExecuteThreadPool;
@Override
public void onApplicationEvent(ApplicationReadyEvent applicationReadyEvent) {
@ -77,6 +91,10 @@ public class AppStartListener implements ApplicationListener<ApplicationReadyEve
System.setProperty("jmeter.home", jmeterHome);
// Resize the thread pools that support custom sizing
testPlanService.resetThreadPool(testplanExecuteThreadPool);
testPlanReportService.resetThreadPool(testplanExecuteThreadPool);
loadJars();
initPythonEnv();
@ -93,6 +111,13 @@ public class AppStartListener implements ApplicationListener<ApplicationReadyEve
} catch (InterruptedException e) {
e.printStackTrace();
}
LogUtil.info("开始启动定时任务。 相关设置:" +
"quartz.acquireTriggersWithinLock :" + acquireTriggersWithinLock + "\r\n" +
"quartz.enabled :" + quartzEnable + "\r\n" +
"quartz.scheduler-name :" + quartzScheduleName + "\r\n" +
"quartz.thread-count :" + quartzThreadCount + "\r\n" +
"testplan.execute.thread.pool :" + testplanExecuteThreadPool + "\r\n"
);
scheduleService.startEnableSchedules();
@ -102,6 +127,7 @@ public class AppStartListener implements ApplicationListener<ApplicationReadyEve
/**
* Handle initialization and data-compatibility migrations
* Executed only once, on the first upgrade
*
* @param initFuc
* @param key
*/
@ -135,7 +161,7 @@ public class AppStartListener implements ApplicationListener<ApplicationReadyEve
initOnceOperate(testPlanLoadCaseService::initOrderField, "init.sort.plan.api.load");
initOnceOperate(testReviewTestCaseService::initOrderField, "init.sort.review.test.case");
initOnceOperate(apiDefinitionService::initDefaultModuleId, "init.default.module.id");
initOnceOperate(mockConfigService::initExpectNum,"init.mock.expectNum");
initOnceOperate(mockConfigService::initExpectNum, "init.mock.expectNum");
}
/**

View File

@ -0,0 +1,11 @@
package io.metersphere.track.dto;
import lombok.Getter;
import lombok.Setter;
@Getter
@Setter
public class TestPlanReportExecuteCheckResultDTO {
private boolean isTimeOut;
private boolean isFinishedCaseChanged;
}
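Because the two boolean fields are declared with an "is" prefix, Lombok's @Getter/@Setter resolve to isTimeOut()/setTimeOut(boolean) and isFinishedCaseChanged()/setFinishedCaseChanged(boolean), which is how the counting code earlier in this diff writes them and the report service below reads them. A small usage sketch; the helper class is illustrative only:

import io.metersphere.track.dto.TestPlanReportExecuteCheckResultDTO;

// Illustrative helper mirroring how countReport(...) consumes the two flags.
class CheckResultUsageSketch {
    static boolean shouldRefreshReport(TestPlanReportExecuteCheckResultDTO check) {
        // A timeout is treated like a change so the report still gets finalized.
        return check.isTimeOut() || check.isFinishedCaseChanged();
    }
}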

View File

@ -47,6 +47,7 @@ import javax.annotation.Resource;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.stream.Collectors;
/**
@ -81,11 +82,15 @@ public class TestPlanReportService {
@Resource
ExtTestPlanApiCaseMapper extTestPlanApiCaseMapper;
@Resource
ExtApiDefinitionExecResultMapper extApiDefinitionExecResultMapper;
@Resource
ApiTestCaseMapper apiTestCaseMapper;
@Resource
LoadTestReportMapper loadTestReportMapper;
@Resource
TestPlanLoadCaseMapper testPlanLoadCaseMapper;
@Resource
ExtApiScenarioReportMapper extApiScenarioReportMapper;
@Lazy
@Resource
TestPlanService testPlanService;
@ -100,9 +105,16 @@ public class TestPlanReportService {
private UserService userService;
@Resource
private ProjectService projectService;
private final ExecutorService executorService = Executors.newFixedThreadPool(20, new NamedThreadFactory("TestPlanReportService"));
private final ThreadPoolExecutor planListenerExecutorService = (ThreadPoolExecutor) Executors.newFixedThreadPool(20, new NamedThreadFactory("TestPlanExecuteListen"));
public void resetThreadPool(int threadCount){
if(threadCount > 0){
planListenerExecutorService.setMaximumPoolSize(threadCount);
}
}
public List<TestPlanReportDTO> list(QueryTestPlanReportRequest request) {
List<TestPlanReportDTO> list = new ArrayList<>();
request.setOrders(ServiceUtils.getDefaultOrder(request.getOrders()));
@ -551,8 +563,6 @@ public class TestPlanReportService {
boolean scenarioIsOk = executeInfo.isScenarioAllExecuted();
boolean performanceIsOk = executeInfo.isLoadCaseAllExecuted();
testPlanLog.info("ReportId[" + testPlanReport.getId() + "] count over. Testplan Execute Result: Api is over ->" + apiCaseIsOk + "; scenario is over ->" + scenarioIsOk + "; performance is over ->" + performanceIsOk);
if (apiCaseIsOk) {
testPlanReport.setIsApiCaseExecuting(false);
}
@ -926,7 +936,7 @@ public class TestPlanReportService {
testPlanLog.info("TestPlanReportId[" + testPlanReport.getId() + "] SELECT performance BATCH OVER:" + JSONArray.toJSONString(selectList));
if (performaneReportIDList.isEmpty()) {
testPlanLog.info("TestPlanReportId[" + testPlanReport.getId() + "] performance EXECUTE OVER. TRIGGER_MODE:" + triggerMode + ",REsult:" + JSONObject.toJSONString(finishLoadTestId));
if (StringUtils.equalsAnyIgnoreCase(triggerMode, ReportTriggerMode.API.name() ,ReportTriggerMode.MANUAL.name())) {
if (StringUtils.equalsAnyIgnoreCase(triggerMode, ReportTriggerMode.API.name(), ReportTriggerMode.MANUAL.name())) {
for (String string : finishLoadTestId.keySet()) {
String reportId = caseReportMap.get(string);
TestPlanLoadCaseWithBLOBs updateDTO = new TestPlanLoadCaseWithBLOBs();
@ -1075,13 +1085,17 @@ public class TestPlanReportService {
}
public void countReport(String planReportId) {
boolean isTimeOut = this.checkTestPlanReportIsTimeOut(planReportId);
if (isTimeOut) {
TestPlanReportExecuteCheckResultDTO checkResult = this.checkTestPlanReportIsTimeOut(planReportId);
testPlanLog.info("Check PlanReport:" + planReportId + "; result: "+ JSON.toJSONString(checkResult));
if (checkResult.isTimeOut()) {
// Check for a timeout; if timed out, force-stop the remaining tasks
TestPlanReportExecuteCatch.finishAllTask(planReportId);
checkResult.setFinishedCaseChanged(true);
}
if(checkResult.isFinishedCaseChanged()){
this.updateExecuteApis(planReportId);
}
}
public TestPlanSimpleReportDTO getReport(String reportId) {
TestPlanReportContentExample example = new TestPlanReportContentExample();
@ -1188,7 +1202,7 @@ public class TestPlanReportService {
if (StringUtils.isNotBlank(testPlanReportContent.getLoadAllCases())) {
List<TestPlanLoadCaseDTO> allCases = JSONObject.parseArray(testPlanReportContent.getLoadAllCases(), TestPlanLoadCaseDTO.class);
if(!allCases.isEmpty()){
if (!allCases.isEmpty()) {
isTaskRunning = true;
}
}
@ -1213,22 +1227,68 @@ public class TestPlanReportService {
bloBs.setEndTime(endTime);
TestPlanReportContentExample example = new TestPlanReportContentExample();
example.createCriteria().andTestPlanReportIdEqualTo(testPlanReport.getId());
testPlanReportContentMapper.updateByExampleSelective(bloBs,example);
testPlanReportContentMapper.updateByExampleSelective(bloBs, example);
}
private TestPlanReportExecuteCheckResultDTO checkTestPlanReportIsTimeOut(String planReportId) {
// Sync execution status updates from the database
try {
this.syncReportStatus(planReportId);
} catch (Exception e) {
LogUtil.info("联动数据库同步执行状态失败! " + e.getMessage());
LogUtil.error(e);
}
private boolean checkTestPlanReportIsTimeOut(String planReportId) {
TestPlanExecuteInfo executeInfo = TestPlanReportExecuteCatch.getTestPlanExecuteInfo(planReportId);
int unFinishNum = executeInfo.countUnFinishedNum();
if (unFinishNum > 0) {
//20分钟没有案例执行结果更新则定位超时
long lastCountTime = executeInfo.getLastFinishedNumCountTime();
long nowTime = System.currentTimeMillis();
testPlanLog.info("ReportId: ["+planReportId+"]; timeCount:"+ (nowTime - lastCountTime));
if (nowTime - lastCountTime > 1200000) {
return true;
TestPlanReportExecuteCheckResultDTO checkResult = executeInfo.countUnFinishedNum();
return checkResult;
}
private void syncReportStatus(String planReportId) {
if (TestPlanReportExecuteCatch.containsReport(planReportId)) {
TestPlanExecuteInfo executeInfo = TestPlanReportExecuteCatch.getTestPlanExecuteInfo(planReportId);
if (executeInfo != null) {
// Sync API case results
Map<String, String> updateCaseStatusMap = new HashMap<>();
Map<String, String> apiCaseReportMap = executeInfo.getRunningApiCaseReportMap();
if (MapUtils.isNotEmpty(apiCaseReportMap)) {
List<ApiDefinitionExecResult> execList = extApiDefinitionExecResultMapper.selectStatusByIdList(apiCaseReportMap.keySet());
for (ApiDefinitionExecResult report : execList) {
String reportId = report.getId();
String status = report.getStatus();
if (!StringUtils.equalsAnyIgnoreCase(status, "Running", "Waiting")) {
String planCaseId = apiCaseReportMap.get(reportId);
if (StringUtils.isNotEmpty(planCaseId)) {
updateCaseStatusMap.put(planCaseId, status);
}
}
return false;
}
}
// Sync scenario results
Map<String, String> updateScenarioStatusMap = new HashMap<>();
Map<String, String> scenarioReportMap = executeInfo.getRunningScenarioReportMap();
if (MapUtils.isNotEmpty(scenarioReportMap)) {
List<ApiScenarioReport> reportList = extApiScenarioReportMapper.selectStatusByIds(scenarioReportMap.keySet());
for (ApiScenarioReport report : reportList) {
String reportId = report.getId();
String status = report.getStatus();
if (!StringUtils.equalsAnyIgnoreCase(status, "Running", "Waiting")) {
String planScenarioId = scenarioReportMap.get(reportId);
if (StringUtils.isNotEmpty(planScenarioId)) {
updateScenarioStatusMap.put(planScenarioId, status);
}
}
}
}
testPlanLog.info("ReportID:"+planReportId+" 本次数据库同步,案例ID"+JSON.toJSONString(apiCaseReportMap.keySet())+";场景ID"+JSON.toJSONString(scenarioReportMap.keySet())+"; 同步结果,案例:"+JSON.toJSONString(updateCaseStatusMap)+";场景:"+JSON.toJSONString(updateScenarioStatusMap));
TestPlanReportExecuteCatch.updateApiTestPlanExecuteInfo(planReportId, updateCaseStatusMap, updateScenarioStatusMap, null);
} else {
testPlanLog.info("Failed to look up execution info from the database! Report ID not found in cache: " + planReportId);
}
}
}
private void finishTestPlanReport(String planReportId) {
@ -1240,4 +1300,26 @@ public class TestPlanReportService {
TestPlanReportExecuteCatch.remove(planReportId);
}
public void startTestPlanExecuteListen(String reportId){
planListenerExecutorService.submit(() -> {
while (TestPlanReportExecuteCatch.containsReport(reportId)){
// Check whether the report still exists in the database
TestPlanReportExample example = new TestPlanReportExample();
example.createCriteria().andIdEqualTo(reportId);
long count = testPlanReportMapper.countByExample(example);
if(count > 0){
testPlanLog.info("Start check testPlanReport:" + reportId);
countReport(reportId);
try {
Thread.sleep(10000);
}catch (Exception e){
LogUtil.info(e);
}
}else {
TestPlanReportExecuteCatch.remove(reportId);
}
}
});
}
}
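One detail worth flagging for resetThreadPool(...) above: a pool built by Executors.newFixedThreadPool(20) uses an unbounded work queue with core size equal to maximum size, so raising only the maximum does not add worker threads. A minimal sketch of a resize that actually grows the worker count, assuming growth is the intent; this is not the project's code:

import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;

// Sketch: resize a "fixed" pool so the new limit takes effect.
class PoolResizeSketch {
    private final ThreadPoolExecutor pool =
            (ThreadPoolExecutor) Executors.newFixedThreadPool(20);

    void resize(int threadCount) {
        if (threadCount > pool.getCorePoolSize()) {
            pool.setMaximumPoolSize(threadCount); // raise the ceiling first
            pool.setCorePoolSize(threadCount);    // then the number of live workers
        }
    }
}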

View File

@ -85,8 +85,8 @@ import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@ -197,7 +197,13 @@ public class TestPlanService {
@Resource
private LoadTestMapper loadTestMapper;
private final ExecutorService executorService = Executors.newFixedThreadPool(40, new NamedThreadFactory("TestPlanService"));
private final ThreadPoolExecutor executorService = (ThreadPoolExecutor) Executors.newFixedThreadPool(20, new NamedThreadFactory("TestPlanService"));
public void resetThreadPool(int threadCount){
if(threadCount > 0){
executorService.setMaximumPoolSize(threadCount);
}
}
public synchronized TestPlan addTestPlan(AddTestPlanRequest testPlan) {
if (getTestPlanByName(testPlan.getName()).size() > 0) {
@ -1120,7 +1126,6 @@ public class TestPlanService {
TestPlanReport testPlanReport = reportInfoDTO.getTestPlanReport();
Map<String, String> planScenarioIdsMap = reportInfoDTO.getPlanScenarioIdMap();
Map<String, String> planApiCaseMap = reportInfoDTO.getApiTestCaseDataMap();
Map<String, String> performanceIdMap = reportInfoDTO.getPerformanceIdMap();
if (runModeConfig.getMode().equals(RunModeConstants.PARALLEL.toString())) {
// Validate the concurrency count
@ -1141,8 +1146,22 @@ public class TestPlanService {
extTestPlanMapper.updateActualEndTimeIsNullById(testPlanID);
String planReportId = testPlanReport.getId();
testPlanLog.info("ReportId[" + planReportId + "] created. TestPlanID:[" + testPlanID + "]. " + "API Run Config:【" + apiRunConfig + "");
// Start listening for the test plan execution status
this.listenTaskExecuteStatus(planReportId);
RunModeConfig finalRunModeConfig = runModeConfig;
executorService.submit(() -> {
testPlanLog.info("ReportId[" + planReportId + "] start execute.");
this.executeTestPlan(reportInfoDTO, triggerMode, projectID, userId, finalRunModeConfig);
testPlanLog.info("ReportId[" + planReportId + "] is executing.");
});
return testPlanReport.getId();
}
private void executeTestPlan(TestPlanScheduleReportInfoDTO reportInfoDTO, String triggerMode, String projectID, String userId, RunModeConfig runModeConfig) {
TestPlanReport testPlanReport = reportInfoDTO.getTestPlanReport();
String planReportId = testPlanReport.getId();
Map<String, String> planScenarioIdsMap = reportInfoDTO.getPlanScenarioIdMap();
Map<String, String> planApiCaseMap = reportInfoDTO.getApiTestCaseDataMap();
Map<String, String> performanceIdMap = reportInfoDTO.getPerformanceIdMap();
// Execution IDs for the different task types
Map<String, String> executePerformanceIdMap = new HashMap<>();
@ -1208,33 +1227,21 @@ public class TestPlanService {
for (String id : planScenarioIdsMap.keySet()) {
executeScenarioCaseIdMap.put(id, TestPlanApiExecuteStatus.RUNNING.name());
}
testPlanLog.info("ReportId[" + planReportId + "] start run. TestPlanID:[" + testPlanID + "]. Execute api :" + JSONObject.toJSONString(executeApiCaseIdMap) + "; Execute scenario:" + JSONObject.toJSONString(executeScenarioCaseIdMap) + "; Execute performance:" + JSONObject.toJSONString(executePerformanceIdMap));
testPlanLog.info("ReportId[" + planReportId + "] start run. TestPlanID:[" + testPlanReport.getTestPlanId() + "]. Execute api :" + JSONObject.toJSONString(executeApiCaseIdMap) + "; Execute scenario:" + JSONObject.toJSONString(executeScenarioCaseIdMap) + "; Execute performance:" + JSONObject.toJSONString(executePerformanceIdMap));
TestPlanReportExecuteCatch.updateApiTestPlanExecuteInfo(planReportId, executeApiCaseIdMap, executeScenarioCaseIdMap, executePerformanceIdMap);
// Execute the API case tasks
if (!planApiCaseMap.isEmpty()) {
this.executeApiTestCase(triggerMode, planReportId, userId, new ArrayList<>(planApiCaseMap.keySet()), runModeConfig);
}
// Execute the scenario tasks
this.executeScenarioCase(planReportId, testPlanID, projectID, runModeConfig, triggerMode, userId, planScenarioIdsMap);
return testPlanReport.getId();
if(!planScenarioIdsMap.isEmpty()){
this.executeScenarioCase(planReportId, testPlanReport.getTestPlanId(), projectID, runModeConfig, triggerMode, userId, planScenarioIdsMap);
}
private void listenTaskExecuteStatus(String planReportId) {
executorService.submit(() -> {
try {
//10s 查询一次状态
Thread.sleep(10000);
while (TestPlanReportExecuteCatch.getTestPlanExecuteInfo(planReportId) != null) {
testPlanReportService.countReport(planReportId);
Thread.sleep(10000);
}
} catch (InterruptedException e) {
TestPlanReportExecuteCatch.remove(planReportId);
LogUtil.error(e);
}
});
testPlanReportService.startTestPlanExecuteListen(planReportId);
}
private void executeApiTestCase(String triggerMode, String planReportId, String userId, List<String> planCaseIds, RunModeConfig runModeConfig) {
executorService.submit(() -> {
BatchRunDefinitionRequest request = new BatchRunDefinitionRequest();
if (StringUtils.equals(triggerMode, ReportTriggerMode.API.name())) {
request.setTriggerMode(ApiRunMode.JENKINS_API_PLAN.name());
@ -1243,16 +1250,15 @@ public class TestPlanService {
} else {
request.setTriggerMode(ApiRunMode.SCHEDULE_API_PLAN.name());
}
request.setPlanIds(planCaseIds);
request.setPlanReportId(planReportId);
request.setConfig(runModeConfig);
request.setUserId(userId);
testPlanApiCaseService.run(request);
});
}
private void executeScenarioCase(String planReportId, String testPlanID, String projectID, RunModeConfig runModeConfig, String triggerMode, String userId, Map<String, String> planScenarioIdMap) {
executorService.submit(() -> {
if (!planScenarioIdMap.isEmpty()) {
SchedulePlanScenarioExecuteRequest scenarioRequest = new SchedulePlanScenarioExecuteRequest();
String senarionReportID = UUID.randomUUID().toString();
@ -1281,7 +1287,6 @@ public class TestPlanService {
scenarioRequest.setConfig(runModeConfig);
this.scenarioRunModeConfig(scenarioRequest);
}
});
}
public String getLogDetails(String id) {
@ -1822,7 +1827,6 @@ public class TestPlanService {
config = JSONObject.parseObject(reportConfig);
}
TestPlanSimpleReportDTO report = getReport(planId);
buildFunctionalReport(report, config, planId);
buildApiReport(report, config, executeInfo, isFinish);
buildLoadReport(report, config, executeInfo, planId, false);
return report;
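run(...) above now returns the report id right away while executeTestPlan(...) is submitted to the service's executor, so callers get a handle first and the plan finishes later under the listener started for that report. A minimal sketch of that fire-and-return shape; the class and method names are illustrative:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

// Sketch: kick off execution asynchronously and hand back the report id at once.
class AsyncRunSketch {
    private final ExecutorService pool = Executors.newFixedThreadPool(2);

    String run(String reportId, Runnable executePlan) {
        pool.submit(executePlan); // heavy work happens off the caller's thread
        return reportId;          // not finished yet; progress is tracked via the report
    }
}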

View File

@ -12,7 +12,7 @@ server.ssl.key-alias=localhost
# Hikari
spring.datasource.type=com.zaxxer.hikari.HikariDataSource
spring.datasource.hikari.maximum-pool-size=100
spring.datasource.hikari.maximum-pool-size=200
spring.datasource.hikari.auto-commit=true
spring.datasource.hikari.idle-timeout=10000
spring.datasource.hikari.pool-name=DatebookHikariCP
@ -23,7 +23,7 @@ spring.datasource.hikari.connection-test-query=SELECT 1
spring.datasource.quartz.url=${spring.datasource.url}
spring.datasource.quartz.username=${spring.datasource.username}
spring.datasource.quartz.password=${spring.datasource.password}
spring.datasource.quartz.hikari.maximum-pool-size=50
spring.datasource.quartz.hikari.maximum-pool-size=200
spring.datasource.quartz.hikari.auto-commit=true
spring.datasource.quartz.hikari.idle-timeout=10000
spring.datasource.quartz.hikari.pool-name=DatebookHikariCP
@ -92,7 +92,10 @@ jmeter.home=/opt/jmeter
# quartz
quartz.enabled=true
quartz.scheduler-name=msServerJob
quartz.thread-count=30
quartz.thread-count=60
quartz.properties.org.quartz.jobStore.acquireTriggersWithinLock=true
#schedule
testplan.thread-count=40
# file upload
spring.servlet.multipart.max-file-size=500MB
spring.servlet.multipart.max-request-size=500MB
@ -103,3 +106,4 @@ management.endpoints.web.exposure.include=*