refactor(test plan): standardize execution log output to make troubleshooting during execution easier

fit2-zhao 2022-05-05 13:35:27 +08:00 committed by fit2-zhao
parent 97eef05b7a
commit 4bd942e807
13 changed files with 74 additions and 57 deletions

View File

@@ -71,8 +71,7 @@ public class ApiCaseExecuteService {
* @return
*/
public List<MsExecResponseDTO> run(BatchRunDefinitionRequest request) {
List<String> ids = request.getPlanIds();
if (CollectionUtils.isEmpty(ids)) {
if (CollectionUtils.isEmpty(request.getPlanIds())) {
return new LinkedList<>();
}
if (request.getConfig() == null) {
@@ -84,12 +83,13 @@ public class ApiCaseExecuteService {
LoggerUtil.debug("开始查询测试计划用例");
TestPlanApiCaseExample example = new TestPlanApiCaseExample();
example.createCriteria().andIdIn(ids);
example.createCriteria().andIdIn(request.getPlanIds());
example.setOrderByClause("`order` DESC");
List<TestPlanApiCase> planApiCases = testPlanApiCaseMapper.selectByExample(example);
if (StringUtils.isEmpty(request.getTriggerMode())) {
request.setTriggerMode(ApiRunMode.API_PLAN.name());
}
LoggerUtil.debug("查询到测试计划用例 " + planApiCases.size());
Map<String, ApiDefinitionExecResult> executeQueue = new LinkedHashMap<>();
List<MsExecResponseDTO> responseDTOS = new LinkedList<>();
@@ -121,6 +121,7 @@ public class ApiCaseExecuteService {
}
executeQueue.put(testPlanApiCase.getId(), report);
responseDTOS.add(new MsExecResponseDTO(testPlanApiCase.getId(), report.getId(), request.getTriggerMode()));
LoggerUtil.debug("预生成测试用例结果报告:" + report.getName() + ", ID " + report.getId());
}
apiCaseResultService.batchSave(executeQueue);

View File

@@ -12,6 +12,7 @@ import io.metersphere.dto.BaseSystemConfigDTO;
import io.metersphere.dto.JmeterRunRequestDTO;
import io.metersphere.dto.RunModeConfigDTO;
import io.metersphere.service.SystemParameterService;
import io.metersphere.utils.LoggerUtil;
import io.metersphere.vo.BooleanPool;
import org.apache.commons.collections4.MapUtils;
import org.apache.jorphan.collections.HashTree;
@@ -49,8 +50,8 @@ public class ApiCaseParallelExecuteService {
runRequest.setReportType(executionQueue.getReportType());
runRequest.setRunType(RunModeConstants.PARALLEL.toString());
runRequest.setQueueId(executionQueue.getId());
Map<String,Object> extendedParameters = new HashMap<>();
extendedParameters.put("userId",result.getUserId());
Map<String, Object> extendedParameters = new HashMap<>();
extendedParameters.put("userId", result.getUserId());
runRequest.setExtendedParameters(extendedParameters);
if (MapUtils.isNotEmpty(executionQueue.getDetailMap())) {
runRequest.setPlatformUrl(GenerateHashTreeUtil.getPlatformUrl(baseInfo, runRequest, executionQueue.getDetailMap().get(result.getId())));
@@ -59,6 +60,8 @@ public class ApiCaseParallelExecuteService {
HashTree hashTree = apiScenarioSerialService.generateHashTree(testId, config.getEnvMap(), runRequest);
runRequest.setHashTree(hashTree);
}
LoggerUtil.info("进入并行模式,开始执行用例:[" + result.getName() + "] 报告ID [" + reportId + "]");
jMeterService.run(runRequest);
}
}

View File

@@ -20,7 +20,6 @@ public class ExecTask implements Runnable {
@Override
public void run() {
LoggerUtil.info("开始执行报告ID" + request.getReportId() + " 】,资源ID【 " + request.getTestId() + "");
CommonBeanFactory.getBean(JMeterService.class).addQueue(request);
Object res = PoolExecBlockingQueueUtil.take(request.getReportId());
if (res == null && !JmeterThreadUtils.isRunning(request.getReportId(), request.getTestId())) {
@@ -29,6 +28,5 @@ public class ExecTask implements Runnable {
JMeterEngineCache.runningEngine.remove(request.getReportId());
}
}
LoggerUtil.info("任务:【 " + request.getReportId() + " 】执行完成");
}
}

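For context on the hunk above: ExecTask.run() hands the request to JMeterService and then blocks on PoolExecBlockingQueueUtil.take(request.getReportId()) until the result listener releases that report ID (see the PoolExecBlockingQueueUtil.offer(dto.getReportId()) call in APISingleResultListener further down). A minimal sketch of such a per-report handshake follows; the class name, END signal and timeout are assumptions, not the actual PoolExecBlockingQueueUtil implementation.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

// Illustrative only: a per-report blocking handshake in the spirit of
// PoolExecBlockingQueueUtil.take()/offer(). The executing thread parks on a
// one-slot queue keyed by report ID; the result listener releases it once the
// report has been processed.
public class ReportHandshakeSketch {

    private static final Map<String, LinkedBlockingQueue<Object>> QUEUES = new ConcurrentHashMap<>();
    private static final Object END_SIGNAL = new Object();

    // Executor side: wait until the report is released or the timeout expires (returns null on timeout).
    public static Object take(String reportId, long timeoutMinutes) throws InterruptedException {
        LinkedBlockingQueue<Object> slot = QUEUES.computeIfAbsent(reportId, k -> new LinkedBlockingQueue<>(1));
        try {
            return slot.poll(timeoutMinutes, TimeUnit.MINUTES);
        } finally {
            QUEUES.remove(reportId);
        }
    }

    // Listener side (compare APISingleResultListener below): release the waiting executor thread.
    public static void offer(String reportId) {
        LinkedBlockingQueue<Object> slot = QUEUES.get(reportId);
        if (slot != null) {
            slot.offer(END_SIGNAL);
        }
    }
}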
View File

@@ -39,9 +39,9 @@ public class ExecThreadPoolExecutor {
private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1, new NamedThreadFactory("MS-BUFFER-SCHEDULED"));
public void addTask(JmeterRunRequestDTO requestDTO) {
outApiThreadPoolExecutorLogger();
ExecTask task = new ExecTask(requestDTO);
threadPool.execute(task);
outApiThreadPoolExecutorLogger("报告:[" + requestDTO.getReportId() + "] 资源:[" + requestDTO.getTestId() + "] 加入执行队列");
}
/**
@@ -81,18 +81,17 @@ public class ExecThreadPoolExecutor {
return String.format("%1.2f%%", Double.parseDouble(num1 + "") / Double.parseDouble(num2 + "") * 100);
}
public void outApiThreadPoolExecutorLogger() {
public void outApiThreadPoolExecutorLogger(String message) {
ArrayBlockingQueue queue = (ArrayBlockingQueue) threadPool.getQueue();
StringBuffer buffer = new StringBuffer("API 并发队列详情:\n");
StringBuffer buffer = new StringBuffer("\n" + message);
buffer.append("\n").append("线程池详情:").append("\n");
buffer.append(" 核心线程数:" + threadPool.getCorePoolSize()).append("\n");
buffer.append(" 活动线程数:" + threadPool.getActiveCount()).append("\n");
buffer.append(" 活动线程数:" + threadPool.getActiveCount()).append(" (略有波动非精确数据)").append("\n");
buffer.append(" 最大线程数:" + threadPool.getMaximumPoolSize()).append("\n");
buffer.append(" 线程池活跃度:" + divide(threadPool.getActiveCount(), threadPool.getMaximumPoolSize())).append("\n");
buffer.append(" 任务完成数:" + threadPool.getCompletedTaskCount()).append("\n");
buffer.append(" 队列大小:" + (queue.size() + queue.remainingCapacity())).append("\n");
buffer.append(" 最大队列数:" + (queue.size() + queue.remainingCapacity())).append("\n");
buffer.append(" 当前排队线程数:" + (msRejectedExecutionHandler.getBufferQueue().size() + queue.size())).append("\n");
buffer.append(" 队列剩余大小:" + queue.remainingCapacity()).append("\n");
buffer.append(" 阻塞队列大小:" + PoolExecBlockingQueueUtil.queue.size()).append("\n");
buffer.append(" 执行中队列大小:" + PoolExecBlockingQueueUtil.queue.size()).append("\n");
buffer.append(" 队列使用度:" + divide(queue.size(), queue.size() + queue.remainingCapacity()));
LoggerUtil.info(buffer.toString());

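The two hunks above route a per-task message from addTask (report and resource IDs) into outApiThreadPoolExecutorLogger(String), which prefixes it to the thread-pool metrics that were previously logged without context. Below is a standalone sketch of the same diagnostic output, using a plain ThreadPoolExecutor and System.out in place of MeterSphere's LoggerUtil; the class name and demo IDs are illustrative, and only the plain-pool metrics are reproduced (the MeterSphere version also reports its buffer and blocking queues).

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class PoolStatsLoggerSketch {

    private static String percent(double part, double whole) {
        // Guard against division by zero when the pool reports no capacity.
        return whole == 0 ? "0.00%" : String.format("%1.2f%%", part / whole * 100);
    }

    // Builds a message-plus-metrics block similar to the buffer assembled above.
    public static void logPoolStats(String message, ThreadPoolExecutor pool) {
        ArrayBlockingQueue<Runnable> queue = (ArrayBlockingQueue<Runnable>) pool.getQueue();
        int capacity = queue.size() + queue.remainingCapacity();
        StringBuilder buffer = new StringBuilder("\n" + message);
        buffer.append("\n").append("线程池详情:").append("\n");
        buffer.append(" 核心线程数:").append(pool.getCorePoolSize()).append("\n");
        buffer.append(" 活动线程数:").append(pool.getActiveCount()).append(" (略有波动非精确数据)").append("\n");
        buffer.append(" 最大线程数:").append(pool.getMaximumPoolSize()).append("\n");
        buffer.append(" 线程池活跃度:").append(percent(pool.getActiveCount(), pool.getMaximumPoolSize())).append("\n");
        buffer.append(" 任务完成数:").append(pool.getCompletedTaskCount()).append("\n");
        buffer.append(" 最大队列数:").append(capacity).append("\n");
        buffer.append(" 队列剩余大小:").append(queue.remainingCapacity()).append("\n");
        buffer.append(" 队列使用度:").append(percent(queue.size(), capacity));
        System.out.println(buffer);
    }

    public static void main(String[] args) throws InterruptedException {
        ThreadPoolExecutor pool = new ThreadPoolExecutor(
                2, 4, 60, TimeUnit.SECONDS, new ArrayBlockingQueue<>(10));
        pool.execute(() -> {
            try { Thread.sleep(200); } catch (InterruptedException ignored) { }
        });
        Thread.sleep(50); // let the demo task start so the active count is non-zero
        logPoolStats("报告:[demo-report] 资源:[demo-test] 加入执行队列", pool);
        pool.shutdown();
    }
}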
View File

@@ -1,6 +1,5 @@
package io.metersphere.api.exec.scenario;
import com.alibaba.fastjson.JSON;
import io.metersphere.api.dto.RunModeDataDTO;
import io.metersphere.api.dto.automation.RunScenarioRequest;
import io.metersphere.api.exec.queue.DBTestQueue;
@@ -48,13 +47,11 @@ public class ApiScenarioParallelService {
runRequest.setTestPlanReportId(request.getTestPlanReportId());
runRequest.setPlatformUrl(GenerateHashTreeUtil.getPlatformUrl(baseInfo, runRequest, executionQueue.getDetailMap().get(reportId)));
runRequest.setRunType(RunModeConstants.PARALLEL.toString());
if (LoggerUtil.getLogger().isDebugEnabled()) {
LoggerUtil.debug("Scenario run-开始并发执行:" + JSON.toJSONString(request));
}
// 本地执行生成hashTree
if (!pool.isPool()) {
runRequest.setHashTree(GenerateHashTreeUtil.generateHashTree(dataDTO.getScenario(), dataDTO.getPlanEnvMap(), runRequest));
}
LoggerUtil.info("进入并行模式,准备执行场景:[ " + executeQueue.get(reportId).getReport().getName() + " ], 报告ID [ " + reportId + " ]");
jMeterService.run(runRequest);
}
}

View File

@@ -63,15 +63,15 @@ public class ApiScenarioSerialService {
private RedisTemplate<String, Object> redisTemplate;
public void serial(ApiExecutionQueue executionQueue, ApiExecutionQueueDetail queue) {
LoggerUtil.debug("Scenario run-执行脚本装载-进入串行准备");
String reportId = StringUtils.isNotEmpty(executionQueue.getReportId()) ? executionQueue.getReportId() : queue.getReportId();
if (!StringUtils.equalsAny(executionQueue.getRunMode(), ApiRunMode.SCENARIO.name())) {
reportId = queue.getReportId();
}
HashTree hashTree = null;
JmeterRunRequestDTO runRequest = new JmeterRunRequestDTO(queue.getTestId(), reportId, executionQueue.getRunMode(), hashTree);
JmeterRunRequestDTO runRequest = new JmeterRunRequestDTO(queue.getTestId(), reportId, executionQueue.getRunMode(), null);
// 获取可以执行的资源池
BaseSystemConfigDTO baseInfo = CommonBeanFactory.getBean(SystemParameterService.class).getBaseInfo();
// 判断触发资源对象是用例/场景更新对应报告状态
if (!StringUtils.equals(executionQueue.getReportType(), RunModeConstants.SET_REPORT.toString())
|| StringUtils.equalsIgnoreCase(executionQueue.getRunMode(), ApiRunMode.DEFINITION.name())) {
if (StringUtils.equalsAny(executionQueue.getRunMode(), ApiRunMode.SCENARIO.name(), ApiRunMode.SCENARIO_PLAN.name(), ApiRunMode.SCHEDULE_SCENARIO_PLAN.name(), ApiRunMode.SCHEDULE_SCENARIO.name(), ApiRunMode.JENKINS_SCENARIO_PLAN.name())) {
@@ -84,6 +84,7 @@ public class ApiScenarioSerialService {
this.put("userId", report.getCreateUser());
}});
apiScenarioReportMapper.updateByPrimaryKey(report);
LoggerUtil.info("进入串行模式,准备执行资源:[ " + report.getName() + " ], 报告ID [ " + report.getId() + " ]");
}
} else {
ApiDefinitionExecResult execResult = apiDefinitionExecResultMapper.selectByPrimaryKey(queue.getReportId());
@@ -91,20 +92,22 @@ public class ApiScenarioSerialService {
runRequest.setExtendedParameters(new HashMap<String, Object>() {{
this.put("userId", execResult.getUserId());
}});
execResult.setStartTime(System.currentTimeMillis());
execResult.setStatus(APITestStatus.Running.name());
apiDefinitionExecResultMapper.updateByPrimaryKeySelective(execResult);
LoggerUtil.info("进入串行模式,准备执行资源:[" + execResult.getName() + " ], 报告ID [" + execResult.getId() + "]");
}
}
}
LoggerUtil.info("Scenario run-开始执行队列ID" + executionQueue.getReportId() + "");
runRequest.setReportType(executionQueue.getReportType());
runRequest.setPool(GenerateHashTreeUtil.isResourcePool(executionQueue.getPoolId()));
runRequest.setTestPlanReportId(executionQueue.getReportId());
runRequest.setRunType(RunModeConstants.SERIAL.toString());
runRequest.setQueueId(executionQueue.getId());
runRequest.setPoolId(executionQueue.getPoolId());
try {
runRequest.setReportType(executionQueue.getReportType());
runRequest.setPool(GenerateHashTreeUtil.isResourcePool(executionQueue.getPoolId()));
runRequest.setTestPlanReportId(executionQueue.getReportId());
runRequest.setRunType(RunModeConstants.SERIAL.toString());
runRequest.setQueueId(executionQueue.getId());
runRequest.setPoolId(executionQueue.getPoolId());
if (StringUtils.isEmpty(executionQueue.getPoolId())) {
if (StringUtils.equalsAny(executionQueue.getRunMode(), ApiRunMode.SCENARIO.name(), ApiRunMode.SCENARIO_PLAN.name(), ApiRunMode.SCHEDULE_SCENARIO_PLAN.name(), ApiRunMode.SCHEDULE_SCENARIO.name(), ApiRunMode.JENKINS_SCENARIO_PLAN.name())) {
ApiScenarioWithBLOBs scenario = null;
@@ -122,18 +125,21 @@ public class ApiScenarioSerialService {
if ((planEnvMap == null || planEnvMap.isEmpty()) && StringUtils.isNotEmpty(queue.getEvnMap())) {
planEnvMap = JSON.parseObject(queue.getEvnMap(), Map.class);
}
hashTree = GenerateHashTreeUtil.generateHashTree(scenario, planEnvMap, runRequest);
runRequest.setHashTree(GenerateHashTreeUtil.generateHashTree(scenario, planEnvMap, runRequest));
} else {
Map<String, String> map = new LinkedHashMap<>();
if (StringUtils.isNotEmpty(queue.getEvnMap())) {
map = JSON.parseObject(queue.getEvnMap(), Map.class);
}
hashTree = generateHashTree(queue.getTestId(), map, runRequest);
runRequest.setHashTree(generateHashTree(queue.getTestId(), map, runRequest));
}
// 更新环境变量
this.initEnv(hashTree);
if (runRequest.getHashTree() != null) {
this.initEnv(runRequest.getHashTree());
}
}
runRequest.setHashTree(hashTree);
if (queue != null) {
runRequest.setPlatformUrl(GenerateHashTreeUtil.getPlatformUrl(baseInfo, runRequest, queue.getId()));
}
@@ -148,7 +154,7 @@ public class ApiScenarioSerialService {
ResultDTO dto = new ResultDTO();
BeanUtils.copyBean(dto, runRequest);
CommonBeanFactory.getBean(ApiExecutionQueueService.class).queueNext(dto);
LoggerUtil.error("执行终止", e);
LoggerUtil.error("执行队列[" + queue.getId() + "报告[" + queue.getReportId() + "入队列失败", e);
}
}
@@ -188,6 +194,8 @@ public class ApiScenarioSerialService {
group.getHashTree().add(testElement);
testPlan.getHashTree().add(group);
testPlan.toHashTree(jmeterHashTree, testPlan.getHashTree(), new ParameterConfig());
LoggerUtil.info("报告ID" + runRequest.getReportId() + " 用例资源:" + caseWithBLOBs.getName() + ", 生成执行脚本JMX成功");
return jmeterHashTree;
}
} catch (Exception ex) {
@@ -196,7 +204,7 @@ public class ApiScenarioSerialService {
ResultDTO dto = new ResultDTO();
BeanUtils.copyBean(dto, runRequest);
CommonBeanFactory.getBean(ApiExecutionQueueService.class).queueNext(dto);
LoggerUtil.error("生成JMX执行脚本失败", ex);
LoggerUtil.error("报告ID" + runRequest.getReportId() + " 用例资源:" + testId + ", 生成执行脚本失败", ex);
}
return null;
}

View File

@@ -24,6 +24,7 @@ import io.metersphere.constants.RunModeConstants;
import io.metersphere.dto.*;
import io.metersphere.plugin.core.MsTestElement;
import io.metersphere.service.EnvironmentGroupProjectService;
import io.metersphere.utils.LoggerUtil;
import io.metersphere.vo.BooleanPool;
import org.apache.commons.lang3.StringUtils;
import org.apache.jorphan.collections.HashTree;
@@ -141,12 +142,15 @@ public class GenerateHashTreeUtil {
group.setHashTree(scenarios);
testPlan.getHashTree().add(group);
LoggerUtil.info("报告ID" + runRequest.getReportId() + " 场景资源:" + item.getName() + ", 生成执行脚本JMX成功");
} catch (Exception ex) {
RemakeReportService remakeReportService = CommonBeanFactory.getBean(RemakeReportService.class);
remakeReportService.remake(runRequest);
ResultDTO dto = new ResultDTO();
BeanUtils.copyBean(dto, runRequest);
CommonBeanFactory.getBean(ApiExecutionQueueService.class).queueNext(dto);
LoggerUtil.error("报告ID" + runRequest.getReportId() + " 场景资源:" + item.getName() + ", 生成执行脚本失败", ex);
}
ParameterConfig config = new ParameterConfig();
config.setScenarioId(item.getId());

View File

@@ -19,7 +19,7 @@ public class APISingleResultListener extends MsExecListener {
@Override
public void handleTeardownTest(ResultDTO dto, Map<String, Object> kafkaConfig) {
LoggerUtil.info("处理单条执行结果报告【" + dto.getReportId() + " 】,资源【 " + dto.getTestId() + "");
LoggerUtil.info("接收到执行结果开始处理报告【" + dto.getReportId() + " 】,资源【 " + dto.getTestId() + "");
dto.setConsole(FixedCapacityUtils.getJmeterLogger(dto.getReportId()));
CommonBeanFactory.getBean(TestResultService.class).saveResults(dto);
}
@@ -30,7 +30,7 @@ public class APISingleResultListener extends MsExecListener {
if (JMeterEngineCache.runningEngine.containsKey(dto.getReportId())) {
JMeterEngineCache.runningEngine.remove(dto.getReportId());
}
LoggerUtil.info("进入TEST-END处理报告【" + dto.getReportId() + "整体执行完成;" + dto.getRunMode());
LoggerUtil.info("进入TEST-END处理报告【" + dto.getReportId() + "" + dto.getRunMode() + " 整体执行完成");
// 全局并发队列
PoolExecBlockingQueueUtil.offer(dto.getReportId());
dto.setConsole(FixedCapacityUtils.getJmeterLogger(dto.getReportId()));
@@ -40,7 +40,6 @@ public class APISingleResultListener extends MsExecListener {
if (apiExecutionQueueService == null) {
apiExecutionQueueService = CommonBeanFactory.getBean(ApiExecutionQueueService.class);
}
LoggerUtil.info("执行队列处理:" + dto.getQueueId());
if (StringUtils.isNotEmpty(dto.getQueueId())) {
apiExecutionQueueService.queueNext(dto);
}

View File

@@ -114,6 +114,7 @@ public class JMeterService {
JMeterBase.addBackendListener(request, request.getHashTree(), APISingleResultListener.class.getCanonicalName());
}
LoggerUtil.info("报告:[" + request.getReportId() + "] 资源:[" + request.getTestId() + "] 加入JMETER中开始执行");
LocalRunner runner = new LocalRunner(request.getHashTree());
runner.run(request.getReportId());
}

View File

@@ -70,7 +70,7 @@ public class ApiDefinitionExecResultService {
private SqlSessionFactory sqlSessionFactory;
public void saveApiResult(ResultDTO dto) {
LoggerUtil.info("接收到API/CASE执行结果【 " + dto.getRequestResults().size() + "");
LoggerUtil.info("接收到API/CASE执行结果【 " + dto.getRequestResults().size() + "");
for (RequestResult item : dto.getRequestResults()) {
if (item.getResponseResult() != null && item.getResponseResult().getResponseTime() <= 0) {
@@ -92,6 +92,7 @@ public class ApiDefinitionExecResultService {
}
// 发送通知
result.setResourceId(dto.getTestId());
LoggerUtil.info("执行结果【 " + result.getName() + " 】入库存储完成");
sendNotice(result, user);
}
}
@@ -303,8 +304,9 @@ public class ApiDefinitionExecResultService {
*/
public void saveApiResultByScheduleTask(ResultDTO dto) {
if (CollectionUtils.isNotEmpty(dto.getRequestResults())) {
LoggerUtil.info("接收到定时任务执行结果【 " + dto.getRequestResults().size() + "");
LoggerUtil.info("接收到API/CASE执行结果【 " + dto.getRequestResults().size() + "");
for (RequestResult item : dto.getRequestResults()) {
LoggerUtil.info("执行结果【 " + item.getName() + " 】入库存储");
if (!StringUtils.startsWithAny(item.getName(), "PRE_PROCESSOR_ENV_", "POST_PROCESSOR_ENV_")) {
//对响应内容进行进一步解析如果有附加信息比如误报库信息则根据附加信息内的数据进行其他判读
RequestResultExpandDTO expandDTO = ResponseUtil.parseByRequestResult(item);
@@ -341,7 +343,6 @@ public class ApiDefinitionExecResultService {
if (StringUtils.isNotEmpty(dto.getReportId())) {
apiIdResultMap.put(dto.getReportId(), status);
}
LoggerUtil.info("TestPlanReportId[" + dto.getTestPlanReportId() + "] APICASE OVER. API CASE STATUS:" + JSONObject.toJSONString(apiIdResultMap));
}

View File

@@ -70,6 +70,8 @@ public class ApiExecutionQueueService {
@Transactional(propagation = Propagation.REQUIRES_NEW)
public DBTestQueue add(Object runObj, String poolId, String type, String reportId, String reportType, String runMode, RunModeConfigDTO config) {
LoggerUtil.info("开始生成执行链");
ApiExecutionQueue executionQueue = new ApiExecutionQueue();
executionQueue.setId(UUID.randomUUID().toString());
executionQueue.setCreateTime(System.currentTimeMillis());
@@ -133,6 +135,8 @@ public class ApiExecutionQueueService {
extApiExecutionQueueMapper.sqlInsert(queueDetails);
}
resQueue.setDetailMap(detailMap);
LoggerUtil.info("生成执行链结束");
return resQueue;
}

View File

@@ -134,6 +134,8 @@ public class ApiScenarioReportResultService {
report.setBaseInfo(JSONObject.toJSONString(getBaseInfo(result)));
report.setContent(JSON.toJSONString(result).getBytes(StandardCharsets.UTF_8));
LoggerUtil.info("报告ID [ " + reportId + " ] 执行请求:【 " + baseResult.getName() + "】 入库存储");
return report;
}
}

View File

@@ -968,29 +968,29 @@ public class TestPlanService {
//测试计划准备执行取消测试计划的实际结束时间
extTestPlanMapper.updateActualEndTimeIsNullById(testPlanID);
testPlanLog.info("ReportId[" + planReportId + "] created. TestPlanID:[" + testPlanID + "]. " + "API Run Config:【" + apiRunConfig + "");
LoggerUtil.info("预生成测试计划报告【" + reportInfoDTO.getTestPlanReport() != null ? reportInfoDTO.getTestPlanReport().getName() : "" + "】计划报告ID[" + planReportId + "]");
Map<String, String> apiCaseReportMap = null;
Map<String, String> scenarioReportMap = null;
Map<String, String> loadCaseReportMap = null;
if(reportInfoDTO.getApiTestCaseDataMap()!=null){
if (reportInfoDTO.getApiTestCaseDataMap() != null) {
//执行接口案例任务
LoggerUtil.info("开始执行测试计划接口用例 " + planReportId);
apiCaseReportMap = this.executeApiTestCase(triggerMode, planReportId, userId, new ArrayList<>(reportInfoDTO.getApiTestCaseDataMap().keySet()), runModeConfig);
}
if(reportInfoDTO.getPlanScenarioIdMap()!=null){
if (reportInfoDTO.getPlanScenarioIdMap() != null) {
//执行场景执行任务
LoggerUtil.info("开始执行测试计划场景用例 " + planReportId);
scenarioReportMap = this.executeScenarioCase(planReportId, testPlanID, projectID, runModeConfig, triggerMode, userId, reportInfoDTO.getPlanScenarioIdMap());
}
if(reportInfoDTO.getPerformanceIdMap()!=null){
if (reportInfoDTO.getPerformanceIdMap() != null) {
//执行性能测试任务
LoggerUtil.info("开始执行测试计划性能用例 " + planReportId);
loadCaseReportMap = this.executeLoadCaseTask(planReportId,runModeConfig, triggerMode, reportInfoDTO.getPerformanceIdMap());
loadCaseReportMap = this.executeLoadCaseTask(planReportId, runModeConfig, triggerMode, reportInfoDTO.getPerformanceIdMap());
}
if(apiCaseReportMap!=null&&scenarioReportMap!=null&&loadCaseReportMap!=null){
LoggerUtil.info("开始生成测试计划报告 " + planReportId);
if (apiCaseReportMap != null && scenarioReportMap != null && loadCaseReportMap != null) {
LoggerUtil.info("开始生成测试计划报告内容 " + planReportId);
testPlanReportService.createTestPlanReportContentReportIds(planReportId, apiCaseReportMap, scenarioReportMap, loadCaseReportMap);
}
@@ -1090,7 +1090,7 @@ public class TestPlanService {
}
}
if(MapUtils.isNotEmpty(loadCaseReportMap)){
if (MapUtils.isNotEmpty(loadCaseReportMap)) {
//将性能测试加入到队列中
apiExecutionQueueService.add(loadCaseReportMap, null, ApiRunMode.TEST_PLAN_PERFORMANCE_TEST.name(),
planReportId, null, null, new RunModeConfigDTO());
@@ -1554,7 +1554,7 @@ public class TestPlanService {
} else if (MapUtils.isNotEmpty(testPlanExecuteReportDTO.getTestPlanScenarioIdAndReportIdMap())) {
scenarioAllCases = testPlanScenarioCaseService.getAllCases(testPlanExecuteReportDTO.getTestPlanScenarioIdAndReportIdMap());
}
this.checkApiCaseCreatorName(apiAllCases,scenarioAllCases);
this.checkApiCaseCreatorName(apiAllCases, scenarioAllCases);
report.setApiAllCases(apiAllCases);
report.setScenarioAllCases(scenarioAllCases);
}
@@ -1565,7 +1565,7 @@ public class TestPlanService {
}
}
private void checkApiCaseCreatorName(List<TestPlanFailureApiDTO> apiCases,List<TestPlanFailureScenarioDTO> scenarioCases) {
private void checkApiCaseCreatorName(List<TestPlanFailureApiDTO> apiCases, List<TestPlanFailureScenarioDTO> scenarioCases) {
List<String> userIdList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(apiCases)) {
apiCases.forEach(item -> {
@@ -2169,10 +2169,10 @@ public class TestPlanService {
private List<TestPlanExecutionQueue> getTestPlanExecutionQueues(TestplanRunRequest request, Map<String, String> executeQueue) {
List<TestPlanExecutionQueue>planExecutionQueues = new ArrayList<>();
List<TestPlanExecutionQueue> planExecutionQueues = new ArrayList<>();
String resourceId = UUID.randomUUID().toString();
final int[] nextNum = {testPlanExecutionQueueService.getNextNum(resourceId)};
executeQueue.forEach((k, v)->{
executeQueue.forEach((k, v) -> {
TestPlanExecutionQueue executionQueue = new TestPlanExecutionQueue();
executionQueue.setId(UUID.randomUUID().toString());
executionQueue.setCreateTime(System.currentTimeMillis());
@@ -2187,7 +2187,7 @@ public class TestPlanService {
return planExecutionQueues;
}
private void runByMode(TestplanRunRequest request, Map<String, TestPlanWithBLOBs> testPlanMap, List<TestPlanExecutionQueue> planExecutionQueues) {
private void runByMode(TestplanRunRequest request, Map<String, TestPlanWithBLOBs> testPlanMap, List<TestPlanExecutionQueue> planExecutionQueues) {
if (CollectionUtils.isNotEmpty(planExecutionQueues)) {
Thread thread = new Thread(new Runnable() {
@Override
@@ -2197,14 +2197,14 @@ public class TestPlanService {
TestPlanExecutionQueue planExecutionQueue = planExecutionQueues.get(0);
TestPlanWithBLOBs testPlan = testPlanMap.get(planExecutionQueue.getTestPlanId());
JSONObject jsonObject = JSONObject.parseObject(testPlan.getRunModeConfig());
TestplanRunRequest runRequest = JSON.toJavaObject(jsonObject,TestplanRunRequest.class);
TestplanRunRequest runRequest = JSON.toJavaObject(jsonObject, TestplanRunRequest.class);
runRequest.setReportId(planExecutionQueue.getReportId());
runPlan(runRequest);
}else {
} else {
for (TestPlanExecutionQueue planExecutionQueue : planExecutionQueues) {
TestPlanWithBLOBs testPlan = testPlanMap.get(planExecutionQueue.getTestPlanId());
JSONObject jsonObject = JSONObject.parseObject(testPlan.getRunModeConfig());
TestplanRunRequest runRequest = JSON.toJavaObject(jsonObject,TestplanRunRequest.class);
TestplanRunRequest runRequest = JSON.toJavaObject(jsonObject, TestplanRunRequest.class);
runRequest.setReportId(planExecutionQueue.getReportId());
runPlan(runRequest);
}