fix(API automation): optimize batch execution

fit2-zhao 2021-08-30 19:28:02 +08:00 committed by fit2-zhao
parent b0e27a0c51
commit a525f62a4d
6 changed files with 36 additions and 55 deletions

ApiJmeterFileController.java

@@ -20,7 +20,7 @@ public class ApiJmeterFileController {
     private ApiJmeterFileService apiJmeterFileService;
 
     @PostMapping("download/files")
-    public ResponseEntity<byte[]> downloadJmeterFiles(@RequestBody List<BodyFile> bodyFileList) {
+    public ResponseEntity<byte[]> downloadBodyFiles(@RequestBody List<BodyFile> bodyFileList) {
         byte[] bytes = new byte[10];
         if (CollectionUtils.isNotEmpty(bodyFileList)) {
             bytes = apiJmeterFileService.downloadJmeterFiles(bodyFileList);
@@ -31,11 +31,6 @@ public class ApiJmeterFileController {
                 .body(bytes);
     }
 
-    @GetMapping("download")
-    public byte[] downloadJmx(@RequestParam("testId") String testId, @RequestParam("reportId") String reportId, @RequestParam("runMode") String runMode, @RequestParam("testPlanScenarioId") String testPlanScenarioId) {
-        return apiJmeterFileService.downloadJmx(runMode, testId, reportId, testPlanScenarioId);
-    }
-
     @GetMapping("download/jar")
     public ResponseEntity<byte[]> downloadJmeterFiles() {
         byte[] bytes = apiJmeterFileService.downloadJmeterJar();
@@ -44,4 +39,13 @@ public class ApiJmeterFileController {
                 .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + UUID.randomUUID().toString() + ".zip\"")
                 .body(bytes);
     }
+
+    @GetMapping("download")
+    public ResponseEntity<byte[]> downloadJmeterFiles(@RequestParam("testId") String testId, @RequestParam("reportId") String reportId, @RequestParam("runMode") String runMode, @RequestParam("testPlanScenarioId") String testPlanScenarioId) {
+        byte[] bytes = apiJmeterFileService.downloadJmeterFiles(runMode, testId, reportId, testPlanScenarioId);
+        return ResponseEntity.ok()
+                .contentType(MediaType.parseMediaType("application/octet-stream"))
+                .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + testId + ".zip\"")
+                .body(bytes);
+    }
 }
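
For orientation, a minimal sketch of how a consumer (for example, a node in a test resource pool) might call the new GET download endpoint. The base URL and the controller's request-mapping prefix are assumptions for illustration; only the query parameters and the zip response come from this diff.

import org.springframework.http.ResponseEntity;
import org.springframework.web.client.RestTemplate;

public class JmeterFileDownloadExample {
    public static void main(String[] args) {
        // Hypothetical backend address and mapping prefix; not part of this commit.
        String url = "http://localhost:8081/api/jmeter/download"
                + "?testId={testId}&reportId={reportId}&runMode={runMode}&testPlanScenarioId={testPlanScenarioId}";
        ResponseEntity<byte[]> response = new RestTemplate().getForEntity(
                url, byte[].class, "case-1", "report-1", "DEFINITION", "");
        byte[] zip = response.getBody(); // served as "<testId>.zip" per the Content-Disposition header
    }
}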

JMeterService.java

@@ -177,6 +177,7 @@ public class JMeterService {
         } else {
             runRequest.setJmx(new MsTestPlan().getJmx(hashTree));
             kafkaTemplate.send(MsKafkaListener.EXEC_TOPIC, JSON.toJSONString(runRequest));
+            kafkaTemplate.flush();
         }
     }
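
The flush() call matters because send() is asynchronous: with producer batching enabled (see the batch-size property added below in application.properties), a record can sit in the producer's buffer until the batch fills or linger expires. A minimal sketch of the pattern, assuming a KafkaTemplate<String, String> bean; the topic name is illustrative:

import org.springframework.kafka.core.KafkaTemplate;

public class ExecRequestPublisher {
    private final KafkaTemplate<String, String> kafkaTemplate;

    public ExecRequestPublisher(KafkaTemplate<String, String> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    public void publish(String payload) {
        // send() only enqueues the record in the producer's in-memory buffer.
        kafkaTemplate.send("MS-API-EXEC-TOPIC", payload); // illustrative topic name
        // flush() blocks until buffered records are transmitted, so an
        // execution request is dispatched immediately instead of waiting
        // for the producer batch to fill.
        kafkaTemplate.flush();
    }
}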

ApiJmeterFileService.java

@@ -52,6 +52,29 @@ public class ApiJmeterFileService {
         return listBytesToZip(files);
     }
 
+    public byte[] downloadJmeterFiles(String runMode, String testId, String reportId, String testPlanScenarioId) {
+        Map<String, String> planEnvMap = new HashMap<>();
+        if (StringUtils.isNotEmpty(testPlanScenarioId)) {
+            // Fetch the execution environment configured for this individual scenario case
+            TestPlanApiScenario planApiScenario = testPlanApiScenarioMapper.selectByPrimaryKey(testPlanScenarioId);
+            String environment = planApiScenario.getEnvironment();
+            if (StringUtils.isNotBlank(environment)) {
+                planEnvMap = JSON.parseObject(environment, Map.class);
+            }
+        }
+        HashTree hashTree = null;
+        if (ApiRunMode.DEFINITION.name().equals(runMode) || ApiRunMode.API_PLAN.name().equals(runMode)) {
+            hashTree = testPlanApiCaseService.generateHashTree(testId);
+        } else {
+            ApiScenarioWithBLOBs item = apiScenarioMapper.selectByPrimaryKey(testId);
+            if (item == null) {
+                MSException.throwException("未找到执行场景。");
+            }
+            hashTree = apiAutomationService.generateHashTree(item, reportId, planEnvMap);
+        }
+        return zipFilesToByteArray(testId, hashTree);
+    }
+
     public byte[] downloadJmx(String runMode, String testId, String reportId, String testPlanScenarioId) {
         Map<String, String> planEnvMap = new HashMap<>();
         if (StringUtils.isNotEmpty(testPlanScenarioId)) {
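
The zipFilesToByteArray helper the new method ends with is outside this hunk. As a rough, self-contained sketch of that packaging step (the single-entry layout and the <testId>.jmx entry name are assumptions; the JMX text would come from serializing the HashTree, e.g. via MsTestPlan.getJmx as JMeterService does above):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class JmxZipSketch {
    // Packs one JMX document into an in-memory zip, entry named "<testId>.jmx".
    public static byte[] zip(String testId, String jmx) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try (ZipOutputStream zos = new ZipOutputStream(bos)) {
            zos.putNextEntry(new ZipEntry(testId + ".jmx"));
            zos.write(jmx.getBytes(StandardCharsets.UTF_8));
            zos.closeEntry();
        }
        return bos.toByteArray();
    }
}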

TestPlanApiCaseService.java

@@ -38,7 +38,6 @@ import io.metersphere.log.vo.OperatingLogDetails;
 import io.metersphere.service.SystemParameterService;
 import io.metersphere.track.dto.*;
 import io.metersphere.track.request.testcase.TestPlanApiCaseBatchRequest;
-import io.metersphere.track.service.task.ParallelApiExecTask;
 import io.metersphere.track.service.task.SerialApiExecTask;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.ibatis.session.ExecutorType;
@@ -450,7 +449,6 @@ public class TestPlanApiCaseService {
         }
         ApiDefinitionExecResult report = addResult(request, key, APITestStatus.Running.name(), batchMapper);
         modeDataDTO.setApiCaseId(report.getId());
-        executorService.submit(new ParallelApiExecTask(jMeterService, mapper, modeDataDTO, request.getConfig(), ApiRunMode.API_PLAN.name()));
         if (request.getConfig() != null && StringUtils.isNotEmpty(request.getConfig().getResourcePoolId())) {
             jMeterService.runTest(modeDataDTO.getTestId(), modeDataDTO.getApiCaseId(), ApiRunMode.API_PLAN.name(), null, request.getConfig(), hashTree);
         } else {
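
With ParallelApiExecTask gone (the file is deleted below), the only logic it carried, choosing between resource-pool and local execution, now runs inline on the calling thread. Condensed for illustration from the surviving branch here and the deleted task's call() body; names mirror the diff and parameter plumbing is omitted:

// Sketch: the dispatch decision formerly wrapped in a Callable, now inline.
void dispatch(JMeterService jMeterService, RunModeDataDTO dto, RunModeConfig config, HashTree hashTree) {
    if (config != null && StringUtils.isNotBlank(config.getResourcePoolId())) {
        // Remote execution on the configured test resource pool.
        jMeterService.runTest(dto.getTestId(), dto.getApiCaseId(), ApiRunMode.API_PLAN.name(), null, config, hashTree);
    } else {
        // Local execution in the backend JVM.
        jMeterService.runLocal(dto.getApiCaseId(), hashTree,
                dto.getReport() != null ? dto.getReport().getTriggerMode() : null, ApiRunMode.API_PLAN.name());
    }
}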

ParallelApiExecTask.java

@@ -1,46 +0,0 @@
-/**
- *
- */
-package io.metersphere.track.service.task;
-
-import io.metersphere.api.dto.RunModeDataDTO;
-import io.metersphere.api.dto.automation.RunModeConfig;
-import io.metersphere.api.jmeter.JMeterService;
-import io.metersphere.base.mapper.ApiDefinitionExecResultMapper;
-import io.metersphere.commons.exception.MSException;
-import io.metersphere.commons.utils.LogUtil;
-import org.apache.commons.lang3.StringUtils;
-
-import java.util.concurrent.Callable;
-
-public class ParallelApiExecTask<T> implements Callable<T> {
-    private RunModeConfig config;
-    private JMeterService jMeterService;
-    private RunModeDataDTO runModeDataDTO;
-    private String runMode;
-    private ApiDefinitionExecResultMapper mapper;
-
-    public ParallelApiExecTask(JMeterService jMeterService, ApiDefinitionExecResultMapper mapper, RunModeDataDTO runModeDataDTO, RunModeConfig config, String runMode) {
-        this.jMeterService = jMeterService;
-        this.config = config;
-        this.runModeDataDTO = runModeDataDTO;
-        this.runMode = runMode;
-        this.mapper = mapper;
-    }
-
-    @Override
-    public T call() {
-        try {
-            if (config != null && StringUtils.isNotBlank(config.getResourcePoolId())) {
-                jMeterService.runTest(runModeDataDTO.getTestId(), runModeDataDTO.getApiCaseId(), runMode, null, config, runModeDataDTO.getHashTree());
-            } else {
-                jMeterService.runLocal(runModeDataDTO.getApiCaseId(), runModeDataDTO.getHashTree(), runModeDataDTO.getReport() != null ? runModeDataDTO.getReport().getTriggerMode() : null, runMode);
-            }
-            return null;
-        } catch (Exception ex) {
-            LogUtil.error(ex);
-            MSException.throwException(ex.getMessage());
-            return null;
-        }
-    }
-}

application.properties

@@ -52,8 +52,9 @@
 spring.flyway.encoding=UTF-8
 spring.flyway.validate-on-migrate=false
 spring.kafka.listener.missing-topics-fatal=false
 spring.kafka.producer.properties.max.request.size=32428800
+spring.kafka.producer.batch-size=16384
 spring.messages.basename=i18n/messages
 # kafka
 kafka.fields=
 kafka.timestamp=yyyy-MM-dd'T'HH:mm:ss.SSSZZ
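
The new batch-size value, 16384 bytes, is also the Kafka producer's default batch.size; making it explicit documents the batching behavior that the kafkaTemplate.flush() call above compensates for. For reference, the equivalent raw producer configuration (broker address is illustrative):

import java.util.Properties;
import org.apache.kafka.clients.producer.ProducerConfig;

public class ProducerBatchConfigSketch {
    static Properties producerProps() {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // illustrative
        // Records for a partition accumulate up to batch.size bytes (or until
        // linger.ms expires) before a network send; flush() forces delivery
        // of anything still queued.
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        return props;
    }
}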