fix(api-testing): fix request blocking when executing large batches that include custom scripts and CSV files

fit2-zhao 2022-05-11 14:53:37 +08:00 committed by f2c-ci-robot[bot]
parent c85541a793
commit 3af7175776
6 changed files with 681 additions and 645 deletions

File diff suppressed because it is too large.

APISingleResultListener.java

@@ -7,20 +7,32 @@ import io.metersphere.api.service.TestResultService;
 import io.metersphere.cache.JMeterEngineCache;
 import io.metersphere.commons.utils.CommonBeanFactory;
 import io.metersphere.dto.ResultDTO;
+import io.metersphere.jmeter.JMeterBase;
 import io.metersphere.jmeter.MsExecListener;
 import io.metersphere.utils.LoggerUtil;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.jmeter.samplers.SampleResult;
 
+import java.util.List;
 import java.util.Map;
 
-public class APISingleResultListener extends MsExecListener {
+public class APISingleResultListener implements MsExecListener {
     private ApiExecutionQueueService apiExecutionQueueService;
 
+    /**
+     * Parameter initialization method
+     */
+    @Override
+    public void setupTest() {
+        LoggerUtil.info("Initializing listener");
+    }
+
     @Override
-    public void handleTeardownTest(ResultDTO dto, Map<String, Object> kafkaConfig) {
+    public void handleTeardownTest(List<SampleResult> results, ResultDTO dto, Map<String, Object> kafkaConfig) {
         LoggerUtil.info("Received execution results, start processing report [" + dto.getReportId() + "], resource [" + dto.getTestId() + "]");
         dto.setConsole(FixedCapacityUtils.getJmeterLogger(dto.getReportId()));
+        JMeterBase.resultFormatting(results, dto);
         CommonBeanFactory.getBean(TestResultService.class).saveResults(dto);
     }
 
@@ -50,6 +62,10 @@ public class APISingleResultListener extends MsExecListener {
             }
         } catch (Exception e) {
             LoggerUtil.error(e);
+        } finally {
+            if (FixedCapacityUtils.jmeterLogTask.containsKey(dto.getReportId())) {
+                FixedCapacityUtils.jmeterLogTask.remove(dto.getReportId());
+            }
         }
     }
 }

JMeterService.java

@@ -11,6 +11,7 @@ import io.metersphere.commons.constants.ApiRunMode;
 import io.metersphere.commons.utils.CommonBeanFactory;
 import io.metersphere.config.JmeterProperties;
 import io.metersphere.config.KafkaConfig;
+import io.metersphere.constants.BackendListenerConstants;
 import io.metersphere.constants.RunModeConstants;
 import io.metersphere.dto.JmeterRunRequestDTO;
 import io.metersphere.dto.JvmInfoDTO;
@@ -22,9 +23,11 @@ import io.metersphere.performance.engine.EngineFactory;
 import io.metersphere.utils.LoggerUtil;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.collections.MapUtils;
+import org.apache.commons.lang3.ArrayUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.jmeter.save.SaveService;
 import org.apache.jmeter.testelement.TestElement;
+import org.apache.jmeter.threads.ThreadGroup;
 import org.apache.jmeter.util.JMeterUtils;
 import org.apache.jorphan.collections.HashTree;
 import org.springframework.context.i18n.LocaleContextHolder;
@@ -72,12 +75,25 @@ public class JMeterService {
         }
     }
 
+    /**
+     * Add a debug listener
+     *
+     * @param testId
+     * @param testPlan
+     */
     private void addDebugListener(String testId, HashTree testPlan) {
         MsDebugListener resultCollector = new MsDebugListener();
         resultCollector.setName(testId);
         resultCollector.setProperty(TestElement.TEST_CLASS, MsDebugListener.class.getName());
         resultCollector.setProperty(TestElement.GUI_CLASS, SaveService.aliasToClass("ViewResultsFullVisualizer"));
         resultCollector.setEnabled(true);
+        // Mark the thread group with the DEBUG flag
+        HashTree test = ArrayUtils.isNotEmpty(testPlan.getArray()) ? testPlan.getTree(testPlan.getArray()[0]) : null;
+        if (test != null && ArrayUtils.isNotEmpty(test.getArray()) && test.getArray()[0] instanceof ThreadGroup) {
+            ThreadGroup group = (ThreadGroup) test.getArray()[0];
+            group.setProperty(BackendListenerConstants.MS_DEBUG.name(), true);
+        }
         testPlan.add(testPlan.getArray()[0], resultCollector);
     }
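For orientation, the MS_DEBUG property written onto the ThreadGroup above can be read back through JMeter's standard property API. The reading side is not part of this commit; the helper class and method below are hypothetical, a minimal sketch assuming only the BackendListenerConstants enum and the ThreadGroup type used in this hunk.

// Hypothetical helper, not part of this commit: shows how a component could
// detect the debug flag that addDebugListener sets on the ThreadGroup.
import io.metersphere.constants.BackendListenerConstants;
import org.apache.jmeter.threads.ThreadGroup;

public final class DebugFlagCheck {
    private DebugFlagCheck() {
    }

    // True when the thread group was marked as a debug run.
    public static boolean isDebugRun(ThreadGroup group) {
        return group.getPropertyAsBoolean(BackendListenerConstants.MS_DEBUG.name(), false);
    }
}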

KafkaListenerTask.java

@@ -56,7 +56,9 @@ public class KafkaListenerTask implements Runnable {
                 if (testResult != null) {
                     if (testResult.getArbitraryData() != null && testResult.getArbitraryData().containsKey("TEST_END") && (Boolean) testResult.getArbitraryData().get("TEST_END")) {
                         resultDTOS.add(testResult);
-                    } else {
+                    }
+                    // Carries request results
+                    if (CollectionUtils.isNotEmpty(testResult.getRequestResults())) {
                         String key = RUN_MODE_MAP.get(testResult.getRunMode());
                         if (assortMap.containsKey(key)) {
                             assortMap.get(key).add(testResult);
@@ -68,6 +70,7 @@ public class KafkaListenerTask implements Runnable {
                     }
                 }
             });
+
             if (MapUtils.isNotEmpty(assortMap)) {
                 LoggerUtil.info("KAFKA consumption: start storing execution results");
                 testResultService.batchSaveResults(assortMap);

GroovyLoadJarService.java (renamed to MsGroovyLoadJarService.java)

@@ -4,6 +4,7 @@ import groovy.lang.GroovyClassLoader;
 import io.metersphere.base.domain.JarConfig;
 import io.metersphere.commons.utils.CommonBeanFactory;
 import io.metersphere.commons.utils.LogUtil;
+import io.metersphere.jmeter.LoadJarService;
 import io.metersphere.service.JarConfigService;
 import org.springframework.stereotype.Service;
 
@@ -11,7 +12,7 @@ import java.io.File;
 import java.util.List;
 
 @Service
-public class GroovyLoadJarService {
+public class MsGroovyLoadJarService implements LoadJarService {
     /**
      * Groovy uses its own class loader,
      * so the jar packages are loaded with the Groovy class loader before the script is executed

MsKafkaListener.java

@@ -51,9 +51,9 @@ public class MsKafkaListener {
     }};
 
     // Minimum number of threads maintained by the pool
-    private final static int CORE_POOL_SIZE = 50;
+    private final static int CORE_POOL_SIZE = 20;
     // Maximum number of threads maintained by the pool
-    private final static int MAX_POOL_SIZE = 50;
+    private final static int MAX_POOL_SIZE = 20;
     // Idle time allowed for threads maintained by the pool
     private final static int KEEP_ALIVE_TIME = 1;
     // Size of the buffer queue used by the thread pool
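The hunk above only resizes the pool constants. As a minimal sketch of how such constants are typically wired into a java.util.concurrent.ThreadPoolExecutor: the queue capacity, rejection policy, and class name below are assumptions for illustration and are not taken from this commit.

// Illustrative only: typical wiring of the pool-sizing constants shown above.
// QUEUE_SIZE and the CallerRunsPolicy choice are assumptions, not from this diff.
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class ConsumerPoolSketch {
    private final static int CORE_POOL_SIZE = 20;
    private final static int MAX_POOL_SIZE = 20;
    private final static int KEEP_ALIVE_TIME = 1; // idle keep-alive, in seconds here
    private final static int QUEUE_SIZE = 1000;   // assumed buffer queue size

    private final ThreadPoolExecutor executor = new ThreadPoolExecutor(
            CORE_POOL_SIZE, MAX_POOL_SIZE,
            KEEP_ALIVE_TIME, TimeUnit.SECONDS,
            new ArrayBlockingQueue<>(QUEUE_SIZE),
            // When the queue is full, run the task on the caller thread so the
            // consumer slows down instead of dropping result messages.
            new ThreadPoolExecutor.CallerRunsPolicy());

    public void submit(Runnable task) {
        executor.execute(task);
    }
}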