feat(api-test): rerun logic for batch execution of API and scenario cases

--task=1016918 --user=陈建星 Rerun on failure https://www.tapd.cn/55049933/s/1609836
AgAngle 2024-11-18 17:58:21 +08:00 committed by Craftsman
parent 54269d69b4
commit 0731311e54
11 changed files with 532 additions and 10 deletions


@@ -81,6 +81,15 @@ public class ExecTask implements Serializable {
@NotNull(message = "{exec_task.parallel.not_blank}", groups = {Created.class})
private Boolean parallel;
@Schema(description = "Environment ID for batch case execution")
private String environmentId;
@Schema(description = "Resource pool ID")
private String poolId;
@Schema(description = "Whether an environment group is used")
private Boolean envGrouped;
private static final long serialVersionUID = 1L;
public enum Column {
@@ -101,7 +110,10 @@ public class ExecTask implements Serializable {
startTime("start_time", "startTime", "BIGINT", false),
endTime("end_time", "endTime", "BIGINT", false),
deleted("deleted", "deleted", "BIT", false),
parallel("parallel", "parallel", "BIT", false);
parallel("parallel", "parallel", "BIT", false),
environmentId("environment_id", "environmentId", "VARCHAR", false),
poolId("pool_id", "poolId", "VARCHAR", false),
envGrouped("env_grouped", "envGrouped", "BIT", false);
private static final String BEGINNING_DELIMITER = "`";


@@ -1283,6 +1283,206 @@ public class ExecTaskExample {
addCriterion("parallel not between", value1, value2, "parallel");
return (Criteria) this;
}
public Criteria andEnvironmentIdIsNull() {
addCriterion("environment_id is null");
return (Criteria) this;
}
public Criteria andEnvironmentIdIsNotNull() {
addCriterion("environment_id is not null");
return (Criteria) this;
}
public Criteria andEnvironmentIdEqualTo(String value) {
addCriterion("environment_id =", value, "environmentId");
return (Criteria) this;
}
public Criteria andEnvironmentIdNotEqualTo(String value) {
addCriterion("environment_id <>", value, "environmentId");
return (Criteria) this;
}
public Criteria andEnvironmentIdGreaterThan(String value) {
addCriterion("environment_id >", value, "environmentId");
return (Criteria) this;
}
public Criteria andEnvironmentIdGreaterThanOrEqualTo(String value) {
addCriterion("environment_id >=", value, "environmentId");
return (Criteria) this;
}
public Criteria andEnvironmentIdLessThan(String value) {
addCriterion("environment_id <", value, "environmentId");
return (Criteria) this;
}
public Criteria andEnvironmentIdLessThanOrEqualTo(String value) {
addCriterion("environment_id <=", value, "environmentId");
return (Criteria) this;
}
public Criteria andEnvironmentIdLike(String value) {
addCriterion("environment_id like", value, "environmentId");
return (Criteria) this;
}
public Criteria andEnvironmentIdNotLike(String value) {
addCriterion("environment_id not like", value, "environmentId");
return (Criteria) this;
}
public Criteria andEnvironmentIdIn(List<String> values) {
addCriterion("environment_id in", values, "environmentId");
return (Criteria) this;
}
public Criteria andEnvironmentIdNotIn(List<String> values) {
addCriterion("environment_id not in", values, "environmentId");
return (Criteria) this;
}
public Criteria andEnvironmentIdBetween(String value1, String value2) {
addCriterion("environment_id between", value1, value2, "environmentId");
return (Criteria) this;
}
public Criteria andEnvironmentIdNotBetween(String value1, String value2) {
addCriterion("environment_id not between", value1, value2, "environmentId");
return (Criteria) this;
}
public Criteria andPoolIdIsNull() {
addCriterion("pool_id is null");
return (Criteria) this;
}
public Criteria andPoolIdIsNotNull() {
addCriterion("pool_id is not null");
return (Criteria) this;
}
public Criteria andPoolIdEqualTo(String value) {
addCriterion("pool_id =", value, "poolId");
return (Criteria) this;
}
public Criteria andPoolIdNotEqualTo(String value) {
addCriterion("pool_id <>", value, "poolId");
return (Criteria) this;
}
public Criteria andPoolIdGreaterThan(String value) {
addCriterion("pool_id >", value, "poolId");
return (Criteria) this;
}
public Criteria andPoolIdGreaterThanOrEqualTo(String value) {
addCriterion("pool_id >=", value, "poolId");
return (Criteria) this;
}
public Criteria andPoolIdLessThan(String value) {
addCriterion("pool_id <", value, "poolId");
return (Criteria) this;
}
public Criteria andPoolIdLessThanOrEqualTo(String value) {
addCriterion("pool_id <=", value, "poolId");
return (Criteria) this;
}
public Criteria andPoolIdLike(String value) {
addCriterion("pool_id like", value, "poolId");
return (Criteria) this;
}
public Criteria andPoolIdNotLike(String value) {
addCriterion("pool_id not like", value, "poolId");
return (Criteria) this;
}
public Criteria andPoolIdIn(List<String> values) {
addCriterion("pool_id in", values, "poolId");
return (Criteria) this;
}
public Criteria andPoolIdNotIn(List<String> values) {
addCriterion("pool_id not in", values, "poolId");
return (Criteria) this;
}
public Criteria andPoolIdBetween(String value1, String value2) {
addCriterion("pool_id between", value1, value2, "poolId");
return (Criteria) this;
}
public Criteria andPoolIdNotBetween(String value1, String value2) {
addCriterion("pool_id not between", value1, value2, "poolId");
return (Criteria) this;
}
public Criteria andEnvGroupedIsNull() {
addCriterion("env_grouped is null");
return (Criteria) this;
}
public Criteria andEnvGroupedIsNotNull() {
addCriterion("env_grouped is not null");
return (Criteria) this;
}
public Criteria andEnvGroupedEqualTo(Boolean value) {
addCriterion("env_grouped =", value, "envGrouped");
return (Criteria) this;
}
public Criteria andEnvGroupedNotEqualTo(Boolean value) {
addCriterion("env_grouped <>", value, "envGrouped");
return (Criteria) this;
}
public Criteria andEnvGroupedGreaterThan(Boolean value) {
addCriterion("env_grouped >", value, "envGrouped");
return (Criteria) this;
}
public Criteria andEnvGroupedGreaterThanOrEqualTo(Boolean value) {
addCriterion("env_grouped >=", value, "envGrouped");
return (Criteria) this;
}
public Criteria andEnvGroupedLessThan(Boolean value) {
addCriterion("env_grouped <", value, "envGrouped");
return (Criteria) this;
}
public Criteria andEnvGroupedLessThanOrEqualTo(Boolean value) {
addCriterion("env_grouped <=", value, "envGrouped");
return (Criteria) this;
}
public Criteria andEnvGroupedIn(List<Boolean> values) {
addCriterion("env_grouped in", values, "envGrouped");
return (Criteria) this;
}
public Criteria andEnvGroupedNotIn(List<Boolean> values) {
addCriterion("env_grouped not in", values, "envGrouped");
return (Criteria) this;
}
public Criteria andEnvGroupedBetween(Boolean value1, Boolean value2) {
addCriterion("env_grouped between", value1, value2, "envGrouped");
return (Criteria) this;
}
public Criteria andEnvGroupedNotBetween(Boolean value1, Boolean value2) {
addCriterion("env_grouped not between", value1, value2, "envGrouped");
return (Criteria) this;
}
}
public static class Criteria extends GeneratedCriteria {
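
For orientation, a minimal usage sketch of the newly generated criteria (illustrative only; it assumes a Spring-injected ExecTaskMapper and a made-up environment ID, neither of which appears in this diff):

ExecTaskExample example = new ExecTaskExample();
example.createCriteria()
        .andEnvironmentIdEqualTo("env-1001")   // hypothetical environment ID
        .andEnvGroupedEqualTo(false)
        .andDeletedEqualTo(false);
List<ExecTask> envTasks = execTaskMapper.selectByExample(example);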


@@ -20,6 +20,9 @@
<result column="end_time" jdbcType="BIGINT" property="endTime" />
<result column="deleted" jdbcType="BIT" property="deleted" />
<result column="parallel" jdbcType="BIT" property="parallel" />
<result column="environment_id" jdbcType="VARCHAR" property="environmentId" />
<result column="pool_id" jdbcType="VARCHAR" property="poolId" />
<result column="env_grouped" jdbcType="BIT" property="envGrouped" />
</resultMap>
<sql id="Example_Where_Clause">
<where>
@@ -82,7 +85,7 @@
<sql id="Base_Column_List">
id, num, task_name, `status`, case_count, `result`, task_type, resource_id, trigger_mode,
project_id, organization_id, integrated, create_time, create_user, start_time, end_time,
deleted, parallel
deleted, parallel, environment_id, pool_id, env_grouped
</sql>
<select id="selectByExample" parameterType="io.metersphere.system.domain.ExecTaskExample" resultMap="BaseResultMap">
select
@@ -120,13 +123,17 @@
task_type, resource_id, trigger_mode,
project_id, organization_id, integrated,
create_time, create_user, start_time,
end_time, deleted, parallel)
end_time, deleted, parallel,
environment_id, pool_id, env_grouped
)
values (#{id,jdbcType=VARCHAR}, #{num,jdbcType=BIGINT}, #{taskName,jdbcType=VARCHAR},
#{status,jdbcType=VARCHAR}, #{caseCount,jdbcType=BIGINT}, #{result,jdbcType=VARCHAR},
#{taskType,jdbcType=VARCHAR}, #{resourceId,jdbcType=VARCHAR}, #{triggerMode,jdbcType=VARCHAR},
#{projectId,jdbcType=VARCHAR}, #{organizationId,jdbcType=VARCHAR}, #{integrated,jdbcType=BIT},
#{createTime,jdbcType=BIGINT}, #{createUser,jdbcType=VARCHAR}, #{startTime,jdbcType=BIGINT},
#{endTime,jdbcType=BIGINT}, #{deleted,jdbcType=BIT}, #{parallel,jdbcType=BIT})
#{endTime,jdbcType=BIGINT}, #{deleted,jdbcType=BIT}, #{parallel,jdbcType=BIT},
#{environmentId,jdbcType=VARCHAR}, #{poolId,jdbcType=VARCHAR}, #{envGrouped,jdbcType=BIT}
)
</insert>
<insert id="insertSelective" parameterType="io.metersphere.system.domain.ExecTask">
insert into exec_task
@@ -185,6 +192,15 @@
<if test="parallel != null">
parallel,
</if>
<if test="environmentId != null">
environment_id,
</if>
<if test="poolId != null">
pool_id,
</if>
<if test="envGrouped != null">
env_grouped,
</if>
</trim>
<trim prefix="values (" suffix=")" suffixOverrides=",">
<if test="id != null">
@@ -241,6 +257,15 @@
<if test="parallel != null">
#{parallel,jdbcType=BIT},
</if>
<if test="environmentId != null">
#{environmentId,jdbcType=VARCHAR},
</if>
<if test="poolId != null">
#{poolId,jdbcType=VARCHAR},
</if>
<if test="envGrouped != null">
#{envGrouped,jdbcType=BIT},
</if>
</trim>
</insert>
<select id="countByExample" parameterType="io.metersphere.system.domain.ExecTaskExample" resultType="java.lang.Long">
@@ -306,6 +331,15 @@
<if test="record.parallel != null">
parallel = #{record.parallel,jdbcType=BIT},
</if>
<if test="record.environmentId != null">
environment_id = #{record.environmentId,jdbcType=VARCHAR},
</if>
<if test="record.poolId != null">
pool_id = #{record.poolId,jdbcType=VARCHAR},
</if>
<if test="record.envGrouped != null">
env_grouped = #{record.envGrouped,jdbcType=BIT},
</if>
</set>
<if test="_parameter != null">
<include refid="Update_By_Example_Where_Clause" />
@@ -330,7 +364,10 @@
start_time = #{record.startTime,jdbcType=BIGINT},
end_time = #{record.endTime,jdbcType=BIGINT},
deleted = #{record.deleted,jdbcType=BIT},
parallel = #{record.parallel,jdbcType=BIT}
parallel = #{record.parallel,jdbcType=BIT},
environment_id = #{record.environmentId,jdbcType=VARCHAR},
pool_id = #{record.poolId,jdbcType=VARCHAR},
env_grouped = #{record.envGrouped,jdbcType=BIT}
<if test="_parameter != null">
<include refid="Update_By_Example_Where_Clause" />
</if>
@@ -389,6 +426,15 @@
<if test="parallel != null">
parallel = #{parallel,jdbcType=BIT},
</if>
<if test="environmentId != null">
environment_id = #{environmentId,jdbcType=VARCHAR},
</if>
<if test="poolId != null">
pool_id = #{poolId,jdbcType=VARCHAR},
</if>
<if test="envGrouped != null">
env_grouped = #{envGrouped,jdbcType=BIT},
</if>
</set>
where id = #{id,jdbcType=VARCHAR}
</update>
@@ -410,14 +456,17 @@
start_time = #{startTime,jdbcType=BIGINT},
end_time = #{endTime,jdbcType=BIGINT},
deleted = #{deleted,jdbcType=BIT},
parallel = #{parallel,jdbcType=BIT}
parallel = #{parallel,jdbcType=BIT},
environment_id = #{environmentId,jdbcType=VARCHAR},
pool_id = #{poolId,jdbcType=VARCHAR},
env_grouped = #{envGrouped,jdbcType=BIT}
where id = #{id,jdbcType=VARCHAR}
</update>
<insert id="batchInsert" parameterType="map">
insert into exec_task
(id, num, task_name, `status`, case_count, `result`, task_type, resource_id, trigger_mode,
project_id, organization_id, integrated, create_time, create_user, start_time,
end_time, deleted, parallel)
end_time, deleted, parallel, environment_id, pool_id, env_grouped)
values
<foreach collection="list" item="item" separator=",">
(#{item.id,jdbcType=VARCHAR}, #{item.num,jdbcType=BIGINT}, #{item.taskName,jdbcType=VARCHAR},
@@ -425,7 +474,8 @@
#{item.taskType,jdbcType=VARCHAR}, #{item.resourceId,jdbcType=VARCHAR}, #{item.triggerMode,jdbcType=VARCHAR},
#{item.projectId,jdbcType=VARCHAR}, #{item.organizationId,jdbcType=VARCHAR}, #{item.integrated,jdbcType=BIT},
#{item.createTime,jdbcType=BIGINT}, #{item.createUser,jdbcType=VARCHAR}, #{item.startTime,jdbcType=BIGINT},
#{item.endTime,jdbcType=BIGINT}, #{item.deleted,jdbcType=BIT}, #{item.parallel,jdbcType=BIT}
#{item.endTime,jdbcType=BIGINT}, #{item.deleted,jdbcType=BIT}, #{item.parallel,jdbcType=BIT},
#{item.environmentId,jdbcType=VARCHAR}, #{item.poolId,jdbcType=VARCHAR}, #{item.envGrouped,jdbcType=BIT}
)
</foreach>
</insert>
@@ -493,6 +543,15 @@
<if test="'parallel'.toString() == column.value">
#{item.parallel,jdbcType=BIT}
</if>
<if test="'environment_id'.toString() == column.value">
#{item.environmentId,jdbcType=VARCHAR}
</if>
<if test="'pool_id'.toString() == column.value">
#{item.poolId,jdbcType=VARCHAR}
</if>
<if test="'env_grouped'.toString() == column.value">
#{item.envGrouped,jdbcType=BIT}
</if>
</foreach>
)
</foreach>


@@ -88,5 +88,10 @@ ALTER TABLE exec_task_item ADD rerun bit(1) DEFAULT 0 NULL COMMENT 'Whether the item is a rerun';
-- Add serial/parallel execution flag to the task
ALTER TABLE exec_task ADD parallel bit(1) DEFAULT 1 NOT NULL COMMENT 'Whether execution is parallel';
-- Record environment and related batch-execution info on the task
ALTER TABLE exec_task ADD environment_id varchar(50) NULL COMMENT 'Environment ID for batch case execution';
ALTER TABLE exec_task ADD env_grouped bit(1) DEFAULT 0 NULL COMMENT 'Whether an environment group is used';
ALTER TABLE exec_task ADD pool_id varchar(50) NULL COMMENT 'Resource pool ID';
-- set innodb lock wait timeout to default
SET SESSION innodb_lock_wait_timeout = DEFAULT;


@@ -1,6 +1,9 @@
package io.metersphere.api.service;
import io.metersphere.api.domain.ApiReportRelateTask;
import io.metersphere.api.domain.ApiReportRelateTaskExample;
import io.metersphere.api.domain.ApiScenarioReport;
import io.metersphere.api.mapper.ApiReportRelateTaskMapper;
import io.metersphere.api.service.queue.ApiExecutionQueueService;
import io.metersphere.sdk.constants.ApiBatchRunMode;
import io.metersphere.sdk.constants.CommonConstants;
@@ -18,7 +21,9 @@ import io.metersphere.system.domain.ExecTask;
import io.metersphere.system.domain.ExecTaskItem;
import io.metersphere.system.mapper.ExecTaskMapper;
import io.metersphere.system.mapper.ExtExecTaskItemMapper;
import io.metersphere.system.uid.IDGenerator;
import jakarta.annotation.Resource;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.stereotype.Service;
@@ -40,6 +45,8 @@ public class ApiBatchRunBaseService {
private ApiExecuteService apiExecuteService;
@Resource
private ExecTaskMapper execTaskMapper;
@Resource
private ApiReportRelateTaskMapper apiReportRelateTaskMapper;
public static final int BATCH_TASK_ITEM_SIZE = 500;
@@ -283,4 +290,15 @@ public class ApiBatchRunBaseService {
SubListUtils.dealForSubList(execTaskItems, ApiBatchRunBaseService.BATCH_TASK_ITEM_SIZE,
subExecTaskItems -> initExecutionQueueDetails(queue.getQueueId(), subExecTaskItems));
}
public String getIntegratedReportId(ExecTask execTask) {
ApiReportRelateTaskExample example = new ApiReportRelateTaskExample();
example.createCriteria().andTaskResourceIdEqualTo(execTask.getId());
List<ApiReportRelateTask> apiReportRelateTasks = apiReportRelateTaskMapper.selectByExample(example);
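// Default to a fresh report ID; reuse the report already linked to this task (from the first run) when one exists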
String reportId = IDGenerator.nextStr();
if (CollectionUtils.isNotEmpty(apiReportRelateTasks)) {
reportId = apiReportRelateTasks.getFirst().getReportId();
}
return reportId;
}
}


@@ -18,9 +18,10 @@ import io.metersphere.sdk.dto.queue.ExecutionQueueDetail;
import io.metersphere.sdk.util.*;
import io.metersphere.system.domain.ExecTask;
import io.metersphere.system.domain.ExecTaskItem;
import io.metersphere.system.mapper.ExtExecTaskItemMapper;
import io.metersphere.system.service.BaseTaskHubService;
import io.metersphere.system.uid.IDGenerator;
import jakarta.annotation.Resource;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@@ -66,6 +67,8 @@ public class ApiTestCaseBatchRunService {
private ProjectMapper projectMapper;
@Resource
private BaseTaskHubService baseTaskHubService;
@Resource
private ExtExecTaskItemMapper extExecTaskItemMapper;
public static final int TASK_BATCH_SIZE = 600;
@@ -159,6 +162,10 @@
execTask.setTriggerMode(TaskTriggerMode.BATCH.name());
execTask.setTaskType(ExecTaskType.API_CASE_BATCH.name());
execTask.setIntegrated(runModeConfig.getIntegratedReport());
execTask.setPoolId(runModeConfig.getPoolId());
execTask.setParallel(StringUtils.equals(runModeConfig.getRunMode(), ApiBatchRunMode.PARALLEL.name()));
execTask.setEnvGrouped(runModeConfig.getGrouped());
execTask.setEnvironmentId(runModeConfig.getEnvironmentId());
baseTaskHubService.insertExecTask(execTask);
return execTask;
}
@@ -267,6 +274,21 @@
return runModeConfig;
}
public ApiRunModeConfigDTO getRunModeConfig(ExecTask execTask) {
ApiRunModeConfigDTO runModeConfig = BeanUtils.copyBean(new ApiRunModeConfigDTO(), execTask);
runModeConfig.setRunMode(BooleanUtils.isTrue(execTask.getParallel()) ? ApiBatchRunMode.PARALLEL.name() : ApiBatchRunMode.SERIAL.name());
runModeConfig.setPoolId(execTask.getPoolId());
runModeConfig.setEnvironmentId(execTask.getEnvironmentId());
runModeConfig.setGrouped(execTask.getEnvGrouped());
runModeConfig.setIntegratedReport(execTask.getIntegrated());
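// An integrated run collects all results in a single collection report, so the rerun reuses the original report instead of creating a new one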
if (BooleanUtils.isTrue(execTask.getIntegrated())) {
runModeConfig.setCollectionReport(new CollectionReportDTO());
runModeConfig.getCollectionReport().setReportId(apiBatchRunBaseService.getIntegratedReportId(execTask));
runModeConfig.getCollectionReport().setReportName(execTask.getTaskName());
}
return runModeConfig;
}
/**
* Pre-generate the execution reports for the cases
*
@@ -356,7 +378,7 @@
String integratedReportId = null;
if (runModeConfig.isIntegratedReport()) {
integratedReportId = runModeConfig.getCollectionReport().getReportId() + IDGenerator.nextStr();
integratedReportId = runModeConfig.getCollectionReport().getReportId();
}
if (apiTestCase == null) {
@@ -368,6 +390,7 @@
taskRequest.getTaskInfo().setTaskId(queue.getTaskId());
taskRequest.getTaskInfo().setQueueId(queue.getQueueId());
taskRequest.getTaskInfo().setUserId(queue.getUserId());
taskRequest.getTaskInfo().setRerun(queue.getRerun());
taskRequest.getTaskItem().setRequestCount(1L);
taskRequest.getTaskItem().setId(taskItemId);
@@ -451,4 +474,55 @@
apiReportMapper.updateByPrimaryKeySelective(report);
}
}
public void rerun(ExecTask execTask, String userId) {
if (BooleanUtils.isTrue(execTask.getParallel())) {
parallelRerunExecute(execTask, userId);
} else {
serialRerunExecute(execTask, userId);
}
}
private void serialRerunExecute(ExecTask execTask, String userId) {
ApiRunModeConfigDTO runModeConfig = getRunModeConfig(execTask);
List<ExecTaskItem> execTaskItems = extExecTaskItemMapper.selectIdAndResourceIdByTaskId(execTask.getId());
// Initialize the execution queue
ExecutionQueue queue = apiBatchRunBaseService.getExecutionQueue(runModeConfig, ApiExecuteResourceType.API_CASE.name(), execTask.getId(), userId);
queue.setQueueId(execTask.getId());
queue.setRerun(true);
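// Mark the queue as a rerun; the flag is later copied onto each task request via taskInfo.setRerun(queue.getRerun())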
apiExecutionQueueService.insertQueue(queue);
// Initialize the queue items
apiBatchRunBaseService.initExecutionQueueDetails(queue.getQueueId(), execTaskItems);
// Execute the first task
ExecutionQueueDetail nextDetail = apiExecutionQueueService.getNextDetail(queue.getQueueId());
executeNextTask(queue, nextDetail);
}
/**
* Parallel rerun
*/
public void parallelRerunExecute(ExecTask execTask, String userId) {
String projectId = execTask.getProjectId();
List<ExecTaskItem> execTaskItems = extExecTaskItemMapper.selectIdAndResourceIdByTaskId(execTask.getId());
ApiRunModeConfigDTO runModeConfig = getRunModeConfig(execTask);
// Record the mapping between cases and task items
Map<String, String> resourceExecTaskItemMap = new TreeMap<>();
execTaskItems.forEach(item -> resourceExecTaskItemMap.put(item.getResourceId(), item.getId()));
TaskBatchRequestDTO taskRequest = getTaskBatchRequestDTO(projectId, runModeConfig);
taskRequest.getTaskInfo().setTaskId(execTask.getId());
taskRequest.getTaskInfo().setSetId(execTask.getId());
taskRequest.getTaskInfo().setUserId(userId);
taskRequest.getTaskInfo().setRerun(true);
// Record task items so the overall execution status can be tracked
apiExecutionSetService.initSet(execTask.getId(), new ArrayList<>(resourceExecTaskItemMap.values()));
apiBatchRunBaseService.parallelBatchExecute(taskRequest, runModeConfig, resourceExecTaskItemMap);
}
}


@@ -6,10 +6,12 @@ import io.metersphere.api.dto.scenario.ApiScenarioBatchRunRequest;
import io.metersphere.api.dto.scenario.ApiScenarioDetail;
import io.metersphere.api.mapper.ApiScenarioMapper;
import io.metersphere.api.mapper.ApiScenarioReportMapper;
import io.metersphere.api.mapper.ApiScenarioReportStepMapper;
import io.metersphere.api.mapper.ExtApiScenarioMapper;
import io.metersphere.api.service.ApiBatchRunBaseService;
import io.metersphere.api.service.ApiCommonService;
import io.metersphere.api.service.ApiExecuteService;
import io.metersphere.api.service.definition.ApiTestCaseBatchRunService;
import io.metersphere.api.service.queue.ApiExecutionQueueService;
import io.metersphere.api.service.queue.ApiExecutionSetService;
import io.metersphere.project.domain.Project;
@@ -21,6 +23,7 @@ import io.metersphere.sdk.dto.queue.ExecutionQueueDetail;
import io.metersphere.sdk.util.*;
import io.metersphere.system.domain.ExecTask;
import io.metersphere.system.domain.ExecTaskItem;
import io.metersphere.system.mapper.ExtExecTaskItemMapper;
import io.metersphere.system.service.BaseTaskHubService;
import io.metersphere.system.uid.IDGenerator;
import jakarta.annotation.Resource;
@@ -67,6 +70,12 @@ public class ApiScenarioBatchRunService {
private ApiCommonService apiCommonService;
@Resource
private BaseTaskHubService baseTaskHubService;
@Resource
private ApiTestCaseBatchRunService apiTestCaseBatchRunService;
@Resource
private ExtExecTaskItemMapper extExecTaskItemMapper;
@Resource
private ApiScenarioReportStepMapper apiScenarioReportStepMapper;
public static final int TASK_BATCH_SIZE = 600;
@@ -225,6 +234,10 @@
} else {
execTask.setTaskName(Translator.get("api_scenario_batch_task_name"));
}
execTask.setPoolId(runModeConfig.getPoolId());
execTask.setParallel(StringUtils.equals(runModeConfig.getRunMode(), ApiBatchRunMode.PARALLEL.name()));
execTask.setEnvGrouped(runModeConfig.getGrouped());
execTask.setEnvironmentId(runModeConfig.getEnvironmentId());
execTask.setOrganizationId(project.getOrganizationId());
execTask.setTriggerMode(TaskTriggerMode.BATCH.name());
execTask.setTaskType(ExecTaskType.API_SCENARIO_BATCH.name());
@@ -336,6 +349,7 @@
taskRequest.getTaskInfo().setQueueId(queue.getQueueId());
taskRequest.getTaskInfo().setUserId(queue.getUserId());
taskRequest.getTaskInfo().setTaskId(queue.getTaskId());
taskRequest.getTaskInfo().setRerun(queue.getRerun());
apiExecuteService.execute(taskRequest);
}
@@ -432,4 +446,74 @@
LogUtils.error("Stopped on failure, failed to add the remaining report steps:", e);
}
}
public void rerun(ExecTask execTask, String userId) {
if (BooleanUtils.isTrue(execTask.getParallel())) {
parallelRerunExecute(execTask, userId);
} else {
serialRerunExecute(execTask, userId);
}
}
private void serialRerunExecute(ExecTask execTask, String userId) {
ApiRunModeConfigDTO runModeConfig = apiTestCaseBatchRunService.getRunModeConfig(execTask);
List<ExecTaskItem> execTaskItems = extExecTaskItemMapper.selectIdAndResourceIdByTaskId(execTask.getId());
// Delete the step results from the previous run
deleteRerunIntegratedStepResult(execTask, execTaskItems, runModeConfig);
// Initialize the execution queue
ExecutionQueue queue = apiBatchRunBaseService.getExecutionQueue(runModeConfig, ApiExecuteResourceType.API_SCENARIO.name(), execTask.getId(), userId);
queue.setQueueId(execTask.getId());
queue.setRerun(true);
apiExecutionQueueService.insertQueue(queue);
// Initialize the queue items
apiBatchRunBaseService.initExecutionQueueDetails(queue.getQueueId(), execTaskItems);
// Execute the first task
ExecutionQueueDetail nextDetail = apiExecutionQueueService.getNextDetail(queue.getQueueId());
executeNextTask(queue, nextDetail);
}
/**
* Parallel rerun
*/
public void parallelRerunExecute(ExecTask execTask, String userId) {
String projectId = execTask.getProjectId();
List<ExecTaskItem> execTaskItems = extExecTaskItemMapper.selectIdAndResourceIdByTaskId(execTask.getId());
ApiRunModeConfigDTO runModeConfig = apiTestCaseBatchRunService.getRunModeConfig(execTask);
// Delete the step results from the previous run
deleteRerunIntegratedStepResult(execTask, execTaskItems, runModeConfig);
// Record the mapping between scenarios and task items
Map<String, String> resourceExecTaskItemMap = new TreeMap<>();
execTaskItems.forEach(item -> resourceExecTaskItemMap.put(item.getResourceId(), item.getId()));
TaskBatchRequestDTO taskRequest = getTaskBatchRequestDTO(projectId, runModeConfig);
taskRequest.getTaskInfo().setTaskId(execTask.getId());
taskRequest.getTaskInfo().setSetId(execTask.getId());
taskRequest.getTaskInfo().setUserId(userId);
taskRequest.getTaskInfo().setRerun(true);
// Record task items so the overall execution status can be tracked
apiExecutionSetService.initSet(execTask.getId(), new ArrayList<>(resourceExecTaskItemMap.values()));
apiBatchRunBaseService.parallelBatchExecute(taskRequest, runModeConfig, resourceExecTaskItemMap);
}
private void deleteRerunIntegratedStepResult(ExecTask execTask, List<ExecTaskItem> execTaskItems, ApiRunModeConfigDTO runModeConfig) {
if (BooleanUtils.isTrue(execTask.getIntegrated())) {
SubListUtils.dealForSubList(execTaskItems, TASK_BATCH_SIZE, subItems -> {
// Delete child steps so they are re-executed
ApiScenarioReportStepExample stepExample = new ApiScenarioReportStepExample();
stepExample.createCriteria()
.andReportIdEqualTo(runModeConfig.getCollectionReport().getReportId())
.andParentIdIn(subItems.stream().map(ExecTaskItem::getResourceId).toList());
apiScenarioReportStepMapper.deleteByExample(stepExample);
});
}
}
}


@@ -1,5 +1,6 @@
package io.metersphere.system.mapper;
import io.metersphere.system.domain.ExecTask;
import io.metersphere.system.domain.ExecTaskItem;
import io.metersphere.system.dto.table.TableBatchProcessDTO;
import io.metersphere.system.dto.BatchExecTaskReportDTO;
@@ -70,4 +71,6 @@ public interface ExtExecTaskItemMapper {
void deleteRerunTaskItemReportRelation(@Param("taskId") String taskId);
Set<String> selectRerunCollectionIds(@Param("taskId") String taskId);
List<ExecTaskItem> selectIdAndResourceIdByTaskId(@Param("taskId") String taskId);
}


@@ -327,6 +327,11 @@
from exec_task_item
where task_id = #{taskId} and rerun = true and deleted = false;
</select>
<select id="selectIdAndResourceIdByTaskId" resultType="io.metersphere.system.domain.ExecTaskItem">
select id, resource_id
from exec_task_item
where task_id = #{taskId} and deleted = false
</select>
<update id="resetRerunTaskItem">
UPDATE exec_task_item


@@ -0,0 +1,31 @@
package io.metersphere.plan.service.rerun;
import io.metersphere.api.service.definition.ApiTestCaseBatchRunService;
import io.metersphere.sdk.constants.ExecTaskType;
import io.metersphere.system.domain.ExecTask;
import io.metersphere.system.invoker.TaskRerunServiceInvoker;
import io.metersphere.system.service.TaskRerunService;
import jakarta.annotation.Resource;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
* @Author: jianxing
* @CreateTime: 2024-02-06 20:47
*/
@Service
@Transactional(rollbackFor = Exception.class)
public class ApiCaseBatchRerunService implements TaskRerunService {
@Resource
private ApiTestCaseBatchRunService apiTestCaseBatchRunService;
public ApiCaseBatchRerunService() {
TaskRerunServiceInvoker.register(ExecTaskType.API_CASE_BATCH, this);
}
@Override
public void rerun(ExecTask execTask, String userId) {
apiTestCaseBatchRunService.rerun(execTask, userId);
}
}


@@ -0,0 +1,31 @@
package io.metersphere.plan.service.rerun;
import io.metersphere.api.service.scenario.ApiScenarioBatchRunService;
import io.metersphere.sdk.constants.ExecTaskType;
import io.metersphere.system.domain.ExecTask;
import io.metersphere.system.invoker.TaskRerunServiceInvoker;
import io.metersphere.system.service.TaskRerunService;
import jakarta.annotation.Resource;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
/**
* @Author: jianxing
* @CreateTime: 2024-02-06 20:47
*/
@Service
@Transactional(rollbackFor = Exception.class)
public class ApiScenarioBatchRerunService implements TaskRerunService {
@Resource
private ApiScenarioBatchRunService apiScenarioBatchRunService;
public ApiScenarioBatchRerunService() {
TaskRerunServiceInvoker.register(ExecTaskType.API_SCENARIO_BATCH, this);
}
@Override
public void rerun(ExecTask execTask, String userId) {
apiScenarioBatchRunService.rerun(execTask, userId);
}
}
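
For context, TaskRerunServiceInvoker itself is not part of this diff; the two services above only register themselves with it by task type in their constructors. A minimal sketch of the registry pattern this implies is shown below; only register(ExecTaskType, TaskRerunService) is visible in this commit, so the map field and the dispatching rerun method are assumptions:

package io.metersphere.system.invoker;

import io.metersphere.sdk.constants.ExecTaskType;
import io.metersphere.system.domain.ExecTask;
import io.metersphere.system.service.TaskRerunService;

import java.util.EnumMap;
import java.util.Map;

public class TaskRerunServiceInvoker {

    // Registry keyed by task type; each rerun service registers itself in its constructor
    private static final Map<ExecTaskType, TaskRerunService> SERVICES = new EnumMap<>(ExecTaskType.class);

    public static void register(ExecTaskType type, TaskRerunService service) {
        SERVICES.put(type, service);
    }

    // Assumed dispatch: resolve the service by the task's type and delegate the rerun to it
    public static void rerun(ExecTask execTask, String userId) {
        TaskRerunService service = SERVICES.get(ExecTaskType.valueOf(execTask.getTaskType()));
        if (service == null) {
            throw new IllegalArgumentException("No rerun service registered for task type: " + execTask.getTaskType());
        }
        service.rerun(execTask, userId);
    }
}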