feat(API testing): batch execution of API test cases

AgAngle 2024-03-13 11:55:17 +08:00 committed by Craftsman
parent e8a8ff5ef0
commit a9d680bacb
35 changed files with 732 additions and 251 deletions

View File

@ -75,9 +75,6 @@ public class ApiReport implements Serializable {
@Size(min = 1, max = 50, message = "{api_report.pool_id.length_range}", groups = {Created.class, Updated.class})
private String poolId;
@Schema(description = "版本fk")
private String versionId;
@Schema(description = "是否是集成报告", requiredMode = Schema.RequiredMode.REQUIRED)
@NotNull(message = "{api_report.integrated.not_blank}", groups = {Created.class})
private Boolean integrated;
@ -161,7 +158,6 @@ public class ApiReport implements Serializable {
triggerMode("trigger_mode", "triggerMode", "VARCHAR", false),
runMode("run_mode", "runMode", "VARCHAR", false),
poolId("pool_id", "poolId", "VARCHAR", false),
versionId("version_id", "versionId", "VARCHAR", false),
integrated("integrated", "integrated", "BIT", false),
projectId("project_id", "projectId", "VARCHAR", false),
environmentId("environment_id", "environmentId", "VARCHAR", false),

View File

@ -1164,76 +1164,6 @@ public class ApiReportExample {
return (Criteria) this;
}
public Criteria andVersionIdIsNull() {
addCriterion("version_id is null");
return (Criteria) this;
}
public Criteria andVersionIdIsNotNull() {
addCriterion("version_id is not null");
return (Criteria) this;
}
public Criteria andVersionIdEqualTo(String value) {
addCriterion("version_id =", value, "versionId");
return (Criteria) this;
}
public Criteria andVersionIdNotEqualTo(String value) {
addCriterion("version_id <>", value, "versionId");
return (Criteria) this;
}
public Criteria andVersionIdGreaterThan(String value) {
addCriterion("version_id >", value, "versionId");
return (Criteria) this;
}
public Criteria andVersionIdGreaterThanOrEqualTo(String value) {
addCriterion("version_id >=", value, "versionId");
return (Criteria) this;
}
public Criteria andVersionIdLessThan(String value) {
addCriterion("version_id <", value, "versionId");
return (Criteria) this;
}
public Criteria andVersionIdLessThanOrEqualTo(String value) {
addCriterion("version_id <=", value, "versionId");
return (Criteria) this;
}
public Criteria andVersionIdLike(String value) {
addCriterion("version_id like", value, "versionId");
return (Criteria) this;
}
public Criteria andVersionIdNotLike(String value) {
addCriterion("version_id not like", value, "versionId");
return (Criteria) this;
}
public Criteria andVersionIdIn(List<String> values) {
addCriterion("version_id in", values, "versionId");
return (Criteria) this;
}
public Criteria andVersionIdNotIn(List<String> values) {
addCriterion("version_id not in", values, "versionId");
return (Criteria) this;
}
public Criteria andVersionIdBetween(String value1, String value2) {
addCriterion("version_id between", value1, value2, "versionId");
return (Criteria) this;
}
public Criteria andVersionIdNotBetween(String value1, String value2) {
addCriterion("version_id not between", value1, value2, "versionId");
return (Criteria) this;
}
public Criteria andIntegratedIsNull() {
addCriterion("integrated is null");
return (Criteria) this;

View File

@ -18,7 +18,6 @@
<result column="trigger_mode" jdbcType="VARCHAR" property="triggerMode" />
<result column="run_mode" jdbcType="VARCHAR" property="runMode" />
<result column="pool_id" jdbcType="VARCHAR" property="poolId" />
<result column="version_id" jdbcType="VARCHAR" property="versionId" />
<result column="integrated" jdbcType="BIT" property="integrated" />
<result column="project_id" jdbcType="VARCHAR" property="projectId" />
<result column="environment_id" jdbcType="VARCHAR" property="environmentId" />
@ -96,10 +95,9 @@
<sql id="Base_Column_List">
id, `name`, test_plan_id, create_user, delete_time, delete_user, deleted, update_user,
update_time, start_time, end_time, request_duration, `status`, trigger_mode, run_mode,
pool_id, version_id, integrated, project_id, environment_id, error_count, fake_error_count,
pending_count, success_count, assertion_count, assertion_success_count, request_error_rate,
request_pending_rate, request_fake_error_rate, request_pass_rate, assertion_pass_rate,
script_identifier
pool_id, integrated, project_id, environment_id, error_count, fake_error_count, pending_count,
success_count, assertion_count, assertion_success_count, request_error_rate, request_pending_rate,
request_fake_error_rate, request_pass_rate, assertion_pass_rate, script_identifier
</sql>
<select id="selectByExample" parameterType="io.metersphere.api.domain.ApiReportExample" resultMap="BaseResultMap">
select
@ -137,10 +135,10 @@
deleted, update_user, update_time,
start_time, end_time, request_duration,
`status`, trigger_mode, run_mode,
pool_id, version_id, integrated,
project_id, environment_id, error_count,
fake_error_count, pending_count, success_count,
assertion_count, assertion_success_count, request_error_rate,
pool_id, integrated, project_id,
environment_id, error_count, fake_error_count,
pending_count, success_count, assertion_count,
assertion_success_count, request_error_rate,
request_pending_rate, request_fake_error_rate,
request_pass_rate, assertion_pass_rate, script_identifier
)
@ -149,10 +147,10 @@
#{deleted,jdbcType=BIT}, #{updateUser,jdbcType=VARCHAR}, #{updateTime,jdbcType=BIGINT},
#{startTime,jdbcType=BIGINT}, #{endTime,jdbcType=BIGINT}, #{requestDuration,jdbcType=BIGINT},
#{status,jdbcType=VARCHAR}, #{triggerMode,jdbcType=VARCHAR}, #{runMode,jdbcType=VARCHAR},
#{poolId,jdbcType=VARCHAR}, #{versionId,jdbcType=VARCHAR}, #{integrated,jdbcType=BIT},
#{projectId,jdbcType=VARCHAR}, #{environmentId,jdbcType=VARCHAR}, #{errorCount,jdbcType=BIGINT},
#{fakeErrorCount,jdbcType=BIGINT}, #{pendingCount,jdbcType=BIGINT}, #{successCount,jdbcType=BIGINT},
#{assertionCount,jdbcType=BIGINT}, #{assertionSuccessCount,jdbcType=BIGINT}, #{requestErrorRate,jdbcType=VARCHAR},
#{poolId,jdbcType=VARCHAR}, #{integrated,jdbcType=BIT}, #{projectId,jdbcType=VARCHAR},
#{environmentId,jdbcType=VARCHAR}, #{errorCount,jdbcType=BIGINT}, #{fakeErrorCount,jdbcType=BIGINT},
#{pendingCount,jdbcType=BIGINT}, #{successCount,jdbcType=BIGINT}, #{assertionCount,jdbcType=BIGINT},
#{assertionSuccessCount,jdbcType=BIGINT}, #{requestErrorRate,jdbcType=VARCHAR},
#{requestPendingRate,jdbcType=VARCHAR}, #{requestFakeErrorRate,jdbcType=VARCHAR},
#{requestPassRate,jdbcType=VARCHAR}, #{assertionPassRate,jdbcType=VARCHAR}, #{scriptIdentifier,jdbcType=VARCHAR}
)
@ -208,9 +206,6 @@
<if test="poolId != null">
pool_id,
</if>
<if test="versionId != null">
version_id,
</if>
<if test="integrated != null">
integrated,
</if>
@ -306,9 +301,6 @@
<if test="poolId != null">
#{poolId,jdbcType=VARCHAR},
</if>
<if test="versionId != null">
#{versionId,jdbcType=VARCHAR},
</if>
<if test="integrated != null">
#{integrated,jdbcType=BIT},
</if>
@ -413,9 +405,6 @@
<if test="record.poolId != null">
pool_id = #{record.poolId,jdbcType=VARCHAR},
</if>
<if test="record.versionId != null">
version_id = #{record.versionId,jdbcType=VARCHAR},
</if>
<if test="record.integrated != null">
integrated = #{record.integrated,jdbcType=BIT},
</if>
@ -484,7 +473,6 @@
trigger_mode = #{record.triggerMode,jdbcType=VARCHAR},
run_mode = #{record.runMode,jdbcType=VARCHAR},
pool_id = #{record.poolId,jdbcType=VARCHAR},
version_id = #{record.versionId,jdbcType=VARCHAR},
integrated = #{record.integrated,jdbcType=BIT},
project_id = #{record.projectId,jdbcType=VARCHAR},
environment_id = #{record.environmentId,jdbcType=VARCHAR},
@ -552,9 +540,6 @@
<if test="poolId != null">
pool_id = #{poolId,jdbcType=VARCHAR},
</if>
<if test="versionId != null">
version_id = #{versionId,jdbcType=VARCHAR},
</if>
<if test="integrated != null">
integrated = #{integrated,jdbcType=BIT},
</if>
@ -620,7 +605,6 @@
trigger_mode = #{triggerMode,jdbcType=VARCHAR},
run_mode = #{runMode,jdbcType=VARCHAR},
pool_id = #{poolId,jdbcType=VARCHAR},
version_id = #{versionId,jdbcType=VARCHAR},
integrated = #{integrated,jdbcType=BIT},
project_id = #{projectId,jdbcType=VARCHAR},
environment_id = #{environmentId,jdbcType=VARCHAR},
@ -642,7 +626,7 @@
insert into api_report
(id, `name`, test_plan_id, create_user, delete_time, delete_user, deleted, update_user,
update_time, start_time, end_time, request_duration, `status`, trigger_mode, run_mode,
pool_id, version_id, integrated, project_id, environment_id, error_count, fake_error_count,
pool_id, integrated, project_id, environment_id, error_count, fake_error_count,
pending_count, success_count, assertion_count, assertion_success_count, request_error_rate,
request_pending_rate, request_fake_error_rate, request_pass_rate, assertion_pass_rate,
script_identifier)
@ -653,14 +637,13 @@
#{item.deleted,jdbcType=BIT}, #{item.updateUser,jdbcType=VARCHAR}, #{item.updateTime,jdbcType=BIGINT},
#{item.startTime,jdbcType=BIGINT}, #{item.endTime,jdbcType=BIGINT}, #{item.requestDuration,jdbcType=BIGINT},
#{item.status,jdbcType=VARCHAR}, #{item.triggerMode,jdbcType=VARCHAR}, #{item.runMode,jdbcType=VARCHAR},
#{item.poolId,jdbcType=VARCHAR}, #{item.versionId,jdbcType=VARCHAR}, #{item.integrated,jdbcType=BIT},
#{item.projectId,jdbcType=VARCHAR}, #{item.environmentId,jdbcType=VARCHAR}, #{item.errorCount,jdbcType=BIGINT},
#{item.fakeErrorCount,jdbcType=BIGINT}, #{item.pendingCount,jdbcType=BIGINT}, #{item.successCount,jdbcType=BIGINT},
#{item.assertionCount,jdbcType=BIGINT}, #{item.assertionSuccessCount,jdbcType=BIGINT},
#{item.requestErrorRate,jdbcType=VARCHAR}, #{item.requestPendingRate,jdbcType=VARCHAR},
#{item.requestFakeErrorRate,jdbcType=VARCHAR}, #{item.requestPassRate,jdbcType=VARCHAR},
#{item.assertionPassRate,jdbcType=VARCHAR}, #{item.scriptIdentifier,jdbcType=VARCHAR}
)
#{item.poolId,jdbcType=VARCHAR}, #{item.integrated,jdbcType=BIT}, #{item.projectId,jdbcType=VARCHAR},
#{item.environmentId,jdbcType=VARCHAR}, #{item.errorCount,jdbcType=BIGINT}, #{item.fakeErrorCount,jdbcType=BIGINT},
#{item.pendingCount,jdbcType=BIGINT}, #{item.successCount,jdbcType=BIGINT}, #{item.assertionCount,jdbcType=BIGINT},
#{item.assertionSuccessCount,jdbcType=BIGINT}, #{item.requestErrorRate,jdbcType=VARCHAR},
#{item.requestPendingRate,jdbcType=VARCHAR}, #{item.requestFakeErrorRate,jdbcType=VARCHAR},
#{item.requestPassRate,jdbcType=VARCHAR}, #{item.assertionPassRate,jdbcType=VARCHAR},
#{item.scriptIdentifier,jdbcType=VARCHAR})
</foreach>
</insert>
<insert id="batchInsertSelective" parameterType="map">
@ -721,9 +704,6 @@
<if test="'pool_id'.toString() == column.value">
#{item.poolId,jdbcType=VARCHAR}
</if>
<if test="'version_id'.toString() == column.value">
#{item.versionId,jdbcType=VARCHAR}
</if>
<if test="'integrated'.toString() == column.value">
#{item.integrated,jdbcType=BIT}
</if>

View File

@ -115,7 +115,6 @@ CREATE TABLE IF NOT EXISTS api_report(
`trigger_mode` VARCHAR(20) NOT NULL COMMENT 'Trigger mode' ,
`run_mode` VARCHAR(20) NOT NULL COMMENT 'Run mode' ,
`pool_id` VARCHAR(50) NOT NULL COMMENT 'Resource pool' ,
`version_id` VARCHAR(50) COMMENT 'Version FK' ,
`integrated` BIT(1) NOT NULL DEFAULT 0 COMMENT 'Whether this is an integrated report' ,
`project_id` VARCHAR(50) NOT NULL COMMENT 'Project FK' ,
`environment_id` VARCHAR(50) COMMENT 'Environment' ,

View File

@ -0,0 +1,18 @@
package io.metersphere.sdk.constants;
/**
* API batch execution mode
*
* @Author: jianxing
* @CreateTime: 2023-12-08 10:53
*/
public enum ApiBatchRunMode {
/**
* Serial
*/
SERIAL,
/**
* Parallel
*/
PARALLEL
}

View File

@ -45,7 +45,7 @@ public class ApiNoticeDTO implements java.io.Serializable {
*/
private String queueId;
/**
* Report type
* Whether this is an integrated report
*/
private String reportType;
private Boolean integratedReport;
}

View File

@ -1,8 +1,8 @@
package io.metersphere.sdk.dto.api.task;
import io.metersphere.sdk.constants.ApiExecuteRunMode;
import jakarta.validation.constraints.NotBlank;
import io.metersphere.sdk.constants.ApiBatchRunMode;
import lombok.Data;
import org.apache.commons.lang3.StringUtils;
import java.io.Serial;
import java.io.Serializable;
@ -13,16 +13,16 @@ public class ApiRunModeConfigDTO implements Serializable {
private static final long serialVersionUID = 1L;
/**
* Run mode
* {@link ApiExecuteRunMode}
* Run mode (serial/parallel)
* Whether to run in parallel
* {@link io.metersphere.sdk.constants.ApiBatchRunMode}
*/
@NotBlank
private String runMode;
/**
* Integrated report / independent report
* Whether this is an integrated report
*/
private String reportType;
private Boolean integratedReport;
/**
* Integrated report configuration
@ -32,22 +32,24 @@ public class ApiRunModeConfigDTO implements Serializable {
/**
* Stop on failure
*/
private boolean onSampleError;
private Boolean stopOnFailure = false;
/**
* Resource pool
* If the resource pool is empty, the default resource pool is used
*/
private String poolId;
/**
* Environment type
* Whether this is an environment group
*/
private String environmentType;
private Boolean grouped = false;
/**
* Environment group ID
*/
private String environmentGroupId;
/**
* Execution environment ID
* Environment or environment group ID
*/
private String environmentId;
public Boolean isParallel() {
return StringUtils.equals(runMode, ApiBatchRunMode.PARALLEL.name());
}
}

View File

@ -22,8 +22,11 @@ public class TaskRequestDTO implements Serializable {
private String msUrl;
private String kafkaConfig;
private String minioConfig;
private String queueId;
private int poolSize;
/**
* Queue ID used during batch execution
*/
private String queueId;
/**
* Whether the result of each individual step needs to be received in real time
*/
@ -84,6 +87,12 @@ public class TaskRequestDTO implements Serializable {
@NotBlank
private String projectId;
/**
* {@link io.metersphere.sdk.constants.ApiBatchRunMode}
*/
@NotBlank
private String runMode;
/**
* Run configuration
*/

View File

@ -1,5 +1,6 @@
package io.metersphere.sdk.dto.queue;
import io.metersphere.sdk.dto.api.task.ApiRunModeConfigDTO;
import lombok.Data;
import java.io.Serial;
@ -13,30 +14,15 @@ public class ExecutionQueue implements Serializable {
private String queueId;
/**
* Report type / test plan type / test case type / test set type / scenario collection type
* Executing user
*/
private String reportType;
/**
* Run mode
*/
private String runMode;
/**
* Resource pool ID
*/
private String poolId;
private String userId;
/**
* Creation time
*/
private Long createTime;
/**
* Whether to continue on failure
*/
private Boolean failure;
/**
* Retry enabled
*/
@ -48,9 +34,9 @@ public class ExecutionQueue implements Serializable {
private Long retryNumber;
/**
* Environment ID
* Run mode configuration
*/
private String environmentId;
private ApiRunModeConfigDTO runModeConfig;
@Serial
private static final long serialVersionUID = 1L;

View File

@ -4,10 +4,12 @@ import lombok.Data;
import java.io.Serial;
import java.io.Serializable;
import java.util.Map;
@Data
public class ExecutionQueueDetail implements Serializable {
@Serial
private static final long serialVersionUID = 1L;
/**
* Resource ID; each resource is unique within the same execution queue
*/
@ -18,21 +20,9 @@ public class ExecutionQueueDetail implements Serializable {
*/
private Integer sort;
/**
* Execution report ID generated by the current resource
*/
private String reportId;
/**
* Resource type (API / CASE / PLAN_CASE / PLAN_SCENARIO / API_SCENARIO)
* {@link io.metersphere.sdk.constants.ApiExecuteResourceType}
*/
private String resourceType;
/**
* Environment map: key = projectId, value = envId; the environment set on the queue takes precedence, otherwise the resource's own environment is used
*/
private Map<String, String> envMap;
@Serial
private static final long serialVersionUID = 1L;
}

View File

@ -1,6 +1,7 @@
package io.metersphere.api.controller;
import io.metersphere.api.service.ApiExecuteService;
import io.metersphere.api.service.definition.ApiReportService;
import io.metersphere.sdk.file.FileRequest;
import io.metersphere.sdk.util.LogUtils;
import jakarta.annotation.Resource;
@ -23,6 +24,8 @@ public class ApiExecuteResourceController {
private StringRedisTemplate stringRedisTemplate;
@Resource
private ApiExecuteService apiExecuteService;
@Resource
private ApiReportService apiReportService;
/**
* Fetch the execution script
@ -37,6 +40,7 @@ public class ApiExecuteResourceController {
LogUtils.info("获取执行脚本: ", key);
String script = stringRedisTemplate.opsForValue().get(key);
stringRedisTemplate.delete(key);
apiReportService.updateRunningReport(reportId);
return Optional.ofNullable(script).orElse(StringUtils.EMPTY);
}

View File

@ -7,10 +7,7 @@ import io.metersphere.api.dto.ReferenceDTO;
import io.metersphere.api.dto.ReferenceRequest;
import io.metersphere.api.dto.definition.*;
import io.metersphere.api.dto.request.ApiTransferRequest;
import io.metersphere.api.service.definition.ApiTestCaseLogService;
import io.metersphere.api.service.definition.ApiTestCaseNoticeService;
import io.metersphere.api.service.definition.ApiTestCaseRecoverService;
import io.metersphere.api.service.definition.ApiTestCaseService;
import io.metersphere.api.service.definition.*;
import io.metersphere.project.service.FileModuleService;
import io.metersphere.sdk.constants.PermissionConstants;
import io.metersphere.sdk.dto.api.task.TaskRequestDTO;
@ -49,6 +46,8 @@ public class ApiTestCaseController {
private ApiTestCaseRecoverService apiTestCaseRecoverService;
@Resource
private FileModuleService fileModuleService;
@Resource
private ApiTestCaseBatchRunService apiTestCaseBatchRunService;
@PostMapping(value = "/add")
@Operation(summary = "接口测试-接口管理-接口用例-新增")
@ -260,6 +259,13 @@ public class ApiTestCaseController {
return apiTestCaseService.debug(request);
}
@PostMapping("/batch/run")
@Operation(summary = "批量执行")
@RequiresPermissions(PermissionConstants.PROJECT_API_DEFINITION_CASE_EXECUTE)
public void batchRun(@Validated @RequestBody ApiTestCaseBatchRunRequest request) {
apiTestCaseBatchRunService.asyncBatchRun(request, SessionUtils.getUserId());
}
@PostMapping("/get-reference")
@Operation(summary = "接口测试-接口管理-接口用例-引用关系")
@RequiresPermissions(PermissionConstants.PROJECT_API_DEFINITION_CASE_READ)

View File

@ -0,0 +1,53 @@
package io.metersphere.api.dto.definition;
import io.metersphere.sdk.constants.ApiBatchRunMode;
import io.metersphere.system.valid.EnumValue;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.Size;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.io.Serializable;
@Data
@EqualsAndHashCode(callSuper = false)
public class ApiTestCaseBatchRunRequest extends ApiTestCaseBatchRequest implements Serializable {
private static final long serialVersionUID = 1L;
@Schema(description = "接口pk")
@Size(max = 50, message = "{api_definition.id.length_range}")
private String apiDefinitionId;
@Valid
@Schema(description = "运行模式配置")
private ApiRunModeRequest runModeConfig;
@Data
public static class ApiRunModeRequest {
@EnumValue(enumClass = ApiBatchRunMode.class)
@Schema(description = "运行模式(SERIAL 串行/ PARALLEL并行)")
@NotBlank
private String runMode;
@Schema(description = "是否是集成报告")
private Boolean integratedReport;
@Schema(description = "集合报告名称")
private String integratedReportName;
@Schema(description = "失败停止")
private Boolean stopOnFailure = false;
@Schema(description = "资源池ID")
private String poolId;
@Schema(description = "是否为环境组")
private Boolean grouped = false;
@Schema(description = "环境或者环境组ID")
private String environmentId;
}
}
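
For orientation, a minimal sketch (not part of this commit) of how a caller could populate this request before posting it to the new batch/run endpoint. All literal IDs are placeholders; the controller test further down in this diff builds essentially the same structure.

// Illustrative sketch only: placeholder IDs, parallel mode, one integrated report.
import io.metersphere.api.dto.definition.ApiTestCaseBatchRunRequest;
import io.metersphere.sdk.constants.ApiBatchRunMode;
import java.util.List;

class BatchRunRequestExample {
    static ApiTestCaseBatchRunRequest build() {
        ApiTestCaseBatchRunRequest request = new ApiTestCaseBatchRunRequest();
        request.setProjectId("project-id");                          // placeholder project
        request.setSelectIds(List.of("case-id-1", "case-id-2"));     // placeholder case IDs

        ApiTestCaseBatchRunRequest.ApiRunModeRequest runMode =
                new ApiTestCaseBatchRunRequest.ApiRunModeRequest();
        runMode.setRunMode(ApiBatchRunMode.PARALLEL.name());
        runMode.setIntegratedReport(true);                           // collect everything into one report
        runMode.setIntegratedReportName("batch report");
        runMode.setStopOnFailure(false);
        runMode.setPoolId("pool-id");                                // placeholder resource pool
        request.setRunModeConfig(runMode);
        return request;
    }
}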

View File

@ -1,12 +1,10 @@
package io.metersphere.api.dto.scenario;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.Size;
import lombok.Data;
import java.util.List;
import java.util.Map;
/**

View File

@ -2,15 +2,19 @@ package io.metersphere.api.listener;
import io.metersphere.api.event.ApiEventSource;
import io.metersphere.api.service.ApiReportSendNoticeService;
import io.metersphere.api.service.definition.ApiTestCaseBatchRunService;
import io.metersphere.api.service.queue.ApiExecutionQueueService;
import io.metersphere.sdk.constants.ApiReportType;
import io.metersphere.sdk.constants.ApiExecuteResourceType;
import io.metersphere.sdk.constants.ApplicationScope;
import io.metersphere.sdk.constants.KafkaTopicConstants;
import io.metersphere.sdk.dto.api.notice.ApiNoticeDTO;
import io.metersphere.sdk.dto.queue.ExecutionQueue;
import io.metersphere.sdk.dto.queue.ExecutionQueueDetail;
import io.metersphere.sdk.util.EnumValidator;
import io.metersphere.sdk.util.JSON;
import io.metersphere.sdk.util.LogUtils;
import jakarta.annotation.Resource;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
@ -28,6 +32,8 @@ public class MessageListener {
@Resource
private ApiExecutionQueueService apiExecutionQueueService;
@Resource
private ApiTestCaseBatchRunService apiTestCaseBatchRunService;
@KafkaListener(id = MESSAGE_CONSUME_ID, topics = KafkaTopicConstants.API_REPORT_TASK_TOPIC, groupId = MESSAGE_CONSUME_ID)
public void messageConsume(ConsumerRecord<?, String> record) {
@ -37,20 +43,44 @@ public class MessageListener {
ApiNoticeDTO dto = JSON.parseObject(record.value(), ApiNoticeDTO.class);
// Integrated reports do not send notifications
if (!StringUtils.equalsIgnoreCase(dto.getReportType(), ApiReportType.INTEGRATED.name())) {
if (!BooleanUtils.isTrue(dto.getIntegratedReport())) {
apiReportSendNoticeService.sendNotice(dto);
// TODO notify the test plan to handle follow-up processing
LogUtils.info("Sending notification to test plan: {}", record.key());
apiEventSource.fireEvent(ApplicationScope.API_TEST, record.value());
}
// TODO trigger the next execution when running serially
ExecutionQueueDetail detail = apiExecutionQueueService.getNextDetail(dto.getQueueId());
// TODO invoke the execution method
executeNextTask(dto);
}
} catch (Exception e) {
LogUtils.error("接收到发送通知信息:{}", e);
}
}
/**
* Execute the next task of the batch
*
* @param dto
*/
private void executeNextTask(ApiNoticeDTO dto) {
if (StringUtils.isBlank(dto.getQueueId())) {
return;
}
try {
ExecutionQueue queue = apiExecutionQueueService.getQueue(dto.getQueueId());
// Only serial runs trigger the next task
if (queue == null || BooleanUtils.isTrue(queue.getRunModeConfig().isParallel())) {
return;
}
ExecutionQueueDetail nextDetail = apiExecutionQueueService.getNextDetail(dto.getQueueId());
if (nextDetail == null) {
// the queue has been exhausted, nothing left to run
return;
}
ApiExecuteResourceType resourceType = EnumValidator.validateEnum(ApiExecuteResourceType.class, nextDetail.getResourceType());
switch (resourceType) {
case API_CASE -> apiTestCaseBatchRunService.executeNextTask(queue, nextDetail);
default -> {
}
}
} catch (Exception e) {
LogUtils.error("执行任务失败:", e);
}
}
}
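
The serial chain above relies on ApiExecutionQueueService.getQueue/getNextDetail, which this commit does not show. Below is a hedged sketch of one plausible, Redis-list-backed shape of getNextDetail; the key format and serialization are assumptions, and only the RedisTemplate/JSON usage mirrors what the surrounding code already depends on.

// Sketch under stated assumptions, not the actual ApiExecutionQueueService implementation.
import io.metersphere.sdk.dto.queue.ExecutionQueueDetail;
import io.metersphere.sdk.util.JSON;
import org.springframework.data.redis.core.RedisTemplate;

class ExecutionQueueSketch {
    private final RedisTemplate<String, String> redisTemplate;

    ExecutionQueueSketch(RedisTemplate<String, String> redisTemplate) {
        this.redisTemplate = redisTemplate;
    }

    /** Pop the next queued resource in sort order; null means the queue is exhausted. */
    ExecutionQueueDetail getNextDetail(String queueId) {
        // hypothetical key layout: one Redis list of serialized details per queueId
        String detailJson = redisTemplate.opsForList().leftPop("queue:" + queueId + ":detail");
        return detailJson == null ? null : JSON.parseObject(detailJson, ExecutionQueueDetail.class);
    }
}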

View File

@ -74,4 +74,6 @@ public interface ExtApiTestCaseMapper {
DropNode selectDragInfoById(String id);
DropNode selectNodeByPosOperator(NodeSortQueryParam nodeSortQueryParam);
List<ApiTestCase> getApiCaseExecuteInfoByIds(@Param("ids")List<String> ids);
}

View File

@ -317,6 +317,14 @@
</if>
LIMIT 1
</select>
<select id="getApiCaseExecuteInfoByIds" resultType="io.metersphere.api.domain.ApiTestCase">
select id, name, environment_id, project_id
from api_test_case
where id in
<foreach collection="ids" item="id" separator="," open="(" close=")">
#{id}
</foreach>
</select>
<sql id="report_filters">
<if test="${filter} != null and ${filter}.size() > 0">
<foreach collection="${filter}.entrySet()" index="key" item="values">

View File

@ -9,6 +9,7 @@ import io.metersphere.project.api.processor.ScriptProcessor;
import io.metersphere.project.constants.ScriptLanguageType;
import io.metersphere.project.dto.CommonScriptInfo;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.jmeter.config.Arguments;
import org.apache.jmeter.save.SaveService;
@ -86,7 +87,7 @@ public abstract class ScriptProcessorConverter extends MsProcessorConverter<Scri
}
public static boolean isJSR233(ScriptProcessor scriptProcessor) {
if (scriptProcessor.getEnableCommonScript()) {
if (BooleanUtils.isTrue(scriptProcessor.getEnableCommonScript())) {
return !StringUtils.equals(scriptProcessor.getCommonScriptInfo().getScriptLanguage(), ScriptLanguageType.BEANSHELL.name());
} else {
return !StringUtils.equals(scriptProcessor.getScriptLanguage(), ScriptLanguageType.BEANSHELL.name());

View File

@ -23,7 +23,6 @@ import io.metersphere.sdk.constants.ApiExecuteRunMode;
import io.metersphere.sdk.constants.ProjectApplicationType;
import io.metersphere.sdk.constants.StorageType;
import io.metersphere.sdk.dto.api.task.ApiExecuteFileInfo;
import io.metersphere.sdk.dto.api.task.ApiRunModeConfigDTO;
import io.metersphere.sdk.dto.api.task.TaskRequestDTO;
import io.metersphere.sdk.exception.MSException;
import io.metersphere.sdk.file.FileCenter;
@ -131,7 +130,7 @@ public class ApiExecuteService {
// Set the execution file parameters
setTaskFileParam(runRequest, taskRequest);
// Fake-error (false alarm) handling
// Fake-error (false alarm) handling, TODO: multi-project
taskRequest.setMsRegexList(projectApplicationService.get(Collections.singletonList(taskRequest.getProjectId())));
if (!StringUtils.equals(taskRequest.getResourceType(), ApiExecuteResourceType.API_DEBUG.name())) {
@ -142,14 +141,14 @@ public class ApiExecuteService {
// Parse the execution script
String executeScript = parseExecuteScript(runRequest.getTestElement(), parameterConfig);
// Set the plugin file info
// Set the plugin file info, TODO: multi-project
taskRequest.setPluginFiles(apiPluginService.getFileInfoByProjectId(taskRequest.getProjectId()));
// Cache the test script in Redis
String scriptRedisKey = getScriptRedisKey(taskRequest.getReportId(), taskRequest.getResourceId());
stringRedisTemplate.opsForValue().set(scriptRedisKey, executeScript);
if (StringUtils.equals(taskRequest.getRunModeConfig().getRunMode(), ApiExecuteRunMode.FRONTEND_DEBUG.name())) {
if (StringUtils.equals(taskRequest.getRunMode(), ApiExecuteRunMode.FRONTEND_DEBUG.name())) {
// For frontend debugging, return the execution parameters and let the frontend invoke the local resource pool to execute
return taskRequest;
}
@ -194,7 +193,7 @@ public class ApiExecuteService {
String endpoint = TaskRunnerClient.getEndpoint(testResourceNodeDTO.getIp(), testResourceNodeDTO.getPort());
LogUtils.info("开始发送请求【 {}_{} 】到 {} 节点执行", taskRequest.getReportId(), taskRequest.getResourceId(), endpoint);
if (StringUtils.equalsAny(taskRequest.getRunModeConfig().getRunMode(), ApiExecuteRunMode.FRONTEND_DEBUG.name(), ApiExecuteRunMode.BACKEND_DEBUG.name())) {
if (StringUtils.equalsAny(taskRequest.getRunMode(), ApiExecuteRunMode.FRONTEND_DEBUG.name(), ApiExecuteRunMode.BACKEND_DEBUG.name())) {
TaskRunnerClient.debugApi(endpoint, taskRequest);
} else {
TaskRunnerClient.runApi(endpoint, taskRequest);
@ -256,11 +255,8 @@ public class ApiExecuteService {
setServerInfoParam(taskRequest);
taskRequest.setRealTime(true);
taskRequest.setSaveResult(false);
taskRequest.setResourceType(ApiExecuteResourceType.API_DEBUG.name());
ApiRunModeConfigDTO runModeConfig = new ApiRunModeConfigDTO();
runModeConfig.setRunMode(ApiExecuteRunMode.BACKEND_DEBUG.name());
taskRequest.setRunModeConfig(runModeConfig);
taskRequest.setRunMode(ApiExecuteRunMode.BACKEND_DEBUG.name());
return execute(apiRunRequest, taskRequest, new ApiParamConfig());
}
@ -490,7 +486,10 @@ public class ApiExecuteService {
*/
public TaskRequestDTO apiExecute(ApiResourceRunRequest runRequest, TaskRequestDTO taskRequest, ApiParamConfig apiParamConfig) {
// Set the common script info for pre/post script processors
apiCommonService.setEnableCommonScriptProcessorInfo(runRequest.getTestElement());
AbstractMsTestElement testElement = runRequest.getTestElement();
apiCommonService.setEnableCommonScriptProcessorInfo(testElement);
testElement.setResourceId(taskRequest.getResourceId());
testElement.setStepId(taskRequest.getResourceId());
return execute(runRequest, taskRequest, apiParamConfig);
}

View File

@ -25,7 +25,6 @@ import io.metersphere.project.dto.MoveNodeSortDTO;
import io.metersphere.project.service.MoveNodeService;
import io.metersphere.project.service.ProjectService;
import io.metersphere.sdk.constants.DefaultRepositoryDir;
import io.metersphere.sdk.dto.api.task.ApiRunModeConfigDTO;
import io.metersphere.sdk.dto.api.task.TaskRequestDTO;
import io.metersphere.sdk.exception.MSException;
import io.metersphere.sdk.util.BeanUtils;
@ -220,10 +219,7 @@ public class ApiDebugService extends MoveNodeService {
taskRequest.setSaveResult(false);
taskRequest.setRealTime(true);
taskRequest.setResourceType(ApiResourceType.API_DEBUG.name());
ApiRunModeConfigDTO apiRunModeConfig = new ApiRunModeConfigDTO();
apiRunModeConfig.setRunMode(apiExecuteService.getDebugRunModule(request.getFrontendDebug()));
taskRequest.setRunModeConfig(apiRunModeConfig);
taskRequest.setRunMode(apiExecuteService.getDebugRunModule(request.getFrontendDebug()));
return apiExecuteService.apiExecute(runRequest, taskRequest, apiParamConfig);
}

View File

@ -34,7 +34,6 @@ import io.metersphere.sdk.constants.ApplicationNumScope;
import io.metersphere.sdk.constants.DefaultRepositoryDir;
import io.metersphere.sdk.constants.ModuleConstants;
import io.metersphere.sdk.domain.OperationLogBlob;
import io.metersphere.sdk.dto.api.task.ApiRunModeConfigDTO;
import io.metersphere.sdk.dto.api.task.TaskRequestDTO;
import io.metersphere.sdk.exception.MSException;
import io.metersphere.sdk.mapper.OperationLogBlobMapper;
@ -1174,9 +1173,7 @@ public class ApiDefinitionService extends MoveNodeService {
taskRequest.setSaveResult(false);
taskRequest.setRealTime(true);
taskRequest.setResourceType(ApiResourceType.API.name());
ApiRunModeConfigDTO apiRunModeConfig = new ApiRunModeConfigDTO();
apiRunModeConfig.setRunMode(apiExecuteService.getDebugRunModule(request.getFrontendDebug()));
taskRequest.setRunModeConfig(apiRunModeConfig);
taskRequest.setRunMode(apiExecuteService.getDebugRunModule(request.getFrontendDebug()));
// Set the environment
apiParamConfig.setEnvConfig(environmentInfoDTO);

View File

@ -5,6 +5,7 @@ import io.metersphere.api.dto.definition.*;
import io.metersphere.api.dto.report.ApiReportListDTO;
import io.metersphere.api.mapper.*;
import io.metersphere.api.utils.ApiDataUtils;
import io.metersphere.sdk.constants.ApiReportStatus;
import io.metersphere.sdk.dto.api.result.RequestResult;
import io.metersphere.sdk.exception.MSException;
import io.metersphere.sdk.util.BeanUtils;
@ -188,4 +189,16 @@ public class ApiReportService {
}
return apiReportDetails;
}
/**
* Update the report of a running case to the RUNNING status
* @param reportId
*/
public void updateRunningReport(String reportId) {
ApiReport apiReport = new ApiReport();
apiReport.setId(reportId);
apiReport.setStatus(ApiReportStatus.RUNNING.name());
apiReport.setUpdateTime(System.currentTimeMillis());
apiReportMapper.updateByPrimaryKeySelective(apiReport);
}
}

View File

@ -0,0 +1,406 @@
package io.metersphere.api.service.definition;
import io.metersphere.api.domain.*;
import io.metersphere.api.dto.ApiParamConfig;
import io.metersphere.api.dto.debug.ApiResourceRunRequest;
import io.metersphere.api.dto.definition.ApiTestCaseBatchRunRequest;
import io.metersphere.api.mapper.ApiTestCaseBlobMapper;
import io.metersphere.api.mapper.ApiTestCaseMapper;
import io.metersphere.api.mapper.ExtApiTestCaseMapper;
import io.metersphere.api.service.ApiExecuteService;
import io.metersphere.api.service.queue.ApiExecutionQueueService;
import io.metersphere.api.service.queue.ApiExecutionSetService;
import io.metersphere.api.utils.ApiDataUtils;
import io.metersphere.plugin.api.spi.AbstractMsTestElement;
import io.metersphere.project.service.EnvironmentService;
import io.metersphere.sdk.constants.*;
import io.metersphere.sdk.dto.api.task.ApiRunModeConfigDTO;
import io.metersphere.sdk.dto.api.task.CollectionReportDTO;
import io.metersphere.sdk.dto.api.task.TaskRequestDTO;
import io.metersphere.sdk.dto.queue.ExecutionQueue;
import io.metersphere.sdk.dto.queue.ExecutionQueueDetail;
import io.metersphere.sdk.util.BeanUtils;
import io.metersphere.sdk.util.LogUtils;
import io.metersphere.sdk.util.SubListUtils;
import io.metersphere.system.uid.IDGenerator;
import jakarta.annotation.Resource;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Function;
import java.util.stream.Collectors;
@Service
@Transactional(rollbackFor = Exception.class)
public class ApiTestCaseBatchRunService {
@Resource
private ApiTestCaseMapper apiTestCaseMapper;
@Resource
private ExtApiTestCaseMapper extApiTestCaseMapper;
@Resource
private ApiTestCaseBlobMapper apiTestCaseBlobMapper;
@Resource
private ApiTestCaseService apiTestCaseService;
@Resource
private ApiExecuteService apiExecuteService;
@Resource
private EnvironmentService environmentService;
@Resource
private ApiExecutionQueueService apiExecutionQueueService;
@Resource
private ApiExecutionSetService apiExecutionSetService;
@Resource
private ApiReportService apiReportService;
/**
* Asynchronous batch execution
* @param request
* @param userId
*/
public void asyncBatchRun(ApiTestCaseBatchRunRequest request, String userId) {
Thread.startVirtualThread(() -> batchRun(request, userId));
}
/**
* Batch execution
* @param request
* @param userId
*/
private void batchRun(ApiTestCaseBatchRunRequest request, String userId) {
try {
if (StringUtils.equals(request.getRunModeConfig().getRunMode(), ApiBatchRunMode.PARALLEL.name())) {
parallelExecute(request, userId);
} else {
serialExecute(request, userId);
}
} catch (Exception e) {
LogUtils.error("批量执行用例失败: ", e);
}
}
/**
* Serial batch execution
*
* @param request
*/
public void serialExecute(ApiTestCaseBatchRunRequest request, String userId) throws Exception {
List<String> ids = apiTestCaseService.doSelectIds(request, false);
ApiRunModeConfigDTO runModeConfig = getRunModeConfig(request);
// Initialize the integrated report
if (isIntegratedReport(runModeConfig)) {
initIntegratedReport(runModeConfig, ids, userId, request.getProjectId());
}
// Initialize the integrated report first so the report ID is set, then initialize the execution queue
ExecutionQueue queue = initExecutionQueue(ids, runModeConfig, userId);
// Execute the first task
ExecutionQueueDetail nextDetail = apiExecutionQueueService.getNextDetail(queue.getQueueId());
executeNextTask(queue, nextDetail);
}
/**
* Parallel batch execution
*
* @param request
*/
public void parallelExecute(ApiTestCaseBatchRunRequest request, String userId) {
List<String> ids = apiTestCaseService.doSelectIds(request, false);
ApiRunModeConfigDTO runModeConfig = getRunModeConfig(request);
if (isIntegratedReport(runModeConfig)) {
// Initialize the integrated report
ApiReport apiReport = initIntegratedReport(runModeConfig, ids, userId, request.getProjectId());
// Only the integrated report needs to initialize the execution set, which is used to track overall execution progress
apiExecutionSetService.initSet(apiReport.getId(), ids);
}
AtomicInteger errorCount = new AtomicInteger();
// Process in batches
SubListUtils.dealForSubList(ids, 100, subIds -> {
List<ApiTestCase> apiTestCases = extApiTestCaseMapper.getApiCaseExecuteInfoByIds(subIds);
Map<String, String> caseReportMap = null;
String integratedReportId = null;
Map<String, ApiTestCase> apiCaseMap = apiTestCases.stream()
.collect(Collectors.toMap(ApiTestCase::getId, Function.identity()));
ApiTestCaseBlobExample example = new ApiTestCaseBlobExample();
example.createCriteria().andIdIn(subIds);
Map<String, ApiTestCaseBlob> apiTestCaseBlobMap = apiTestCaseBlobMapper.selectByExampleWithBLOBs(example).stream()
.collect(Collectors.toMap(ApiTestCaseBlob::getId, Function.identity()));
if (isIntegratedReport(runModeConfig)) {
// Get the integrated report ID
integratedReportId = runModeConfig.getCollectionReport().getReportId();
initApiReportSteps(subIds, apiCaseMap, integratedReportId);
} else {
// Initialize individual (non-integrated) reports
List<ApiTestCaseRecord> apiTestCaseRecords = initApiReport(runModeConfig, apiTestCases, userId);
caseReportMap = apiTestCaseRecords.stream()
.collect(Collectors.toMap(ApiTestCaseRecord::getApiTestCaseId, ApiTestCaseRecord::getApiReportId));
}
// Keep the ID order here consistent with the ID order of the queue
for (String id : subIds) {
String reportId = null;
try {
ApiTestCase apiTestCase = apiCaseMap.get(id);
ApiTestCaseBlob apiTestCaseBlob = apiTestCaseBlobMap.get(id);
if (apiTestCase == null) {
if (isIntegratedReport(runModeConfig)) {
// If the case no longer exists, remove it from the execution set
apiExecutionSetService.removeItem(integratedReportId, id);
}
LogUtils.info("The case for the current task has been deleted: {}", id);
continue;
}
// For an integrated report, generate a unique virtual ID; non-integrated reports use the per-case report ID
reportId = isIntegratedReport(runModeConfig) ? UUID.randomUUID().toString() : caseReportMap.get(id);
TaskRequestDTO taskRequest = getTaskRequestDTO(reportId, apiTestCase, runModeConfig);
execute(taskRequest, apiTestCase, apiTestCaseBlob);
} catch (Exception e) {
LogUtils.error("执行用例失败 {}-{}", reportId, id);
LogUtils.error(e);
if (errorCount.getAndIncrement() > 10) {
LogUtils.error("批量执行用例失败错误次数超过10次停止执行");
return;
}
}
}
});
}
/**
* Initialize the report steps of the integrated report
*
* @param ids
* @param apiCaseMap
* @param reportId
*/
private void initApiReportSteps(List<String> ids, Map<String, ApiTestCase> apiCaseMap, String reportId) {
AtomicLong sort = new AtomicLong(1);
List<ApiReportStep> apiReportSteps = ids.stream().map(id -> {
ApiReportStep apiReportStep = getApiReportStep(apiCaseMap.get(id), reportId, sort.getAndIncrement());
return apiReportStep;
}).collect(Collectors.toList());
apiReportService.insertApiReportStep(apiReportSteps);
}
/**
* Initialize a single report step of the integrated report
*/
private void initApiReportSteps(ApiTestCase apiTestCase, String reportId, long sort) {
ApiReportStep apiReportStep = getApiReportStep(apiTestCase, reportId, sort);
apiReportService.insertApiReportStep(List.of(apiReportStep));
}
private ApiReportStep getApiReportStep(ApiTestCase apiTestCase, String reportId, long sort) {
ApiReportStep apiReportStep = new ApiReportStep();
apiReportStep.setReportId(reportId);
apiReportStep.setStepId(apiTestCase.getId());
apiReportStep.setSort(sort);
apiReportStep.setName(apiTestCase.getName());
apiReportStep.setStepType(ApiExecuteResourceType.API_CASE.name());
return apiReportStep;
}
private ApiRunModeConfigDTO getRunModeConfig(ApiTestCaseBatchRunRequest request) {
ApiRunModeConfigDTO runModeConfig = BeanUtils.copyBean(new ApiRunModeConfigDTO(), request.getRunModeConfig());
if (StringUtils.isNotBlank(request.getRunModeConfig().getIntegratedReportName()) && isIntegratedReport(runModeConfig)) {
runModeConfig.setCollectionReport(new CollectionReportDTO());
runModeConfig.getCollectionReport().setReportName(request.getRunModeConfig().getIntegratedReportName());
}
return runModeConfig;
}
private boolean isIntegratedReport(ApiRunModeConfigDTO runModeConfig) {
return BooleanUtils.isTrue(runModeConfig.getIntegratedReport());
}
/**
* Pre-generate the execution report for the cases
*
* @param runModeConfig
* @param ids
* @return
*/
private ApiReport initIntegratedReport(ApiRunModeConfigDTO runModeConfig, List<String> ids, String userId, String projectId) {
ApiReport apiReport = getApiReport(runModeConfig, userId);
apiReport.setName(runModeConfig.getCollectionReport().getReportName());
apiReport.setIntegrated(true);
apiReport.setProjectId(projectId);
// Initialize the association between the integrated report and the cases
List<ApiTestCaseRecord> records = ids.stream().map(id -> {
ApiTestCaseRecord record = new ApiTestCaseRecord();
record.setApiReportId(apiReport.getId());
record.setApiTestCaseId(id);
return record;
}).collect(Collectors.toList());
apiReportService.insertApiReport(List.of(apiReport), records);
// Set the integrated report execution parameters
runModeConfig.getCollectionReport().setReportId(apiReport.getId());
return apiReport;
}
/**
* Execute the next task of the serial run
*
* @param queue
* @param queueDetail
*/
public void executeNextTask(ExecutionQueue queue, ExecutionQueueDetail queueDetail) {
ApiRunModeConfigDTO runModeConfig = queue.getRunModeConfig();
String resourceId = queueDetail.getResourceId();
ApiTestCase apiTestCase = apiTestCaseMapper.selectByPrimaryKey(resourceId);
ApiTestCaseBlob apiTestCaseBlob = apiTestCaseBlobMapper.selectByPrimaryKey(resourceId);
if (apiTestCase == null) {
LogUtils.info("当前执行任务的用例已删除 {}", resourceId);
return;
}
String reportId;
if (isIntegratedReport(runModeConfig)) {
String integratedReportId = runModeConfig.getCollectionReport().getReportId();
initApiReportSteps(apiTestCase, integratedReportId, queueDetail.getSort());
reportId = UUID.randomUUID().toString();
} else {
reportId = initApiReport(runModeConfig, List.of(apiTestCase), queue.getUserId()).get(0).getApiReportId();
}
TaskRequestDTO taskRequest = getTaskRequestDTO(reportId, apiTestCase, runModeConfig);
taskRequest.setQueueId(queue.getQueueId());
execute(taskRequest, apiTestCase, apiTestCaseBlob);
}
/**
* Execute a single task of the batch
*
* @param apiTestCase
* @param apiTestCaseBlob
*/
public void execute(TaskRequestDTO taskRequest, ApiTestCase apiTestCase, ApiTestCaseBlob apiTestCaseBlob) {
ApiParamConfig apiParamConfig = apiExecuteService.getApiParamConfig(taskRequest.getReportId());
apiParamConfig.setEnvConfig(environmentService.get(getEnvId(taskRequest.getRunModeConfig(), apiTestCase)));
ApiResourceRunRequest runRequest = new ApiResourceRunRequest();
runRequest.setTestElement(ApiDataUtils.parseObject(new String(apiTestCaseBlob.getRequest()), AbstractMsTestElement.class));
apiExecuteService.apiExecute(runRequest, taskRequest, apiParamConfig);
}
private TaskRequestDTO getTaskRequestDTO(String reportId, ApiTestCase apiTestCase, ApiRunModeConfigDTO runModeConfig) {
TaskRequestDTO taskRequest = apiTestCaseService.getTaskRequest(reportId, apiTestCase.getId(), apiTestCase.getProjectId(), ApiExecuteRunMode.RUN.name());
taskRequest.setSaveResult(true);
taskRequest.setRealTime(false);
taskRequest.setRunModeConfig(runModeConfig);
return taskRequest;
}
/**
* Pre-generate the execution reports for the cases
*
* @param runModeConfig
* @param apiTestCases
* @return
*/
private List<ApiTestCaseRecord> initApiReport(ApiRunModeConfigDTO runModeConfig, List<ApiTestCase> apiTestCases, String userId) {
List<ApiReport> apiReports = new ArrayList<>();
List<ApiTestCaseRecord> apiTestCaseRecords = new ArrayList<>();
for (ApiTestCase apiTestCase : apiTestCases) {
ApiReport apiReport = getApiReport(runModeConfig, apiTestCase, userId);
ApiTestCaseRecord apiTestCaseRecord = getApiTestCaseRecord(apiTestCase, apiReport);
apiReports.add(apiReport);
apiTestCaseRecords.add(apiTestCaseRecord);
}
apiReportService.insertApiReport(apiReports, apiTestCaseRecords);
return apiTestCaseRecords;
}
private ApiTestCaseRecord getApiTestCaseRecord(ApiTestCase apiTestCase, ApiReport apiReport) {
ApiTestCaseRecord apiTestCaseRecord = new ApiTestCaseRecord();
apiTestCaseRecord.setApiTestCaseId(apiTestCase.getId());
apiTestCaseRecord.setApiReportId(apiReport.getId());
return apiTestCaseRecord;
}
private ApiReport getApiReport(ApiRunModeConfigDTO runModeConfig, ApiTestCase apiTestCase, String userId) {
ApiReport apiReport = getApiReport(runModeConfig, userId);
apiReport.setEnvironmentId(getEnvId(runModeConfig, apiTestCase));
apiReport.setName(apiTestCase.getName());
apiReport.setProjectId(apiTestCase.getProjectId());
return apiReport;
}
private ApiReport getApiReport(ApiRunModeConfigDTO runModeConfig, String userId) {
ApiReport apiReport = new ApiReport();
apiReport.setId(IDGenerator.nextStr());
apiReport.setDeleted(false);
apiReport.setIntegrated(false);
apiReport.setEnvironmentId(runModeConfig.getEnvironmentId());
apiReport.setRunMode(runModeConfig.getRunMode());
apiReport.setStatus(ApiReportStatus.PENDING.name());
apiReport.setStartTime(System.currentTimeMillis());
apiReport.setUpdateTime(System.currentTimeMillis());
apiReport.setTriggerMode(TaskTriggerMode.BATCH.name());
apiReport.setUpdateUser(userId);
apiReport.setCreateUser(userId);
apiReport.setPoolId(runModeConfig.getPoolId());
return apiReport;
}
/**
* Get the environment ID for execution.
* The environment from the run configuration takes precedence;
* if it is not set, the case's own environment is used.
*
* @param runModeConfig
* @param apiTestCase
* @return
*/
private String getEnvId(ApiRunModeConfigDTO runModeConfig, ApiTestCase apiTestCase) {
return StringUtils.isBlank(runModeConfig.getEnvironmentId()) ? apiTestCase.getEnvironmentId() : runModeConfig.getEnvironmentId();
}
/**
* Initialize the execution queue
*
* @param resourceIds
* @param runModeConfig
* @return
*/
private ExecutionQueue initExecutionQueue(List<String> resourceIds, ApiRunModeConfigDTO runModeConfig, String userId) {
ExecutionQueue queue = getExecutionQueue(runModeConfig, userId);
List<ExecutionQueueDetail> queueDetails = new ArrayList<>();
AtomicInteger sort = new AtomicInteger(0);
for (String resourceId : resourceIds) {
ExecutionQueueDetail queueDetail = new ExecutionQueueDetail();
queueDetail.setResourceType(ApiExecuteResourceType.API_CASE.name());
queueDetail.setResourceId(resourceId);
queueDetail.setSort(sort.getAndIncrement());
queueDetails.add(queueDetail);
}
apiExecutionQueueService.insertQueue(queue, queueDetails);
return queue;
}
private ExecutionQueue getExecutionQueue(ApiRunModeConfigDTO runModeConfig, String userId) {
ExecutionQueue queue = new ExecutionQueue();
queue.setQueueId(UUID.randomUUID().toString());
queue.setRunModeConfig(runModeConfig);
queue.setCreateTime(System.currentTimeMillis());
queue.setUserId(userId);
return queue;
}
}

View File

@ -26,7 +26,6 @@ import io.metersphere.sdk.constants.ApplicationNumScope;
import io.metersphere.sdk.constants.DefaultRepositoryDir;
import io.metersphere.sdk.domain.Environment;
import io.metersphere.sdk.domain.EnvironmentExample;
import io.metersphere.sdk.dto.api.task.ApiRunModeConfigDTO;
import io.metersphere.sdk.dto.api.task.TaskRequestDTO;
import io.metersphere.sdk.exception.MSException;
import io.metersphere.sdk.mapper.EnvironmentMapper;
@ -400,10 +399,7 @@ public class ApiTestCaseService extends MoveNodeService {
public List<String> doSelectIds(ApiTestCaseBatchRequest request, boolean deleted) {
if (request.isSelectAll()) {
List<String> ids = extApiTestCaseMapper.getIds(request, deleted);
if (org.apache.commons.collections4.CollectionUtils.isNotEmpty(request.getSelectIds())) {
ids.addAll(request.getSelectIds());
}
if (org.apache.commons.collections4.CollectionUtils.isNotEmpty(request.getExcludeIds())) {
if (CollectionUtils.isNotEmpty(request.getExcludeIds())) {
ids.removeAll(request.getExcludeIds());
}
return new ArrayList<>(ids.stream().distinct().toList());
@ -668,12 +664,10 @@ public class ApiTestCaseService extends MoveNodeService {
return apiExecuteService.apiExecute(runRequest, taskRequest, apiParamConfig);
}
private TaskRequestDTO getTaskRequest(String reportId, String resourceId, String projectId, String runModule) {
public TaskRequestDTO getTaskRequest(String reportId, String resourceId, String projectId, String runModule) {
TaskRequestDTO taskRequest = apiExecuteService.getTaskRequest(reportId, resourceId, projectId);
taskRequest.setResourceType(ApiResourceType.API_CASE.name());
ApiRunModeConfigDTO apiRunModeConfig = new ApiRunModeConfigDTO();
apiRunModeConfig.setRunMode(runModule);
taskRequest.setRunModeConfig(apiRunModeConfig);
taskRequest.setRunMode(runModule);
return taskRequest;
}

View File

@ -0,0 +1,38 @@
package io.metersphere.api.service.queue;
import jakarta.annotation.Resource;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;
import java.util.List;
@Service
public class ApiExecutionSetService {
public static final String SET_PREFIX = "set:";
@Resource
private RedisTemplate<String, String> redisTemplate;
/**
* Initialize the execution set.
* Stores the resource IDs that need to be executed.
* @param setId
* @param resourceIds
*/
public void initSet(String setId, List<String> resourceIds) {
resourceIds.forEach(resourceId -> redisTemplate.opsForSet().add(SET_PREFIX + setId, resourceId));
}
/**
* Remove an item from the execution set
*/
public void removeItem(String setId, String resourceId) {
redisTemplate.opsForSet().remove(SET_PREFIX + setId, resourceId);
Long size = redisTemplate.opsForSet().size(SET_PREFIX + setId);
if (size == null || size == 0) {
// Delete the set once it has no elements left
redisTemplate.delete(SET_PREFIX + setId);
}
}
}
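
The diff does not show who calls removeItem. Below is a hedged sketch of a hypothetical result consumer: each time a case result for an integrated report arrives, it removes the case from the set and, because removeItem deletes the key once the set is empty, a missing key signals that the whole batch is done. The surrounding names (onCaseResult, finishIntegratedReport) are assumptions, not part of this commit.

// Hypothetical consumer sketch; only removeItem and SET_PREFIX come from this commit.
import io.metersphere.api.service.queue.ApiExecutionSetService;
import org.springframework.data.redis.core.RedisTemplate;

class IntegratedReportProgressSketch {
    private final ApiExecutionSetService apiExecutionSetService;
    private final RedisTemplate<String, String> redisTemplate;

    IntegratedReportProgressSketch(ApiExecutionSetService setService, RedisTemplate<String, String> redisTemplate) {
        this.apiExecutionSetService = setService;
        this.redisTemplate = redisTemplate;
    }

    void onCaseResult(String integratedReportId, String caseId) {
        apiExecutionSetService.removeItem(integratedReportId, caseId);
        // removeItem deletes the whole key once the set is empty, so a missing key means every case has reported back
        String setKey = ApiExecutionSetService.SET_PREFIX + integratedReportId;
        if (!Boolean.TRUE.equals(redisTemplate.hasKey(setKey))) {
            finishIntegratedReport(integratedReportId);  // hypothetical finalization step
        }
    }

    void finishIntegratedReport(String reportId) {
        // e.g. aggregate the per-case counts and flip the integrated report status to a final state
    }
}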

View File

@ -44,7 +44,6 @@ import io.metersphere.sdk.domain.Environment;
import io.metersphere.sdk.domain.EnvironmentExample;
import io.metersphere.sdk.domain.EnvironmentGroup;
import io.metersphere.sdk.domain.EnvironmentGroupExample;
import io.metersphere.sdk.dto.api.task.ApiRunModeConfigDTO;
import io.metersphere.sdk.dto.api.task.TaskRequestDTO;
import io.metersphere.sdk.exception.MSException;
import io.metersphere.sdk.file.FileCenter;
@ -1159,9 +1158,7 @@ public class ApiScenarioService extends MoveNodeService{
private TaskRequestDTO getTaskRequest(String reportId, String resourceId, String projectId, String runModule) {
TaskRequestDTO taskRequest = apiExecuteService.getTaskRequest(reportId, resourceId, projectId);
taskRequest.setResourceType(ApiResourceType.API_SCENARIO.name());
ApiRunModeConfigDTO apiRunModeConfig = new ApiRunModeConfigDTO();
apiRunModeConfig.setRunMode(runModule);
taskRequest.setRunModeConfig(apiRunModeConfig);
taskRequest.setRunMode(runModule);
return taskRequest;
}

View File

@ -20,7 +20,6 @@ import org.springframework.mock.web.MockMultipartFile;
import java.util.UUID;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**

View File

@ -18,6 +18,7 @@ import io.metersphere.api.utils.ApiDataUtils;
import io.metersphere.project.domain.ProjectApplication;
import io.metersphere.project.domain.ProjectApplicationExample;
import io.metersphere.project.mapper.ProjectApplicationMapper;
import io.metersphere.sdk.constants.ApiExecuteResourceType;
import io.metersphere.sdk.constants.ApiReportStatus;
import io.metersphere.sdk.constants.PermissionConstants;
import io.metersphere.sdk.constants.SessionConstants;
@ -93,7 +94,6 @@ public class ApiReportControllerTests extends BaseTest {
apiReport.setIntegrated(true);
}
apiReport.setTriggerMode("api-trigger-mode" + i);
apiReport.setVersionId("api-version-id" + i);
reports.add(apiReport);
ApiTestCaseRecord record = new ApiTestCaseRecord();
record.setApiTestCaseId("api-resource-id" + i);
@ -108,7 +108,7 @@ public class ApiReportControllerTests extends BaseTest {
apiReportStep.setStepId("api-report-step-id" + i);
apiReportStep.setReportId("api-report-id-success" + i);
apiReportStep.setSort(0L);
apiReportStep.setStepType("case");
apiReportStep.setStepType(ApiExecuteResourceType.API_CASE.name());
steps.add(apiReportStep);
}
apiReportService.insertApiReportStep(steps);
@ -250,7 +250,6 @@ public class ApiReportControllerTests extends BaseTest {
apiReport.setRunMode("api-run-mode");
apiReport.setStatus(ApiReportStatus.SUCCESS.name());
apiReport.setTriggerMode("api-trigger-mode");
apiReport.setVersionId("api-version-id");
reports.add(apiReport);
ApiTestCaseRecord record = new ApiTestCaseRecord();
record.setApiTestCaseId("api-resource-id");
@ -262,7 +261,7 @@ public class ApiReportControllerTests extends BaseTest {
apiReportStep.setStepId("test-report-step-id" + i);
apiReportStep.setReportId("test-report-id");
apiReportStep.setSort((long) i);
apiReportStep.setStepType("case");
apiReportStep.setStepType(ApiExecuteResourceType.API_CASE.name());
steps.add(apiReportStep);
}

View File

@ -96,7 +96,6 @@ public class ApiReportSendNoticeTests extends BaseTest {
apiReport.setStatus(ApiReportStatus.FAKE_ERROR.name());
}
apiReport.setTriggerMode("api-trigger-mode" + i);
apiReport.setVersionId("api-version-id" + i);
reports.add(apiReport);
ApiTestCaseRecord record = new ApiTestCaseRecord();
record.setApiTestCaseId("send-api-resource-id" + i);

View File

@ -247,7 +247,6 @@ public class ApiTaskCenterControllerTests extends BaseTest {
apiReport.setStatus(ApiReportStatus.RUNNING.name());
}
apiReport.setTriggerMode("task-MANUAL");
apiReport.setVersionId("api-version-id" + i);
reports.add(apiReport);
ApiTestCaseRecord record = new ApiTestCaseRecord();
record.setApiTestCaseId("task-api-resource-id" + i);

View File

@ -68,7 +68,8 @@ import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
@AutoConfigureMockMvc
@ -97,6 +98,7 @@ public class ApiTestCaseControllerTests extends BaseTest {
private static final String HISTORY = "operation-history/page";
private static final String DEBUG = "debug";
private static final String RUN = "run/{0}/{1}";
private static final String BATCH_RUN = "batch/run";
private static final ResultMatcher ERROR_REQUEST_MATCHER = status().is5xxServerError();
private static ApiTestCase apiTestCase;
@ -433,6 +435,37 @@ public class ApiTestCaseControllerTests extends BaseTest {
requestGetPermissionTest(PermissionConstants.PROJECT_API_DEFINITION_CASE_EXECUTE, RUN, apiTestCase.getId(), "11111");
}
@Test
@Order(3)
public void batchRun() throws Exception {
ApiTestCaseBatchRunRequest request = new ApiTestCaseBatchRunRequest();
List<String> ids = new ArrayList<>();
ids.add(apiTestCase.getId());
request.setSelectIds(ids);
request.setProjectId(apiTestCase.getProjectId());
ApiTestCaseBatchRunRequest.ApiRunModeRequest apiRunModeRequest = new ApiTestCaseBatchRunRequest.ApiRunModeRequest();
apiRunModeRequest.setRunMode(ApiBatchRunMode.PARALLEL.name());
apiRunModeRequest.setIntegratedReport(true);
apiRunModeRequest.setStopOnFailure(false);
apiRunModeRequest.setIntegratedReportName("aaaa");
apiRunModeRequest.setPoolId("poolId");
request.setRunModeConfig(apiRunModeRequest);
this.requestPostWithOk(BATCH_RUN, request);
apiRunModeRequest.setIntegratedReport(false);
apiRunModeRequest.setStopOnFailure(true);
this.requestPostWithOk(BATCH_RUN, request);
apiRunModeRequest.setRunMode(ApiBatchRunMode.SERIAL.name());
this.requestPostWithOk(BATCH_RUN, request);
apiRunModeRequest.setIntegratedReport(true);
this.requestPostWithOk(BATCH_RUN, request);
// @@ verify permissions
requestPostPermissionTest(PermissionConstants.PROJECT_API_DEFINITION_CASE_EXECUTE, BATCH_RUN, request);
}
@Test
@Order(3)
public void get() throws Exception {
@ -682,7 +715,6 @@ public class ApiTestCaseControllerTests extends BaseTest {
apiReport.setStatus(ApiReportStatus.ERROR.name());
}
apiReport.setTriggerMode("api-trigger-mode" + i);
apiReport.setVersionId("api-version-id" + i);
reports.add(apiReport);
ApiTestCaseRecord record = new ApiTestCaseRecord();
record.setApiTestCaseId(first.getId());

View File

@ -10,6 +10,7 @@ import io.metersphere.api.service.CleanupApiResourceServiceImpl;
import io.metersphere.api.service.definition.ApiReportService;
import io.metersphere.api.service.scenario.ApiScenarioReportService;
import io.metersphere.api.service.schedule.SwaggerUrlImportJob;
import io.metersphere.sdk.constants.ApiExecuteResourceType;
import io.metersphere.sdk.constants.ApiReportStatus;
import io.metersphere.sdk.constants.ProjectApplicationType;
import io.metersphere.sdk.constants.ScheduleType;
@ -254,7 +255,6 @@ public class CleanupApiTests {
apiReport.setStatus(ApiReportStatus.ERROR.name());
}
apiReport.setTriggerMode("api-trigger-mode" + i);
apiReport.setVersionId("api-version-id" + i);
reports.add(apiReport);
ApiTestCaseRecord record = new ApiTestCaseRecord();
record.setApiTestCaseId("clean-resource-id" + i);
@ -268,7 +268,7 @@ public class CleanupApiTests {
apiReportStep.setStepId("clean-api-report-step-id" + projectId + i);
apiReportStep.setReportId("clean-report-id" + projectId + i);
apiReportStep.setSort(0L);
apiReportStep.setStepType("case");
apiReportStep.setStepType(ApiExecuteResourceType.API_CASE.name());
steps.add(apiReportStep);
}
apiReportService.insertApiReportStep(steps);

View File

@ -1,6 +1,8 @@
package io.metersphere.api.service;
import io.metersphere.api.service.queue.ApiExecutionQueueService;
import io.metersphere.sdk.constants.ApiBatchRunMode;
import io.metersphere.sdk.dto.api.task.ApiRunModeConfigDTO;
import io.metersphere.sdk.dto.queue.ExecutionQueue;
import io.metersphere.sdk.dto.queue.ExecutionQueueDetail;
import jakarta.annotation.Resource;
@ -15,7 +17,6 @@ import org.springframework.data.redis.core.ListOperations;
import org.springframework.data.redis.core.RedisTemplate;
import java.util.List;
import java.util.Map;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
@ -40,27 +41,24 @@ public class ApiExecutionQueueServiceTest {
void testInsertQueue() {
ExecutionQueue queue = new ExecutionQueue();
queue.setQueueId("queueId1");
queue.setReportType("REPORT");
queue.setRunMode("SEQUENTIAL");
queue.setPoolId("poolId1");
ApiRunModeConfigDTO runModeConfig = new ApiRunModeConfigDTO();
runModeConfig.setRunMode(ApiBatchRunMode.PARALLEL.name());
runModeConfig.setGrouped(false);
runModeConfig.setEnvironmentId("envId");
queue.setRunModeConfig(runModeConfig);
queue.setCreateTime(System.currentTimeMillis());
queue.setFailure(true);
queue.setRetryEnable(true);
queue.setRetryNumber(3L);
ExecutionQueueDetail queueDetail1 = new ExecutionQueueDetail();
queueDetail1.setResourceId("resourceId1");
queueDetail1.setSort(1);
queueDetail1.setReportId("reportId1");
queueDetail1.setResourceType("API");
queueDetail1.setEnvMap(Map.of("projectID1", "envID1", "projectID2", "envID2"));
ExecutionQueueDetail queueDetail2 = new ExecutionQueueDetail();
queueDetail2.setResourceId("resourceId2");
queueDetail2.setSort(2);
queueDetail2.setReportId("reportId2");
queueDetail2.setResourceType("CASE");
queueDetail2.setEnvMap(Map.of("projectID1", "envID1", "projectID2", "envID2"));
List<ExecutionQueueDetail> queueDetails = List.of(queueDetail1, queueDetail2);

View File

@ -67,26 +67,26 @@ INSERT INTO `api_definition_mock_config` VALUES
DELETE FROM `api_report` WHERE `id` in ('1', '2', '3', '4','5','6', '7', '8', '9','10','11', '12', '13', '14','15','16', '17', '18', '19','20');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('1', 'Test Report 1', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642000001, UNIX_TIMESTAMP() * 1000, 1642002000, 1000, 'SUCCESS', 'MANUAL', 'SEQUENTIAL', '100660357777795313', 'version_1', b'0', '100001100001', 'env_1', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_1');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('10', 'Test Report 10', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642400101, UNIX_TIMESTAMP() * 1000, 1642402100, 1000, 'ERROR', 'AUTOMATED', 'PARALLEL', '100660357777795313', 'version_5', b'0', '100001100001', 'env_5', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_10');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('11', 'Test Report 11', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642500001, UNIX_TIMESTAMP() * 1000, 1642502000, 1000, 'FAKE_ERROR', 'MANUAL', 'SEQUENTIAL', '100660357777795313', 'version_6', b'0', '100001100001', 'env_6', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_11');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('12', 'Test Report 12', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642500101, UNIX_TIMESTAMP() * 1000, 1642502100, 1000, 'ERROR', 'AUTOMATED', 'PARALLEL', '100660357777795313', 'version_6', b'0', '100001100001', 'env_6', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_12');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('13', 'Test Report 13', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642600001, UNIX_TIMESTAMP() * 1000, 1642602000, 1000, 'SUCCESS', 'MANUAL', 'SEQUENTIAL', '100660357777795313', 'version_7', b'0', '100001100001', 'env_7', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_13');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('14', 'Test Report 14', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642600101, UNIX_TIMESTAMP() * 1000, 1642602100, 1000, 'FAKE_ERROR', 'AUTOMATED', 'PARALLEL', '100660357777795313', 'version_7', b'0', '100001100001', 'env_7', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_14');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('15', 'Test Report 15', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642700001, UNIX_TIMESTAMP() * 1000, 1642702000, 1000, 'SUCCESS', 'MANUAL', 'SEQUENTIAL', '100660357777795313', 'version_8', b'0', '100001100001', 'env_8', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_15');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('16', 'Test Report 16', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642700101, UNIX_TIMESTAMP() * 1000, 1642702100, 1000, 'SUCCESS', 'AUTOMATED', 'PARALLEL', '100660357777795313', 'version_8', b'0', '100001100001', 'env_8', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_16');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('17', 'Test Report 17', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642800001, UNIX_TIMESTAMP() * 1000, 1642802000, 1000, 'SUCCESS', 'MANUAL', 'SEQUENTIAL', '100660357777795313', 'version_9', b'0', '100001100001', 'env_9', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_17');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('18', 'Test Report 18', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642800101, UNIX_TIMESTAMP() * 1000, 1642802100, 1000, 'ERROR', 'AUTOMATED', 'PARALLEL', '100660357777795313', 'version_9', b'0', '100001100001', 'env_9', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_18');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('19', 'Test Report 19', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642900001, UNIX_TIMESTAMP() * 1000, 1642902000, 1000, 'FAKE_ERROR', 'MANUAL', 'SEQUENTIAL', '100660357777795313', 'version_10', b'0', '100001100001', 'env_10', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_19');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('2', 'Test Report 2', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642000101, UNIX_TIMESTAMP() * 1000, 1642002100, 1000, 'ERROR', 'AUTOMATED', 'PARALLEL', '100660357777795313', 'version_1', b'0', '100001100001', 'env_1', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_2');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('20', 'Test Report 20', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642900101, UNIX_TIMESTAMP() * 1000, 1642902100, 1000, 'ERROR', 'AUTOMATED', 'PARALLEL', '100660357777795313', 'version_10', b'0', '100001100001', 'env_10', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_20');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('3', 'Test Report 3', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642100001, UNIX_TIMESTAMP() * 1000, 1642102000, 1000, 'SUCCESS', 'MANUAL', 'SEQUENTIAL', '100660357777795313', 'version_2', b'0', '100001100001', 'env_2', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_3');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('4', 'Test Report 4', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642100101, UNIX_TIMESTAMP() * 1000, 1642102100, 1000, 'ERROR', 'AUTOMATED', 'PARALLEL', '100660357777795313', 'version_2', b'1', '100001100001', 'env_2', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_4');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('5', 'Test Report 5', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642200001, UNIX_TIMESTAMP() * 1000, 1642202000, 1000, 'SUCCESS', 'MANUAL', 'SEQUENTIAL', '100660357777795313', 'version_3', b'1', '100001100001', 'env_3', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_5');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('6', 'Test Report 6', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642200101, UNIX_TIMESTAMP() * 1000, 1642202100, 1000, 'ERROR', 'AUTOMATED', 'PARALLEL', '100660357777795313', 'version_3', b'1', '100001100001', 'env_3', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_6');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('7', 'Test Report 7', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642300001, UNIX_TIMESTAMP() * 1000, 1642302000, 1000, 'SUCCESS', 'MANUAL', 'SEQUENTIAL', '100660357777795313', 'version_4', b'1', '100001100001', 'env_4', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_7');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('8', 'Test Report 8', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642300101, UNIX_TIMESTAMP() * 1000, 1642302100, 1000, 'ERROR', 'AUTOMATED', 'PARALLEL', '100660357777795313', 'version_4', b'1', '100001100001', 'env_4', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_8');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `version_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('9', 'Test Report 9', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642400001, UNIX_TIMESTAMP() * 1000, 1642402000, 1000, 'SUCCESS', 'MANUAL', 'SEQUENTIAL', '100660357777795313', 'version_5', b'1', '100001100001', 'env_5', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_9');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('1', 'Test Report 1', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642000001, UNIX_TIMESTAMP() * 1000, 1642002000, 1000, 'SUCCESS', 'MANUAL', 'SEQUENTIAL', '100660357777795313', b'0', '100001100001', 'env_1', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_1');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('10', 'Test Report 10', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642400101, UNIX_TIMESTAMP() * 1000, 1642402100, 1000, 'ERROR', 'AUTOMATED', 'PARALLEL', '100660357777795313', b'0', '100001100001', 'env_5', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_10');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('11', 'Test Report 11', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642500001, UNIX_TIMESTAMP() * 1000, 1642502000, 1000, 'FAKE_ERROR', 'MANUAL', 'SEQUENTIAL', '100660357777795313', b'0', '100001100001', 'env_6', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_11');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('12', 'Test Report 12', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642500101, UNIX_TIMESTAMP() * 1000, 1642502100, 1000, 'ERROR', 'AUTOMATED', 'PARALLEL', '100660357777795313', b'0', '100001100001', 'env_6', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_12');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('13', 'Test Report 13', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642600001, UNIX_TIMESTAMP() * 1000, 1642602000, 1000, 'SUCCESS', 'MANUAL', 'SEQUENTIAL', '100660357777795313', b'0', '100001100001', 'env_7', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_13');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('14', 'Test Report 14', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642600101, UNIX_TIMESTAMP() * 1000, 1642602100, 1000, 'FAKE_ERROR', 'AUTOMATED', 'PARALLEL', '100660357777795313', b'0', '100001100001', 'env_7', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_14');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('15', 'Test Report 15', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642700001, UNIX_TIMESTAMP() * 1000, 1642702000, 1000, 'SUCCESS', 'MANUAL', 'SEQUENTIAL', '100660357777795313', b'0', '100001100001', 'env_8', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_15');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('16', 'Test Report 16', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642700101, UNIX_TIMESTAMP() * 1000, 1642702100, 1000, 'SUCCESS', 'AUTOMATED', 'PARALLEL', '100660357777795313', b'0', '100001100001', 'env_8', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_16');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('17', 'Test Report 17', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642800001, UNIX_TIMESTAMP() * 1000, 1642802000, 1000, 'SUCCESS', 'MANUAL', 'SEQUENTIAL', '100660357777795313', b'0', '100001100001', 'env_9', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_17');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('18', 'Test Report 18', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642800101, UNIX_TIMESTAMP() * 1000, 1642802100, 1000, 'ERROR', 'AUTOMATED', 'PARALLEL', '100660357777795313', b'0', '100001100001', 'env_9', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_18');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('19', 'Test Report 19', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642900001, UNIX_TIMESTAMP() * 1000, 1642902000, 1000, 'FAKE_ERROR', 'MANUAL', 'SEQUENTIAL', '100660357777795313', b'0', '100001100001', 'env_10', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_19');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('2', 'Test Report 2', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642000101, UNIX_TIMESTAMP() * 1000, 1642002100, 1000, 'ERROR', 'AUTOMATED', 'PARALLEL', '100660357777795313', b'0', '100001100001', 'env_1', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_2');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('20', 'Test Report 20', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642900101, UNIX_TIMESTAMP() * 1000, 1642902100, 1000, 'ERROR', 'AUTOMATED', 'PARALLEL', '100660357777795313', b'0', '100001100001', 'env_10', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_20');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('3', 'Test Report 3', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642100001, UNIX_TIMESTAMP() * 1000, 1642102000, 1000, 'SUCCESS', 'MANUAL', 'SEQUENTIAL', '100660357777795313', b'0', '100001100001', 'env_2', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_3');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('4', 'Test Report 4', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642100101, UNIX_TIMESTAMP() * 1000, 1642102100, 1000, 'ERROR', 'AUTOMATED', 'PARALLEL', '100660357777795313', b'1', '100001100001', 'env_2', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_4');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('5', 'Test Report 5', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642200001, UNIX_TIMESTAMP() * 1000, 1642202000, 1000, 'SUCCESS', 'MANUAL', 'SEQUENTIAL', '100660357777795313', b'1', '100001100001', 'env_3', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_5');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('6', 'Test Report 6', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642200101, UNIX_TIMESTAMP() * 1000, 1642202100, 1000, 'ERROR', 'AUTOMATED', 'PARALLEL', '100660357777795313', b'1', '100001100001', 'env_3', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_6');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('7', 'Test Report 7', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642300001, UNIX_TIMESTAMP() * 1000, 1642302000, 1000, 'SUCCESS', 'MANUAL', 'SEQUENTIAL', '100660357777795313', b'1', '100001100001', 'env_4', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_7');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('8', 'Test Report 8', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642300101, UNIX_TIMESTAMP() * 1000, 1642302100, 1000, 'ERROR', 'AUTOMATED', 'PARALLEL', '100660357777795313', b'1', '100001100001', 'env_4', 10, 5, 10, 85, 150, 145, '50%', '10%', '5%', '80%', '90%', 'script_8');
INSERT INTO `api_report` (`id`, `name`, `test_plan_id`, `create_user`, `delete_time`, `delete_user`, `deleted`, `update_user`, `update_time`, `start_time`, `end_time`, `request_duration`, `status`, `trigger_mode`, `run_mode`, `pool_id`, `integrated`, `project_id`, `environment_id`, `error_count`, `fake_error_count`, `pending_count`, `success_count`, `assertion_count`, `assertion_success_count`, `request_error_rate`, `request_pending_rate`, `request_fake_error_rate`, `request_pass_rate`, `assertion_pass_rate`, `script_identifier`) VALUES ('9', 'Test Report 9', 'NONE', 'admin', NULL, NULL, b'0', 'admin', 1642400001, UNIX_TIMESTAMP() * 1000, 1642402000, 1000, 'SUCCESS', 'MANUAL', 'SEQUENTIAL', '100660357777795313', b'1', '100001100001', 'env_5', 0, 0, 0, 100, 150, 150, '50%', '10%', '5%', '80%', '90%', 'script_9');
DELETE FROM `api_test_case_record` WHERE `api_report_id` in ('1', '2', '3', '4','5','6', '7', '8', '9','10','11', '12', '13', '14','15','16', '17', '18', '19','20');
INSERT INTO `api_test_case_record` (`api_report_id`, `api_test_case_id`) VALUES ('1', '12df5721-c5e6-a38b-e999-3eafcb992094');
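The fixture change above mirrors the entity change in this commit: the first block of rows still wrote a version_id, while the replacement rows omit the column entirely because api_report no longer carries it. In code, a report is assembled from the remaining columns only; a minimal sketch (setter names are inferred from the column list and from the ApiReport usage earlier in this commit, so treat them as assumptions):

// Illustrative only: an ApiReport row built without any version reference.
ApiReport report = new ApiReport();
report.setId("1");
report.setName("Test Report 1");
report.setTriggerMode("MANUAL");
report.setRunMode("SEQUENTIAL");
report.setPoolId("100660357777795313");
report.setIntegrated(false);
report.setProjectId("100001100001");
report.setEnvironmentId("env_1");
// No setVersionId(...) call: the field was removed from the entity and from this test data.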

View File

@ -184,6 +184,9 @@ public class EnvironmentService extends MoveNodeService {
}
public EnvironmentInfoDTO get(String environmentId) {
if (StringUtils.isBlank(environmentId)) {
return null;
}
EnvironmentInfoDTO environmentInfoDTO = new EnvironmentInfoDTO();
Environment environment = environmentMapper.selectByPrimaryKey(environmentId);
if (environment == null) {