refactor(api-test): optimize API import

wxg0103 2024-05-10 18:22:21 +08:00 committed by Craftsman
parent 090d5bcb93
commit 0d27cedd6b
9 changed files with 174 additions and 101 deletions

View File

@@ -12,6 +12,7 @@ import io.metersphere.api.dto.request.ApiEditPosRequest;
import io.metersphere.api.dto.request.ApiTransferRequest;
import io.metersphere.api.dto.request.ImportRequest;
import io.metersphere.api.service.ApiFileResourceService;
import io.metersphere.api.service.definition.ApiDefinitionImportUtilService;
import io.metersphere.api.service.definition.ApiDefinitionLogService;
import io.metersphere.api.service.definition.ApiDefinitionNoticeService;
import io.metersphere.api.service.definition.ApiDefinitionService;
@@ -58,6 +59,8 @@ public class ApiDefinitionController {
private FileModuleService fileModuleService;
@Resource
private ApiFileResourceService apiFileResourceService;
@Resource
private ApiDefinitionImportUtilService apiDefinitionImportUtilService;
@PostMapping(value = "/add")
@Operation(summary = "接口测试-接口管理-添加接口定义")
@ -221,7 +224,7 @@ public class ApiDefinitionController {
@Operation(summary = "接口测试-接口管理-导入接口定义")
public void testCaseImport(@RequestPart(value = "file", required = false) MultipartFile file, @RequestPart("request") ImportRequest request) {
request.setUserId(SessionUtils.getUserId());
apiDefinitionService.apiTestImport(file, request, SessionUtils.getCurrentProjectId());
apiDefinitionImportUtilService.apiTestImport(file, request, SessionUtils.getCurrentProjectId());
}
@PostMapping("/operation-history")

View File

@@ -25,7 +25,7 @@ public class ApiCaseBatchEditRequest extends ApiTestCaseBatchRequest implements
@Schema(description = "Batch edit type. Case priority: Priority, status: Status, tags: Tags, case environment: Environment")
@NotBlank
private String type;
@Schema(description = "默认覆盖原标签")
@Schema(description = "是否追加标签")
private boolean append = false;
@Schema(description = "环境id")
@Size(max = 50, message = "{api_test_case.env_id.length_range}")
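A short usage sketch of the renamed flag (setter names follow the Lombok convention assumed for these DTOs; the tags field is presumed to sit on the shared ApiTestCaseBatchRequest parent): with append = true the submitted tags are merged into each case's existing tags instead of replacing them.

ApiCaseBatchEditRequest edit = new ApiCaseBatchEditRequest();
edit.setType("Tags");                       // one of the batch-edit types listed above
edit.setAppend(true);                       // append to existing tags rather than overwrite
edit.setTags(java.util.List.of("smoke"));   // assumed parent-class field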

View File

@@ -25,7 +25,7 @@ public class ApiMockBatchEditRequest extends ApiTestCaseBatchRequest implements
@Schema(description = "Batch edit type. Status: Status, tags: Tags")
@NotBlank
private String type;
@Schema(description = "默认覆盖原标签")
@Schema(description = "是否追加标签")
private boolean append = false;
@Schema(description = "状态 开启/关闭")
private boolean enable;

View File

@@ -6,6 +6,10 @@
<result column="tags" jdbcType="VARCHAR" property="tags" typeHandler="io.metersphere.handler.ListTypeHandler" />
</resultMap>
<resultMap id="BaseResultMap" type="io.metersphere.api.domain.ApiDefinitionMock">
<result column="tags" jdbcType="VARCHAR" property="tags" typeHandler="io.metersphere.handler.ListTypeHandler" />
</resultMap>
<select id="list" resultMap="ApiDefinitionMockDTO">
select
m.id, m.create_time, m.update_time, m.create_user, m.`name`, m.tags, m.`enable`, m.expect_num, m.project_id, m.status_code,
@@ -36,7 +40,7 @@
<include refid="queryWhereConditionByBatch"/>
</select>
<select id="getTagsByIds" resultType="io.metersphere.api.domain.ApiDefinitionMock">
<select id="getTagsByIds" resultMap="BaseResultMap">
SELECT
m.id, m.tags
FROM

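The resultType-to-resultMap switch matters because tags is stored as a serialized string and only the typeHandler turns it back into a List<String>; with resultType, getTagsByIds would leave tags unmapped. A minimal sketch of what a handler like io.metersphere.handler.ListTypeHandler is assumed to do (JSON storage is a guess):

import java.sql.CallableStatement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import io.metersphere.sdk.util.JSON;
import org.apache.ibatis.type.BaseTypeHandler;
import org.apache.ibatis.type.JdbcType;

// Sketch: persist List<String> as a JSON string column and parse it back on read,
// so columns mapped through this handler surface as real lists instead of raw text.
public class ListTypeHandlerSketch extends BaseTypeHandler<List<String>> {
    @Override
    public void setNonNullParameter(PreparedStatement ps, int i, List<String> parameter, JdbcType jdbcType) throws SQLException {
        ps.setString(i, JSON.toJSONString(parameter));
    }
    @Override
    public List<String> getNullableResult(ResultSet rs, String columnName) throws SQLException {
        return parse(rs.getString(columnName));
    }
    @Override
    public List<String> getNullableResult(ResultSet rs, int columnIndex) throws SQLException {
        return parse(rs.getString(columnIndex));
    }
    @Override
    public List<String> getNullableResult(CallableStatement cs, int columnIndex) throws SQLException {
        return parse(cs.getString(columnIndex));
    }
    private List<String> parse(String value) {
        return value == null || value.isBlank() ? new ArrayList<>() : JSON.parseArray(value, String.class);
    }
}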
View File

@@ -21,6 +21,8 @@ import io.metersphere.api.dto.request.http.RestParam;
import io.metersphere.api.dto.request.http.body.*;
import io.metersphere.api.dto.schema.JsonSchemaItem;
import io.metersphere.api.mapper.*;
import io.metersphere.api.parser.ImportParser;
import io.metersphere.api.parser.ImportParserFactory;
import io.metersphere.api.utils.ApiDataUtils;
import io.metersphere.project.constants.PropertyConstant;
import io.metersphere.project.domain.Project;
@@ -31,10 +33,7 @@ import io.metersphere.sdk.constants.ApplicationNumScope;
import io.metersphere.sdk.constants.HttpMethodConstants;
import io.metersphere.sdk.constants.ModuleConstants;
import io.metersphere.sdk.exception.MSException;
import io.metersphere.sdk.util.BeanUtils;
import io.metersphere.sdk.util.JSON;
import io.metersphere.sdk.util.SubListUtils;
import io.metersphere.sdk.util.Translator;
import io.metersphere.sdk.util.*;
import io.metersphere.system.domain.User;
import io.metersphere.system.dto.sdk.ApiDefinitionCaseDTO;
import io.metersphere.system.dto.sdk.BaseTreeNode;
@@ -56,8 +55,10 @@ import org.apache.ibatis.session.ExecutorType;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionUtils;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import java.util.*;
import java.util.stream.Collectors;
@@ -98,6 +99,42 @@ public class ApiDefinitionImportUtilService {
private static final String FILE_HAR = "har";
private static final String FILE_JSON = "json";
public void apiTestImport(MultipartFile file, ImportRequest request, String projectId) {
if (file != null) {
String originalFilename = file.getOriginalFilename();
if (StringUtils.isNotBlank(originalFilename)) {
String suffixName = originalFilename.substring(originalFilename.indexOf(".") + 1);
this.checkFileSuffixName(request, suffixName);
}
}
if (StringUtils.isBlank(request.getProjectId())) {
request.setProjectId(projectId);
}
ImportParser<?> runService = ImportParserFactory.getImportParser(request.getPlatform());
ApiDefinitionImport apiImport = null;
if (StringUtils.equals(request.getType(), "SCHEDULE")) {
request.setProtocol(ModuleConstants.NODE_PROTOCOL_HTTP);
}
try {
LogUtils.info("=======================数据开始解析====================");
apiImport = (ApiDefinitionImport) Objects.requireNonNull(runService).parse(file == null ? null : file.getInputStream(), request);
LogUtils.info("===================数据解析完成==================");
//TODO 处理mock数据
} catch (Exception e) {
LogUtils.error(e.getMessage(), e);
throw new MSException(Translator.get("parse_data_error"));
}
try {
importApi(request, apiImport);
LogUtils.info("===================数据导入完成==================");
} catch (Exception e) {
LogUtils.error(e);
throw new MSException(Translator.get("user_import_format_wrong"));
}
}
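For context, a hedged sketch of the platform dispatch assumed inside ImportParserFactory.getImportParser; only the Jmeter constant is visible in this diff, and the other platform constant and both parser classes are illustrative:

// Illustrative only: resolve an ImportParser implementation from the platform name.
public class ImportParserFactorySketch {
    public static ImportParser<?> getImportParser(String platform) {
        if (ApiImportPlatform.Jmeter.name().equalsIgnoreCase(platform)) {
            return new JmeterImportParser();     // hypothetical parser class
        }
        if (ApiImportPlatform.Swagger3.name().equalsIgnoreCase(platform)) { // constant assumed
            return new Swagger3ImportParser();   // hypothetical parser class
        }
        return null; // callers guard with Objects.requireNonNull, as above
    }
}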
public void checkFileSuffixName(ImportRequest request, String suffixName) {
if (FILE_JMX.equalsIgnoreCase(suffixName)) {
if (!ApiImportPlatform.Jmeter.name().equalsIgnoreCase(request.getPlatform())) {
@@ -177,19 +214,33 @@ public class ApiDefinitionImportUtilService {
apiLists = apiLists.stream().filter(t -> modulePathMap.containsKey(t.getModulePath())).toList();
ApiDetailWithData apiDealWithData = new ApiDetailWithData();
//Check whether the data is unique
LogUtils.info("Start checking whether the data is unique");
checkApiDataOnly(request, importData, apiLists, apiDealWithData);
LogUtils.info("Finished checking whether the data is unique");
ApiDetailWithDataUpdate apiDetailWithDataUpdate = new ApiDetailWithDataUpdate();
LogUtils.info("开始判断数据是否需要更新");
getNeedUpdateData(request, apiDealWithData, apiDetailWithDataUpdate);
LogUtils.info("判断数据是否需要更新结束");
//不用的数据清空保证内存回收
apiLists = new ArrayList<>();
apiModules = new ArrayList<>();
importData = new ArrayList<>();
List<LogDTO> operationLogs = new ArrayList<>();
//Persist the data
insertData(modulePathMap, idModuleMap, apiDetailWithDataUpdate, request);
insertData(modulePathMap, idModuleMap, apiDetailWithDataUpdate, request, operationLogs);
batchSaveLog(operationLogs);
}
@Async
@Transactional(rollbackFor = Exception.class)
public void batchSaveLog(List<LogDTO> operationLogs) {
LogUtils.info("插入日志开始");
SubListUtils.dealForSubList(operationLogs, 100, operationLogService::batchAdd);
LogUtils.info("插入日志结束");
}
public Long getNextOrder(String projectId) {
@@ -231,7 +282,9 @@ public class ApiDefinitionImportUtilService {
public void insertData(Map<String, BaseTreeNode> modulePathMap,
Map<String, BaseTreeNode> idModuleMap,
ApiDetailWithDataUpdate apiDetailWithDataUpdate,
ImportRequest request) {
ImportRequest request,
List<LogDTO> operationLogs) {
LogUtils.info("开始插入数据");
//先判断是否需要新增模块
List<ApiDefinitionImportDetail> addModuleData = apiDetailWithDataUpdate.getAddModuleData();
List<ApiDefinitionImportDetail> updateModuleData = apiDetailWithDataUpdate.getUpdateModuleData();
@@ -259,19 +312,15 @@
ApiDefinitionBlobMapper apiBlobMapper = sqlSession.getMapper(ApiDefinitionBlobMapper.class);
//Create modules
insertModule(request, addModuleList, moduleMapper);
//Collect the ids of the data that needs updating
List<String> updateModuleLists = updateModuleData.stream().map(ApiDefinitionImportDetail::getId).toList();
insertModule(request, addModuleList, moduleMapper, sqlSession);
//Update module data
updateApiModule(modulePathMap, request, updateModuleData, apiMapper);
updateApiModule(modulePathMap, request, updateModuleData, apiMapper, sqlSession);
List<LogDTO> operationLogs = new ArrayList<>();
List<ApiDefinitionImportDetail> updateRequestData = apiDetailWithDataUpdate.getUpdateRequestData();
//Update API request data
updateApiRequest(request, updateRequestData, updateModuleLists, apiMapper, apiBlobMapper);
updateApiRequest(request, updateRequestData, apiMapper, apiBlobMapper, sqlSession);
Map<String, ApiDefinitionImportDetail> logData = apiDetailWithDataUpdate.getLogData();
Project project = projectMapper.selectByPrimaryKey(request.getProjectId());
@@ -285,13 +334,15 @@
sqlSession.flushStatements();
SqlSessionUtils.closeSqlSession(sqlSession, sqlSessionFactory);
SubListUtils.dealForSubList(operationLogs, 500, operationLogService::batchAdd);
LogUtils.info("插入数据结束");
//发送通知
LogUtils.info("发送通知开始");
List<Map> createResources = new ArrayList<>(JSON.parseArray(JSON.toJSONString(createLists), Map.class));
User user = userMapper.selectByPrimaryKey(request.getUserId());
commonNoticeSendService.sendNotice(NoticeConstants.TaskType.API_DEFINITION_TASK, NoticeConstants.Event.CREATE, createResources, user, request.getProjectId());
List<Map> updateResources = new ArrayList<>(JSON.parseArray(JSON.toJSONString(updateLists), Map.class));
commonNoticeSendService.sendNotice(NoticeConstants.TaskType.API_DEFINITION_TASK, NoticeConstants.Event.UPDATE, updateResources, user, request.getProjectId());
LogUtils.info("发送通知结束");
}
private static void getNeedAddModule(Map<String, BaseTreeNode> modulePathMap, Map<String, BaseTreeNode> idModuleMap, Set<String> differenceSet, List<BaseTreeNode> addModuleList) {
@@ -393,46 +444,55 @@ public class ApiDefinitionImportUtilService {
}
}
private static void updateApiRequest(ImportRequest request, List<ApiDefinitionImportDetail> updateRequestData, List<String> updateModuleLists, ApiDefinitionMapper apiMapper, ApiDefinitionBlobMapper apiBlobMapper) {
updateRequestData.forEach(t -> {
ApiDefinition apiDefinition = new ApiDefinition();
apiDefinition.setId(t.getId());
apiDefinition.setUpdateUser(request.getUserId());
apiDefinition.setUpdateTime(System.currentTimeMillis());
apiMapper.updateByPrimaryKeySelective(apiDefinition);
//Update blob data
ApiDefinitionBlob apiDefinitionBlob = new ApiDefinitionBlob();
apiDefinitionBlob.setId(t.getId());
apiDefinitionBlob.setRequest(JSON.toJSONBytes(t.getRequest()));
apiDefinitionBlob.setResponse(JSON.toJSONBytes(t.getResponse()));
apiBlobMapper.updateByPrimaryKeySelective(apiDefinitionBlob);
private static void updateApiRequest(ImportRequest request, List<ApiDefinitionImportDetail> updateRequestData, ApiDefinitionMapper apiMapper, ApiDefinitionBlobMapper apiBlobMapper, SqlSession sqlSession) {
SubListUtils.dealForSubList(updateRequestData, 100, list -> {
list.forEach(t -> {
ApiDefinition apiDefinition = new ApiDefinition();
apiDefinition.setId(t.getId());
apiDefinition.setUpdateUser(request.getUserId());
apiDefinition.setUpdateTime(System.currentTimeMillis());
apiMapper.updateByPrimaryKeySelective(apiDefinition);
//Update blob data
ApiDefinitionBlob apiDefinitionBlob = new ApiDefinitionBlob();
apiDefinitionBlob.setId(t.getId());
apiDefinitionBlob.setRequest(JSON.toJSONBytes(t.getRequest()));
apiDefinitionBlob.setResponse(JSON.toJSONBytes(t.getResponse()));
apiBlobMapper.updateByPrimaryKeySelective(apiDefinitionBlob);
});
sqlSession.flushStatements();
});
}
private static void updateApiModule(Map<String, BaseTreeNode> modulePathMap, ImportRequest request, List<ApiDefinitionImportDetail> updateModuleData, ApiDefinitionMapper apiMapper) {
updateModuleData.forEach(t -> {
ApiDefinition apiDefinition = new ApiDefinition();
apiDefinition.setId(t.getId());
apiDefinition.setModuleId(modulePathMap.get(t.getModulePath()).getId());
apiDefinition.setUpdateUser(request.getUserId());
apiDefinition.setUpdateTime(System.currentTimeMillis());
apiMapper.updateByPrimaryKeySelective(apiDefinition);
private static void updateApiModule(Map<String, BaseTreeNode> modulePathMap, ImportRequest request, List<ApiDefinitionImportDetail> updateModuleData, ApiDefinitionMapper apiMapper, SqlSession sqlSession) {
SubListUtils.dealForSubList(updateModuleData, 100, list -> {
list.forEach(t -> {
ApiDefinition apiDefinition = new ApiDefinition();
apiDefinition.setId(t.getId());
apiDefinition.setModuleId(modulePathMap.get(t.getModulePath()).getId());
apiDefinition.setUpdateUser(request.getUserId());
apiDefinition.setUpdateTime(System.currentTimeMillis());
apiMapper.updateByPrimaryKeySelective(apiDefinition);
});
sqlSession.flushStatements();
});
}
private void insertModule(ImportRequest request, List<BaseTreeNode> addModuleList, ApiDefinitionModuleMapper moduleMapper) {
addModuleList.forEach(t -> {
ApiDefinitionModule module = new ApiDefinitionModule();
module.setId(t.getId());
module.setName(t.getName());
module.setParentId(t.getParentId());
module.setProjectId(request.getProjectId());
module.setCreateUser(request.getUserId());
module.setPos(getImportNextModuleOrder(request.getProjectId()));
module.setCreateTime(System.currentTimeMillis());
module.setUpdateUser(request.getUserId());
module.setUpdateTime(System.currentTimeMillis());
moduleMapper.insertSelective(module);
private void insertModule(ImportRequest request, List<BaseTreeNode> addModuleList, ApiDefinitionModuleMapper moduleMapper, SqlSession sqlSession) {
SubListUtils.dealForSubList(addModuleList, 100, list -> {
list.forEach(t -> {
ApiDefinitionModule module = new ApiDefinitionModule();
module.setId(t.getId());
module.setName(t.getName());
module.setParentId(t.getParentId());
module.setProjectId(request.getProjectId());
module.setCreateUser(request.getUserId());
module.setPos(getImportNextModuleOrder(request.getProjectId()));
module.setCreateTime(System.currentTimeMillis());
module.setUpdateUser(request.getUserId());
module.setUpdateTime(System.currentTimeMillis());
moduleMapper.insertSelective(module);
});
sqlSession.flushStatements();
});
}
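All three helpers above now push their writes through SubListUtils.dealForSubList and flush the batch SqlSession once per chunk, which keeps each JDBC batch bounded. A minimal sketch of what such a chunking utility is assumed to look like (the real io.metersphere.sdk.util.SubListUtils may differ):

import java.util.List;
import java.util.function.Consumer;

public final class SubListUtilsSketch {
    // Hand a large list to the consumer in fixed-size slices; combined with
    // sqlSession.flushStatements() per slice, each database round trip stays small.
    public static <T> void dealForSubList(List<T> list, int batchSize, Consumer<List<T>> handler) {
        if (list == null || list.isEmpty()) {
            return;
        }
        for (int from = 0; from < list.size(); from += batchSize) {
            handler.accept(list.subList(from, Math.min(from + batchSize, list.size())));
        }
    }
}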

View File

@@ -1,9 +1,14 @@
package io.metersphere.api.service.definition;
import io.metersphere.api.domain.ApiDefinition;
import io.metersphere.api.domain.ApiDefinitionBlob;
import io.metersphere.api.domain.ApiDefinitionBlobExample;
import io.metersphere.api.domain.ApiDefinitionExample;
import io.metersphere.api.dto.definition.*;
import io.metersphere.api.mapper.ApiDefinitionBlobMapper;
import io.metersphere.api.mapper.ApiDefinitionMapper;
import io.metersphere.api.utils.ApiDataUtils;
import io.metersphere.plugin.api.spi.AbstractMsTestElement;
import io.metersphere.project.domain.Project;
import io.metersphere.project.mapper.ProjectMapper;
import io.metersphere.sdk.constants.HttpMethodConstants;
@@ -24,6 +29,8 @@ import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@Service
@Transactional(rollbackFor = Exception.class)
@@ -31,6 +38,8 @@ public class ApiDefinitionLogService {
@Resource
private ApiDefinitionMapper apiDefinitionMapper;
@Resource
private ApiDefinitionBlobMapper apiDefinitionBlobMapper;
@Resource
private ProjectMapper projectMapper;
@@ -122,7 +131,6 @@ public class ApiDefinitionLogService {
}
/**
* Batch-delete API definition logs
*
@@ -224,7 +232,32 @@ public class ApiDefinitionLogService {
* Log deletion of API definitions from the recycle bin
*/
public void batchTrashDelLog(List<String> ids, String userId, String projectId) {
saveBatchLog(projectId, ids, userId, OperationLogType.DELETE.name(), false, OperationLogModule.API_TEST_MANAGEMENT_RECYCLE);
List<LogDTO> dtoList = new ArrayList<>();
if (CollectionUtils.isNotEmpty(ids)) {
Project project = projectMapper.selectByPrimaryKey(projectId);
ApiDefinitionExample example = new ApiDefinitionExample();
example.createCriteria().andIdIn(ids);
List<ApiDefinition> apiDefinitions = apiDefinitionMapper.selectByExample(example);
apiDefinitions.forEach(item -> {
ApiDefinitionDTO apiDefinitionDTO = new ApiDefinitionDTO();
BeanUtils.copyBean(apiDefinitionDTO, item);
LogDTO dto = new LogDTO(
project.getId(),
project.getOrganizationId(),
item.getId(),
userId,
OperationLogType.DELETE.name(),
OperationLogModule.API_TEST_MANAGEMENT_RECYCLE,
item.getName());
dto.setHistory(false);
dto.setPath(OperationLogAspect.getPath());
dto.setMethod(HttpMethodConstants.POST.name());
dto.setOriginalValue(JSON.toJSONBytes(apiDefinitionDTO));
dtoList.add(dto);
});
operationLogService.batchAdd(dtoList);
}
}
private ApiDefinitionDTO getOriginalValue(String id) {
@@ -239,7 +272,6 @@ public class ApiDefinitionLogService {
}
private void saveBatchLog(String projectId, List<String> ids, String userId, String operationType, boolean isHistory, String logModule) {
if (StringUtils.isBlank(logModule)) {
logModule = OperationLogModule.API_TEST_MANAGEMENT_DEFINITION;
@@ -250,10 +282,22 @@ public class ApiDefinitionLogService {
ApiDefinitionExample example = new ApiDefinitionExample();
example.createCriteria().andIdIn(ids);
List<ApiDefinition> apiDefinitions = apiDefinitionMapper.selectByExample(example);
ApiDefinitionBlobExample blobExample = new ApiDefinitionBlobExample();
blobExample.createCriteria().andIdIn(ids);
List<ApiDefinitionBlob> apiDefinitionBlobs = apiDefinitionBlobMapper.selectByExampleWithBLOBs(blobExample);
//Build a map with a stream: key is the id, value is the blob
Map<String, ApiDefinitionBlob> blobMap = apiDefinitionBlobs.stream().collect(Collectors.toMap(ApiDefinitionBlob::getId, b -> b));
String finalLogModule = logModule;
apiDefinitions.forEach(item -> {
ApiDefinitionDTO apiDefinitionDTO = new ApiDefinitionDTO();
CommonBeanFactory.getBean(ApiDefinitionService.class).handleBlob(item.getId(), apiDefinitionDTO);
ApiDefinitionBlob blob = blobMap.get(item.getId());
if (null != blob) {
apiDefinitionDTO.setRequest(ApiDataUtils.parseObject(new String(blob.getRequest()), AbstractMsTestElement.class));
if (blob.getResponse() != null) {
List<HttpResponse> httpResponses = ApiDataUtils.parseArray(new String(blob.getResponse()), HttpResponse.class);
apiDefinitionDTO.setResponse(httpResponses);
}
}
BeanUtils.copyBean(apiDefinitionDTO, item);
LogDTO dto = new LogDTO(
project.getId(),

View File

@@ -338,6 +338,7 @@ public class ApiDefinitionMockService {
SubListUtils.dealForSubList(ids, 500, subList -> deleteResourceByIds(subList, request.getProjectId(), userId));
}
}
public void deleteResourceByIds(List<String> ids, String projectId, String userId) {
List<ApiDefinitionMock> mockList = extApiDefinitionMockMapper.getMockInfoByIds(ids);
@@ -391,6 +392,7 @@ public class ApiDefinitionMockService {
}
apiTestCaseService.checkTagLength(request.getTags());
if (request.isAppend()) {
List<ApiDefinitionMock> tagsByIds = extApiDefinitionMockMapper.getTagsByIds(ids);
Map<String, ApiDefinitionMock> mockMap = extApiDefinitionMockMapper.getTagsByIds(ids)
.stream()
.collect(Collectors.toMap(ApiDefinitionMock::getId, Function.identity()));
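Given the flag renamed above, a hedged sketch of the per-mock merge this branch presumably continues with (everything after mockMap is an assumption based on the flag's description; getTags returns a List<String> thanks to the ListTypeHandler mapping):

// Assumed continuation: merge the submitted tags into each mock's existing tags, deduplicated.
ids.forEach(id -> {
    ApiDefinitionMock mock = mockMap.get(id);
    LinkedHashSet<String> merged = new LinkedHashSet<>(
            mock.getTags() == null ? List.of() : mock.getTags());
    merged.addAll(request.getTags());
    mock.setTags(new ArrayList<>(merged));
});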

View File

@@ -6,16 +6,12 @@ import io.metersphere.api.constants.ApiResourceType;
import io.metersphere.api.controller.result.ApiResultCode;
import io.metersphere.api.domain.*;
import io.metersphere.api.dto.*;
import io.metersphere.api.dto.converter.ApiDefinitionImport;
import io.metersphere.api.dto.debug.ApiFileResourceUpdateRequest;
import io.metersphere.api.dto.debug.ApiResourceRunRequest;
import io.metersphere.api.dto.definition.*;
import io.metersphere.api.dto.request.ApiEditPosRequest;
import io.metersphere.api.dto.request.ApiTransferRequest;
import io.metersphere.api.dto.request.ImportRequest;
import io.metersphere.api.mapper.*;
import io.metersphere.api.parser.ImportParser;
import io.metersphere.api.parser.ImportParserFactory;
import io.metersphere.api.service.ApiCommonService;
import io.metersphere.api.service.ApiExecuteService;
import io.metersphere.api.service.ApiFileResourceService;
@@ -61,7 +57,6 @@ import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionUtils;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import java.nio.charset.StandardCharsets;
import java.util.*;
@@ -524,7 +519,7 @@ public class ApiDefinitionService extends MoveNodeService {
private void checkAddExist(ApiDefinition apiDefinition) {
if (!StringUtils.equals(apiDefinition.getProtocol(), ApiConstants.HTTP_PROTOCOL)) {
return;
}
ApiDefinitionExample example = new ApiDefinitionExample();
example.createCriteria()
@@ -824,9 +819,6 @@ public class ApiDefinitionService extends MoveNodeService {
if (CollectionUtils.isNotEmpty(ids)) {
handleTrashDelApiDefinition(ids, userId, request.getProjectId(), true);
}
String apiDefinitionDirPrefix = DefaultRepositoryDir.getApiDefinitionDir(request.getProjectId(), StringUtils.EMPTY);
apiFileResourceService.deleteByResourceIds(apiDefinitionDirPrefix, ids, request.getProjectId(), userId, OperationLogModule.API_TEST_MANAGEMENT_DEFINITION);
}
private void handleTrashDelApiDefinition(List<String> ids, String userId, String projectId, boolean isBatch) {
@@ -966,38 +958,6 @@ public class ApiDefinitionService extends MoveNodeService {
return apiDefinitionDocDTO;
}
public void apiTestImport(MultipartFile file, ImportRequest request, String projectId) {
if (file != null) {
String originalFilename = file.getOriginalFilename();
if (StringUtils.isNotBlank(originalFilename)) {
String suffixName = originalFilename.substring(originalFilename.indexOf(".") + 1);
apiDefinitionImportUtilService.checkFileSuffixName(request, suffixName);
}
}
if (StringUtils.isBlank(request.getProjectId())) {
request.setProjectId(projectId);
}
ImportParser<?> runService = ImportParserFactory.getImportParser(request.getPlatform());
ApiDefinitionImport apiImport = null;
if (StringUtils.equals(request.getType(), "SCHEDULE")) {
request.setProtocol(ModuleConstants.NODE_PROTOCOL_HTTP);
}
try {
apiImport = (ApiDefinitionImport) Objects.requireNonNull(runService).parse(file == null ? null : file.getInputStream(), request);
//TODO handle mock data
} catch (Exception e) {
LogUtils.error(e.getMessage(), e);
throw new MSException(Translator.get("parse_data_error"));
}
try {
apiDefinitionImportUtilService.importApi(request, apiImport);
} catch (Exception e) {
LogUtils.error(e);
throw new MSException(Translator.get("user_import_format_wrong"));
}
}
public List<OperationHistoryDTO> list(OperationHistoryRequest request) {
return operationHistoryService.listWidthTable(request, API_TABLE);
}

View File

@@ -4,8 +4,8 @@ package io.metersphere.api.service.schedule;
import io.metersphere.api.constants.ApiImportPlatform;
import io.metersphere.api.dto.definition.ApiScheduleDTO;
import io.metersphere.api.dto.request.ImportRequest;
import io.metersphere.api.service.definition.ApiDefinitionImportUtilService;
import io.metersphere.api.service.definition.ApiDefinitionScheduleService;
import io.metersphere.api.service.definition.ApiDefinitionService;
import io.metersphere.sdk.util.BeanUtils;
import io.metersphere.sdk.util.CommonBeanFactory;
import io.metersphere.system.schedule.BaseScheduleJob;
@@ -16,12 +16,12 @@ import org.quartz.JobKey;
import org.quartz.TriggerKey;
public class SwaggerUrlImportJob extends BaseScheduleJob {
private ApiDefinitionService apiDefinitionService;
private ApiDefinitionImportUtilService apiDefinitionImportUtilService;
private ApiDefinitionScheduleService apiDefinitionScheduleService;
private UserService userService;
public SwaggerUrlImportJob() {
apiDefinitionService = CommonBeanFactory.getBean(ApiDefinitionService.class);
apiDefinitionImportUtilService = CommonBeanFactory.getBean(ApiDefinitionImportUtilService.class);
apiDefinitionScheduleService = CommonBeanFactory.getBean(ApiDefinitionScheduleService.class);
userService = CommonBeanFactory.getBean(UserService.class);
}
@@ -37,7 +37,7 @@ public class SwaggerUrlImportJob extends BaseScheduleJob {
request.setUserId(jobDataMap.getString("userId"));
request.setType("SCHEDULE");
request.setResourceId(resourceId);
apiDefinitionService.apiTestImport(null, request, request.getProjectId());
apiDefinitionImportUtilService.apiTestImport(null, request, request.getProjectId());
}
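A hedged sketch of how such a job might be registered with plain Quartz (the cron expression and the JobDataMap keys other than "userId" are illustrative; getJobKey is the helper declared just below):

import org.quartz.CronScheduleBuilder;
import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.Scheduler;
import org.quartz.Trigger;
import org.quartz.TriggerBuilder;

// Sketch only: schedule a nightly Swagger-URL import for one resource.
void scheduleSwaggerImport(Scheduler scheduler, String resourceId, String userId) throws Exception {
    JobDetail job = JobBuilder.newJob(SwaggerUrlImportJob.class)
            .withIdentity(SwaggerUrlImportJob.getJobKey(resourceId))
            .usingJobData("resourceId", resourceId) // key assumed
            .usingJobData("userId", userId)         // key read by the job body above
            .build();
    Trigger trigger = TriggerBuilder.newTrigger()
            .withSchedule(CronScheduleBuilder.cronSchedule("0 0 2 * * ?")) // illustrative cron
            .build();
    scheduler.scheduleJob(job, trigger);
}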
public static JobKey getJobKey(String resourceId) {