refactor(API testing): optimize execution logging and unify the output format

fit2-zhao 2022-07-06 18:30:03 +08:00 committed by fit2-zhao
parent 00b081fc45
commit dcb8ad92d9
19 changed files with 51 additions and 54 deletions

View File: ElementUtil.java

@@ -30,7 +30,6 @@ import io.metersphere.jmeter.utils.ScriptEngineUtils;
import io.metersphere.plugin.core.MsParameter;
import io.metersphere.plugin.core.MsTestElement;
import io.metersphere.service.EnvironmentGroupProjectService;
import io.metersphere.utils.LoggerUtil;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.jmeter.config.Arguments;
@@ -633,7 +632,6 @@ public class ElementUtil {
public static void setBaseParams(AbstractTestElement sampler, MsTestElement parent, ParameterConfig config, String id, String indexPath) {
sampler.setProperty("MS-ID", id);
sampler.setProperty("MS-RESOURCE-ID", ElementUtil.getResourceId(id, config, parent, indexPath));
LoggerUtil.debug("mqtt sampler resourceId :" + sampler.getPropertyAsString("MS-RESOURCE-ID"));
}
public static void accuracyHashTree(HashTree hashTree) {
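Reviewer note on the pattern repeated through this commit: one-argument LoggerUtil.info(msg) / LoggerUtil.error(msg) calls become overloads that carry the report ID as a trailing argument, so a report's log lines can be filtered in one place instead of hand-concatenating 报告ID into every message. LoggerUtil itself is not part of this diff; the following is only a sketch of what such overloads could look like, assuming an SLF4J backend with MDC. The class body, MDC key, and layout pattern are illustrative, not MeterSphere's actual implementation.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.MDC;

// Hypothetical sketch of the two-argument logging facade.
public class LoggerUtil {
    private static final Logger LOGGER = LoggerFactory.getLogger(LoggerUtil.class);
    private static final String REPORT_KEY = "reportId"; // assumed MDC key

    public static void info(String msg, String reportId) {
        MDC.put(REPORT_KEY, reportId);   // tag this line with its report
        try {
            LOGGER.info(msg);
        } finally {
            MDC.remove(REPORT_KEY);      // do not leak into unrelated tasks
        }
    }

    public static void error(String msg, String reportId, Throwable t) {
        MDC.put(REPORT_KEY, reportId);
        try {
            LOGGER.error(msg, t);
        } finally {
            MDC.remove(REPORT_KEY);
        }
    }
}

With a layout pattern such as %d %level [%X{reportId}] %msg%n, the report ID lands in a fixed column of every line, which is what lets the messages in the hunks below drop their hand-built 报告ID prefixes.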

View File: MsDubboSampler.java

@@ -30,7 +30,6 @@ import io.metersphere.commons.utils.CommonBeanFactory;
import io.metersphere.commons.utils.LogUtil;
import io.metersphere.plugin.core.MsParameter;
import io.metersphere.plugin.core.MsTestElement;
import io.metersphere.utils.LoggerUtil;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.apache.commons.collections.CollectionUtils;
@@ -96,7 +95,6 @@ public class MsDubboSampler extends MsTestElement {
if (this.getReferenced() != null && MsTestElementConstants.REF.name().equals(this.getReferenced())) {
boolean ref = this.setRefElement();
if (!ref) {
LoggerUtil.debug("引用对象已经被删除:" + this.getId());
return;
}
hashTree = this.getHashTree();

View File: MsHTTPSamplerProxy.java

@@ -141,7 +141,6 @@ public class MsHTTPSamplerProxy extends MsTestElement {
if (this.getReferenced() != null && MsTestElementConstants.REF.name().equals(this.getReferenced())) {
boolean ref = this.setRefElement();
if (!ref) {
LoggerUtil.debug("引用对象已经被删除:" + this.getId());
return;
}
hashTree = this.getHashTree();

View File: MsJDBCSampler.java

@@ -95,7 +95,7 @@ public class MsJDBCSampler extends MsTestElement {
if (this.getReferenced() != null && MsTestElementConstants.REF.name().equals(this.getReferenced())) {
boolean ref = this.setRefElement();
if (!ref) {
LoggerUtil.debug("引用对象已经被删除:" + this.getId());
LoggerUtil.error("引用对象已经被删除:" + this.getId());
return;
}
hashTree = this.getHashTree();
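Note the asymmetry across the three samplers: MsDubboSampler and MsHTTPSamplerProxy drop the deleted-reference debug line entirely, while MsJDBCSampler keeps the message but promotes it from debug to error, so a scenario step that silently skips a deleted REF element now leaves a visible trace in the execution log.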

View File: ApiExecuteService.java

@@ -112,7 +112,7 @@ public class ApiExecuteService {
//通过测试计划id查询环境
request.setReportId(request.getTestPlanId());
}
LoggerUtil.info("开始执行单条用例【 " + testCaseWithBLOBs.getId() + "");
LoggerUtil.info("开始执行单条用例【 " + testCaseWithBLOBs.getId() + "", request.getReportId());
// 多态JSON普通转换会丢失内容需要通过 ObjectMapper 获取
if (testCaseWithBLOBs != null && StringUtils.isNotEmpty(testCaseWithBLOBs.getRequest())) {

View File: ExecTask.java

@@ -23,7 +23,7 @@ public class ExecTask implements Runnable {
CommonBeanFactory.getBean(JMeterService.class).addQueue(request);
Object res = PoolExecBlockingQueueUtil.take(request.getReportId());
if (res == null && !JmeterThreadUtils.isRunning(request.getReportId(), request.getTestId())) {
LoggerUtil.info("执行报告:【 " + request.getReportId() + " 】,资源ID【 " + request.getTestId() + " 】执行超时");
LoggerUtil.info("任务执行超时", request.getReportId());
if (JMeterEngineCache.runningEngine.containsKey(request.getReportId())) {
JMeterEngineCache.runningEngine.remove(request.getReportId());
}
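Context for the null check above: the runner thread parks on a per-report blocking queue and is woken when the report's results arrive (the matching PoolExecBlockingQueueUtil.offer call appears in KafkaListenerTask further down). A null return therefore means the wait timed out, which is treated as a real timeout only when no JMeter thread is still running for that test. The class is not shown in this diff, so the sketch below, including queue type, timeout, and cleanup, is an assumption about its shape rather than the actual implementation.

import java.util.Map;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;

// Hypothetical sketch of a per-report handshake between the thread
// that launches a run and the listener that finishes it.
public class PoolExecBlockingQueueUtil {
    private static final Map<String, BlockingQueue<Object>> QUEUES = new ConcurrentHashMap<>();
    private static final long TIMEOUT_MINUTES = 10; // assumed upper bound

    public static Object take(String reportId) {
        BlockingQueue<Object> q = QUEUES.computeIfAbsent(reportId, k -> new ArrayBlockingQueue<>(1));
        try {
            return q.poll(TIMEOUT_MINUTES, TimeUnit.MINUTES); // null = timed out
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return null;
        } finally {
            QUEUES.remove(reportId); // entry is one-shot; clean up after the wait
        }
    }

    public static void offer(String reportId) {
        BlockingQueue<Object> q = QUEUES.get(reportId);
        if (q != null) {
            q.offer(new Object()); // wake the waiting runner
        }
    }
}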

View File: ApiScenarioExecuteService.java

@@ -381,7 +381,7 @@ public class ApiScenarioExecuteService {
hashTree = request.getTestElement().generateHashTree(config);
LogUtil.info(request.getTestElement().getJmx(hashTree));
} catch (Exception e) {
LoggerUtil.error(e);
LoggerUtil.error("调试失败", request.getReportId(), e);
MSException.throwException(e.getMessage());
}
if (request.isSaved()) {

View File: ApiScenarioParallelService.java

@@ -51,7 +51,7 @@ public class ApiScenarioParallelService {
if (!pool.isPool()) {
runRequest.setHashTree(GenerateHashTreeUtil.generateHashTree(dataDTO.getScenario(), dataDTO.getPlanEnvMap(), runRequest));
}
LoggerUtil.info("进入并行模式,准备执行场景:[ " + executeQueue.get(reportId).getReport().getName() + " ], 报告ID [ " + reportId + " ]");
LoggerUtil.info("进入并行模式,准备执行场景:[ " + executeQueue.get(reportId).getReport().getName() + " ]", reportId);
jMeterService.run(runRequest);
}
}

View File: ApiScenarioSerialService.java

@@ -146,7 +146,7 @@ public class ApiScenarioSerialService {
ResultDTO dto = new ResultDTO();
BeanUtils.copyBean(dto, runRequest);
CommonBeanFactory.getBean(ApiExecutionQueueService.class).queueNext(dto);
LoggerUtil.error("执行队列[" + queue.getId() + "报告[" + queue.getReportId() + "入队列失败:", e);
LoggerUtil.error("执行队列[" + queue.getId() + "]入队列失败:", queue.getReportId(), e);
}
}
@@ -159,7 +159,7 @@
execResult.setStartTime(System.currentTimeMillis());
execResult.setStatus(APITestStatus.Running.name());
apiDefinitionExecResultMapper.updateByPrimaryKeySelective(execResult);
LoggerUtil.info("进入串行模式,准备执行资源:[" + execResult.getName() + " ], 报告ID [" + execResult.getId() + "]");
LoggerUtil.info("进入串行模式,准备执行资源:[" + execResult.getName() + " ]", execResult.getId());
}
}
@@ -173,7 +173,7 @@
this.put("userId", report.getCreateUser());
}});
apiScenarioReportMapper.updateByPrimaryKey(report);
LoggerUtil.info("进入串行模式,准备执行资源:[ " + report.getName() + " ], 报告ID [ " + report.getId() + " ]");
LoggerUtil.info("进入串行模式,准备执行资源:[ " + report.getName() + " ]", report.getId());
}
}
@@ -232,7 +232,7 @@
testPlan.getHashTree().add(group);
testPlan.toHashTree(jmeterHashTree, testPlan.getHashTree(), new ParameterConfig());
LoggerUtil.info("报告ID" + runRequest.getReportId() + " 用例资源:" + caseWithBLOBs.getName() + ", 生成执行脚本JMX成功");
LoggerUtil.info("用例资源:" + caseWithBLOBs.getName() + ", 生成执行脚本JMX成功", runRequest.getReportId());
return jmeterHashTree;
}
} catch (Exception ex) {
@@ -241,7 +241,7 @@
ResultDTO dto = new ResultDTO();
BeanUtils.copyBean(dto, runRequest);
CommonBeanFactory.getBean(ApiExecutionQueueService.class).queueNext(dto);
LoggerUtil.error("报告ID" + runRequest.getReportId() + " 用例资源:" + testId + ", 生成执行脚本失败", ex);
LoggerUtil.error("用例资源:" + testId + ", 生成执行脚本失败", runRequest.getReportId(), ex);
}
return null;
}
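Both catch blocks in this file follow the same fail-forward rule: even when the JMX script cannot be generated, queueNext is still invoked so the serial chain advances instead of stalling behind the failed entry. A generic, self-contained illustration of that rule (simplified, not MeterSphere code):

import java.util.ArrayDeque;
import java.util.Queue;

// Generic illustration of the fail-forward rule: dequeue first, then
// run; a failure is reported but never blocks the entries behind it.
public class FailForwardQueue {
    private final Queue<Runnable> tasks = new ArrayDeque<>();

    public void add(Runnable task) {
        tasks.add(task);
    }

    public void runSerially() {
        while (!tasks.isEmpty()) {
            Runnable task = tasks.poll(); // advance the queue unconditionally
            try {
                task.run();
            } catch (RuntimeException e) {
                // report and keep going: the analogue of queueNext above
                System.err.println("task failed, advancing queue: " + e);
            }
        }
    }
}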

View File: GenerateHashTreeUtil.java

@@ -175,10 +175,10 @@ public class GenerateHashTreeUtil {
config.setReportType(runRequest.getReportType());
testPlan.toHashTree(jmeterHashTree, testPlan.getHashTree(), config);
LoggerUtil.info("报告ID" + runRequest.getReportId() + " 场景资源:" + item.getName() + ", 生成执行脚本JMX成功");
LoggerUtil.info("场景资源:" + item.getName() + ", 生成执行脚本JMX成功", runRequest.getReportId());
} catch (Exception ex) {
remakeException(runRequest);
LoggerUtil.error("报告ID" + runRequest.getReportId() + " 场景资源:" + item.getName() + ", 生成执行脚本失败", ex);
LoggerUtil.error("场景资源:" + item.getName() + ", 生成执行脚本失败", runRequest.getReportId(), ex);
}
LogUtil.info(testPlan.getJmx(jmeterHashTree));

View File: APISingleResultListener.java

@@ -34,6 +34,7 @@ public class APISingleResultListener implements MsExecListener {
@Override
public void handleTeardownTest(List<SampleResult> results, ResultDTO dto, Map<String, Object> kafkaConfig) {
LoggerUtil.info("接收到执行结果:" + results.size(), dto.getReportId());
LoggerUtil.info("接收到执行结果开始处理报告【" + dto.getReportId() + " 】,资源【 " + dto.getTestId() + "");
queues.addAll(results);
}
@@ -69,8 +70,9 @@
LoggerUtil.info("Check Processing Test Plan report status" + dto.getQueueId() + "" + dto.getTestId());
apiExecutionQueueService.testPlanReportTestEnded(dto.getTestPlanReportId());
}
LoggerUtil.info("TEST-END处理结果集完成", dto.getReportId());
} catch (Exception e) {
LoggerUtil.error(e);
LoggerUtil.error("结果集处理异常", dto.getReportId(), e);
} finally {
if (JMeterEngineCache.runningEngine.containsKey(dto.getReportId())) {
JMeterEngineCache.runningEngine.remove(dto.getReportId());

View File: JMeterService.java

@@ -127,7 +127,7 @@ public class JMeterService {
JMeterBase.addBackendListener(request, request.getHashTree(), APISingleResultListener.class.getCanonicalName());
}
LoggerUtil.info("报告:[" + request.getReportId() + "] 资源:[" + request.getTestId() + "] 加入JMETER中开始执行");
LoggerUtil.info("资源:[" + request.getTestId() + "] 加入JMETER中开始执行", request.getReportId());
LocalRunner runner = new LocalRunner(request.getHashTree());
runner.run(request.getReportId());
}
@@ -137,13 +137,13 @@
// 如果是K8S调用
if (request.getPool().isK8s()) {
try {
LoggerUtil.info("开始发送请求[ " + request.getTestId() + " ] 到K8S节点执行");
LoggerUtil.info("开始发送请求[ " + request.getTestId() + " ] 到K8S节点执行", request.getReportId());
final Engine engine = EngineFactory.createApiEngine(request);
engine.start();
} catch (Exception e) {
RemakeReportService apiScenarioReportService = CommonBeanFactory.getBean(RemakeReportService.class);
apiScenarioReportService.testEnded(request, e.getMessage());
LoggerUtil.error("调用K8S执行请求[ " + request.getTestId() + " ],报告:[" + request.getReportId() + "] 失败:", e);
LoggerUtil.error("调用K8S执行请求[ " + request.getTestId() + " ]失败:", request.getReportId(), e);
}
} else {
this.send(request);
@@ -161,26 +161,25 @@
config = SmoothWeighted.getResource(request.getPoolId());
}
if (config == null) {
LoggerUtil.info("未获取到资源池,请检查配置【系统设置-系统-测试资源池】");
LoggerUtil.info("未获取到资源池,请检查配置【系统设置-系统-测试资源池】", request.getReportId());
RemakeReportService remakeReportService = CommonBeanFactory.getBean(RemakeReportService.class);
remakeReportService.remake(request);
return;
}
request.setCorePoolSize(config.getCorePoolSize());
request.setEnable(config.isEnable());
LoggerUtil.info("开始发送请求【 " + request.getReportId() + " 】,资源【 " + request.getTestId() + "" + config.getUrl() + " 节点执行");
LoggerUtil.info("开始发送请求【 " + request.getTestId() + " 】到 " + config.getUrl() + " 节点执行", request.getReportId());
ResponseEntity<String> result = restTemplate.postForEntity(config.getUrl(), request, String.class);
if (result == null || !StringUtils.equals("SUCCESS", result.getBody())) {
RemakeReportService remakeReportService = CommonBeanFactory.getBean(RemakeReportService.class);
remakeReportService.remake(request);
LoggerUtil.error("发送请求[ " + request.getTestId() + " ] 到" + config.getUrl() + " 节点执行失败");
LoggerUtil.error("发送请求[ " + request.getTestId() + " ] 到" + config.getUrl() + " 节点执行失败", request.getReportId());
LoggerUtil.info(result.getBody());
}
} catch (Exception e) {
RemakeReportService remakeReportService = CommonBeanFactory.getBean(RemakeReportService.class);
remakeReportService.remake(request);
LoggerUtil.error("发送请求[ " + request.getTestId() + " ] 执行失败:", e);
LoggerUtil.error(e);
LoggerUtil.error("发送请求[ " + request.getTestId() + " ] 执行失败,进行数据回滚:", request.getReportId(), e);
}
}
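SmoothWeighted.calculate / SmoothWeighted.getResource above choose which node URL the request is posted to, and a null config falls back to remaking the report. The class body is not in this diff; the name suggests the nginx-style smooth weighted round-robin, which in outline works like the sketch below, where Node stands in for the real test-resource config type (an illustrative sketch, not the actual class):

import java.util.List;

// Illustrative smooth weighted round-robin (the nginx variant).
class Node {
    final String url;
    final int weight;
    int currentWeight;

    Node(String url, int weight) {
        this.url = url;
        this.weight = weight;
    }
}

class SmoothWeightedSketch {
    static Node select(List<Node> nodes) {
        int total = 0;
        Node best = null;
        for (Node n : nodes) {
            n.currentWeight += n.weight; // every node gains its weight
            total += n.weight;
            if (best == null || n.currentWeight > best.currentWeight) {
                best = n;
            }
        }
        if (best != null) {
            best.currentWeight -= total; // the winner pays for the full round
        }
        return best; // null when the pool is empty, which triggers remake
    }
}

The invariant is that each node is picked in proportion to its weight over time, without the bursts a naive weighted scheme produces.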

View File: KafkaListenerTask.java

@@ -49,7 +49,7 @@ public class KafkaListenerTask implements Runnable {
// 分三类存储
Map<String, List<ResultDTO>> assortMap = new LinkedHashMap<>();
List<ResultDTO> resultDTOS = new LinkedList<>();
LoggerUtil.info("报告【" + record.key() + "】开始解析结果");
LoggerUtil.info("KAFKA解析结果任务开始解析结果", String.valueOf(record.key()));
ResultDTO dto = this.formatResult();
if (dto == null) {
return;
@@ -57,7 +57,7 @@
if (dto.getArbitraryData() != null && dto.getArbitraryData().containsKey("TEST_END")
&& (Boolean) dto.getArbitraryData().get("TEST_END")) {
resultDTOS.add(dto);
LoggerUtil.info("KAFKA消费结果处理:【" + record.key() + "】结果状态:" + dto.getArbitraryData().get("TEST_END"));
LoggerUtil.info("KAFKA消费结果处理状态:" + dto.getArbitraryData().get("TEST_END"), String.valueOf(record.key()));
}
// 携带结果
if (CollectionUtils.isNotEmpty(dto.getRequestResults())) {
@@ -71,28 +71,28 @@
}
}
if (MapUtils.isNotEmpty(assortMap)) {
LoggerUtil.info("KAFKA消费执行内容存储开始");
LoggerUtil.info("KAFKA消费执行内容存储开始", String.valueOf(record.key()));
testResultService.batchSaveResults(assortMap);
LoggerUtil.info("KAFKA消费执行内容存储结束");
LoggerUtil.info("KAFKA消费执行内容存储结束", String.valueOf(record.key()));
}
// 更新执行结果
if (CollectionUtils.isNotEmpty(resultDTOS)) {
resultDTOS.forEach(testResult -> {
LoggerUtil.info("报告 【 " + testResult.getReportId() + "资源 " + testResult.getTestId() + " 整体执行完成");
LoggerUtil.info("资源 " + testResult.getTestId() + " 整体执行完成", testResult.getReportId());
testResultService.testEnded(testResult);
LoggerUtil.info("执行队列处理:" + testResult.getQueueId());
LoggerUtil.info("执行队列处理:" + testResult.getQueueId(), testResult.getReportId());
apiExecutionQueueService.queueNext(testResult);
// 全局并发队列
PoolExecBlockingQueueUtil.offer(testResult.getReportId());
// 更新测试计划报告
if (StringUtils.isNotEmpty(testResult.getTestPlanReportId())) {
LoggerUtil.info("Check Processing Test Plan report status" + testResult.getQueueId() + "" + testResult.getTestId());
LoggerUtil.info("Check Processing Test Plan report status" + testResult.getQueueId() + "" + testResult.getTestId(), testResult.getReportId());
apiExecutionQueueService.testPlanReportTestEnded(testResult.getTestPlanReportId());
}
});
}
} catch (Exception e) {
LoggerUtil.error("报告【" + record.key() + "】KAFKA消费失败", e);
LoggerUtil.error("KAFKA消费失败", String.valueOf(record.key()), e);
}
}
@@ -104,7 +104,7 @@
});
}
} catch (Exception e) {
LoggerUtil.error("报告【" + record.key() + "】格式化数据失败:", e);
LoggerUtil.error("结果数据格式化失败:", String.valueOf(record.key()), e);
}
return null;
}
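Two details make the simplification in this file safe: the Kafka record key is the report ID (the old messages interpolated record.key() as 报告【…】), so String.valueOf(record.key()) can serve as the logger's report argument and stays null-safe for keyless records; and the "分三类存储" step groups results before one batch save per group. A stripped-down sketch of that grouping follows; the grouping key is assumed to be the run mode, and ResultDTO here is a stand-in record, not the real class.

import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

// Bucket results so batchSaveResults can store each bucket in one pass.
class AssortSketch {
    record ResultDTO(String runMode, String reportId) {}

    static Map<String, List<ResultDTO>> assort(List<ResultDTO> results) {
        Map<String, List<ResultDTO>> assortMap = new LinkedHashMap<>();
        for (ResultDTO dto : results) {
            assortMap.computeIfAbsent(dto.runMode(), k -> new LinkedList<>()).add(dto);
        }
        return assortMap;
    }
}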

View File: ApiExecutionQueueService.java

@@ -74,7 +74,7 @@ public class ApiExecutionQueueService {
@Transactional(propagation = Propagation.REQUIRES_NEW)
public DBTestQueue add(Object runObj, String poolId, String type, String reportId, String reportType, String runMode, RunModeConfigDTO config) {
LoggerUtil.info("开始生成执行链");
LoggerUtil.info("开始生成执行链", reportId);
ApiExecutionQueue executionQueue = getApiExecutionQueue(poolId, reportId, reportType, runMode, config);
queueMapper.insert(executionQueue);
@@ -145,7 +145,7 @@
}
resQueue.setDetailMap(detailMap);
LoggerUtil.info("生成执行链结束");
LoggerUtil.info("生成执行链结束", reportId);
return resQueue;
}
@@ -175,7 +175,7 @@
}
private boolean failure(DBTestQueue executionQueue, ResultDTO dto) {
LoggerUtil.info("进入失败停止处理:" + executionQueue.getId());
LoggerUtil.info("进入失败停止处理:" + executionQueue.getId(), dto.getReportId());
boolean isError = false;
if (StringUtils.contains(dto.getRunMode(), ApiRunMode.SCENARIO.name())) {
if (StringUtils.equals(dto.getReportType(), RunModeConstants.SET_REPORT.toString())) {
@@ -250,15 +250,15 @@
if (CollectionUtils.isNotEmpty(queues)) {
queue.setQueue(queues.get(0));
} else {
LoggerUtil.info("execution complete,clear queue" + id + "");
LoggerUtil.info("execution complete,clear queue" + id + "", queue.getReportId());
queueMapper.deleteByPrimaryKey(id);
}
} else {
LoggerUtil.info("execution complete,clear queue" + id + "");
LoggerUtil.info("execution complete,clear queue" + id + "", queue.getReportId());
queueMapper.deleteByPrimaryKey(id);
}
} else {
LoggerUtil.info("The queue was accidentally deleted" + id + "");
LoggerUtil.info("The queue was accidentally deleted" + id + "", queue.getReportId());
}
return queue;
}
@@ -289,7 +289,7 @@
}
public void queueNext(ResultDTO dto) {
LoggerUtil.info("开始处理队列:" + dto.getReportId() + "QID" + dto.getQueueId());
LoggerUtil.info("开始处理队列:" + dto.getQueueId(), dto.getReportId());
if (StringUtils.equals(dto.getRunType(), RunModeConstants.PARALLEL.toString())) {
ApiExecutionQueueDetailExample example = new ApiExecutionQueueDetailExample();
example.createCriteria().andQueueIdEqualTo(dto.getQueueId()).andTestIdEqualTo(dto.getTestId());
@@ -320,7 +320,7 @@
return;
}
}
LoggerUtil.info("开始处理执行队列:" + executionQueue.getId() + " 当前资源是:" + dto.getTestId() + "报告ID" + dto.getReportId());
LoggerUtil.info("开始处理执行队列:" + executionQueue.getId() + " 当前资源是:" + dto.getTestId(), dto.getReportId());
if (executionQueue.getQueue() != null && StringUtils.isNotEmpty(executionQueue.getQueue().getTestId())) {
if (StringUtils.equals(dto.getRunType(), RunModeConstants.SERIAL.toString())) {
LoggerUtil.info("当前执行队列是:" + JSON.toJSONString(executionQueue.getQueue()));
@@ -344,14 +344,14 @@
apiScenarioReportService.margeReport(reportId, dto.getRunMode(), dto.getConsole());
}
queueMapper.deleteByPrimaryKey(dto.getQueueId());
LoggerUtil.info("Queue execution ends" + dto.getQueueId());
LoggerUtil.info("Queue execution ends" + dto.getQueueId(), dto.getReportId());
}
ApiExecutionQueueDetailExample example = new ApiExecutionQueueDetailExample();
example.createCriteria().andQueueIdEqualTo(dto.getQueueId()).andTestIdEqualTo(dto.getTestId());
executionQueueDetailMapper.deleteByExample(example);
}
LoggerUtil.info("处理队列结束:" + dto.getReportId() + "QID" + dto.getQueueId());
LoggerUtil.info("处理队列结束:" + dto.getQueueId(), dto.getReportId());
}
public void defendQueue() {
@@ -404,7 +404,7 @@
// 删除串行资源锁
redisTemplate.delete(RunModeConstants.SERIAL.name() + "_" + dto.getReportId());
LoggerUtil.info("超时处理报告:【" + report.getId() + "】进入下一个执行");
LoggerUtil.info("超时处理报告处理,进入下一个执行", report.getId());
dto.setTestPlanReportId(queue.getReportId());
dto.setReportId(queue.getReportId());
dto.setRunMode(queue.getRunMode());
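The timeout branch above also shows the serial-lock convention: each serial report holds a Redis key built as RunModeConstants.SERIAL.name() + "_" + reportId, deleted on normal completion or, as here, by the timeout defender so the next entry can run. A sketch of acquiring and releasing such a lock with Spring's StringRedisTemplate follows; the TTL and method names are assumptions, only the key shape comes from the diff.

import java.util.concurrent.TimeUnit;
import org.springframework.data.redis.core.StringRedisTemplate;

// Hypothetical per-report serial lock; not MeterSphere's actual code.
public class SerialLockSketch {
    private final StringRedisTemplate redis;

    public SerialLockSketch(StringRedisTemplate redis) {
        this.redis = redis;
    }

    public boolean tryLock(String reportId) {
        Boolean ok = redis.opsForValue()
                .setIfAbsent("SERIAL_" + reportId, "1", 10, TimeUnit.MINUTES); // assumed TTL
        return Boolean.TRUE.equals(ok);
    }

    public void unlock(String reportId) {
        redis.delete("SERIAL_" + reportId); // what defendQueue does on timeout
    }
}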

View File: ApiScenarioReportService.java

@@ -922,7 +922,7 @@ public class ApiScenarioReportService {
}
if (dto != null && dto.getArbitraryData() != null && dto.getArbitraryData().containsKey("TIMEOUT") && (Boolean) dto.getArbitraryData().get("TIMEOUT")) {
LoggerUtil.info("报告 【 " + dto.getReportId() + "资源 " + dto.getTestId() + " 执行超时");
LoggerUtil.info("资源 " + dto.getTestId() + " 执行超时", dto.getReportId());
status = ScenarioStatus.Timeout.name();
}
return status;

View File: RemakeReportService.java

@@ -185,10 +185,10 @@ public class RemakeReportService {
if (JMeterEngineCache.runningEngine.containsKey(dto.getReportId())) {
JMeterEngineCache.runningEngine.remove(dto.getReportId());
}
LoggerUtil.info("进入异常结果处理报告【" + dto.getReportId() + "" + dto.getRunMode() + " 整体执行完成");
LoggerUtil.info("进入异常结果处理" + dto.getRunMode() + " 整体处理完成", dto.getReportId());
// 全局并发队列
PoolExecBlockingQueueUtil.offer(dto.getReportId());
String consoleMsg = FixedCapacityUtils.getJmeterLogger(dto.getReportId(),true);
String consoleMsg = FixedCapacityUtils.getJmeterLogger(dto.getReportId(), true);
dto.setConsole(consoleMsg + "\n" + errorMsg);
// 整体执行结束更新资源状态
CommonBeanFactory.getBean(TestResultService.class).testEnded(dto);
@@ -198,11 +198,11 @@
}
// 更新测试计划报告
if (StringUtils.isNotEmpty(dto.getTestPlanReportId())) {
LoggerUtil.info("Check Processing Test Plan report status" + dto.getQueueId() + "" + dto.getTestId());
LoggerUtil.info("Check Processing Test Plan report status" + dto.getQueueId() + "" + dto.getTestId(), dto.getReportId());
CommonBeanFactory.getBean(ApiExecutionQueueService.class).testPlanReportTestEnded(dto.getTestPlanReportId());
}
} catch (Exception e) {
LoggerUtil.error(e);
LoggerUtil.error("回退报告异常", request.getReportId(), e);
}
}
}

View File

@@ -377,7 +377,7 @@
FROM
api_test_case t1
LEFT JOIN api_definition_exec_result t3 ON t1.last_result_id = t3.id
LEFT JOIN USER deleteUser ON t1.delete_user_id = deleteUser.id
LEFT JOIN user deleteUser ON t1.delete_user_id = deleteUser.id
LEFT JOIN api_definition a ON t1.api_definition_id = a.id
LEFT JOIN project ON t1.project_id = project.id
LEFT JOIN project_version ON project.id = project_version.project_id
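A note on this hunk: on Linux, MySQL table names are case-sensitive by default (lower_case_table_names=0), so LEFT JOIN USER only resolves if the table really is named USER; lowering it to user matches the schema's actual table name and keeps the mapper portable across platforms, which is likely the motivation here rather than style alone.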

View File: WebConfig.java

@@ -3,6 +3,7 @@ package io.metersphere.config;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.metersphere.commons.utils.LogUtil;
import io.metersphere.utils.LoggerUtil;
import org.apache.http.NoHttpResponseException;
import org.apache.http.conn.ConnectTimeoutException;
import org.apache.http.impl.client.HttpClientBuilder;
@@ -67,7 +68,7 @@
if (exception instanceof ConnectTimeoutException ||
exception instanceof NoHttpResponseException ||
exception instanceof ConnectException) {
LogUtil.error("重试次数: " + curRetryCount);
LoggerUtil.info("重试次数: " + curRetryCount);
return true;
}
return false;
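The lambda in this hunk is the HttpRequestRetryHandler that WebConfig registers on its HttpClientBuilder; note that the switch from LogUtil.error to LoggerUtil.info also downgrades a routine retry from an error to an informational event. A self-contained version of the same policy, with the retry cap made explicit (the cap value is an assumption, not taken from the diff):

import java.net.ConnectException;
import org.apache.http.NoHttpResponseException;
import org.apache.http.conn.ConnectTimeoutException;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;

// Minimal HttpClient 4.x retry policy mirroring the hunk above.
public class RetryClientSketch {
    public static CloseableHttpClient build(int maxRetries) {
        return HttpClientBuilder.create()
                .setRetryHandler((exception, curRetryCount, context) -> {
                    if (curRetryCount > maxRetries) {
                        return false; // give up after the configured attempts
                    }
                    // only transient connection-level failures are retried
                    return exception instanceof ConnectTimeoutException
                            || exception instanceof NoHttpResponseException
                            || exception instanceof ConnectException;
                })
                .build();
    }
}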

View File: IndexWebSocket.java

@@ -26,7 +26,7 @@ public class IndexWebSocket {
if (async != null) {
async.sendText("CONN_SUCCEEDED");
}
LoggerUtil.info("客户端: [" + reportId + "] : 连接成功!" + WebSocketUtils.ONLINE_USER_SESSIONS.size());
LoggerUtil.info("客户端: [" + reportId + "] : 连接成功!" + WebSocketUtils.ONLINE_USER_SESSIONS.size(), reportId);
}
/**