删除掉所有没用的工作流相关的类,重新开发。

This commit is contained in:
dengqichen 2024-12-09 15:05:01 +08:00
parent 1d518aa4d3
commit 216839dd59
88 changed files with 632 additions and 4795 deletions

View File

@ -1,21 +1,15 @@
package com.qqchen.deploy.backend.workflow.api;
import com.qqchen.deploy.backend.framework.api.Response;
import com.qqchen.deploy.backend.framework.controller.BaseController;
import com.qqchen.deploy.backend.workflow.dto.NodeInstanceDTO;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.engine.WorkflowEngine;
import com.qqchen.deploy.backend.workflow.query.NodeInstanceQuery;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.annotation.Resource;
import jakarta.servlet.http.HttpServletResponse;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.Map;
@Slf4j
@RestController
@ -23,30 +17,9 @@ import java.util.Map;
@Tag(name = "节点实例管理", description = "节点实例管理相关接口")
public class NodeInstanceApiController extends BaseController<NodeInstance, NodeInstanceDTO, Long, NodeInstanceQuery> {
@Resource
private WorkflowEngine workflowEngine;
@Operation(summary = "完成节点")
@PostMapping("/{id}/complete")
public Response<Void> completeNode(
@Parameter(description = "节点实例ID", required = true) @PathVariable Long id,
@Parameter(description = "输出变量") @RequestBody(required = false) Map<String, Object> variables
) {
workflowEngine.completeNode(id, variables);
return Response.success();
}
@Operation(summary = "重试节点")
@PostMapping("/{id}/retry")
public Response<Void> retryNode(
@Parameter(description = "节点实例ID", required = true) @PathVariable Long id
) {
workflowEngine.executeNode(id);
return Response.success();
}
@Override
protected void exportData(HttpServletResponse response, List<NodeInstanceDTO> data) {
// TODO: 实现导出功能
}
}
}

View File

@ -1,20 +1,14 @@
package com.qqchen.deploy.backend.workflow.api;
import com.qqchen.deploy.backend.framework.api.Response;
import com.qqchen.deploy.backend.framework.controller.BaseController;
import com.qqchen.deploy.backend.workflow.dto.NodeTypeDTO;
import com.qqchen.deploy.backend.workflow.dto.query.NodeTypeQuery;
import com.qqchen.deploy.backend.workflow.engine.definition.TaskExecutorDefinition;
import com.qqchen.deploy.backend.workflow.entity.NodeType;
import com.qqchen.deploy.backend.workflow.service.INodeTypeService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.annotation.Resource;
import jakarta.servlet.http.HttpServletResponse;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@ -32,15 +26,6 @@ public class NodeTypeApiController extends BaseController<NodeType, NodeTypeDTO,
@Resource
private INodeTypeService nodeTypeService;
@Operation(summary = "获取指定节点类型支持的执行器列表")
@GetMapping("/{type}/executors")
public Response<List<TaskExecutorDefinition>> getExecutors(
@Parameter(description = "节点类型", required = true) @PathVariable String type
) {
log.debug("获取节点类型[{}]支持的执行器列表", type);
return Response.success(nodeTypeService.getExecutors(type));
}
@Override
protected void exportData(HttpServletResponse response, List<NodeTypeDTO> data) {

View File

@ -3,8 +3,9 @@ package com.qqchen.deploy.backend.workflow.api;
import com.qqchen.deploy.backend.framework.api.Response;
import com.qqchen.deploy.backend.framework.controller.BaseController;
import com.qqchen.deploy.backend.workflow.dto.WorkflowInstanceDTO;
import com.qqchen.deploy.backend.workflow.dto.request.WorkflowStartRequest;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
import com.qqchen.deploy.backend.workflow.engine.WorkflowEngine;
import com.qqchen.deploy.backend.workflow.service.IWorkflowEngineService;
import com.qqchen.deploy.backend.workflow.query.WorkflowInstanceQuery;
import com.qqchen.deploy.backend.workflow.service.IWorkflowInstanceService;
import com.qqchen.deploy.backend.workflow.converter.WorkflowInstanceConverter;
@ -27,7 +28,7 @@ import java.util.Map;
public class WorkflowInstanceApiController extends BaseController<WorkflowInstance, WorkflowInstanceDTO, Long, WorkflowInstanceQuery> {
@Resource
private WorkflowEngine workflowEngine;
private IWorkflowEngineService workflowEngine;
@Resource
private IWorkflowInstanceService workflowInstanceService;
@ -37,42 +38,10 @@ public class WorkflowInstanceApiController extends BaseController<WorkflowInstan
@Operation(summary = "启动工作流实例")
@PostMapping("/start")
public Response<WorkflowInstanceDTO> startWorkflow(@RequestBody StartWorkflowRequest request) {
WorkflowInstance instance = workflowEngine.startWorkflow(
request.getWorkflowCode(),
request.getBusinessKey(),
request.getVariables()
);
return Response.success(converter.toDto(instance));
public Response<WorkflowInstanceDTO> startWorkflow(@RequestBody WorkflowStartRequest request) {
return Response.success(converter.toDto(workflowEngine.startWorkflow(request)));
}
@Operation(summary = "终止工作流实例")
@PostMapping("/{id}/terminate")
public Response<Void> terminateWorkflow(
@Parameter(description = "工作流实例ID", required = true) @PathVariable Long id,
@Parameter(description = "终止原因") @RequestParam(required = false) String reason
) {
workflowEngine.terminateWorkflow(id, reason);
return Response.success();
}
@Operation(summary = "暂停工作流实例")
@PostMapping("/{id}/pause")
public Response<Void> pauseWorkflow(
@Parameter(description = "工作流实例ID", required = true) @PathVariable Long id
) {
workflowEngine.pauseWorkflow(id);
return Response.success();
}
@Operation(summary = "恢复工作流实例")
@PostMapping("/{id}/resume")
public Response<Void> resumeWorkflow(
@Parameter(description = "工作流实例ID", required = true) @PathVariable Long id
) {
workflowEngine.resumeWorkflow(id);
return Response.success();
}
@Override
public void exportData(HttpServletResponse response, List<WorkflowInstanceDTO> data) {
@ -80,16 +49,4 @@ public class WorkflowInstanceApiController extends BaseController<WorkflowInstan
}
@Data
public static class StartWorkflowRequest {
@Parameter(description = "工作流编码", required = true)
private String workflowCode;
@Parameter(description = "业务标识", required = true)
private String businessKey;
@Parameter(description = "工作流变量")
private Map<String, Object> variables;
}
}

View File

@ -1,6 +1,6 @@
package com.qqchen.deploy.backend.workflow.converter;
import com.qqchen.deploy.backend.workflow.engine.definition.TaskExecutorDefinition;
import com.qqchen.deploy.backend.workflow.engine.definition.NodeExecutorDefinition;
import java.util.List;
@ -16,7 +16,7 @@ public interface JsonConverter {
* @param json JSON字符串
* @return 执行器定义列表
*/
List<TaskExecutorDefinition> toExecutorList(String json);
List<NodeExecutorDefinition> toExecutorList(String json);
/**
* 将执行器定义列表转换为JSON字符串
@ -24,5 +24,5 @@ public interface JsonConverter {
* @param executors 执行器定义列表
* @return JSON字符串
*/
String fromExecutorList(List<TaskExecutorDefinition> executors);
String fromExecutorList(List<NodeExecutorDefinition> executors);
}

View File

@ -7,7 +7,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.framework.exception.SystemException;
import com.qqchen.deploy.backend.workflow.converter.JsonConverter;
import com.qqchen.deploy.backend.workflow.engine.definition.TaskExecutorDefinition;
import com.qqchen.deploy.backend.workflow.engine.definition.NodeExecutorDefinition;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
@ -27,7 +27,7 @@ public class DefaultJsonConverter implements JsonConverter {
}
@Override
public List<TaskExecutorDefinition> toExecutorList(String json) {
public List<NodeExecutorDefinition> toExecutorList(String json) {
if (json == null || json.isEmpty()) {
return new ArrayList<>();
}
@ -38,14 +38,14 @@ public class DefaultJsonConverter implements JsonConverter {
throw new SystemException(ResponseCode.WORKFLOW_CONFIG_INVALID, new Object[]{"Executors JSON must be an array"}, null);
}
List<TaskExecutorDefinition> result = new ArrayList<>();
List<NodeExecutorDefinition> result = new ArrayList<>();
for (JsonNode node : rootNode) {
// 将configSchema转换为字符串
if (node.has("configSchema")) {
((ObjectNode) node).put("configSchema", node.get("configSchema").toString());
}
// 将处理后的节点转换为TaskExecutorDefinition对象
result.add(objectMapper.treeToValue(node, TaskExecutorDefinition.class));
result.add(objectMapper.treeToValue(node, NodeExecutorDefinition.class));
}
return result;
} catch (JsonProcessingException e) {
@ -54,7 +54,7 @@ public class DefaultJsonConverter implements JsonConverter {
}
@Override
public String fromExecutorList(List<TaskExecutorDefinition> executors) {
public String fromExecutorList(List<NodeExecutorDefinition> executors) {
if (executors == null || executors.isEmpty()) {
return "[]";
}

View File

@ -1,7 +1,7 @@
package com.qqchen.deploy.backend.workflow.dto;
import com.qqchen.deploy.backend.framework.dto.BaseDTO;
import com.qqchen.deploy.backend.workflow.engine.definition.TaskExecutorDefinition;
import com.qqchen.deploy.backend.workflow.engine.definition.NodeExecutorDefinition;
import com.qqchen.deploy.backend.workflow.enums.NodeCategoryEnum;
import jakarta.validation.Valid;
import jakarta.validation.constraints.NotBlank;
@ -60,7 +60,7 @@ public class NodeTypeDTO extends BaseDTO {
* 3. 每种执行器都有自己的配置模式
*/
@Valid
private List<TaskExecutorDefinition> executors;
private List<NodeExecutorDefinition> executors;
/**
* 节点配置模式(JSON)

View File

@ -0,0 +1,19 @@
package com.qqchen.deploy.backend.workflow.dto.request;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import java.util.Map;
import lombok.Data;
/**
 * Request payload for starting a workflow instance.
 */
@Data
public class WorkflowStartRequest {

    /** Unique code of the workflow definition to start. */
    @NotBlank(message = "工作流编码不能为空")
    private String workflowCode;

    /** Business key tying the new instance to a domain object. */
    @NotBlank(message = "业务标识不能为空")
    private String businessKey;

    /**
     * Initial workflow variables.
     * Bug fix: the original used @NotBlank here, but @NotBlank only supports
     * CharSequence targets — applying it to a Map makes the validator throw
     * UnexpectedTypeException at runtime. @NotNull keeps the field required
     * without crashing validation.
     */
    @NotNull(message = "环境变量不能为空")
    private Map<String, Object> variables;
}

View File

@ -1,347 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.workflow.engine.context.DefaultWorkflowContext;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.engine.executor.node.NodeExecutor;
import com.qqchen.deploy.backend.workflow.engine.parser.WorkflowDefinitionParser;
import com.qqchen.deploy.backend.workflow.entity.NodeConfig;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.entity.TransitionConfig;
import com.qqchen.deploy.backend.workflow.entity.WorkflowDefinition;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
import com.qqchen.deploy.backend.workflow.enums.NodeStatusEnum;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import com.qqchen.deploy.backend.workflow.enums.WorkflowDefinitionStatusEnum;
import com.qqchen.deploy.backend.workflow.enums.WorkflowInstanceStatusEnum;
import com.qqchen.deploy.backend.workflow.repository.INodeConfigRepository;
import com.qqchen.deploy.backend.workflow.repository.INodeInstanceRepository;
import com.qqchen.deploy.backend.workflow.repository.ITransitionConfigRepository;
import com.qqchen.deploy.backend.workflow.repository.IWorkflowDefinitionRepository;
import com.qqchen.deploy.backend.workflow.repository.IWorkflowInstanceRepository;
import com.qqchen.deploy.backend.workflow.service.WorkflowVariableOperations;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;
/**
 * Default {@link WorkflowEngine} implementation.
 * Loads a published workflow definition, materialises its node/transition
 * configuration into the database, then executes nodes synchronously,
 * cascading from each completed node to its successors.
 */
@Slf4j
@Component
public class DefaultWorkflowEngine implements WorkflowEngine {

    @Resource
    private IWorkflowDefinitionRepository workflowDefinitionRepository;

    @Resource
    private IWorkflowInstanceRepository workflowInstanceRepository;

    @Resource
    private INodeInstanceRepository nodeInstanceRepository;

    @Resource
    private INodeConfigRepository nodeConfigRepository;

    @Resource
    private ITransitionConfigRepository transitionConfigRepository;

    // One executor per node type; the map bean is assembled elsewhere in the module.
    @Resource
    private Map<NodeTypeEnum, NodeExecutor> nodeExecutors;

    @Resource
    private WorkflowVariableOperations variableOperations;

    @Resource
    private ObjectMapper objectMapper;

    @Resource
    private WorkflowDefinitionParser workflowDefinitionParser;

    /**
     * Starts a workflow instance for the given definition code.
     *
     * @param workflowCode code of the workflow definition to run
     * @param businessKey  business identifier stored on the new instance
     * @param variables    initial workflow variables (may be null or empty)
     * @return the created, running workflow instance
     */
    @Override
    @Transactional
    public WorkflowInstance startWorkflow(String workflowCode, String businessKey, Map<String, Object> variables) {
        // 1. Load the workflow definition
        WorkflowDefinition definition = workflowDefinitionRepository.findByCodeAndDeletedFalse(workflowCode);
        if (definition == null) {
            throw new WorkflowEngineException(ResponseCode.WORKFLOW_NOT_FOUND);
        }
        // 2. Only published definitions may be started
        if (definition.getStatus() != WorkflowDefinitionStatusEnum.PUBLISHED) {
            throw new WorkflowEngineException(ResponseCode.WORKFLOW_NOT_PUBLISHED);
        }
        // 3. Create the workflow instance in RUNNING state
        WorkflowInstance instance = new WorkflowInstance();
        instance.setWorkflowDefinition(definition);
        instance.setBusinessKey(businessKey);
        instance.setStatus(WorkflowInstanceStatusEnum.RUNNING);
        instance.setCreateTime(LocalDateTime.now());
        workflowInstanceRepository.save(instance);
        // 4. Seed the workflow variables
        if (variables != null && !variables.isEmpty()) {
            variableOperations.setVariables(instance.getId(), variables);
        }
        // 5. Parse and persist node/transition configuration
        try {
            // Clear stale configuration, if any exists.
            // NOTE(review): this rewrites definition-level config on every start —
            // two concurrent starts of the same definition would race; confirm intended.
            nodeConfigRepository.deleteByWorkflowDefinitionId(definition.getId());
            transitionConfigRepository.deleteByWorkflowDefinitionId(definition.getId());
            // Parse fresh configuration from the definition's JSON fields
            List<NodeConfig> nodeConfigs = workflowDefinitionParser.parseNodeConfig(definition.getNodeConfig());
            List<TransitionConfig> transitions = workflowDefinitionParser.parseTransitionConfig(definition.getTransitionConfig());
            // Persist node configuration
            for (NodeConfig config : nodeConfigs) {
                config.setWorkflowDefinitionId(definition.getId());
                nodeConfigRepository.save(config);
            }
            // Persist transition configuration
            for (TransitionConfig config : transitions) {
                config.setWorkflowDefinitionId(definition.getId());
                transitionConfigRepository.save(config);
            }
            // 6. Find the START node and create its running instance
            NodeConfig startNodeConfig = nodeConfigs.stream()
                    .filter(n -> n.getType() == NodeTypeEnum.START)
                    .findFirst()
                    .orElseThrow(() -> new WorkflowEngineException(ResponseCode.WORKFLOW_CONFIG_INVALID, "Start node not found"));
            NodeInstance startNode = new NodeInstance();
            startNode.setWorkflowInstance(instance);
            startNode.setNodeId(startNodeConfig.getNodeId());
            startNode.setNodeType(startNodeConfig.getType());
            startNode.setName(startNodeConfig.getName());
            startNode.setConfig(objectMapper.writeValueAsString(startNodeConfig.getConfig()));
            startNode.setStatus(NodeStatusEnum.RUNNING);
            startNode.setCreateTime(LocalDateTime.now());
            nodeInstanceRepository.save(startNode);
            // 7. Execute the start node (synchronous; cascades to successors)
            executeNode(startNode.getId());
            return instance;
        } catch (JsonProcessingException e) {
            throw new WorkflowEngineException(ResponseCode.WORKFLOW_CONFIG_ERROR, e);
        }
    }

    /**
     * Executes one node instance and, on success, creates and executes every
     * direct successor. On any failure both the node and the whole workflow
     * instance are marked FAILED.
     *
     * @param nodeInstanceId id of the node instance to execute
     */
    @Override
    @Transactional
    public void executeNode(Long nodeInstanceId) {
        NodeInstance nodeInstance = nodeInstanceRepository.findById(nodeInstanceId)
                .orElseThrow(() -> new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_NOT_FOUND));
        WorkflowInstance instance = nodeInstance.getWorkflowInstance();
        if (!instance.canExecuteNode()) {
            throw new WorkflowEngineException(ResponseCode.WORKFLOW_INSTANCE_NOT_RUNNING);
        }
        // Resolve the executor registered for this node type
        NodeExecutor executor = nodeExecutors.get(nodeInstance.getNodeType());
        if (executor == null) {
            throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTOR_NOT_FOUND);
        }
        // Build the execution context handed to the executor
        WorkflowContextOperations context = DefaultWorkflowContext.builder()
                .workflowInstance(instance)
                .variableOperations(variableOperations)
                .build();
        try {
            // Run the node
            executor.execute(nodeInstance, context);
            // Mark the node completed
            nodeInstance.setStatus(NodeStatusEnum.COMPLETED);
            nodeInstance.setEndTime(LocalDateTime.now());
            nodeInstanceRepository.save(nodeInstance);
            // Load transition configuration from the database
            List<TransitionConfig> transitions = transitionConfigRepository
                    .findByWorkflowDefinitionId(instance.getWorkflowDefinition().getId());
            // Direct successors of the current node
            List<String> nextNodeIds = transitions.stream()
                    .filter(t -> t.getFrom().equals(nodeInstance.getNodeId()))
                    .map(TransitionConfig::getTo)
                    .toList();
            // Node configuration lookup table
            List<NodeConfig> nodeConfigs = nodeConfigRepository
                    .findByWorkflowDefinitionId(instance.getWorkflowDefinition().getId());
            // Create and execute each successor
            for (String nextNodeId : nextNodeIds) {
                NodeConfig nodeConfig = nodeConfigs.stream()
                        .filter(n -> n.getNodeId().equals(nextNodeId))
                        .findFirst()
                        .orElse(null);
                if (nodeConfig == null) {
                    log.error("Node configuration not found for node: {}", nextNodeId);
                    continue;
                }
                createAndExecuteNextNode(instance, nextNodeId, nodeConfig);
            }
            // Check whether every node has completed
            List<NodeInstance> uncompletedNodes = nodeInstanceRepository
                    .findByWorkflowInstanceAndStatusNot(instance, NodeStatusEnum.COMPLETED);
            if (uncompletedNodes.isEmpty()) {
                // NOTE(review): the instance is saved without its status being updated
                // here — presumably it should be marked COMPLETED first; confirm.
                workflowInstanceRepository.save(instance);
            }
        } catch (Exception e) {
            // Mark the node failed
            nodeInstance.setStatus(NodeStatusEnum.FAILED);
            nodeInstance.setError(e.getMessage());
            nodeInstance.setEndTime(LocalDateTime.now());
            nodeInstanceRepository.save(nodeInstance);
            // Mark the whole workflow instance failed
            instance.setStatus(WorkflowInstanceStatusEnum.FAILED);
            instance.setError(e.getMessage());
            workflowInstanceRepository.save(instance);
            throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED, e);
        }
    }

    /**
     * Creates a PENDING instance for a successor node and executes it at once.
     * NOTE(review): unlike the start node, neither config nor createTime is set
     * here, and execution recurses — a deep graph could overflow the stack; confirm.
     */
    private void createAndExecuteNextNode(WorkflowInstance instance, String nextNodeId, NodeConfig nodeConfig) {
        NodeInstance nextNode = new NodeInstance();
        nextNode.setNodeId(nextNodeId);
        nextNode.setWorkflowInstance(instance);
        nextNode.setNodeType(nodeConfig.getType());
        nextNode.setName(nodeConfig.getName());
        // nextNode.setConfigObject(nodeExecuteConfigConverter.toNodeExecutorConfig(nodeConfig));
        // nextNode.setConfigObject(nodeConfig);
        nextNode.setStatus(NodeStatusEnum.PENDING);
        nodeInstanceRepository.save(nextNode);
        // Recursively execute the successor
        executeNode(nextNode.getId());
    }

    /**
     * Marks a node COMPLETED and publishes its output variables.
     * NOTE(review): unlike executeNode, this does not advance the workflow to
     * successor nodes — confirm whether that is intentional.
     *
     * @param nodeInstanceId id of the node instance to complete
     * @param variables      output variables (may be null or empty)
     */
    @Override
    @Transactional
    public void completeNode(Long nodeInstanceId, Map<String, Object> variables) {
        NodeInstance nodeInstance = nodeInstanceRepository.findById(nodeInstanceId)
                .orElseThrow(() -> new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_NOT_FOUND));
        WorkflowInstance instance = nodeInstance.getWorkflowInstance();
        // Publish the node's output variables
        if (variables != null && !variables.isEmpty()) {
            variableOperations.setVariables(instance.getId(), variables);
        }
        nodeInstance.setStatus(NodeStatusEnum.COMPLETED);
        nodeInstance.setEndTime(LocalDateTime.now());
        nodeInstanceRepository.save(nodeInstance);
    }

    /**
     * Terminates a workflow instance and clears its cached variables.
     *
     * @param instanceId workflow instance id
     * @param reason     human-readable termination reason
     */
    @Override
    @Transactional
    public void terminateWorkflow(Long instanceId, String reason) {
        WorkflowInstance instance = workflowInstanceRepository.findById(instanceId)
                .orElseThrow(() -> new WorkflowEngineException(ResponseCode.WORKFLOW_INSTANCE_NOT_FOUND));
        instance.terminate(reason);
        workflowInstanceRepository.save(instance);
        // Clear the variable cache for this instance
        variableOperations.clearVariables(instance.getId());
    }

    /**
     * Pauses a running workflow instance.
     *
     * @param instanceId workflow instance id
     */
    @Override
    @Transactional
    public void pauseWorkflow(Long instanceId) {
        WorkflowInstance instance = workflowInstanceRepository.findById(instanceId)
                .orElseThrow(() -> new WorkflowEngineException(ResponseCode.WORKFLOW_INSTANCE_NOT_FOUND));
        if (!instance.canPause()) {
            throw new WorkflowEngineException(ResponseCode.WORKFLOW_INSTANCE_NOT_RUNNING);
        }
        instance.pause();
        workflowInstanceRepository.save(instance);
    }

    /**
     * Resumes a paused workflow instance.
     *
     * @param instanceId workflow instance id
     */
    @Override
    @Transactional
    public void resumeWorkflow(Long instanceId) {
        WorkflowInstance instance = workflowInstanceRepository.findById(instanceId)
                .orElseThrow(() -> new WorkflowEngineException(ResponseCode.WORKFLOW_INSTANCE_NOT_FOUND));
        if (!instance.canResume()) {
            throw new WorkflowEngineException(ResponseCode.WORKFLOW_INSTANCE_NOT_PAUSED);
        }
        instance.resume();
        workflowInstanceRepository.save(instance);
    }

    /**
     * Retries a failed workflow instance by resetting and re-running every
     * FAILED node.
     *
     * @param instanceId workflow instance id
     */
    @Override
    @Transactional
    public void retryWorkflow(Long instanceId) {
        WorkflowInstance instance = workflowInstanceRepository.findById(instanceId)
                .orElseThrow(() -> new WorkflowEngineException(ResponseCode.WORKFLOW_INSTANCE_NOT_FOUND));
        if (!instance.canRetry()) {
            throw new WorkflowEngineException(ResponseCode.WORKFLOW_INSTANCE_NOT_RUNNING);
        }
        // Move the instance back into a runnable state
        instance.retry();
        workflowInstanceRepository.save(instance);
        // Collect the failed nodes
        List<NodeInstance> failedNodes = nodeInstanceRepository.findByWorkflowInstanceAndStatus(
                instance, NodeStatusEnum.FAILED);
        // Reset and re-run each failed node
        for (NodeInstance node : failedNodes) {
            node.setStatus(NodeStatusEnum.PENDING);
            node.setError(null);
            nodeInstanceRepository.save(node);
            executeNode(node.getId());
        }
    }

    /**
     * Terminates the instance if it has exceeded the given timeout.
     *
     * @param instanceId    workflow instance id
     * @param timeoutMillis timeout threshold in milliseconds
     */
    @Override
    @Transactional
    public void checkTimeout(Long instanceId, long timeoutMillis) {
        WorkflowInstance instance = workflowInstanceRepository.findById(instanceId)
                .orElseThrow(() -> new WorkflowEngineException(ResponseCode.WORKFLOW_INSTANCE_NOT_FOUND));
        if (instance.isTimeout(timeoutMillis)) {
            String error = "Workflow execution timeout after " + timeoutMillis + " milliseconds";
            terminateWorkflow(instanceId, error);
        }
    }

    // NOTE(review): appears unused within this class — confirm before removing.
    private NodeInstance createStartNode(WorkflowDefinition definition, WorkflowInstance instance) {
        NodeInstance startNode = new NodeInstance();
        startNode.setWorkflowInstance(instance);
        startNode.setNodeId("start");
        startNode.setNodeType(NodeTypeEnum.START);
        startNode.setName("开始节点");
        startNode.setStatus(NodeStatusEnum.PENDING);
        startNode.setCreateTime(LocalDateTime.now());
        return nodeInstanceRepository.save(startNode);
    }
}

View File

@ -1,116 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
import com.qqchen.deploy.backend.workflow.service.WorkflowVariableOperations;
import lombok.Data;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Runtime context for a single workflow execution.
 * Tracks the current and known node instances, holds transient inter-node
 * variables, and delegates persistent variable access to
 * WorkflowVariableOperations.
 */
@Data
public class WorkflowContext {

    /** Id of the workflow instance this context belongs to. */
    private final Long instanceId;

    /** Node instance currently being executed. */
    private NodeInstance currentNode;

    /** All node instances of this workflow execution. */
    private List<NodeInstance> allNodes;

    /** Transient variables handed between nodes; never persisted. */
    private final Map<String, Object> tempVariables;

    /** Delegate for persistent variable storage. */
    private final WorkflowVariableOperations variableOperations;

    public WorkflowContext(Long instanceId, WorkflowVariableOperations variableOperations) {
        this.instanceId = instanceId;
        this.variableOperations = variableOperations;
        this.tempVariables = new ConcurrentHashMap<>();
    }

    /**
     * Reads a persistent variable; delegates to WorkflowVariableOperations.
     *
     * @deprecated use WorkflowVariableOperations directly
     */
    @Deprecated
    public Object getVariable(String key) {
        Map<String, Object> persisted = variableOperations.getVariables(instanceId);
        return persisted.get(key);
    }

    /**
     * Writes a persistent variable; delegates to WorkflowVariableOperations.
     *
     * @deprecated use WorkflowVariableOperations directly
     */
    @Deprecated
    public void setVariable(String key, Object value) {
        Map<String, Object> single = new ConcurrentHashMap<>();
        single.put(key, value);
        variableOperations.setVariables(instanceId, single);
    }

    /**
     * Reads every persistent variable; delegates to WorkflowVariableOperations.
     *
     * @deprecated use WorkflowVariableOperations directly
     */
    @Deprecated
    public Map<String, Object> getVariables() {
        return variableOperations.getVariables(instanceId);
    }

    /**
     * Writes several persistent variables at once; delegates to WorkflowVariableOperations.
     *
     * @deprecated use WorkflowVariableOperations directly
     */
    @Deprecated
    public void setVariables(Map<String, Object> variables) {
        variableOperations.setVariables(instanceId, variables);
    }

    /** Reads one transient variable. */
    public Object getTempVariable(String key) {
        return this.tempVariables.get(key);
    }

    /** Writes one transient variable. */
    public void setTempVariable(String key, Object value) {
        this.tempVariables.put(key, value);
    }

    /** Returns a defensive copy of all transient variables. */
    public Map<String, Object> getTempVariables() {
        return new ConcurrentHashMap<>(this.tempVariables);
    }

    /** Discards every transient variable. */
    public void clearTempVariables() {
        this.tempVariables.clear();
    }
}

View File

@ -1,53 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
import java.util.Map;
/**
 * Workflow execution engine.
 * Drives the full lifecycle of a workflow instance: start, node execution,
 * node completion, termination, pause/resume, retry and timeout checking.
 */
public interface WorkflowEngine {

    /**
     * Starts a workflow instance.
     *
     * @param workflowCode code of the workflow definition to run
     * @param businessKey  business identifier to associate with the instance
     * @param variables    initial workflow variables (may be null or empty)
     * @return the created workflow instance
     */
    WorkflowInstance startWorkflow(String workflowCode, String businessKey, Map<String, Object> variables);

    /**
     * Executes a node instance.
     *
     * @param nodeInstanceId id of the node instance to execute
     */
    void executeNode(Long nodeInstanceId);

    /**
     * Completes a node, optionally publishing its output variables.
     *
     * @param nodeInstanceId id of the node instance to complete
     * @param variables      output variables (may be null)
     */
    void completeNode(Long nodeInstanceId, Map<String, Object> variables);

    /**
     * Terminates a workflow instance.
     *
     * @param instanceId workflow instance id
     * @param reason     human-readable termination reason
     */
    void terminateWorkflow(Long instanceId, String reason);

    /**
     * Pauses a running workflow instance.
     *
     * @param instanceId workflow instance id
     */
    void pauseWorkflow(Long instanceId);

    /**
     * Resumes a paused workflow instance.
     *
     * @param instanceId workflow instance id
     */
    void resumeWorkflow(Long instanceId);

    /**
     * Retries a failed workflow instance.
     *
     * @param instanceId workflow instance id
     */
    void retryWorkflow(Long instanceId);

    /**
     * Checks whether a workflow instance has timed out.
     *
     * @param instanceId    workflow instance id
     * @param timeoutMillis timeout threshold in milliseconds
     */
    void checkTimeout(Long instanceId, long timeoutMillis);
}

View File

@ -1,7 +1,6 @@
package com.qqchen.deploy.backend.workflow.engine.config;
import com.qqchen.deploy.backend.workflow.engine.executor.node.NodeExecutor;
import com.qqchen.deploy.backend.workflow.engine.executor.task.TaskExecutor;
import com.qqchen.deploy.backend.workflow.engine.executor.INodeExecutor;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@ -15,20 +14,11 @@ import java.util.stream.Collectors;
public class WorkflowEngineConfig {
@Bean
public Map<String, TaskExecutor> taskExecutorMap(List<TaskExecutor> executors) {
public Map<NodeTypeEnum, INodeExecutor> nodeExecutorMap(List<INodeExecutor> executors) {
return executors.stream()
.collect(Collectors.toMap(
executor -> executor.getClass().getSimpleName().replace("TaskExecutor", "").toUpperCase(),
Function.identity()
));
}
@Bean
public Map<NodeTypeEnum, NodeExecutor> nodeExecutorMap(List<NodeExecutor> executors) {
return executors.stream()
.collect(Collectors.toMap(
NodeExecutor::getNodeType,
Function.identity()
));
.collect(Collectors.toMap(
INodeExecutor::getNodeType,
Function.identity()
));
}
}

View File

@ -1,97 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.context;
import com.qqchen.deploy.backend.system.enums.LogLevelEnum;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
import com.qqchen.deploy.backend.workflow.service.WorkflowVariableOperations;
import lombok.Getter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
/**
 * Default {@link WorkflowContextOperations} implementation.
 * Delegates persistent variable access to WorkflowVariableOperations and
 * writes workflow-scoped log lines through SLF4J, tagged with the instance id.
 */
public class DefaultWorkflowContext implements WorkflowContextOperations {

    private static final Logger logger = LoggerFactory.getLogger(DefaultWorkflowContext.class);

    @Getter
    private final WorkflowInstance workflowInstance;

    private final WorkflowVariableOperations variableOperations;

    public DefaultWorkflowContext(WorkflowInstance workflowInstance, WorkflowVariableOperations variableOperations) {
        this.workflowInstance = workflowInstance;
        this.variableOperations = variableOperations;
    }

    /** Returns the workflow instance this context is bound to. */
    @Override
    public WorkflowInstance getInstance() {
        return this.workflowInstance;
    }

    /** Reads one persistent variable via the delegate. */
    @Override
    public Object getVariable(String key) {
        Long id = workflowInstance.getId();
        return variableOperations.getVariable(id, key);
    }

    /** Writes one persistent variable via the delegate. */
    @Override
    public void setVariable(String key, Object value) {
        Long id = workflowInstance.getId();
        variableOperations.setVariable(id, key, value);
    }

    /** Writes several persistent variables via the delegate. */
    @Override
    public void setVariables(Map<String, Object> variables) {
        variableOperations.setVariables(workflowInstance.getId(), variables);
    }

    /** Reads every persistent variable via the delegate. */
    @Override
    public Map<String, Object> getVariables() {
        return variableOperations.getVariables(workflowInstance.getId());
    }

    /** Logs a workflow-scoped message at the requested level. */
    @Override
    public void log(String message, LogLevelEnum level) {
        write(level, "[Workflow:{}] {}", workflowInstance.getId(), message);
    }

    /** Logs a workflow-scoped message with extra detail at the requested level. */
    @Override
    public void log(String message, String detail, LogLevelEnum level) {
        write(level, "[Workflow:{}] {} - Detail: {}", workflowInstance.getId(), message, detail);
    }

    // Routes one parameterized log line to the SLF4J method matching the level.
    private void write(LogLevelEnum level, String format, Object... args) {
        switch (level) {
            case DEBUG -> logger.debug(format, args);
            case INFO -> logger.info(format, args);
            case WARN -> logger.warn(format, args);
            case ERROR -> logger.error(format, args);
        }
    }

    /** Fluent builder for {@link DefaultWorkflowContext}. */
    public static class Builder {
        private WorkflowInstance workflowInstance;
        private WorkflowVariableOperations variableOperations;

        public Builder workflowInstance(WorkflowInstance workflowInstance) {
            this.workflowInstance = workflowInstance;
            return this;
        }

        public Builder variableOperations(WorkflowVariableOperations variableOperations) {
            this.variableOperations = variableOperations;
            return this;
        }

        public DefaultWorkflowContext build() {
            return new DefaultWorkflowContext(workflowInstance, variableOperations);
        }
    }

    public static Builder builder() {
        return new Builder();
    }
}

View File

@ -1,51 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.context;
import com.qqchen.deploy.backend.system.enums.LogLevelEnum;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
import java.util.Map;
/**
 * Workflow context operations.
 * Describes the capabilities available to code running inside a workflow:
 * access to the instance, its variables, and workflow-scoped logging.
 */
public interface WorkflowContextOperations {

    /**
     * Returns the workflow instance this context belongs to.
     */
    WorkflowInstance getInstance();

    // NOTE(review): unlike the variable accessors below, this method carries no
    // @Deprecated marker — confirm whether that asymmetry is intentional.
    void setVariables(Map<String, Object> variables);

    /**
     * Returns all variables.
     *
     * @deprecated use WorkflowVariableOperations instead
     */
    @Deprecated
    Map<String, Object> getVariables();

    /**
     * Returns one variable.
     *
     * @deprecated use WorkflowVariableOperations instead
     */
    @Deprecated
    Object getVariable(String name);

    /**
     * Sets one variable.
     *
     * @deprecated use WorkflowVariableOperations instead
     */
    @Deprecated
    void setVariable(String name, Object value);

    /**
     * Writes a workflow-scoped log line.
     */
    void log(String message, LogLevelEnum level);

    /**
     * Writes a workflow-scoped log line with extra detail.
     */
    void log(String message, String detail, LogLevelEnum level);
}

View File

@ -3,25 +3,9 @@ package com.qqchen.deploy.backend.workflow.engine.definition;
import jakarta.validation.constraints.NotBlank;
import lombok.Data;
/**
* 任务执行器定义
* 用于定义工作流节点支持的任务执行器类型及其配置结构
* 主要用于
* 1. 节点类型配置定义节点类型支持哪些执行器
* 2. 流程设计前端根据执行器定义动态渲染配置表单
* 3. 执行引擎根据执行器定义验证和执行任务
*/
@Data
public class TaskExecutorDefinition {
public class NodeExecutorDefinition {
/**
* 执行器编码
* 用于标识执行器类型需要与具体的执行器实现类对应
* 例如
* - SHELL对应ShellTaskExecutor
* - JENKINS对应JenkinsTaskExecutor
* - HTTP对应HttpTaskExecutor
*/
@NotBlank(message = "执行器编码不能为空")
private String code;
@ -51,7 +35,7 @@ public class TaskExecutorDefinition {
* 用于
* 1. 前端动态渲染配置表单
* 2. 后端验证配置参数
*
*
* 示例 - Shell执行器配置模式
* {
* "type": "object",

View File

@ -0,0 +1,164 @@
package com.qqchen.deploy.backend.workflow.engine.executor;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.system.enums.LogLevelEnum;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.engine.model.NodeConfig;
import com.qqchen.deploy.backend.workflow.engine.model.WorkflowGraph;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
import com.qqchen.deploy.backend.workflow.enums.NodeStatusEnum;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import com.qqchen.deploy.backend.workflow.repository.INodeInstanceRepository;
import com.qqchen.deploy.backend.workflow.service.IWorkflowLogService;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.annotation.Lazy;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;
/**
 * Abstract base class for workflow node executors.
 *
 * <p>Implements the common execution template: persist a RUNNING node instance,
 * run the before/execute/after hooks, persist the terminal node status, and then
 * recursively execute the downstream nodes of the workflow graph. Subclasses
 * only need to implement {@link #doExecute(WorkflowInstance, NodeInstance)}.</p>
 */
@Slf4j
public abstract class AbstractNodeExecutor implements INodeExecutor {
    @Resource
    protected ObjectMapper objectMapper;
    @Resource
    protected INodeInstanceRepository nodeInstanceRepository;
    @Resource
    protected IWorkflowLogService workflowLogService;
    // Executor registry keyed by node type; @Lazy breaks the circular
    // dependency between the individual executor beans and this map.
    @Resource
    @Lazy
    private Map<NodeTypeEnum, INodeExecutor> nodeExecutors;
    @Override
    public void execute(WorkflowInstance workflowInstance, WorkflowGraph graph, NodeConfig currentNode) {
        // 1. Create and persist the node instance in RUNNING state
        NodeInstance nodeInstance;
        try {
            nodeInstance = createNodeInstance(workflowInstance, currentNode);
        } catch (JsonProcessingException e) {
            throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED, e);
        }
        try {
            // 2. Pre-processing hook
            beforeExecute(workflowInstance, nodeInstance);
            // 3. Node-specific logic supplied by the subclass
            doExecute(workflowInstance, nodeInstance);
            // 4. Post-processing hook
            afterExecute(workflowInstance, nodeInstance);
            // 5. Mark this node as COMPLETED
            updateNodeStatus(nodeInstance, true);
            // 6. Resolve and execute the downstream nodes
            List<NodeConfig> nextNodes = graph.getNextNodes(currentNode.getNodeId());
            executeNextNodes(workflowInstance, graph, nextNodes);
        } catch (Exception e) {
            // 7. Persist the failure, log it, then propagate to the caller
            handleExecutionError(workflowInstance, nodeInstance, e);
            throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED, e);
        }
    }
    /**
     * Executes every downstream node in sequence.
     *
     * @throws WorkflowEngineException if a node type has no registered executor
     */
    protected void executeNextNodes(WorkflowInstance workflowInstance, WorkflowGraph graph, List<NodeConfig> nextNodes) {
        for (NodeConfig nextNode : nextNodes) {
            INodeExecutor executor = nodeExecutors.get(nextNode.getType());
            if (executor == null) {
                // Fail fast with a descriptive error instead of an opaque NPE
                throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED,
                        "No executor registered for node type: " + nextNode.getType());
            }
            executor.execute(workflowInstance, graph, nextNode);
        }
    }
    /**
     * Creates and persists the node instance for the given node configuration.
     * The instance starts in RUNNING state with both create and start timestamps set.
     *
     * @throws JsonProcessingException if the node config cannot be serialized to JSON
     */
    protected NodeInstance createNodeInstance(WorkflowInstance workflowInstance, NodeConfig config) throws JsonProcessingException {
        NodeInstance node = new NodeInstance();
        node.setWorkflowInstance(workflowInstance);
        node.setNodeId(config.getNodeId());
        node.setNodeType(config.getType());
        node.setName(config.getName());
        node.setConfig(objectMapper.writeValueAsString(config.getConfig()));
        node.setStatus(NodeStatusEnum.RUNNING);
        node.setCreateTime(LocalDateTime.now());
        node.setStartTime(LocalDateTime.now());
        return nodeInstanceRepository.save(node);
    }
    /**
     * Node-specific execution logic; implemented by each concrete executor.
     */
    protected abstract void doExecute(WorkflowInstance workflowInstance, NodeInstance nodeInstance);
    /**
     * Pre-processing hook; default implementation does nothing.
     */
    protected void beforeExecute(WorkflowInstance workflowInstance, NodeInstance nodeInstance) {
    }
    /**
     * Post-processing hook; default implementation records a completion log entry.
     */
    protected void afterExecute(WorkflowInstance workflowInstance, NodeInstance nodeInstance) {
        logNodeComplete(nodeInstance);
    }
    /**
     * Handles an execution failure: logs it and persists the FAILED status.
     */
    protected void handleExecutionError(WorkflowInstance workflowInstance, NodeInstance nodeInstance, Exception e) {
        log.error("Node execution failed. nodeInstance: {}, error: {}", nodeInstance.getId(), e.getMessage(), e);
        logSystem(nodeInstance, LogLevelEnum.ERROR, String.format("节点执行失败: %s[%s]", nodeInstance.getName(), nodeInstance.getNodeId()), e.getMessage());
        logNodeError(nodeInstance, e.getMessage());
        updateNodeStatus(nodeInstance, false);
    }
    /**
     * Persists the terminal status of the node instance.
     * The end timestamp is recorded for both success and failure so that
     * failed nodes also have a measurable duration.
     */
    private void updateNodeStatus(NodeInstance nodeInstance, boolean success) {
        nodeInstance.setStatus(success ? NodeStatusEnum.COMPLETED : NodeStatusEnum.FAILED);
        nodeInstance.setEndTime(LocalDateTime.now());
        nodeInstanceRepository.save(nodeInstance);
    }
    /**
     * Writes a system-level entry to the workflow log.
     */
    protected void logSystem(NodeInstance nodeInstance, LogLevelEnum level, String message, String detail) {
        workflowLogService.log(nodeInstance.getWorkflowInstance(), nodeInstance.getNodeId(), level, message, detail);
    }
    /**
     * Writes a "node completed" entry to the workflow log.
     */
    protected void logNodeComplete(NodeInstance nodeInstance) {
        workflowLogService.log(nodeInstance.getWorkflowInstance(), nodeInstance.getNodeId(), LogLevelEnum.INFO, String.format("节点执行完成: %s", nodeInstance.getName()), null);
    }
    /**
     * Writes a "node failed" entry to the workflow log and records the
     * error message on the node instance (persisted by updateNodeStatus).
     */
    protected void logNodeError(NodeInstance nodeInstance, String error) {
        workflowLogService.log(nodeInstance.getWorkflowInstance(), nodeInstance.getNodeId(), LogLevelEnum.ERROR, String.format("节点执行失败: %s", nodeInstance.getName()), error);
        nodeInstance.setError(error);
    }
}

View File

@ -0,0 +1,38 @@
package com.qqchen.deploy.backend.workflow.engine.executor;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
/**
 * End node executor: handles the terminal node of a workflow.
 * <p>
 * Currently a stub: all lifecycle hooks are no-ops. Marking the
 * {@code WorkflowInstance} as completed is presumably this executor's
 * responsibility — TODO confirm and implement in {@link #doExecute}.
 */
@Slf4j
@Component("endNodeExecutor")
public class EndNodeExecutor extends AbstractNodeExecutor {
    @Override
    protected void doExecute(WorkflowInstance workflowInstance, NodeInstance nodeInstance) {
        // Intentionally empty: the end node performs no work of its own yet.
    }
    @Override
    public NodeTypeEnum getNodeType() {
        return NodeTypeEnum.END;
    }
    @Override
    public void validate(String config) {
        // No configuration to validate for an end node.
    }
    @Override
    public void terminate(WorkflowInstance workflowInstance, NodeInstance nodeInstance) {
        // Nothing to terminate: the end node has no long-running work.
    }
}

View File

@ -0,0 +1,27 @@
package com.qqchen.deploy.backend.workflow.engine.executor;
import com.qqchen.deploy.backend.workflow.engine.model.NodeConfig;
import com.qqchen.deploy.backend.workflow.engine.model.WorkflowGraph;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
/**
 * Node executor interface: one implementation per {@link NodeTypeEnum}.
 */
public interface INodeExecutor {
    /**
     * Returns the node type this executor handles; used as the lookup key
     * in the executor registry.
     */
    NodeTypeEnum getNodeType();
    /**
     * Executes the given node within the workflow instance and graph.
     *
     * @param workflowInstance the running workflow instance
     * @param graph            the workflow graph, used to resolve downstream nodes
     * @param currentNode      the configuration of the node to execute
     */
    void execute(WorkflowInstance workflowInstance, WorkflowGraph graph, NodeConfig currentNode);
    /**
     * Validates the node's JSON configuration string before execution.
     */
    void validate(String config);
    /**
     * Terminates a running node, e.g. when the workflow is cancelled.
     *
     * @param workflowInstance the running workflow instance
     * @param nodeInstance     the node instance to terminate
     */
    void terminate(WorkflowInstance workflowInstance, NodeInstance nodeInstance);
}

View File

@ -0,0 +1,36 @@
package com.qqchen.deploy.backend.workflow.engine.executor;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
/**
 * Script node executor, intended to support multiple script languages
 * (Python, Shell, JavaScript, ...).
 * <p>
 * Currently a stub: execution, config validation and termination are all
 * no-ops. TODO implement script execution and config validation.
 */
@Slf4j
@Component("scriptNodeExecutor")
public class ScriptNodeExecutor extends AbstractNodeExecutor {
    @Override
    protected void doExecute(WorkflowInstance workflowInstance, NodeInstance nodeInstance) {
        // TODO: run the configured script and capture its output.
    }
    @Override
    public NodeTypeEnum getNodeType() {
        return NodeTypeEnum.SCRIPT;
    }
    @Override
    public void validate(String config) {
        // TODO: validate script content, language and timeout settings.
    }
    @Override
    public void terminate(WorkflowInstance workflowInstance, NodeInstance nodeInstance) {
        // TODO: kill the running script process, if any.
    }
}

View File

@ -0,0 +1,37 @@
package com.qqchen.deploy.backend.workflow.engine.executor;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
/**
 * Start node executor: the entry node of a workflow, responsible for
 * workflow-instance initialization.
 * <p>
 * Currently a stub: all lifecycle hooks are no-ops; initialization work,
 * if any, is yet to be implemented.
 */
@Slf4j
@Component("startNodeExecutor")
public class StartNodeExecutor extends AbstractNodeExecutor {
    @Override
    protected void doExecute(WorkflowInstance workflowInstance, NodeInstance nodeInstance) {
        // Intentionally empty: the start node performs no work of its own yet.
    }
    @Override
    public NodeTypeEnum getNodeType() {
        return NodeTypeEnum.START;
    }
    @Override
    public void validate(String config) {
        // No configuration to validate for a start node.
    }
    @Override
    public void terminate(WorkflowInstance workflowInstance, NodeInstance nodeInstance) {
        // Nothing to terminate: the start node has no long-running work.
    }
}

View File

@ -1,4 +1,4 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node.config;
package com.qqchen.deploy.backend.workflow.engine.executor.config;
import lombok.Data;
import lombok.EqualsAndHashCode;

View File

@ -1,4 +1,4 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node.config;
package com.qqchen.deploy.backend.workflow.engine.executor.config;
import lombok.Data;
import lombok.EqualsAndHashCode;

View File

@ -1,4 +1,4 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node.config;
package com.qqchen.deploy.backend.workflow.engine.executor.config;
import lombok.Data;
import lombok.EqualsAndHashCode;

View File

@ -1,4 +1,4 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node.config;
package com.qqchen.deploy.backend.workflow.engine.executor.config;
import lombok.Data;
import lombok.EqualsAndHashCode;

View File

@ -1,4 +1,4 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node.config;
package com.qqchen.deploy.backend.workflow.engine.executor.config;
import lombok.Data;
import lombok.EqualsAndHashCode;

View File

@ -1,4 +1,4 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node.config;
package com.qqchen.deploy.backend.workflow.engine.executor.config;
import lombok.Data;
import lombok.EqualsAndHashCode;

View File

@ -0,0 +1,30 @@
package com.qqchen.deploy.backend.workflow.engine.executor.config;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import lombok.Data;
/**
 * Base configuration shared by all workflow node executor configs.
 * <p>
 * Annotated with {@code @Data} so that getters/setters are generated —
 * the import was present but the annotation was missing, which left every
 * private field inaccessible to subclasses and to JSON (de)serialization.
 */
@Data
public class NodeExecutorConfig {
    /**
     * Node ID within the workflow definition.
     */
    private String id;
    /**
     * Human-readable node name.
     */
    private String name;
    /**
     * Node type; determines which executor handles this node.
     */
    private NodeTypeEnum type;
    /**
     * Optional free-text description.
     */
    private String description;
}

View File

@ -1,4 +1,4 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node.config;
package com.qqchen.deploy.backend.workflow.engine.executor.config;
import lombok.Data;
import lombok.EqualsAndHashCode;

View File

@ -1,4 +1,4 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node.config;
package com.qqchen.deploy.backend.workflow.engine.executor.config;
import lombok.Data;
import lombok.EqualsAndHashCode;

View File

@ -1,4 +1,4 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node.config;
package com.qqchen.deploy.backend.workflow.engine.executor.config;
import com.qqchen.deploy.backend.workflow.enums.ScriptLanguageEnum;
import lombok.Data;

View File

@ -1,35 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.gateway;
import lombok.Data;
/**
* 分支汇聚配置
*/
@Data
public class BranchConvergeConfig {
/**
* 汇聚策略
*/
private ConvergeStrategy strategy;
/**
* 需要完成的分支数量N个完成时使用
*/
private Integer requiredCount;
public enum ConvergeStrategy {
ALL("全部完成"),
ANY("任一完成"),
N("N个完成");
private final String description;
ConvergeStrategy(String description) {
this.description = description;
}
public String getDescription() {
return description;
}
}
}

View File

@ -1,46 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.gateway;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.util.List;
/**
* 基于条件的网关配置基类
*/
@Data
@EqualsAndHashCode(callSuper = true)
public abstract class ConditionalGatewayConfig extends GatewayConfig {
/**
* 条件分支列表
*/
private List<ConditionalBranch> branches;
/**
* 默认分支节点ID
*/
private String defaultNodeId;
@Data
public static class ConditionalBranch {
/**
* 分支名称
*/
private String name;
/**
* 条件表达式
*/
private String condition;
/**
* 目标节点ID
*/
private String to;
/**
* 分支描述
*/
private String description;
}
}

View File

@ -1,41 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.gateway;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.springframework.expression.ExpressionParser;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.expression.spel.support.StandardEvaluationContext;
import java.util.Collections;
import java.util.List;
/**
* 排他网关配置
*/
@Data
@EqualsAndHashCode(callSuper = true)
public class ExclusiveGatewayConfig extends ConditionalGatewayConfig {
private final ExpressionParser expressionParser = new SpelExpressionParser();
@Override
public List<String> getNextNodeIds(WorkflowContextOperations context) {
// 返回第一个满足条件的分支的targetNodeId
for (ConditionalBranch branch : getBranches()) {
if (evaluateCondition(branch.getCondition(), context)) {
return Collections.singletonList(branch.getTo());
}
}
return Collections.singletonList(getDefaultNodeId());
}
private boolean evaluateCondition(String condition, WorkflowContextOperations context) {
try {
StandardEvaluationContext evaluationContext = new StandardEvaluationContext(context);
return Boolean.TRUE.equals(expressionParser.parseExpression(condition)
.getValue(evaluationContext, Boolean.class));
} catch (Exception e) {
return false;
}
}
}

View File

@ -1,41 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.gateway;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import com.qqchen.deploy.backend.workflow.enums.GatewayTypeEnum;
import lombok.Data;
import java.util.List;
/**
* 网关配置基类
*/
@Data
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
@JsonSubTypes({
@JsonSubTypes.Type(value = ExclusiveGatewayConfig.class, name = "EXCLUSIVE"),
@JsonSubTypes.Type(value = ParallelGatewayConfig.class, name = "PARALLEL"),
@JsonSubTypes.Type(value = InclusiveGatewayConfig.class, name = "INCLUSIVE")
})
public abstract class GatewayConfig {
/**
* 网关类型
*/
private GatewayTypeEnum type;
/**
* 网关名称
*/
private String name;
/**
* 网关描述
*/
private String description;
/**
* 获取下一个节点IDs
*/
public abstract List<String> getNextNodeIds(WorkflowContextOperations context);
}

View File

@ -1,59 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.gateway;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.engine.executor.node.NodeExecutor;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.List;
@Slf4j
@Component
public class GatewayNodeExecutor implements NodeExecutor {
private final ObjectMapper objectMapper = new ObjectMapper();
@Override
public NodeTypeEnum getNodeType() {
return NodeTypeEnum.GATEWAY;
}
@Override
public void execute(NodeInstance nodeInstance, WorkflowContextOperations context) {
try {
// 解析网关配置
GatewayConfig config = objectMapper.readValue(nodeInstance.getConfig(), GatewayConfig.class);
// 获取下一个节点IDs
List<String> nextNodeIds = config.getNextNodeIds(context);
// 设置输出
nodeInstance.setOutput(objectMapper.createObjectNode()
.put("nextNodeIds", String.join(",", nextNodeIds))
.toString());
} catch (Exception e) {
log.error("Gateway node execution failed", e);
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED, e);
}
}
@Override
public void validate(String config) {
try {
objectMapper.readValue(config, GatewayConfig.class);
} catch (Exception e) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR);
}
}
@Override
public void terminate(NodeInstance nodeInstance, WorkflowContextOperations context) {
// Gateway nodes are instant operations, no need to terminate
}
}

View File

@ -1,51 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.gateway;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.springframework.expression.ExpressionParser;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.expression.spel.support.StandardEvaluationContext;
import java.util.List;
import java.util.stream.Collectors;
/**
* 包容网关配置
*/
@Data
@EqualsAndHashCode(callSuper = true)
public class InclusiveGatewayConfig extends ConditionalGatewayConfig {
/**
* 汇聚配置
*/
private BranchConvergeConfig convergeConfig;
private final ExpressionParser expressionParser = new SpelExpressionParser();
@Override
public List<String> getNextNodeIds(WorkflowContextOperations context) {
// 返回所有满足条件的分支的targetNodeId
List<String> nextNodeIds = getBranches().stream()
.filter(branch -> evaluateCondition(branch.getCondition(), context))
.map(ConditionalBranch::getTo)
.collect(Collectors.toList());
// 如果没有满足条件的分支使用默认分支
if (nextNodeIds.isEmpty() && getDefaultNodeId() != null) {
nextNodeIds.add(getDefaultNodeId());
}
return nextNodeIds;
}
private boolean evaluateCondition(String condition, WorkflowContextOperations context) {
try {
StandardEvaluationContext evaluationContext = new StandardEvaluationContext(context);
return Boolean.TRUE.equals(expressionParser.parseExpression(condition)
.getValue(evaluationContext, Boolean.class));
} catch (Exception e) {
return false;
}
}
}

View File

@ -1,51 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.gateway;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.util.List;
import java.util.stream.Collectors;
/**
* 并行网关配置
*/
@Data
@EqualsAndHashCode(callSuper = true)
public class ParallelGatewayConfig extends GatewayConfig {
/**
* 并行分支列表
*/
private List<ParallelBranch> branches;
/**
* 汇聚配置
*/
private BranchConvergeConfig convergeConfig;
@Data
public static class ParallelBranch {
/**
* 分支名称
*/
private String name;
/**
* 目标节点ID
*/
private String to;
/**
* 分支描述
*/
private String description;
}
@Override
public List<String> getNextNodeIds(WorkflowContextOperations context) {
// 返回所有分支的targetNodeId
return branches.stream()
.map(ParallelBranch::getTo)
.collect(Collectors.toList());
}
}

View File

@ -1,133 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.system.enums.LogLevelEnum;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.enums.NodeStatusEnum;
import com.qqchen.deploy.backend.workflow.repository.INodeInstanceRepository;
import com.qqchen.deploy.backend.workflow.service.IWorkflowLogService;
import com.qqchen.deploy.backend.workflow.service.WorkflowVariableOperations;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import java.time.LocalDateTime;
/**
* 抽象节点执行器
*/
@Slf4j
public abstract class AbstractNodeExecutor implements NodeExecutor {
@Resource
protected WorkflowVariableOperations variableOperations;
@Resource
protected INodeInstanceRepository nodeInstanceRepository;
@Resource
protected IWorkflowLogService workflowLogService;
@Override
public void execute(NodeInstance nodeInstance, WorkflowContextOperations context) {
try {
// 1. 前置处理
beforeExecute(nodeInstance, context);
// 2. 执行节点逻辑
doExecute(nodeInstance, context);
// 3. 后置处理
afterExecute(nodeInstance, context);
// 4. 更新节点状态
updateNodeStatus(nodeInstance, true);
} catch (Exception e) {
// 5. 异常处理
handleExecutionError(nodeInstance, context, e);
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED, e);
}
}
/**
* 执行节点逻辑
*/
protected abstract void doExecute(NodeInstance nodeInstance, WorkflowContextOperations context);
/**
* 前置处理
*/
protected void beforeExecute(NodeInstance nodeInstance, WorkflowContextOperations context) {
context.log("Starting node execution: " + nodeInstance.getName(), LogLevelEnum.INFO);
logNodeStart(nodeInstance);
}
/**
* 后置处理
*/
protected void afterExecute(NodeInstance nodeInstance, WorkflowContextOperations context) {
context.log("Node execution completed: " + nodeInstance.getName(), LogLevelEnum.INFO);
logNodeComplete(nodeInstance);
}
/**
* 处理执行异常
*/
protected void handleExecutionError(NodeInstance nodeInstance, WorkflowContextOperations context, Exception e) {
log.error("Node execution failed. nodeInstance: {}, error: {}", nodeInstance.getId(), e.getMessage(), e);
context.log("Node execution failed: " + e.getMessage(), LogLevelEnum.ERROR);
logSystem(nodeInstance, LogLevelEnum.ERROR,
String.format("节点执行失败: %s[%s]", nodeInstance.getName(), nodeInstance.getNodeId()),
e.getMessage());
logNodeError(nodeInstance, e.getMessage());
updateNodeStatus(nodeInstance, false);
}
/**
* 更新节点状态
*/
private void updateNodeStatus(NodeInstance nodeInstance, boolean success) {
nodeInstance.setStatus(success ? NodeStatusEnum.COMPLETED : NodeStatusEnum.FAILED);
if (success) {
nodeInstance.setEndTime(LocalDateTime.now());
}
nodeInstanceRepository.save(nodeInstance);
}
/**
* 记录系统日志
*/
protected void logSystem(NodeInstance nodeInstance, LogLevelEnum level, String message, String detail) {
workflowLogService.log(nodeInstance.getWorkflowInstance(), nodeInstance.getNodeId(), level, message, detail);
}
/**
* 记录节点开始日志
*/
protected void logNodeStart(NodeInstance nodeInstance) {
workflowLogService.log(nodeInstance.getWorkflowInstance(), nodeInstance.getNodeId(),
LogLevelEnum.INFO, String.format("节点开始执行: %s", nodeInstance.getName()), null);
nodeInstance.setStatus(NodeStatusEnum.RUNNING);
nodeInstance.setStartTime(LocalDateTime.now());
nodeInstanceRepository.save(nodeInstance);
}
/**
* 记录节点完成日志
*/
protected void logNodeComplete(NodeInstance nodeInstance) {
workflowLogService.log(nodeInstance.getWorkflowInstance(), nodeInstance.getNodeId(),
LogLevelEnum.INFO, String.format("节点执行完成: %s", nodeInstance.getName()), null);
}
/**
* 记录节点错误日志
*/
protected void logNodeError(NodeInstance nodeInstance, String error) {
workflowLogService.log(nodeInstance.getWorkflowInstance(), nodeInstance.getNodeId(),
LogLevelEnum.ERROR, String.format("节点执行失败: %s", nodeInstance.getName()), error);
nodeInstance.setError(error);
}
}

View File

@ -1,54 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node;
import com.qqchen.deploy.backend.system.enums.LogLevelEnum;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import com.qqchen.deploy.backend.workflow.enums.WorkflowInstanceStatusEnum;
import com.qqchen.deploy.backend.workflow.repository.IWorkflowInstanceRepository;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.time.LocalDateTime;
/**
* 结束节点执行器
* 负责完成工作流实例的执行
*/
@Slf4j
@Component("endNodeExecutor")
public class EndNodeExecutor extends AbstractNodeExecutor {
@Resource
private IWorkflowInstanceRepository workflowInstanceRepository;
@Override
public NodeTypeEnum getNodeType() {
return NodeTypeEnum.END;
}
@Override
public void validate(String config) {
// 结束节点不需要配置无需验证
}
@Override
protected void doExecute(NodeInstance nodeInstance, WorkflowContextOperations context) {
// 完成工作流实例
WorkflowInstance instance = nodeInstance.getWorkflowInstance();
instance.setStatus(WorkflowInstanceStatusEnum.COMPLETED);
instance.setEndTime(LocalDateTime.now());
workflowInstanceRepository.save(instance);
// 记录日志
context.log("工作流执行完成", LogLevelEnum.INFO);
}
@Override
public void terminate(NodeInstance nodeInstance, WorkflowContextOperations context) {
// 结束节点不需要终止逻辑
context.log("结束节点无需终止操作", LogLevelEnum.INFO);
}
}

View File

@ -1,37 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
/**
* 节点执行器接口
*/
public interface NodeExecutor {
/**
* 获取支持的节点类型
*/
NodeTypeEnum getNodeType();
/**
* 执行节点
*
* @param nodeInstance 节点实例
* @param context 工作流上下文
*/
void execute(NodeInstance nodeInstance, WorkflowContextOperations context);
/**
* 验证节点配置
*/
void validate(String config);
/**
* 终止节点执行
*
* @param nodeInstance 节点实例
* @param context 工作流上下文
*/
void terminate(NodeInstance nodeInstance, WorkflowContextOperations context);
}

View File

@ -1,196 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.qqchen.deploy.backend.system.enums.LogLevelEnum;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import com.qqchen.deploy.backend.workflow.engine.executor.node.config.ScriptNodeExecutorConfig;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import com.qqchen.deploy.backend.workflow.enums.ScriptLanguageEnum;
import com.qqchen.deploy.backend.workflow.service.WorkflowVariableOperations;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import jakarta.annotation.Resource;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.*;
import com.qqchen.deploy.backend.workflow.engine.executor.node.script.command.ScriptCommand;
import com.qqchen.deploy.backend.workflow.engine.executor.node.script.registry.ScriptCommandRegistry;
/**
* 脚本节点执行器
* 支持多种脚本语言Python, Shell, JavaScript等
*/
@Slf4j
@Component
public class ScriptNodeExecutor extends AbstractNodeExecutor {
@Resource
private ObjectMapper objectMapper;
@Resource
private WorkflowVariableOperations variableOperations;
@Resource
private ScriptCommandRegistry commandRegistry;
private final ExecutorService executorService = Executors.newCachedThreadPool();
@Override
public NodeTypeEnum getNodeType() {
return NodeTypeEnum.SCRIPT;
}
@Override
public void validate(String config) {
try {
ScriptNodeExecutorConfig scriptConfig = objectMapper.readValue(config, ScriptNodeExecutorConfig.class);
// 验证脚本内容
if (scriptConfig.getScript() == null || scriptConfig.getScript().trim().isEmpty()) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR, "Script content cannot be empty");
}
// 验证脚本语言
if (scriptConfig.getLanguage() == null) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR, "Script language must be specified");
}
// 验证其他参数
if (scriptConfig.getTimeout() != null && scriptConfig.getTimeout() <= 0) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR, "Timeout must be positive");
}
if (scriptConfig.getRetryTimes() != null && scriptConfig.getRetryTimes() < 0) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR, "Retry times cannot be negative");
}
if (scriptConfig.getRetryInterval() != null && scriptConfig.getRetryInterval() < 0) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR, "Retry interval cannot be negative");
}
} catch (Exception e) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR, e);
}
}
@Override
protected void doExecute(NodeInstance nodeInstance, WorkflowContextOperations context) {
try {
// String configJson = nodeInstance.getConfig();
// ScriptNodeExecutorConfig config = objectMapper.readValue(configJson, ScriptNodeExecutorConfig.class);
//
// // 设置重试次数和间隔
// int maxAttempts = config.getRetryTimes() != null ? config.getRetryTimes() : 1;
// long retryInterval = config.getRetryInterval() != null ? config.getRetryInterval() : 0;
//
// Exception lastException = null;
// for (int attempt = 1; attempt <= maxAttempts; attempt++) {
// try {
// // 执行脚本
ScriptNodeExecutorConfig config = new ScriptNodeExecutorConfig();
config.setLanguage(ScriptLanguageEnum.SHELL);
config.setInterpreter("/bin/bash");
config.setScript("ls -a");
executeScript(config, nodeInstance, context);
// return; // 执行成功直接返回
// } catch (Exception e) {
// lastException = e;
// if (attempt < maxAttempts) {
// context.log(String.format("Script execution failed (attempt %d/%d), retrying in %d seconds",
// attempt, maxAttempts, retryInterval), LogLevelEnum.WARN);
// Thread.sleep(retryInterval * 1000L);
// }
// }
// }
//
// // 如果所有重试都失败抛出最后一个异常
// if (lastException != null) {
// throw lastException;
// }
} catch (Exception e) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED, e);
}
}
private void executeScript(ScriptNodeExecutorConfig config, NodeInstance nodeInstance, WorkflowContextOperations context) throws Exception {
ProcessBuilder processBuilder = new ProcessBuilder();
// 获取命令实现并构建命令
ScriptCommand command = commandRegistry.getCommand(config.getLanguage());
List<String> commandList = command.buildCommand(config);
processBuilder.command(commandList);
// 设置工作目录
if (config.getWorkingDirectory() != null && !config.getWorkingDirectory().trim().isEmpty()) {
processBuilder.directory(new java.io.File(config.getWorkingDirectory()));
}
// 设置环境变量
if (config.getEnvironment() != null && !config.getEnvironment().isEmpty()) {
Map<String, String> env = processBuilder.environment();
env.putAll(config.getEnvironment());
}
Process process = processBuilder.start();
// 创建用于读取输出的Future
Future<List<String>> outputFuture = executorService.submit(() -> readOutput(process.getInputStream()));
Future<List<String>> errorFuture = executorService.submit(() -> readOutput(process.getErrorStream()));
// 等待进程执行完成或超时
boolean completed = true;
if (config.getTimeout() != null && config.getTimeout() > 0) {
completed = process.waitFor(config.getTimeout(), TimeUnit.SECONDS);
if (!completed) {
process.destroyForcibly();
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED,
String.format("Script execution timed out after %d seconds", config.getTimeout()));
}
} else {
process.waitFor();
}
// 获取输出结果
List<String> output = outputFuture.get(5, TimeUnit.SECONDS); // 给5秒时间读取输出
List<String> error = errorFuture.get(5, TimeUnit.SECONDS);
// 检查退出码
int exitCode = process.exitValue();
if (config.getSuccessExitCode() != null && exitCode != config.getSuccessExitCode()) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED,
String.format("Script execution failed with exit code: %d%nError output: %s",
exitCode, String.join("\n", error)));
}
// 设置输出变量
Map<String, Object> outputVariables = new HashMap<>();
outputVariables.put("scriptOutput", String.join("\n", output));
outputVariables.put("exitCode", exitCode);
variableOperations.setVariables(nodeInstance.getWorkflowInstance().getId(), outputVariables);
// 记录执行日志
context.log(String.format("Script executed successfully with exit code: %d", exitCode), LogLevelEnum.INFO);
}
private List<String> readOutput(java.io.InputStream inputStream) throws Exception {
List<String> output = new ArrayList<>();
try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream))) {
String line;
while ((line = reader.readLine()) != null) {
output.add(line);
}
}
return output;
}
@Override
public void terminate(NodeInstance nodeInstance, WorkflowContextOperations context) {
// TODO: 实现终止脚本进程的逻辑
context.log("Script node termination is not implemented yet", LogLevelEnum.WARN);
}
}

View File

@ -1,185 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.qqchen.deploy.backend.system.enums.LogLevelEnum;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import com.qqchen.deploy.backend.workflow.engine.executor.node.config.ShellNodeExecutorConfig;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import com.qqchen.deploy.backend.workflow.service.WorkflowVariableOperations;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import jakarta.annotation.Resource;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.*;
/**
* Shell节点执行器
* @deprecated 请使用 {@link ScriptNodeExecutor} 替代
*/
@Deprecated(since = "1.0", forRemoval = true)
@Slf4j
@Component
public class ShellNodeExecutor extends AbstractNodeExecutor {
@Resource
private ObjectMapper objectMapper;
@Resource
private WorkflowVariableOperations variableOperations;
private final ExecutorService executorService = Executors.newCachedThreadPool();
@Override
public NodeTypeEnum getNodeType() {
return NodeTypeEnum.SHELL;
}
@Override
public void validate(String config) {
try {
ShellNodeExecutorConfig shellConfig = objectMapper.readValue(config, ShellNodeExecutorConfig.class);
// 验证执行器类型
if (!"SHELL".equals(shellConfig.getExecutor())) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR);
}
// 验证脚本内容
if (shellConfig.getScript() == null || shellConfig.getScript().trim().isEmpty()) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR);
}
// 验证其他参数
if (shellConfig.getTimeout() != null && shellConfig.getTimeout() <= 0) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR);
}
if (shellConfig.getRetryTimes() != null && shellConfig.getRetryTimes() < 0) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR);
}
if (shellConfig.getRetryInterval() != null && shellConfig.getRetryInterval() < 0) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR);
}
} catch (Exception e) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR);
}
}
@Override
protected void doExecute(NodeInstance nodeInstance, WorkflowContextOperations context) {
try {
String configJson = nodeInstance.getConfig();
ShellNodeExecutorConfig config = objectMapper.readValue(configJson, ShellNodeExecutorConfig.class);
// 验证执行器类型
if (!"SHELL".equals(config.getExecutor())) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR);
}
// 设置重试次数和间隔
int maxAttempts = config.getRetryTimes() != null ? config.getRetryTimes() : 1;
long retryInterval = config.getRetryInterval() != null ? config.getRetryInterval() : 0;
Exception lastException = null;
for (int attempt = 1; attempt <= maxAttempts; attempt++) {
try {
// 执行Shell命令
executeShellCommand(config, nodeInstance, context);
return; // 执行成功直接返回
} catch (Exception e) {
lastException = e;
if (attempt < maxAttempts) {
context.log(String.format("Shell execution failed (attempt %d/%d), retrying in %d seconds", attempt, maxAttempts, retryInterval), LogLevelEnum.WARN);
Thread.sleep(retryInterval * 1000L);
}
}
}
// 如果所有重试都失败抛出最后一个异常
if (lastException != null) {
throw lastException;
}
} catch (Exception e) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED, e);
}
}
private void executeShellCommand(ShellNodeExecutorConfig config, NodeInstance nodeInstance, WorkflowContextOperations context) throws Exception {
ProcessBuilder processBuilder = new ProcessBuilder();
processBuilder.command("sh", "-c", config.getScript());
// 设置工作目录
if (config.getWorkingDirectory() != null && !config.getWorkingDirectory().trim().isEmpty()) {
processBuilder.directory(new java.io.File(config.getWorkingDirectory()));
}
// 设置环境变量
if (config.getEnvironment() != null && !config.getEnvironment().isEmpty()) {
Map<String, String> env = processBuilder.environment();
env.putAll(config.getEnvironment());
}
Process process = processBuilder.start();
// 创建用于读取输出的Future
Future<List<String>> outputFuture = executorService.submit(() -> readOutput(process.getInputStream()));
Future<List<String>> errorFuture = executorService.submit(() -> readOutput(process.getErrorStream()));
// 等待进程执行完成或超时
boolean completed = true;
if (config.getTimeout() != null && config.getTimeout() > 0) {
completed = process.waitFor(config.getTimeout(), TimeUnit.SECONDS);
if (!completed) {
process.destroyForcibly();
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED,
String.format("Shell execution timed out after %d seconds", config.getTimeout()));
}
} else {
process.waitFor();
}
// 获取输出结果
List<String> output = outputFuture.get(5, TimeUnit.SECONDS); // 给5秒时间读取输出
List<String> error = errorFuture.get(5, TimeUnit.SECONDS);
// 检查退出码
int exitCode = process.exitValue();
if (config.getSuccessExitCode() != null && exitCode != config.getSuccessExitCode()) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED,
String.format("Shell execution failed with exit code: %d%nError output: %s",
exitCode, String.join("\n", error)));
}
// 设置输出变量
Map<String, Object> outputVariables = new HashMap<>();
outputVariables.put("shellOutput", String.join("\n", output));
outputVariables.put("exitCode", exitCode);
variableOperations.setVariables(nodeInstance.getWorkflowInstance().getId(), outputVariables);
// 记录执行日志
context.log(String.format("Shell script executed successfully with exit code: %d", exitCode), LogLevelEnum.INFO);
}
private List<String> readOutput(java.io.InputStream inputStream) throws Exception {
List<String> output = new ArrayList<>();
try (BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream))) {
String line;
while ((line = reader.readLine()) != null) {
output.add(line);
}
}
return output;
}
@Override
public void terminate(NodeInstance nodeInstance, WorkflowContextOperations context) {
// TODO: 实现终止Shell进程的逻辑
context.log("Shell node termination is not implemented yet", LogLevelEnum.WARN);
}
}

View File

@ -1,57 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node;
import com.qqchen.deploy.backend.system.enums.LogLevelEnum;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
/**
* 开始节点执行器
* 负责工作流实例的初始化工作
*/
@Slf4j
@Component("startNodeExecutor")
public class StartNodeExecutor extends AbstractNodeExecutor {
@Override
public NodeTypeEnum getNodeType() {
return NodeTypeEnum.START;
}
@Override
public void validate(String config) {
// 开始节点不需要配置无需验证
}
@Override
protected void doExecute(NodeInstance nodeInstance, WorkflowContextOperations context) {
WorkflowInstance instance = context.getInstance();
// 记录启动日志
String message = String.format(
"工作流[%s]开始执行实例ID: %d业务键: %s",
instance.getWorkflowDefinition().getName(),
instance.getId(),
instance.getBusinessKey()
);
context.log(message, LogLevelEnum.INFO);
// 记录启动时间
log.info("Workflow instance {} started at {}", instance.getId(), instance.getCreateTime());
}
@Override
public void terminate(NodeInstance nodeInstance, WorkflowContextOperations context) {
// 开始节点的终止意味着整个工作流的终止
WorkflowInstance instance = nodeInstance.getWorkflowInstance();
String message = String.format(
"工作流[%s]在启动阶段被终止实例ID: %d",
instance.getWorkflowDefinition().getName(),
instance.getId()
);
context.log(message, LogLevelEnum.WARN);
}
}

View File

@ -1,115 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.qqchen.deploy.backend.system.enums.LogLevelEnum;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.engine.executor.task.TaskConfig;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.enums.NodeStatusEnum;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@Slf4j
@Component
public class TaskNodeExecutor implements NodeExecutor {
@Resource
private ObjectMapper objectMapper;
@Override
public NodeTypeEnum getNodeType() {
return NodeTypeEnum.TASK;
}
@Override
public void execute(NodeInstance nodeInstance, WorkflowContextOperations context) {
try {
// 1. 解析任务配置
TaskConfig config = parseConfig(nodeInstance.getConfig());
// 2. 执行具体任务
executeTask(config, nodeInstance, context);
// 3. 更新节点状态
nodeInstance.setStatus(NodeStatusEnum.COMPLETED);
context.log("任务节点执行完成", LogLevelEnum.INFO);
} catch (Exception e) {
log.error("任务节点执行失败", e);
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED, e);
}
}
@Override
public void validate(String config) {
try {
TaskConfig taskConfig = parseConfig(config);
// 验证必填字段
if (taskConfig.getType() == null) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR, "任务类型不能为空");
}
} catch (Exception e) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR, e);
}
}
@Override
public void terminate(NodeInstance nodeInstance, WorkflowContextOperations context) {
// 终止任务执行
TaskConfig config = parseConfig(nodeInstance.getConfig());
terminateTask(config, nodeInstance, context);
}
private TaskConfig parseConfig(String config) {
try {
return objectMapper.readValue(config, TaskConfig.class);
} catch (Exception e) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR, e);
}
}
private void executeTask(TaskConfig config, NodeInstance nodeInstance, WorkflowContextOperations context) {
switch (config.getType()) {
case HTTP:
executeHttpTask(config, nodeInstance, context);
break;
case JAVA:
executeJavaTask(config, nodeInstance, context);
break;
default:
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_TYPE_NOT_SUPPORTED, "不支持的任务类型: " + config.getType());
}
}
private void terminateTask(TaskConfig config, NodeInstance nodeInstance, WorkflowContextOperations context) {
// 根据任务类型执行终止操作
switch (config.getType()) {
case HTTP:
terminateHttpTask(config, nodeInstance, context);
break;
case JAVA:
terminateJavaTask(config, nodeInstance, context);
break;
}
}
private void executeHttpTask(TaskConfig config, NodeInstance nodeInstance, WorkflowContextOperations context) {
// TODO: 实现HTTP请求执行
}
private void executeJavaTask(TaskConfig config, NodeInstance nodeInstance, WorkflowContextOperations context) {
// TODO: 实现Java方法调用
}
private void terminateHttpTask(TaskConfig config, NodeInstance nodeInstance, WorkflowContextOperations context) {
// TODO: 实现HTTP请求终止
}
private void terminateJavaTask(TaskConfig config, NodeInstance nodeInstance, WorkflowContextOperations context) {
// TODO: 实现Java方法终止
}
}

View File

@ -1,48 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node.config;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import lombok.Data;
/**
* 节点配置基类
*/
@Data
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
@JsonIgnoreProperties(ignoreUnknown = true) // 忽略未知字段
@JsonSubTypes({
@JsonSubTypes.Type(value = ApprovalNodeExecutorConfig.class, name = "APPROVAL"),
@JsonSubTypes.Type(value = ScriptNodeExecutorConfig.class, name = "SCRIPT"),
@JsonSubTypes.Type(value = ShellNodeExecutorConfig.class, name = "SHELL"),
@JsonSubTypes.Type(value = JenkinsNodeExecutorConfig.class, name = "JENKINS"),
@JsonSubTypes.Type(value = GitNodeExecutorConfig.class, name = "GIT"),
@JsonSubTypes.Type(value = ConditionNodeExecutorConfig.class, name = "CONDITION"),
@JsonSubTypes.Type(value = ParallelNodeExecutorConfig.class, name = "PARALLEL"),
@JsonSubTypes.Type(value = NacosNodeExecutorConfig.class, name = "NACOS"),
@JsonSubTypes.Type(value = HttpNodeExecutorConfig.class, name = "HTTP"),
@JsonSubTypes.Type(value = NotifyNodeExecutorConfig.class, name = "NOTIFY")
})
public class NodeExecutorConfig {
/**
* 节点ID
*/
private String id;
/**
* 节点名称
*/
private String name;
/**
* 节点类型
*/
private NodeTypeEnum type;
/**
* 描述
*/
private String description;
}

View File

@ -1,55 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node.config;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.util.Map;
/**
* Shell节点配置
* @deprecated 请使用 {@link ScriptNodeExecutorConfig} 替代设置 language="shell"
*/
@Deprecated(since = "1.0", forRemoval = true)
@Data
@EqualsAndHashCode(callSuper = true)
public class ShellNodeExecutorConfig extends NodeExecutorConfig {
/**
* 执行器类型固定为 SHELL
*/
private String executor = "SHELL";
/**
* Shell脚本内容
*/
private String script;
/**
* 工作目录
*/
private String workingDirectory;
/**
* 超时时间
*/
private Integer timeout;
/**
* 重试次数
*/
private Integer retryTimes;
/**
* 重试间隔
*/
private Integer retryInterval;
/**
* 环境变量
*/
private Map<String, String> environment;
/**
* 成功退出码
*/
private Integer successExitCode = 0;
}

View File

@ -1,61 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.task;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.qqchen.deploy.backend.system.enums.LogLevelEnum;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import lombok.extern.slf4j.Slf4j;
import org.springframework.http.*;
import org.springframework.stereotype.Component;
import org.springframework.web.client.RestTemplate;
import java.util.Map;
@Slf4j
@Component
public class HttpTaskExecutor implements TaskExecutor {
private final ObjectMapper objectMapper = new ObjectMapper();
private final RestTemplate restTemplate = new RestTemplate();
@Override
public void execute(NodeInstance nodeInstance, WorkflowContextOperations context, Map<String, Object> parameters) {
String url = (String) parameters.get("url");
HttpMethod method = HttpMethod.valueOf((String) parameters.getOrDefault("method", "GET"));
Object body = parameters.get("body");
Map<String, String> headers = (Map<String, String>) parameters.get("headers");
try {
HttpHeaders httpHeaders = new HttpHeaders();
if (headers != null) {
headers.forEach(httpHeaders::add);
}
HttpEntity<?> requestEntity = new HttpEntity<>(body, httpHeaders);
ResponseEntity<String> response = restTemplate.exchange(url, method, requestEntity, String.class);
// 记录执行结果
nodeInstance.setOutput(objectMapper.createObjectNode()
.put("statusCode", response.getStatusCode().value())
.put("body", response.getBody())
.toString());
if (!response.getStatusCode().is2xxSuccessful()) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED,
"HTTP request failed with status: " + response.getStatusCode());
}
context.log("HTTP请求执行成功", response.getBody(), LogLevelEnum.INFO);
} catch (Exception e) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED, e);
}
}
@Override
public void terminate(NodeInstance nodeInstance, WorkflowContextOperations context) {
// HTTP请求无需终止操作
}
}

View File

@ -1,50 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.task;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.qqchen.deploy.backend.system.enums.LogLevelEnum;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Component;
import jakarta.annotation.Resource;
import java.lang.reflect.Method;
import java.util.Map;
@Slf4j
@Component
public class JavaTaskExecutor implements TaskExecutor {
@Resource
private ApplicationContext applicationContext;
private final ObjectMapper objectMapper = new ObjectMapper();
@Override
public void execute(NodeInstance nodeInstance, WorkflowContextOperations context, Map<String, Object> parameters) {
String className = parameters.get("className").toString();
String methodName = parameters.get("methodName").toString();
try {
Class<?> clazz = Class.forName(className);
Object instance = applicationContext.getBean(clazz);
Method method = clazz.getMethod(methodName, NodeInstance.class, WorkflowContextOperations.class, Map.class);
method.invoke(instance, nodeInstance, context, parameters);
} catch (ClassNotFoundException e) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR, "Class not found: " + className);
} catch (NoSuchMethodException e) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR, "Method not found: " + methodName);
} catch (Exception e) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED, e);
}
}
@Override
public void terminate(NodeInstance nodeInstance, WorkflowContextOperations context) {
// Java任务无法中断记录日志
context.log("Java task cannot be terminated: " + nodeInstance.getNodeId(), LogLevelEnum.WARN);
}
}

View File

@ -1,66 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.task;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.qqchen.deploy.backend.system.enums.LogLevelEnum;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.exec.*;
import org.springframework.stereotype.Component;
import java.io.ByteArrayOutputStream;
import java.util.Map;
import java.util.concurrent.TimeUnit;
@Slf4j
@Component
public class ShellTaskExecutor implements TaskExecutor {
private final ObjectMapper objectMapper = new ObjectMapper();
@Override
public void execute(NodeInstance nodeInstance, WorkflowContextOperations context, Map<String, Object> parameters) {
String command = (String) parameters.get("command");
Integer timeout = (Integer) parameters.getOrDefault("timeout", 300);
CommandLine cmdLine = CommandLine.parse(command);
DefaultExecutor executor = new DefaultExecutor();
executor.setExitValues(null);
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
ByteArrayOutputStream errorStream = new ByteArrayOutputStream();
executor.setStreamHandler(new PumpStreamHandler(outputStream, errorStream));
ExecuteWatchdog watchdog = new ExecuteWatchdog(TimeUnit.SECONDS.toMillis(timeout));
executor.setWatchdog(watchdog);
try {
int exitValue = executor.execute(cmdLine);
String output = outputStream.toString();
String error = errorStream.toString();
// 记录执行结果
nodeInstance.setOutput(objectMapper.createObjectNode()
.put("exitValue", exitValue)
.put("output", output)
.put("error", error)
.toString());
if (exitValue != 0) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED, error);
}
context.log("Shell命令执行成功", output, LogLevelEnum.INFO);
} catch (Exception e) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED, e);
}
}
@Override
public void terminate(NodeInstance nodeInstance, WorkflowContextOperations context) {
// TODO: 实现Shell命令终止逻辑
}
}

View File

@ -1,45 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.task;
import com.qqchen.deploy.backend.workflow.enums.TaskTypeEnum;
import lombok.Data;
import java.util.Map;
@Data
public class TaskConfig {
/**
* 任务类型
*/
private TaskTypeEnum type;
/**
* 任务名称
*/
private String name;
/**
* 任务描述
*/
private String description;
/**
* 任务超时时间()
*/
private Integer timeout;
/**
* 重试次数
*/
private Integer retryCount;
/**
* 重试间隔()
*/
private Integer retryInterval;
/**
* 任务参数
*/
private Map<String, Object> parameters;
}

View File

@ -1,22 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor.task;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import java.util.Map;
/**
* 任务执行器接口
*/
public interface TaskExecutor {
/**
* 执行任务
*/
void execute(NodeInstance nodeInstance, WorkflowContextOperations context, Map<String, Object> parameters);
/**
* 终止任务
*/
void terminate(NodeInstance nodeInstance, WorkflowContextOperations context);
}

View File

@ -1,4 +1,4 @@
package com.qqchen.deploy.backend.workflow.dto;
package com.qqchen.deploy.backend.workflow.engine.model;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import lombok.Data;
@ -6,15 +6,15 @@ import lombok.Data;
import java.util.Map;
/**
* 节点配置DTO
* 用于前端传递节点配置信息
* 节点配置
* 用于解析工作流定义中的节点配置JSON
*/
@Data
public class NodeConfigDTO {
public class NodeConfig {
/**
* 节点ID
*/
private String id;
private String nodeId;
/**
* 节点类型
@ -27,12 +27,7 @@ public class NodeConfigDTO {
private String name;
/**
* 节点配置
* 节点配置不同类型的节点有不同的配置项
*/
private Map<String, Object> config;
/**
* 节点描述
*/
private String description;
}
}

View File

@ -0,0 +1,35 @@
package com.qqchen.deploy.backend.workflow.engine.model;
import lombok.Data;
/**
* 流转配置
* 用于解析工作流定义中的流转配置JSON
*/
@Data
public class TransitionConfig {
/**
* 来源节点ID
*/
private String from;
/**
* 目标节点ID
*/
private String to;
/**
* 流转条件
*/
private String condition;
/**
* 流转描述
*/
private String description;
/**
* 优先级数字越小优先级越高
*/
private Integer priority;
}

View File

@ -0,0 +1,42 @@
package com.qqchen.deploy.backend.workflow.engine.model;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import lombok.Builder;
import lombok.Data;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
@Data
@Builder
public class WorkflowGraph {
private List<NodeConfig> nodes;
private List<TransitionConfig> transitions;
public NodeConfig getStartNode() {
return nodes.stream()
.filter(node -> node.getType() == NodeTypeEnum.START)
.findFirst()
.orElseThrow(() -> new WorkflowEngineException(ResponseCode.WORKFLOW_NOT_FOUND));
}
public List<NodeConfig> getNextNodes(String nodeId) {
return transitions.stream()
.filter(t -> t.getFrom().equals(nodeId))
.map(t -> findNodeById(t.getTo()))
.filter(Objects::nonNull)
.collect(Collectors.toList());
}
private NodeConfig findNodeById(String nodeId) {
return nodes.stream()
.filter(n -> n.getNodeId().equals(nodeId))
.findFirst()
.orElse(null);
}
}

View File

@ -1,15 +1,15 @@
package com.qqchen.deploy.backend.workflow.engine.parser;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.entity.NodeConfig;
import com.qqchen.deploy.backend.workflow.entity.TransitionConfig;
import com.qqchen.deploy.backend.workflow.engine.model.NodeConfig;
import com.qqchen.deploy.backend.workflow.engine.model.TransitionConfig;
import com.qqchen.deploy.backend.workflow.engine.model.WorkflowGraph;
import com.qqchen.deploy.backend.workflow.entity.WorkflowDefinition;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@ -20,149 +20,59 @@ import java.util.Map;
/**
* 工作流定义解析器
*/
@Slf4j
@Component
@Slf4j
public class WorkflowDefinitionParser {
private final ObjectMapper objectMapper;
@Resource
private ObjectMapper objectMapper;
public WorkflowDefinitionParser(ObjectMapper objectMapper) {
this.objectMapper = objectMapper;
}
/**
* 解析节点配置
*
* @param nodeConfig 节点配置JSON字符串
* @return 节点配置列表
*/
public List<NodeConfig> parseNodeConfig(String nodeConfig) {
public WorkflowGraph parse(WorkflowDefinition definition) {
try {
log.debug("Parsing node config: {}", nodeConfig);
JsonNode rootNode = objectMapper.readTree(nodeConfig);
JsonNode nodesNode = rootNode.get("nodes");
if (nodesNode == null || !nodesNode.isArray()) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_INVALID);
}
// 解析节点配置
JsonNode rootNode = objectMapper.readTree(definition.getNodeConfig());
List<NodeConfig> nodes = parseNodes(rootNode.get("nodes"));
List<NodeConfig> nodes = new ArrayList<>();
for (JsonNode node : nodesNode) {
NodeConfig config = new NodeConfig();
// 将前端的id映射到nodeId
config.setNodeId(node.get("id").asText());
config.setName(node.get("name").asText());
config.setType(NodeTypeEnum.valueOf(node.get("type").asText()));
// 解析节点配置
if (node.has("config")) {
config.setConfig(objectMapper.convertValue(node.get("config"),
new TypeReference<Map<String, Object>>() {}));
}
// 可选字段
if (node.has("description")) {
config.setDescription(node.get("description").asText());
}
nodes.add(config);
log.debug("Parsed node: id={}, type={}", config.getNodeId(), config.getType());
}
return nodes;
} catch (JsonProcessingException e) {
log.error("Failed to parse node config: {}", e.getMessage(), e);
throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_INVALID, e);
// 解析流转配置
JsonNode transitionRoot = objectMapper.readTree(definition.getTransitionConfig());
List<TransitionConfig> transitions = parseTransitions(transitionRoot.get("transitions"));
return WorkflowGraph.builder()
.nodes(nodes)
.transitions(transitions)
.build();
} catch (Exception e) {
log.error("Failed to parse workflow definition", e);
throw new WorkflowEngineException(ResponseCode.WORKFLOW_CONFIG_ERROR);
}
}
/**
* 解析流转配置
*
* @param transitionConfig 流转配置JSON字符串
* @return 流转配置列表
*/
public List<TransitionConfig> parseTransitionConfig(String transitionConfig) {
try {
log.debug("Parsing transition config: {}", transitionConfig);
JsonNode rootNode = objectMapper.readTree(transitionConfig);
JsonNode transitionsNode = rootNode.get("transitions");
if (transitionsNode == null || !transitionsNode.isArray()) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_CONFIG_INVALID);
}
List<TransitionConfig> transitions = new ArrayList<>();
for (JsonNode node : transitionsNode) {
TransitionConfig config = new TransitionConfig();
config.setFrom(node.get("from").asText());
config.setTo(node.get("to").asText());
// 可选字段
if (node.has("condition")) {
config.setCondition(node.get("condition").asText());
}
if (node.has("description")) {
config.setDescription(node.get("description").asText());
}
if (node.has("priority")) {
config.setPriority(node.get("priority").asInt());
}
transitions.add(config);
log.debug("Parsed transition: {} -> {}, priority={}",
config.getFrom(),
config.getTo(),
config.getPriority());
}
return transitions;
} catch (JsonProcessingException e) {
log.error("Failed to parse transition config: {}", e.getMessage(), e);
throw new WorkflowEngineException(ResponseCode.WORKFLOW_CONFIG_INVALID, e);
private List<NodeConfig> parseNodes(JsonNode nodesNode) {
List<NodeConfig> nodes = new ArrayList<>();
for (JsonNode node : nodesNode) {
NodeConfig config = new NodeConfig();
config.setNodeId(node.get("id").asText());
config.setType(NodeTypeEnum.valueOf(node.get("type").asText()));
config.setName(node.get("name").asText());
config.setConfig(objectMapper.convertValue(node.get("config"), new TypeReference<Map<String, Object>>() {}));
nodes.add(config);
}
return nodes;
}
/**
* 验证节点配置的完整性
*
* @param nodes 节点配置列表
* @param transitions 流转配置列表
*/
public void validateConfig(List<NodeConfig> nodes, List<TransitionConfig> transitions) {
// 1. 检查是否有开始节点和结束节点
boolean hasStart = false;
boolean hasEnd = false;
for (NodeConfig node : nodes) {
if (node.getType() == NodeTypeEnum.START) {
if (hasStart) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_CONFIG_INVALID,
"工作流只能有一个开始节点");
}
hasStart = true;
} else if (node.getType() == NodeTypeEnum.END) {
hasEnd = true;
}
}
if (!hasStart || !hasEnd) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_CONFIG_INVALID,
"工作流必须包含开始节点和结束节点");
}
// 2. 检查流转配置的完整性
for (TransitionConfig transition : transitions) {
boolean sourceExists = false;
boolean targetExists = false;
for (NodeConfig node : nodes) {
if (node.getNodeId().equals(transition.getFrom())) {
sourceExists = true;
}
if (node.getNodeId().equals(transition.getTo())) {
targetExists = true;
}
}
if (!sourceExists || !targetExists) {
throw new WorkflowEngineException(ResponseCode.WORKFLOW_CONFIG_INVALID,
String.format("流转配置中的节点不存在: %s -> %s",
transition.getFrom(),
transition.getTo()));
}
private List<TransitionConfig> parseTransitions(JsonNode transitionsNode) {
List<TransitionConfig> transitions = new ArrayList<>();
for (JsonNode transition : transitionsNode) {
TransitionConfig config = new TransitionConfig();
config.setFrom(transition.get("from").asText());
config.setTo(transition.get("to").asText());
config.setCondition(transition.get("condition").asText());
config.setPriority(transition.get("priority").asInt());
transitions.add(config);
}
return transitions;
}
}

View File

@ -1,6 +1,7 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node.script.command;
package com.qqchen.deploy.backend.workflow.engine.script.command;
import com.qqchen.deploy.backend.workflow.engine.executor.config.ScriptNodeExecutorConfig;
import com.qqchen.deploy.backend.workflow.engine.executor.node.config.ScriptNodeExecutorConfig;
import java.util.List;
/**

View File

@ -1,4 +1,4 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node.script.command;
package com.qqchen.deploy.backend.workflow.engine.script.command;
import com.qqchen.deploy.backend.workflow.enums.ScriptLanguageEnum;
import java.lang.annotation.*;

View File

@ -1,10 +1,10 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node.script.command.impl;
package com.qqchen.deploy.backend.workflow.engine.script.command.impl;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.engine.executor.node.config.ScriptNodeExecutorConfig;
import com.qqchen.deploy.backend.workflow.engine.executor.node.script.command.ScriptCommand;
import com.qqchen.deploy.backend.workflow.engine.executor.node.script.command.ScriptLanguageSupport;
import com.qqchen.deploy.backend.workflow.engine.executor.config.ScriptNodeExecutorConfig;
import com.qqchen.deploy.backend.workflow.engine.script.command.ScriptCommand;
import com.qqchen.deploy.backend.workflow.engine.script.command.ScriptLanguageSupport;
import com.qqchen.deploy.backend.workflow.enums.ScriptLanguageEnum;
import org.springframework.stereotype.Component;
import java.util.Arrays;

View File

@ -1,10 +1,10 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node.script.command.impl;
package com.qqchen.deploy.backend.workflow.engine.script.command.impl;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.engine.executor.node.config.ScriptNodeExecutorConfig;
import com.qqchen.deploy.backend.workflow.engine.executor.node.script.command.ScriptCommand;
import com.qqchen.deploy.backend.workflow.engine.executor.node.script.command.ScriptLanguageSupport;
import com.qqchen.deploy.backend.workflow.engine.executor.config.ScriptNodeExecutorConfig;
import com.qqchen.deploy.backend.workflow.engine.script.command.ScriptCommand;
import com.qqchen.deploy.backend.workflow.engine.script.command.ScriptLanguageSupport;
import com.qqchen.deploy.backend.workflow.enums.ScriptLanguageEnum;
import org.springframework.stereotype.Component;
import java.util.Arrays;

View File

@ -1,9 +1,9 @@
package com.qqchen.deploy.backend.workflow.engine.executor.node.script.registry;
package com.qqchen.deploy.backend.workflow.engine.script.registry;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.engine.executor.node.script.command.ScriptCommand;
import com.qqchen.deploy.backend.workflow.engine.executor.node.script.command.ScriptLanguageSupport;
import com.qqchen.deploy.backend.workflow.engine.script.command.ScriptCommand;
import com.qqchen.deploy.backend.workflow.engine.script.command.ScriptLanguageSupport;
import com.qqchen.deploy.backend.workflow.enums.ScriptLanguageEnum;
import jakarta.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Autowired;

View File

@ -1,33 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.transition;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.entity.WorkflowDefinition;
import com.qqchen.deploy.backend.workflow.repository.INodeInstanceRepository;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.time.LocalDateTime;
import java.util.List;
@Slf4j
@Component
public class TransitionExecutor {
@Resource
private TransitionRuleEngine transitionRuleEngine;
@Resource
private INodeInstanceRepository nodeInstanceRepository;
/**
 * Executes the transition away from the given node: asks the rule engine which
 * node IDs come next according to the workflow definition and current context.
 * NOTE(review): this is an unfinished stub — the resolved IDs are never used and
 * nodeInstanceRepository is never touched; the original TODO says targets should
 * come from WorkflowDefinition.transitionConfig. Confirm before relying on it.
 */
public void executeTransition(NodeInstance currentNode, WorkflowDefinition definition, WorkflowContextOperations context) {
// 1. Resolve the candidate next node IDs for the current node.
List<String> nextNodeIds = transitionRuleEngine.getNextNodeIds(currentNode, definition, context);
// TODO (original note, translated): this should be read from WorkflowDefinition.transitionConfig.
}
}

View File

@ -1,28 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.transition;
import lombok.Data;
import java.util.List;
@Data
public class TransitionRule {
/**
 * ID of the source node of this transition.
 */
private String from;
/**
 * ID of the target node of this transition.
 */
private String to;
/**
 * Condition expression (SpEL); null or blank means the transition is unconditional.
 */
private String condition;
/**
 * Priority used to order competing transitions (smaller value = higher priority).
 * NOTE(review): unlike TransitionConfig this field has no default, so it may be
 * null after JSON deserialization — callers sorting on it must handle null.
 */
private Integer priority;
}

View File

@ -1,79 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.transition;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContextOperations;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.entity.WorkflowDefinition;
import lombok.extern.slf4j.Slf4j;
import org.springframework.expression.Expression;
import org.springframework.expression.ExpressionParser;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.expression.spel.support.StandardEvaluationContext;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
/**
 * Resolves which nodes follow a completed node.
 * Transition rules are stored as JSON on the workflow definition; each rule's
 * condition is a SpEL expression evaluated against the workflow context variables.
 */
@Slf4j
@Component
public class TransitionRuleEngine {

    private final ObjectMapper objectMapper = new ObjectMapper();
    private final ExpressionParser expressionParser = new SpelExpressionParser();

    /**
     * Resolves the IDs of the nodes that should run after {@code currentNode}.
     *
     * @param currentNode the node instance that just finished
     * @param definition  workflow definition holding the JSON transition configuration
     * @param context     workflow context supplying variables for condition evaluation
     * @return IDs of every matching target node, in priority order (possibly empty)
     * @throws WorkflowEngineException if the configuration cannot be parsed or a condition fails
     */
    public List<String> getNextNodeIds(NodeInstance currentNode, WorkflowDefinition definition, WorkflowContextOperations context) {
        try {
            // Parse the JSON transition rules attached to the definition.
            List<TransitionRule> rules = parseTransitionRules(definition.getTransitionConfig());
            // Keep only rules leaving the current node, ordered by priority.
            // TransitionRule.priority has no default and may be null after JSON parsing;
            // the original Comparator.comparing(TransitionRule::getPriority) would NPE —
            // sort null priorities last so explicit priorities win.
            List<TransitionRule> nodeRules = rules.stream()
                    .filter(rule -> rule.getFrom().equals(currentNode.getNodeId()))
                    .sorted(Comparator.comparing(TransitionRule::getPriority,
                            Comparator.nullsLast(Comparator.naturalOrder())))
                    .toList();
            // Collect every target whose condition evaluates to true.
            List<String> nextNodeIds = new ArrayList<>();
            for (TransitionRule rule : nodeRules) {
                if (evaluateCondition(rule.getCondition(), context)) {
                    nextNodeIds.add(rule.getTo());
                }
            }
            return nextNodeIds;
        } catch (WorkflowEngineException e) {
            // Preserve the specific error code (e.g. CONFIG_ERROR) instead of
            // re-wrapping everything as EXECUTION_FAILED.
            throw e;
        } catch (Exception e) {
            throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED, e);
        }
    }

    /**
     * Deserializes the JSON transition configuration into rule objects.
     *
     * @throws WorkflowEngineException with CONFIG_ERROR when the JSON is malformed
     */
    private List<TransitionRule> parseTransitionRules(String config) {
        try {
            return objectMapper.readValue(config, new TypeReference<List<TransitionRule>>() {});
        } catch (Exception e) {
            throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR, e);
        }
    }

    /**
     * Evaluates a SpEL condition against the workflow variables.
     * A null/blank condition means "always true" (unconditional transition).
     */
    private boolean evaluateCondition(String condition, WorkflowContextOperations context) {
        if (condition == null || condition.trim().isEmpty()) {
            return true;
        }
        try {
            Expression expression = expressionParser.parseExpression(condition);
            StandardEvaluationContext evaluationContext = new StandardEvaluationContext();
            evaluationContext.setVariables(context.getVariables());
            // getValue may return null when the expression does not yield a boolean;
            // treat null as "not matched" instead of NPE-ing on auto-unboxing.
            return Boolean.TRUE.equals(expression.getValue(evaluationContext, Boolean.class));
        } catch (Exception e) {
            throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_CONFIG_ERROR, e);
        }
    }
}

View File

@ -1,126 +0,0 @@
package com.qqchen.deploy.backend.workflow.entity;
import com.qqchen.deploy.backend.framework.domain.Entity;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import jakarta.persistence.Column;
import jakarta.persistence.Table;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.hibernate.annotations.JdbcTypeCode;
import org.hibernate.type.SqlTypes;
import java.util.Map;
/**
 * Node configuration.
 * Defines a node of a workflow: its identity, its type and type-specific settings.
 */
@Data
@EqualsAndHashCode(callSuper = true)
@jakarta.persistence.Entity
@Table(name = "wf_node_config")
public class NodeConfig extends Entity<Long> {
/**
 * Node ID; must be unique within a single workflow definition.
 */
@NotBlank(message = "节点ID不能为空")
@Column(nullable = false)
private String nodeId;
/**
 * Display name of the node.
 */
@NotBlank(message = "节点名称不能为空")
@Column(nullable = false)
private String name;
/**
 * Node type, which determines the node's behaviour:
 * START: start node — each workflow must have exactly one;
 * END: end node — each workflow must have at least one;
 * TASK: task node — executes a concrete task;
 * GATEWAY: gateway node — controls branching and merging of the flow.
 */
@NotNull(message = "节点类型不能为空")
@Column(nullable = false)
private NodeTypeEnum type;
/**
 * ID of the owning workflow definition.
 */
@NotNull(message = "工作流定义ID不能为空")
@Column(name = "workflow_definition_id", nullable = false)
private Long workflowDefinitionId;
/**
 * Type-specific configuration stored as a JSON column:
 * TASK nodes:
 * - type: SHELL/HTTP/JAVA
 * - config: concrete task settings
 * GATEWAY nodes:
 * - type: EXCLUSIVE/PARALLEL/INCLUSIVE
 * - conditions: branch condition settings
 */
@JdbcTypeCode(SqlTypes.JSON)
@Column(columnDefinition = "json")
private Map<String, Object> config;
/**
 * Free-text description of the node's purpose.
 */
@Column(columnDefinition = "text")
private String description;
/**
 * Checks whether this node configuration is structurally valid: nodeId, name and
 * type are present, and TASK/GATEWAY nodes declare a "type" key in their config.
 *
 * @return true if valid, false otherwise
 */
public boolean isValid() {
if (nodeId == null || nodeId.trim().isEmpty()) {
return false;
}
if (name == null || name.trim().isEmpty()) {
return false;
}
if (type == null) {
return false;
}
// Type-specific checks: both TASK and GATEWAY require a "type" entry in config.
if (type == NodeTypeEnum.TASK && (config == null || !config.containsKey("type"))) {
return false;
}
if (type == NodeTypeEnum.GATEWAY && (config == null || !config.containsKey("type"))) {
return false;
}
return true;
}
/**
 * Returns the task type (e.g. SHELL/HTTP/JAVA); only meaningful for TASK nodes.
 *
 * @return the task type, or null for non-TASK nodes or missing config
 */
public String getTaskType() {
if (type != NodeTypeEnum.TASK || config == null) {
return null;
}
return (String) config.get("type");
}
/**
 * Returns the gateway type (e.g. EXCLUSIVE/PARALLEL/INCLUSIVE); only meaningful for GATEWAY nodes.
 *
 * @return the gateway type, or null for non-GATEWAY nodes or missing config
 */
public String getGatewayType() {
if (type != NodeTypeEnum.GATEWAY || config == null) {
return null;
}
return (String) config.get("type");
}
}

View File

@ -6,7 +6,7 @@ import com.qqchen.deploy.backend.framework.domain.Entity;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.framework.utils.SpringUtils;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.engine.executor.node.config.NodeExecutorConfig;
import com.qqchen.deploy.backend.workflow.engine.executor.config.NodeExecutorConfig;
import com.qqchen.deploy.backend.workflow.enums.NodeStatusEnum;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import jakarta.persistence.*;

View File

@ -1,106 +0,0 @@
package com.qqchen.deploy.backend.workflow.entity;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.qqchen.deploy.backend.framework.domain.Entity;
import jakarta.persistence.Column;
import jakarta.persistence.Table;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import lombok.Data;
import lombok.EqualsAndHashCode;
/**
 * Transition configuration.
 * Defines a directed edge between two workflow nodes and the condition under which it fires.
 */
@Data
@EqualsAndHashCode(callSuper = true)
@jakarta.persistence.Entity
@Table(name = "wf_transition_config")
public class TransitionConfig extends Entity<Long> {
/**
 * ID of the source node.
 */
@NotBlank(message = "源节点ID不能为空")
@Column(name = "`from`", nullable = false)
private String from;
/**
 * ID of the target node.
 */
@NotBlank(message = "目标节点ID不能为空")
@Column(name = "`to`", nullable = false)
private String to;
/**
 * Transition condition as a SpEL expression; empty means unconditional.
 * Examples:
 * - "${status == 'SUCCESS'}"
 * - "${amount > 1000}"
 * - "${result.code == 200 && result.data != null}"
 */
@Column(name = "`condition`", columnDefinition = "text")
private String condition;
/**
 * Priority — the smaller the number, the higher the priority.
 * Controls the order in which multiple outgoing transitions are evaluated.
 * Defaults to 0.
 */
@NotNull(message = "优先级不能为空")
@Column(nullable = false)
private Integer priority = 0;
/**
 * ID of the owning workflow definition.
 */
@NotNull(message = "工作流定义ID不能为空")
@Column(name = "workflow_definition_id", nullable = false)
private Long workflowDefinitionId;
/**
 * Free-text description of this transition.
 */
@Column(columnDefinition = "text")
private String description;
/**
 * Checks whether this transition is structurally valid: both endpoints present,
 * no self-loop, and an owning workflow definition set.
 *
 * @return true if valid, false otherwise
 */
public boolean isValid() {
if (from == null || from.trim().isEmpty()) {
return false;
}
if (to == null || to.trim().isEmpty()) {
return false;
}
if (from.equals(to)) {
return false; // self-loops are not allowed
}
if (workflowDefinitionId == null) {
return false;
}
return true;
}
/**
 * Whether this transition carries a non-blank condition.
 *
 * @return true if conditional, false otherwise
 */
public boolean isConditional() {
return condition != null && !condition.trim().isEmpty();
}
/**
 * Returns the priority, falling back to the default 0 when unset.
 *
 * @return priority value
 */
public int getPriorityValue() {
return priority != null ? priority : 0;
}
}

View File

@ -1,149 +0,0 @@
package com.qqchen.deploy.backend.workflow.monitor;
import io.micrometer.core.instrument.Counter;
import io.micrometer.core.instrument.Gauge;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.Timer;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.DoubleAdder;
/**
 * Workflow context monitor.
 * Collects Micrometer metrics for workflow variable operations: operation counts,
 * context cache hit/miss counts, operation and transaction latencies, and the
 * current context cache size.
 */
@Slf4j
@Component
public class WorkflowContextMonitor {

    private final MeterRegistry meterRegistry;
    private final Counter variableSetCounter;
    private final Counter variableGetCounter;
    private final Counter cacheHitCounter;
    private final Counter cacheMissCounter;
    private final Timer variableOperationTimer;
    private final Timer transactionTimer;
    private final Gauge contextCacheSize;

    // Backing value for the cache-size gauge; a DoubleAdder lets updateCacheSize
    // replace the value without explicit synchronization.
    private final DoubleAdder cacheSize = new DoubleAdder();

    public WorkflowContextMonitor(MeterRegistry registry) {
        this.meterRegistry = registry;
        // Variable operation counters: same metric name, distinguished by the "type" tag.
        this.variableSetCounter = taggedCounter(registry, "workflow.variable.operations",
                "type", "set", "Number of variable set operations");
        this.variableGetCounter = taggedCounter(registry, "workflow.variable.operations",
                "type", "get", "Number of variable get operations");
        // Context cache counters: distinguished by the "result" tag.
        this.cacheHitCounter = taggedCounter(registry, "workflow.context.cache",
                "result", "hit", "Number of context cache hits");
        this.cacheMissCounter = taggedCounter(registry, "workflow.context.cache",
                "result", "miss", "Number of context cache misses");
        // Latency timers publishing p50/p95/p99.
        this.variableOperationTimer = percentileTimer(registry, "workflow.variable.operation.duration",
                "Time taken for variable operations");
        this.transactionTimer = percentileTimer(registry, "workflow.variable.transaction.duration",
                "Time taken for variable transactions");
        // The gauge reads the DoubleAdder, so updateCacheSize() is reflected on the next scrape.
        this.contextCacheSize = Gauge.builder("workflow.context.cache.size", cacheSize::doubleValue)
                .description("Current size of workflow context cache")
                .register(registry);
    }

    /** Builds a single-tag counter; extracted to remove the repeated builder boilerplate. */
    private static Counter taggedCounter(MeterRegistry registry, String name,
                                         String tagKey, String tagValue, String description) {
        return Counter.builder(name)
                .tag(tagKey, tagValue)
                .description(description)
                .register(registry);
    }

    /** Builds a latency timer publishing the 0.5/0.95/0.99 percentiles. */
    private static Timer percentileTimer(MeterRegistry registry, String name, String description) {
        return Timer.builder(name)
                .description(description)
                .publishPercentiles(0.5, 0.95, 0.99)
                .register(registry);
    }

    /** Records one variable set operation. */
    public void recordVariableSet() {
        variableSetCounter.increment();
    }

    /** Records one variable get operation. */
    public void recordVariableGet() {
        variableGetCounter.increment();
    }

    /** Records a context cache hit. */
    public void recordCacheHit() {
        cacheHitCounter.increment();
    }

    /** Records a context cache miss. */
    public void recordCacheMiss() {
        cacheMissCounter.increment();
    }

    /** Starts timing a variable operation; pair with {@link #stopOperation(Timer.Sample)}. */
    public Timer.Sample startOperation() {
        return Timer.start();
    }

    /** Stops the given sample and records it against the operation timer. */
    public void stopOperation(Timer.Sample sample) {
        sample.stop(variableOperationTimer);
    }

    /** Starts timing a variable transaction; pair with {@link #stopTransaction(Timer.Sample)}. */
    public Timer.Sample startTransaction() {
        return Timer.start();
    }

    /** Stops the given sample and records it against the transaction timer. */
    public void stopTransaction(Timer.Sample sample) {
        sample.stop(transactionTimer);
    }

    /**
     * Replaces the reported context cache size with {@code size}.
     * NOTE: reset() followed by add() is not atomic, so a concurrent scrape may
     * transiently observe 0 — acceptable for monitoring purposes.
     */
    public void updateCacheSize(long size) {
        cacheSize.reset();
        cacheSize.add(size);
    }

    /**
     * Logs an operation failure and counts it under "workflow.variable.errors",
     * tagged by operation name and exception class.
     */
    public void recordError(String operation, Exception e) {
        log.error("Workflow variable operation error. Operation: {}, Error: {}",
                operation, e.getMessage(), e);
        Counter.builder("workflow.variable.errors")
                .tag("operation", operation)
                .tag("error", e.getClass().getSimpleName())
                .register(meterRegistry)
                .increment();
    }
}

View File

@ -1,26 +0,0 @@
package com.qqchen.deploy.backend.workflow.repository;
import com.qqchen.deploy.backend.workflow.entity.NodeConfig;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import java.util.List;
@Repository
public interface INodeConfigRepository extends JpaRepository<NodeConfig, Long> {
/**
 * Finds all node configurations belonging to a workflow definition.
 *
 * @param workflowDefinitionId workflow definition ID
 * @return node configurations (empty list when none exist)
 */
List<NodeConfig> findByWorkflowDefinitionId(Long workflowDefinitionId);
/**
 * Deletes all node configurations belonging to a workflow definition.
 * NOTE(review): derived delete queries require an active transaction —
 * presumably callers invoke this inside one; verify at the call sites.
 *
 * @param workflowDefinitionId workflow definition ID
 */
void deleteByWorkflowDefinitionId(Long workflowDefinitionId);
}

View File

@ -1,26 +0,0 @@
package com.qqchen.deploy.backend.workflow.repository;
import com.qqchen.deploy.backend.workflow.entity.TransitionConfig;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import java.util.List;
@Repository
public interface ITransitionConfigRepository extends JpaRepository<TransitionConfig, Long> {
/**
 * Finds all transition configurations belonging to a workflow definition.
 *
 * @param workflowDefinitionId workflow definition ID
 * @return transition configurations (empty list when none exist)
 */
List<TransitionConfig> findByWorkflowDefinitionId(Long workflowDefinitionId);
/**
 * Deletes all transition configurations belonging to a workflow definition.
 * NOTE(review): derived delete queries require an active transaction —
 * presumably callers invoke this inside one; verify at the call sites.
 *
 * @param workflowDefinitionId workflow definition ID
 */
void deleteByWorkflowDefinitionId(Long workflowDefinitionId);
}
}

View File

@ -2,11 +2,8 @@ package com.qqchen.deploy.backend.workflow.service;
import com.qqchen.deploy.backend.framework.service.IBaseService;
import com.qqchen.deploy.backend.workflow.dto.NodeTypeDTO;
import com.qqchen.deploy.backend.workflow.engine.definition.TaskExecutorDefinition;
import com.qqchen.deploy.backend.workflow.entity.NodeType;
import java.util.List;
/**
* 节点类型服务接口
*/
@ -20,22 +17,6 @@ public interface INodeTypeService extends IBaseService<NodeType, NodeTypeDTO, Lo
*/
NodeTypeDTO findByCode(String code);
/**
* 获取指定节点类型支持的执行器列表
* 说明
* 1. 只有TASK类型的节点才有执行器列表
* 2. 一个任务节点可以支持多种执行器
* 3. 每个执行器都有自己的配置模式
*
* 使用场景
* 1. 流程设计时选择节点类型后需要展示该节点支持的执行器列表
* 2. 根据选择的执行器动态渲染配置表单
* 3. 保存节点配置时验证执行器参数是否符合配置模式
*
* @param code 节点类型编码
* @return 执行器列表
*/
List<TaskExecutorDefinition> getExecutors(String code);
/**
* 启用节点类型

View File

@ -0,0 +1,16 @@
package com.qqchen.deploy.backend.workflow.service;
import com.qqchen.deploy.backend.workflow.dto.request.WorkflowStartRequest;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
/**
 * Workflow execution engine entry point.
 */
public interface IWorkflowEngineService {
/**
 * Starts a new workflow instance for the given request.
 *
 * @param request start request (workflow code, business key, ...)
 * @return the created workflow instance
 */
WorkflowInstance startWorkflow(WorkflowStartRequest request);
}

View File

@ -1,52 +0,0 @@
package com.qqchen.deploy.backend.workflow.service;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
import java.util.Map;
/**
 * Operations for reading and writing the variables attached to a workflow instance.
 */
public interface WorkflowVariableOperations {
/**
 * Returns all variables of a workflow instance.
 *
 * @param workflowInstanceId workflow instance ID
 * @return variable map
 */
Map<String, Object> getVariables(Long workflowInstanceId);
/**
 * Stores the given variables for a workflow instance.
 *
 * @param workflowInstanceId workflow instance ID
 * @param variables variable map
 */
void setVariables(Long workflowInstanceId, Map<String, Object> variables);
/**
 * Returns a single variable of a workflow instance.
 *
 * @param workflowInstanceId workflow instance ID
 * @param key variable key
 * @return variable value (null when absent)
 */
Object getVariable(Long workflowInstanceId, String key);
/**
 * Sets a single variable of a workflow instance.
 *
 * @param workflowInstanceId workflow instance ID
 * @param key variable key
 * @param value variable value
 */
void setVariable(Long workflowInstanceId, String key, Object value);
/**
 * Removes all variables of a workflow instance.
 *
 * @param workflowInstanceId workflow instance ID
 */
void clearVariables(Long workflowInstanceId);
}

View File

@ -1,70 +0,0 @@
package com.qqchen.deploy.backend.workflow.service.impl;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
import com.qqchen.deploy.backend.workflow.service.IWorkflowVariableService;
import com.qqchen.deploy.backend.workflow.service.WorkflowVariableOperations;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.springframework.transaction.support.TransactionTemplate;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
/**
 * Concurrency-friendly {@link WorkflowVariableOperations} implementation backed by
 * the persistent variable service and fronted by a Caffeine read-through cache
 * (30-minute TTL, at most 10k instances).
 */
@Slf4j
@Service
public class ConcurrentWorkflowVariableOperations implements WorkflowVariableOperations {

    @Resource
    private IWorkflowVariableService variableService;

    // Cache of instance-id -> variable map, loaded from the variable service on miss.
    private final Cache<Long, Map<String, Object>> variableCache;

    public ConcurrentWorkflowVariableOperations() {
        this.variableCache = Caffeine.newBuilder()
                .expireAfterWrite(30, TimeUnit.MINUTES)
                .maximumSize(10000)
                .build();
    }

    /**
     * Returns all variables of the given instance, loading them through the cache.
     */
    @Override
    public Map<String, Object> getVariables(Long workflowInstanceId) {
        return variableCache.get(workflowInstanceId, variableService::getVariables);
    }

    /**
     * Persists the given variables and refreshes the cache entry.
     * Null or empty maps are ignored (no-op), matching the original contract.
     */
    @Override
    public void setVariables(Long workflowInstanceId, Map<String, Object> variables) {
        if (variables == null || variables.isEmpty()) {
            return;
        }
        variableService.setVariables(workflowInstanceId, variables);
        variableCache.put(workflowInstanceId, variables);
    }

    /**
     * Returns one variable of the instance, or null when absent.
     */
    @Override
    public Object getVariable(Long workflowInstanceId, String key) {
        Map<String, Object> variables = getVariables(workflowInstanceId);
        return variables != null ? variables.get(key) : null;
    }

    /**
     * Sets a single variable.
     * Fixes two defects of the original version: it mutated the map instance held
     * by the cache in place before persisting it, and when the instance had no
     * variables yet (null map) the update was silently dropped. We now copy the
     * current map (or start fresh) and write through {@link #setVariables}.
     */
    @Override
    public void setVariable(Long workflowInstanceId, String key, Object value) {
        Map<String, Object> current = getVariables(workflowInstanceId);
        Map<String, Object> updated = (current != null) ? new HashMap<>(current) : new HashMap<>();
        updated.put(key, value);
        setVariables(workflowInstanceId, updated);
    }

    /**
     * Removes all variables of the instance from storage and invalidates the cache entry.
     */
    @Override
    public void clearVariables(Long workflowInstanceId) {
        variableService.clearVariables(workflowInstanceId);
        variableCache.invalidate(workflowInstanceId);
    }
}

View File

@ -6,18 +6,13 @@ import com.qqchen.deploy.backend.framework.service.impl.BaseServiceImpl;
import com.qqchen.deploy.backend.workflow.converter.JsonConverter;
import com.qqchen.deploy.backend.workflow.converter.NodeTypeConverter;
import com.qqchen.deploy.backend.workflow.dto.NodeTypeDTO;
import com.qqchen.deploy.backend.workflow.engine.definition.TaskExecutorDefinition;
import com.qqchen.deploy.backend.workflow.entity.NodeType;
import com.qqchen.deploy.backend.workflow.enums.NodeCategoryEnum;
import com.qqchen.deploy.backend.workflow.repository.INodeTypeRepository;
import com.qqchen.deploy.backend.workflow.service.INodeTypeService;
import jakarta.annotation.Resource;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.Collections;
import java.util.List;
/**
* 节点类型服务实现类
*/
@ -40,27 +35,6 @@ public class NodeTypeServiceImpl extends BaseServiceImpl<NodeType, NodeTypeDTO,
.orElseThrow(() -> new BusinessException(ResponseCode.WORKFLOW_NODE_TYPE_NOT_FOUND));
}
/**
 * Returns the task executors supported by the given node type.
 * Only TASK-category node types carry executors; other categories return an empty list
 * (the enabled check is only reached for TASK types, matching the original order).
 *
 * @param code node type code
 * @return executor definitions parsed from the node type's JSON executor list
 * @throws BusinessException if the node type is unknown or disabled
 */
@Override
public List<TaskExecutorDefinition> getExecutors(String code) {
// 1. Look up the node type by code (soft-deleted rows excluded).
NodeType nodeType = nodeTypeRepository.findByCodeAndDeletedFalse(code)
.orElseThrow(() -> new BusinessException(ResponseCode.WORKFLOW_NODE_TYPE_NOT_FOUND));
// 2. Only TASK-category nodes have executors.
if (!NodeCategoryEnum.TASK.equals(nodeType.getCategory())) {
// Non-task nodes have no executor list.
return Collections.emptyList();
}
// 3. Disabled node types cannot be used.
if (!nodeType.getEnabled()) {
throw new BusinessException(ResponseCode.WORKFLOW_NODE_TYPE_DISABLED);
}
// 4. Deserialize the stored JSON string into executor definitions.
return jsonConverter.toExecutorList(nodeType.getExecutors());
}
@Override
@Transactional
public void enable(Long id) {

View File

@ -0,0 +1,84 @@
package com.qqchen.deploy.backend.workflow.service.impl;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.workflow.dto.request.WorkflowStartRequest;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.engine.executor.INodeExecutor;
import com.qqchen.deploy.backend.workflow.engine.model.NodeConfig;
import com.qqchen.deploy.backend.workflow.engine.model.WorkflowGraph;
import com.qqchen.deploy.backend.workflow.engine.parser.WorkflowDefinitionParser;
import com.qqchen.deploy.backend.workflow.entity.WorkflowDefinition;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import com.qqchen.deploy.backend.workflow.enums.WorkflowDefinitionStatusEnum;
import com.qqchen.deploy.backend.workflow.enums.WorkflowInstanceStatusEnum;
import com.qqchen.deploy.backend.workflow.repository.INodeInstanceRepository;
import com.qqchen.deploy.backend.workflow.repository.IWorkflowDefinitionRepository;
import com.qqchen.deploy.backend.workflow.repository.IWorkflowInstanceRepository;
import com.qqchen.deploy.backend.workflow.service.IWorkflowEngineService;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import java.time.LocalDateTime;
import java.util.Map;
@Slf4j
@Component
public class WorkflowEngineServiceImpl implements IWorkflowEngineService {

    @Resource
    private IWorkflowDefinitionRepository workflowDefinitionRepository;
    @Resource
    private IWorkflowInstanceRepository workflowInstanceRepository;
    @Resource
    private INodeInstanceRepository nodeInstanceRepository;
    // One executor per node type; @Lazy avoids the circular dependency between the
    // engine and the executors at context startup.
    @Resource
    @Lazy
    private Map<NodeTypeEnum, INodeExecutor> nodeExecutors;
    @Resource
    private ObjectMapper objectMapper;
    @Resource
    private WorkflowDefinitionParser workflowDefinitionParser;

    /**
     * Starts a workflow instance: loads the published definition, persists a RUNNING
     * instance, parses the definition into a graph and executes its start node.
     *
     * @param request start request carrying the workflow code and business key
     * @return the persisted, running workflow instance
     * @throws WorkflowEngineException if the definition is missing, unpublished,
     *         or no executor is registered for the start node's type
     */
    @Override
    @Transactional
    public WorkflowInstance startWorkflow(WorkflowStartRequest request) {
        // 1. Load the workflow definition by code.
        WorkflowDefinition definition = workflowDefinitionRepository.findByCodeAndDeletedFalse(request.getWorkflowCode());
        if (definition == null) {
            throw new WorkflowEngineException(ResponseCode.WORKFLOW_NOT_FOUND);
        }
        // 2. Only published definitions may be started.
        if (definition.getStatus() != WorkflowDefinitionStatusEnum.PUBLISHED) {
            throw new WorkflowEngineException(ResponseCode.WORKFLOW_NOT_PUBLISHED);
        }
        // 3. Create and persist the running instance.
        WorkflowInstance workflowInstance = new WorkflowInstance();
        workflowInstance.setWorkflowDefinition(definition);
        workflowInstance.setBusinessKey(request.getBusinessKey());
        workflowInstance.setStatus(WorkflowInstanceStatusEnum.RUNNING);
        workflowInstance.setCreateTime(LocalDateTime.now());
        workflowInstanceRepository.save(workflowInstance);
        // 4. Parse the definition into an executable graph.
        WorkflowGraph graph = workflowDefinitionParser.parse(definition);
        // 5. Execute the start node. Fail fast with a workflow error if no executor
        //    is registered for its type — the original code would NPE here on a
        //    misconfigured definition.
        NodeConfig startNode = graph.getStartNode();
        INodeExecutor executor = nodeExecutors.get(startNode.getType());
        if (executor == null) {
            throw new WorkflowEngineException(ResponseCode.WORKFLOW_NODE_EXECUTION_FAILED);
        }
        executor.execute(workflowInstance, graph, startNode);
        return workflowInstance;
    }
}

View File

@ -11,7 +11,6 @@ import com.qqchen.deploy.backend.workflow.enums.WorkflowDefinitionStatusEnum;
import com.qqchen.deploy.backend.workflow.repository.IWorkflowDefinitionRepository;
import com.qqchen.deploy.backend.workflow.repository.IWorkflowInstanceRepository;
import com.qqchen.deploy.backend.workflow.service.IWorkflowInstanceService;
import com.qqchen.deploy.backend.workflow.service.WorkflowVariableOperations;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
@ -33,8 +32,6 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl<WorkflowInstanc
@Resource
private IWorkflowInstanceRepository workflowInstanceRepository;
@Resource
private WorkflowVariableOperations variableOperations;
@Override
@Transactional
@ -58,11 +55,6 @@ public class WorkflowInstanceServiceImpl extends BaseServiceImpl<WorkflowInstanc
instance.setStatus(WorkflowInstanceStatusEnum.CREATED);
final WorkflowInstance savedInstance = workflowInstanceRepository.save(instance);
// 4. 设置初始变量
if (variables != null && !variables.isEmpty()) {
variableOperations.setVariables(savedInstance.getId(), variables);
}
return converter.toDto(savedInstance);
}

View File

@ -4,7 +4,7 @@ spring:
datasource:
url: jdbc:mysql://localhost:3306/deploy-ease-platform?useSSL=false&serverTimezone=UTC&allowPublicKeyRetrieval=true
username: root
password: ServBay.dev
password: root
driver-class-name: com.mysql.cj.jdbc.Driver
jpa:
hibernate:

View File

@ -405,26 +405,6 @@ CREATE TABLE wf_workflow_definition (
CONSTRAINT UK_workflow_definition_code_version UNIQUE (code, version_no)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='工作流定义表';
-- 流转配置表
-- One row per directed edge (`from` -> `to`) of a workflow definition; `condition`
-- holds an optional expression and `priority` orders competing outgoing edges
-- (smaller value = higher priority). from/to/condition are reserved words, hence
-- the backtick quoting; (workflow_definition_id, from, to) is unique.
CREATE TABLE wf_transition_config (
id BIGINT AUTO_INCREMENT PRIMARY KEY COMMENT '主键ID',
create_by VARCHAR(255) NULL COMMENT '创建人',
create_time DATETIME(6) NULL COMMENT '创建时间',
deleted BIT NOT NULL DEFAULT 0 COMMENT '是否删除0未删除1已删除',
update_by VARCHAR(255) NULL COMMENT '更新人',
update_time DATETIME(6) NULL COMMENT '更新时间',
version INT NOT NULL DEFAULT 0 COMMENT '乐观锁版本号',
workflow_definition_id BIGINT NOT NULL COMMENT '工作流定义ID',
`from` VARCHAR(100) NOT NULL COMMENT '源节点ID',
`to` VARCHAR(100) NOT NULL COMMENT '目标节点ID',
`condition` TEXT NULL COMMENT '流转条件',
description TEXT NULL COMMENT '流转描述',
priority INT NOT NULL DEFAULT 0 COMMENT '优先级',
CONSTRAINT FK_transition_config_workflow FOREIGN KEY (workflow_definition_id) REFERENCES wf_workflow_definition (id),
CONSTRAINT UK_transition_config_workflow_nodes UNIQUE (workflow_definition_id, `from`, `to`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='流转配置表';
-- 工作流实例表
CREATE TABLE wf_workflow_instance (

View File

@ -1,69 +0,0 @@
package com.qqchen.deploy.backend.api;
import com.qqchen.deploy.backend.system.service.IExternalSystemService;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.security.test.context.support.WithMockUser;
import org.springframework.test.web.servlet.MockMvc;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
@SpringBootTest
@AutoConfigureMockMvc
@WithMockUser(username = "admin", roles = {"ADMIN"})
class ExternalSystemApiControllerTest {

    @Autowired
    private MockMvc mockMvc;

    @MockBean
    private IExternalSystemService externalSystemService;

    /**
     * Stubs the connection test for system 1 and verifies the endpoint
     * relays the stubbed result in the response envelope.
     */
    private void verifyTestConnectionReturns(boolean result) throws Exception {
        when(externalSystemService.testConnection(1L)).thenReturn(result);

        mockMvc.perform(get("/api/v1/external-system/1/test-connection"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.code").value(200))
                .andExpect(jsonPath("$.data").value(result));
    }

    @Test
    void testConnection_WhenSuccess_ShouldReturnTrue() throws Exception {
        verifyTestConnectionReturns(true);
    }

    @Test
    void testConnection_WhenFailed_ShouldReturnFalse() throws Exception {
        verifyTestConnectionReturns(false);
    }

    @Test
    void syncData_ShouldReturnSuccess() throws Exception {
        // Trigger a sync and expect a plain success envelope.
        mockMvc.perform(post("/api/v1/external-system/1/sync"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.code").value(200));
    }

    @Test
    void updateStatus_ShouldReturnSuccess() throws Exception {
        // Disable the system and expect a plain success envelope.
        mockMvc.perform(put("/api/v1/external-system/1/status")
                        .param("enabled", "false"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.code").value(200));
    }
}

View File

@ -1,87 +0,0 @@
package com.qqchen.deploy.backend.api;
import com.qqchen.deploy.backend.system.service.ITenantService;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.security.test.context.support.WithMockUser;
import org.springframework.test.web.servlet.MockMvc;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.when;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.put;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
/**
* 租户API控制器测试类
*
* @author QQChen
* @version 1.0.0
*/
@SpringBootTest
@AutoConfigureMockMvc
@WithMockUser(username = "admin", roles = {"ADMIN"})
class TenantApiControllerTest {

    @Autowired
    private MockMvc mockMvc;

    @MockBean
    private ITenantService tenantService;

    /** Stubs tenant 1's status and verifies the GET endpoint echoes it. */
    private void verifyStatusEndpointReturns(boolean enabled) throws Exception {
        when(tenantService.getStatus(1L)).thenReturn(enabled);

        mockMvc.perform(get("/api/v1/tenant/1/enabled"))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.code").value(200))
                .andExpect(jsonPath("$.success").value(true))
                .andExpect(jsonPath("$.data").value(enabled));
    }

    /** Stubs the update call and verifies the PUT endpoint reports success. */
    private void verifyStatusUpdateSucceeds(boolean enabled) throws Exception {
        doNothing().when(tenantService).updateStatus(1L, enabled);

        mockMvc.perform(put("/api/v1/tenant/1/enabled")
                        .param("enabled", String.valueOf(enabled)))
                .andExpect(status().isOk())
                .andExpect(jsonPath("$.code").value(200))
                .andExpect(jsonPath("$.success").value(true));
    }

    @Test
    void getStatus_WhenEnabled_ShouldReturnTrue() throws Exception {
        verifyStatusEndpointReturns(true);
    }

    @Test
    void getStatus_WhenDisabled_ShouldReturnFalse() throws Exception {
        verifyStatusEndpointReturns(false);
    }

    @Test
    void updateStatus_WhenEnabled_ShouldReturnSuccess() throws Exception {
        verifyStatusUpdateSucceeds(true);
    }

    @Test
    void updateStatus_WhenDisabled_ShouldReturnSuccess() throws Exception {
        verifyStatusUpdateSucceeds(false);
    }
}

View File

@ -1,226 +0,0 @@
package com.qqchen.deploy.backend.service.impl;
import com.qqchen.deploy.backend.system.entity.ExternalSystem;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.framework.exception.BusinessException;
import com.qqchen.deploy.backend.framework.exception.UniqueConstraintException;
import com.qqchen.deploy.backend.system.enums.ExternalSystemAuthTypeEnum;
import com.qqchen.deploy.backend.system.enums.ExternalSystemSyncStatusEnum;
import com.qqchen.deploy.backend.system.enums.ExternalSystemTypeEnum;
import com.qqchen.deploy.backend.system.model.ExternalSystemDTO;
import com.qqchen.deploy.backend.system.repository.IExternalSystemRepository;
import com.qqchen.deploy.backend.system.service.impl.ExternalSystemServiceImpl;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@code ExternalSystemServiceImpl}: unique-constraint
 * validation, Git-specific auth rules, connection testing, data sync and
 * status updates. The repository is mocked; the service bean comes from the
 * Spring context.
 */
@SpringBootTest
class ExternalSystemServiceImplTest {

    @MockBean
    private IExternalSystemRepository externalSystemRepository;

    @Autowired
    private ExternalSystemServiceImpl externalSystemService;

    // Shared fixtures, rebuilt before every test: a Jenkins entity and its DTO.
    private ExternalSystem system;
    private ExternalSystemDTO systemDTO;

    @BeforeEach
    void setUp() {
        // Prepare test data: an enabled Jenkins system with BASIC auth.
        system = new ExternalSystem();
        system.setId(1L);
        system.setName("测试Jenkins");
        system.setType(ExternalSystemTypeEnum.JENKINS);
        system.setUrl("http://jenkins.test.com");
        system.setAuthType(ExternalSystemAuthTypeEnum.BASIC);
        system.setUsername("admin");
        system.setPassword("password");
        system.setEnabled(true);
        system.setSort(1);

        // Matching DTO used for create/update validation paths.
        systemDTO = new ExternalSystemDTO();
        systemDTO.setName("测试Jenkins");
        systemDTO.setType(ExternalSystemTypeEnum.JENKINS);
        systemDTO.setUrl("http://jenkins.test.com");
        systemDTO.setAuthType(ExternalSystemAuthTypeEnum.BASIC);
        systemDTO.setUsername("admin");
        systemDTO.setPassword("password");
        systemDTO.setEnabled(true);
        systemDTO.setSort(1);
    }

    @Test
    void validateUniqueConstraints_WhenNameExists_ShouldThrowException() {
        // Mock: a non-deleted system with the same name already exists.
        when(externalSystemRepository.existsByNameAndDeletedFalse(systemDTO.getName())).thenReturn(true);
        // Verify: the duplicate name is rejected with the dedicated error code.
        assertThatThrownBy(() -> externalSystemService.validateUniqueConstraints(systemDTO))
                .isInstanceOf(UniqueConstraintException.class)
                .hasFieldOrPropertyWithValue("errorCode", ResponseCode.EXTERNAL_SYSTEM_NAME_EXISTS);
    }

    @Test
    void validateUniqueConstraints_WhenTypeAndUrlExists_ShouldThrowException() {
        // Mock: name is free, but the (type, url) pair is already taken.
        when(externalSystemRepository.existsByNameAndDeletedFalse(systemDTO.getName())).thenReturn(false);
        when(externalSystemRepository.existsByTypeAndUrlAndDeletedFalse(systemDTO.getType(), systemDTO.getUrl()))
                .thenReturn(true);
        // Verify: the duplicate type+url combination is rejected.
        assertThatThrownBy(() -> externalSystemService.validateUniqueConstraints(systemDTO))
                .isInstanceOf(UniqueConstraintException.class)
                .hasFieldOrPropertyWithValue("errorCode", ResponseCode.EXTERNAL_SYSTEM_TYPE_URL_EXISTS);
    }

    @Test
    void testConnection_WhenSystemDisabled_ShouldThrowException() {
        // Prepare data: the system is disabled before the connection attempt.
        system.setEnabled(false);
        when(externalSystemRepository.findById(1L)).thenReturn(Optional.of(system));
        // Verify: connecting to a disabled system fails with a business error.
        assertThatThrownBy(() -> externalSystemService.testConnection(1L))
                .isInstanceOf(BusinessException.class)
                .hasFieldOrPropertyWithValue("errorCode", ResponseCode.EXTERNAL_SYSTEM_DISABLED);
    }

    @Test
    void testConnection_WhenSystemEnabled_ShouldReturnTrue() {
        // Mock: repository returns the enabled fixture.
        when(externalSystemRepository.findById(1L)).thenReturn(Optional.of(system));
        // Execute the connection test.
        boolean result = externalSystemService.testConnection(1L);
        // Verify: the connection check reports success.
        assertThat(result).isTrue();
    }

    @Test
    void syncData_WhenSystemDisabled_ShouldThrowException() {
        // Prepare data: disable the system so sync must be refused.
        system.setEnabled(false);
        when(externalSystemRepository.findById(1L)).thenReturn(Optional.of(system));
        // Verify: syncing a disabled system raises EXTERNAL_SYSTEM_DISABLED.
        assertThatThrownBy(() -> externalSystemService.syncData(1L))
                .isInstanceOf(BusinessException.class)
                .hasFieldOrPropertyWithValue("errorCode", ResponseCode.EXTERNAL_SYSTEM_DISABLED);
    }

    @Test
    void syncData_WhenSuccessful_ShouldUpdateStatus() {
        // Mock: lookup and save both succeed.
        when(externalSystemRepository.findById(1L)).thenReturn(Optional.of(system));
        when(externalSystemRepository.save(any(ExternalSystem.class))).thenReturn(system);
        // Execute the sync.
        externalSystemService.syncData(1L);
        // Verify: status flips to SUCCESS, the sync timestamp is set, and the
        // entity is persisted twice (presumably start + finish of the sync —
        // TODO confirm against the service implementation).
        assertThat(system.getSyncStatus()).isEqualTo(ExternalSystemSyncStatusEnum.SUCCESS);
        assertThat(system.getLastSyncTime()).isNotNull();
        verify(externalSystemRepository, times(2)).save(any(ExternalSystem.class));
    }

    @Test
    void updateStatus_ShouldUpdateSystemStatus() {
        // Mock: lookup and save both succeed.
        when(externalSystemRepository.findById(1L)).thenReturn(Optional.of(system));
        when(externalSystemRepository.save(any(ExternalSystem.class))).thenReturn(system);
        // Execute: disable the system.
        externalSystemService.updateStatus(1L, false);
        // Verify: the flag is updated and the entity is saved.
        assertThat(system.getEnabled()).isFalse();
        verify(externalSystemRepository).save(system);
    }

    @Test
    void validateUniqueConstraints_WhenGitWithoutToken_ShouldThrowException() {
        // Prepare data: a GIT system using TOKEN auth but with no token value.
        systemDTO.setType(ExternalSystemTypeEnum.GIT);
        systemDTO.setAuthType(ExternalSystemAuthTypeEnum.TOKEN);
        systemDTO.setToken(null);
        // Mock: no uniqueness conflicts, so only the Git rule can fail.
        when(externalSystemRepository.existsByNameAndDeletedFalse(systemDTO.getName())).thenReturn(false);
        when(externalSystemRepository.existsByTypeAndUrlAndDeletedFalse(systemDTO.getType(), systemDTO.getUrl()))
                .thenReturn(false);
        // Verify: a Git system requires a token.
        assertThatThrownBy(() -> externalSystemService.validateUniqueConstraints(systemDTO))
                .isInstanceOf(BusinessException.class)
                .hasFieldOrPropertyWithValue("errorCode", ResponseCode.EXTERNAL_SYSTEM_GIT_TOKEN_REQUIRED);
    }

    @Test
    void validateUniqueConstraints_WhenGitWithWrongAuthType_ShouldThrowException() {
        // Prepare data: GIT systems must not use BASIC auth.
        systemDTO.setType(ExternalSystemTypeEnum.GIT);
        systemDTO.setAuthType(ExternalSystemAuthTypeEnum.BASIC);
        // Mock: no uniqueness conflicts, so only the auth-type rule can fail.
        when(externalSystemRepository.existsByNameAndDeletedFalse(systemDTO.getName())).thenReturn(false);
        when(externalSystemRepository.existsByTypeAndUrlAndDeletedFalse(systemDTO.getType(), systemDTO.getUrl()))
                .thenReturn(false);
        // Verify: the wrong auth type for Git is rejected.
        assertThatThrownBy(() -> externalSystemService.validateUniqueConstraints(systemDTO))
                .isInstanceOf(BusinessException.class)
                .hasFieldOrPropertyWithValue("errorCode", ResponseCode.EXTERNAL_SYSTEM_GIT_AUTH_TYPE_ERROR);
    }

    @Test
    void update_WhenGitWithoutToken_ShouldThrowException() {
        // Prepare data: updating to GIT/TOKEN without a token must fail too.
        systemDTO.setType(ExternalSystemTypeEnum.GIT);
        systemDTO.setAuthType(ExternalSystemAuthTypeEnum.TOKEN);
        systemDTO.setToken(null);
        // Verify: update enforces the same Git token rule as create.
        assertThatThrownBy(() -> externalSystemService.update(1L, systemDTO))
                .isInstanceOf(BusinessException.class)
                .hasFieldOrPropertyWithValue("errorCode", ResponseCode.EXTERNAL_SYSTEM_GIT_TOKEN_REQUIRED);
    }

    @Test
    void update_WhenGitWithWrongAuthType_ShouldThrowException() {
        // Prepare data: updating a Git system to BASIC auth must fail.
        systemDTO.setType(ExternalSystemTypeEnum.GIT);
        systemDTO.setAuthType(ExternalSystemAuthTypeEnum.BASIC);
        // Verify: update enforces the same Git auth-type rule as create.
        assertThatThrownBy(() -> externalSystemService.update(1L, systemDTO))
                .isInstanceOf(BusinessException.class)
                .hasFieldOrPropertyWithValue("errorCode", ResponseCode.EXTERNAL_SYSTEM_GIT_AUTH_TYPE_ERROR);
    }

    @Test
    void testConnection_WhenSuccess_ShouldUpdateLastConnectTime() {
        // Mock: lookup and save both succeed.
        when(externalSystemRepository.findById(1L)).thenReturn(Optional.of(system));
        when(externalSystemRepository.save(any(ExternalSystem.class))).thenReturn(system);
        // Execute the connection test.
        boolean result = externalSystemService.testConnection(1L);
        // Verify: success is reported and the last-connect timestamp is persisted.
        assertThat(result).isTrue();
        assertThat(system.getLastConnectTime()).isNotNull();
        verify(externalSystemRepository).save(system);
    }
}

View File

@ -1,236 +0,0 @@
package com.qqchen.deploy.backend.service.impl;
import com.qqchen.deploy.backend.system.converter.MenuConverter;
import com.qqchen.deploy.backend.system.entity.Menu;
import com.qqchen.deploy.backend.system.entity.Permission;
import com.qqchen.deploy.backend.system.model.MenuDTO;
import com.qqchen.deploy.backend.system.model.response.MenuPermissionTreeResponse;
import com.qqchen.deploy.backend.system.model.response.MenuResponse;
import com.qqchen.deploy.backend.system.model.response.PermissionResponse;
import com.qqchen.deploy.backend.system.repository.IMenuRepository;
import com.qqchen.deploy.backend.system.repository.IPermissionRepository;
import com.qqchen.deploy.backend.system.service.impl.MenuServiceImpl;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code MenuServiceImpl}: building the menu/permission tree
 * and the plain menu tree from flat repository rows. Repositories and the
 * converter are mocked; the service bean comes from the Spring context.
 */
@SpringBootTest
class MenuServiceImplTest {

    @MockBean
    private IMenuRepository menuRepository;

    @MockBean
    private IPermissionRepository permissionRepository;

    @MockBean
    private MenuConverter menuConverter;

    @Autowired
    private MenuServiceImpl menuService;

    // Fixtures: a two-level menu tree (root -> child) and two permissions
    // attached to the child menu. Rebuilt before every test.
    private Menu rootMenu;
    private Menu childMenu;
    private Permission permission1;
    private Permission permission2;

    @BeforeEach
    void setUp() {
        // Prepare test data. parentId 0 marks the root of the tree.
        rootMenu = new Menu();
        rootMenu.setId(1L);
        rootMenu.setName("根菜单");
        rootMenu.setSort(1);
        rootMenu.setType(1);
        rootMenu.setParentId(0L);

        childMenu = new Menu();
        childMenu.setId(2L);
        childMenu.setName("子菜单");
        childMenu.setSort(1);
        childMenu.setType(2);
        childMenu.setParentId(1L);

        // Both permissions hang off the child menu (menuId = 2).
        permission1 = new Permission();
        permission1.setId(1L);
        permission1.setMenuId(2L);
        permission1.setName("查看");
        permission1.setCode("VIEW");
        permission1.setSort(1);

        permission2 = new Permission();
        permission2.setId(2L);
        permission2.setMenuId(2L);
        permission2.setName("编辑");
        permission2.setCode("EDIT");
        permission2.setSort(2);
    }

    @Test
    void getPermissionTree_ShouldReturnCorrectStructure() {
        // Prepare flat lists as the repositories would return them.
        List<Menu> menus = Arrays.asList(rootMenu, childMenu);
        List<Permission> permissions = Arrays.asList(permission1, permission2);
        // Mock repository lookups.
        when(menuRepository.findByDeletedFalseOrderBySort()).thenReturn(menus);
        when(permissionRepository.findAllByDeletedFalseOrderBySort()).thenReturn(permissions);
        // Mock the converter: entity -> response for each menu, and the
        // permission list conversion for the child menu's permissions.
        MenuPermissionTreeResponse rootResponse = createMenuPermissionResponse(rootMenu);
        MenuPermissionTreeResponse childResponse = createMenuPermissionResponse(childMenu);
        when(menuConverter.toMenuPermissionResponse(rootMenu)).thenReturn(rootResponse);
        when(menuConverter.toMenuPermissionResponse(childMenu)).thenReturn(childResponse);
        when(menuConverter.toPermissionResponseList(Arrays.asList(permission1, permission2)))
                .thenReturn(Arrays.asList(
                        createPermissionResponse(permission1),
                        createPermissionResponse(permission2)
                ));
        // Run the method under test.
        List<MenuPermissionTreeResponse> result = menuService.getPermissionTree();
        // Verify the assembled tree.
        assertThat(result).isNotNull();
        assertThat(result).hasSize(1); // exactly one root node
        MenuPermissionTreeResponse root = result.get(0);
        assertThat(root.getId()).isEqualTo(1L);
        assertThat(root.getName()).isEqualTo("根菜单");
        assertThat(root.getPermissionChildren()).hasSize(1); // one child menu
        MenuPermissionTreeResponse child = root.getPermissionChildren().get(0);
        assertThat(child.getId()).isEqualTo(2L);
        assertThat(child.getName()).isEqualTo("子菜单");
        assertThat(child.getPermissions()).hasSize(2); // both permissions attached
        PermissionResponse firstPermission = child.getPermissions().get(0);
        assertThat(firstPermission.getCode()).isEqualTo("VIEW");
        assertThat(firstPermission.getName()).isEqualTo("查看");
    }

    // Helper: copies a Menu's fields into a MenuPermissionTreeResponse stub.
    private MenuPermissionTreeResponse createMenuPermissionResponse(Menu menu) {
        MenuPermissionTreeResponse response = new MenuPermissionTreeResponse();
        response.setId(menu.getId());
        response.setName(menu.getName());
        response.setParentId(menu.getParentId());
        response.setType(menu.getType());
        response.setSort(menu.getSort());
        return response;
    }

    // Helper: copies a Permission's fields into a PermissionResponse stub.
    private PermissionResponse createPermissionResponse(Permission permission) {
        PermissionResponse response = new PermissionResponse();
        response.setId(permission.getId());
        response.setCode(permission.getCode());
        response.setName(permission.getName());
        response.setType(permission.getType());
        response.setSort(permission.getSort());
        return response;
    }

    @Test
    void getPermissionTree_WithEmptyData_ShouldReturnEmptyList() {
        // Mock empty repository results.
        when(menuRepository.findByDeletedFalseOrderBySort()).thenReturn(List.of());
        when(permissionRepository.findAllByDeletedFalseOrderBySort()).thenReturn(List.of());
        // Run the method under test.
        List<MenuPermissionTreeResponse> result = menuService.getPermissionTree();
        // Verify: an empty (but non-null) tree.
        assertThat(result).isNotNull();
        assertThat(result).isEmpty();
    }

    @Test
    void getMenuTree_ShouldReturnCorrectStructure() {
        // Prepare the flat entity list.
        List<Menu> menus = Arrays.asList(rootMenu, childMenu);
        // Prepare DTOs: the root DTO already carries the child, mirroring the
        // intermediate representation the service builds.
        MenuDTO rootDto = convertToDto(rootMenu);
        MenuDTO childDto = convertToDto(childMenu);
        rootDto.setChildren(new ArrayList<>(Arrays.asList(childDto)));
        List<MenuDTO> dtoList = new ArrayList<>(Arrays.asList(rootDto, childDto));
        // Prepare the expected response objects.
        MenuResponse rootResponse = createMenuResponse(rootMenu);
        MenuResponse childResponse = createMenuResponse(childMenu);
        rootResponse.setChildren(new ArrayList<>(Arrays.asList(childResponse)));
        List<MenuResponse> responseList = new ArrayList<>(Arrays.asList(rootResponse));
        // Mock the repository lookup.
        when(menuRepository.findByDeletedFalseOrderBySort()).thenReturn(menus);
        // Mock the two converter stages (entity list -> DTOs -> responses).
        when(menuConverter.toDtoList(menus)).thenReturn(dtoList);
        when(menuConverter.toResponseList(List.of(rootDto))).thenReturn(responseList);
        // Run the method under test.
        List<MenuResponse> result = menuService.getMenuTree();
        // Verify the assembled tree.
        assertThat(result).isNotNull();
        assertThat(result).hasSize(1); // exactly one root node
        MenuResponse root = result.get(0);
        assertThat(root.getId()).isEqualTo(1L);
        assertThat(root.getName()).isEqualTo("根菜单");
        assertThat(root.getType()).isEqualTo(1);
        assertThat(root.getParentId()).isEqualTo(0L);
        assertThat(root.getChildren()).hasSize(1); // one child node
        MenuResponse child = root.getChildren().get(0);
        assertThat(child.getId()).isEqualTo(2L);
        assertThat(child.getName()).isEqualTo("子菜单");
        assertThat(child.getType()).isEqualTo(2);
        assertThat(child.getParentId()).isEqualTo(1L);
        assertThat(child.getChildren()).isEmpty(); // leaf node
    }

    @Test
    void getMenuTree_WithEmptyData_ShouldReturnEmptyList() {
        // Mock an empty repository result.
        when(menuRepository.findByDeletedFalseOrderBySort()).thenReturn(List.of());
        // Run the method under test.
        List<MenuResponse> result = menuService.getMenuTree();
        // Verify: an empty (but non-null) list.
        assertThat(result).isNotNull();
        assertThat(result).isEmpty();
    }

    // Helper: copies a Menu's fields into a MenuResponse with no children.
    private MenuResponse createMenuResponse(Menu menu) {
        MenuResponse response = new MenuResponse();
        response.setId(menu.getId());
        response.setName(menu.getName());
        response.setParentId(menu.getParentId());
        response.setType(menu.getType());
        response.setSort(menu.getSort());
        response.setChildren(new ArrayList<>());
        return response;
    }

    // Helper: copies a Menu's fields into a MenuDTO with no children.
    private MenuDTO convertToDto(Menu menu) {
        MenuDTO dto = new MenuDTO();
        dto.setId(menu.getId());
        dto.setName(menu.getName());
        dto.setParentId(menu.getParentId());
        dto.setType(menu.getType());
        dto.setSort(menu.getSort());
        dto.setChildren(new ArrayList<>());
        return dto;
    }
}

View File

@ -1,102 +0,0 @@
package com.qqchen.deploy.backend.service.impl;
import com.qqchen.deploy.backend.system.entity.Tenant;
import com.qqchen.deploy.backend.system.repository.ITenantRepository;
import com.qqchen.deploy.backend.system.service.impl.TenantServiceImpl;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@code TenantServiceImpl}, covering the enabled-status
 * read and update operations against a mocked repository.
 *
 * @author QQChen
 * @version 1.0.0
 */
@SpringBootTest
class TenantServiceImplTest {

    @MockBean
    private ITenantRepository tenantRepository;

    @Autowired
    private TenantServiceImpl tenantService;

    // Single tenant fixture, rebuilt (enabled) before every test.
    private Tenant sampleTenant;

    @BeforeEach
    void setUp() {
        sampleTenant = new Tenant();
        sampleTenant.setId(1L);
        sampleTenant.setName("测试租户");
        sampleTenant.setCode("TEST");
        sampleTenant.setEnabled(true);
    }

    @Test
    void getStatus_WhenEnabled_ShouldReturnTrue() {
        // An enabled tenant must report true.
        when(tenantRepository.findById(1L)).thenReturn(Optional.of(sampleTenant));

        boolean status = tenantService.getStatus(1L);

        assertThat(status).isTrue();
    }

    @Test
    void getStatus_WhenDisabled_ShouldReturnFalse() {
        // A disabled tenant must report false.
        sampleTenant.setEnabled(false);
        when(tenantRepository.findById(1L)).thenReturn(Optional.of(sampleTenant));

        boolean status = tenantService.getStatus(1L);

        assertThat(status).isFalse();
    }

    @Test
    void updateStatus_WhenDisabled_ShouldUpdateTenantStatus() {
        // Disabling an enabled tenant flips the flag and persists the entity.
        when(tenantRepository.findById(1L)).thenReturn(Optional.of(sampleTenant));
        when(tenantRepository.save(any(Tenant.class))).thenReturn(sampleTenant);

        tenantService.updateStatus(1L, false);

        assertThat(sampleTenant.getEnabled()).isFalse();
        verify(tenantRepository).save(sampleTenant);
    }

    @Test
    void updateStatus_WhenEnabled_ShouldUpdateTenantStatus() {
        // Enabling a disabled tenant flips the flag and persists the entity.
        sampleTenant.setEnabled(false);
        when(tenantRepository.findById(1L)).thenReturn(Optional.of(sampleTenant));
        when(tenantRepository.save(any(Tenant.class))).thenReturn(sampleTenant);

        tenantService.updateStatus(1L, true);

        assertThat(sampleTenant.getEnabled()).isTrue();
        verify(tenantRepository).save(sampleTenant);
    }
}

View File

@ -1,305 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.workflow.engine.context.DefaultWorkflowContext;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContext;
import com.qqchen.deploy.backend.workflow.engine.exception.WorkflowEngineException;
import com.qqchen.deploy.backend.workflow.engine.executor.node.NodeExecutor;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.entity.WorkflowDefinition;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
import com.qqchen.deploy.backend.workflow.enums.NodeStatusEnum;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import com.qqchen.deploy.backend.workflow.enums.WorkflowDefinitionStatusEnum;
import com.qqchen.deploy.backend.workflow.enums.WorkflowInstanceStatusEnum;
import com.qqchen.deploy.backend.workflow.repository.INodeInstanceRepository;
import com.qqchen.deploy.backend.workflow.repository.IWorkflowDefinitionRepository;
import com.qqchen.deploy.backend.workflow.repository.IWorkflowInstanceRepository;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@code DefaultWorkflowEngine}: starting, executing nodes,
 * and terminating / pausing / resuming workflow instances. All repositories,
 * node executors and the context factory are Mockito mocks.
 */
class DefaultWorkflowEngineTest {

    @Mock
    private IWorkflowDefinitionRepository workflowDefinitionRepository;

    @Mock
    private IWorkflowInstanceRepository workflowInstanceRepository;

    @Mock
    private INodeInstanceRepository nodeInstanceRepository;

    // Executor registry keyed by node type; stubbed per-type in setUp().
    @Mock
    private Map<NodeTypeEnum, NodeExecutor> nodeExecutors;

    @Mock
    private DefaultWorkflowContext.Factory workflowContextFactory;

    @Mock
    private NodeExecutor startNodeExecutor;

    @Mock
    private NodeExecutor taskNodeExecutor;

    @Mock
    private DefaultWorkflowContext workflowContext;

    @InjectMocks
    private DefaultWorkflowEngine workflowEngine;

    @BeforeEach
    void setUp() {
        MockitoAnnotations.openMocks(this);
        // Baseline mock behaviour shared by every test: route START/TASK nodes
        // to their executors and always hand out the mocked context.
        when(nodeExecutors.get(NodeTypeEnum.START)).thenReturn(startNodeExecutor);
        when(nodeExecutors.get(NodeTypeEnum.TASK)).thenReturn(taskNodeExecutor);
        when(workflowContextFactory.create(any())).thenReturn(workflowContext);
    }

    @Test
    void startWorkflow_Success() {
        // Prepare test data: a published definition and a pending START node.
        String workflowCode = "test-workflow";
        String businessKey = "test-key";
        Map<String, Object> variables = new HashMap<>();
        variables.put("key1", "value1");
        WorkflowDefinition definition = new WorkflowDefinition();
        definition.setId(1L);
        definition.setCode(workflowCode);
        definition.setStatus(WorkflowDefinitionStatusEnum.PUBLISHED);
        NodeInstance startNode = new NodeInstance();
        startNode.setId(1L);
        startNode.setNodeType(NodeTypeEnum.START);
        startNode.setStatus(NodeStatusEnum.PENDING);
        // Configure mock behaviour: save() echoes its argument so the engine
        // gets back the instance it created.
        when(workflowDefinitionRepository.findByCodeAndDeletedFalse(workflowCode)).thenReturn(definition);
        when(workflowInstanceRepository.save(any())).thenAnswer(i -> i.getArgument(0));
        when(nodeInstanceRepository.save(any())).thenReturn(startNode);
        when(nodeInstanceRepository.findById(startNode.getId())).thenReturn(Optional.of(startNode));
        doNothing().when(startNodeExecutor).execute(any(), any());
        // Run the method under test.
        WorkflowInstance result = workflowEngine.startWorkflow(workflowCode, businessKey, variables);
        // Verify: instance is RUNNING with a start time, and every input
        // variable was copied into the context.
        assertNotNull(result);
        assertEquals(WorkflowInstanceStatusEnum.RUNNING, result.getStatus());
        assertNotNull(result.getStartTime());
        verify(workflowDefinitionRepository).findByCodeAndDeletedFalse(workflowCode);
        verify(workflowInstanceRepository).save(any());
        verify(nodeInstanceRepository).save(any());
        verify(workflowContextFactory).create(any());
        verify(workflowContext, times(variables.size())).setVariable(anyString(), any());
    }

    @Test
    void startWorkflow_WorkflowNotFound() {
        // Prepare test data: a code with no matching definition.
        String workflowCode = "non-existent";
        // Configure mock behaviour.
        when(workflowDefinitionRepository.findByCodeAndDeletedFalse(workflowCode)).thenReturn(null);
        // Run and expect a WORKFLOW_NOT_FOUND engine exception.
        WorkflowEngineException exception = assertThrows(WorkflowEngineException.class,
                () -> workflowEngine.startWorkflow(workflowCode, "test", null));
        assertTrue(exception.getMessage().contains(ResponseCode.WORKFLOW_NOT_FOUND.name()));
    }

    @Test
    void startWorkflow_WorkflowNotPublished() {
        // Prepare test data: a definition still in DRAFT state.
        String workflowCode = "draft-workflow";
        WorkflowDefinition definition = new WorkflowDefinition();
        definition.setCode(workflowCode);
        definition.setStatus(WorkflowDefinitionStatusEnum.DRAFT);
        // Configure mock behaviour.
        when(workflowDefinitionRepository.findByCodeAndDeletedFalse(workflowCode)).thenReturn(definition);
        // Run and expect a WORKFLOW_NOT_PUBLISHED engine exception.
        WorkflowEngineException exception = assertThrows(WorkflowEngineException.class,
                () -> workflowEngine.startWorkflow(workflowCode, "test", null));
        assertTrue(exception.getMessage().contains(ResponseCode.WORKFLOW_NOT_PUBLISHED.name()));
    }

    @Test
    void executeNode_Success() {
        // Prepare test data: a pending TASK node in a running instance.
        Long nodeInstanceId = 1L;
        NodeInstance nodeInstance = new NodeInstance();
        nodeInstance.setId(nodeInstanceId);
        nodeInstance.setNodeType(NodeTypeEnum.TASK);
        nodeInstance.setStatus(NodeStatusEnum.PENDING);
        WorkflowInstance instance = new WorkflowInstance();
        instance.setStatus(WorkflowInstanceStatusEnum.RUNNING);
        nodeInstance.setWorkflowInstance(instance);
        // Configure mock behaviour.
        when(nodeInstanceRepository.findById(nodeInstanceId)).thenReturn(Optional.of(nodeInstance));
        doNothing().when(taskNodeExecutor).execute(any(), any());
        // Run the method under test.
        workflowEngine.executeNode(nodeInstanceId);
        // Verify: the node completed, was timestamped and persisted.
        assertEquals(NodeStatusEnum.COMPLETED, nodeInstance.getStatus());
        assertNotNull(nodeInstance.getEndTime());
        verify(nodeInstanceRepository).findById(nodeInstanceId);
        verify(taskNodeExecutor).execute(any(), any());
        verify(nodeInstanceRepository).save(nodeInstance);
    }

    @Test
    void executeNode_NodeNotFound() {
        // Prepare test data: an id with no node instance behind it.
        Long nodeInstanceId = 999L;
        // Configure mock behaviour.
        when(nodeInstanceRepository.findById(nodeInstanceId)).thenReturn(Optional.empty());
        // Run and expect a WORKFLOW_NODE_NOT_FOUND engine exception.
        WorkflowEngineException exception = assertThrows(WorkflowEngineException.class,
                () -> workflowEngine.executeNode(nodeInstanceId));
        assertTrue(exception.getMessage().contains(ResponseCode.WORKFLOW_NODE_NOT_FOUND.name()));
    }

    @Test
    void executeNode_WorkflowNotRunning() {
        // Prepare test data: the owning instance is already COMPLETED.
        Long nodeInstanceId = 1L;
        NodeInstance nodeInstance = new NodeInstance();
        nodeInstance.setId(nodeInstanceId);
        nodeInstance.setNodeType(NodeTypeEnum.TASK);
        WorkflowInstance instance = new WorkflowInstance();
        instance.setStatus(WorkflowInstanceStatusEnum.COMPLETED);
        nodeInstance.setWorkflowInstance(instance);
        // Configure mock behaviour.
        when(nodeInstanceRepository.findById(nodeInstanceId)).thenReturn(Optional.of(nodeInstance));
        // Run and expect a WORKFLOW_INSTANCE_NOT_RUNNING engine exception.
        WorkflowEngineException exception = assertThrows(WorkflowEngineException.class,
                () -> workflowEngine.executeNode(nodeInstanceId));
        assertTrue(exception.getMessage().contains(ResponseCode.WORKFLOW_INSTANCE_NOT_RUNNING.name()));
    }

    @Test
    void terminateWorkflow_Success() {
        // Prepare test data: a running instance with one running TASK node.
        Long instanceId = 1L;
        String reason = "Test termination";
        WorkflowInstance instance = new WorkflowInstance();
        instance.setId(instanceId);
        instance.setStatus(WorkflowInstanceStatusEnum.RUNNING);
        NodeInstance runningNode = new NodeInstance();
        runningNode.setNodeType(NodeTypeEnum.TASK);
        runningNode.setStatus(NodeStatusEnum.RUNNING);
        // Configure mock behaviour.
        when(workflowInstanceRepository.findById(instanceId)).thenReturn(Optional.of(instance));
        when(nodeInstanceRepository.findByWorkflowInstanceIdAndStatus(instanceId, NodeStatusEnum.RUNNING))
                .thenReturn(Arrays.asList(runningNode));
        doNothing().when(taskNodeExecutor).terminate(any(), any());
        // Run the method under test.
        workflowEngine.terminateWorkflow(instanceId, reason);
        // Verify: instance and node are both TERMINATED with end times, the
        // reason is recorded, and both entities are persisted.
        assertEquals(WorkflowInstanceStatusEnum.TERMINATED, instance.getStatus());
        assertEquals(reason, instance.getError());
        assertNotNull(instance.getEndTime());
        assertEquals(NodeStatusEnum.TERMINATED, runningNode.getStatus());
        assertNotNull(runningNode.getEndTime());
        verify(workflowInstanceRepository).findById(instanceId);
        verify(nodeInstanceRepository).findByWorkflowInstanceIdAndStatus(instanceId, NodeStatusEnum.RUNNING);
        verify(taskNodeExecutor).terminate(any(), any());
        verify(nodeInstanceRepository).save(runningNode);
        verify(workflowInstanceRepository).save(instance);
    }

    @Test
    void pauseWorkflow_Success() {
        // Prepare test data: a running instance with one running node.
        Long instanceId = 1L;
        WorkflowInstance instance = new WorkflowInstance();
        instance.setId(instanceId);
        instance.setStatus(WorkflowInstanceStatusEnum.RUNNING);
        NodeInstance runningNode = new NodeInstance();
        runningNode.setStatus(NodeStatusEnum.RUNNING);
        // Configure mock behaviour.
        when(workflowInstanceRepository.findById(instanceId)).thenReturn(Optional.of(instance));
        when(nodeInstanceRepository.findByWorkflowInstanceIdAndStatus(instanceId, NodeStatusEnum.RUNNING))
                .thenReturn(Arrays.asList(runningNode));
        // Run the method under test.
        workflowEngine.pauseWorkflow(instanceId);
        // Verify: instance and its running node both transition to PAUSED.
        assertEquals(WorkflowInstanceStatusEnum.PAUSED, instance.getStatus());
        assertEquals(NodeStatusEnum.PAUSED, runningNode.getStatus());
        verify(workflowInstanceRepository).findById(instanceId);
        verify(nodeInstanceRepository).findByWorkflowInstanceIdAndStatus(instanceId, NodeStatusEnum.RUNNING);
        verify(nodeInstanceRepository).save(runningNode);
        verify(workflowInstanceRepository).save(instance);
    }

    @Test
    void resumeWorkflow_Success() {
        // Prepare test data: a paused instance with one paused node.
        Long instanceId = 1L;
        WorkflowInstance instance = new WorkflowInstance();
        instance.setId(instanceId);
        instance.setStatus(WorkflowInstanceStatusEnum.PAUSED);
        NodeInstance pausedNode = new NodeInstance();
        pausedNode.setId(2L);
        pausedNode.setStatus(NodeStatusEnum.PAUSED);
        pausedNode.setWorkflowInstance(instance);
        // Configure mock behaviour.
        when(workflowInstanceRepository.findById(instanceId)).thenReturn(Optional.of(instance));
        when(nodeInstanceRepository.findByWorkflowInstanceIdAndStatus(instanceId, NodeStatusEnum.PAUSED))
                .thenReturn(Arrays.asList(pausedNode));
        when(nodeInstanceRepository.findById(pausedNode.getId())).thenReturn(Optional.of(pausedNode));
        doNothing().when(taskNodeExecutor).execute(any(), any());
        // Run the method under test.
        workflowEngine.resumeWorkflow(instanceId);
        // Verify: instance and its paused node both transition back to RUNNING.
        assertEquals(WorkflowInstanceStatusEnum.RUNNING, instance.getStatus());
        assertEquals(NodeStatusEnum.RUNNING, pausedNode.getStatus());
        verify(workflowInstanceRepository).findById(instanceId);
        verify(nodeInstanceRepository).findByWorkflowInstanceIdAndStatus(instanceId, NodeStatusEnum.PAUSED);
        verify(nodeInstanceRepository).save(pausedNode);
        verify(workflowInstanceRepository).save(instance);
    }
}

View File

@ -1,36 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.qqchen.deploy.backend.workflow.entity.WorkflowDefinition;
import com.qqchen.deploy.backend.workflow.repository.IWorkflowDefinitionRepository;
import jakarta.annotation.Resource;
import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.core.io.ClassPathResource;

import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.HashMap;
@SpringBootTest
public class WorkflowShellTest {

    @Resource
    private WorkflowEngine workflowEngine;

    @Resource
    private IWorkflowDefinitionRepository workflowDefinitionRepository;

    @Resource
    private ObjectMapper objectMapper;

    /**
     * Smoke test: loads a shell-task workflow definition from the classpath,
     * persists it, then starts an instance through the engine.
     *
     * @throws Exception if the definition cannot be read, parsed or started
     */
    @Test
    public void testShellWorkflow() throws Exception {
        // 1. Load the workflow definition. Read through getInputStream()
        //    rather than ClassPathResource#getFile(): getFile() throws when
        //    the resource is packaged inside a jar, the stream always works.
        String json;
        try (InputStream in = new ClassPathResource("workflow-shell-test.json").getInputStream()) {
            json = new String(in.readAllBytes(), StandardCharsets.UTF_8);
        }
        WorkflowDefinition definition = objectMapper.readValue(json, WorkflowDefinition.class);
        workflowDefinitionRepository.save(definition);

        // 2. Start a workflow instance with a unique business key.
        workflowEngine.startWorkflow(definition.getId(), "TEST-" + System.currentTimeMillis(), new HashMap<>());
    }
}

View File

@ -1,65 +0,0 @@
package com.qqchen.deploy.backend.workflow.engine.executor;
import com.qqchen.deploy.backend.system.enums.LogLevelEnum;
import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContext;
import com.qqchen.deploy.backend.workflow.engine.executor.node.StartNodeExecutor;
import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
import com.qqchen.deploy.backend.workflow.enums.NodeStatusEnum;
import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.Mockito.verify;
/**
 * Unit tests for {@code StartNodeExecutor}: type registration, immediate
 * completion on execute, and the no-op validate/terminate contract.
 */
class StartNodeExecutorTest {

    @Mock
    private WorkflowContext workflowContext;

    @InjectMocks
    private StartNodeExecutor startNodeExecutor;

    @BeforeEach
    void setUp() {
        // Initialise the @Mock / @InjectMocks fields before each test.
        MockitoAnnotations.openMocks(this);
    }

    @Test
    void getNodeType_ShouldReturnStartType() {
        // The executor must register itself for START nodes.
        assertEquals(NodeTypeEnum.START, startNodeExecutor.getNodeType());
    }

    @Test
    void execute_ShouldCompleteNodeAndLog() {
        // Given a pending START node...
        NodeInstance instance = new NodeInstance();
        instance.setNodeType(NodeTypeEnum.START);
        instance.setStatus(NodeStatusEnum.PENDING);

        // ...executing it...
        startNodeExecutor.execute(instance, workflowContext);

        // ...completes it immediately and logs the completion.
        assertEquals(NodeStatusEnum.COMPLETED, instance.getStatus());
        verify(workflowContext).log("开始节点执行完成", LogLevelEnum.INFO);
    }

    @Test
    void validate_ShouldDoNothing() {
        // A start node carries no configuration, so any input is accepted
        // without throwing.
        startNodeExecutor.validate(null);
        startNodeExecutor.validate("");
        startNodeExecutor.validate("{}");
    }

    @Test
    void terminate_ShouldDoNothing() {
        // Terminating a start node is a no-op; it must not throw.
        NodeInstance instance = new NodeInstance();
        startNodeExecutor.terminate(instance, workflowContext);
    }
}

View File

@ -1,175 +0,0 @@
//package com.qqchen.deploy.backend.workflow.engine.executor;
//
//import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContext;
//import com.qqchen.deploy.backend.workflow.engine.executor.task.TaskExecutor;
//import com.qqchen.deploy.backend.workflow.entity.NodeInstance;
//import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
//import com.qqchen.deploy.backend.workflow.enums.NodeStatusEnum;
//import com.qqchen.deploy.backend.workflow.enums.NodeTypeEnum;
//import com.qqchen.deploy.backend.workflow.repository.INodeInstanceRepository;
//import org.junit.jupiter.api.BeforeEach;
//import org.junit.jupiter.api.Test;
//import org.mockito.InjectMocks;
//import org.mockito.Mock;
//import org.mockito.MockitoAnnotations;
//
//import java.util.HashMap;
//import java.util.Map;
//
//import static org.junit.jupiter.api.Assertions.*;
//import static org.mockito.ArgumentMatchers.any;
//import static org.mockito.Mockito.*;
//
//class TaskNodeExecutorTest {
//
// @Mock
// private INodeInstanceRepository nodeInstanceRepository;
//
// @Mock
// private Map<String, TaskExecutor> taskExecutors;
//
// @Mock
// private TaskExecutor shellTaskExecutor;
//
// @Mock
// private WorkflowContext context;
//
// @InjectMocks
// private TaskNodeExecutor taskNodeExecutor;
//
// @BeforeEach
// void setUp() {
// MockitoAnnotations.openMocks(this);
//
// // 设置任务执行器映射
// when(taskExecutors.get("SHELL")).thenReturn(shellTaskExecutor);
// }
//
// @Test
// void testExecute() {
// // 准备测试数据
// WorkflowInstance instance = new WorkflowInstance();
// instance.setId(1L);
//
// NodeInstance taskNode = new NodeInstance();
// taskNode.setId(1L);
// taskNode.setNodeType(NodeTypeEnum.TASK);
// taskNode.setStatus(NodeStatusEnum.PENDING);
// taskNode.setConfig("{\"type\":\"SHELL\",\"script\":\"echo hello\"}");
//
// Map<String, Object> input = new HashMap<>();
// input.put("param1", "value1");
//
// // 设置Mock行为
// when(context.getWorkflowInstance()).thenReturn(instance);
// when(context.getVariables()).thenReturn(input);
// when(shellTaskExecutor.execute(any(), any())).thenReturn(true);
//
// // 执行测试
// boolean result = taskNodeExecutor.execute(context, taskNode);
//
// // 验证结果
// assertTrue(result);
// assertEquals(NodeStatusEnum.RUNNING, taskNode.getStatus());
// assertNotNull(taskNode.getStartTime());
// verify(nodeInstanceRepository, times(1)).save(taskNode);
// verify(shellTaskExecutor, times(1)).execute(any(), any());
// }
//
// @Test
// void testExecuteWithError() {
// // 准备测试数据
// WorkflowInstance instance = new WorkflowInstance();
// instance.setId(1L);
//
// NodeInstance taskNode = new NodeInstance();
// taskNode.setId(1L);
// taskNode.setNodeType(NodeTypeEnum.TASK);
// taskNode.setStatus(NodeStatusEnum.PENDING);
// taskNode.setConfig("{\"type\":\"SHELL\",\"script\":\"echo hello\"}");
//
// // 设置Mock行为
// when(context.getWorkflowInstance()).thenReturn(instance);
// when(shellTaskExecutor.execute(any(), any())).thenReturn(false);
//
// // 执行测试
// boolean result = taskNodeExecutor.execute(context, taskNode);
//
// // 验证结果
// assertFalse(result);
// assertEquals(NodeStatusEnum.FAILED, taskNode.getStatus());
// assertNotNull(taskNode.getStartTime());
// assertNotNull(taskNode.getEndTime());
// verify(nodeInstanceRepository, times(1)).save(taskNode);
// verify(shellTaskExecutor, times(1)).execute(any(), any());
// }
//
// @Test
// void testCanExecute() {
// // 准备测试数据
// NodeInstance taskNode = new NodeInstance();
// taskNode.setNodeType(NodeTypeEnum.TASK);
// taskNode.setStatus(NodeStatusEnum.PENDING);
//
// // 执行测试
// boolean result = taskNodeExecutor.canExecute(taskNode);
//
// // 验证结果
// assertTrue(result);
// }
//
// @Test
// void testCannotExecuteWrongType() {
// // 准备测试数据
// NodeInstance startNode = new NodeInstance();
// startNode.setNodeType(NodeTypeEnum.START);
// startNode.setStatus(NodeStatusEnum.PENDING);
//
// // 执行测试
// boolean result = taskNodeExecutor.canExecute(startNode);
//
// // 验证结果
// assertFalse(result);
// }
//
// @Test
// void testCannotExecuteWrongStatus() {
// // 准备测试数据
// NodeInstance taskNode = new NodeInstance();
// taskNode.setNodeType(NodeTypeEnum.TASK);
// taskNode.setStatus(NodeStatusEnum.RUNNING);
//
// // 执行测试
// boolean result = taskNodeExecutor.canExecute(taskNode);
//
// // 验证结果
// assertFalse(result);
// }
//
// @Test
// void testExecuteWithInvalidConfig() {
// // 准备测试数据
// WorkflowInstance instance = new WorkflowInstance();
// instance.setId(1L);
//
// NodeInstance taskNode = new NodeInstance();
// taskNode.setId(1L);
// taskNode.setNodeType(NodeTypeEnum.TASK);
// taskNode.setStatus(NodeStatusEnum.PENDING);
// taskNode.setConfig("invalid json");
//
// // 设置Mock行为
// when(context.getWorkflowInstance()).thenReturn(instance);
//
// // 执行测试
// boolean result = taskNodeExecutor.execute(context, taskNode);
//
// // 验证结果
// assertFalse(result);
// assertEquals(NodeStatusEnum.FAILED, taskNode.getStatus());
// assertNotNull(taskNode.getStartTime());
// assertNotNull(taskNode.getEndTime());
// assertNotNull(taskNode.getError());
// verify(nodeInstanceRepository, times(1)).save(taskNode);
// }
//}

View File

@ -1,117 +0,0 @@
package com.qqchen.deploy.backend.workflow.service;

import com.qqchen.deploy.backend.workflow.engine.context.WorkflowContext;
import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
import com.qqchen.deploy.backend.workflow.service.impl.ConcurrentWorkflowVariableOperations;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@link ConcurrentWorkflowVariableOperations}.
 *
 * <p>Covers three behaviors visible in the implementation under test:
 * thread-safe variable writes, per-instance context caching on reads,
 * and cache invalidation via {@code clearContext}.</p>
 */
@ExtendWith(MockitoExtension.class)
class ConcurrentWorkflowVariableOperationsTest {

    /** Upper bound for the concurrent test; generous so it never flakes, bounded so a deadlock fails fast. */
    private static final long AWAIT_SECONDS = 10;

    @Mock
    private IWorkflowVariableService variableService;

    @Mock
    private TransactionTemplate transactionTemplate;

    @Mock
    private WorkflowContext.Factory contextFactory;

    @InjectMocks
    private ConcurrentWorkflowVariableOperations operations;

    private WorkflowInstance instance;
    private WorkflowContext context;

    @BeforeEach
    void setUp() {
        instance = new WorkflowInstance();
        instance.setId(1L);
        context = mock(WorkflowContext.class);
        // NOTE(review): these stubs are shared by all tests; if a test does not hit them,
        // MockitoExtension's strict stubbing may flag them — consider lenient() if that occurs.
        when(contextFactory.create(any(), anyMap())).thenReturn(context);
        // Make TransactionTemplate.execute() run its callback inline so the code under
        // test behaves as if a real transaction were active.
        when(transactionTemplate.execute(any())).thenAnswer(invocation ->
                invocation.getArgument(0, TransactionCallback.class).doInTransaction(null));
    }

    @Test
    void setVariables_ShouldHandleConcurrentAccess() throws InterruptedException {
        // Fire N writers in parallel, each setting a distinct key.
        int threadCount = 10;
        ExecutorService executorService = Executors.newFixedThreadPool(threadCount);
        CountDownLatch latch = new CountDownLatch(threadCount);
        try {
            for (int i = 0; i < threadCount; i++) {
                final int index = i;
                executorService.submit(() -> {
                    try {
                        Map<String, Object> variables = new HashMap<>();
                        variables.put("key" + index, "value" + index);
                        operations.setVariables(instance, variables);
                    } finally {
                        latch.countDown();
                    }
                });
            }
            // Bounded wait: an unbounded await() would hang the suite if a worker deadlocks.
            assertTrue(latch.await(AWAIT_SECONDS, TimeUnit.SECONDS),
                    "concurrent setVariables did not finish within " + AWAIT_SECONDS + "s");
        } finally {
            executorService.shutdown();
        }
        // Every thread's single-entry map must have been persisted exactly once.
        verify(variableService, times(threadCount)).setVariable(eq(instance), anyString(), any());
    }

    @Test
    void getVariables_ShouldReturnCachedValues() {
        Map<String, Object> expectedVariables = new HashMap<>();
        expectedVariables.put("key1", "value1");
        when(context.getVariables()).thenReturn(expectedVariables);

        // First read populates the per-instance context cache.
        Map<String, Object> result1 = operations.getVariables(instance);
        assertEquals(expectedVariables, result1);

        // Second read must be served from the cache.
        Map<String, Object> result2 = operations.getVariables(instance);
        assertEquals(expectedVariables, result2);

        // Backing service consulted only on the first call.
        verify(variableService, times(1)).getVariables(instance);
    }

    @Test
    void clearContext_ShouldRemoveFromCache() {
        // Prime the cache.
        operations.getVariables(instance);

        // Evict the cached context for this instance.
        operations.clearContext(instance);

        // Next read must go back to the service.
        operations.getVariables(instance);

        verify(variableService, times(2)).getVariables(instance);
    }
}

View File

@ -1,188 +0,0 @@
//package com.qqchen.deploy.backend.workflow.service;
//
//import com.qqchen.deploy.backend.framework.enums.ResponseCode;
//import com.qqchen.deploy.backend.framework.exception.BusinessException;
//import com.qqchen.deploy.backend.workflow.dto.WorkflowInstanceDTO;
//import com.qqchen.deploy.backend.workflow.entity.WorkflowDefinition;
//import com.qqchen.deploy.backend.workflow.entity.WorkflowInstance;
//import com.qqchen.deploy.backend.workflow.enums.WorkflowInstanceStatusEnum;
//import com.qqchen.deploy.backend.workflow.enums.WorkflowDefinitionStatusEnum;
//import com.qqchen.deploy.backend.workflow.repository.IWorkflowDefinitionRepository;
//import com.qqchen.deploy.backend.workflow.repository.IWorkflowInstanceRepository;
//import com.qqchen.deploy.backend.workflow.service.impl.WorkflowInstanceServiceImpl;
//import org.junit.jupiter.api.BeforeEach;
//import org.junit.jupiter.api.Test;
//import org.junit.jupiter.api.extension.ExtendWith;
//import org.mockito.InjectMocks;
//import org.mockito.Mock;
//import org.mockito.junit.jupiter.MockitoExtension;
//
//import java.util.HashMap;
//import java.util.Map;
//import java.util.Optional;
//
//import static org.junit.jupiter.api.Assertions.*;
//import static org.mockito.ArgumentMatchers.any;
//import static org.mockito.ArgumentMatchers.anyLong;
//import static org.mockito.Mockito.*;
//
//@ExtendWith(MockitoExtension.class)
//class WorkflowInstanceServiceTest {
//
// @Mock
// private IWorkflowDefinitionRepository workflowDefinitionRepository;
//
// @Mock
// private IWorkflowInstanceRepository workflowInstanceRepository;
//
// @Mock
// private IWorkflowVariableService workflowVariableService;
//
// @InjectMocks
// private WorkflowInstanceServiceImpl workflowInstanceService;
//
// private WorkflowDefinition definition;
// private WorkflowInstance instance;
// private Map<String, Object> variables;
//
// @BeforeEach
// void setUp() {
// definition = new WorkflowDefinition();
// definition.setId(1L);
// definition.setCode("TEST-WF");
// definition.setName("Test Workflow");
// definition.setStatus(WorkflowDefinitionStatusEnum.PUBLISHED);
// definition.setEnabled(true);
//
// instance = new WorkflowInstance();
// instance.setId(1L);
// instance.setDefinition(definition);
// instance.setBusinessKey("TEST-KEY");
// instance.setStatus(WorkflowInstanceStatusEnum.CREATED);
//
// variables = new HashMap<>();
// variables.put("key1", "value1");
// variables.put("key2", "value2");
// }
//
// @Test
// void createInstance_Success() {
// when(workflowDefinitionRepository.findById(anyLong())).thenReturn(Optional.of(definition));
// when(workflowInstanceRepository.save(any(WorkflowInstance.class))).thenReturn(instance);
//
// WorkflowInstanceDTO result = workflowInstanceService.createInstance(1L, "TEST-KEY", variables);
//
// assertNotNull(result);
// assertEquals(WorkflowInstanceStatusEnum.CREATED, result.getStatus());
// assertEquals("TEST-KEY", result.getBusinessKey());
// verify(workflowDefinitionRepository).findById(1L);
// verify(workflowInstanceRepository).save(any(WorkflowInstance.class));
// verify(workflowVariableService, times(2)).setVariable(anyLong(), anyString(), any());
// }
//
// @Test
// void createInstance_WorkflowNotFound_ThrowsException() {
// when(workflowDefinitionRepository.findById(anyLong())).thenReturn(Optional.empty());
//
// BusinessException exception = assertThrows(BusinessException.class,
// () -> workflowInstanceService.createInstance(1L, "TEST-KEY", variables));
// assertEquals(ResponseCode.WORKFLOW_NOT_FOUND, exception.getCode());
// }
//
// @Test
// void createInstance_WorkflowNotPublished_ThrowsException() {
// definition.setStatus(WorkflowDefinitionStatusEnum.DRAFT);
// when(workflowDefinitionRepository.findById(anyLong())).thenReturn(Optional.of(definition));
//
// BusinessException exception = assertThrows(BusinessException.class,
// () -> workflowInstanceService.createInstance(1L, "TEST-KEY", variables));
// assertEquals(ResponseCode.WORKFLOW_NOT_PUBLISHED, exception.getCode());
// }
//
// @Test
// void startInstance_Success() {
// when(workflowInstanceRepository.findById(anyLong())).thenReturn(Optional.of(instance));
// when(workflowInstanceRepository.save(any(WorkflowInstance.class))).thenReturn(instance);
//
// WorkflowInstanceDTO result = workflowInstanceService.startInstance(1L);
//
// assertNotNull(result);
// assertEquals(WorkflowInstanceStatusEnum.RUNNING, result.getStatus());
// assertNotNull(result.getStartTime());
// verify(workflowInstanceRepository).findById(1L);
// verify(workflowInstanceRepository).save(any(WorkflowInstance.class));
// }
//
// @Test
// void startInstance_NotFound_ThrowsException() {
// when(workflowInstanceRepository.findById(anyLong())).thenReturn(Optional.empty());
//
// BusinessException exception = assertThrows(BusinessException.class,
// () -> workflowInstanceService.startInstance(1L));
// assertEquals(ResponseCode.WORKFLOW_INSTANCE_NOT_FOUND, exception.getCode());
// }
//
// @Test
// void startInstance_NotCreated_ThrowsException() {
// instance.setStatus(WorkflowInstanceStatusEnum.RUNNING);
// when(workflowInstanceRepository.findById(anyLong())).thenReturn(Optional.of(instance));
//
// BusinessException exception = assertThrows(BusinessException.class,
// () -> workflowInstanceService.startInstance(1L));
// assertEquals(ResponseCode.WORKFLOW_INSTANCE_NOT_RUNNING, exception.getCode());
// }
//
// @Test
// void pauseInstance_Success() {
// instance.setStatus(WorkflowInstanceStatusEnum.RUNNING);
// when(workflowInstanceRepository.findById(anyLong())).thenReturn(Optional.of(instance));
// when(workflowInstanceRepository.save(any(WorkflowInstance.class))).thenReturn(instance);
//
// WorkflowInstanceDTO result = workflowInstanceService.pauseInstance(1L);
//
// assertNotNull(result);
// assertEquals(WorkflowInstanceStatusEnum.PAUSED, result.getStatus());
// verify(workflowInstanceRepository).findById(1L);
// verify(workflowInstanceRepository).save(any(WorkflowInstance.class));
// }
//
// @Test
// void resumeInstance_Success() {
// instance.setStatus(WorkflowInstanceStatusEnum.PAUSED);
// when(workflowInstanceRepository.findById(anyLong())).thenReturn(Optional.of(instance));
// when(workflowInstanceRepository.save(any(WorkflowInstance.class))).thenReturn(instance);
//
// WorkflowInstanceDTO result = workflowInstanceService.resumeInstance(1L);
//
// assertNotNull(result);
// assertEquals(WorkflowInstanceStatusEnum.RUNNING, result.getStatus());
// verify(workflowInstanceRepository).findById(1L);
// verify(workflowInstanceRepository).save(any(WorkflowInstance.class));
// }
//
// @Test
// void terminateInstance_Success() {
// instance.setStatus(WorkflowInstanceStatusEnum.RUNNING);
// when(workflowInstanceRepository.findById(anyLong())).thenReturn(Optional.of(instance));
// when(workflowInstanceRepository.save(any(WorkflowInstance.class))).thenReturn(instance);
//
// WorkflowInstanceDTO result = workflowInstanceService.terminateInstance(1L, "Test reason");
//
// assertNotNull(result);
// assertEquals(WorkflowInstanceStatusEnum.TERMINATED, result.getStatus());
// assertEquals("Test reason", result.getError());
// assertNotNull(result.getEndTime());
// verify(workflowInstanceRepository).findById(1L);
// verify(workflowInstanceRepository).save(any(WorkflowInstance.class));
// }
//
// @Test
// void terminateInstance_AlreadyTerminated_ThrowsException() {
// instance.setStatus(WorkflowInstanceStatusEnum.TERMINATED);
// when(workflowInstanceRepository.findById(anyLong())).thenReturn(Optional.of(instance));
//
// BusinessException exception = assertThrows(BusinessException.class,
// () -> workflowInstanceService.terminateInstance(1L, "Test reason"));
// assertEquals(ResponseCode.WORKFLOW_INSTANCE_NOT_RUNNING, exception.getCode());
// }
//}