diff --git a/backend/src/main/java/com/qqchen/deploy/backend/workflow/dto/nodeConfig/BaseTaskNodeConfig.java b/backend/src/main/java/com/qqchen/deploy/backend/workflow/dto/nodeConfig/BaseTaskNodeConfig.java
index 13f2f487..55909f29 100644
--- a/backend/src/main/java/com/qqchen/deploy/backend/workflow/dto/nodeConfig/BaseTaskNodeConfig.java
+++ b/backend/src/main/java/com/qqchen/deploy/backend/workflow/dto/nodeConfig/BaseTaskNodeConfig.java
@@ -12,113 +12,113 @@ import java.util.Map;
 @Data
 public class BaseTaskNodeConfig extends BaseNodeConfig {
 
-    /**
-     * 任务优先级(0-100)
-     */
-    @SchemaProperty(
-            title = "任务优先级",
-            description = "工作流节点的任务优先级",
-            minimum = 0,
-            maximum = 100
-    )
-    private Integer priority;
-
-    /**
-     * 超时时间(秒)
-     */
-    @SchemaProperty(
-            title = "超时时间",
-            description = "任务执行的最大时间(秒)",
-            minimum = 1,
-            maximum = 3600,
-            defaultValue = "300"
-    )
-    private Integer timeoutDuration;
-
-    /**
-     * 超时处理策略
-     */
-    @SchemaProperty(
-            title = "超时处理策略",
-            description = "任务超时后的处理策略",
-            enumValues = {"FAIL", "CONTINUE", "RETRY"},
-            enumNames = {"失败", "继续", "重试"},
-            defaultValue = "FAIL"
-    )
-    private String timeoutStrategy;
-
-    /**
-     * 重试次数
-     */
-    @SchemaProperty(
-            title = "重试次数",
-            description = "任务失败后的重试次数",
-            minimum = 0,
-            maximum = 10,
-            defaultValue = "0"
-    )
-    private Integer retryTimes;
-
-    /**
-     * 重试间隔(秒)
-     */
-    @SchemaProperty(
-            title = "重试间隔",
-            description = "两次重试之间的等待时间(秒)",
-            minimum = 1,
-            maximum = 3600,
-            defaultValue = "60"
-    )
-    private Integer retryInterval;
-
-    /**
-     * 重试策略
-     */
-    @SchemaProperty(
-            title = "重试策略",
-            description = "任务重试的策略",
-            enumValues = {"FIXED", "EXPONENTIAL"},
-            enumNames = {"固定间隔", "指数退避"},
-            defaultValue = "FIXED"
-    )
-    private String retryStrategy;
-
-
-    /**
-     * 是否异步
-     */
-    @SchemaProperty(
-            title = "是否异步",
-            description = "节点是否以异步方式执行",
-            defaultValue = "false"
-    )
-    private Boolean async;
-
-    /**
-     * 是否独占
-     */
-    @SchemaProperty(
-            title = "是否独占",
-            description = "节点是否以独占方式执行",
-            defaultValue = "true"
-    )
-    private Boolean exclusive;
-
-    /**
-     * 自定义属性
-     */
-    @SchemaProperty(
-            title = "自定义属性",
-            description = "节点的自定义属性配置"
-    )
-    private Map customProperties;
-
-    /**
-     * 文档说明
-     */
-    @SchemaProperty(
-            title = "文档说明",
-            description = "节点的详细文档说明"
-    )
-    private String documentation;
+//    /**
+//     * 任务优先级(0-100)
+//     */
+//    @SchemaProperty(
+//            title = "任务优先级",
+//            description = "工作流节点的任务优先级",
+//            minimum = 0,
+//            maximum = 100
+//    )
+//    private Integer priority;
+//
+//    /**
+//     * 超时时间(秒)
+//     */
+//    @SchemaProperty(
+//            title = "超时时间",
+//            description = "任务执行的最大时间(秒)",
+//            minimum = 1,
+//            maximum = 3600,
+//            defaultValue = "300"
+//    )
+//    private Integer timeoutDuration;
+//
+//    /**
+//     * 超时处理策略
+//     */
+//    @SchemaProperty(
+//            title = "超时处理策略",
+//            description = "任务超时后的处理策略",
+//            enumValues = {"FAIL", "CONTINUE", "RETRY"},
+//            enumNames = {"失败", "继续", "重试"},
+//            defaultValue = "FAIL"
+//    )
+//    private String timeoutStrategy;
+//
+//    /**
+//     * 重试次数
+//     */
+//    @SchemaProperty(
+//            title = "重试次数",
+//            description = "任务失败后的重试次数",
+//            minimum = 0,
+//            maximum = 10,
+//            defaultValue = "0"
+//    )
+//    private Integer retryTimes;
+//
+//    /**
+//     * 重试间隔(秒)
+//     */
+//    @SchemaProperty(
+//            title = "重试间隔",
+//            description = "两次重试之间的等待时间(秒)",
+//            minimum = 1,
+//            maximum = 3600,
+//            defaultValue = "60"
+//    )
+//    private Integer retryInterval;
+//
+//    /**
+//     * 重试策略
+//     */
+//    @SchemaProperty(
+//            title = "重试策略",
+//            description = "任务重试的策略",
+//            enumValues = {"FIXED", "EXPONENTIAL"},
+//            enumNames = {"固定间隔", "指数退避"},
+//            defaultValue = "FIXED"
+//    )
+//    private String retryStrategy;
+//
+//
+//    /**
+//     * 是否异步
+//     */
+//    @SchemaProperty(
+//            title = "是否异步",
+//            description = "节点是否以异步方式执行",
+//            defaultValue = "false"
+//    )
+//    private Boolean async;
+//
+//    /**
+//     * 是否独占
+//     */
+//    @SchemaProperty(
+//            title = "是否独占",
+//            description = "节点是否以独占方式执行",
+//            defaultValue = "true"
+//    )
+//    private Boolean exclusive;
+//
+//    /**
+//     * 自定义属性
+//     */
+//    @SchemaProperty(
+//            title = "自定义属性",
+//            description = "节点的自定义属性配置"
+//    )
+//    private Map customProperties;
+//
+//    /**
+//     * 文档说明
+//     */
+//    @SchemaProperty(
+//            title = "文档说明",
+//            description = "节点的详细文档说明"
+//    )
+//    private String documentation;
 }
diff --git a/backend/src/main/java/com/qqchen/deploy/backend/workflow/dto/nodeConfig/ScriptNodeConfig.java b/backend/src/main/java/com/qqchen/deploy/backend/workflow/dto/nodeConfig/ScriptNodeConfig.java
index ce42331f..bc50edd8 100644
--- a/backend/src/main/java/com/qqchen/deploy/backend/workflow/dto/nodeConfig/ScriptNodeConfig.java
+++ b/backend/src/main/java/com/qqchen/deploy/backend/workflow/dto/nodeConfig/ScriptNodeConfig.java
@@ -3,6 +3,7 @@ package com.qqchen.deploy.backend.workflow.dto.nodeConfig;
 import com.qqchen.deploy.backend.workflow.annotation.SchemaProperty;
 import lombok.Data;
 import lombok.EqualsAndHashCode;
+
 import java.util.List;
 import java.util.Map;
 
@@ -12,6 +13,16 @@ import java.util.Map;
 @Data
 @EqualsAndHashCode(callSuper = true)
 public class ScriptNodeConfig extends BaseTaskNodeConfig {
+
+
+    @SchemaProperty(
+            title = "执行委派者",
+            description = "执行委派者",
+            required = true,
+            defaultValue = "${shellTaskDelegate}"
+    )
+    private String delegate;
+
     /**
      * 脚本内容
      */
@@ -22,7 +33,7 @@ public class ScriptNodeConfig extends BaseTaskNodeConfig {
             required = true
     )
     private String script;
-    
+
     /**
      * 脚本语言
      */
@@ -35,7 +46,7 @@ public class ScriptNodeConfig extends BaseTaskNodeConfig {
             defaultValue = "shell"
     )
     private String language;
-    
+
     /**
      * 解释器路径
      */
@@ -45,7 +56,7 @@ public class ScriptNodeConfig extends BaseTaskNodeConfig {
             required = true
    )
     private String interpreter;
-    
+
     /**
      * 工作目录
      */
@@ -55,7 +66,7 @@ public class ScriptNodeConfig extends BaseTaskNodeConfig {
             defaultValue = "/tmp"
     )
     private String workingDirectory;
-    
+
     /**
      * 环境变量
      */
@@ -64,7 +75,7 @@ public class ScriptNodeConfig extends BaseTaskNodeConfig {
             description = "脚本执行时的环境变量"
     )
     private Map environment;
-    
+
     /**
      * 成功退出码
      */
@@ -74,7 +85,7 @@ public class ScriptNodeConfig extends BaseTaskNodeConfig {
             defaultValue = "0"
     )
     private Integer successExitCode;
-    
+
     /**
      * 支持的脚本语言列表
      */
@@ -83,5 +94,5 @@ public class ScriptNodeConfig extends BaseTaskNodeConfig {
             enumValues = {"shell", "python", "javascript", "groovy"}
     )
     private List supportedLanguages;
-    
+
 }
diff --git a/backend/src/main/java/com/qqchen/deploy/backend/workflow/enums/NodeTypeEnums.java b/backend/src/main/java/com/qqchen/deploy/backend/workflow/enums/NodeTypeEnums.java
index fd87906a..4e2d5726 100644
--- a/backend/src/main/java/com/qqchen/deploy/backend/workflow/enums/NodeTypeEnums.java
+++ b/backend/src/main/java/com/qqchen/deploy/backend/workflow/enums/NodeTypeEnums.java
@@ -5,7 +5,6 @@ import com.qqchen.deploy.backend.workflow.dto.graph.WorkflowNodeGraph;
 import lombok.Getter;
 
 import java.util.Arrays;
-import java.util.List;
 
 /**
  * 工作流节点类型枚举
@@ -24,18 +23,8 @@ public enum NodeTypeEnums {
     START_EVENT(
             "START_EVENT",          // 节点类型编码
             "开始节点",             // 节点显示名称
+            NodeCategoryEnums.EVENT,
             "工作流的起点",          // 节点简要描述
-            "标记流程的开始位置,可以定义流程启动条件和初始化流程变量", // 节点详细描述
-            Arrays.asList(          // 节点功能列表
-                    "标记流程的开始位置",
-                    "定义流程启动条件",
-                    "初始化流程变量"
-            ),
-            Arrays.asList(          // 使用场景列表
-                    "用户手动启动流程",
- "定时触发流程", - "外部系统调用启动" - ), new WorkflowNodeGraph() // UI配置 .setShape("circle") .setSize(40, 40) @@ -53,274 +42,180 @@ public enum NodeTypeEnums { END_EVENT( "END_EVENT", "结束节点", + NodeCategoryEnums.EVENT, "工作流的终点", - "标记流程的结束位置,可以定义流程结束时的清理操作和设置返回值", - Arrays.asList( - "标记流程的结束位置", - "定义结束时清理操作", - "设置流程结果和返回值" - ), - Arrays.asList( - "流程正常结束", - "流程异常终止", - "需要返回处理结果" - ), new WorkflowNodeGraph() .setShape("circle") .setSize(40, 40) .setStyle("#fff1f0", "#ff4d4f", "stop") .configPorts(Arrays.asList("in")) ), - - /** - * 用户任务节点 - * 需要人工处理的任务节点 - * 特点: - * - 需要用户交互 - * - 可以分配给特定用户或角色 - * - 支持审批、填写表单等操作 - */ - USER_TASK( - "USER_TASK", - "用户任务", - "人工处理任务", - "需要人工处理的任务节点,支持任务分配、表单填写、处理期限等功能", - Arrays.asList( - "分配任务给指定用户或角色", - "支持任务表单的填写", - "设置处理期限和提醒", - "支持任务的转办、委托、退回" - ), - Arrays.asList( - "审批流程", - "表单填写", - "人工审核", - "数据确认" - ), - new WorkflowNodeGraph() - .setShape("rect") - .setSize(120, 60) - .setStyle("#ffffff", "#1890ff", "user") - .configPorts(Arrays.asList("in", "out")) - ), - - /** - * 服务任务节点 - * 自动执行的系统服务任务 - * 特点: - * - 自动执行,无需人工干预 - * - 可以调用外部服务或系统API - * - 支持异步执行 - */ - SERVICE_TASK( - "SERVICE_TASK", - "服务任务", - "系统服务调用", - "自动执行的系统服务任务,支持同步/异步调用外部服务和系统API", - Arrays.asList( - "调用系统服务或外部接口", - "执行自动化操作", - "支持异步执行和结果回调", - "数据转换和处理" - ), - Arrays.asList( - "调用外部系统API", - "发送通知消息", - "数据同步处理", - "自动化操作" - ), - new WorkflowNodeGraph() - .setShape("rectangle") - .setSize(120, 60) - .setStyle("#ffffff", "#1890ff", "api") - .configPorts(Arrays.asList("in", "out")) - ), - - /** - * 脚本任务节点 - * 执行自定义脚本的任务节点 - * 特点: - * - 支持多种脚本语言(如Shell、Python等) - * - 可以执行自定义业务逻辑 - * - 适合复杂的数据处理和计算 - */ SCRIPT_TASK( "SCRIPT_TASK", - "脚本任务", - "脚本执行任务", - "执行自定义脚本的任务节点,支持多种脚本语言和复杂的业务逻辑", - Arrays.asList( - "执行自定义脚本代码", - "支持多种脚本语言", - "访问流程变量", - "支持复杂的业务逻辑" - ), - Arrays.asList( - "数据处理和转换", - "条件判断", - "自定义业务规则", - "系统集成" - ), + "结束节点", + NodeCategoryEnums.TASK, + "工作流的终点", new WorkflowNodeGraph() - .setShape("rectangle") - .setSize(120, 60) - .setStyle("#ffffff", "#1890ff", "code") - .configPorts(Arrays.asList("in", "out")) - ), - - /** - * 排他网关 - * 基于条件的分支控制 - * 特点: - * - 只会选择一个分支执行 - * - 需要设置分支条件 - * - 适合互斥的业务场景 - */ - EXCLUSIVE_GATEWAY( - "EXCLUSIVE_GATEWAY", - "排他网关", - "条件分支控制", - "基于条件的分支控制,只会选择一个分支执行", - Arrays.asList( - "根据条件选择一个分支执行", - "支持复杂的条件表达式", - "可以设置默认分支" - ), - Arrays.asList( - "条件判断", - "分支选择", - "业务规则路由" - ), - new WorkflowNodeGraph() - .setShape("diamond") - .setSize(50, 50) - .setStyle("#fff7e6", "#faad14", "fork") - .configPorts(Arrays.asList("in", "out")) - ), - - /** - * 并行网关 - * 并行执行多个分支 - * 特点: - * - 所有分支同时执行 - * - 等待所有分支完成才继续 - * - 适合并行处理的业务场景 - */ - PARALLEL_GATEWAY( - "PARALLEL_GATEWAY", - "并行网关", - "并行分支控制", - "将流程分成多个并行分支同时执行,等待所有分支完成后合并", - Arrays.asList( - "将流程分成多个并行分支", - "等待所有分支完成后合并", - "支持复杂的并行处理" - ), - Arrays.asList( - "并行审批", - "多任务同时处理", - "并行数据处理" - ), - new WorkflowNodeGraph() - .setShape("diamond") - .setSize(50, 50) - .setStyle("#fff7e6", "#faad14", "branches") - .configPorts(Arrays.asList("in", "out")) - ), - - /** - * 子流程节点 - * 嵌套的子流程 - * 特点: - * - 可以包含完整的子流程 - * - 支持流程的模块化和复用 - * - 可以独立部署和版本控制 - */ - SUBPROCESS( - "SUB_PROCESS", - "子流程", - "嵌入式子流程", - "在当前流程中嵌入子流程,支持流程的模块化和复用", - Arrays.asList( - "在当前流程中嵌入子流程", - "重用流程片段", - "支持事务处理", - "独立的变量范围" - ), - Arrays.asList( - "流程复用", - "模块化处理", - "事务管理", - "错误处理" - ), - new WorkflowNodeGraph() - .setShape("rectangle") - .setSize(120, 60) - .setStyle("#ffffff", "#1890ff", "apartment") - .configPorts(Arrays.asList("in", "out")) - ), - - /** - * 调用活动节点 - * 调用外部定义的流程 - * 特点: - * - 可以调用其他已定义的流程 - * - 
支持流程的复用 - * - 可以传递参数和接收返回值 - */ - CALL_ACTIVITY( - "CALL_ACTIVITY", - "调用活动", - "外部流程调用", - "调用外部定义的流程,支持跨系统流程调用和参数传递", - Arrays.asList( - "调用外部定义的流程", - "支持跨系统流程调用", - "传递和接收参数", - "支持异步调用" - ), - Arrays.asList( - "跨系统流程集成", - "公共流程复用", - "分布式流程处理", - "大型流程解耦" - ), - new WorkflowNodeGraph() - .setShape("rectangle") - .setSize(120, 60) - .setStyle("#ffffff", "#1890ff", "api") - .configPorts(Arrays.asList("in", "out")) + .setShape("circle") + .setSize(40, 40) + .setStyle("#fff1f0", "#ff4d4f", "stop") + .configPorts(Arrays.asList("in")) ); +// +// /** +// * 用户任务节点 +// * 需要人工处理的任务节点 +// * 特点: +// * - 需要用户交互 +// * - 可以分配给特定用户或角色 +// * - 支持审批、填写表单等操作 +// */ +// USER_TASK( +// "USER_TASK", +// "用户任务", +// "人工处理任务", +// new WorkflowNodeGraph() +// .setShape("rect") +// .setSize(120, 60) +// .setStyle("#ffffff", "#1890ff", "user") +// .configPorts(Arrays.asList("in", "out")) +// ), +// +// /** +// * 服务任务节点 +// * 自动执行的系统服务任务 +// * 特点: +// * - 自动执行,无需人工干预 +// * - 可以调用外部服务或系统API +// * - 支持异步执行 +// */ +// SERVICE_TASK( +// "SERVICE_TASK", +// "服务任务", +// "系统服务调用", +// new WorkflowNodeGraph() +// .setShape("rectangle") +// .setSize(120, 60) +// .setStyle("#ffffff", "#1890ff", "api") +// .configPorts(Arrays.asList("in", "out")) +// ), +// +// /** +// * 脚本任务节点 +// * 执行自定义脚本的任务节点 +// * 特点: +// * - 支持多种脚本语言(如Shell、Python等) +// * - 可以执行自定义业务逻辑 +// * - 适合复杂的数据处理和计算 +// */ +// SCRIPT_TASK( +// "SCRIPT_TASK", +// "脚本任务", +// "脚本执行任务", +// new WorkflowNodeGraph() +// .setShape("rectangle") +// .setSize(120, 60) +// .setStyle("#ffffff", "#1890ff", "code") +// .configPorts(Arrays.asList("in", "out")) +// ), +// +// /** +// * 排他网关 +// * 基于条件的分支控制 +// * 特点: +// * - 只会选择一个分支执行 +// * - 需要设置分支条件 +// * - 适合互斥的业务场景 +// */ +// EXCLUSIVE_GATEWAY( +// "EXCLUSIVE_GATEWAY", +// "排他网关", +// "条件分支控制", +// new WorkflowNodeGraph() +// .setShape("diamond") +// .setSize(50, 50) +// .setStyle("#fff7e6", "#faad14", "fork") +// .configPorts(Arrays.asList("in", "out")) +// ), +// +// /** +// * 并行网关 +// * 并行执行多个分支 +// * 特点: +// * - 所有分支同时执行 +// * - 等待所有分支完成才继续 +// * - 适合并行处理的业务场景 +// */ +// PARALLEL_GATEWAY( +// "PARALLEL_GATEWAY", +// "并行网关", +// "并行分支控制", +// new WorkflowNodeGraph() +// .setShape("diamond") +// .setSize(50, 50) +// .setStyle("#fff7e6", "#faad14", "branches") +// .configPorts(Arrays.asList("in", "out")) +// ), +// +// /** +// * 子流程节点 +// * 嵌套的子流程 +// * 特点: +// * - 可以包含完整的子流程 +// * - 支持流程的模块化和复用 +// * - 可以独立部署和版本控制 +// */ +// SUBPROCESS( +// "SUB_PROCESS", +// "子流程", +// "嵌入式子流程", +// new WorkflowNodeGraph() +// .setShape("rectangle") +// .setSize(120, 60) +// .setStyle("#ffffff", "#1890ff", "apartment") +// .configPorts(Arrays.asList("in", "out")) +// ), +// +// /** +// * 调用活动节点 +// * 调用外部定义的流程 +// * 特点: +// * - 可以调用其他已定义的流程 +// * - 支持流程的复用 +// * - 可以传递参数和接收返回值 +// */ +// CALL_ACTIVITY( +// "CALL_ACTIVITY", +// "调用活动", +// "调用外部定义的流程,支持跨系统流程调用和参数传递", +// new WorkflowNodeGraph() +// .setShape("rectangle") +// .setSize(120, 60) +// .setStyle("#ffffff", "#1890ff", "api") +// .configPorts(Arrays.asList("in", "out")) +// ); @JsonValue private final String code; // 节点类型编码 private final String name; // 节点显示名称 - private final String shortDesc; // 节点简要描述 + private final NodeCategoryEnums category; //分类 private final String description; // 节点详细描述 - private final List features; // 节点功能列表 - - private final List scenarios; // 使用场景列表 - private final WorkflowNodeGraph uiConfig; // UI配置 NodeTypeEnums( String code, String name, - String shortDesc, + NodeCategoryEnums category, String description, - List features, - List scenarios, 
             WorkflowNodeGraph uiConfig) {
         this.code = code;
         this.name = name;
-        this.shortDesc = shortDesc;
+        this.category = category;
         this.description = description;
-        this.features = features;
-        this.scenarios = scenarios;
         this.uiConfig = uiConfig;
     }
diff --git a/backend/src/main/java/com/qqchen/deploy/backend/workflow/hibernate/WorkflowGraphType.java b/backend/src/main/java/com/qqchen/deploy/backend/workflow/hibernate/WorkflowGraphType.java
index 26632a84..9c6251ba 100644
--- a/backend/src/main/java/com/qqchen/deploy/backend/workflow/hibernate/WorkflowGraphType.java
+++ b/backend/src/main/java/com/qqchen/deploy/backend/workflow/hibernate/WorkflowGraphType.java
@@ -7,6 +7,8 @@ import com.qqchen.deploy.backend.workflow.dto.graph.WorkflowDefinitionGraph;
 import org.hibernate.HibernateException;
 import org.hibernate.engine.spi.SharedSessionContractImplementor;
 import org.hibernate.usertype.UserType;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.Serializable;
 import java.sql.PreparedStatement;
@@ -18,6 +20,7 @@ import java.sql.Types;
  * 自定义 Hibernate 类型,用于处理 WorkflowGraph 的序列化和反序列化
  */
 public class WorkflowGraphType implements UserType {
+    private static final Logger log = LoggerFactory.getLogger(WorkflowGraphType.class);
     private final ObjectMapper objectMapper;
 
     public WorkflowGraphType() {
@@ -62,7 +65,9 @@ public class WorkflowGraphType implements UserType {
         try {
             return objectMapper.readValue(value, WorkflowDefinitionGraph.class);
         } catch (JsonProcessingException e) {
-            throw new HibernateException("Failed to convert String to WorkflowGraph: " + value, e);
+            // 记录错误日志,但不抛出异常,返回 null 使得单条记录的错误不影响整体查询
+            log.error("Failed to convert String to WorkflowGraph: " + value, e);
+            return null;
         }
     }
 
diff --git a/backend/src/main/java/com/qqchen/deploy/backend/workflow/util/BpmnConverter.java b/backend/src/main/java/com/qqchen/deploy/backend/workflow/util/BpmnConverter.java
index 311be6d2..63246359 100644
--- a/backend/src/main/java/com/qqchen/deploy/backend/workflow/util/BpmnConverter.java
+++ b/backend/src/main/java/com/qqchen/deploy/backend/workflow/util/BpmnConverter.java
@@ -100,20 +100,20 @@ public class BpmnConverter {
                 return createStartEvent(node);
             case END_EVENT:
                 return createEndEvent(node);
-            case SERVICE_TASK:
-                return createServiceTask(node);
-            case USER_TASK:
-                return createUserTask(node);
-            case SCRIPT_TASK:
-                return createScriptTask(node);
-            case EXCLUSIVE_GATEWAY:
-                return createExclusiveGateway(node);
-            case PARALLEL_GATEWAY:
-                return createParallelGateway(node);
-            case SUBPROCESS:
-                return createSubProcess(node);
-            case CALL_ACTIVITY:
-                return createCallActivity(node);
+//            case SERVICE_TASK:
+//                return createServiceTask(node);
+//            case USER_TASK:
+//                return createUserTask(node);
+//            case SCRIPT_TASK:
+//                return createScriptTask(node);
+//            case EXCLUSIVE_GATEWAY:
+//                return createExclusiveGateway(node);
+//            case PARALLEL_GATEWAY:
+//                return createParallelGateway(node);
+//            case SUBPROCESS:
+//                return createSubProcess(node);
+//            case CALL_ACTIVITY:
+//                return createCallActivity(node);
             default:
                 return null;
         }
diff --git a/backend/src/main/java/com/qqchen/deploy/backend/workflow/util/SchemaGenerator.java b/backend/src/main/java/com/qqchen/deploy/backend/workflow/util/SchemaGenerator.java
index a2781748..e02a2075 100644
--- a/backend/src/main/java/com/qqchen/deploy/backend/workflow/util/SchemaGenerator.java
+++ b/backend/src/main/java/com/qqchen/deploy/backend/workflow/util/SchemaGenerator.java
@@ -34,14 +34,14 @@ public class SchemaGenerator {
             case SCRIPT_TASK:
                 return ScriptNodeConfig.class;
             // 其他节点类型的配置类
-            case USER_TASK:
-            case SERVICE_TASK:
-            case EXCLUSIVE_GATEWAY:
-            case PARALLEL_GATEWAY:
-            case SUBPROCESS:
-            case CALL_ACTIVITY:
+//            case USER_TASK:
+//            case SERVICE_TASK:
+//            case EXCLUSIVE_GATEWAY:
+//            case PARALLEL_GATEWAY:
+//            case SUBPROCESS:
+//            case CALL_ACTIVITY:
             // TODO: 为其他节点类型添加对应的配置类
-                return BaseNodeConfig.class;
+//                return BaseNodeConfig.class;
             default:
                 return BaseNodeConfig.class;
         }
@@ -74,16 +74,16 @@
         // 设置基本信息
         node.put("code", nodeType.getCode());
         node.put("name", nodeType.getName());
-        node.put("description", nodeType.getShortDesc());
+//        node.put("description", nodeType.getShortDesc());
 
         // 添加详细信息
         ObjectNode details = mapper.createObjectNode();
         details.put("description", nodeType.getDescription());
 
         ArrayNode features = mapper.createArrayNode();
-        nodeType.getFeatures().forEach(features::add);
+//        nodeType.getFeatures().forEach(features::add);
         details.set("features", features);
         ArrayNode scenarios = mapper.createArrayNode();
-        nodeType.getScenarios().forEach(scenarios::add);
+//        nodeType.getScenarios().forEach(scenarios::add);
         details.set("scenarios", scenarios);
         node.set("details", details);
diff --git a/backend/src/main/java/com/qqchen/deploy/backend/workflow/util/WorkflowDefinitionGraph.java b/backend/src/main/java/com/qqchen/deploy/backend/workflow/util/WorkflowDefinitionGraph.java
index ddbd1306..8353e406 100644
--- a/backend/src/main/java/com/qqchen/deploy/backend/workflow/util/WorkflowDefinitionGraph.java
+++ b/backend/src/main/java/com/qqchen/deploy/backend/workflow/util/WorkflowDefinitionGraph.java
@@ -112,27 +112,27 @@ public class WorkflowDefinitionGraph {
         serviceConfig.put("url", "http://api.example.com/service");
         serviceConfig.put("method", "POST");
 
-        WorkflowDefinitionNode serviceNode = createNode(
-                "serviceTask1",
-                NodeTypeEnums.SERVICE_TASK,
-                "调用服务",
-                300, 100,
-                serviceConfig
-        );
-        nodes.add(serviceNode);
+//        WorkflowDefinitionNode serviceNode = createNode(
+//                "serviceTask1",
+//                NodeTypeEnums.SERVICE_TASK,
+//                "调用服务",
+//                300, 100,
+//                serviceConfig
+//        );
+//        nodes.add(serviceNode);
 
         // 用户任务节点
         Map userConfig = createNodeConfig("用户任务", "人工审批");
         userConfig.put("assignee", "admin");
 
-        WorkflowDefinitionNode userNode = createNode(
-                "userTask1",
-                NodeTypeEnums.USER_TASK,
-                "人工审批",
-                500, 100,
-                userConfig
-        );
-        nodes.add(userNode);
+//        WorkflowDefinitionNode userNode = createNode(
+//                "userTask1",
+//                NodeTypeEnums.USER_TASK,
+//                "人工审批",
+//                500, 100,
+//                userConfig
+//        );
+//        nodes.add(userNode);
 
         // 脚本任务节点
         Map scriptConfig = createNodeConfig("脚本任务", "执行脚本");
@@ -193,60 +193,60 @@
         serviceConfig.put("url", "http://api.example.com/data");
         serviceConfig.put("method", "GET");
 
-        WorkflowDefinitionNode serviceNode = createNode(
-                "serviceTask1",
-                NodeTypeEnums.SERVICE_TASK,
-                "获取数据",
-                250, 150,
-                serviceConfig
-        );
-        nodes.add(serviceNode);
+//        WorkflowDefinitionNode serviceNode = createNode(
+//                "serviceTask1",
+//                NodeTypeEnums.SERVICE_TASK,
+//                "获取数据",
+//                250, 150,
+//                serviceConfig
+//        );
+//        nodes.add(serviceNode);
 
         // 排他网关
-        WorkflowDefinitionNode exclusiveGateway = createNode(
-                "exclusiveGateway1",
-                NodeTypeEnums.EXCLUSIVE_GATEWAY,
-                "数据路由",
-                400, 150,
-                createNodeConfig("排他网关", "根据数据量选择处理方式")
-        );
-        nodes.add(exclusiveGateway);
+//        WorkflowDefinitionNode exclusiveGateway = createNode(
+//                "exclusiveGateway1",
+//                NodeTypeEnums.EXCLUSIVE_GATEWAY,
+//                "数据路由",
+//                400, 150,
+//                createNodeConfig("排他网关", "根据数据量选择处理方式")
+//        );
+//        nodes.add(exclusiveGateway);
 
         // 用户任务A(大数据量)
         Map userConfigA = createNodeConfig("用户任务A", "人工处理");
         userConfigA.put("assignee", "expert");
 
-        WorkflowDefinitionNode userNodeA = createNode(
-                "userTask1",
-                NodeTypeEnums.USER_TASK,
-                "人工处理",
-                550, 50,
-                userConfigA
-        );
-        nodes.add(userNodeA);
+//        WorkflowDefinitionNode userNodeA = createNode(
+//                "userTask1",
+//                NodeTypeEnums.USER_TASK,
+//                "人工处理",
+//                550, 50,
+//                userConfigA
+//        );
+//        nodes.add(userNodeA);
 
         // 用户任务B(小数据量)
         Map userConfigB = createNodeConfig("用户任务B", "快速处理");
         userConfigB.put("assignee", "operator");
 
-        WorkflowDefinitionNode userNodeB = createNode(
-                "userTask2",
-                NodeTypeEnums.USER_TASK,
-                "快速处理",
-                550, 250,
-                userConfigB
-        );
-        nodes.add(userNodeB);
+//        WorkflowDefinitionNode userNodeB = createNode(
+//                "userTask2",
+//                NodeTypeEnums.USER_TASK,
+//                "快速处理",
+//                550, 250,
+//                userConfigB
+//        );
+//        nodes.add(userNodeB);
 
         // 并行网关(合并)
-        WorkflowDefinitionNode parallelGateway = createNode(
-                "parallelGateway1",
-                NodeTypeEnums.PARALLEL_GATEWAY,
-                "并行处理",
-                700, 150,
-                createNodeConfig("并行网关", "并行处理数据")
-        );
-        nodes.add(parallelGateway);
+//        WorkflowDefinitionNode parallelGateway = createNode(
+//                "parallelGateway1",
+//                NodeTypeEnums.PARALLEL_GATEWAY,
+//                "并行处理",
+//                700, 150,
+//                createNodeConfig("并行网关", "并行处理数据")
+//        );
+//        nodes.add(parallelGateway);
 
         // 脚本任务A
         Map scriptConfigA = createNodeConfig("脚本任务A", "数据分析");