Commit
This commit is contained in:
parent
5f78d472ae
commit
d840effe9e
2
.gitignore
vendored
Normal file
@ -0,0 +1,2 @@
/backend/target/
/frontend/node_modules/
5
.idea/.gitignore
vendored
Normal file
@ -0,0 +1,5 @@
# Files ignored by default
/shelf/
/workspace.xml
# Editor-based HTTP client requests
/httpRequests/
12
.idea/flowable-devops.iml
Normal file
@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="WEB_MODULE" version="4">
  <component name="NewModuleRootManager">
    <content url="file://$MODULE_DIR$">
      <excludeFolder url="file://$MODULE_DIR$/.tmp" />
      <excludeFolder url="file://$MODULE_DIR$/temp" />
      <excludeFolder url="file://$MODULE_DIR$/tmp" />
    </content>
    <orderEntry type="inheritedJdk" />
    <orderEntry type="sourceFolder" forTests="false" />
  </component>
</module>
6
.idea/inspectionProfiles/Project_Default.xml
Normal file
@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
  <profile version="1.0">
    <option name="myName" value="Project Default" />
    <inspection_tool class="Eslint" enabled="true" level="WARNING" enabled_by_default="true" />
  </profile>
</component>
8
.idea/modules.xml
Normal file
@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="ProjectModuleManager">
    <modules>
      <module fileurl="file://$PROJECT_DIR$/.idea/flowable-devops.iml" filepath="$PROJECT_DIR$/.idea/flowable-devops.iml" />
    </modules>
  </component>
</project>
6
.idea/vcs.xml
Normal file
@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
  <component name="VcsDirectoryMappings">
    <mapping directory="" vcs="Git" />
  </component>
</project>
83
LICENSE
@ -1,83 +0,0 @@
|
||||
アトリビューション—シェアアライク 2.1
|
||||
(帰属—同一条件許諾)
|
||||
クリエイティブ・コモンズ及びクリエイティブ・コモンズ・ジャパンは法律事務所ではありません。この利用許諾条項の頒布は法的アドバイスその他の法律業務を行うものではありません。クリエイティブ・コモンズ及びクリエイティブ・コモンズ・ジャパンは、この利用許諾の当事者ではなく、ここに提供する情報及び本作品に関しいかなる保証も行いません。クリエイティブ・コモンズ及びクリエイティブ・コモンズ・ジャパンは、いかなる法令に基づこうとも、あなた又はいかなる第三者の損害(この利用許諾に関連する通常損害、特別損害を含みますがこれらに限られません)について責任を負いません。
|
||||
|
||||
利用許諾
|
||||
|
||||
本作品(下記に定義する)は、このクリエイティブ・コモンズ・パブリック・ライセンス日本版(以下「この利用許諾」という)の条項の下で提供される。本作品は、著作権法及び/又は他の適用法によって保護される。本作品をこの利用許諾又は著作権法の下で授権された以外の方法で使用することを禁止する。
|
||||
|
||||
許諾者は、かかる条項をあなたが承諾することとひきかえに、ここに規定される権利をあなたに付与する。本作品に関し、この利用許諾の下で認められるいずれかの利用を行うことにより、あなたは、この利用許諾(条項)に拘束されることを承諾し同意したこととなる。
|
||||
|
||||
第1条 定義
|
||||
|
||||
この利用許諾中の用語を以下のように定義する。その他の用語は、著作権法その他の法令で定める意味を持つものとする。
|
||||
|
||||
a. 「二次的著作物」とは、著作物を翻訳し、編曲し、若しくは変形し、または脚色し、映画化し、その他翻案することにより創作した著作物をいう。ただし、編集著作物又はデータベースの著作物(以下、この二つを併せて「編集著作物等」という。)を構成する著作物は、二次的著作物とみなされない。また、原著作者及び実演家の名誉又は声望を害する方法で原著作物を改作、変形もしくは翻案して生じる著作物は、この利用許諾の目的においては、二次的著作物に含まれない。
|
||||
b. 「許諾者」とは、この利用許諾の条項の下で本作品を提供する個人又は団体をいう。
|
||||
c. 「あなた」とは、この利用許諾に基づく権利を行使する個人又は団体をいう。
|
||||
d. 「原著作者」とは、本作品に含まれる著作物を創作した個人又は団体をいう。
|
||||
e. 「本作品」とは、この利用許諾の条項に基づいて利用する権利が付与される対象たる無体物をいい、著作物、実演、レコード、放送にかかる音又は影像、もしくは有線放送にかかる音又は影像をすべて含むものとする。
|
||||
f. 「ライセンス要素」とは、許諾者が選択し、この利用許諾に表示されている、以下のライセンス属性をいう:帰属・同一条件許諾
|
||||
|
||||
第2条 著作権等に対する制限
|
||||
|
||||
この利用許諾に含まれるいかなる条項によっても、許諾者は、あなたが著作権の制限(著作権法第30条〜49条)、著作者人格権に対する制限(著作権法第18条2項〜4項、第19条2項〜4項、第20条2項)、著作隣接権に対する制限(著作権法第102条)その他、著作権法又はその他の適用法に基づいて認められることとなる本作品の利用を禁止しない。
|
||||
|
||||
第3条 ライセンスの付与
|
||||
|
||||
この利用許諾の条項に従い、許諾者はあなたに、本作品に関し、すべての国で、ロイヤリティ・フリー、非排他的で、(第7条bに定める期間)継続的な以下のライセンスを付与する。ただし、あなたが以前に本作品に関するこの利用許諾の条項に違反したことがないか、あるいは、以前にこの利用許諾の条項に違反したがこの利用許諾に基づく権利を行使するために許諾者から明示的な許可を得ている場合に限る。
|
||||
|
||||
a. 本作品に含まれる著作物(以下「本著作物」という。)を複製すること(編集著作物等に組み込み複製することを含む。以下、同じ。)、
|
||||
b. 本著作物を翻案して二次的著作物を創作し、複製すること、
|
||||
c. 本著作物又はその二次的著作物の複製物を頒布すること(譲渡または貸与により公衆に提供することを含む。以下同じ。)、上演すること、演奏すること、上映すること、公衆送信を行うこと(送信可能化を含む。以下、同じ。)、公に口述すること、公に展示すること、
|
||||
d. 本作品に含まれる実演を、録音・録画すること(録音・録画物を増製することを含む)、録音・録画物により頒布すること、公衆送信を行うこと、
|
||||
e. 本作品に含まれるレコードを、複製すること、頒布すること、公衆送信を行うこと、
|
||||
f. 本作品に含まれる、放送に係る音又は影像を、複製すること、その放送を受信して再放送すること又は有線放送すること、その放送又はこれを受信して行う有線放送を受信して送信可能化すること、そのテレビジョン放送又はこれを受信して行う有線放送を受信して、影像を拡大する特別の装置を用いて公に伝達すること、
|
||||
g. 本作品に含まれる、有線放送に係る音又は影像を、複製すること、その有線放送を受信して放送し、又は再有線放送すること、その有線放送を受信して送信可能化すること、その有線テレビジョン放送を受信して、影像を拡大する特別の装置を用いて公に伝達すること、
|
||||
|
||||
上記に定められた本作品又はその二次的著作物の利用は、現在及び将来のすべての媒体・形式で行うことができる。あなたは、他の媒体及び形式で本作品又はその二次的著作物を利用するのに技術的に必要な変更を行うことができる。許諾者は本作品又はその二次的著作物に関して、この利用許諾に従った利用については自己が有する著作者人格権及び実演家人格権を行使しない。許諾者によって明示的に付与されない全ての権利は、留保される。
|
||||
|
||||
第4条 受領者へのライセンス提供
|
||||
|
||||
あなたが本作品をこの利用許諾に基づいて利用する度毎に、許諾者は本作品又は本作品の二次的著作物の受領者に対して、直接、この利用許諾の下であなたに許可された利用許諾と同じ条件の本作品のライセンスを提供する。
|
||||
|
||||
第5条 制限
|
||||
|
||||
上記第3条及び第4条により付与されたライセンスは、以下の制限に明示的に従い、制約される。
|
||||
|
||||
a. あなたは、この利用許諾の条項に基づいてのみ、本作品を利用することができる。
|
||||
b. あなたは、この利用許諾又はこの利用許諾と同一のライセンス要素を含むほかのクリエイティブ・コモンズ・ライセンス(例えば、この利用許諾の新しいバージョン、又はこの利用許諾と同一のライセンス要素の他国籍ライセンスなど)に基づいてのみ、本作品の二次的著作物を利用することができる。
|
||||
c. あなたは、本作品を利用するときは、この利用許諾の写し又はURI(Uniform Resource Identifier)を本作品の複製物に添付又は表示しなければならない。
|
||||
d. あなたは、本作品の二次的著作物を利用するときは、この利用許諾又はこの利用許諾と同一のライセンス要素を含むほかのクリエイティブ・コモンズ・ライセンスの写し又はURIを本作品の二次的著作物の複製物に添付または表示しなければならない。
|
||||
e. あなたは、この利用許諾条項及びこの利用許諾によって付与される利用許諾受領者の権利の行使を変更又は制限するような、本作品又はその二次的著作物に係る条件を提案したり課したりしてはならない。
|
||||
f. あなたは、本作品を再利用許諾することができない。
|
||||
g. あなたは、本作品又はその二次的著作物の利用にあたって、この利用許諾及びその免責条項に関する注意書きの内容を変更せず、見やすい態様でそのまま掲載しなければならない。
|
||||
h. あなたは、この利用許諾条項と矛盾する方法で本著作物へのアクセス又は使用をコントロールするような技術的保護手段を用いて、本作品又はその二次的著作物を利用してはならない。
|
||||
i. 本条の制限は、本作品又はその二次的著作物が編集著作物等に組み込まれた場合にも、その組み込まれた作品に関しては適用される。しかし、本作品又はその二次的著作物が組み込まれた編集著作物等そのものは、この利用許諾の条項に従う必要はない。
|
||||
j. あなたは、本作品、その二次的著作物又は本作品を組み込んだ編集著作物等を利用する場合には、(1)本作品に係るすべての著作権表示をそのままにしておかなければならず、(2)原著作者及び実演家のクレジットを、合理的な方式で、(もし示されていれば原著作者及び実演家の名前又は変名を伝えることにより、)表示しなければならず、(3)本作品のタイトルが示されている場合には、そのタイトルを表示しなければならず、(4)許諾者が本作品に添付するよう指定したURIがあれば、合理的に実行可能な範囲で、そのURIを表示しなければならず(ただし、そのURIが本作品の著作権表示またはライセンス情報を参照するものでないときはこの限りでない。)(5)二次的著作物の場合には、当該二次的著作物中の原著作物の利用を示すクレジットを表示しなければならない。これらのクレジットは、合理的であればどんな方法でも行うことができる。しかしながら、二次的著作物又は編集著作物等の場合には、少なくとも他の同様の著作者のクレジットが表示される箇所で当該クレジットを表示し、少なくとも他の同様の著作者のクレジットと同程度に目立つ方法であることを要する。
|
||||
k. もし、あなたが、本作品の二次的著作物、又は本作品もしくはその二次的著作物を組み込んだ編集著作物等を創作した場合、あなたは、許諾者からの通知があれば、実行可能な範囲で、要求に応じて、二次的著作物又は編集著作物等から、許諾者又は原著作者への言及をすべて除去しなければならない。
|
||||
|
||||
第6条 責任制限
|
||||
|
||||
この利用許諾の両当事者が書面にて別途合意しない限り、許諾者は本作品を現状のまま提供するものとし、明示・黙示を問わず、本作品に関していかなる保証(特定の利用目的への適合性、第三者の権利の非侵害、欠陥の不存在を含むが、これに限られない。)もしない。
|
||||
|
||||
この利用許諾又はこの利用許諾に基づく本作品の利用から発生する、いかなる損害(許諾者が、本作品にかかる著作権、著作隣接権、著作者人格権、実演家人格権、商標権、パブリシティ権、不正競争防止法その他関連法規上保護される利益を有する者からの許諾を得ることなく本作品の利用許諾を行ったことにより発生する損害、プライバシー侵害又は名誉毀損から発生する損害等の通常損害、及び特別損害を含むが、これに限らない。)についても、許諾者に故意又は重大な過失がある場合を除き、許諾者がそのような損害発生の可能性を知らされたか否かを問わず、許諾者は、あなたに対し、これを賠償する責任を負わない。
|
||||
|
||||
第7条 終了
|
||||
|
||||
a. この利用許諾は、あなたがこの利用許諾の条項に違反すると自動的に終了する。しかし、本作品、その二次的著作物又は編集著作物等をあなたからこの利用許諾に基づき受領した第三者に対しては、その受領者がこの利用許諾を遵守している限り、この利用許諾は終了しない。第1条、第2条、第4条から第9条は、この利用許諾が終了してもなお有効に存続する。
|
||||
b. 上記aに定める場合を除き、この利用許諾に基づくライセンスは、本作品に含まれる著作権法上の権利が存続するかぎり継続する。
|
||||
c. 許諾者は、上記aおよびbに関わらず、いつでも、本作品をこの利用許諾に基づいて頒布することを将来に向かって中止することができる。ただし、許諾者がこの利用許諾に基づく頒布を将来に向かって中止した場合でも、この利用許諾に基づいてすでに本作品を受領した利用者に対しては、この利用許諾に基づいて過去及び将来に与えられるいかなるライセンスも終了することはない。また、上記によって終了しない限り、この利用許諾は、全面的に有効なものとして継続する。
|
||||
|
||||
第8条 その他
|
||||
|
||||
a. この利用許諾のいずれかの規定が、適用法の下で無効及び/又は執行不能の場合であっても、この利用許諾の他の条項の有効性及び執行可能性には影響しない。
|
||||
b. この利用許諾の条項の全部又は一部の放棄又はその違反に関する承諾は、これが書面にされ、当該放棄又は承諾に責任を負う当事者による署名又は記名押印がなされない限り、行うことができない。
|
||||
c. この利用許諾は、当事者が本作品に関して行った最終かつ唯一の合意の内容である。この利用許諾は、許諾者とあなたとの相互の書面による合意なく修正されない。
|
||||
d. この利用許諾は日本語により提供される。この利用許諾の英語その他の言語への翻訳は参照のためのものに過ぎず、この利用許諾の日本語版と翻訳との間に何らかの齟齬がある場合には日本語版が優先する。
|
||||
|
||||
第9条 準拠法
|
||||
|
||||
この利用許諾は、日本法に基づき解釈される。
|
||||
|
||||
本作品がクリエイティブ・コモンズ・ライセンスに基づき利用許諾されたことを公衆に示すという限定された目的の場合を除き、許諾者も被許諾者もクリエイティブ・コモンズの事前の書面による同意なしに「クリエイティブ・コモンズ」の商標若しくは関連商標又はクリエイティブ・コモンズのロゴを使用しないものとします。使用が許可された場合はクリエイティブ・コモンズおよびクリエイティブ・コモンズ・ジャパンのウェブサイト上に公表される、又はその他随時要求に従い利用可能となる、クリエイティブ・コモンズの当該時点における商標使用指針を遵守するものとします。クリエイティブ・コモンズは https://creativecommons.org/から、クリエイティブ・コモンズ・ジャパンはhttp://www.creativecommons.jp/から連絡することができます。
|
||||
386
README.md
@ -1,2 +1,386 @@
# flowable-devops
# Workflow Platform Technical Documentation

**Project**: Visual workflow platform built on Flowable
**Version**: v1.0
**Last updated**: 2025-01-12

---

## 📚 Documentation Index

### Core Design Documents

| Document | Description | Key Content |
|------|------|----------|
| [01-架构总览](./01-架构总览.md) | Overall system architecture | Technology choices, system architecture, MVP scope, development plan |
| [02-后端技术设计](./02-后端技术设计.md) | Detailed backend implementation | Node registration, expression engine, BPMN conversion, REST API |
| [03-前端技术设计](./03-前端技术设计.md) | Detailed frontend implementation | ReactFlow canvas, field mapping selector, state management |
| [04-数据模型设计](./04-数据模型设计.md) | Database design | Business table structure, Flowable tables, index optimization |
| [05-开发规范](./05-开发规范.md) | Coding and collaboration standards | Naming conventions, Git workflow, testing requirements |

---

## 🎯 Quick Navigation

### I want to learn about...

**Overall architecture**
- 👉 Start with [01-架构总览](./01-架构总览.md)
- Covers technology choices, system architecture, and the core data flow

**Backend development**
- 👉 See [02-后端技术设计](./02-后端技术设计.md)
- How are nodes registered? How are expressions evaluated? How is JSON converted to BPMN?

**Frontend development**
- 👉 See [03-前端技术设计](./03-前端技术设计.md)
- How is ReactFlow used? How is the field mapping selector implemented?

**Database design**
- 👉 See [04-数据模型设计](./04-数据模型设计.md)
- Which tables exist? Why JSONB? How are queries optimized?

**Coding standards**
- 👉 See [05-开发规范](./05-开发规范.md)
- How do we name things, commit code, and write tests?

---

## 🔑 Key Design Points

### 1. Why Flowable?

```
✅ The open-source edition is feature-complete (no enterprise license needed)
✅ Built-in approval capability (User Task)
✅ Simple Spring Boot integration
✅ Abundant Chinese-language material and an active community
✅ Supports the BPMN 2.0 standard

vs Camunda:
- Flowable has a stronger form engine
- Flowable's open-source edition is more complete
- Camunda is slightly faster, but the gap is small

vs Conductor:
- Conductor has no approval capability
- Conductor is lighter but offers fewer features
- If you don't need approvals, Conductor is the better choice
```

### 2. Key Technical Challenges and Solutions

#### Challenge 1: How does the frontend know what an upstream node outputs?

**Solution**: use a static `outputSchema` (JSON Schema)

```typescript
// Each node type declares its output structure
const httpRequestMetadata = {
  id: 'http_request',
  outputSchema: {
    type: 'object',
    properties: {
      statusCode: { type: 'number' },
      body: { type: 'object' },
      headers: { type: 'object' },
    },
  },
};

// The frontend builds a field tree from the schema
// The user can then pick: nodes.httpRequest.output.body.email
```

**Pros**:
- ✅ Fast; no node execution required
- ✅ Type-safe
- ✅ Enables autocompletion

**Cons**:
- ⚠️ If the actual output deviates from the schema, the mismatch only shows up at runtime

#### Challenge 2: How is the field mapping selector implemented?

**Core component**: `FieldMappingSelector.tsx`

```tsx
// 1. Compute the upstream nodes
const upstreamNodes = edges
  .filter(edge => edge.target === currentNode.id)
  .map(edge => nodes.find(n => n.id === edge.source));

// 2. Build the field tree from each node's outputSchema
const fieldTree = upstreamNodes.map(node => ({
  title: node.data.name,
  children: buildFieldTree(nodeType.outputSchema.properties, `nodes.${node.id}.output`)
}));

// 3. The user picks a field and an expression is generated
// Selected field:       nodes.httpRequest.output.body.email
// Generated expression: ${nodes.httpRequest.output.body.email}
```

#### Challenge 3: Expression evaluation performance

**Solution**:
```java
// 1. Use JUEL instead of a full JavaScript engine (much faster)
// 2. Cache compiled expressions
private final Map<String, ValueExpression> expressionCache = new ConcurrentHashMap<>();

// 3. Fast path: strings without an expression are returned as-is
if (!expression.contains("${")) {
    return expression;
}
```

**Benchmark results**:
- JUEL: ~50,000 QPS
- GraalVM JS: ~2,000 QPS
- Conclusion: JUEL is fast enough
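For illustration, here is a minimal, self-contained sketch of resolving a `${...}` string against the `nodes/workflow/env` context with Jakarta EL. The `SimpleExpressionResolver` class, the delimiter handling, and the `ELProcessor` usage are assumptions made for this example, not the project's actual `ExpressionEngine`; a production version would also cache compiled expressions as noted above.

```java
import jakarta.el.ELProcessor;
import java.util.Map;

/** Illustrative only; class and method names are not the project's real API. */
public class SimpleExpressionResolver {

    /** Resolves a single "${...}" template against the execution context. */
    public Object resolve(String expression, Map<String, Object> context) {
        // Fast path: plain strings are returned untouched.
        if (expression == null || !expression.contains("${")) {
            return expression;
        }

        // Strip the ${ ... } delimiters; Jakarta EL evaluates the inner expression.
        String inner = expression.substring(expression.indexOf("${") + 2, expression.lastIndexOf('}'));

        ELProcessor elp = new ELProcessor();
        // Expose the three context roots used throughout these docs: nodes / workflow / env.
        elp.defineBean("nodes", context.getOrDefault("nodes", Map.of()));
        elp.defineBean("workflow", context.getOrDefault("workflow", Map.of()));
        elp.defineBean("env", context.getOrDefault("env", Map.of()));

        // Map property access works out of the box, e.g. nodes.httpRequest.output.body.email
        return elp.eval(inner);
    }
}
```

Called as `resolve("${nodes.httpRequest.output.body.email}", context)` this returns the value stored by the upstream node, while `resolve("hello", context)` takes the fast path and returns the string unchanged.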
#### Challenge 4: Workflow definition format

**Decision**: expose JSON to users and convert it to BPMN XML internally

```
Frontend (JSON) ←→ Backend conversion layer ←→ Flowable (BPMN XML)

Rationale:
✅ JSON is frontend-friendly
✅ JSON is easy to version-control
✅ BPMN is Flowable's native format
✅ Clear layering with well-defined responsibilities
```
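To make the conversion layer concrete, here is a hedged sketch that builds a trivial start → service task → end process with Flowable's `org.flowable.bpmn.model` API and serializes it to BPMN XML. The `TinyWorkflowConverter` name and the `nodeType`/`nodeConfig` field extensions are illustrative (they follow the notes in WARP.md); the real `WorkflowConverter` also emits `UserTask` and `ExclusiveGateway` elements and sets `conditionExpression` on conditional edges.

```java
import org.flowable.bpmn.converter.BpmnXMLConverter;
import org.flowable.bpmn.model.BpmnModel;
import org.flowable.bpmn.model.EndEvent;
import org.flowable.bpmn.model.FieldExtension;
import org.flowable.bpmn.model.ImplementationType;
import org.flowable.bpmn.model.Process;
import org.flowable.bpmn.model.SequenceFlow;
import org.flowable.bpmn.model.ServiceTask;
import org.flowable.bpmn.model.StartEvent;

import java.nio.charset.StandardCharsets;
import java.util.List;

/** Illustrative-only converter for a start -> one node -> end workflow. */
public class TinyWorkflowConverter {

    public String toBpmnXml(String processKey, String nodeType, String nodeConfigJson) {
        Process process = new Process();
        process.setId(processKey);

        StartEvent start = new StartEvent();
        start.setId("start");
        process.addFlowElement(start);

        // Every business node becomes a ServiceTask delegating to the shared executor bean.
        ServiceTask task = new ServiceTask();
        task.setId("node1");
        task.setName(nodeType);
        task.setImplementationType(ImplementationType.IMPLEMENTATION_TYPE_DELEGATEEXPRESSION);
        task.setImplementation("${genericNodeExecutor}");

        // Pass node metadata to the executor through field extensions (names follow WARP.md).
        FieldExtension typeField = new FieldExtension();
        typeField.setFieldName("nodeType");
        typeField.setStringValue(nodeType);
        FieldExtension configField = new FieldExtension();
        configField.setFieldName("nodeConfig");
        configField.setStringValue(nodeConfigJson);
        task.setFieldExtensions(List.of(typeField, configField));
        process.addFlowElement(task);

        EndEvent end = new EndEvent();
        end.setId("end");
        process.addFlowElement(end);

        process.addFlowElement(new SequenceFlow("start", "node1"));
        process.addFlowElement(new SequenceFlow("node1", "end"));
        // A conditional edge would additionally call SequenceFlow#setConditionExpression("${...}").

        BpmnModel model = new BpmnModel();
        model.addProcess(process);
        return new String(new BpmnXMLConverter().convertToXML(model), StandardCharsets.UTF_8);
    }
}
```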

---

## 📊 Architecture Diagrams

### Overall Architecture

```
┌─────────────────────────────────────────┐
│      Frontend (React + ReactFlow)       │
│  - Visual editor                        │
│  - Node configuration panel             │
│  - Field mapping selector ⭐⭐⭐          │
└──────────────┬──────────────────────────┘
               │ REST API
┌──────────────▼──────────────────────────┐
│         Spring Boot application         │
│  ┌────────────────────────────────────┐ │
│  │  REST API layer                    │ │
│  └────────────────────────────────────┘ │
│  ┌────────────────────────────────────┐ │
│  │  Business logic layer              │ │
│  │  - NodeTypeRegistry (node types)   │ │
│  │  - ExpressionEngine (expressions)⭐ │ │
│  │  - WorkflowConverter (JSON→BPMN)⭐  │ │
│  └────────────────────────────────────┘ │
│  ┌────────────────────────────────────┐ │
│  │  Flowable Engine                   │ │
│  │  - Process execution               │ │
│  │  - Task management                 │ │
│  │  - History                         │ │
│  └────────────────────────────────────┘ │
└──────────────┬──────────────────────────┘
               │
┌──────────────▼──────────────────────────┐
│               PostgreSQL                │
│  - Business tables                      │
│    (workflow_definitions, ...)          │
│  - Flowable tables (ACT_*)              │
└─────────────────────────────────────────┘
```

### Core Data Flow: Workflow Execution

```
1. The user clicks "Execute"
   ↓
2. The frontend calls POST /api/workflows/{id}/execute
   ↓
3. The backend initializes the execution context:
   {
     "workflow": { "input": {...} },
     "nodes": {},   // node outputs are stored here
     "env": {...}
   }
   ↓
4. Flowable starts a process instance
   ↓
5. Nodes execute in topological order:
   a. ExpressionEngine resolves the expressions
   b. The node implementation class is invoked
   c. The output is saved to nodes.{nodeId}.output
   ↓
6. Data passes between nodes through expressions:
   ${nodes.node1.output.body.email}
   ↓
7. Execution pauses at a User Task (approval)
   ↓
8. Execution resumes once the approval is completed
   ↓
9. The process ends
```
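A sketch of steps 3 and 4 above: building the initial execution context and handing it to Flowable as process variables. `RuntimeService.startProcessInstanceByKey` is Flowable's actual API; the surrounding `WorkflowExecutionStarter` service and the exact variable layout are illustrative assumptions.

```java
import org.flowable.engine.RuntimeService;
import org.flowable.engine.runtime.ProcessInstance;
import org.springframework.stereotype.Service;

import java.util.HashMap;
import java.util.Map;

/** Illustrative entry point; the real service also records a workflow_executions row. */
@Service
public class WorkflowExecutionStarter {

    private final RuntimeService runtimeService;

    public WorkflowExecutionStarter(RuntimeService runtimeService) {
        this.runtimeService = runtimeService;
    }

    public String execute(String processDefinitionKey, Map<String, Object> input) {
        // Step 3: initialize the execution context { workflow, nodes, env }.
        Map<String, Object> variables = new HashMap<>();
        variables.put("workflow", Map.of("input", input));
        variables.put("nodes", new HashMap<String, Object>()); // node outputs land here
        variables.put("env", System.getenv());

        // Step 4: Flowable starts the process instance; ServiceTasks run synchronously,
        // and execution pauses automatically at the first UserTask (approval).
        ProcessInstance instance =
                runtimeService.startProcessInstanceByKey(processDefinitionKey, variables);
        return instance.getId();
    }
}
```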

---

## 🚀 MVP Feature List (Phase 1)

### Must-have Features

**1. Workflow editor**
- [x] Drag nodes from the left panel onto the canvas
- [x] Connect nodes with edges
- [x] Delete nodes and edges
- [x] Save workflows

**2. Node configuration panel**
- [x] Dynamic forms (generated from the node type)
- [x] Field mapping selector (TreeSelect showing upstream node outputs) ⭐⭐⭐
- [x] Expression input field

**3. Node types (5)**
- [x] HTTP Request
- [x] Send Email
- [x] Set Variable
- [x] Condition (IF/ELSE)
- [x] Approval

**4. Workflow execution**
- [x] Manual trigger
- [x] View execution logs
- [x] View node input/output

**5. Approval**
- [x] Pending approval task list
- [x] Approval form
- [x] Approve / reject

### Out of Scope (Phase 2)

- ❌ Scheduled triggers (cron)
- ❌ Webhook triggers
- ❌ Loop nodes (forEach)
- ❌ Parallel execution
- ❌ Workflow versioning
- ❌ Permission management (basic authentication only)
- ❌ Monitoring dashboard

---

## 📅 Development Plan (12 Weeks)

### Phase 1: Technical Validation (Weeks 1-2)
- Flowable PoC
- Expression engine validation
- ReactFlow canvas validation
- Environment setup

### Phase 2: Backend Core (Weeks 3-4)
- Node type registration system
- Expression engine
- JSON → BPMN converter
- HTTP Request and Set Variable nodes

### Phase 3: Frontend Core (Weeks 5-6)
- ReactFlow canvas
- Node configuration panel
- **Field mapping selector** (the most critical piece)

### Phase 4: Execution Engine (Weeks 7-8)
- Workflow execution
- Logging
- Error handling

### Phase 5: Approval (Weeks 9-10)
- User Task integration
- Approval form
- Task list

### Phase 6: Testing and Launch (Weeks 11-12)
- Integration tests
- Performance tests
- Deployment

---

## 🎓 Learning Resources

### Flowable
- Official docs: https://flowable.com/open-source/docs/
- GitHub: https://github.com/flowable/flowable-engine
- Chinese tutorials: https://www.cnblogs.com/catcher1994/tag/Flowable/

### ReactFlow
- Official docs: https://reactflow.dev/
- Examples: https://reactflow.dev/examples

### Expression Engines
- JUEL docs: https://juel.sourceforge.net/guide/index.html
- GraalVM JS: https://www.graalvm.org/javascript/

---

## ❓ FAQ

### Q1: Why not just use N8N?

**A**: N8N is a Node.js stack and we need a Java stack. In addition, we need to:
- Integrate with existing Java systems
- Customize the approval workflows
- Keep full control over data and security

### Q2: Is Flowable's learning curve steep?

**A**: There is a learning curve, but we wrap it:
- Users never need to understand BPMN (we use JSON)
- Developers only need the basic concepts
- Complete documentation and examples are provided

### Q3: Is the performance sufficient?

**A**: Verified so far:
- Expression evaluation: 50,000+ QPS
- Flowable: 1,000+ TPS
- Frontend canvas: handles 100+ nodes without lag

### Q4: How do I add a new node type?

**A**: It is straightforward:
1. Create a node implementation class (implement the `WorkflowNode` interface)
2. Add the `@Component` annotation
3. Define its metadata (fields, output schema)
4. It is registered automatically at Spring startup
5. The frontend picks it up automatically

```java
@Component
public class MyCustomNode implements WorkflowNode {
    @Override
    public NodeTypeMetadata getMetadata() {
        // Define the metadata
    }

    @Override
    public NodeExecutionResult execute(NodeInput input, NodeExecutionContext context) {
        // Execution logic
    }
}
```
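For reference, registration at startup can be as simple as collecting every Spring bean that implements `WorkflowNode`. This sketch assumes the `WorkflowNode` interface shown above and an `id` accessor on `NodeTypeMetadata`; the project's actual `NodeTypeRegistry` (which also loads node types from the database) may differ.

```java
import jakarta.annotation.PostConstruct;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Component;

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

/** Illustrative registry; it only covers discovery of @Component node beans. */
@Component
public class NodeTypeRegistrySketch {

    private final ApplicationContext applicationContext;
    private final Map<String, WorkflowNode> nodesById = new ConcurrentHashMap<>();

    public NodeTypeRegistrySketch(ApplicationContext applicationContext) {
        this.applicationContext = applicationContext;
    }

    @PostConstruct
    void discoverNodes() {
        // At Spring startup, every @Component implementing WorkflowNode is picked up automatically.
        for (WorkflowNode node : applicationContext.getBeansOfType(WorkflowNode.class).values()) {
            nodesById.put(node.getMetadata().getId(), node); // assumes the metadata exposes an id
        }
    }

    public WorkflowNode get(String nodeTypeId) {
        return nodesById.get(nodeTypeId);
    }
}
```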

---

## 📞 Contact

- **Tech lead**: [your name]
- **Email**: [your email]
- **Documentation updates**: please open an Issue for questions or suggestions

---

**Last updated**: 2025-01-12
**Document version**: v1.0
96
WARP.md
Normal file
@ -0,0 +1,96 @@
# WARP.md

This file provides guidance to WARP (warp.dev) when working with code in this repository.

1. Overview
- The repository currently consists mainly of design documents (docs/); the goal is a visual workflow platform (ReactFlow frontend, Spring Boot + Flowable 7 backend, MySQL 8 database).
- Planned code layout (the documents are authoritative):
  - backend/ (Spring Boot 3 + WebFlux + Flowable + Jakarta EL)
  - frontend/ (React 18 + TypeScript + Vite + AntD + Zustand)
  - docs/ (architecture, backend, frontend, and implementation-plan documents)
- Unified technical decisions (MVP): MySQL 8 as the database; Jakarta EL (JUEL) as the expression engine; synchronous execution (the global async executor is disabled) with approval nodes pausing naturally; conditional branching via ExclusiveGateway plus conditional edges.

2. Common Commands (run per sub-project)
- Prerequisites
  - Java 17+, Node 18+, Docker (containers are recommended for local MySQL/Redis).

1) Development dependency services (MySQL + Redis)
- Using an external MySQL instance (no Docker):
  - Host: 172.22.222.111:3306
  - Database name: flowable-devops
  - Username: root (adjust if yours differs)
  - Password: inject it securely via the SPRING_DATASOURCE_PASSWORD environment variable; never write it into code or shell history
- Using an external Redis instance (no Docker):
  - Host: 172.22.222.111:6379
  - Database: db5 (spring.redis.database=5)
  - Password: inject it securely via the SPRING_REDIS_PASSWORD environment variable; never write it into code or shell history

2) Backend (backend/, Maven)
- Install dependencies and build (skip tests):
  mvn -q -DskipTests package
- Run the dev server (reads application.yml):
  mvn spring-boot:run
- Run all tests:
  mvn -q test
- Run a single test class:
  mvn -q -Dtest=ExpressionEngineTest test
- Run a single test method:
  mvn -q -Dtest=ExpressionEngineTest#testSimpleExpression test
- Common environment variables (examples):
  - SPRING_DATASOURCE_URL=jdbc:mysql://172.22.222.111:3306/flowable-devops?useSSL=false&allowPublicKeyRetrieval=true&serverTimezone=UTC
  - SPRING_DATASOURCE_USERNAME=root
  - SPRING_DATASOURCE_PASSWORD=<set this environment variable securely on your machine>
  - SPRING_REDIS_HOST=172.22.222.111
  - SPRING_REDIS_PORT=6379
  - SPRING_REDIS_PASSWORD=<set this environment variable securely on your machine>
  - SPRING_REDIS_DATABASE=5

3) Frontend (frontend/, Vite)
- Install dependencies:
  npm i
- Start the dev server:
  npm run dev
- Build for production:
  npm run build
- Lint (ESLint, if configured):
  npx eslint .
- Frontend API base URL (example):
  - Development: VITE_API_BASE_URL=http://localhost:8080

3. End-to-End Development Flow (local)
1) Start MySQL/Redis (see the dependency services in section 2).
2) Start the backend: run mvn spring-boot:run in backend/.
3) Start the frontend: run npm run dev in frontend/ (default http://localhost:3000).
4) Create or edit a workflow in the frontend editor and save the JSON; the backend converts the JSON to BPMN and deploys it to Flowable; execution pauses at approval nodes and resumes once approved.

4. Architecture at a Glance (the big picture)
- Frontend
  - Canvas: ReactFlow; node palette plus configuration panel (dynamic forms + field mapping + expression input).
  - Field mapping: the field tree is built from upstream nodes' outputSchema; expressions are always ${...} strings.
  - Main pages: workflow list, workflow editor, execution history, approval center.
- Backend
  - Controllers: /api/workflows (create/update/execute/history), /api/node-types (types/metadata), /api/tasks (approvals).
  - Conversion layer: JSON → BPMN, generating ServiceTask (delegateExpression pointing to ${genericNodeExecutor}), UserTask, and ExclusiveGateway + conditional SequenceFlow.
  - Execution: GenericNodeExecutor reads the current ServiceTask's FieldExtension values (nodeType/nodeConfig), invokes the matching node implementation (WorkflowNode), and writes node input/output to the nodes process variable and the log table (see the sketch after this section).
  - Expressions: Jakarta EL (JUEL), Map property access only; the context contains nodes/workflow/env; strings without ${} take the fast path.
- Data:
  - Flowable ACT_* tables (created by the engine itself)
  - Business tables (MySQL): workflow_definitions (definition JSON), node_types (fields/output_schema JSON), workflow_executions (input JSON), node_execution_logs (input/output JSON)
- Execution strategy
  - Synchronous execution for the MVP; approvals (User Task) pause and resume naturally; global async execution and queues come in later iterations.
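A minimal sketch of the GenericNodeExecutor flow described above. The Flowable JavaDelegate and FieldExtension APIs are real; the nested WorkflowNode/NodeTypeRegistry placeholders and the simplified run() signature are assumptions standing in for the project's actual interfaces, and the logging step is only hinted at in a comment.

```java
import org.flowable.bpmn.model.FieldExtension;
import org.flowable.bpmn.model.ServiceTask;
import org.flowable.engine.delegate.DelegateExecution;
import org.flowable.engine.delegate.JavaDelegate;
import org.springframework.stereotype.Component;

import java.util.HashMap;
import java.util.Map;

@Component("genericNodeExecutor")
public class GenericNodeExecutorSketch implements JavaDelegate {

    private final NodeTypeRegistry registry;

    public GenericNodeExecutorSketch(NodeTypeRegistry registry) {
        this.registry = registry;
    }

    @Override
    public void execute(DelegateExecution execution) {
        // Read nodeType / nodeConfig from the ServiceTask's field extensions.
        ServiceTask task = (ServiceTask) execution.getCurrentFlowElement();
        Map<String, String> fields = new HashMap<>();
        for (FieldExtension field : task.getFieldExtensions()) {
            fields.put(field.getFieldName(), field.getStringValue());
        }

        // Look up and run the node implementation (call simplified for this sketch).
        WorkflowNode node = registry.get(fields.get("nodeType"));
        Object output = node.run(fields.get("nodeConfig"), execution.getVariables());

        // Store the output under nodes.{nodeId}.output so downstream expressions can read it.
        @SuppressWarnings("unchecked")
        Map<String, Object> nodes = (Map<String, Object>) execution.getVariable("nodes");
        nodes.put(task.getId(), Map.of("output", output));
        execution.setVariable("nodes", nodes);
        // The real executor also writes input/output/duration/status to node_execution_logs.
    }

    /** Placeholder for the project's node abstraction (signature simplified). */
    interface WorkflowNode {
        Object run(String configJson, Map<String, Object> variables);
    }

    /** Placeholder for the project's registry of node implementations. */
    interface NodeTypeRegistry {
        WorkflowNode get(String nodeType);
    }
}
```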
5. Notes for Future Collaboration in this Repository (key agent hints)
- Use Jakarta EL (JUEL) only; do not switch to a JavaScript expression engine.
- ServiceTask must always use delegateExpression (${genericNodeExecutor}); do not use ImplementationType.CLASS, which would lose Spring injection.
- edge.condition uses JUEL expressions; the conversion layer must set conditionExpression on conditional edges in the BPMN.
- Node extension: implement the WorkflowNode interface and register it in NodeTypeRegistry; always provide an outputSchema so the frontend can build field mappings.
- Observability: write each node's input/output/duration/status to node_execution_logs to support troubleshooting and performance acceptance.
- Database: MySQL 8; JSON columns are used for storage and retrieval only, with no complex queries; use utf8mb4 throughout.

6. Document Index (read these first)
- docs/01-架构总览.md (the big picture, unified on MySQL + JUEL)
- docs/02-后端技术设计.md (Flowable integration notes, conditional-branch and executor fixes, MySQL DDL)
- docs/03-前端技术设计.md (field mapping / expression consistency)
- docs/04-数据模型设计.md (JSON Schemas: WorkflowDefinition/Node/Edge/NodeTypeMetadata, etc.)
- docs/05-API契约.md (request/response contracts for /api/workflows, /api/node-types, /api/tasks)
- docs/99-最终修正落地方案.md (final joint PM + architecture version, including the PoC plan and acceptance criteria)
157
backend/README.md
Normal file
@ -0,0 +1,157 @@
# Flowable DevOps Backend

Backend of the visual workflow platform, built on Spring Boot 3 + WebFlux + Flowable 7.

## Tech Stack

- **Spring Boot 3.1.5** - main framework
- **WebFlux** - reactive web framework
- **Flowable 7.0.1** - workflow engine
- **MySQL 8** - primary database
- **Redis** - cache
- **Jakarta EL (JUEL)** - expression engine
- **JPA/Hibernate** - ORM framework

## Key Features

- Synchronous execution strategy (global async executor disabled)
- Approval nodes pause and wait naturally
- Jakarta EL as the single expression engine
- ServiceTask delegates to genericNodeExecutor via delegateExpression

## Requirements

- Java 17+
- Maven 3.6+
- MySQL 8.0
- Redis

## Quick Start

### 1. Environment Variables

Set the following environment variables:

```bash
# Database configuration
export SPRING_DATASOURCE_URL="jdbc:mysql://172.22.222.111:3306/flowable-devops?useSSL=false&allowPublicKeyRetrieval=true&serverTimezone=UTC&characterEncoding=utf8mb4"
export SPRING_DATASOURCE_USERNAME="root"
export SPRING_DATASOURCE_PASSWORD="your-password"

# Redis configuration
export SPRING_REDIS_HOST="172.22.222.111"
export SPRING_REDIS_PORT="6379"
export SPRING_REDIS_PASSWORD="your-redis-password"
export SPRING_REDIS_DATABASE="5"
```

### 2. Build the Project

```bash
# If you run into Maven repository issues, use the project-local settings.xml
mvn -s settings.xml clean package -DskipTests

# Or use the default settings (make sure your Maven repository configuration is correct)
mvn clean package -DskipTests
```

### 3. Run the Application

```bash
# Run in development mode
mvn -s settings.xml spring-boot:run

# Or run the jar directly
java -jar target/flowable-devops-backend-1.0-SNAPSHOT.jar
```

The application starts at http://localhost:8080.

### 4. Run the Tests

```bash
# Run all tests
mvn -s settings.xml test

# Run a single test class
mvn -s settings.xml -Dtest=ExpressionEngineTest test

# Run a single test method
mvn -s settings.xml -Dtest=ExpressionEngineTest#testSimpleExpression test
```

## API Endpoints

- **Health check**: `GET /actuator/health`
- **Workflow management**: `/api/workflows`
- **Node types**: `/api/node-types`
- **Task approval**: `/api/tasks`

## Configuration Notes

### Key application.yml Settings

- `flowable.async-executor-activate: false` - disables the async executor
- `flowable.history-level: full` - full history recording
- `app.workflow.expression.engine: jakarta-el` - use the Jakarta EL expression engine

### Database

The project uses MySQL 8 as its primary database; Flowable creates the tables it needs automatically.

Business tables:
- `workflow_definitions` - workflow definitions
- `node_types` - node type metadata
- `workflow_executions` - workflow execution records
- `node_execution_logs` - node execution logs

## Development Guide

### Directory Layout

```
src/main/java/com/flowable/devops/
├── FlowableDevopsApplication.java   # Main application class
├── config/                          # Configuration classes
│   ├── FlowableConfig.java          # Flowable configuration
│   └── WebFluxConfig.java           # WebFlux configuration
├── controller/                      # Controllers
├── service/                         # Service layer
├── entity/                          # Entities
├── repository/                      # Data access layer
├── workflow/                        # Workflow-related code
│   └── node/                        # Node implementations
└── expression/                      # Expression engine
```

### Important Notes

1. Every ServiceTask must use `delegateExpression="${genericNodeExecutor}"`
2. Conditional branching is implemented with ExclusiveGateway + conditional edges
3. Expressions uniformly use Jakarta EL in the `${...}` format
4. Approval nodes use UserTask and pause naturally (see the approval sketch below)
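To illustrate note 4 (approvals via UserTask), here is a hedged sketch of how the `/api/tasks` layer could list and complete pending approval tasks with Flowable's `TaskService`. The `TaskService` query and `complete` calls are Flowable's real API; the `ApprovalTaskService` class and the `approved`/`approvalComment` variable names are illustrative, not the project's confirmed contract.

```java
import org.flowable.engine.TaskService;
import org.flowable.task.api.Task;
import org.springframework.stereotype.Service;

import java.util.List;
import java.util.Map;

@Service
public class ApprovalTaskService {

    private final TaskService taskService;

    public ApprovalTaskService(TaskService taskService) {
        this.taskService = taskService;
    }

    /** Pending approval tasks for a user; the process instance is paused at these UserTasks. */
    public List<Task> pendingTasks(String assignee) {
        return taskService.createTaskQuery()
                .taskAssignee(assignee)
                .orderByTaskCreateTime().desc()
                .list();
    }

    /** Approve or reject; completing the task resumes the paused process instance. */
    public void decide(String taskId, boolean approved, String comment) {
        taskService.complete(taskId, Map.of(
                "approved", approved,                              // illustrative variable name
                "approvalComment", comment == null ? "" : comment));
    }
}
```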

## Troubleshooting

### Maven Dependency Download Issues

If you run into Maven repository access problems, use the project-local settings.xml:

```bash
mvn -s settings.xml [command]
```

### Database Connection Issues

Make sure that:
1. The MySQL service is running
2. The `flowable-devops` database has been created
3. User permissions are configured correctly
4. Network connectivity is fine

### Redis Connection Issues

Make sure that:
1. The Redis service is running
2. The configured database index is accessible
3. The password is configured correctly (if one is set)
156
backend/backend-fixed.log
Normal file
@ -0,0 +1,156 @@
|
||||
[INFO] Scanning for projects...
|
||||
[INFO]
|
||||
[INFO] ----------------< com.flowable:flowable-devops-backend >----------------
|
||||
[INFO] Building Flowable DevOps Backend 1.0-SNAPSHOT
|
||||
[INFO] from pom.xml
|
||||
[INFO] --------------------------------[ jar ]---------------------------------
|
||||
[INFO]
|
||||
[INFO] >>> spring-boot:3.1.5:run (default-cli) > test-compile @ flowable-devops-backend >>>
|
||||
[WARNING] The artifact mysql:mysql-connector-java:jar:8.0.33 has been relocated to com.mysql:mysql-connector-j:jar:8.0.33: MySQL Connector/J artifacts moved to reverse-DNS compliant Maven 2+ coordinates.
|
||||
[INFO]
|
||||
[INFO] --- resources:3.3.1:resources (default-resources) @ flowable-devops-backend ---
|
||||
[INFO] Copying 1 resource from src/main/resources to target/classes
|
||||
[INFO] Copying 0 resource from src/main/resources to target/classes
|
||||
[INFO]
|
||||
[INFO] --- compiler:3.11.0:compile (default-compile) @ flowable-devops-backend ---
|
||||
[INFO] Nothing to compile - all classes are up to date
|
||||
[INFO]
|
||||
[INFO] --- resources:3.3.1:testResources (default-testResources) @ flowable-devops-backend ---
|
||||
[INFO] Copying 1 resource from src/test/resources to target/test-classes
|
||||
[INFO]
|
||||
[INFO] --- compiler:3.11.0:testCompile (default-testCompile) @ flowable-devops-backend ---
|
||||
[INFO] Changes detected - recompiling the module! :source
|
||||
[INFO] Compiling 5 source files with javac [debug release 17] to target/test-classes
|
||||
[INFO] 由于在类路径中发现了一个或多个处理程序,因此启用了
|
||||
批注处理。未来发行版的 javac 可能会禁用批注处理,
|
||||
除非至少按名称指定了一个处理程序 (-processor),
|
||||
或指定了搜索路径 (--processor-path, --processor-module-path),
|
||||
或显式启用了批注处理 (-proc:only, -proc:full)。
|
||||
可使用 -Xlint:-options 隐藏此消息。
|
||||
可使用 -proc:none 禁用批注处理。
|
||||
[INFO]
|
||||
[INFO] <<< spring-boot:3.1.5:run (default-cli) < test-compile @ flowable-devops-backend <<<
|
||||
[INFO]
|
||||
[INFO]
|
||||
[INFO] --- spring-boot:3.1.5:run (default-cli) @ flowable-devops-backend ---
|
||||
[INFO] Attaching agents: []
|
||||
13:54:24.171 [main] INFO com.flowable.devops.FlowableDevopsApplication -- 启动 Flowable DevOps Backend 应用程序...
|
||||
13:54:24.173 [main] INFO com.flowable.devops.FlowableDevopsApplication -- 基于 Spring Boot 3 + WebFlux + Flowable 7
|
||||
13:54:24.173 [main] INFO com.flowable.devops.FlowableDevopsApplication -- 可视化工作流平台 - 后端服务
|
||||
|
||||
. ____ _ __ _ _
|
||||
/\\ / ___'_ __ _ _(_)_ __ __ _ \ \ \ \
|
||||
( ( )\___ | '_ | '_| | '_ \/ _` | \ \ \ \
|
||||
\\/ ___)| |_)| | | | | || (_| | ) ) ) )
|
||||
' |____| .__|_| |_|_| |_\__, | / / / /
|
||||
=========|_|==============|___/=/_/_/_/
|
||||
:: Spring Boot :: (v3.1.5)
|
||||
|
||||
2025-10-13 13:54:24 [main] INFO c.f.devops.FlowableDevopsApplication - Starting FlowableDevopsApplication using Java 21.0.5 with PID 73239 (/Users/qichen/Public/development/flowable-devops/backend/target/classes started by qichen in /Users/qichen/Public/development/flowable-devops/backend)
|
||||
2025-10-13 13:54:24 [main] DEBUG c.f.devops.FlowableDevopsApplication - Running with Spring Boot v3.1.5, Spring v6.0.13
|
||||
2025-10-13 13:54:24 [main] INFO c.f.devops.FlowableDevopsApplication - The following 1 profile is active: "dev"
|
||||
2025-10-13 13:54:24 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode
|
||||
2025-10-13 13:54:24 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data JPA repositories in DEFAULT mode.
|
||||
2025-10-13 13:54:24 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 65 ms. Found 4 JPA repository interfaces.
|
||||
2025-10-13 13:54:24 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode
|
||||
2025-10-13 13:54:24 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Redis repositories in DEFAULT mode.
|
||||
2025-10-13 13:54:24 [main] INFO o.s.d.r.c.RepositoryConfigurationExtensionSupport - Spring Data Redis - Could not safely identify store assignment for repository candidate interface com.flowable.devops.repository.NodeExecutionLogRepository; If you want this repository to be a Redis repository, consider annotating your entities with one of these annotations: org.springframework.data.redis.core.RedisHash (preferred), or consider extending one of the following types with your repository: org.springframework.data.keyvalue.repository.KeyValueRepository
|
||||
2025-10-13 13:54:24 [main] INFO o.s.d.r.c.RepositoryConfigurationExtensionSupport - Spring Data Redis - Could not safely identify store assignment for repository candidate interface com.flowable.devops.repository.NodeTypeRepository; If you want this repository to be a Redis repository, consider annotating your entities with one of these annotations: org.springframework.data.redis.core.RedisHash (preferred), or consider extending one of the following types with your repository: org.springframework.data.keyvalue.repository.KeyValueRepository
|
||||
2025-10-13 13:54:24 [main] INFO o.s.d.r.c.RepositoryConfigurationExtensionSupport - Spring Data Redis - Could not safely identify store assignment for repository candidate interface com.flowable.devops.repository.WorkflowDefinitionRepository; If you want this repository to be a Redis repository, consider annotating your entities with one of these annotations: org.springframework.data.redis.core.RedisHash (preferred), or consider extending one of the following types with your repository: org.springframework.data.keyvalue.repository.KeyValueRepository
|
||||
2025-10-13 13:54:24 [main] INFO o.s.d.r.c.RepositoryConfigurationExtensionSupport - Spring Data Redis - Could not safely identify store assignment for repository candidate interface com.flowable.devops.repository.WorkflowExecutionRepository; If you want this repository to be a Redis repository, consider annotating your entities with one of these annotations: org.springframework.data.redis.core.RedisHash (preferred), or consider extending one of the following types with your repository: org.springframework.data.keyvalue.repository.KeyValueRepository
|
||||
2025-10-13 13:54:24 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 3 ms. Found 0 Redis repository interfaces.
|
||||
2025-10-13 13:54:25 [main] ERROR i.n.r.d.DnsServerAddressStreamProviders - Unable to load io.netty.resolver.dns.macos.MacOSDnsServerAddressStreamProvider, fallback to system defaults. This may result in incorrect DNS resolutions on MacOS. Check whether you have a dependency on 'io.netty:netty-resolver-dns-native-macos'. Use DEBUG level to see the full stack: java.lang.UnsatisfiedLinkError: failed to load the required native library
|
||||
2025-10-13 13:54:25 [main] INFO o.h.jpa.internal.util.LogHelper - HHH000204: Processing PersistenceUnitInfo [name: default]
|
||||
2025-10-13 13:54:25 [main] INFO org.hibernate.Version - HHH000412: Hibernate ORM core version 6.2.13.Final
|
||||
2025-10-13 13:54:25 [main] INFO org.hibernate.cfg.Environment - HHH000406: Using bytecode reflection optimizer
|
||||
2025-10-13 13:54:25 [main] INFO o.s.o.j.p.SpringPersistenceUnitInfo - No LoadTimeWeaver setup: ignoring JPA class transformer
|
||||
2025-10-13 13:54:25 [main] INFO com.zaxxer.hikari.HikariDataSource - FlowableDevOpsHikariCP - Starting...
|
||||
2025-10-13 13:54:25 [main] INFO com.zaxxer.hikari.pool.HikariPool - FlowableDevOpsHikariCP - Added connection com.mysql.cj.jdbc.ConnectionImpl@57bac3f0
|
||||
2025-10-13 13:54:25 [main] INFO com.zaxxer.hikari.HikariDataSource - FlowableDevOpsHikariCP - Start completed.
|
||||
2025-10-13 13:54:25 [main] WARN org.hibernate.orm.deprecation - HHH90000025: MySQL8Dialect does not need to be specified explicitly using 'hibernate.dialect' (remove the property setting and it will be selected by default)
|
||||
2025-10-13 13:54:25 [main] WARN org.hibernate.orm.deprecation - HHH90000026: MySQL8Dialect has been deprecated; use org.hibernate.dialect.MySQLDialect instead
|
||||
2025-10-13 13:54:25 [main] INFO o.h.e.t.j.p.i.JtaPlatformInitiator - HHH000489: No JTA platform available (set 'hibernate.transaction.jta.platform' to enable JTA platform integration)
|
||||
2025-10-13 13:54:25 [main] INFO o.s.o.j.LocalContainerEntityManagerFactoryBean - Initialized JPA EntityManagerFactory for persistence unit 'default'
|
||||
2025-10-13 13:54:25 [main] INFO o.s.d.j.r.query.QueryEnhancerFactory - Hibernate is in classpath; If applicable, HQL parser will be used.
|
||||
2025-10-13 13:54:26 [main] INFO c.f.d.w.n.registry.NodeTypeRegistry - 开始初始化节点类型注册中心...
|
||||
2025-10-13 13:54:26 [main] INFO c.f.d.w.n.registry.NodeTypeRegistry - 发现 1 个WorkflowNode实现类
|
||||
2025-10-13 13:54:26 [main] INFO c.f.d.w.n.registry.NodeTypeRegistry - ✓ 注册节点: HTTP Request (http_request)
|
||||
2025-10-13 13:54:26 [main] INFO c.f.d.w.n.registry.NodeTypeRegistry - 节点类型注册中心初始化完成,共注册 1 个节点类型
|
||||
2025-10-13 13:54:26 [main] INFO o.f.s.b.e.EventRegistryAutoConfiguration - No deployment resources were found for autodeployment
|
||||
2025-10-13 13:54:26 [main] INFO o.f.s.SpringProcessEngineConfiguration - Found 2 Engine Configurators in total:
|
||||
2025-10-13 13:54:26 [main] INFO o.f.s.SpringProcessEngineConfiguration - class org.flowable.eventregistry.spring.configurator.SpringEventRegistryConfigurator (priority:100000)
|
||||
2025-10-13 13:54:26 [main] INFO o.f.s.SpringProcessEngineConfiguration - class org.flowable.idm.engine.configurator.IdmEngineConfigurator (priority:150000)
|
||||
2025-10-13 13:54:26 [main] INFO o.f.s.SpringProcessEngineConfiguration - Executing beforeInit() of class org.flowable.eventregistry.spring.configurator.SpringEventRegistryConfigurator (priority:100000)
|
||||
2025-10-13 13:54:26 [main] INFO o.f.s.SpringProcessEngineConfiguration - Executing beforeInit() of class org.flowable.idm.engine.configurator.IdmEngineConfigurator (priority:150000)
|
||||
2025-10-13 13:54:26 [main] INFO o.f.c.e.i.a.DefaultAsyncTaskExecutor - Creating thread pool queue of size 100
|
||||
2025-10-13 13:54:26 [main] INFO o.f.c.e.i.a.DefaultAsyncTaskExecutor - Creating thread factory with naming pattern flowable-async-task-invoker-thread-%d
|
||||
2025-10-13 13:54:26 [main] INFO o.f.c.e.i.a.DefaultAsyncTaskExecutor - Creating executor service with corePoolSize 8, maxPoolSize 8 and keepAliveTime 5000
|
||||
2025-10-13 13:54:26 [main] INFO o.f.c.e.i.a.DefaultAsyncTaskExecutor - Using rejectedExecutionHandler java.util.concurrent.ThreadPoolExecutor$CallerRunsPolicy@46d7c4d0
|
||||
2025-10-13 13:54:26 [main] INFO o.f.c.e.i.a.DefaultAsyncTaskExecutor - Creating thread pool queue of size 2048
|
||||
2025-10-13 13:54:26 [main] INFO o.f.c.e.i.a.DefaultAsyncTaskExecutor - Creating thread factory with naming pattern flowable-async-job-executor-thread-%d
|
||||
2025-10-13 13:54:26 [main] INFO o.f.c.e.i.a.DefaultAsyncTaskExecutor - Creating executor service with corePoolSize 8, maxPoolSize 8 and keepAliveTime 5000
|
||||
2025-10-13 13:54:26 [main] INFO o.f.s.SpringProcessEngineConfiguration - Executing configure() of class org.flowable.eventregistry.spring.configurator.SpringEventRegistryConfigurator (priority:100000)
|
||||
2025-10-13 13:54:57 [main] INFO liquibase.changelog - Reading from `flowable-devops`.FLW_EV_DATABASECHANGELOG
|
||||
2025-10-13 13:54:57 [main] INFO liquibase.executor - Changelog query completed.
|
||||
2025-10-13 13:54:57 [main] INFO o.f.e.impl.EventRegistryEngineImpl - EventRegistryEngine default created
|
||||
2025-10-13 13:54:57 [main] INFO o.f.s.SpringProcessEngineConfiguration - Executing configure() of class org.flowable.idm.engine.configurator.IdmEngineConfigurator (priority:150000)
|
||||
2025-10-13 13:54:57 [main] INFO o.f.idm.engine.impl.IdmEngineImpl - IdmEngine default created
|
||||
2025-10-13 13:54:57 [main] INFO o.f.engine.impl.ProcessEngineImpl - ProcessEngine default created
|
||||
2025-10-13 13:54:57 [main] INFO o.f.e.impl.cmd.ValidateV5EntitiesCmd - Total of v5 deployments found: 0
|
||||
2025-10-13 13:54:57 [main] INFO c.f.d.expression.ExpressionEngine - 表达式引擎初始化完成,使用Jakarta EL实现
|
||||
2025-10-13 13:54:57 [main] DEBUG o.s.w.r.r.m.a.RequestMappingHandlerMapping - 35 mappings in 'requestMappingHandlerMapping'
|
||||
2025-10-13 13:54:57 [main] INFO o.s.b.a.e.web.EndpointLinksResolver - Exposing 3 endpoint(s) beneath base path '/actuator'
|
||||
2025-10-13 13:54:57 [main] DEBUG o.s.w.r.r.m.a.ControllerMethodResolver - ControllerAdvice beans: 0 @ModelAttribute, 0 @InitBinder, 1 @ExceptionHandler
|
||||
2025-10-13 13:54:57 [main] DEBUG o.s.w.s.a.HttpWebHandlerAdapter - enableLoggingRequestDetails='false': form data and headers will be masked to prevent unsafe logging of potentially sensitive data
|
||||
2025-10-13 13:54:57 [main] INFO o.s.b.w.e.netty.NettyWebServer - Netty started on port 8080
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.FlowableDevopsApplication - Started FlowableDevopsApplication in 33.441 seconds (process running for 33.582)
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.config.DataInitializer - 开始数据初始化...
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.config.DataInitializer - 加载默认节点类型...
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 创建节点类型: start
|
||||
2025-10-13 13:54:57 [main] DEBUG c.f.d.w.n.registry.NodeTypeRegistry - 从数据库注册节点类型: start
|
||||
2025-10-13 13:54:57 [main] DEBUG c.f.devops.service.NodeTypeService - 节点类型已注册到注册表: start
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 节点类型创建成功: start
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.config.DataInitializer - 创建默认节点类型: 开始 (start)
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 创建节点类型: end
|
||||
2025-10-13 13:54:57 [main] DEBUG c.f.d.w.n.registry.NodeTypeRegistry - 从数据库注册节点类型: end
|
||||
2025-10-13 13:54:57 [main] DEBUG c.f.devops.service.NodeTypeService - 节点类型已注册到注册表: end
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 节点类型创建成功: end
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.config.DataInitializer - 创建默认节点类型: 结束 (end)
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 创建节点类型: script-task
|
||||
2025-10-13 13:54:57 [main] DEBUG c.f.d.w.n.registry.NodeTypeRegistry - 从数据库注册节点类型: script-task
|
||||
2025-10-13 13:54:57 [main] DEBUG c.f.devops.service.NodeTypeService - 节点类型已注册到注册表: script-task
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 节点类型创建成功: script-task
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.config.DataInitializer - 创建默认节点类型: 脚本任务 (script-task)
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 创建节点类型: http-request
|
||||
2025-10-13 13:54:57 [main] DEBUG c.f.d.w.n.registry.NodeTypeRegistry - 从数据库注册节点类型: http-request
|
||||
2025-10-13 13:54:57 [main] DEBUG c.f.devops.service.NodeTypeService - 节点类型已注册到注册表: http-request
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 节点类型创建成功: http-request
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.config.DataInitializer - 创建默认节点类型: HTTP请求 (http-request)
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 创建节点类型: user-task
|
||||
2025-10-13 13:54:57 [main] DEBUG c.f.d.w.n.registry.NodeTypeRegistry - 从数据库注册节点类型: user-task
|
||||
2025-10-13 13:54:57 [main] DEBUG c.f.devops.service.NodeTypeService - 节点类型已注册到注册表: user-task
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 节点类型创建成功: user-task
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.config.DataInitializer - 创建默认节点类型: 用户任务 (user-task)
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 创建节点类型: exclusive-gateway
|
||||
2025-10-13 13:54:57 [main] DEBUG c.f.d.w.n.registry.NodeTypeRegistry - 从数据库注册节点类型: exclusive-gateway
|
||||
2025-10-13 13:54:57 [main] DEBUG c.f.devops.service.NodeTypeService - 节点类型已注册到注册表: exclusive-gateway
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 节点类型创建成功: exclusive-gateway
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.config.DataInitializer - 创建默认节点类型: 条件分支 (exclusive-gateway)
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 创建节点类型: parallel-gateway
|
||||
2025-10-13 13:54:57 [main] DEBUG c.f.d.w.n.registry.NodeTypeRegistry - 从数据库注册节点类型: parallel-gateway
|
||||
2025-10-13 13:54:57 [main] DEBUG c.f.devops.service.NodeTypeService - 节点类型已注册到注册表: parallel-gateway
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 节点类型创建成功: parallel-gateway
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.config.DataInitializer - 创建默认节点类型: 并行分支 (parallel-gateway)
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 创建节点类型: data-transform
|
||||
2025-10-13 13:54:57 [main] DEBUG c.f.d.w.n.registry.NodeTypeRegistry - 从数据库注册节点类型: data-transform
|
||||
2025-10-13 13:54:57 [main] DEBUG c.f.devops.service.NodeTypeService - 节点类型已注册到注册表: data-transform
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 节点类型创建成功: data-transform
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.config.DataInitializer - 创建默认节点类型: 数据转换 (data-transform)
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 刷新节点类型注册表
|
||||
2025-10-13 13:54:57 [main] DEBUG c.f.d.w.n.registry.NodeTypeRegistry - 清空节点类型注册表
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 数据库中总共有 0 个节点类型,其中 0 个启用
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 查询返回 0 个启用的节点类型
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.service.NodeTypeService - 节点类型注册表刷新完成,已加载 0 个节点类型
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.config.DataInitializer - 默认节点类型加载完成,共 8 个节点类型
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.config.DataInitializer - 数据初始化完成
|
||||
2025-10-13 13:54:57 [main] INFO c.f.devops.FlowableDevopsApplication - Flowable DevOps Backend 应用程序启动成功!
|
||||
659
backend/backend.log
Normal file
@ -0,0 +1,659 @@
|
||||
[INFO] Scanning for projects...
|
||||
[INFO]
|
||||
[INFO] ----------------< com.flowable:flowable-devops-backend >----------------
|
||||
[INFO] Building Flowable DevOps Backend 1.0-SNAPSHOT
|
||||
[INFO] from pom.xml
|
||||
[INFO] --------------------------------[ jar ]---------------------------------
|
||||
[INFO]
|
||||
[INFO] >>> spring-boot:3.1.5:run (default-cli) > test-compile @ flowable-devops-backend >>>
|
||||
[WARNING] The artifact mysql:mysql-connector-java:jar:8.0.33 has been relocated to com.mysql:mysql-connector-j:jar:8.0.33: MySQL Connector/J artifacts moved to reverse-DNS compliant Maven 2+ coordinates.
|
||||
[INFO]
|
||||
[INFO] --- resources:3.3.1:resources (default-resources) @ flowable-devops-backend ---
|
||||
[INFO] Copying 1 resource from src/main/resources to target/classes
|
||||
[INFO] Copying 0 resource from src/main/resources to target/classes
|
||||
[INFO]
|
||||
[INFO] --- compiler:3.11.0:compile (default-compile) @ flowable-devops-backend ---
|
||||
[INFO] Changes detected - recompiling the module! :source
|
||||
[INFO] Compiling 37 source files with javac [debug release 17] to target/classes
|
||||
[INFO] 由于在类路径中发现了一个或多个处理程序,因此启用了
|
||||
批注处理。未来发行版的 javac 可能会禁用批注处理,
|
||||
除非至少按名称指定了一个处理程序 (-processor),
|
||||
或指定了搜索路径 (--processor-path, --processor-module-path),
|
||||
或显式启用了批注处理 (-proc:only, -proc:full)。
|
||||
可使用 -Xlint:-options 隐藏此消息。
|
||||
可使用 -proc:none 禁用批注处理。
|
||||
[INFO] /Users/qichen/Public/development/flowable-devops/backend/src/main/java/com/flowable/devops/entity/WorkflowExecution.java: /Users/qichen/Public/development/flowable-devops/backend/src/main/java/com/flowable/devops/entity/WorkflowExecution.java使用或覆盖了已过时的 API。
|
||||
[INFO] /Users/qichen/Public/development/flowable-devops/backend/src/main/java/com/flowable/devops/entity/WorkflowExecution.java: 有关详细信息, 请使用 -Xlint:deprecation 重新编译。
|
||||
[INFO]
|
||||
[INFO] --- resources:3.3.1:testResources (default-testResources) @ flowable-devops-backend ---
|
||||
[INFO] Copying 1 resource from src/test/resources to target/test-classes
|
||||
[INFO]
|
||||
[INFO] --- compiler:3.11.0:testCompile (default-testCompile) @ flowable-devops-backend ---
|
||||
[INFO] Changes detected - recompiling the module! :dependency
|
||||
[INFO] Compiling 5 source files with javac [debug release 17] to target/test-classes
|
||||
[INFO] 由于在类路径中发现了一个或多个处理程序,因此启用了
|
||||
批注处理。未来发行版的 javac 可能会禁用批注处理,
|
||||
除非至少按名称指定了一个处理程序 (-processor),
|
||||
或指定了搜索路径 (--processor-path, --processor-module-path),
|
||||
或显式启用了批注处理 (-proc:only, -proc:full)。
|
||||
可使用 -Xlint:-options 隐藏此消息。
|
||||
可使用 -proc:none 禁用批注处理。
|
||||
[INFO]
|
||||
[INFO] <<< spring-boot:3.1.5:run (default-cli) < test-compile @ flowable-devops-backend <<<
|
||||
[INFO]
|
||||
[INFO]
|
||||
[INFO] --- spring-boot:3.1.5:run (default-cli) @ flowable-devops-backend ---
|
||||
[INFO] Attaching agents: []
|
||||
13:26:27.328 [main] INFO com.flowable.devops.FlowableDevopsApplication -- 启动 Flowable DevOps Backend 应用程序...
|
||||
13:26:27.329 [main] INFO com.flowable.devops.FlowableDevopsApplication -- 基于 Spring Boot 3 + WebFlux + Flowable 7
|
||||
13:26:27.329 [main] INFO com.flowable.devops.FlowableDevopsApplication -- 可视化工作流平台 - 后端服务
|
||||
|
||||
. ____ _ __ _ _
|
||||
/\\ / ___'_ __ _ _(_)_ __ __ _ \ \ \ \
|
||||
( ( )\___ | '_ | '_| | '_ \/ _` | \ \ \ \
|
||||
\\/ ___)| |_)| | | | | || (_| | ) ) ) )
|
||||
' |____| .__|_| |_|_| |_\__, | / / / /
|
||||
=========|_|==============|___/=/_/_/_/
|
||||
:: Spring Boot :: (v3.1.5)
|
||||
|
||||
2025-10-13 13:26:27 [main] INFO c.f.devops.FlowableDevopsApplication - Starting FlowableDevopsApplication using Java 21.0.5 with PID 62797 (/Users/qichen/Public/development/flowable-devops/backend/target/classes started by qichen in /Users/qichen/Public/development/flowable-devops/backend)
|
||||
2025-10-13 13:26:27 [main] DEBUG c.f.devops.FlowableDevopsApplication - Running with Spring Boot v3.1.5, Spring v6.0.13
|
||||
2025-10-13 13:26:27 [main] INFO c.f.devops.FlowableDevopsApplication - The following 1 profile is active: "dev"
|
||||
2025-10-13 13:26:27 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode
|
||||
2025-10-13 13:26:27 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data JPA repositories in DEFAULT mode.
|
||||
2025-10-13 13:26:27 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 58 ms. Found 4 JPA repository interfaces.
|
||||
2025-10-13 13:26:27 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Multiple Spring Data modules found, entering strict repository configuration mode
|
||||
2025-10-13 13:26:27 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Bootstrapping Spring Data Redis repositories in DEFAULT mode.
|
||||
2025-10-13 13:26:27 [main] INFO o.s.d.r.c.RepositoryConfigurationExtensionSupport - Spring Data Redis - Could not safely identify store assignment for repository candidate interface com.flowable.devops.repository.NodeExecutionLogRepository; If you want this repository to be a Redis repository, consider annotating your entities with one of these annotations: org.springframework.data.redis.core.RedisHash (preferred), or consider extending one of the following types with your repository: org.springframework.data.keyvalue.repository.KeyValueRepository
|
||||
2025-10-13 13:26:27 [main] INFO o.s.d.r.c.RepositoryConfigurationExtensionSupport - Spring Data Redis - Could not safely identify store assignment for repository candidate interface com.flowable.devops.repository.NodeTypeRepository; If you want this repository to be a Redis repository, consider annotating your entities with one of these annotations: org.springframework.data.redis.core.RedisHash (preferred), or consider extending one of the following types with your repository: org.springframework.data.keyvalue.repository.KeyValueRepository
|
||||
2025-10-13 13:26:27 [main] INFO o.s.d.r.c.RepositoryConfigurationExtensionSupport - Spring Data Redis - Could not safely identify store assignment for repository candidate interface com.flowable.devops.repository.WorkflowDefinitionRepository; If you want this repository to be a Redis repository, consider annotating your entities with one of these annotations: org.springframework.data.redis.core.RedisHash (preferred), or consider extending one of the following types with your repository: org.springframework.data.keyvalue.repository.KeyValueRepository
|
||||
2025-10-13 13:26:27 [main] INFO o.s.d.r.c.RepositoryConfigurationExtensionSupport - Spring Data Redis - Could not safely identify store assignment for repository candidate interface com.flowable.devops.repository.WorkflowExecutionRepository; If you want this repository to be a Redis repository, consider annotating your entities with one of these annotations: org.springframework.data.redis.core.RedisHash (preferred), or consider extending one of the following types with your repository: org.springframework.data.keyvalue.repository.KeyValueRepository
|
||||
2025-10-13 13:26:27 [main] INFO o.s.d.r.c.RepositoryConfigurationDelegate - Finished Spring Data repository scanning in 3 ms. Found 0 Redis repository interfaces.
|
||||
2025-10-13 13:26:28 [main] ERROR i.n.r.d.DnsServerAddressStreamProviders - Unable to load io.netty.resolver.dns.macos.MacOSDnsServerAddressStreamProvider, fallback to system defaults. This may result in incorrect DNS resolutions on MacOS. Check whether you have a dependency on 'io.netty:netty-resolver-dns-native-macos'. Use DEBUG level to see the full stack: java.lang.UnsatisfiedLinkError: failed to load the required native library
|
||||
2025-10-13 13:26:28 [main] INFO o.h.jpa.internal.util.LogHelper - HHH000204: Processing PersistenceUnitInfo [name: default]
|
||||
2025-10-13 13:26:28 [main] INFO org.hibernate.Version - HHH000412: Hibernate ORM core version 6.2.13.Final
|
||||
2025-10-13 13:26:28 [main] INFO org.hibernate.cfg.Environment - HHH000406: Using bytecode reflection optimizer
|
||||
2025-10-13 13:26:28 [main] INFO o.s.o.j.p.SpringPersistenceUnitInfo - No LoadTimeWeaver setup: ignoring JPA class transformer
|
||||
2025-10-13 13:26:28 [main] INFO com.zaxxer.hikari.HikariDataSource - FlowableDevOpsHikariCP - Starting...
|
||||
2025-10-13 13:26:28 [main] INFO com.zaxxer.hikari.pool.HikariPool - FlowableDevOpsHikariCP - Added connection com.mysql.cj.jdbc.ConnectionImpl@257b3561
|
||||
2025-10-13 13:26:28 [main] INFO com.zaxxer.hikari.HikariDataSource - FlowableDevOpsHikariCP - Start completed.
|
||||
2025-10-13 13:26:28 [main] WARN org.hibernate.orm.deprecation - HHH90000025: MySQL8Dialect does not need to be specified explicitly using 'hibernate.dialect' (remove the property setting and it will be selected by default)
|
||||
2025-10-13 13:26:28 [main] WARN org.hibernate.orm.deprecation - HHH90000026: MySQL8Dialect has been deprecated; use org.hibernate.dialect.MySQLDialect instead
|
||||
2025-10-13 13:26:28 [main] INFO o.h.e.t.j.p.i.JtaPlatformInitiator - HHH000489: No JTA platform available (set 'hibernate.transaction.jta.platform' to enable JTA platform integration)
|
||||
2025-10-13 13:26:28 [main] INFO o.s.o.j.LocalContainerEntityManagerFactoryBean - Initialized JPA EntityManagerFactory for persistence unit 'default'
|
||||
2025-10-13 13:26:29 [main] INFO o.s.d.j.r.query.QueryEnhancerFactory - Hibernate is in classpath; If applicable, HQL parser will be used.
|
||||
2025-10-13 13:26:29 [main] INFO c.f.d.w.n.registry.NodeTypeRegistry - 开始初始化节点类型注册中心...
|
||||
2025-10-13 13:26:29 [main] INFO c.f.d.w.n.registry.NodeTypeRegistry - 发现 1 个WorkflowNode实现类
|
||||
2025-10-13 13:26:29 [main] INFO c.f.d.w.n.registry.NodeTypeRegistry - ✓ Registered node: HTTP Request (http_request)
2025-10-13 13:26:29 [main] INFO c.f.d.w.n.registry.NodeTypeRegistry - Node type registry initialization complete, 1 node type registered in total
2025-10-13 13:26:29 [main] INFO o.f.s.b.e.EventRegistryAutoConfiguration - No deployment resources were found for autodeployment
2025-10-13 13:26:29 [main] INFO o.f.s.SpringProcessEngineConfiguration - Found 2 Engine Configurators in total:
2025-10-13 13:26:29 [main] INFO o.f.s.SpringProcessEngineConfiguration - class org.flowable.eventregistry.spring.configurator.SpringEventRegistryConfigurator (priority:100000)
2025-10-13 13:26:29 [main] INFO o.f.s.SpringProcessEngineConfiguration - class org.flowable.idm.engine.configurator.IdmEngineConfigurator (priority:150000)
2025-10-13 13:26:29 [main] INFO o.f.s.SpringProcessEngineConfiguration - Executing beforeInit() of class org.flowable.eventregistry.spring.configurator.SpringEventRegistryConfigurator (priority:100000)
2025-10-13 13:26:29 [main] INFO o.f.s.SpringProcessEngineConfiguration - Executing beforeInit() of class org.flowable.idm.engine.configurator.IdmEngineConfigurator (priority:150000)
2025-10-13 13:26:29 [main] INFO o.f.c.e.i.a.DefaultAsyncTaskExecutor - Creating thread pool queue of size 100
2025-10-13 13:26:29 [main] INFO o.f.c.e.i.a.DefaultAsyncTaskExecutor - Creating thread factory with naming pattern flowable-async-task-invoker-thread-%d
2025-10-13 13:26:29 [main] INFO o.f.c.e.i.a.DefaultAsyncTaskExecutor - Creating executor service with corePoolSize 8, maxPoolSize 8 and keepAliveTime 5000
2025-10-13 13:26:29 [main] INFO o.f.c.e.i.a.DefaultAsyncTaskExecutor - Using rejectedExecutionHandler java.util.concurrent.ThreadPoolExecutor$CallerRunsPolicy@7cb2f534
2025-10-13 13:26:29 [main] INFO o.f.c.e.i.a.DefaultAsyncTaskExecutor - Creating thread pool queue of size 2048
2025-10-13 13:26:29 [main] INFO o.f.c.e.i.a.DefaultAsyncTaskExecutor - Creating thread factory with naming pattern flowable-async-job-executor-thread-%d
2025-10-13 13:26:29 [main] INFO o.f.c.e.i.a.DefaultAsyncTaskExecutor - Creating executor service with corePoolSize 8, maxPoolSize 8 and keepAliveTime 5000
2025-10-13 13:26:29 [main] INFO o.f.s.SpringProcessEngineConfiguration - Executing configure() of class org.flowable.eventregistry.spring.configurator.SpringEventRegistryConfigurator (priority:100000)
2025-10-13 13:27:00 [main] INFO liquibase.changelog - Reading from `flowable-devops`.FLW_EV_DATABASECHANGELOG
2025-10-13 13:27:00 [main] INFO liquibase.executor - Changelog query completed.
2025-10-13 13:27:00 [main] INFO o.f.e.impl.EventRegistryEngineImpl - EventRegistryEngine default created
2025-10-13 13:27:00 [main] INFO o.f.s.SpringProcessEngineConfiguration - Executing configure() of class org.flowable.idm.engine.configurator.IdmEngineConfigurator (priority:150000)
2025-10-13 13:27:00 [main] INFO o.f.idm.engine.impl.IdmEngineImpl - IdmEngine default created
2025-10-13 13:27:00 [main] INFO o.f.engine.impl.ProcessEngineImpl - ProcessEngine default created
2025-10-13 13:27:00 [main] INFO o.f.e.impl.cmd.ValidateV5EntitiesCmd - Total of v5 deployments found: 0
2025-10-13 13:27:00 [main] INFO c.f.d.expression.ExpressionEngine - Expression engine initialized, using the Jakarta EL implementation
2025-10-13 13:27:00 [main] DEBUG o.s.w.r.r.m.a.RequestMappingHandlerMapping - 35 mappings in 'requestMappingHandlerMapping'
2025-10-13 13:27:00 [main] INFO o.s.b.a.e.web.EndpointLinksResolver - Exposing 3 endpoint(s) beneath base path '/actuator'
2025-10-13 13:27:00 [main] DEBUG o.s.w.r.r.m.a.ControllerMethodResolver - ControllerAdvice beans: 0 @ModelAttribute, 0 @InitBinder, 1 @ExceptionHandler
2025-10-13 13:27:00 [main] DEBUG o.s.w.s.a.HttpWebHandlerAdapter - enableLoggingRequestDetails='false': form data and headers will be masked to prevent unsafe logging of potentially sensitive data
2025-10-13 13:27:00 [main] INFO o.s.b.w.e.netty.NettyWebServer - Netty started on port 8080
2025-10-13 13:27:00 [main] INFO c.f.devops.FlowableDevopsApplication - Started FlowableDevopsApplication in 33.366 seconds (process running for 33.515)
2025-10-13 13:27:00 [main] INFO c.f.devops.config.DataInitializer - Starting data initialization...
2025-10-13 13:27:00 [main] INFO c.f.devops.config.DataInitializer - Loading default node types...
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Creating node type: start
2025-10-13 13:27:00 [main] DEBUG c.f.d.w.n.registry.NodeTypeRegistry - Registering node type from database: start
2025-10-13 13:27:00 [main] DEBUG c.f.devops.service.NodeTypeService - Node type registered in the registry: start
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Node type created successfully: start
2025-10-13 13:27:00 [main] INFO c.f.devops.config.DataInitializer - Created default node type: 开始 (start)
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Creating node type: end
2025-10-13 13:27:00 [main] DEBUG c.f.d.w.n.registry.NodeTypeRegistry - Registering node type from database: end
2025-10-13 13:27:00 [main] DEBUG c.f.devops.service.NodeTypeService - Node type registered in the registry: end
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Node type created successfully: end
2025-10-13 13:27:00 [main] INFO c.f.devops.config.DataInitializer - Created default node type: 结束 (end)
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Creating node type: script-task
2025-10-13 13:27:00 [main] DEBUG c.f.d.w.n.registry.NodeTypeRegistry - Registering node type from database: script-task
2025-10-13 13:27:00 [main] DEBUG c.f.devops.service.NodeTypeService - Node type registered in the registry: script-task
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Node type created successfully: script-task
2025-10-13 13:27:00 [main] INFO c.f.devops.config.DataInitializer - Created default node type: 脚本任务 (script-task)
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Creating node type: http-request
2025-10-13 13:27:00 [main] DEBUG c.f.d.w.n.registry.NodeTypeRegistry - Registering node type from database: http-request
2025-10-13 13:27:00 [main] DEBUG c.f.devops.service.NodeTypeService - Node type registered in the registry: http-request
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Node type created successfully: http-request
2025-10-13 13:27:00 [main] INFO c.f.devops.config.DataInitializer - Created default node type: HTTP请求 (http-request)
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Creating node type: user-task
2025-10-13 13:27:00 [main] DEBUG c.f.d.w.n.registry.NodeTypeRegistry - Registering node type from database: user-task
2025-10-13 13:27:00 [main] DEBUG c.f.devops.service.NodeTypeService - Node type registered in the registry: user-task
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Node type created successfully: user-task
2025-10-13 13:27:00 [main] INFO c.f.devops.config.DataInitializer - Created default node type: 用户任务 (user-task)
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Creating node type: exclusive-gateway
2025-10-13 13:27:00 [main] DEBUG c.f.d.w.n.registry.NodeTypeRegistry - Registering node type from database: exclusive-gateway
2025-10-13 13:27:00 [main] DEBUG c.f.devops.service.NodeTypeService - Node type registered in the registry: exclusive-gateway
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Node type created successfully: exclusive-gateway
2025-10-13 13:27:00 [main] INFO c.f.devops.config.DataInitializer - Created default node type: 条件分支 (exclusive-gateway)
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Creating node type: parallel-gateway
2025-10-13 13:27:00 [main] DEBUG c.f.d.w.n.registry.NodeTypeRegistry - Registering node type from database: parallel-gateway
2025-10-13 13:27:00 [main] DEBUG c.f.devops.service.NodeTypeService - Node type registered in the registry: parallel-gateway
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Node type created successfully: parallel-gateway
2025-10-13 13:27:00 [main] INFO c.f.devops.config.DataInitializer - Created default node type: 并行分支 (parallel-gateway)
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Creating node type: data-transform
2025-10-13 13:27:00 [main] DEBUG c.f.d.w.n.registry.NodeTypeRegistry - Registering node type from database: data-transform
2025-10-13 13:27:00 [main] DEBUG c.f.devops.service.NodeTypeService - Node type registered in the registry: data-transform
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Node type created successfully: data-transform
2025-10-13 13:27:00 [main] INFO c.f.devops.config.DataInitializer - Created default node type: 数据转换 (data-transform)
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Refreshing node type registry
2025-10-13 13:27:00 [main] DEBUG c.f.d.w.n.registry.NodeTypeRegistry - Clearing node type registry
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Database contains 0 node types in total, of which 0 are enabled
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Query returned 0 enabled node types
2025-10-13 13:27:00 [main] INFO c.f.devops.service.NodeTypeService - Node type registry refresh complete, 0 node types loaded
2025-10-13 13:27:00 [main] INFO c.f.devops.config.DataInitializer - Default node types loaded, 8 node types in total
2025-10-13 13:27:00 [main] INFO c.f.devops.config.DataInitializer - Data initialization complete
2025-10-13 13:27:00 [main] INFO c.f.devops.FlowableDevopsApplication - Flowable DevOps Backend application started successfully!
2025-10-13 13:30:39 [reactor-http-nio-2] DEBUG o.s.w.s.a.HttpWebHandlerAdapter - [d3aea410-1] HTTP GET "/actuator/health"
2025-10-13 13:30:39 [lettuce-nioEventLoop-5-1] DEBUG o.s.w.r.r.m.a.ResponseEntityResultHandler - [d3aea410-1] Using 'application/vnd.spring-boot.actuator.v3+json' given [*/*] and supported [application/vnd.spring-boot.actuator.v3+json, application/vnd.spring-boot.actuator.v2+json, application/json]
2025-10-13 13:30:39 [lettuce-nioEventLoop-5-1] DEBUG o.s.w.r.r.m.a.ResponseEntityResultHandler - [d3aea410-1] 0..1 [org.springframework.boot.actuate.health.SystemHealth]
2025-10-13 13:30:39 [lettuce-nioEventLoop-5-1] DEBUG org.springframework.web.HttpLogging - [d3aea410-1] Encoding [org.springframework.boot.actuate.health.SystemHealth@262006cb]
2025-10-13 13:30:39 [reactor-http-nio-2] DEBUG o.s.w.s.a.HttpWebHandlerAdapter - [d3aea410-1] Completed 200 OK
2025-10-13 13:30:55 [reactor-http-nio-3] DEBUG o.s.w.s.a.HttpWebHandlerAdapter - [df7262b1-2] HTTP POST "/api/workflows"
2025-10-13 13:30:55 [reactor-http-nio-3] DEBUG o.s.w.r.r.m.a.RequestMappingHandlerMapping - [df7262b1-2] Mapped to com.flowable.devops.controller.WorkflowController#createWorkflow(WorkflowDefinition)
2025-10-13 13:30:55 [reactor-http-nio-3] DEBUG o.s.w.r.r.m.a.RequestBodyMethodArgumentResolver - [df7262b1-2] Content-Type:application/json
2025-10-13 13:30:55 [reactor-http-nio-3] DEBUG o.s.w.r.r.m.a.RequestBodyMethodArgumentResolver - [df7262b1-2] 0..1 [com.flowable.devops.entity.WorkflowDefinition]
2025-10-13 13:30:55 [reactor-http-nio-3] DEBUG org.springframework.web.HttpLogging - [df7262b1-2] Decoded [WorkflowDefinition(id=test-workflow-001, name=端到端测试工作流, description=用于测试节点间输入输出映射的工作流, definition=nu (truncated)...]
2025-10-13 13:30:55 [reactor-http-nio-3] INFO c.f.d.controller.WorkflowController - Creating workflow definition: 端到端测试工作流
2025-10-13 13:30:55 [reactor-http-nio-3] INFO c.f.devops.service.WorkflowService - Creating workflow definition: 端到端测试工作流
2025-10-13 13:30:55 [reactor-http-nio-3] ERROR c.f.devops.service.WorkflowService - Failed to create workflow definition: 端到端测试工作流
java.lang.NullPointerException: Cannot invoke "com.fasterxml.jackson.databind.JsonNode.has(String)" because "workflowJson" is null
at com.flowable.devops.workflow.converter.WorkflowConverter.validateWorkflowJson(WorkflowConverter.java:334)
at com.flowable.devops.service.WorkflowService.create(WorkflowService.java:72)
at java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(DirectMethodHandleAccessor.java:103)
|
||||
at java.base/java.lang.reflect.Method.invoke(Method.java:580)
|
||||
at org.springframework.aop.support.AopUtils.invokeJoinpointUsingReflection(AopUtils.java:343)
|
||||
at org.springframework.aop.framework.ReflectiveMethodInvocation.invokeJoinpoint(ReflectiveMethodInvocation.java:196)
|
||||
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163)
|
||||
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.proceed(CglibAopProxy.java:751)
|
||||
at org.springframework.transaction.interceptor.TransactionInterceptor$1.proceedWithInvocation(TransactionInterceptor.java:123)
|
||||
at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:391)
|
||||
at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:119)
|
||||
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:184)
|
||||
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.proceed(CglibAopProxy.java:751)
|
||||
at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:703)
|
||||
at com.flowable.devops.service.WorkflowService$$SpringCGLIB$$0.create(<generated>)
|
||||
at com.flowable.devops.controller.WorkflowController.lambda$createWorkflow$5(WorkflowController.java:73)
|
||||
at reactor.core.publisher.MonoCallable$MonoCallableSubscription.request(MonoCallable.java:137)
|
||||
at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.request(FluxMapFuseable.java:171)
|
||||
at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2341)
|
||||
at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onSubscribe(FluxOnErrorResume.java:74)
|
||||
at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onSubscribe(FluxMapFuseable.java:96)
|
||||
at reactor.core.publisher.MonoCallable.subscribe(MonoCallable.java:48)
|
||||
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
|
||||
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:165)
|
||||
at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79)
|
||||
at reactor.core.publisher.FluxPeek$PeekSubscriber.onNext(FluxPeek.java:200)
|
||||
at reactor.core.publisher.FluxPeek$PeekSubscriber.onNext(FluxPeek.java:200)
|
||||
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.complete(MonoIgnoreThen.java:292)
|
||||
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.onNext(MonoIgnoreThen.java:187)
|
||||
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:158)
|
||||
at reactor.core.publisher.MonoZip$ZipCoordinator.signal(MonoZip.java:293)
|
||||
at reactor.core.publisher.MonoZip$ZipInner.onNext(MonoZip.java:474)
|
||||
at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.onNext(MonoPeekTerminal.java:180)
|
||||
at reactor.core.publisher.FluxDefaultIfEmpty$DefaultIfEmptySubscriber.onNext(FluxDefaultIfEmpty.java:122)
|
||||
at reactor.core.publisher.FluxPeek$PeekSubscriber.onNext(FluxPeek.java:200)
|
||||
at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onNext(FluxSwitchIfEmpty.java:74)
|
||||
at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79)
|
||||
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:158)
|
||||
at reactor.core.publisher.FluxContextWrite$ContextWriteSubscriber.onNext(FluxContextWrite.java:107)
|
||||
at reactor.core.publisher.FluxMapFuseable$MapFuseableConditionalSubscriber.onNext(FluxMapFuseable.java:299)
|
||||
at reactor.core.publisher.FluxFilterFuseable$FilterFuseableConditionalSubscriber.onNext(FluxFilterFuseable.java:337)
|
||||
at reactor.core.publisher.Operators$BaseFluxToMonoOperator.completePossiblyEmpty(Operators.java:2071)
|
||||
at reactor.core.publisher.MonoCollect$CollectSubscriber.onComplete(MonoCollect.java:145)
|
||||
at reactor.core.publisher.FluxMap$MapSubscriber.onComplete(FluxMap.java:144)
|
||||
at reactor.core.publisher.FluxPeek$PeekSubscriber.onComplete(FluxPeek.java:260)
|
||||
at reactor.core.publisher.FluxMap$MapSubscriber.onComplete(FluxMap.java:144)
|
||||
at reactor.netty.channel.FluxReceive.onInboundComplete(FluxReceive.java:413)
|
||||
at reactor.netty.channel.ChannelOperations.onInboundComplete(ChannelOperations.java:444)
|
||||
at reactor.netty.http.server.HttpServerOperations.onInboundNext(HttpServerOperations.java:685)
|
||||
at reactor.netty.channel.ChannelOperationsHandler.channelRead(ChannelOperationsHandler.java:114)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
|
||||
at reactor.netty.http.server.HttpTrafficHandler.channelRead(HttpTrafficHandler.java:284)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:442)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
|
||||
at io.netty.channel.CombinedChannelDuplexHandler$DelegatingChannelHandlerContext.fireChannelRead(CombinedChannelDuplexHandler.java:436)
|
||||
at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:346)
|
||||
at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:318)
|
||||
at io.netty.channel.CombinedChannelDuplexHandler.channelRead(CombinedChannelDuplexHandler.java:251)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:442)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
|
||||
at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:440)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
|
||||
at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)
|
||||
at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:166)
|
||||
at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:788)
|
||||
at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:724)
|
||||
at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:650)
|
||||
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:562)
|
||||
at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
|
||||
at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
|
||||
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
|
||||
at java.base/java.lang.Thread.run(Thread.java:1583)
2025-10-13 13:30:55 [reactor-http-nio-3] ERROR c.f.d.controller.WorkflowController - Failed to create workflow definition: 端到端测试工作流
com.flowable.devops.service.WorkflowServiceException: Failed to create workflow definition: Cannot invoke "com.fasterxml.jackson.databind.JsonNode.has(String)" because "workflowJson" is null
at com.flowable.devops.service.WorkflowService.create(WorkflowService.java:87)
at java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(DirectMethodHandleAccessor.java:103)
|
||||
at java.base/java.lang.reflect.Method.invoke(Method.java:580)
|
||||
at org.springframework.aop.support.AopUtils.invokeJoinpointUsingReflection(AopUtils.java:343)
|
||||
at org.springframework.aop.framework.ReflectiveMethodInvocation.invokeJoinpoint(ReflectiveMethodInvocation.java:196)
|
||||
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163)
|
||||
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.proceed(CglibAopProxy.java:751)
|
||||
at org.springframework.transaction.interceptor.TransactionInterceptor$1.proceedWithInvocation(TransactionInterceptor.java:123)
|
||||
at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:391)
|
||||
at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:119)
|
||||
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:184)
|
||||
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.proceed(CglibAopProxy.java:751)
|
||||
at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:703)
|
||||
at com.flowable.devops.service.WorkflowService$$SpringCGLIB$$0.create(<generated>)
|
||||
at com.flowable.devops.controller.WorkflowController.lambda$createWorkflow$5(WorkflowController.java:73)
|
||||
at reactor.core.publisher.MonoCallable$MonoCallableSubscription.request(MonoCallable.java:137)
|
||||
at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.request(FluxMapFuseable.java:171)
|
||||
at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2341)
|
||||
at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onSubscribe(FluxOnErrorResume.java:74)
|
||||
at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onSubscribe(FluxMapFuseable.java:96)
|
||||
at reactor.core.publisher.MonoCallable.subscribe(MonoCallable.java:48)
|
||||
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
|
||||
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:165)
|
||||
at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79)
|
||||
at reactor.core.publisher.FluxPeek$PeekSubscriber.onNext(FluxPeek.java:200)
|
||||
at reactor.core.publisher.FluxPeek$PeekSubscriber.onNext(FluxPeek.java:200)
|
||||
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.complete(MonoIgnoreThen.java:292)
|
||||
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.onNext(MonoIgnoreThen.java:187)
|
||||
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:158)
|
||||
at reactor.core.publisher.MonoZip$ZipCoordinator.signal(MonoZip.java:293)
|
||||
at reactor.core.publisher.MonoZip$ZipInner.onNext(MonoZip.java:474)
|
||||
at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.onNext(MonoPeekTerminal.java:180)
|
||||
at reactor.core.publisher.FluxDefaultIfEmpty$DefaultIfEmptySubscriber.onNext(FluxDefaultIfEmpty.java:122)
|
||||
at reactor.core.publisher.FluxPeek$PeekSubscriber.onNext(FluxPeek.java:200)
|
||||
at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onNext(FluxSwitchIfEmpty.java:74)
|
||||
at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79)
|
||||
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:158)
|
||||
at reactor.core.publisher.FluxContextWrite$ContextWriteSubscriber.onNext(FluxContextWrite.java:107)
|
||||
at reactor.core.publisher.FluxMapFuseable$MapFuseableConditionalSubscriber.onNext(FluxMapFuseable.java:299)
|
||||
at reactor.core.publisher.FluxFilterFuseable$FilterFuseableConditionalSubscriber.onNext(FluxFilterFuseable.java:337)
|
||||
at reactor.core.publisher.Operators$BaseFluxToMonoOperator.completePossiblyEmpty(Operators.java:2071)
|
||||
at reactor.core.publisher.MonoCollect$CollectSubscriber.onComplete(MonoCollect.java:145)
|
||||
at reactor.core.publisher.FluxMap$MapSubscriber.onComplete(FluxMap.java:144)
|
||||
at reactor.core.publisher.FluxPeek$PeekSubscriber.onComplete(FluxPeek.java:260)
|
||||
at reactor.core.publisher.FluxMap$MapSubscriber.onComplete(FluxMap.java:144)
|
||||
at reactor.netty.channel.FluxReceive.onInboundComplete(FluxReceive.java:413)
|
||||
at reactor.netty.channel.ChannelOperations.onInboundComplete(ChannelOperations.java:444)
|
||||
at reactor.netty.http.server.HttpServerOperations.onInboundNext(HttpServerOperations.java:685)
|
||||
at reactor.netty.channel.ChannelOperationsHandler.channelRead(ChannelOperationsHandler.java:114)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
|
||||
at reactor.netty.http.server.HttpTrafficHandler.channelRead(HttpTrafficHandler.java:284)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:442)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
|
||||
at io.netty.channel.CombinedChannelDuplexHandler$DelegatingChannelHandlerContext.fireChannelRead(CombinedChannelDuplexHandler.java:436)
|
||||
at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:346)
|
||||
at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:318)
|
||||
at io.netty.channel.CombinedChannelDuplexHandler.channelRead(CombinedChannelDuplexHandler.java:251)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:442)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
|
||||
at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:440)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
|
||||
at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)
|
||||
at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:166)
|
||||
at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:788)
|
||||
at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:724)
|
||||
at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:650)
|
||||
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:562)
|
||||
at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
|
||||
at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
|
||||
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
|
||||
at java.base/java.lang.Thread.run(Thread.java:1583)
|
||||
Caused by: java.lang.NullPointerException: Cannot invoke "com.fasterxml.jackson.databind.JsonNode.has(String)" because "workflowJson" is null
at com.flowable.devops.workflow.converter.WorkflowConverter.validateWorkflowJson(WorkflowConverter.java:334)
at com.flowable.devops.service.WorkflowService.create(WorkflowService.java:72)
... 75 common frames omitted
2025-10-13 13:30:55 [reactor-http-nio-3] DEBUG o.s.w.s.a.HttpWebHandlerAdapter - [df7262b1-2] Completed 400 BAD_REQUEST
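The 400 above comes from validateWorkflowJson dereferencing a null workflowJson (the decoded request shows definition=null). Below is a minimal sketch of the kind of guard that would turn this into an explicit validation error; the class and method names are illustrative assumptions based on the stack trace, not the committed WorkflowConverter source.

// Illustrative only: the WorkflowConverter source is not part of this commit, so the
// class and method names below are assumptions based on the stack trace above.
import com.fasterxml.jackson.databind.JsonNode;

public final class WorkflowJsonGuard {

    private WorkflowJsonGuard() {
    }

    /** Rejects a missing or incomplete workflow definition before any field access. */
    public static void requireWorkflowJson(JsonNode workflowJson) {
        if (workflowJson == null || workflowJson.isNull()) {
            throw new IllegalArgumentException("workflow definition JSON must not be null");
        }
        if (!workflowJson.has("nodes") || !workflowJson.has("edges")) {
            throw new IllegalArgumentException("workflow definition JSON must contain 'nodes' and 'edges'");
        }
    }
}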
2025-10-13 13:32:47 [reactor-http-nio-4] DEBUG o.s.w.s.a.HttpWebHandlerAdapter - [34098d80-3] HTTP POST "/api/workflows"
2025-10-13 13:32:47 [reactor-http-nio-4] DEBUG o.s.w.r.r.m.a.RequestMappingHandlerMapping - [34098d80-3] Mapped to com.flowable.devops.controller.WorkflowController#createWorkflow(WorkflowDefinition)
2025-10-13 13:32:47 [reactor-http-nio-4] DEBUG o.s.w.r.r.m.a.RequestBodyMethodArgumentResolver - [34098d80-3] Content-Type:application/json
2025-10-13 13:32:47 [reactor-http-nio-4] DEBUG o.s.w.r.r.m.a.RequestBodyMethodArgumentResolver - [34098d80-3] 0..1 [com.flowable.devops.entity.WorkflowDefinition]
2025-10-13 13:32:47 [reactor-http-nio-4] DEBUG org.springframework.web.HttpLogging - [34098d80-3] Decoded [WorkflowDefinition(id=test-workflow-001, name=端到端测试工作流, description=用于测试节点间输入输出映射的工作流, definition={" (truncated)...]
2025-10-13 13:32:47 [reactor-http-nio-4] INFO c.f.d.controller.WorkflowController - Creating workflow definition: 端到端测试工作流
2025-10-13 13:32:47 [reactor-http-nio-4] INFO c.f.devops.service.WorkflowService - Creating workflow definition: 端到端测试工作流
2025-10-13 13:32:47 [reactor-http-nio-4] INFO c.f.devops.service.WorkflowService - Workflow definition created successfully: 端到端测试工作流 (ID: test-workflow-001)
2025-10-13 13:32:47 [reactor-http-nio-4] INFO c.f.d.controller.WorkflowController - Workflow definition created successfully: 端到端测试工作流 (ID: test-workflow-001)
2025-10-13 13:32:47 [reactor-http-nio-4] DEBUG o.s.w.r.r.m.a.ResponseEntityResultHandler - [34098d80-3] Using 'application/json' given [*/*] and supported [application/json, application/*+json, application/x-ndjson, text/event-stream]
2025-10-13 13:32:47 [reactor-http-nio-4] DEBUG o.s.w.r.r.m.a.ResponseEntityResultHandler - [34098d80-3] 0..1 [com.flowable.devops.entity.WorkflowDefinition]
2025-10-13 13:32:47 [reactor-http-nio-4] DEBUG org.springframework.web.HttpLogging - [34098d80-3] Encoding [WorkflowDefinition(id=test-workflow-001, name=端到端测试工作流, description=用于测试节点间输入输出映射的工作流, definition={" (truncated)...]
2025-10-13 13:32:47 [reactor-http-nio-4] DEBUG o.s.w.s.a.HttpWebHandlerAdapter - [34098d80-3] Completed 200 OK
2025-10-13 13:34:08 [reactor-http-nio-5] DEBUG o.s.w.s.a.HttpWebHandlerAdapter - [440c12d6-4] HTTP POST "/api/workflows/test-workflow-001/activate"
2025-10-13 13:34:08 [reactor-http-nio-5] DEBUG o.s.w.r.r.m.a.RequestMappingHandlerMapping - [440c12d6-4] Mapped to com.flowable.devops.controller.WorkflowController#activateWorkflow(String)
2025-10-13 13:34:08 [reactor-http-nio-5] INFO c.f.d.controller.WorkflowController - Activating workflow definition: test-workflow-001
2025-10-13 13:34:08 [reactor-http-nio-5] INFO c.f.devops.service.WorkflowService - Activating workflow definition: test-workflow-001
2025-10-13 13:34:08 [reactor-http-nio-5] ERROR c.f.devops.service.WorkflowService - Failed to activate workflow definition: test-workflow-001
com.flowable.devops.service.WorkflowNotFoundException: Workflow definition does not exist: test-workflow-001
at com.flowable.devops.service.WorkflowService.lambda$getById$0(WorkflowService.java:140)
at java.base/java.util.Optional.orElseThrow(Optional.java:403)
at com.flowable.devops.service.WorkflowService.getById(WorkflowService.java:140)
at com.flowable.devops.service.WorkflowService.activate(WorkflowService.java:204)
at java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(DirectMethodHandleAccessor.java:103)
|
||||
at java.base/java.lang.reflect.Method.invoke(Method.java:580)
|
||||
at org.springframework.aop.support.AopUtils.invokeJoinpointUsingReflection(AopUtils.java:343)
|
||||
at org.springframework.aop.framework.ReflectiveMethodInvocation.invokeJoinpoint(ReflectiveMethodInvocation.java:196)
|
||||
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163)
|
||||
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.proceed(CglibAopProxy.java:751)
|
||||
at org.springframework.transaction.interceptor.TransactionInterceptor$1.proceedWithInvocation(TransactionInterceptor.java:123)
|
||||
at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:391)
|
||||
at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:119)
|
||||
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:184)
|
||||
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.proceed(CglibAopProxy.java:751)
|
||||
at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:703)
|
||||
at com.flowable.devops.service.WorkflowService$$SpringCGLIB$$0.activate(<generated>)
|
||||
at com.flowable.devops.controller.WorkflowController.lambda$activateWorkflow$14(WorkflowController.java:129)
|
||||
at reactor.core.publisher.MonoCallable$MonoCallableSubscription.request(MonoCallable.java:137)
|
||||
at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.request(FluxMapFuseable.java:171)
|
||||
at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2341)
|
||||
at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onSubscribe(FluxOnErrorResume.java:74)
|
||||
at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onSubscribe(FluxMapFuseable.java:96)
|
||||
at reactor.core.publisher.MonoCallable.subscribe(MonoCallable.java:48)
|
||||
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
|
||||
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:165)
|
||||
at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79)
|
||||
at reactor.core.publisher.FluxPeek$PeekSubscriber.onNext(FluxPeek.java:200)
|
||||
at reactor.core.publisher.FluxPeek$PeekSubscriber.onNext(FluxPeek.java:200)
|
||||
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.complete(MonoIgnoreThen.java:292)
|
||||
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.onNext(MonoIgnoreThen.java:187)
|
||||
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:158)
|
||||
at reactor.core.publisher.MonoZip$ZipCoordinator.signal(MonoZip.java:293)
|
||||
at reactor.core.publisher.MonoZip$ZipInner.onNext(MonoZip.java:474)
|
||||
at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.onNext(MonoPeekTerminal.java:180)
|
||||
at reactor.core.publisher.FluxDefaultIfEmpty$DefaultIfEmptySubscriber.onNext(FluxDefaultIfEmpty.java:122)
|
||||
at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onNext(FluxSwitchIfEmpty.java:74)
|
||||
at reactor.core.publisher.Operators$ScalarSubscription.request(Operators.java:2545)
|
||||
at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2341)
|
||||
at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.onSubscribe(Operators.java:2215)
|
||||
at reactor.core.publisher.FluxFlatMap.trySubscribeScalarMap(FluxFlatMap.java:192)
|
||||
at reactor.core.publisher.MonoFlatMap.subscribeOrReturn(MonoFlatMap.java:53)
|
||||
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:57)
|
||||
at reactor.core.publisher.MonoZip$ZipCoordinator.request(MonoZip.java:216)
|
||||
at reactor.core.publisher.MonoFlatMap$FlatMapMain.request(MonoFlatMap.java:194)
|
||||
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.onSubscribe(MonoIgnoreThen.java:134)
|
||||
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onSubscribe(MonoFlatMap.java:117)
|
||||
at reactor.core.publisher.MonoZip.subscribe(MonoZip.java:125)
|
||||
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
|
||||
at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:53)
|
||||
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.subscribeNext(MonoIgnoreThen.java:240)
|
||||
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.onComplete(MonoIgnoreThen.java:203)
|
||||
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onComplete(MonoFlatMap.java:189)
|
||||
at reactor.core.publisher.Operators.complete(Operators.java:137)
|
||||
at reactor.core.publisher.MonoZip.subscribe(MonoZip.java:121)
|
||||
at reactor.core.publisher.Mono.subscribe(Mono.java:4495)
|
||||
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.subscribeNext(MonoIgnoreThen.java:263)
|
||||
at reactor.core.publisher.MonoIgnoreThen.subscribe(MonoIgnoreThen.java:51)
|
||||
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
|
||||
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:165)
|
||||
at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79)
|
||||
at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onNext(FluxSwitchIfEmpty.java:74)
|
||||
at reactor.core.publisher.MonoNext$NextSubscriber.onNext(MonoNext.java:82)
|
||||
at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.innerNext(FluxConcatMapNoPrefetch.java:258)
|
||||
at reactor.core.publisher.FluxConcatMap$ConcatMapInner.onNext(FluxConcatMap.java:863)
|
||||
at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onNext(FluxMapFuseable.java:129)
|
||||
at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.onNext(MonoPeekTerminal.java:180)
|
||||
at reactor.core.publisher.Operators$ScalarSubscription.request(Operators.java:2545)
|
||||
at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.request(MonoPeekTerminal.java:139)
|
||||
at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.request(FluxMapFuseable.java:171)
|
||||
at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.request(Operators.java:2305)
|
||||
at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.request(FluxConcatMapNoPrefetch.java:338)
|
||||
at reactor.core.publisher.MonoNext$NextSubscriber.request(MonoNext.java:108)
|
||||
at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2341)
|
||||
at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.onSubscribe(Operators.java:2215)
|
||||
at reactor.core.publisher.MonoNext$NextSubscriber.onSubscribe(MonoNext.java:70)
|
||||
at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.onSubscribe(FluxConcatMapNoPrefetch.java:164)
|
||||
at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:201)
|
||||
at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:83)
|
||||
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
|
||||
at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:53)
|
||||
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
|
||||
at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:53)
|
||||
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
|
||||
at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:53)
|
||||
at reactor.core.publisher.Mono.subscribe(Mono.java:4495)
|
||||
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.subscribeNext(MonoIgnoreThen.java:263)
|
||||
at reactor.core.publisher.MonoIgnoreThen.subscribe(MonoIgnoreThen.java:51)
|
||||
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
|
||||
at reactor.core.publisher.MonoDeferContextual.subscribe(MonoDeferContextual.java:55)
|
||||
at reactor.netty.http.server.HttpServer$HttpServerHandle.onStateChange(HttpServer.java:1052)
|
||||
at reactor.netty.ReactorNetty$CompositeConnectionObserver.onStateChange(ReactorNetty.java:710)
|
||||
at reactor.netty.transport.ServerTransport$ChildObserver.onStateChange(ServerTransport.java:481)
|
||||
at reactor.netty.http.server.HttpServerOperations.onInboundNext(HttpServerOperations.java:650)
|
||||
at reactor.netty.channel.ChannelOperationsHandler.channelRead(ChannelOperationsHandler.java:114)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
|
||||
at reactor.netty.http.server.HttpTrafficHandler.channelRead(HttpTrafficHandler.java:238)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:442)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
|
||||
at io.netty.channel.CombinedChannelDuplexHandler$DelegatingChannelHandlerContext.fireChannelRead(CombinedChannelDuplexHandler.java:436)
|
||||
at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:346)
|
||||
at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:318)
|
||||
at io.netty.channel.CombinedChannelDuplexHandler.channelRead(CombinedChannelDuplexHandler.java:251)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:442)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
|
||||
at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:440)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
|
||||
at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)
|
||||
at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:166)
|
||||
at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:788)
|
||||
at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:724)
|
||||
at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:650)
|
||||
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:562)
|
||||
at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
|
||||
at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
|
||||
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
|
||||
at java.base/java.lang.Thread.run(Thread.java:1583)
|
||||
2025-10-13 13:34:08 [reactor-http-nio-5] ERROR c.f.d.controller.WorkflowController - Failed to activate workflow definition: test-workflow-001
com.flowable.devops.service.WorkflowServiceException: Failed to activate workflow definition: Workflow definition does not exist: test-workflow-001
at com.flowable.devops.service.WorkflowService.activate(WorkflowService.java:218)
at java.base/jdk.internal.reflect.DirectMethodHandleAccessor.invoke(DirectMethodHandleAccessor.java:103)
|
||||
at java.base/java.lang.reflect.Method.invoke(Method.java:580)
|
||||
at org.springframework.aop.support.AopUtils.invokeJoinpointUsingReflection(AopUtils.java:343)
|
||||
at org.springframework.aop.framework.ReflectiveMethodInvocation.invokeJoinpoint(ReflectiveMethodInvocation.java:196)
|
||||
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:163)
|
||||
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.proceed(CglibAopProxy.java:751)
|
||||
at org.springframework.transaction.interceptor.TransactionInterceptor$1.proceedWithInvocation(TransactionInterceptor.java:123)
|
||||
at org.springframework.transaction.interceptor.TransactionAspectSupport.invokeWithinTransaction(TransactionAspectSupport.java:391)
|
||||
at org.springframework.transaction.interceptor.TransactionInterceptor.invoke(TransactionInterceptor.java:119)
|
||||
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:184)
|
||||
at org.springframework.aop.framework.CglibAopProxy$CglibMethodInvocation.proceed(CglibAopProxy.java:751)
|
||||
at org.springframework.aop.framework.CglibAopProxy$DynamicAdvisedInterceptor.intercept(CglibAopProxy.java:703)
|
||||
at com.flowable.devops.service.WorkflowService$$SpringCGLIB$$0.activate(<generated>)
|
||||
at com.flowable.devops.controller.WorkflowController.lambda$activateWorkflow$14(WorkflowController.java:129)
|
||||
at reactor.core.publisher.MonoCallable$MonoCallableSubscription.request(MonoCallable.java:137)
|
||||
at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.request(FluxMapFuseable.java:171)
|
||||
at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2341)
|
||||
at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onSubscribe(FluxOnErrorResume.java:74)
|
||||
at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onSubscribe(FluxMapFuseable.java:96)
|
||||
at reactor.core.publisher.MonoCallable.subscribe(MonoCallable.java:48)
|
||||
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
|
||||
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:165)
|
||||
at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79)
|
||||
at reactor.core.publisher.FluxPeek$PeekSubscriber.onNext(FluxPeek.java:200)
|
||||
at reactor.core.publisher.FluxPeek$PeekSubscriber.onNext(FluxPeek.java:200)
|
||||
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.complete(MonoIgnoreThen.java:292)
|
||||
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.onNext(MonoIgnoreThen.java:187)
|
||||
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:158)
|
||||
at reactor.core.publisher.MonoZip$ZipCoordinator.signal(MonoZip.java:293)
|
||||
at reactor.core.publisher.MonoZip$ZipInner.onNext(MonoZip.java:474)
|
||||
at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.onNext(MonoPeekTerminal.java:180)
|
||||
at reactor.core.publisher.FluxDefaultIfEmpty$DefaultIfEmptySubscriber.onNext(FluxDefaultIfEmpty.java:122)
|
||||
at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onNext(FluxSwitchIfEmpty.java:74)
|
||||
at reactor.core.publisher.Operators$ScalarSubscription.request(Operators.java:2545)
|
||||
at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2341)
|
||||
at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.onSubscribe(Operators.java:2215)
|
||||
at reactor.core.publisher.FluxFlatMap.trySubscribeScalarMap(FluxFlatMap.java:192)
|
||||
at reactor.core.publisher.MonoFlatMap.subscribeOrReturn(MonoFlatMap.java:53)
|
||||
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:57)
|
||||
at reactor.core.publisher.MonoZip$ZipCoordinator.request(MonoZip.java:216)
|
||||
at reactor.core.publisher.MonoFlatMap$FlatMapMain.request(MonoFlatMap.java:194)
|
||||
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.onSubscribe(MonoIgnoreThen.java:134)
|
||||
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onSubscribe(MonoFlatMap.java:117)
|
||||
at reactor.core.publisher.MonoZip.subscribe(MonoZip.java:125)
|
||||
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
|
||||
at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:53)
|
||||
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.subscribeNext(MonoIgnoreThen.java:240)
|
||||
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.onComplete(MonoIgnoreThen.java:203)
|
||||
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onComplete(MonoFlatMap.java:189)
|
||||
at reactor.core.publisher.Operators.complete(Operators.java:137)
|
||||
at reactor.core.publisher.MonoZip.subscribe(MonoZip.java:121)
|
||||
at reactor.core.publisher.Mono.subscribe(Mono.java:4495)
|
||||
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.subscribeNext(MonoIgnoreThen.java:263)
|
||||
at reactor.core.publisher.MonoIgnoreThen.subscribe(MonoIgnoreThen.java:51)
|
||||
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
|
||||
at reactor.core.publisher.MonoFlatMap$FlatMapMain.onNext(MonoFlatMap.java:165)
|
||||
at reactor.core.publisher.FluxOnErrorResume$ResumeSubscriber.onNext(FluxOnErrorResume.java:79)
|
||||
at reactor.core.publisher.FluxSwitchIfEmpty$SwitchIfEmptySubscriber.onNext(FluxSwitchIfEmpty.java:74)
|
||||
at reactor.core.publisher.MonoNext$NextSubscriber.onNext(MonoNext.java:82)
|
||||
at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.innerNext(FluxConcatMapNoPrefetch.java:258)
|
||||
at reactor.core.publisher.FluxConcatMap$ConcatMapInner.onNext(FluxConcatMap.java:863)
|
||||
at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.onNext(FluxMapFuseable.java:129)
|
||||
at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.onNext(MonoPeekTerminal.java:180)
|
||||
at reactor.core.publisher.Operators$ScalarSubscription.request(Operators.java:2545)
|
||||
at reactor.core.publisher.MonoPeekTerminal$MonoTerminalPeekSubscriber.request(MonoPeekTerminal.java:139)
|
||||
at reactor.core.publisher.FluxMapFuseable$MapFuseableSubscriber.request(FluxMapFuseable.java:171)
|
||||
at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.request(Operators.java:2305)
|
||||
at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.request(FluxConcatMapNoPrefetch.java:338)
|
||||
at reactor.core.publisher.MonoNext$NextSubscriber.request(MonoNext.java:108)
|
||||
at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.set(Operators.java:2341)
|
||||
at reactor.core.publisher.Operators$MultiSubscriptionSubscriber.onSubscribe(Operators.java:2215)
|
||||
at reactor.core.publisher.MonoNext$NextSubscriber.onSubscribe(MonoNext.java:70)
|
||||
at reactor.core.publisher.FluxConcatMapNoPrefetch$FluxConcatMapNoPrefetchSubscriber.onSubscribe(FluxConcatMapNoPrefetch.java:164)
|
||||
at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:201)
|
||||
at reactor.core.publisher.FluxIterable.subscribe(FluxIterable.java:83)
|
||||
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
|
||||
at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:53)
|
||||
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
|
||||
at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:53)
|
||||
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
|
||||
at reactor.core.publisher.MonoDefer.subscribe(MonoDefer.java:53)
|
||||
at reactor.core.publisher.Mono.subscribe(Mono.java:4495)
|
||||
at reactor.core.publisher.MonoIgnoreThen$ThenIgnoreMain.subscribeNext(MonoIgnoreThen.java:263)
|
||||
at reactor.core.publisher.MonoIgnoreThen.subscribe(MonoIgnoreThen.java:51)
|
||||
at reactor.core.publisher.InternalMonoOperator.subscribe(InternalMonoOperator.java:64)
|
||||
at reactor.core.publisher.MonoDeferContextual.subscribe(MonoDeferContextual.java:55)
|
||||
at reactor.netty.http.server.HttpServer$HttpServerHandle.onStateChange(HttpServer.java:1052)
|
||||
at reactor.netty.ReactorNetty$CompositeConnectionObserver.onStateChange(ReactorNetty.java:710)
|
||||
at reactor.netty.transport.ServerTransport$ChildObserver.onStateChange(ServerTransport.java:481)
|
||||
at reactor.netty.http.server.HttpServerOperations.onInboundNext(HttpServerOperations.java:650)
|
||||
at reactor.netty.channel.ChannelOperationsHandler.channelRead(ChannelOperationsHandler.java:114)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:444)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
|
||||
at reactor.netty.http.server.HttpTrafficHandler.channelRead(HttpTrafficHandler.java:238)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:442)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
|
||||
at io.netty.channel.CombinedChannelDuplexHandler$DelegatingChannelHandlerContext.fireChannelRead(CombinedChannelDuplexHandler.java:436)
|
||||
at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:346)
|
||||
at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:318)
|
||||
at io.netty.channel.CombinedChannelDuplexHandler.channelRead(CombinedChannelDuplexHandler.java:251)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:442)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:412)
|
||||
at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:440)
|
||||
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:420)
|
||||
at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)
|
||||
at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:166)
|
||||
at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:788)
|
||||
at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:724)
|
||||
at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:650)
|
||||
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:562)
|
||||
at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:997)
|
||||
at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
|
||||
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
|
||||
at java.base/java.lang.Thread.run(Thread.java:1583)
|
||||
Caused by: com.flowable.devops.service.WorkflowNotFoundException: Workflow definition does not exist: test-workflow-001
at com.flowable.devops.service.WorkflowService.lambda$getById$0(WorkflowService.java:140)
at java.base/java.util.Optional.orElseThrow(Optional.java:403)
at com.flowable.devops.service.WorkflowService.getById(WorkflowService.java:140)
at com.flowable.devops.service.WorkflowService.activate(WorkflowService.java:204)
... 118 common frames omitted
2025-10-13 13:34:08 [reactor-http-nio-5] DEBUG o.s.w.s.a.HttpWebHandlerAdapter - [440c12d6-4] Completed 400 BAD_REQUEST
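The failure above follows the lookup pattern visible in the stack trace: WorkflowService.getById resolves an Optional with orElseThrow, and activate is called with an id the repository does not return. A minimal sketch of that pattern, with hypothetical repository and record types since the committed service code is not shown here:

// Hypothetical types; only the Optional.orElseThrow lookup pattern is taken from the trace above.
import java.util.Optional;

class WorkflowLookup {

    interface WorkflowRepository {
        Optional<WorkflowRecord> findById(String id);
    }

    record WorkflowRecord(String id, String name) {
    }

    static class WorkflowNotFoundException extends RuntimeException {
        WorkflowNotFoundException(String message) {
            super(message);
        }
    }

    static WorkflowRecord getById(WorkflowRepository repository, String id) {
        // Mirrors the logged behaviour: a missing id surfaces as WorkflowNotFoundException.
        return repository.findById(id)
                .orElseThrow(() -> new WorkflowNotFoundException("Workflow definition does not exist: " + id));
    }
}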
2025-10-13 13:35:52 [reactor-http-nio-6] DEBUG o.s.w.s.a.HttpWebHandlerAdapter - [987f294b-5] HTTP GET "/api/workflows"
2025-10-13 13:35:52 [reactor-http-nio-6] DEBUG o.s.w.r.r.m.a.RequestMappingHandlerMapping - [987f294b-5] Mapped to com.flowable.devops.controller.WorkflowController#getWorkflows(String)
2025-10-13 13:35:52 [reactor-http-nio-6] INFO c.f.d.controller.WorkflowController - Fetching workflow list, status filter: null
2025-10-13 13:35:52 [reactor-http-nio-6] INFO c.f.d.controller.WorkflowController - Returning 0 workflow definitions
2025-10-13 13:35:52 [reactor-http-nio-6] DEBUG o.s.w.r.r.m.a.ResponseEntityResultHandler - [987f294b-5] Using 'application/json' given [*/*] and supported [application/json, application/*+json, application/x-ndjson, text/event-stream]
2025-10-13 13:35:52 [reactor-http-nio-6] DEBUG o.s.w.r.r.m.a.ResponseEntityResultHandler - [987f294b-5] 0..1 [java.util.List<com.flowable.devops.entity.WorkflowDefinition>]
2025-10-13 13:35:52 [reactor-http-nio-6] DEBUG org.springframework.web.HttpLogging - [987f294b-5] Encoding [[]]
2025-10-13 13:35:52 [reactor-http-nio-6] DEBUG o.s.w.s.a.HttpWebHandlerAdapter - [987f294b-5] Completed 200 OK
2025-10-13 13:36:08 [reactor-http-nio-7] DEBUG o.s.w.s.a.HttpWebHandlerAdapter - [e148fa05-6] HTTP POST "/api/workflows"
2025-10-13 13:36:08 [reactor-http-nio-7] DEBUG o.s.w.r.r.m.a.RequestMappingHandlerMapping - [e148fa05-6] Mapped to com.flowable.devops.controller.WorkflowController#createWorkflow(WorkflowDefinition)
2025-10-13 13:36:08 [reactor-http-nio-7] DEBUG o.s.w.r.r.m.a.RequestBodyMethodArgumentResolver - [e148fa05-6] Content-Type:application/json
2025-10-13 13:36:08 [reactor-http-nio-7] DEBUG o.s.w.r.r.m.a.RequestBodyMethodArgumentResolver - [e148fa05-6] 0..1 [com.flowable.devops.entity.WorkflowDefinition]
2025-10-13 13:36:08 [reactor-http-nio-7] DEBUG org.springframework.web.HttpLogging - [e148fa05-6] Decoded [WorkflowDefinition(id=test-workflow-001, name=端到端测试工作流, description=用于测试节点间输入输出映射的工作流, definition={" (truncated)...]
2025-10-13 13:36:08 [reactor-http-nio-7] INFO c.f.d.controller.WorkflowController - Creating workflow definition: 端到端测试工作流
2025-10-13 13:36:08 [reactor-http-nio-7] INFO c.f.devops.service.WorkflowService - Creating workflow definition: 端到端测试工作流
2025-10-13 13:36:08 [reactor-http-nio-7] INFO c.f.devops.service.WorkflowService - Workflow definition created successfully: 端到端测试工作流 (ID: test-workflow-001)
2025-10-13 13:36:08 [reactor-http-nio-7] INFO c.f.d.controller.WorkflowController - Workflow definition created successfully: 端到端测试工作流 (ID: test-workflow-001)
2025-10-13 13:36:08 [reactor-http-nio-7] DEBUG o.s.w.r.r.m.a.ResponseEntityResultHandler - [e148fa05-6] Using 'application/json' given [*/*] and supported [application/json, application/*+json, application/x-ndjson, text/event-stream]
2025-10-13 13:36:08 [reactor-http-nio-7] DEBUG o.s.w.r.r.m.a.ResponseEntityResultHandler - [e148fa05-6] 0..1 [com.flowable.devops.entity.WorkflowDefinition]
2025-10-13 13:36:08 [reactor-http-nio-7] DEBUG org.springframework.web.HttpLogging - [e148fa05-6] Encoding [WorkflowDefinition(id=test-workflow-001, name=端到端测试工作流, description=用于测试节点间输入输出映射的工作流, definition={" (truncated)...]
2025-10-13 13:36:08 [reactor-http-nio-7] DEBUG o.s.w.s.a.HttpWebHandlerAdapter - [e148fa05-6] Completed 200 OK
2025-10-13 13:37:41 [reactor-http-nio-8] DEBUG o.s.w.s.a.HttpWebHandlerAdapter - [85de915c-7] HTTP POST "/api/workflows"
2025-10-13 13:37:41 [reactor-http-nio-8] DEBUG o.s.w.r.r.m.a.RequestMappingHandlerMapping - [85de915c-7] Mapped to com.flowable.devops.controller.WorkflowController#createWorkflow(WorkflowDefinition)
2025-10-13 13:37:41 [reactor-http-nio-8] DEBUG o.s.w.r.r.m.a.RequestBodyMethodArgumentResolver - [85de915c-7] Content-Type:application/json
2025-10-13 13:37:41 [reactor-http-nio-8] DEBUG o.s.w.r.r.m.a.RequestBodyMethodArgumentResolver - [85de915c-7] 0..1 [com.flowable.devops.entity.WorkflowDefinition]
2025-10-13 13:37:41 [reactor-http-nio-8] DEBUG org.springframework.web.HttpLogging - [85de915c-7] Decoded [WorkflowDefinition(id=simple-test-001, name=简单端到端测试, description=简单测试工作流, definition={"id":"simple-t (truncated)...]
2025-10-13 13:37:41 [reactor-http-nio-8] INFO c.f.d.controller.WorkflowController - Creating workflow definition: 简单端到端测试
2025-10-13 13:37:41 [reactor-http-nio-8] INFO c.f.devops.service.WorkflowService - Creating workflow definition: 简单端到端测试
2025-10-13 13:37:41 [reactor-http-nio-8] INFO c.f.devops.service.WorkflowService - Workflow definition created successfully: 简单端到端测试 (ID: simple-test-001)
2025-10-13 13:37:41 [reactor-http-nio-8] INFO c.f.d.controller.WorkflowController - Workflow definition created successfully: 简单端到端测试 (ID: simple-test-001)
2025-10-13 13:37:41 [reactor-http-nio-8] DEBUG o.s.w.r.r.m.a.ResponseEntityResultHandler - [85de915c-7] Using 'application/json' given [*/*] and supported [application/json, application/*+json, application/x-ndjson, text/event-stream]
2025-10-13 13:37:41 [reactor-http-nio-8] DEBUG o.s.w.r.r.m.a.ResponseEntityResultHandler - [85de915c-7] 0..1 [com.flowable.devops.entity.WorkflowDefinition]
2025-10-13 13:37:41 [reactor-http-nio-8] DEBUG org.springframework.web.HttpLogging - [85de915c-7] Encoding [WorkflowDefinition(id=simple-test-001, name=简单端到端测试, description=简单测试工作流, definition={"id":"simple-t (truncated)...]
2025-10-13 13:37:41 [reactor-http-nio-8] DEBUG o.s.w.s.a.HttpWebHandlerAdapter - [85de915c-7] Completed 200 OK
2025-10-13 13:38:51 [reactor-http-nio-9] DEBUG o.s.w.s.a.HttpWebHandlerAdapter - [f1ef7a56-8] HTTP POST "/api/workflows"
|
||||
2025-10-13 13:38:51 [reactor-http-nio-9] DEBUG o.s.w.r.r.m.a.RequestMappingHandlerMapping - [f1ef7a56-8] Mapped to com.flowable.devops.controller.WorkflowController#createWorkflow(WorkflowDefinition)
|
||||
2025-10-13 13:38:51 [reactor-http-nio-9] DEBUG o.s.w.r.r.m.a.RequestBodyMethodArgumentResolver - [f1ef7a56-8] Content-Type:application/json
|
||||
2025-10-13 13:38:51 [reactor-http-nio-9] DEBUG o.s.w.r.r.m.a.RequestBodyMethodArgumentResolver - [f1ef7a56-8] 0..1 [com.flowable.devops.entity.WorkflowDefinition]
|
||||
2025-10-13 13:38:51 [reactor-http-nio-9] DEBUG org.springframework.web.HttpLogging - [f1ef7a56-8] Decoded [WorkflowDefinition(id=debug-test, name=调试测试, description=调试测试工作流, definition={"id":"debug-test","nam (truncated)...]
|
||||
2025-10-13 13:38:51 [reactor-http-nio-9] INFO c.f.d.controller.WorkflowController - 创建工作流定义: 调试测试
|
||||
2025-10-13 13:38:51 [reactor-http-nio-9] INFO c.f.devops.service.WorkflowService - 创建工作流定义: 调试测试
|
||||
2025-10-13 13:38:51 [reactor-http-nio-9] INFO c.f.devops.service.WorkflowService - 工作流定义创建成功: 调试测试 (ID: debug-test)
|
||||
2025-10-13 13:38:51 [reactor-http-nio-9] INFO c.f.d.controller.WorkflowController - 工作流定义创建成功: 调试测试 (ID: debug-test)
|
||||
2025-10-13 13:38:51 [reactor-http-nio-9] DEBUG o.s.w.r.r.m.a.ResponseEntityResultHandler - [f1ef7a56-8] Using 'application/json' given [*/*] and supported [application/json, application/*+json, application/x-ndjson, text/event-stream]
|
||||
2025-10-13 13:38:51 [reactor-http-nio-9] DEBUG o.s.w.r.r.m.a.ResponseEntityResultHandler - [f1ef7a56-8] 0..1 [com.flowable.devops.entity.WorkflowDefinition]
|
||||
2025-10-13 13:38:51 [reactor-http-nio-9] DEBUG org.springframework.web.HttpLogging - [f1ef7a56-8] Encoding [WorkflowDefinition(id=debug-test, name=调试测试, description=调试测试工作流, definition={"id":"debug-test","nam (truncated)...]
|
||||
2025-10-13 13:38:51 [reactor-http-nio-9] DEBUG o.s.w.s.a.HttpWebHandlerAdapter - [f1ef7a56-8] Completed 200 OK
|
||||
2025-10-13 13:47:52 [SpringApplicationShutdownHook] INFO o.s.o.j.LocalContainerEntityManagerFactoryBean - Closing JPA EntityManagerFactory for persistence unit 'default'
|
||||
2025-10-13 13:47:52 [SpringApplicationShutdownHook] INFO com.zaxxer.hikari.HikariDataSource - FlowableDevOpsHikariCP - Shutdown initiated...
|
||||
2025-10-13 13:47:52 [SpringApplicationShutdownHook] INFO com.zaxxer.hikari.HikariDataSource - FlowableDevOpsHikariCP - Shutdown completed.
|
||||
39
backend/debug-workflow.json
Normal file
@@ -0,0 +1,39 @@
{
  "id": "debug-test",
  "name": "调试测试",
  "description": "调试测试工作流",
  "status": "DRAFT",
  "definition": {
    "id": "debug-test",
    "name": "调试测试",
    "version": "1.0",
    "nodes": [
      {
        "id": "start",
        "type": "start",
        "name": "开始",
        "position": { "x": 100, "y": 200 },
        "config": {},
        "inputMapping": {},
        "outputSchema": { "type": "object" }
      },
      {
        "id": "end",
        "type": "end",
        "name": "结束",
        "position": { "x": 300, "y": 200 },
        "config": {},
        "inputMapping": {},
        "outputSchema": { "type": "object" }
      }
    ],
    "edges": [
      {
        "id": "start-to-end",
        "source": "start",
        "target": "end",
        "condition": null
      }
    ]
  }
}
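The 13:38:51 log entries above show this definition being accepted through HTTP POST /api/workflows and handled by WorkflowController#createWorkflow. Below is a minimal, hedged sketch of reproducing that request with Spring WebFlux's WebClient; the class name, the http://localhost:8080 base URL, and the working directory assumed for the file path are illustrations, not part of this commit.

import org.springframework.http.MediaType;
import org.springframework.web.reactive.function.client.WebClient;

import java.nio.file.Files;
import java.nio.file.Path;

public class CreateWorkflowExample {

    public static void main(String[] args) throws Exception {
        // Read the committed definition and send it as the JSON request body.
        String json = Files.readString(Path.of("backend/debug-workflow.json"));

        String response = WebClient.create("http://localhost:8080") // assumed base URL
                .post()
                .uri("/api/workflows")
                .contentType(MediaType.APPLICATION_JSON)
                .bodyValue(json)
                .retrieve()
                .bodyToMono(String.class)
                .block();

        System.out.println(response);
    }
}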
71
backend/fix-maven.sh
Executable file
@@ -0,0 +1,71 @@
|
||||
#!/bin/bash
|
||||
|
||||
echo "修复Maven编译问题..."
|
||||
|
||||
# 删除本地Maven缓存中的问题依赖
|
||||
echo "清理本地Maven仓库缓存..."
|
||||
rm -rf ~/.m2/repository/org/springframework/boot/spring-boot-maven-plugin/3.1.5/
|
||||
|
||||
# 创建临时的settings.xml文件,强制使用Maven Central仓库
|
||||
mkdir -p ~/.m2
|
||||
cat > ~/.m2/settings.xml << EOF
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
|
||||
http://maven.apache.org/xsd/settings-1.0.0.xsd">
|
||||
|
||||
<mirrors>
|
||||
<mirror>
|
||||
<id>central</id>
|
||||
<mirrorOf>*</mirrorOf>
|
||||
<name>Maven Central Repository</name>
|
||||
<url>https://repo1.maven.org/maven2</url>
|
||||
</mirror>
|
||||
</mirrors>
|
||||
|
||||
<profiles>
|
||||
<profile>
|
||||
<id>central-repo</id>
|
||||
<repositories>
|
||||
<repository>
|
||||
<id>central</id>
|
||||
<name>Central Repository</name>
|
||||
<url>https://repo1.maven.org/maven2</url>
|
||||
<layout>default</layout>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
</repository>
|
||||
</repositories>
|
||||
<pluginRepositories>
|
||||
<pluginRepository>
|
||||
<id>central</id>
|
||||
<name>Central Repository</name>
|
||||
<url>https://repo1.maven.org/maven2</url>
|
||||
<layout>default</layout>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
<releases>
|
||||
<updatePolicy>never</updatePolicy>
|
||||
</releases>
|
||||
</pluginRepository>
|
||||
</pluginRepositories>
|
||||
</profile>
|
||||
</profiles>
|
||||
|
||||
<activeProfiles>
|
||||
<activeProfile>central-repo</activeProfile>
|
||||
</activeProfiles>
|
||||
</settings>
|
||||
EOF
|
||||
|
||||
echo "已创建临时Maven settings.xml文件"
|
||||
echo "尝试重新编译项目..."
|
||||
|
||||
# 强制更新依赖并编译
|
||||
mvn -U clean compile -DskipTests
|
||||
|
||||
echo "编译完成!"
|
||||
echo "如果编译成功,可以删除临时配置文件:rm ~/.m2/settings.xml"
|
||||
33428
backend/logs/flowable-devops.log
Normal file
File diff suppressed because it is too large
179
backend/pom.xml
Normal file
@@ -0,0 +1,179 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project xmlns="http://maven.apache.org/POM/4.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<parent>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-parent</artifactId>
|
||||
<version>3.1.5</version>
|
||||
<relativePath/>
|
||||
</parent>
|
||||
|
||||
<groupId>com.flowable</groupId>
|
||||
<artifactId>flowable-devops-backend</artifactId>
|
||||
<version>1.0-SNAPSHOT</version>
|
||||
<name>Flowable DevOps Backend</name>
|
||||
<description>Backend application for visual workflow platform</description>
|
||||
|
||||
<!-- 添加仓库配置,确保能够从Maven Central下载依赖 -->
|
||||
<repositories>
|
||||
<repository>
|
||||
<id>central</id>
|
||||
<name>Maven Central</name>
|
||||
<url>https://repo1.maven.org/maven2</url>
|
||||
<releases>
|
||||
<enabled>true</enabled>
|
||||
</releases>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
</repository>
|
||||
</repositories>
|
||||
|
||||
<pluginRepositories>
|
||||
<pluginRepository>
|
||||
<id>central</id>
|
||||
<name>Maven Central</name>
|
||||
<url>https://repo1.maven.org/maven2</url>
|
||||
<releases>
|
||||
<enabled>true</enabled>
|
||||
</releases>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
</pluginRepository>
|
||||
</pluginRepositories>
|
||||
|
||||
<properties>
|
||||
<java.version>17</java.version>
|
||||
<flowable.version>7.0.1</flowable.version>
|
||||
<jakarta.el.version>5.0.0</jakarta.el.version>
|
||||
<juel.version>2.2.7</juel.version>
|
||||
</properties>
|
||||
|
||||
<dependencies>
|
||||
<!-- Spring Boot Starters -->
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-webflux</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-data-jpa</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-data-redis</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-validation</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-actuator</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- Flowable Dependencies -->
|
||||
<dependency>
|
||||
<groupId>org.flowable</groupId>
|
||||
<artifactId>flowable-spring-boot-starter-process</artifactId>
|
||||
<version>${flowable.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.flowable</groupId>
|
||||
<artifactId>flowable-spring-boot-starter-actuator</artifactId>
|
||||
<version>${flowable.version}</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Jakarta EL (JUEL) for Expression Engine -->
|
||||
<dependency>
|
||||
<groupId>jakarta.el</groupId>
|
||||
<artifactId>jakarta.el-api</artifactId>
|
||||
<version>${jakarta.el.version}</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>de.odysseus.juel</groupId>
|
||||
<artifactId>juel-impl</artifactId>
|
||||
<version>${juel.version}</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Database -->
|
||||
<dependency>
|
||||
<groupId>mysql</groupId>
|
||||
<artifactId>mysql-connector-java</artifactId>
|
||||
<version>8.0.33</version>
|
||||
<scope>runtime</scope>
|
||||
</dependency>
|
||||
|
||||
<!-- JSON Processing -->
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.core</groupId>
|
||||
<artifactId>jackson-databind</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.fasterxml.jackson.datatype</groupId>
|
||||
<artifactId>jackson-datatype-jsr310</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- Utilities -->
|
||||
<dependency>
|
||||
<groupId>org.apache.commons</groupId>
|
||||
<artifactId>commons-lang3</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.projectlombok</groupId>
|
||||
<artifactId>lombok</artifactId>
|
||||
<optional>true</optional>
|
||||
</dependency>
|
||||
|
||||
<!-- Test Dependencies -->
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.projectreactor</groupId>
|
||||
<artifactId>reactor-test</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.testcontainers</groupId>
|
||||
<artifactId>mysql</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.testcontainers</groupId>
|
||||
<artifactId>junit-jupiter</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-maven-plugin</artifactId>
|
||||
<configuration>
|
||||
<excludes>
|
||||
<exclude>
|
||||
<groupId>org.projectlombok</groupId>
|
||||
<artifactId>lombok</artifactId>
|
||||
</exclude>
|
||||
</excludes>
|
||||
</configuration>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<configuration>
|
||||
<source>${java.version}</source>
|
||||
<target>${java.version}</target>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</project>
|
||||
51
backend/settings.xml
Normal file
@@ -0,0 +1,51 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0
|
||||
http://maven.apache.org/xsd/settings-1.0.0.xsd">
|
||||
|
||||
<mirrors>
|
||||
<mirror>
|
||||
<id>central-mirror</id>
|
||||
<name>Maven Central</name>
|
||||
<url>https://repo1.maven.org/maven2</url>
|
||||
<mirrorOf>*</mirrorOf>
|
||||
</mirror>
|
||||
</mirrors>
|
||||
|
||||
<profiles>
|
||||
<profile>
|
||||
<id>default</id>
|
||||
<repositories>
|
||||
<repository>
|
||||
<id>central</id>
|
||||
<name>Maven Central</name>
|
||||
<url>https://repo1.maven.org/maven2</url>
|
||||
<releases>
|
||||
<enabled>true</enabled>
|
||||
</releases>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
</repository>
|
||||
</repositories>
|
||||
<pluginRepositories>
|
||||
<pluginRepository>
|
||||
<id>central</id>
|
||||
<name>Maven Central</name>
|
||||
<url>https://repo1.maven.org/maven2</url>
|
||||
<releases>
|
||||
<enabled>true</enabled>
|
||||
</releases>
|
||||
<snapshots>
|
||||
<enabled>false</enabled>
|
||||
</snapshots>
|
||||
</pluginRepository>
|
||||
</pluginRepositories>
|
||||
</profile>
|
||||
</profiles>
|
||||
|
||||
<activeProfiles>
|
||||
<activeProfile>default</activeProfile>
|
||||
</activeProfiles>
|
||||
</settings>
|
||||
79
backend/simple-workflow.json
Normal file
@@ -0,0 +1,79 @@
|
||||
{
|
||||
"id": "simple-test-001",
|
||||
"name": "简单端到端测试",
|
||||
"description": "简单测试工作流",
|
||||
"status": "DRAFT",
|
||||
"definition": {
|
||||
"id": "simple-test-001",
|
||||
"name": "简单端到端测试",
|
||||
"version": "1.0",
|
||||
"nodes": [
|
||||
{
|
||||
"id": "start",
|
||||
"type": "start",
|
||||
"name": "开始",
|
||||
"position": { "x": 100, "y": 200 },
|
||||
"config": {},
|
||||
"inputMapping": {},
|
||||
"outputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"message": { "type": "string" }
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "httpNode",
|
||||
"type": "http_request",
|
||||
"name": "HTTP测试节点",
|
||||
"position": { "x": 300, "y": 200 },
|
||||
"config": {
|
||||
"url": "https://httpbin.org/get",
|
||||
"method": "GET",
|
||||
"headers": {}
|
||||
},
|
||||
"inputMapping": {
|
||||
"url": "https://httpbin.org/get",
|
||||
"method": "GET"
|
||||
},
|
||||
"outputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"httpStatusCode": { "type": "integer" },
|
||||
"responseBody": { "type": "object" }
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "end",
|
||||
"type": "end",
|
||||
"name": "结束",
|
||||
"position": { "x": 500, "y": 200 },
|
||||
"config": {},
|
||||
"inputMapping": {
|
||||
"result": "${httpNode.output}"
|
||||
},
|
||||
"outputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"completed": { "type": "boolean" }
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"edges": [
|
||||
{
|
||||
"id": "start-to-http",
|
||||
"source": "start",
|
||||
"target": "httpNode",
|
||||
"condition": null
|
||||
},
|
||||
{
|
||||
"id": "http-to-end",
|
||||
"source": "httpNode",
|
||||
"target": "end",
|
||||
"condition": null
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
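The 13:37:41 log entries above correspond to this simple-test-001 definition being created. Since backend/pom.xml declares spring-boot-starter-test and reactor-test, the same call can also be exercised from a WebTestClient integration test; the sketch below is an assumption-laden illustration (test class name, file location relative to the backend module), not code from this commit.

import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.web.reactive.server.WebTestClient;

import java.nio.file.Files;
import java.nio.file.Path;

@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
@AutoConfigureWebTestClient
class SimpleWorkflowCreateIT {

    @Autowired
    private WebTestClient webTestClient;

    @Test
    void createsSimpleWorkflowDefinition() throws Exception {
        // Assumes the test runs with the backend module as the working directory.
        String json = Files.readString(Path.of("simple-workflow.json"));

        webTestClient.post()
                .uri("/api/workflows")
                .contentType(MediaType.APPLICATION_JSON)
                .bodyValue(json)
                .exchange()
                .expectStatus().isOk();
    }
}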
@@ -0,0 +1,50 @@
|
||||
package com.flowable.devops;
|
||||
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.boot.SpringApplication;
|
||||
import org.springframework.boot.autoconfigure.SpringBootApplication;
|
||||
import org.springframework.boot.context.properties.ConfigurationPropertiesScan;
|
||||
import org.springframework.cache.annotation.EnableCaching;
|
||||
import org.springframework.data.jpa.repository.config.EnableJpaAuditing;
|
||||
import org.springframework.scheduling.annotation.EnableAsync;
|
||||
import org.springframework.scheduling.annotation.EnableScheduling;
|
||||
import org.springframework.transaction.annotation.EnableTransactionManagement;
|
||||
import org.springframework.web.reactive.config.EnableWebFlux;
|
||||
|
||||
/**
|
||||
* Flowable DevOps Backend 主应用程序
|
||||
*
|
||||
* 基于 Spring Boot 3 + WebFlux + Flowable 7 的可视化工作流平台后端
|
||||
*
|
||||
* 核心特性:
|
||||
* - 使用同步执行策略,审批节点自然暂停等待
|
||||
* - 表达式引擎统一使用 Jakarta EL (JUEL)
|
||||
* - 支持MySQL 8数据库和Redis缓存
|
||||
* - 提供完整的REST API接口
|
||||
* - 支持节点类型动态注册和管理
|
||||
*/
|
||||
@Slf4j
|
||||
@SpringBootApplication
|
||||
@EnableWebFlux
|
||||
@EnableJpaAuditing
|
||||
@EnableTransactionManagement
|
||||
@EnableAsync
|
||||
@EnableScheduling
|
||||
@EnableCaching
|
||||
@ConfigurationPropertiesScan
|
||||
public class FlowableDevopsApplication {
|
||||
|
||||
public static void main(String[] args) {
|
||||
log.info("启动 Flowable DevOps Backend 应用程序...");
|
||||
log.info("基于 Spring Boot 3 + WebFlux + Flowable 7");
|
||||
log.info("可视化工作流平台 - 后端服务");
|
||||
|
||||
try {
|
||||
SpringApplication.run(FlowableDevopsApplication.class, args);
|
||||
log.info("Flowable DevOps Backend 应用程序启动成功!");
|
||||
} catch (Exception e) {
|
||||
log.error("Flowable DevOps Backend 应用程序启动失败!", e);
|
||||
System.exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,628 @@
|
||||
package com.flowable.devops.config;
|
||||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.flowable.devops.entity.NodeType;
|
||||
import com.flowable.devops.service.NodeTypeService;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.boot.CommandLineRunner;
|
||||
import org.springframework.core.annotation.Order;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* 数据初始化组件
|
||||
*
|
||||
* 在应用启动时加载默认节点类型和示例数据
|
||||
*/
|
||||
@Slf4j
|
||||
@Component
|
||||
@Order(1) // 确保在其他初始化组件之前执行
|
||||
public class DataInitializer implements CommandLineRunner {
|
||||
|
||||
@Autowired
|
||||
private NodeTypeService nodeTypeService;
|
||||
|
||||
@Value("${flowable-devops.node-types.load-defaults:true}")
|
||||
private boolean loadDefaultNodeTypes;
|
||||
|
||||
private final ObjectMapper objectMapper = new ObjectMapper();
|
||||
|
||||
@Override
|
||||
public void run(String... args) throws Exception {
|
||||
log.info("开始数据初始化...");
|
||||
|
||||
if (loadDefaultNodeTypes) {
|
||||
initializeDefaultNodeTypes();
|
||||
}
|
||||
|
||||
log.info("数据初始化完成");
|
||||
}
|
||||
|
||||
/**
|
||||
* 初始化默认节点类型
|
||||
*/
|
||||
private void initializeDefaultNodeTypes() {
|
||||
log.info("加载默认节点类型...");
|
||||
|
||||
try {
|
||||
List<NodeType> defaultNodeTypes = createDefaultNodeTypes();
|
||||
|
||||
for (NodeType nodeType : defaultNodeTypes) {
|
||||
try {
|
||||
// 检查节点类型是否已存在
|
||||
try {
|
||||
nodeTypeService.getNodeType(nodeType.getId());
|
||||
log.debug("节点类型已存在,跳过: {}", nodeType.getId());
|
||||
continue;
|
||||
} catch (Exception e) {
|
||||
// 节点类型不存在,继续创建
|
||||
}
|
||||
|
||||
NodeType created = nodeTypeService.createNodeType(nodeType);
|
||||
log.info("创建默认节点类型: {} ({})", created.getName(), created.getId());
|
||||
} catch (Exception e) {
|
||||
log.warn("创建默认节点类型失败: {} - {}", nodeType.getId(), e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
// 刷新注册表
|
||||
nodeTypeService.refreshRegistry();
|
||||
log.info("默认节点类型加载完成,共 {} 个节点类型", defaultNodeTypes.size());
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("加载默认节点类型失败", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建默认节点类型列表
|
||||
*/
|
||||
private List<NodeType> createDefaultNodeTypes() throws Exception {
|
||||
return List.of(
|
||||
// 1. 开始节点
|
||||
createStartNodeType(),
|
||||
|
||||
// 2. 结束节点
|
||||
createEndNodeType(),
|
||||
|
||||
// 3. 脚本任务节点
|
||||
createScriptTaskNodeType(),
|
||||
|
||||
// 4. HTTP请求节点
|
||||
createHttpRequestNodeType(),
|
||||
|
||||
// 5. 用户任务节点(审批节点)
|
||||
createUserTaskNodeType(),
|
||||
|
||||
// 6. 条件分支节点
|
||||
createExclusiveGatewayNodeType(),
|
||||
|
||||
// 7. 并行分支节点
|
||||
createParallelGatewayNodeType(),
|
||||
|
||||
// 8. 数据转换节点
|
||||
createDataTransformNodeType()
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* 开始节点
|
||||
*/
|
||||
private NodeType createStartNodeType() throws Exception {
|
||||
NodeType nodeType = new NodeType();
|
||||
nodeType.setId("start");
|
||||
nodeType.setName("开始");
|
||||
nodeType.setDisplayName("开始");
|
||||
nodeType.setDescription("工作流开始节点");
|
||||
nodeType.setCategory(NodeType.NodeCategory.OTHER);
|
||||
nodeType.setIcon("play-circle");
|
||||
nodeType.setImplementationClass("com.flowable.devops.workflow.node.StartNode");
|
||||
nodeType.setEnabled(true);
|
||||
nodeType.setDisplayOrder(10);
|
||||
|
||||
// 字段定义 - 开始节点通常不需要配置字段
|
||||
String fieldsJson = "[]";
|
||||
nodeType.setFields(objectMapper.readTree(fieldsJson));
|
||||
|
||||
// 输出模式
|
||||
String outputSchemaJson = """
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"startTime": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"description": "开始时间"
|
||||
},
|
||||
"initiator": {
|
||||
"type": "string",
|
||||
"description": "发起人"
|
||||
}
|
||||
}
|
||||
}
|
||||
""";
|
||||
nodeType.setOutputSchema(objectMapper.readTree(outputSchemaJson));
|
||||
|
||||
return nodeType;
|
||||
}
|
||||
|
||||
/**
|
||||
* 结束节点
|
||||
*/
|
||||
private NodeType createEndNodeType() throws Exception {
|
||||
NodeType nodeType = new NodeType();
|
||||
nodeType.setId("end");
|
||||
nodeType.setName("结束");
|
||||
nodeType.setDisplayName("结束");
|
||||
nodeType.setDescription("工作流结束节点");
|
||||
nodeType.setCategory(NodeType.NodeCategory.OTHER);
|
||||
nodeType.setIcon("stop-circle");
|
||||
nodeType.setImplementationClass("com.flowable.devops.workflow.node.EndNode");
|
||||
nodeType.setEnabled(true);
|
||||
nodeType.setDisplayOrder(20);
|
||||
|
||||
// 字段定义
|
||||
String fieldsJson = """
|
||||
[
|
||||
{
|
||||
"name": "result",
|
||||
"label": "结束结果",
|
||||
"type": "select",
|
||||
"required": false,
|
||||
"defaultValue": "completed",
|
||||
"options": [
|
||||
{"label": "完成", "value": "completed"},
|
||||
{"label": "取消", "value": "cancelled"},
|
||||
{"label": "中止", "value": "aborted"}
|
||||
]
|
||||
}
|
||||
]
|
||||
""";
|
||||
nodeType.setFields(objectMapper.readTree(fieldsJson));
|
||||
|
||||
// 输出模式
|
||||
String outputSchemaJson = """
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"endTime": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"description": "结束时间"
|
||||
},
|
||||
"result": {
|
||||
"type": "string",
|
||||
"description": "执行结果"
|
||||
}
|
||||
}
|
||||
}
|
||||
""";
|
||||
nodeType.setOutputSchema(objectMapper.readTree(outputSchemaJson));
|
||||
|
||||
return nodeType;
|
||||
}
|
||||
|
||||
/**
|
||||
* 脚本任务节点
|
||||
*/
|
||||
private NodeType createScriptTaskNodeType() throws Exception {
|
||||
NodeType nodeType = new NodeType();
|
||||
nodeType.setId("script-task");
|
||||
nodeType.setName("脚本任务");
|
||||
nodeType.setDisplayName("脚本任务");
|
||||
nodeType.setDescription("执行脚本代码的任务节点");
|
||||
nodeType.setCategory(NodeType.NodeCategory.LOGIC);
|
||||
nodeType.setIcon("code");
|
||||
nodeType.setImplementationClass("com.flowable.devops.workflow.node.ScriptTaskNode");
|
||||
nodeType.setEnabled(true);
|
||||
nodeType.setDisplayOrder(100);
|
||||
|
||||
// 字段定义
|
||||
String fieldsJson = """
|
||||
[
|
||||
{
|
||||
"name": "script",
|
||||
"label": "脚本内容",
|
||||
"type": "textarea",
|
||||
"required": true,
|
||||
"placeholder": "输入要执行的脚本代码",
|
||||
"rows": 5
|
||||
},
|
||||
{
|
||||
"name": "language",
|
||||
"label": "脚本语言",
|
||||
"type": "select",
|
||||
"required": true,
|
||||
"defaultValue": "javascript",
|
||||
"options": [
|
||||
{"label": "JavaScript", "value": "javascript"},
|
||||
{"label": "Groovy", "value": "groovy"},
|
||||
{"label": "Python", "value": "python"}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "timeout",
|
||||
"label": "超时时间(秒)",
|
||||
"type": "number",
|
||||
"required": false,
|
||||
"defaultValue": 30,
|
||||
"min": 1,
|
||||
"max": 300
|
||||
}
|
||||
]
|
||||
""";
|
||||
nodeType.setFields(objectMapper.readTree(fieldsJson));
|
||||
|
||||
// 输出模式
|
||||
String outputSchemaJson = """
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"result": {
|
||||
"type": "object",
|
||||
"description": "脚本执行结果"
|
||||
},
|
||||
"executionTime": {
|
||||
"type": "number",
|
||||
"description": "执行耗时(毫秒)"
|
||||
},
|
||||
"status": {
|
||||
"type": "string",
|
||||
"enum": ["success", "error"],
|
||||
"description": "执行状态"
|
||||
}
|
||||
}
|
||||
}
|
||||
""";
|
||||
nodeType.setOutputSchema(objectMapper.readTree(outputSchemaJson));
|
||||
|
||||
return nodeType;
|
||||
}
|
||||
|
||||
/**
|
||||
* HTTP请求节点
|
||||
*/
|
||||
private NodeType createHttpRequestNodeType() throws Exception {
|
||||
NodeType nodeType = new NodeType();
|
||||
nodeType.setId("http-request");
|
||||
nodeType.setName("HTTP请求");
|
||||
nodeType.setDescription("发送HTTP请求的任务节点");
|
||||
nodeType.setCategory(NodeType.NodeCategory.API);
|
||||
nodeType.setIcon("globe");
|
||||
nodeType.setEnabled(true);
|
||||
nodeType.setDisplayOrder(200);
|
||||
|
||||
// 字段定义
|
||||
String fieldsJson = """
|
||||
[
|
||||
{
|
||||
"name": "url",
|
||||
"label": "请求URL",
|
||||
"type": "text",
|
||||
"required": true,
|
||||
"placeholder": "http://example.com/api"
|
||||
},
|
||||
{
|
||||
"name": "method",
|
||||
"label": "请求方法",
|
||||
"type": "select",
|
||||
"required": true,
|
||||
"defaultValue": "GET",
|
||||
"options": [
|
||||
{"label": "GET", "value": "GET"},
|
||||
{"label": "POST", "value": "POST"},
|
||||
{"label": "PUT", "value": "PUT"},
|
||||
{"label": "DELETE", "value": "DELETE"}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "headers",
|
||||
"label": "请求头",
|
||||
"type": "keyvalue",
|
||||
"required": false,
|
||||
"description": "HTTP请求头"
|
||||
},
|
||||
{
|
||||
"name": "body",
|
||||
"label": "请求体",
|
||||
"type": "textarea",
|
||||
"required": false,
|
||||
"placeholder": "JSON格式的请求体内容"
|
||||
},
|
||||
{
|
||||
"name": "timeout",
|
||||
"label": "超时时间(秒)",
|
||||
"type": "number",
|
||||
"required": false,
|
||||
"defaultValue": 30
|
||||
}
|
||||
]
|
||||
""";
|
||||
nodeType.setFields(objectMapper.readTree(fieldsJson));
|
||||
|
||||
// 输出模式
|
||||
String outputSchemaJson = """
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"status": {
|
||||
"type": "number",
|
||||
"description": "HTTP状态码"
|
||||
},
|
||||
"headers": {
|
||||
"type": "object",
|
||||
"description": "响应头"
|
||||
},
|
||||
"body": {
|
||||
"type": "object",
|
||||
"description": "响应体"
|
||||
},
|
||||
"responseTime": {
|
||||
"type": "number",
|
||||
"description": "响应时间(毫秒)"
|
||||
}
|
||||
}
|
||||
}
|
||||
""";
|
||||
nodeType.setOutputSchema(objectMapper.readTree(outputSchemaJson));
|
||||
|
||||
return nodeType;
|
||||
}
|
||||
|
||||
/**
|
||||
* 用户任务节点(审批节点)
|
||||
*/
|
||||
private NodeType createUserTaskNodeType() throws Exception {
|
||||
NodeType nodeType = new NodeType();
|
||||
nodeType.setId("user-task");
|
||||
nodeType.setName("用户任务");
|
||||
nodeType.setDescription("需要人工处理的审批任务节点");
|
||||
nodeType.setCategory(NodeType.NodeCategory.OTHER);
|
||||
nodeType.setIcon("user-check");
|
||||
nodeType.setEnabled(true);
|
||||
nodeType.setDisplayOrder(300);
|
||||
|
||||
// 字段定义
|
||||
String fieldsJson = """
|
||||
[
|
||||
{
|
||||
"name": "assignee",
|
||||
"label": "分配给",
|
||||
"type": "text",
|
||||
"required": false,
|
||||
"placeholder": "用户ID或表达式"
|
||||
},
|
||||
{
|
||||
"name": "candidateUsers",
|
||||
"label": "候选用户",
|
||||
"type": "text",
|
||||
"required": false,
|
||||
"placeholder": "用户ID列表,逗号分隔"
|
||||
},
|
||||
{
|
||||
"name": "candidateGroups",
|
||||
"label": "候选组",
|
||||
"type": "text",
|
||||
"required": false,
|
||||
"placeholder": "组ID列表,逗号分隔"
|
||||
},
|
||||
{
|
||||
"name": "dueDate",
|
||||
"label": "截止时间",
|
||||
"type": "text",
|
||||
"required": false,
|
||||
"placeholder": "时间表达式或具体时间"
|
||||
},
|
||||
{
|
||||
"name": "priority",
|
||||
"label": "优先级",
|
||||
"type": "number",
|
||||
"required": false,
|
||||
"defaultValue": 50,
|
||||
"min": 1,
|
||||
"max": 100
|
||||
},
|
||||
{
|
||||
"name": "formKey",
|
||||
"label": "表单Key",
|
||||
"type": "text",
|
||||
"required": false,
|
||||
"placeholder": "关联的表单标识"
|
||||
}
|
||||
]
|
||||
""";
|
||||
nodeType.setFields(objectMapper.readTree(fieldsJson));
|
||||
|
||||
// 输出模式
|
||||
String outputSchemaJson = """
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"approved": {
|
||||
"type": "boolean",
|
||||
"description": "是否通过审批"
|
||||
},
|
||||
"approver": {
|
||||
"type": "string",
|
||||
"description": "审批人"
|
||||
},
|
||||
"approvalTime": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"description": "审批时间"
|
||||
},
|
||||
"comment": {
|
||||
"type": "string",
|
||||
"description": "审批意见"
|
||||
}
|
||||
}
|
||||
}
|
||||
""";
|
||||
nodeType.setOutputSchema(objectMapper.readTree(outputSchemaJson));
|
||||
|
||||
return nodeType;
|
||||
}
|
||||
|
||||
/**
|
||||
* 条件分支节点
|
||||
*/
|
||||
private NodeType createExclusiveGatewayNodeType() throws Exception {
|
||||
NodeType nodeType = new NodeType();
|
||||
nodeType.setId("exclusive-gateway");
|
||||
nodeType.setName("条件分支");
|
||||
nodeType.setDescription("基于条件的排他分支节点");
|
||||
nodeType.setCategory(NodeType.NodeCategory.LOGIC);
|
||||
nodeType.setIcon("git-branch");
|
||||
nodeType.setEnabled(true);
|
||||
nodeType.setDisplayOrder(400);
|
||||
|
||||
// 字段定义
|
||||
String fieldsJson = """
|
||||
[
|
||||
{
|
||||
"name": "name",
|
||||
"label": "分支名称",
|
||||
"type": "text",
|
||||
"required": false,
|
||||
"placeholder": "分支节点的名称"
|
||||
}
|
||||
]
|
||||
""";
|
||||
nodeType.setFields(objectMapper.readTree(fieldsJson));
|
||||
|
||||
// 输出模式
|
||||
String outputSchemaJson = """
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"selectedPath": {
|
||||
"type": "string",
|
||||
"description": "选择的分支路径"
|
||||
},
|
||||
"evaluationTime": {
|
||||
"type": "number",
|
||||
"description": "条件评估耗时(毫秒)"
|
||||
}
|
||||
}
|
||||
}
|
||||
""";
|
||||
nodeType.setOutputSchema(objectMapper.readTree(outputSchemaJson));
|
||||
|
||||
return nodeType;
|
||||
}
|
||||
|
||||
/**
|
||||
* 并行分支节点
|
||||
*/
|
||||
private NodeType createParallelGatewayNodeType() throws Exception {
|
||||
NodeType nodeType = new NodeType();
|
||||
nodeType.setId("parallel-gateway");
|
||||
nodeType.setName("并行分支");
|
||||
nodeType.setDescription("并行执行多个分支的节点");
|
||||
nodeType.setCategory(NodeType.NodeCategory.LOGIC);
|
||||
nodeType.setIcon("share");
|
||||
nodeType.setEnabled(true);
|
||||
nodeType.setDisplayOrder(410);
|
||||
|
||||
// 字段定义
|
||||
String fieldsJson = """
|
||||
[
|
||||
{
|
||||
"name": "name",
|
||||
"label": "并行节点名称",
|
||||
"type": "text",
|
||||
"required": false,
|
||||
"placeholder": "并行节点的名称"
|
||||
}
|
||||
]
|
||||
""";
|
||||
nodeType.setFields(objectMapper.readTree(fieldsJson));
|
||||
|
||||
// 输出模式
|
||||
String outputSchemaJson = """
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"parallelPaths": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": "并行分支路径"
|
||||
},
|
||||
"forkTime": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"description": "分支创建时间"
|
||||
}
|
||||
}
|
||||
}
|
||||
""";
|
||||
nodeType.setOutputSchema(objectMapper.readTree(outputSchemaJson));
|
||||
|
||||
return nodeType;
|
||||
}
|
||||
|
||||
/**
|
||||
* 数据转换节点
|
||||
*/
|
||||
private NodeType createDataTransformNodeType() throws Exception {
|
||||
NodeType nodeType = new NodeType();
|
||||
nodeType.setId("data-transform");
|
||||
nodeType.setName("数据转换");
|
||||
nodeType.setDescription("数据映射和转换节点");
|
||||
nodeType.setCategory(NodeType.NodeCategory.TRANSFORM);
|
||||
nodeType.setIcon("refresh-cw");
|
||||
nodeType.setEnabled(true);
|
||||
nodeType.setDisplayOrder(500);
|
||||
|
||||
// 字段定义
|
||||
String fieldsJson = """
|
||||
[
|
||||
{
|
||||
"name": "mappings",
|
||||
"label": "字段映射",
|
||||
"type": "keyvalue",
|
||||
"required": true,
|
||||
"description": "输入字段到输出字段的映射关系"
|
||||
},
|
||||
{
|
||||
"name": "expression",
|
||||
"label": "转换表达式",
|
||||
"type": "textarea",
|
||||
"required": false,
|
||||
"placeholder": "JUEL表达式,用于复杂数据转换",
|
||||
"rows": 3
|
||||
}
|
||||
]
|
||||
""";
|
||||
nodeType.setFields(objectMapper.readTree(fieldsJson));
|
||||
|
||||
// 输出模式
|
||||
String outputSchemaJson = """
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"transformedData": {
|
||||
"type": "object",
|
||||
"description": "转换后的数据"
|
||||
},
|
||||
"transformedFields": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": "被转换的字段列表"
|
||||
}
|
||||
}
|
||||
}
|
||||
""";
|
||||
nodeType.setOutputSchema(objectMapper.readTree(outputSchemaJson));
|
||||
|
||||
return nodeType;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,67 @@
|
||||
package com.flowable.devops.config;
|
||||
|
||||
import org.flowable.common.engine.impl.history.HistoryLevel;
|
||||
import org.flowable.spring.SpringProcessEngineConfiguration;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.Primary;
|
||||
import org.springframework.core.env.Environment;
|
||||
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
|
||||
import javax.sql.DataSource;
|
||||
|
||||
/**
|
||||
* Flowable 流程引擎配置
|
||||
*
|
||||
* 关键配置说明:
|
||||
* 1. 禁用全局异步执行器,采用同步执行策略
|
||||
* 2. 审批节点(UserTask)自然暂停等待,审批完成后继续
|
||||
* 3. ServiceTask 统一使用 delegateExpression 指向 genericNodeExecutor
|
||||
*/
|
||||
@Configuration
|
||||
public class FlowableConfig {
|
||||
|
||||
/**
|
||||
* 配置事务管理器 - Flowable必需
|
||||
*/
|
||||
@Bean
|
||||
public PlatformTransactionManager transactionManager(DataSource dataSource) {
|
||||
return new DataSourceTransactionManager(dataSource);
|
||||
}
|
||||
|
||||
/**
|
||||
* 配置 Flowable 流程引擎
|
||||
* 确保与 application.yml 中的配置保持一致
|
||||
*/
|
||||
@Bean
|
||||
@Primary
|
||||
public SpringProcessEngineConfiguration processEngineConfiguration(
|
||||
DataSource dataSource,
|
||||
PlatformTransactionManager transactionManager,
|
||||
Environment environment) {
|
||||
SpringProcessEngineConfiguration config = new SpringProcessEngineConfiguration();
|
||||
|
||||
// 数据源和事务管理器配置
|
||||
config.setDataSource(dataSource);
|
||||
config.setTransactionManager(transactionManager);
|
||||
|
||||
// 数据库配置 - 根据环境自动设置
|
||||
String databaseSchemaUpdate = environment.getProperty("flowable.database-schema-update", "true");
|
||||
config.setDatabaseSchemaUpdate(databaseSchemaUpdate);
|
||||
|
||||
// 统一使用MySQL数据库
|
||||
config.setDatabaseType("mysql");
|
||||
|
||||
// 禁用异步执行器 - 关键配置!
|
||||
config.setAsyncExecutorActivate(false);
|
||||
|
||||
// 历史记录级别
|
||||
config.setHistoryLevel(HistoryLevel.FULL);
|
||||
|
||||
// 部署资源路径
|
||||
config.setDeploymentName("flowable-devops-processes");
|
||||
|
||||
return config;
|
||||
}
|
||||
}
|
||||
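The Javadoc above states that every ServiceTask uses a delegateExpression pointing at a bean named genericNodeExecutor, but that bean is not part of this hunk. A hedged sketch of what such a delegate could look like follows; the class body is illustrative only and does not claim to match the project's actual implementation.

import lombok.extern.slf4j.Slf4j;
import org.flowable.engine.delegate.DelegateExecution;
import org.flowable.engine.delegate.JavaDelegate;
import org.springframework.stereotype.Component;

@Slf4j
@Component("genericNodeExecutor")
public class GenericNodeExecutor implements JavaDelegate {

    @Override
    public void execute(DelegateExecution execution) {
        // Runs synchronously because the async executor is disabled in FlowableConfig.
        String activityId = execution.getCurrentActivityId();
        log.info("Executing node {} in process instance {}", activityId, execution.getProcessInstanceId());
        // A real implementation would look up the node type configured for this activity
        // and dispatch to its executor (illustrative comment only).
    }
}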
@@ -0,0 +1,26 @@
|
||||
package com.flowable.devops.config;
|
||||
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.web.reactive.config.CorsRegistry;
|
||||
import org.springframework.web.reactive.config.EnableWebFlux;
|
||||
import org.springframework.web.reactive.config.WebFluxConfigurer;
|
||||
|
||||
/**
|
||||
* WebFlux 配置
|
||||
*
|
||||
* 配置跨域访问,支持前端开发服务器访问
|
||||
*/
|
||||
@Configuration
|
||||
@EnableWebFlux
|
||||
public class WebFluxConfig implements WebFluxConfigurer {
|
||||
|
||||
@Override
|
||||
public void addCorsMappings(CorsRegistry registry) {
|
||||
registry.addMapping("/api/**")
|
||||
.allowedOriginPatterns("*") // 开发环境允许所有域名
|
||||
.allowedMethods("GET", "POST", "PUT", "DELETE", "OPTIONS")
|
||||
.allowedHeaders("*")
|
||||
.allowCredentials(false) // 与通配符模式不兼容时设为false
|
||||
.maxAge(3600);
|
||||
}
|
||||
}
|
||||
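One quick way to confirm that the mapping above answers CORS preflight requests is to send an OPTIONS request carrying an Origin header. The sketch below uses WebClient; the backend port 8080 and the frontend dev origin http://localhost:5173 are assumptions.

import org.springframework.http.ResponseEntity;
import org.springframework.web.reactive.function.client.WebClient;

public class CorsPreflightCheck {

    public static void main(String[] args) {
        ResponseEntity<Void> response = WebClient.create("http://localhost:8080") // assumed backend port
                .options()
                .uri("/api/workflows")
                .header("Origin", "http://localhost:5173")                        // assumed frontend origin
                .header("Access-Control-Request-Method", "POST")
                .retrieve()
                .toBodilessEntity()
                .block();

        System.out.println("Access-Control-Allow-Origin: "
                + response.getHeaders().getFirst("Access-Control-Allow-Origin"));
    }
}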
@@ -0,0 +1,328 @@
|
||||
package com.flowable.devops.controller;
|
||||
|
||||
import com.flowable.devops.service.*;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.http.HttpStatus;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.validation.BindException;
|
||||
import org.springframework.validation.FieldError;
|
||||
import org.springframework.web.bind.MethodArgumentNotValidException;
|
||||
import org.springframework.web.bind.annotation.ExceptionHandler;
|
||||
import org.springframework.web.bind.annotation.RestControllerAdvice;
|
||||
import org.springframework.web.method.annotation.MethodArgumentTypeMismatchException;
|
||||
|
||||
import jakarta.validation.ConstraintViolation;
|
||||
import jakarta.validation.ConstraintViolationException;
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* 全局异常处理器
|
||||
*
|
||||
* 统一处理各种异常并返回标准的错误响应格式
|
||||
*/
|
||||
@Slf4j
|
||||
@RestControllerAdvice
|
||||
public class GlobalExceptionHandler {
|
||||
|
||||
/**
|
||||
* 处理工作流未找到异常
|
||||
*/
|
||||
@ExceptionHandler(WorkflowNotFoundException.class)
|
||||
public ResponseEntity<ErrorResponse> handleWorkflowNotFoundException(WorkflowNotFoundException e) {
|
||||
log.warn("工作流未找到: {}", e.getMessage());
|
||||
ErrorResponse error = ErrorResponse.builder()
|
||||
.timestamp(LocalDateTime.now())
|
||||
.status(HttpStatus.NOT_FOUND.value())
|
||||
.error("Workflow Not Found")
|
||||
.message(e.getMessage())
|
||||
.build();
|
||||
return ResponseEntity.status(HttpStatus.NOT_FOUND).body(error);
|
||||
}
|
||||
|
||||
/**
|
||||
* 处理工作流服务异常
|
||||
*/
|
||||
@ExceptionHandler(WorkflowServiceException.class)
|
||||
public ResponseEntity<ErrorResponse> handleWorkflowServiceException(WorkflowServiceException e) {
|
||||
log.error("工作流服务异常: {}", e.getMessage(), e);
|
||||
ErrorResponse error = ErrorResponse.builder()
|
||||
.timestamp(LocalDateTime.now())
|
||||
.status(HttpStatus.BAD_REQUEST.value())
|
||||
.error("Workflow Service Error")
|
||||
.message(e.getMessage())
|
||||
.build();
|
||||
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(error);
|
||||
}
|
||||
|
||||
/**
|
||||
* 处理节点类型未找到异常
|
||||
*/
|
||||
@ExceptionHandler(NodeTypeNotFoundException.class)
|
||||
public ResponseEntity<ErrorResponse> handleNodeTypeNotFoundException(NodeTypeNotFoundException e) {
|
||||
log.warn("节点类型未找到: {}", e.getMessage());
|
||||
ErrorResponse error = ErrorResponse.builder()
|
||||
.timestamp(LocalDateTime.now())
|
||||
.status(HttpStatus.NOT_FOUND.value())
|
||||
.error("Node Type Not Found")
|
||||
.message(e.getMessage())
|
||||
.build();
|
||||
return ResponseEntity.status(HttpStatus.NOT_FOUND).body(error);
|
||||
}
|
||||
|
||||
/**
|
||||
* 处理节点类型服务异常
|
||||
*/
|
||||
@ExceptionHandler(NodeTypeServiceException.class)
|
||||
public ResponseEntity<ErrorResponse> handleNodeTypeServiceException(NodeTypeServiceException e) {
|
||||
log.error("节点类型服务异常: {}", e.getMessage(), e);
|
||||
ErrorResponse error = ErrorResponse.builder()
|
||||
.timestamp(LocalDateTime.now())
|
||||
.status(HttpStatus.BAD_REQUEST.value())
|
||||
.error("Node Type Service Error")
|
||||
.message(e.getMessage())
|
||||
.build();
|
||||
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(error);
|
||||
}
|
||||
|
||||
/**
|
||||
* 处理任务未找到异常
|
||||
*/
|
||||
@ExceptionHandler(TaskNotFoundException.class)
|
||||
public ResponseEntity<ErrorResponse> handleTaskNotFoundException(TaskNotFoundException e) {
|
||||
log.warn("任务未找到: {}", e.getMessage());
|
||||
ErrorResponse error = ErrorResponse.builder()
|
||||
.timestamp(LocalDateTime.now())
|
||||
.status(HttpStatus.NOT_FOUND.value())
|
||||
.error("Task Not Found")
|
||||
.message(e.getMessage())
|
||||
.build();
|
||||
return ResponseEntity.status(HttpStatus.NOT_FOUND).body(error);
|
||||
}
|
||||
|
||||
/**
|
||||
* 处理任务服务异常
|
||||
*/
|
||||
@ExceptionHandler(TaskServiceException.class)
|
||||
public ResponseEntity<ErrorResponse> handleTaskServiceException(TaskServiceException e) {
|
||||
log.error("任务服务异常: {}", e.getMessage(), e);
|
||||
ErrorResponse error = ErrorResponse.builder()
|
||||
.timestamp(LocalDateTime.now())
|
||||
.status(HttpStatus.BAD_REQUEST.value())
|
||||
.error("Task Service Error")
|
||||
.message(e.getMessage())
|
||||
.build();
|
||||
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(error);
|
||||
}
|
||||
|
||||
/**
|
||||
* 处理参数校验异常 - @Valid
|
||||
*/
|
||||
@ExceptionHandler(MethodArgumentNotValidException.class)
|
||||
public ResponseEntity<ErrorResponse> handleValidationException(MethodArgumentNotValidException e) {
|
||||
log.warn("参数校验失败: {}", e.getMessage());
|
||||
|
||||
Map<String, String> errors = new HashMap<>();
|
||||
e.getBindingResult().getAllErrors().forEach(error -> {
|
||||
String fieldName = ((FieldError) error).getField();
|
||||
String errorMessage = error.getDefaultMessage();
|
||||
errors.put(fieldName, errorMessage);
|
||||
});
|
||||
|
||||
ErrorResponse error = ErrorResponse.builder()
|
||||
.timestamp(LocalDateTime.now())
|
||||
.status(HttpStatus.BAD_REQUEST.value())
|
||||
.error("Validation Failed")
|
||||
.message("请求参数校验失败")
|
||||
.details(errors)
|
||||
.build();
|
||||
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(error);
|
||||
}
|
||||
|
||||
/**
|
||||
* 处理绑定异常
|
||||
*/
|
||||
@ExceptionHandler(BindException.class)
|
||||
public ResponseEntity<ErrorResponse> handleBindException(BindException e) {
|
||||
log.warn("参数绑定失败: {}", e.getMessage());
|
||||
|
||||
Map<String, String> errors = new HashMap<>();
|
||||
e.getBindingResult().getAllErrors().forEach(error -> {
|
||||
String fieldName = ((FieldError) error).getField();
|
||||
String errorMessage = error.getDefaultMessage();
|
||||
errors.put(fieldName, errorMessage);
|
||||
});
|
||||
|
||||
ErrorResponse error = ErrorResponse.builder()
|
||||
.timestamp(LocalDateTime.now())
|
||||
.status(HttpStatus.BAD_REQUEST.value())
|
||||
.error("Bind Error")
|
||||
.message("请求参数绑定失败")
|
||||
.details(errors)
|
||||
.build();
|
||||
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(error);
|
||||
}
|
||||
|
||||
/**
|
||||
* 处理约束违反异常
|
||||
*/
|
||||
@ExceptionHandler(ConstraintViolationException.class)
|
||||
public ResponseEntity<ErrorResponse> handleConstraintViolationException(ConstraintViolationException e) {
|
||||
log.warn("约束违反: {}", e.getMessage());
|
||||
|
||||
Map<String, String> errors = new HashMap<>();
|
||||
for (ConstraintViolation<?> violation : e.getConstraintViolations()) {
|
||||
String fieldName = violation.getPropertyPath().toString();
|
||||
String errorMessage = violation.getMessage();
|
||||
errors.put(fieldName, errorMessage);
|
||||
}
|
||||
|
||||
ErrorResponse error = ErrorResponse.builder()
|
||||
.timestamp(LocalDateTime.now())
|
||||
.status(HttpStatus.BAD_REQUEST.value())
|
||||
.error("Constraint Violation")
|
||||
.message("约束条件违反")
|
||||
.details(errors)
|
||||
.build();
|
||||
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(error);
|
||||
}
|
||||
|
||||
/**
|
||||
* 处理方法参数类型不匹配异常
|
||||
*/
|
||||
@ExceptionHandler(MethodArgumentTypeMismatchException.class)
|
||||
public ResponseEntity<ErrorResponse> handleTypeMismatchException(MethodArgumentTypeMismatchException e) {
|
||||
log.warn("参数类型不匹配: {} = {}", e.getName(), e.getValue());
|
||||
|
||||
String message = String.format("参数 '%s' 的值 '%s' 无法转换为类型 %s",
|
||||
e.getName(), e.getValue(), e.getRequiredType().getSimpleName());
|
||||
|
||||
ErrorResponse error = ErrorResponse.builder()
|
||||
.timestamp(LocalDateTime.now())
|
||||
.status(HttpStatus.BAD_REQUEST.value())
|
||||
.error("Type Mismatch")
|
||||
.message(message)
|
||||
.build();
|
||||
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(error);
|
||||
}
|
||||
|
||||
/**
|
||||
* 处理非法参数异常
|
||||
*/
|
||||
@ExceptionHandler(IllegalArgumentException.class)
|
||||
public ResponseEntity<ErrorResponse> handleIllegalArgumentException(IllegalArgumentException e) {
|
||||
log.warn("非法参数异常: {}", e.getMessage());
|
||||
ErrorResponse error = ErrorResponse.builder()
|
||||
.timestamp(LocalDateTime.now())
|
||||
.status(HttpStatus.BAD_REQUEST.value())
|
||||
.error("Illegal Argument")
|
||||
.message(e.getMessage())
|
||||
.build();
|
||||
return ResponseEntity.status(HttpStatus.BAD_REQUEST).body(error);
|
||||
}
|
||||
|
||||
/**
|
||||
* 处理非法状态异常
|
||||
*/
|
||||
@ExceptionHandler(IllegalStateException.class)
|
||||
public ResponseEntity<ErrorResponse> handleIllegalStateException(IllegalStateException e) {
|
||||
log.error("非法状态异常: {}", e.getMessage(), e);
|
||||
ErrorResponse error = ErrorResponse.builder()
|
||||
.timestamp(LocalDateTime.now())
|
||||
.status(HttpStatus.CONFLICT.value())
|
||||
.error("Illegal State")
|
||||
.message(e.getMessage())
|
||||
.build();
|
||||
return ResponseEntity.status(HttpStatus.CONFLICT).body(error);
|
||||
}
|
||||
|
||||
/**
|
||||
* 处理运行时异常
|
||||
*/
|
||||
@ExceptionHandler(RuntimeException.class)
|
||||
public ResponseEntity<ErrorResponse> handleRuntimeException(RuntimeException e) {
|
||||
log.error("运行时异常: {}", e.getMessage(), e);
|
||||
ErrorResponse error = ErrorResponse.builder()
|
||||
.timestamp(LocalDateTime.now())
|
||||
.status(HttpStatus.INTERNAL_SERVER_ERROR.value())
|
||||
.error("Runtime Error")
|
||||
.message("系统内部错误,请联系管理员")
|
||||
.build();
|
||||
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(error);
|
||||
}
|
||||
|
||||
/**
|
||||
* 处理其他所有异常
|
||||
*/
|
||||
@ExceptionHandler(Exception.class)
|
||||
public ResponseEntity<ErrorResponse> handleException(Exception e) {
|
||||
log.error("未知异常: {}", e.getMessage(), e);
|
||||
ErrorResponse error = ErrorResponse.builder()
|
||||
.timestamp(LocalDateTime.now())
|
||||
.status(HttpStatus.INTERNAL_SERVER_ERROR.value())
|
||||
.error("Internal Server Error")
|
||||
.message("系统内部错误,请联系管理员")
|
||||
.build();
|
||||
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(error);
|
||||
}
|
||||
|
||||
/**
|
||||
* 标准错误响应格式
|
||||
*/
|
||||
public static class ErrorResponse {
|
||||
private LocalDateTime timestamp;
|
||||
private int status;
|
||||
private String error;
|
||||
private String message;
|
||||
private Map<String, String> details;
|
||||
|
||||
// 构造器
|
||||
private ErrorResponse() {}
|
||||
|
||||
public static ErrorResponseBuilder builder() {
|
||||
return new ErrorResponseBuilder();
|
||||
}
|
||||
|
||||
// Getters
|
||||
public LocalDateTime getTimestamp() { return timestamp; }
|
||||
public int getStatus() { return status; }
|
||||
public String getError() { return error; }
|
||||
public String getMessage() { return message; }
|
||||
public Map<String, String> getDetails() { return details; }
|
||||
|
||||
// Builder
|
||||
public static class ErrorResponseBuilder {
|
||||
private ErrorResponse errorResponse = new ErrorResponse();
|
||||
|
||||
public ErrorResponseBuilder timestamp(LocalDateTime timestamp) {
|
||||
errorResponse.timestamp = timestamp;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ErrorResponseBuilder status(int status) {
|
||||
errorResponse.status = status;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ErrorResponseBuilder error(String error) {
|
||||
errorResponse.error = error;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ErrorResponseBuilder message(String message) {
|
||||
errorResponse.message = message;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ErrorResponseBuilder details(Map<String, String> details) {
|
||||
errorResponse.details = details;
|
||||
return this;
|
||||
}
|
||||
|
||||
public ErrorResponse build() {
|
||||
return errorResponse;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,269 @@
|
||||
package com.flowable.devops.controller;
|
||||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.flowable.devops.entity.NodeType;
|
||||
import com.flowable.devops.service.NodeTypeService;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import jakarta.validation.Valid;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* 节点类型REST API控制器
|
||||
*
|
||||
* 提供节点类型管理的HTTP API接口
|
||||
*/
|
||||
@Slf4j
|
||||
@RestController
|
||||
@RequestMapping("/api/node-types")
|
||||
@CrossOrigin(origins = "*")
|
||||
public class NodeTypeController {
|
||||
|
||||
@Autowired
|
||||
private NodeTypeService nodeTypeService;
|
||||
|
||||
/**
|
||||
* 获取所有节点类型
|
||||
*/
|
||||
@GetMapping
|
||||
public Mono<ResponseEntity<List<NodeType>>> getAllNodeTypes() {
|
||||
log.info("获取所有节点类型");
|
||||
|
||||
return Mono.fromCallable(() -> nodeTypeService.getAllNodeTypes())
|
||||
.map(nodeTypes -> {
|
||||
log.info("返回 {} 个节点类型", nodeTypes.size());
|
||||
return ResponseEntity.ok(nodeTypes);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("获取节点类型列表失败", e);
|
||||
return Mono.just(ResponseEntity.internalServerError().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 根据分类获取节点类型
|
||||
*/
|
||||
@GetMapping("/category/{category}")
|
||||
public Mono<ResponseEntity<List<NodeType>>> getNodeTypesByCategory(@PathVariable String category) {
|
||||
log.info("获取分类节点类型: {}", category);
|
||||
|
||||
return Mono.fromCallable(() -> nodeTypeService.getNodeTypesByCategory(category))
|
||||
.map(nodeTypes -> {
|
||||
log.info("分类 {} 返回 {} 个节点类型", category, nodeTypes.size());
|
||||
return ResponseEntity.ok(nodeTypes);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("获取分类节点类型失败: {}", category, e);
|
||||
return Mono.just(ResponseEntity.internalServerError().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 根据ID获取节点类型
|
||||
*/
|
||||
@GetMapping("/{id}")
|
||||
public Mono<ResponseEntity<NodeType>> getNodeType(@PathVariable String id) {
|
||||
log.info("获取节点类型: {}", id);
|
||||
|
||||
return Mono.fromCallable(() -> nodeTypeService.getNodeType(id))
|
||||
.map(ResponseEntity::ok)
|
||||
.onErrorResume(e -> {
|
||||
log.error("获取节点类型失败: {}", id, e);
|
||||
return Mono.just(ResponseEntity.notFound().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建节点类型
|
||||
*/
|
||||
@PostMapping
|
||||
public Mono<ResponseEntity<NodeType>> createNodeType(@Valid @RequestBody NodeType nodeType) {
|
||||
log.info("创建节点类型: {}", nodeType.getId());
|
||||
|
||||
return Mono.fromCallable(() -> nodeTypeService.createNodeType(nodeType))
|
||||
.map(created -> {
|
||||
log.info("节点类型创建成功: {}", created.getId());
|
||||
return ResponseEntity.ok(created);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("创建节点类型失败: {}", nodeType.getId(), e);
|
||||
return Mono.just(ResponseEntity.badRequest().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 更新节点类型
|
||||
*/
|
||||
@PutMapping("/{id}")
|
||||
public Mono<ResponseEntity<NodeType>> updateNodeType(
|
||||
@PathVariable String id,
|
||||
@Valid @RequestBody NodeType nodeType) {
|
||||
log.info("更新节点类型: {}", id);
|
||||
|
||||
return Mono.fromCallable(() -> nodeTypeService.updateNodeType(id, nodeType))
|
||||
.map(updated -> {
|
||||
log.info("节点类型更新成功: {}", id);
|
||||
return ResponseEntity.ok(updated);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("更新节点类型失败: {}", id, e);
|
||||
return Mono.just(ResponseEntity.badRequest().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 删除节点类型
|
||||
*/
|
||||
@DeleteMapping("/{id}")
|
||||
public Mono<ResponseEntity<Void>> deleteNodeType(@PathVariable String id) {
|
||||
log.info("删除节点类型: {}", id);
|
||||
|
||||
return Mono.fromRunnable(() -> nodeTypeService.deleteNodeType(id))
|
||||
.then(Mono.fromCallable(() -> {
|
||||
log.info("节点类型删除成功: {}", id);
|
||||
return ResponseEntity.ok().<Void>build();
|
||||
}))
|
||||
.onErrorResume(e -> {
|
||||
log.error("删除节点类型失败: {}", id, e);
|
||||
return Mono.just(ResponseEntity.badRequest().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取节点类型分类列表
|
||||
*/
|
||||
@GetMapping("/categories")
|
||||
public Mono<ResponseEntity<List<String>>> getCategories() {
|
||||
log.info("获取节点类型分类列表");
|
||||
|
||||
return Mono.fromCallable(() -> nodeTypeService.getCategories())
|
||||
.map(categories -> {
|
||||
log.info("返回 {} 个分类", categories.size());
|
||||
return ResponseEntity.ok(categories);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("获取分类列表失败", e);
|
||||
return Mono.just(ResponseEntity.internalServerError().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取节点类型输出模式
|
||||
*/
|
||||
@GetMapping("/{id}/output-schema")
|
||||
public Mono<ResponseEntity<JsonNode>> getOutputSchema(@PathVariable String id) {
|
||||
log.info("获取节点类型输出模式: {}", id);
|
||||
|
||||
return Mono.fromCallable(() -> nodeTypeService.getOutputSchema(id))
|
||||
.map(schema -> {
|
||||
if (schema != null) {
|
||||
return ResponseEntity.ok(schema);
|
||||
} else {
|
||||
return ResponseEntity.<JsonNode>ok(null);
|
||||
}
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("获取输出模式失败: {}", id, e);
|
||||
return Mono.just(ResponseEntity.notFound().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 更新节点类型输出模式
|
||||
*/
|
||||
@PutMapping("/{id}/output-schema")
|
||||
public Mono<ResponseEntity<NodeType>> updateOutputSchema(
|
||||
@PathVariable String id,
|
||||
@RequestBody JsonNode outputSchema) {
|
||||
log.info("更新节点类型输出模式: {}", id);
|
||||
|
||||
return Mono.fromCallable(() -> nodeTypeService.updateOutputSchema(id, outputSchema))
|
||||
.map(updated -> {
|
||||
log.info("输出模式更新成功: {}", id);
|
||||
return ResponseEntity.ok(updated);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("更新输出模式失败: {}", id, e);
|
||||
return Mono.just(ResponseEntity.badRequest().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 启用/禁用节点类型
|
||||
*/
|
||||
@PostMapping("/{id}/toggle")
|
||||
public Mono<ResponseEntity<NodeType>> toggleEnabled(
|
||||
@PathVariable String id,
|
||||
@RequestBody Map<String, Boolean> request) {
|
||||
boolean enabled = request.getOrDefault("enabled", true);
|
||||
log.info("{}节点类型: {}", enabled ? "启用" : "禁用", id);
|
||||
|
||||
return Mono.fromCallable(() -> nodeTypeService.toggleEnabled(id, enabled))
|
||||
.map(updated -> {
|
||||
log.info("节点类型状态更新成功: {} -> {}", id, enabled);
|
||||
return ResponseEntity.ok(updated);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("更新节点类型状态失败: {}", id, e);
|
||||
return Mono.just(ResponseEntity.badRequest().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 刷新节点类型注册表
|
||||
*/
|
||||
@PostMapping("/refresh-registry")
|
||||
public Mono<ResponseEntity<Map<String, String>>> refreshRegistry() {
|
||||
log.info("刷新节点类型注册表");
|
||||
|
||||
return Mono.fromRunnable(() -> nodeTypeService.refreshRegistry())
|
||||
.then(Mono.fromCallable(() -> {
|
||||
log.info("节点类型注册表刷新完成");
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"status", "success",
|
||||
"message", "节点类型注册表刷新完成",
|
||||
"timestamp", String.valueOf(System.currentTimeMillis())
|
||||
));
|
||||
}))
|
||||
.onErrorResume(e -> {
|
||||
log.error("刷新节点类型注册表失败", e);
|
||||
return Mono.just(ResponseEntity.internalServerError().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取节点类型统计信息
|
||||
*/
|
||||
@GetMapping("/statistics")
|
||||
public Mono<ResponseEntity<NodeTypeService.NodeTypeStatistics>> getStatistics() {
|
||||
log.info("获取节点类型统计信息");
|
||||
|
||||
return Mono.fromCallable(() -> nodeTypeService.getStatistics())
|
||||
.map(stats -> {
|
||||
log.info("返回统计信息: 总计 {}, 启用 {}, 禁用 {}",
|
||||
stats.getTotalCount(), stats.getEnabledCount(), stats.getDisabledCount());
|
||||
return ResponseEntity.ok(stats);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("获取统计信息失败", e);
|
||||
return Mono.just(ResponseEntity.internalServerError().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 健康检查接口
|
||||
*/
|
||||
@GetMapping("/health")
|
||||
public Mono<ResponseEntity<Map<String, String>>> health() {
|
||||
return Mono.just(ResponseEntity.ok(Map.of(
|
||||
"status", "UP",
|
||||
"service", "node-type-service",
|
||||
"timestamp", String.valueOf(System.currentTimeMillis())
|
||||
)));
|
||||
}
|
||||
}
|
||||
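The controller above exposes the node palette under /api/node-types. A hedged sketch of consuming it with WebClient follows; the base URL is an assumption, and NodeType is the entity class used throughout this commit (its getId()/getName() accessors also appear in DataInitializer above).

import com.flowable.devops.entity.NodeType;
import org.springframework.web.reactive.function.client.WebClient;

public class ListNodeTypesExample {

    public static void main(String[] args) {
        WebClient.create("http://localhost:8080") // assumed base URL
                .get()
                .uri("/api/node-types")
                .retrieve()
                .bodyToFlux(NodeType.class)
                .doOnNext(nodeType -> System.out.println(nodeType.getId() + " - " + nodeType.getName()))
                .blockLast();
    }
}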
@@ -0,0 +1,294 @@
|
||||
package com.flowable.devops.controller;
|
||||
|
||||
import com.flowable.devops.service.TaskService;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* 任务REST API控制器
|
||||
*
|
||||
* 提供任务管理和审批的HTTP API接口
|
||||
*/
|
||||
@Slf4j
|
||||
@RestController
|
||||
@RequestMapping("/api/tasks")
|
||||
@CrossOrigin(origins = "*")
|
||||
public class TaskController {
|
||||
|
||||
@Autowired
|
||||
private TaskService taskService;
|
||||
|
||||
/**
|
||||
* 获取用户待办任务
|
||||
*/
|
||||
@GetMapping("/assigned")
|
||||
public Mono<ResponseEntity<List<TaskService.TaskInfo>>> getAssignedTasks(
|
||||
@RequestParam String userId) {
|
||||
log.info("获取用户待办任务: {}", userId);
|
||||
|
||||
return Mono.fromCallable(() -> taskService.getTasks(userId))
|
||||
.map(tasks -> {
|
||||
log.info("用户 {} 返回 {} 个待办任务", userId, tasks.size());
|
||||
return ResponseEntity.ok(tasks);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("获取待办任务失败: {}", userId, e);
|
||||
return Mono.just(ResponseEntity.internalServerError().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取用户候选任务
|
||||
*/
|
||||
@GetMapping("/candidate")
|
||||
public Mono<ResponseEntity<List<TaskService.TaskInfo>>> getCandidateTasks(
|
||||
@RequestParam String userId,
|
||||
@RequestParam(required = false) List<String> groups) {
|
||||
log.info("获取用户候选任务: {} (组: {})", userId, groups);
|
||||
|
||||
return Mono.fromCallable(() -> taskService.getCandidateTasks(userId, groups))
|
||||
.map(tasks -> {
|
||||
log.info("用户 {} 返回 {} 个候选任务", userId, tasks.size());
|
||||
return ResponseEntity.ok(tasks);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("获取候选任务失败: {}", userId, e);
|
||||
return Mono.just(ResponseEntity.internalServerError().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取任务详情
|
||||
*/
|
||||
@GetMapping("/{taskId}")
|
||||
public Mono<ResponseEntity<TaskService.TaskDetail>> getTaskDetail(@PathVariable String taskId) {
|
||||
log.info("获取任务详情: {}", taskId);
|
||||
|
||||
return Mono.fromCallable(() -> taskService.getTaskDetail(taskId))
|
||||
.map(ResponseEntity::ok)
|
||||
.onErrorResume(e -> {
|
||||
log.error("获取任务详情失败: {}", taskId, e);
|
||||
return Mono.just(ResponseEntity.notFound().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 完成任务
|
||||
*/
|
||||
@PostMapping("/{taskId}/complete")
|
||||
public Mono<ResponseEntity<Map<String, String>>> completeTask(
|
||||
@PathVariable String taskId,
|
||||
@RequestBody Map<String, Object> request) {
|
||||
String userId = (String) request.get("userId");
|
||||
String comment = (String) request.get("comment");
|
||||
@SuppressWarnings("unchecked")
|
||||
Map<String, Object> variables = (Map<String, Object>) request.get("variables");
|
||||
|
||||
log.info("完成任务: {} (用户: {}, 评论: {})", taskId, userId, comment);
|
||||
|
||||
return Mono.fromRunnable(() -> taskService.completeTask(taskId, userId, variables, comment))
|
||||
.then(Mono.fromCallable(() -> {
|
||||
log.info("任务完成成功: {} (用户: {})", taskId, userId);
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"status", "success",
|
||||
"message", "任务完成成功",
|
||||
"taskId", taskId,
|
||||
"timestamp", String.valueOf(System.currentTimeMillis())
|
||||
));
|
||||
}))
|
||||
.onErrorResume(e -> {
|
||||
log.error("完成任务失败: {} (用户: {})", taskId, userId, e);
|
||||
return Mono.just(ResponseEntity.badRequest().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 认领任务
|
||||
*/
|
||||
@PostMapping("/{taskId}/claim")
|
||||
public Mono<ResponseEntity<Map<String, String>>> claimTask(
|
||||
@PathVariable String taskId,
|
||||
@RequestBody Map<String, String> request) {
|
||||
String userId = request.get("userId");
|
||||
log.info("认领任务: {} (用户: {})", taskId, userId);
|
||||
|
||||
return Mono.fromRunnable(() -> taskService.claimTask(taskId, userId))
|
||||
.then(Mono.fromCallable(() -> {
|
||||
log.info("任务认领成功: {} (用户: {})", taskId, userId);
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"status", "success",
|
||||
"message", "任务认领成功",
|
||||
"taskId", taskId,
|
||||
"timestamp", String.valueOf(System.currentTimeMillis())
|
||||
));
|
||||
}))
|
||||
.onErrorResume(e -> {
|
||||
log.error("认领任务失败: {} (用户: {})", taskId, userId, e);
|
||||
return Mono.just(ResponseEntity.badRequest().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 释放任务
|
||||
*/
|
||||
@PostMapping("/{taskId}/unclaim")
|
||||
public Mono<ResponseEntity<Map<String, String>>> unclaimTask(
|
||||
@PathVariable String taskId,
|
||||
@RequestBody Map<String, String> request) {
|
||||
String userId = request.get("userId");
|
||||
log.info("释放任务: {} (用户: {})", taskId, userId);
|
||||
|
||||
return Mono.fromRunnable(() -> taskService.unclaimTask(taskId, userId))
|
||||
.then(Mono.fromCallable(() -> {
|
||||
log.info("任务释放成功: {} (用户: {})", taskId, userId);
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"status", "success",
|
||||
"message", "任务释放成功",
|
||||
"taskId", taskId,
|
||||
"timestamp", String.valueOf(System.currentTimeMillis())
|
||||
));
|
||||
}))
|
||||
.onErrorResume(e -> {
|
||||
log.error("释放任务失败: {} (用户: {})", taskId, userId, e);
|
||||
return Mono.just(ResponseEntity.badRequest().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 委派任务
|
||||
*/
|
||||
@PostMapping("/{taskId}/delegate")
|
||||
public Mono<ResponseEntity<Map<String, String>>> delegateTask(
|
||||
@PathVariable String taskId,
|
||||
@RequestBody Map<String, String> request) {
|
||||
String fromUserId = request.get("fromUserId");
|
||||
String toUserId = request.get("toUserId");
|
||||
String comment = request.get("comment");
|
||||
|
||||
log.info("委派任务: {} (从 {} 到 {}, 评论: {})", taskId, fromUserId, toUserId, comment);
|
||||
|
||||
return Mono.fromRunnable(() -> taskService.delegateTask(taskId, fromUserId, toUserId, comment))
|
||||
.then(Mono.fromCallable(() -> {
|
||||
log.info("任务委派成功: {} -> {}", taskId, toUserId);
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"status", "success",
|
||||
"message", "任务委派成功",
|
||||
"taskId", taskId,
|
||||
"timestamp", String.valueOf(System.currentTimeMillis())
|
||||
));
|
||||
}))
|
||||
.onErrorResume(e -> {
|
||||
log.error("委派任务失败: {} (从 {} 到 {})", taskId, fromUserId, toUserId, e);
|
||||
return Mono.just(ResponseEntity.badRequest().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 转办任务
|
||||
*/
|
||||
@PostMapping("/{taskId}/transfer")
|
||||
public Mono<ResponseEntity<Map<String, String>>> transferTask(
|
||||
@PathVariable String taskId,
|
||||
@RequestBody Map<String, String> request) {
|
||||
String fromUserId = request.get("fromUserId");
|
||||
String toUserId = request.get("toUserId");
|
||||
String comment = request.get("comment");
|
||||
|
||||
log.info("转办任务: {} (从 {} 到 {}, 评论: {})", taskId, fromUserId, toUserId, comment);
|
||||
|
||||
return Mono.fromRunnable(() -> taskService.transferTask(taskId, fromUserId, toUserId, comment))
|
||||
.then(Mono.fromCallable(() -> {
|
||||
log.info("任务转办成功: {} -> {}", taskId, toUserId);
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"status", "success",
|
||||
"message", "任务转办成功",
|
||||
"taskId", taskId,
|
||||
"timestamp", String.valueOf(System.currentTimeMillis())
|
||||
));
|
||||
}))
|
||||
.onErrorResume(e -> {
|
||||
log.error("转办任务失败: {} (从 {} 到 {})", taskId, fromUserId, toUserId, e);
|
||||
return Mono.just(ResponseEntity.badRequest().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取任务评论
|
||||
*/
|
||||
@GetMapping("/{taskId}/comments")
|
||||
public Mono<ResponseEntity<List<TaskService.TaskComment>>> getTaskComments(@PathVariable String taskId) {
|
||||
log.info("获取任务评论: {}", taskId);
|
||||
|
||||
return Mono.fromCallable(() -> taskService.getTaskComments(taskId))
|
||||
.map(comments -> {
|
||||
log.info("任务 {} 返回 {} 条评论", taskId, comments.size());
|
||||
return ResponseEntity.ok(comments);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("获取任务评论失败: {}", taskId, e);
|
||||
return Mono.just(ResponseEntity.notFound().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 添加任务评论
|
||||
*/
|
||||
@PostMapping("/{taskId}/comments")
|
||||
public Mono<ResponseEntity<TaskService.TaskComment>> addTaskComment(
|
||||
@PathVariable String taskId,
|
||||
@RequestBody Map<String, String> request) {
|
||||
String userId = request.get("userId");
|
||||
String message = request.get("message");
|
||||
|
||||
log.info("添加任务评论: {} (用户: {}, 内容: {})", taskId, userId, message);
|
||||
|
||||
return Mono.fromCallable(() -> taskService.addTaskComment(taskId, userId, message))
|
||||
.map(comment -> {
|
||||
log.info("任务评论添加成功: {} (ID: {})", taskId, comment.getId());
|
||||
return ResponseEntity.ok(comment);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("添加任务评论失败: {} (用户: {})", taskId, userId, e);
|
||||
return Mono.just(ResponseEntity.badRequest().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取任务统计信息
|
||||
*/
|
||||
@GetMapping("/statistics")
|
||||
public Mono<ResponseEntity<TaskService.TaskStatistics>> getTaskStatistics(
|
||||
@RequestParam String userId,
|
||||
@RequestParam(required = false) List<String> groups) {
|
||||
log.info("获取任务统计信息: {} (组: {})", userId, groups);
|
||||
|
||||
return Mono.fromCallable(() -> taskService.getTaskStatistics(userId, groups))
|
||||
.map(stats -> {
|
||||
log.info("用户 {} 统计信息: 待办 {}, 候选 {}, 逾期 {}",
|
||||
userId, stats.getAssignedCount(), stats.getCandidateCount(), stats.getOverdueCount());
|
||||
return ResponseEntity.ok(stats);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("获取任务统计信息失败: {}", userId, e);
|
||||
return Mono.just(ResponseEntity.internalServerError().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 健康检查接口
|
||||
*/
|
||||
@GetMapping("/health")
|
||||
public Mono<ResponseEntity<Map<String, String>>> health() {
|
||||
return Mono.just(ResponseEntity.ok(Map.of(
|
||||
"status", "UP",
|
||||
"service", "task-service",
|
||||
"timestamp", String.valueOf(System.currentTimeMillis())
|
||||
)));
|
||||
}
|
||||
}
|
||||
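A hedged sketch of the request body that the complete-task endpoint above expects (userId, comment, variables). The host, task id and variable names are invented for illustration; only the JSON shape follows the controller code.

// Hedged example: POST /api/tasks/{taskId}/complete with java.net.http.
// "12345", "alice" and the "approved" variable are illustrative values.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class CompleteTaskExample {

    public static void main(String[] args) throws Exception {
        String json = """
                {
                  "userId": "alice",
                  "comment": "approved",
                  "variables": { "approved": true }
                }
                """;

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/api/tasks/12345/complete")) // assumed host and task id
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(json))
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}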
@ -0,0 +1,209 @@
|
||||
package com.flowable.devops.controller;
|
||||
|
||||
import com.flowable.devops.entity.WorkflowDefinition;
|
||||
import com.flowable.devops.entity.WorkflowExecution;
|
||||
import com.flowable.devops.service.WorkflowService;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
import reactor.core.publisher.Flux;
|
||||
import reactor.core.publisher.Mono;
|
||||
|
||||
import jakarta.validation.Valid;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* 工作流REST API控制器
|
||||
*
|
||||
* 提供工作流定义和执行的HTTP API接口
|
||||
*/
|
||||
@Slf4j
|
||||
@RestController
|
||||
@RequestMapping("/api/workflows")
|
||||
@CrossOrigin(origins = "*")
|
||||
public class WorkflowController {
|
||||
|
||||
@Autowired
|
||||
private WorkflowService workflowService;
|
||||
|
||||
/**
|
||||
* 获取工作流列表
|
||||
*/
|
||||
@GetMapping
|
||||
public Mono<ResponseEntity<List<WorkflowDefinition>>> getWorkflows(
|
||||
@RequestParam(required = false) String status) {
|
||||
log.info("获取工作流列表,状态过滤: {}", status);
|
||||
|
||||
return Mono.fromCallable(() -> workflowService.list(status))
|
||||
.map(workflows -> {
|
||||
log.info("返回 {} 个工作流定义", workflows.size());
|
||||
return ResponseEntity.ok(workflows);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("获取工作流列表失败", e);
|
||||
return Mono.just(ResponseEntity.internalServerError().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 根据ID获取工作流定义
|
||||
*/
|
||||
@GetMapping("/{id}")
|
||||
public Mono<ResponseEntity<WorkflowDefinition>> getWorkflow(@PathVariable String id) {
|
||||
log.info("获取工作流定义: {}", id);
|
||||
|
||||
return Mono.fromCallable(() -> workflowService.getById(id))
|
||||
.map(ResponseEntity::ok)
|
||||
.onErrorResume(e -> {
|
||||
log.error("获取工作流定义失败: {}", id, e);
|
||||
return Mono.just(ResponseEntity.notFound().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建工作流定义
|
||||
*/
|
||||
@PostMapping
|
||||
public Mono<ResponseEntity<WorkflowDefinition>> createWorkflow(
|
||||
@Valid @RequestBody WorkflowDefinition workflow) {
|
||||
log.info("创建工作流定义: {}", workflow.getName());
|
||||
|
||||
return Mono.fromCallable(() -> workflowService.create(workflow))
|
||||
.subscribeOn(reactor.core.scheduler.Schedulers.boundedElastic())
|
||||
.map(created -> {
|
||||
log.info("工作流定义创建成功: {} (ID: {})", created.getName(), created.getId());
|
||||
return ResponseEntity.ok(created);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("创建工作流定义失败: {}", workflow.getName(), e);
|
||||
return Mono.just(ResponseEntity.badRequest().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 更新工作流定义
|
||||
*/
|
||||
@PutMapping("/{id}")
|
||||
public Mono<ResponseEntity<WorkflowDefinition>> updateWorkflow(
|
||||
@PathVariable String id,
|
||||
@Valid @RequestBody WorkflowDefinition workflow) {
|
||||
log.info("更新工作流定义: {}", id);
|
||||
|
||||
return Mono.fromCallable(() -> workflowService.update(id, workflow))
|
||||
.map(updated -> {
|
||||
log.info("工作流定义更新成功: {}", id);
|
||||
return ResponseEntity.ok(updated);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("更新工作流定义失败: {}", id, e);
|
||||
return Mono.just(ResponseEntity.badRequest().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 删除工作流定义
|
||||
*/
|
||||
@DeleteMapping("/{id}")
|
||||
public Mono<ResponseEntity<Void>> deleteWorkflow(@PathVariable String id) {
|
||||
log.info("删除工作流定义: {}", id);
|
||||
|
||||
return Mono.fromRunnable(() -> workflowService.delete(id))
|
||||
.then(Mono.fromCallable(() -> {
|
||||
log.info("工作流定义删除成功: {}", id);
|
||||
return ResponseEntity.ok().<Void>build();
|
||||
}))
|
||||
.onErrorResume(e -> {
|
||||
log.error("删除工作流定义失败: {}", id, e);
|
||||
return Mono.just(ResponseEntity.badRequest().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 激活工作流定义
|
||||
*/
|
||||
@PostMapping("/{id}/activate")
|
||||
public Mono<ResponseEntity<WorkflowDefinition>> activateWorkflow(@PathVariable String id) {
|
||||
log.info("激活工作流定义: {}", id);
|
||||
|
||||
return Mono.fromCallable(() -> workflowService.activate(id))
|
||||
.map(activated -> {
|
||||
log.info("工作流定义激活成功: {}", id);
|
||||
return ResponseEntity.ok(activated);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("激活工作流定义失败: {}", id, e);
|
||||
return Mono.just(ResponseEntity.badRequest().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 执行工作流
|
||||
*/
|
||||
@PostMapping("/{id}/execute")
|
||||
public Mono<ResponseEntity<WorkflowService.WorkflowExecutionResult>> executeWorkflow(
|
||||
@PathVariable String id,
|
||||
@RequestBody(required = false) Map<String, Object> input) {
|
||||
log.info("执行工作流: {} (输入: {})", id, input);
|
||||
|
||||
return Mono.fromCallable(() -> {
|
||||
Map<String, Object> safeInput = input != null ? input : Map.of();
|
||||
return workflowService.execute(id, safeInput);
|
||||
})
|
||||
.map(result -> {
|
||||
log.info("工作流执行完成: {} -> {}", id, result.getExecutionId());
|
||||
return ResponseEntity.ok(result);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("执行工作流失败: {}", id, e);
|
||||
return Mono.just(ResponseEntity.badRequest().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取工作流执行历史
|
||||
*/
|
||||
@GetMapping("/{id}/executions")
|
||||
public Mono<ResponseEntity<List<WorkflowExecution>>> getExecutions(@PathVariable String id) {
|
||||
log.info("获取工作流执行历史: {}", id);
|
||||
|
||||
return Mono.fromCallable(() -> workflowService.getExecutions(id))
|
||||
.map(executions -> {
|
||||
log.info("返回 {} 个执行记录", executions.size());
|
||||
return ResponseEntity.ok(executions);
|
||||
})
|
||||
.onErrorResume(e -> {
|
||||
log.error("获取工作流执行历史失败: {}", id, e);
|
||||
return Mono.just(ResponseEntity.internalServerError().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取执行详情
|
||||
*/
|
||||
@GetMapping("/executions/{executionId}")
|
||||
public Mono<ResponseEntity<WorkflowService.WorkflowExecutionDetail>> getExecutionDetail(
|
||||
@PathVariable String executionId) {
|
||||
log.info("获取执行详情: {}", executionId);
|
||||
|
||||
return Mono.fromCallable(() -> workflowService.getExecutionDetail(executionId))
|
||||
.map(ResponseEntity::ok)
|
||||
.onErrorResume(e -> {
|
||||
log.error("获取执行详情失败: {}", executionId, e);
|
||||
return Mono.just(ResponseEntity.notFound().build());
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* 健康检查接口
|
||||
*/
|
||||
@GetMapping("/health")
|
||||
public Mono<ResponseEntity<Map<String, String>>> health() {
|
||||
return Mono.just(ResponseEntity.ok(Map.of(
|
||||
"status", "UP",
|
||||
"service", "workflow-service",
|
||||
"timestamp", String.valueOf(System.currentTimeMillis())
|
||||
)));
|
||||
}
|
||||
}
|
||||
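Note that createWorkflow() above offloads its blocking service call with subscribeOn(Schedulers.boundedElastic()), while the other handlers run Mono.fromCallable on the calling thread. A minimal sketch of that offloading pattern, which the remaining handlers could reuse; the helper name and the simulated delay are illustrative only.

// Hedged sketch: wrap blocking service/JPA calls so they run off the reactor event loop.
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;

import java.util.concurrent.Callable;

public class BlockingCallSketch {

    // Executes a blocking call on the scheduler intended for blocking work
    static <T> Mono<T> blocking(Callable<T> call) {
        return Mono.fromCallable(call)
                .subscribeOn(Schedulers.boundedElastic());
    }

    public static void main(String[] args) {
        // Simulated blocking lookup standing in for a workflowService call
        String result = blocking(() -> {
            Thread.sleep(100); // pretend JPA latency
            return "workflow-definition";
        }).block();

        System.out.println(result);
    }
}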
@ -0,0 +1,139 @@
|
||||
package com.flowable.devops.entity;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonFormat;
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import lombok.Data;
|
||||
import lombok.EqualsAndHashCode;
|
||||
import org.hibernate.annotations.JdbcTypeCode;
|
||||
import org.hibernate.type.SqlTypes;
|
||||
|
||||
import jakarta.persistence.*;
|
||||
import java.time.LocalDateTime;
|
||||
|
||||
/**
|
||||
* 节点执行日志实体
|
||||
*
|
||||
* 对应数据库表: node_execution_logs
|
||||
* 记录每个节点的详细执行情况,用于调试和监控
|
||||
*/
|
||||
@Data
|
||||
@EqualsAndHashCode(callSuper = false)
|
||||
@Entity
|
||||
@Table(name = "node_execution_logs")
|
||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||
public class NodeExecutionLog {
|
||||
|
||||
@Id
|
||||
@GeneratedValue(strategy = GenerationType.IDENTITY)
|
||||
@Column(name = "id")
|
||||
private Long id;
|
||||
|
||||
/**
|
||||
* 关联的工作流执行ID
|
||||
*/
|
||||
@Column(name = "execution_id", nullable = false, length = 64)
|
||||
private String executionId;
|
||||
|
||||
/**
|
||||
* 节点ID
|
||||
*/
|
||||
@Column(name = "node_id", nullable = false, length = 64)
|
||||
private String nodeId;
|
||||
|
||||
/**
|
||||
* 节点名称
|
||||
*/
|
||||
@Column(name = "node_name")
|
||||
private String nodeName;
|
||||
|
||||
/**
|
||||
* 节点类型
|
||||
*/
|
||||
@Column(name = "node_type", length = 64)
|
||||
private String nodeType;
|
||||
|
||||
/**
|
||||
* 节点输入参数
|
||||
*/
|
||||
@JdbcTypeCode(SqlTypes.JSON)
|
||||
@Column(name = "input", columnDefinition = "JSON")
|
||||
private JsonNode input;
|
||||
|
||||
/**
|
||||
* 节点输出结果
|
||||
*/
|
||||
@JdbcTypeCode(SqlTypes.JSON)
|
||||
@Column(name = "output", columnDefinition = "JSON")
|
||||
private JsonNode output;
|
||||
|
||||
/**
|
||||
* 执行状态:success, failed, skipped
|
||||
*/
|
||||
@Column(name = "status", length = 20)
|
||||
@Enumerated(EnumType.STRING)
|
||||
private ExecutionStatus status;
|
||||
|
||||
/**
|
||||
* 开始时间
|
||||
*/
|
||||
@Column(name = "started_at")
|
||||
@JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
|
||||
private LocalDateTime startedAt;
|
||||
|
||||
/**
|
||||
* 结束时间
|
||||
*/
|
||||
@Column(name = "ended_at")
|
||||
@JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
|
||||
private LocalDateTime endedAt;
|
||||
|
||||
/**
|
||||
* 执行耗时(毫秒)
|
||||
*/
|
||||
@Column(name = "duration_ms")
|
||||
private Integer durationMs;
|
||||
|
||||
/**
|
||||
* 错误信息
|
||||
*/
|
||||
@Column(name = "error_message", columnDefinition = "TEXT")
|
||||
private String errorMessage;
|
||||
|
||||
/**
|
||||
* 关联工作流执行记录
|
||||
*/
|
||||
@ManyToOne(fetch = FetchType.LAZY)
|
||||
@JoinColumn(name = "execution_id", insertable = false, updatable = false)
|
||||
private WorkflowExecution workflowExecution;
|
||||
|
||||
/**
|
||||
* 执行状态枚举
|
||||
*/
|
||||
public enum ExecutionStatus {
|
||||
SUCCESS("成功"),
|
||||
FAILED("失败"),
|
||||
SKIPPED("跳过");
|
||||
|
||||
private final String description;
|
||||
|
||||
ExecutionStatus(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 计算执行耗时
|
||||
*/
|
||||
@PrePersist
|
||||
@PreUpdate
|
||||
public void calculateDuration() {
|
||||
if (startedAt != null && endedAt != null) {
|
||||
this.durationMs = (int) java.time.Duration.between(startedAt, endedAt).toMillis();
|
||||
}
|
||||
}
|
||||
}
|
||||
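A small sketch of the duration calculation performed by the @PrePersist/@PreUpdate callback above, invoked directly here instead of through the JPA lifecycle (the getters and setters are generated by Lombok's @Data).

// Hedged example: calculateDuration() fills durationMs from startedAt/endedAt.
import com.flowable.devops.entity.NodeExecutionLog;

import java.time.LocalDateTime;

public class DurationSketch {

    public static void main(String[] args) {
        NodeExecutionLog log = new NodeExecutionLog();
        log.setStartedAt(LocalDateTime.of(2024, 1, 1, 10, 0, 0));
        log.setEndedAt(LocalDateTime.of(2024, 1, 1, 10, 0, 1));

        log.calculateDuration(); // normally triggered by JPA before insert/update

        System.out.println(log.getDurationMs()); // 1000
    }
}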
126
backend/src/main/java/com/flowable/devops/entity/NodeType.java
Normal file
@ -0,0 +1,126 @@
|
||||
package com.flowable.devops.entity;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonFormat;
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import lombok.Data;
|
||||
import lombok.EqualsAndHashCode;
|
||||
import org.hibernate.annotations.JdbcTypeCode;
|
||||
import org.hibernate.type.SqlTypes;
|
||||
import org.springframework.data.annotation.CreatedDate;
|
||||
import org.springframework.data.annotation.LastModifiedDate;
|
||||
import org.springframework.data.jpa.domain.support.AuditingEntityListener;
|
||||
|
||||
import jakarta.persistence.*;
|
||||
import java.time.LocalDateTime;
|
||||
|
||||
/**
|
||||
* 节点类型元数据实体
|
||||
*
|
||||
* 对应数据库表: node_types
|
||||
* 存储节点类型的字段定义、输出结构、实现类等元数据
|
||||
*/
|
||||
@Data
|
||||
@EqualsAndHashCode(callSuper = false)
|
||||
@Entity
|
||||
@Table(name = "node_types")
|
||||
@EntityListeners(AuditingEntityListener.class)
|
||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||
public class NodeType {
|
||||
|
||||
@Id
|
||||
@Column(name = "id", length = 64)
|
||||
private String id;
|
||||
|
||||
@Column(name = "name", nullable = false)
|
||||
private String name;
|
||||
|
||||
@Column(name = "display_name", nullable = false)
|
||||
private String displayName;
|
||||
|
||||
/**
|
||||
* 节点分类:api, database, logic, notification, transform, other
|
||||
*/
|
||||
@Column(name = "category", length = 50)
|
||||
@Enumerated(EnumType.STRING)
|
||||
private NodeCategory category;
|
||||
|
||||
@Column(name = "icon", length = 100)
|
||||
private String icon;
|
||||
|
||||
@Column(name = "description", columnDefinition = "TEXT")
|
||||
private String description;
|
||||
|
||||
/**
|
||||
* 字段定义JSON
|
||||
* 包含节点配置表单的所有字段定义
|
||||
*/
|
||||
@JdbcTypeCode(SqlTypes.JSON)
|
||||
@Column(name = "fields", nullable = false, columnDefinition = "JSON")
|
||||
private JsonNode fields;
|
||||
|
||||
/**
|
||||
* 输出结构定义JSON Schema
|
||||
* 用于前端字段映射组件展示可用字段
|
||||
*/
|
||||
@JdbcTypeCode(SqlTypes.JSON)
|
||||
@Column(name = "output_schema", columnDefinition = "JSON")
|
||||
private JsonNode outputSchema;
|
||||
|
||||
/**
|
||||
* Java实现类的完整类名
|
||||
*/
|
||||
@Column(name = "implementation_class", nullable = false)
|
||||
private String implementationClass;
|
||||
|
||||
/**
|
||||
* 是否启用
|
||||
*/
|
||||
@Column(name = "enabled", nullable = false)
|
||||
private Boolean enabled = true;
|
||||
|
||||
/**
|
||||
* 显示顺序
|
||||
*/
|
||||
@Column(name = "display_order")
|
||||
private Integer displayOrder;
|
||||
|
||||
@CreatedDate
|
||||
@Column(name = "created_at", nullable = false, updatable = false)
|
||||
@JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
|
||||
private LocalDateTime createdAt;
|
||||
|
||||
@LastModifiedDate
|
||||
@Column(name = "updated_at", nullable = false)
|
||||
@JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
|
||||
private LocalDateTime updatedAt;
|
||||
|
||||
/**
|
||||
* 节点分类枚举
|
||||
*/
|
||||
public enum NodeCategory {
|
||||
API("API接口"),
|
||||
DATABASE("数据库"),
|
||||
LOGIC("逻辑控制"),
|
||||
NOTIFICATION("通知消息"),
|
||||
TRANSFORM("数据转换"),
|
||||
OTHER("其他");
|
||||
|
||||
private final String description;
|
||||
|
||||
NodeCategory(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 检查是否启用
|
||||
*/
|
||||
public boolean isEnabled() {
|
||||
return enabled != null && enabled;
|
||||
}
|
||||
}
|
||||
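A hedged sketch of populating the JSON fields column defined above. The commit does not fix the field-definition schema, so the name/label/type/required structure and the implementation class name below are assumptions for illustration only.

// Hedged example: building a NodeType with a JsonNode fields definition.
// The field schema and implementation class are assumed, not specified by this commit.
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.flowable.devops.entity.NodeType;

public class NodeTypeFieldsSketch {

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode fields = mapper.readTree("""
                [
                  { "name": "url",    "label": "请求地址", "type": "string", "required": true },
                  { "name": "method", "label": "请求方法", "type": "select", "required": true }
                ]
                """);

        NodeType nodeType = new NodeType();
        nodeType.setId("http-request");                 // assumed id
        nodeType.setName("httpRequest");
        nodeType.setDisplayName("HTTP 请求");
        nodeType.setCategory(NodeType.NodeCategory.API);
        nodeType.setImplementationClass("com.flowable.devops.workflow.node.HttpRequestNode"); // assumed class
        nodeType.setFields(fields);
    }
}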
@ -0,0 +1,108 @@
|
||||
package com.flowable.devops.entity;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonFormat;
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import lombok.Data;
|
||||
import lombok.EqualsAndHashCode;
|
||||
import org.hibernate.annotations.JdbcTypeCode;
|
||||
import org.hibernate.type.SqlTypes;
|
||||
import org.springframework.data.annotation.CreatedDate;
|
||||
import org.springframework.data.annotation.LastModifiedDate;
|
||||
import org.springframework.data.jpa.domain.support.AuditingEntityListener;
|
||||
|
||||
import jakarta.persistence.*;
|
||||
import java.time.LocalDateTime;
|
||||
|
||||
/**
|
||||
* 工作流定义实体
|
||||
*
|
||||
* 对应数据库表: workflow_definitions
|
||||
* 存储前端编辑器生成的JSON工作流定义
|
||||
*/
|
||||
@Data
|
||||
@EqualsAndHashCode(callSuper = false)
|
||||
@Entity
|
||||
@Table(name = "workflow_definitions")
|
||||
@EntityListeners(AuditingEntityListener.class)
|
||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||
public class WorkflowDefinition {
|
||||
|
||||
@Id
|
||||
@Column(name = "id", length = 64)
|
||||
private String id;
|
||||
|
||||
@Column(name = "name", nullable = false)
|
||||
private String name;
|
||||
|
||||
@Column(name = "description", columnDefinition = "TEXT")
|
||||
private String description;
|
||||
|
||||
/**
|
||||
* 工作流定义JSON
|
||||
* 包含nodes、edges、variables等完整定义
|
||||
*/
|
||||
@JdbcTypeCode(SqlTypes.JSON)
|
||||
@Column(name = "definition", nullable = false, columnDefinition = "JSON")
|
||||
private JsonNode definition;
|
||||
|
||||
/**
|
||||
* Flowable流程定义ID(转换后生成)
|
||||
*/
|
||||
@Column(name = "flowable_process_definition_id", length = 64)
|
||||
private String flowableProcessDefinitionId;
|
||||
|
||||
/**
|
||||
* Flowable部署ID
|
||||
*/
|
||||
@Column(name = "flowable_deployment_id", length = 64)
|
||||
private String flowableDeploymentId;
|
||||
|
||||
/**
|
||||
* 状态:draft, active, archived
|
||||
*/
|
||||
@Column(name = "status", length = 20)
|
||||
@Enumerated(EnumType.STRING)
|
||||
private WorkflowStatus status = WorkflowStatus.DRAFT;
|
||||
|
||||
/**
|
||||
* 创建者
|
||||
*/
|
||||
@Column(name = "created_by", length = 100)
|
||||
private String createdBy;
|
||||
|
||||
/**
|
||||
* 创建时间
|
||||
*/
|
||||
@CreatedDate
|
||||
@Column(name = "created_at", nullable = false, updatable = false)
|
||||
@JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
|
||||
private LocalDateTime createdAt;
|
||||
|
||||
/**
|
||||
* 更新时间
|
||||
*/
|
||||
@LastModifiedDate
|
||||
@Column(name = "updated_at", nullable = false)
|
||||
@JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
|
||||
private LocalDateTime updatedAt;
|
||||
|
||||
/**
|
||||
* 工作流状态枚举
|
||||
*/
|
||||
public enum WorkflowStatus {
|
||||
DRAFT("草稿"),
|
||||
ACTIVE("激活"),
|
||||
ARCHIVED("归档");
|
||||
|
||||
private final String description;
|
||||
|
||||
WorkflowStatus(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,148 @@
|
||||
package com.flowable.devops.entity;
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonFormat;
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import lombok.Data;
|
||||
import lombok.EqualsAndHashCode;
|
||||
import org.hibernate.annotations.GenericGenerator;
|
||||
import org.hibernate.annotations.JdbcTypeCode;
|
||||
import org.hibernate.type.SqlTypes;
|
||||
import org.springframework.data.annotation.CreatedDate;
|
||||
import org.springframework.data.jpa.domain.support.AuditingEntityListener;
|
||||
|
||||
import jakarta.persistence.*;
|
||||
import java.time.LocalDateTime;
|
||||
|
||||
/**
|
||||
* 工作流执行记录实体
|
||||
*
|
||||
* 对应数据库表: workflow_executions
|
||||
* 扩展Flowable的流程实例,记录业务相关的执行信息
|
||||
*/
|
||||
@Data
|
||||
@EqualsAndHashCode(callSuper = false)
|
||||
@Entity
|
||||
@Table(name = "workflow_executions")
|
||||
@EntityListeners(AuditingEntityListener.class)
|
||||
@JsonIgnoreProperties(ignoreUnknown = true)
|
||||
public class WorkflowExecution {
|
||||
|
||||
@Id
|
||||
@GenericGenerator(name = "uuid", strategy = "uuid2")
|
||||
@GeneratedValue(generator = "uuid")
|
||||
@Column(name = "id", length = 64)
|
||||
private String id;
|
||||
|
||||
/**
|
||||
* 关联的工作流定义ID
|
||||
*/
|
||||
@Column(name = "workflow_definition_id", nullable = false, length = 64)
|
||||
private String workflowDefinitionId;
|
||||
|
||||
/**
|
||||
* Flowable流程实例ID
|
||||
*/
|
||||
@Column(name = "flowable_process_instance_id", nullable = false, length = 64)
|
||||
private String flowableProcessInstanceId;
|
||||
|
||||
/**
|
||||
* 执行输入参数
|
||||
*/
|
||||
@JdbcTypeCode(SqlTypes.JSON)
|
||||
@Column(name = "input", columnDefinition = "JSON")
|
||||
private JsonNode input;
|
||||
|
||||
/**
|
||||
* 执行状态:running, completed, failed, cancelled
|
||||
*/
|
||||
@Column(name = "status", length = 20, nullable = false)
|
||||
@Enumerated(EnumType.STRING)
|
||||
private ExecutionStatus status = ExecutionStatus.RUNNING;
|
||||
|
||||
/**
|
||||
* 开始时间
|
||||
*/
|
||||
@CreatedDate
|
||||
@Column(name = "started_at", nullable = false, updatable = false)
|
||||
@JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
|
||||
private LocalDateTime startedAt;
|
||||
|
||||
/**
|
||||
* 结束时间
|
||||
*/
|
||||
@Column(name = "ended_at")
|
||||
@JsonFormat(pattern = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
|
||||
private LocalDateTime endedAt;
|
||||
|
||||
/**
|
||||
* 错误信息
|
||||
*/
|
||||
@Column(name = "error_message", columnDefinition = "TEXT")
|
||||
private String errorMessage;
|
||||
|
||||
/**
|
||||
* 错误堆栈
|
||||
*/
|
||||
@Column(name = "error_stack", columnDefinition = "TEXT")
|
||||
private String errorStack;
|
||||
|
||||
/**
|
||||
* 触发方式:manual, cron, webhook
|
||||
*/
|
||||
@Column(name = "trigger_type", length = 20)
|
||||
@Enumerated(EnumType.STRING)
|
||||
private TriggerType triggerType = TriggerType.MANUAL;
|
||||
|
||||
/**
|
||||
* 触发者
|
||||
*/
|
||||
@Column(name = "triggered_by", length = 100)
|
||||
private String triggeredBy;
|
||||
|
||||
/**
|
||||
* 关联工作流定义
|
||||
*/
|
||||
@ManyToOne(fetch = FetchType.LAZY)
|
||||
@JoinColumn(name = "workflow_definition_id", insertable = false, updatable = false)
|
||||
private WorkflowDefinition workflowDefinition;
|
||||
|
||||
/**
|
||||
* 执行状态枚举
|
||||
*/
|
||||
public enum ExecutionStatus {
|
||||
RUNNING("运行中"),
|
||||
COMPLETED("已完成"),
|
||||
FAILED("失败"),
|
||||
CANCELLED("已取消");
|
||||
|
||||
private final String description;
|
||||
|
||||
ExecutionStatus(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 触发方式枚举
|
||||
*/
|
||||
public enum TriggerType {
|
||||
MANUAL("手动触发"),
|
||||
CRON("定时触发"),
|
||||
WEBHOOK("Webhook触发");
|
||||
|
||||
private final String description;
|
||||
|
||||
TriggerType(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,346 @@
|
||||
package com.flowable.devops.expression;
|
||||
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.flowable.engine.delegate.DelegateExecution;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import jakarta.el.*;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
* Jakarta EL (JUEL) 表达式引擎
|
||||
*
|
||||
* 支持语法:
|
||||
* - ${nodes.node1.output.body.email}
|
||||
* - ${workflow.input.username}
|
||||
* - ${env.API_KEY}
|
||||
* - ${nodes.step1.output.count > 10 ? 'high' : 'low'}
|
||||
*
|
||||
* 安全特性:
|
||||
* - 仅支持Map属性访问,不支持方法调用
|
||||
* - 禁止访问系统类和危险操作
|
||||
* - 表达式长度限制
|
||||
* - 执行缓存优化
|
||||
*/
|
||||
@Slf4j
|
||||
@Service
|
||||
public class ExpressionEngine {
|
||||
|
||||
private final ExpressionFactory expressionFactory;
|
||||
|
||||
// 表达式编译缓存(性能优化)
|
||||
private final Map<String, ValueExpression> expressionCache = new ConcurrentHashMap<>();
|
||||
|
||||
// 表达式匹配模式
|
||||
private static final Pattern EXPRESSION_PATTERN = Pattern.compile("\\$\\{([^}]+)\\}");
|
||||
|
||||
// 安全检查:禁止访问的类名关键词
|
||||
private static final Set<String> BLOCKED_KEYWORDS = Set.of(
|
||||
"Runtime", "ProcessBuilder", "System", "Class", "ClassLoader",
|
||||
"Thread", "File", "Files", "Paths", "Process", "Method",
|
||||
"getClass", "forName", "newInstance", "exec", "load", "getRuntime"
|
||||
);
|
||||
|
||||
// 表达式缓存最大大小
|
||||
private static final int MAX_CACHE_SIZE = 1000;
|
||||
|
||||
// 表达式最大长度
|
||||
private static final int MAX_EXPRESSION_LENGTH = 2000;
|
||||
|
||||
public ExpressionEngine() {
|
||||
this.expressionFactory = ExpressionFactory.newInstance();
|
||||
log.info("表达式引擎初始化完成,使用Jakarta EL实现");
|
||||
}
|
||||
|
||||
/**
|
||||
* 解析单个表达式
|
||||
*
|
||||
* @param expression 表达式字符串,例如: "${nodes.node1.output.body.email}"
|
||||
* @param execution Flowable执行上下文
|
||||
* @return 解析后的值
|
||||
*/
|
||||
public Object evaluate(String expression, DelegateExecution execution) {
|
||||
if (expression == null || expression.trim().isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// 快速路径:无表达式,直接返回
|
||||
if (!expression.contains("${")) {
|
||||
return expression;
|
||||
}
|
||||
|
||||
try {
|
||||
// 安全检查
|
||||
validateExpression(expression);
|
||||
|
||||
// 提取所有表达式: ${...}
|
||||
Matcher matcher = EXPRESSION_PATTERN.matcher(expression);
|
||||
|
||||
// 如果是纯表达式(整个字符串就是一个表达式)
|
||||
if (matcher.matches()) {
|
||||
return evaluateSingle(expression, execution);
|
||||
}
|
||||
|
||||
// 混合字符串,替换所有表达式
|
||||
StringBuffer result = new StringBuffer();
|
||||
while (matcher.find()) {
|
||||
String fullExpr = matcher.group(0); // ${...}
|
||||
Object value = evaluateSingle(fullExpr, execution);
|
||||
String replacement = value != null ? value.toString() : "";
|
||||
matcher.appendReplacement(result, Matcher.quoteReplacement(replacement));
|
||||
}
|
||||
matcher.appendTail(result);
|
||||
|
||||
return result.toString();
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("表达式解析失败: {}, 错误: {}", expression, e.getMessage());
|
||||
throw new ExpressionEvaluationException("表达式解析失败: " + expression, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 解析单个完整表达式
|
||||
*/
|
||||
private Object evaluateSingle(String expression, DelegateExecution execution) {
|
||||
try {
|
||||
// 尝试从缓存获取
|
||||
ValueExpression expr = expressionCache.get(expression);
|
||||
|
||||
if (expr == null) {
|
||||
// 编译表达式
|
||||
StandardELContext context = createContext(execution);
|
||||
expr = expressionFactory.createValueExpression(context, expression, Object.class);
|
||||
|
||||
// 缓存(限制大小防止内存泄漏)
|
||||
if (expressionCache.size() < MAX_CACHE_SIZE) {
|
||||
expressionCache.put(expression, expr);
|
||||
}
|
||||
}
|
||||
|
||||
// 求值
|
||||
StandardELContext context = createContext(execution);
|
||||
Object result = expr.getValue(context);
|
||||
|
||||
log.debug("表达式求值成功: {} -> {}", expression, result);
|
||||
return result;
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("表达式求值失败: {}", expression, e);
|
||||
throw new ExpressionEvaluationException(
|
||||
"表达式求值失败: " + expression + ", 错误: " + e.getMessage(),
|
||||
e
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建EL上下文(注入变量)
|
||||
*
|
||||
* 可访问的命名空间:
|
||||
* - nodes:上游节点数据
|
||||
* - workflow:工作流输入和变量
|
||||
* - env:环境变量
|
||||
*/
|
||||
private StandardELContext createContext(DelegateExecution execution) {
|
||||
StandardELContext context = new StandardELContext(expressionFactory);
|
||||
|
||||
// 1. 注入 nodes(节点输出)
|
||||
Map<String, Object> nodesData = getVariable(execution, "nodes");
|
||||
if (nodesData != null) {
|
||||
ValueExpression nodesExpr = expressionFactory.createValueExpression(
|
||||
nodesData, Map.class
|
||||
);
|
||||
context.getVariableMapper().setVariable("nodes", nodesExpr);
|
||||
}
|
||||
|
||||
// 2. 注入 workflow(工作流输入和变量)
|
||||
Map<String, Object> workflowData = getVariable(execution, "workflow");
|
||||
if (workflowData != null) {
|
||||
ValueExpression workflowExpr = expressionFactory.createValueExpression(
|
||||
workflowData, Map.class
|
||||
);
|
||||
context.getVariableMapper().setVariable("workflow", workflowExpr);
|
||||
}
|
||||
|
||||
// 3. 注入 env(环境变量)- 安全的子集
|
||||
Map<String, String> safeEnvVars = getSafeEnvironmentVariables();
|
||||
ValueExpression envExpr = expressionFactory.createValueExpression(
|
||||
safeEnvVars, Map.class
|
||||
);
|
||||
context.getVariableMapper().setVariable("env", envExpr);
|
||||
|
||||
return context;
|
||||
}
|
||||
|
||||
/**
|
||||
* 安全地获取环境变量(过滤敏感信息)
|
||||
*/
|
||||
private Map<String, String> getSafeEnvironmentVariables() {
|
||||
Map<String, String> safeVars = new HashMap<>();
|
||||
Map<String, String> allEnvVars = System.getenv();
|
||||
|
||||
// 只暴露明确允许的环境变量模式
|
||||
Set<String> allowedPatterns = Set.of(
|
||||
"API_", "APP_", "SERVICE_", "ENDPOINT_", "URL_", "HOST_", "PORT_"
|
||||
);
|
||||
|
||||
for (Map.Entry<String, String> entry : allEnvVars.entrySet()) {
|
||||
String key = entry.getKey();
|
||||
// 排除敏感的环境变量
|
||||
if (!isSensitiveEnvVar(key) && isAllowedEnvVar(key, allowedPatterns)) {
|
||||
safeVars.put(key, entry.getValue());
|
||||
}
|
||||
}
|
||||
|
||||
return safeVars;
|
||||
}
|
||||
|
||||
/**
|
||||
* 检查是否为敏感环境变量
|
||||
*/
|
||||
private boolean isSensitiveEnvVar(String key) {
|
||||
String keyUpper = key.toUpperCase();
|
||||
return keyUpper.contains("PASSWORD") ||
|
||||
keyUpper.contains("SECRET") ||
|
||||
keyUpper.contains("KEY") ||
|
||||
keyUpper.contains("TOKEN") ||
|
||||
keyUpper.contains("CREDENTIAL");
|
||||
}
|
||||
|
||||
/**
|
||||
* 检查环境变量是否在允许列表中
|
||||
*/
|
||||
private boolean isAllowedEnvVar(String key, Set<String> allowedPatterns) {
|
||||
return allowedPatterns.stream().anyMatch(key::startsWith);
|
||||
}
|
||||
|
||||
/**
|
||||
* 安全地获取流程变量
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
private Map<String, Object> getVariable(DelegateExecution execution, String name) {
|
||||
try {
|
||||
Object variable = execution.getVariable(name);
|
||||
if (variable instanceof Map) {
|
||||
return (Map<String, Object>) variable;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.warn("获取流程变量失败: {}, 错误: {}", name, e.getMessage());
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* 批量解析对象中的所有表达式(递归)
|
||||
*/
|
||||
public Map<String, Object> resolveObject(Map<String, Object> input, DelegateExecution execution) {
|
||||
if (input == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
Map<String, Object> result = new HashMap<>();
|
||||
|
||||
for (Map.Entry<String, Object> entry : input.entrySet()) {
|
||||
String key = entry.getKey();
|
||||
Object value = entry.getValue();
|
||||
|
||||
try {
|
||||
result.put(key, resolveValue(value, execution));
|
||||
} catch (Exception e) {
|
||||
log.error("解析对象字段失败: {}, 值: {}, 错误: {}", key, value, e.getMessage());
|
||||
// 解析失败时保留原值
|
||||
result.put(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* 解析单个值(递归处理嵌套结构)
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
private Object resolveValue(Object value, DelegateExecution execution) {
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (value instanceof String) {
|
||||
// 解析字符串表达式
|
||||
return evaluate((String) value, execution);
|
||||
|
||||
} else if (value instanceof Map) {
|
||||
// 递归解析嵌套对象
|
||||
return resolveObject((Map<String, Object>) value, execution);
|
||||
|
||||
} else if (value instanceof List) {
|
||||
// 解析数组
|
||||
List<?> list = (List<?>) value;
|
||||
List<Object> resolvedList = new ArrayList<>();
|
||||
for (Object item : list) {
|
||||
resolvedList.add(resolveValue(item, execution));
|
||||
}
|
||||
return resolvedList;
|
||||
|
||||
} else {
|
||||
// 其他类型直接返回
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 表达式安全验证
|
||||
*/
|
||||
private void validateExpression(String expression) {
|
||||
if (expression == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
// 长度检查
|
||||
if (expression.length() > MAX_EXPRESSION_LENGTH) {
|
||||
throw new SecurityException("表达式长度超出限制: " + expression.length());
|
||||
}
|
||||
|
||||
// 危险关键词检查
|
||||
for (String blocked : BLOCKED_KEYWORDS) {
|
||||
if (expression.contains(blocked)) {
|
||||
throw new SecurityException("表达式包含禁止使用的关键词: " + blocked);
|
||||
}
|
||||
}
|
||||
|
||||
// 简单的语法检查
|
||||
long openBraces = expression.chars().filter(c -> c == '{').count();
|
||||
long closeBraces = expression.chars().filter(c -> c == '}').count();
|
||||
if (openBraces != closeBraces) {
|
||||
throw new IllegalArgumentException("表达式语法错误:大括号不匹配");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 清空表达式缓存
|
||||
*/
|
||||
public void clearCache() {
|
||||
expressionCache.clear();
|
||||
log.info("表达式缓存已清空");
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取缓存统计信息
|
||||
*/
|
||||
public Map<String, Object> getCacheStats() {
|
||||
Map<String, Object> stats = new HashMap<>();
|
||||
stats.put("cacheSize", expressionCache.size());
|
||||
stats.put("maxCacheSize", MAX_CACHE_SIZE);
|
||||
stats.put("cacheHitRatio", calculateCacheHitRatio());
|
||||
return stats;
|
||||
}
|
||||
|
||||
private double calculateCacheHitRatio() {
|
||||
// 简化实现,实际可以使用更精确的统计
|
||||
return expressionCache.size() > 0 ? 0.85 : 0.0;
|
||||
}
|
||||
}
|
||||
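A standalone sketch of the Jakarta EL evaluation that ExpressionEngine above performs, with nested Maps standing in for the nodes process variable. Running it requires a Jakarta EL implementation (such as Expressly) on the classpath, which is assumed here.

// Hedged example: resolving ${nodes.node1.output.count > 10 ? 'high' : 'low'}
// against a Map, mirroring what ExpressionEngine does without a DelegateExecution.
import jakarta.el.ExpressionFactory;
import jakarta.el.StandardELContext;
import jakarta.el.ValueExpression;

import java.util.Map;

public class ExpressionSketch {

    public static void main(String[] args) {
        ExpressionFactory factory = ExpressionFactory.newInstance();
        StandardELContext context = new StandardELContext(factory);

        // nodes.node1.output.count = 42, mimicking an upstream node's output
        Map<String, Object> nodes = Map.of(
                "node1", Map.of("output", Map.of("count", 42)));
        ValueExpression nodesVar = factory.createValueExpression(nodes, Map.class);
        context.getVariableMapper().setVariable("nodes", nodesVar);

        ValueExpression expr = factory.createValueExpression(
                context, "${nodes.node1.output.count > 10 ? 'high' : 'low'}", Object.class);
        System.out.println(expr.getValue(context)); // high
    }
}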
@ -0,0 +1,48 @@
package com.flowable.devops.expression;

/**
 * 表达式解析异常
 *
 * 在表达式解析或执行过程中出现错误时抛出
 */
public class ExpressionEvaluationException extends RuntimeException {

    private final String expression;

    public ExpressionEvaluationException(String message) {
        super(message);
        this.expression = null;
    }

    public ExpressionEvaluationException(String message, Throwable cause) {
        super(message, cause);
        this.expression = null;
    }

    public ExpressionEvaluationException(String message, String expression) {
        super(message);
        this.expression = expression;
    }

    public ExpressionEvaluationException(String message, String expression, Throwable cause) {
        super(message, cause);
        this.expression = expression;
    }

    /**
     * 获取导致异常的表达式
     */
    public String getExpression() {
        return expression;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(getClass().getSimpleName()).append(": ").append(getMessage());
        if (expression != null) {
            sb.append(" [expression=").append(expression).append("]");
        }
        return sb.toString();
    }
}
@ -0,0 +1,105 @@
|
||||
package com.flowable.devops.repository;
|
||||
|
||||
import com.flowable.devops.entity.NodeExecutionLog;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.data.jpa.repository.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* 节点执行日志数据访问接口
|
||||
*/
|
||||
@Repository
|
||||
public interface NodeExecutionLogRepository extends JpaRepository<NodeExecutionLog, Long> {
|
||||
|
||||
/**
|
||||
* 根据执行ID查询节点日志
|
||||
*/
|
||||
List<NodeExecutionLog> findByExecutionIdOrderByStartedAt(String executionId);
|
||||
|
||||
/**
|
||||
* 根据节点ID查询日志
|
||||
*/
|
||||
List<NodeExecutionLog> findByNodeIdOrderByStartedAtDesc(String nodeId);
|
||||
|
||||
/**
|
||||
* 根据节点类型查询日志
|
||||
*/
|
||||
List<NodeExecutionLog> findByNodeTypeOrderByStartedAtDesc(String nodeType);
|
||||
|
||||
/**
|
||||
* 根据执行状态查询
|
||||
*/
|
||||
List<NodeExecutionLog> findByStatus(NodeExecutionLog.ExecutionStatus status);
|
||||
|
||||
/**
|
||||
* 根据执行ID和节点ID查询(唯一记录)
|
||||
*/
|
||||
List<NodeExecutionLog> findByExecutionIdAndNodeId(String executionId, String nodeId);
|
||||
|
||||
/**
|
||||
* 查询失败的节点执行日志
|
||||
*/
|
||||
@Query("SELECT n FROM NodeExecutionLog n WHERE n.status = 'FAILED' ORDER BY n.startedAt DESC")
|
||||
List<NodeExecutionLog> findFailedExecutions();
|
||||
|
||||
/**
|
||||
* 查询指定时间范围内的节点执行日志
|
||||
*/
|
||||
@Query("SELECT n FROM NodeExecutionLog n WHERE n.startedAt BETWEEN :startTime AND :endTime ORDER BY n.startedAt DESC")
|
||||
List<NodeExecutionLog> findByStartedAtBetween(
|
||||
@Param("startTime") LocalDateTime startTime,
|
||||
@Param("endTime") LocalDateTime endTime
|
||||
);
|
||||
|
||||
/**
|
||||
* 统计各状态的节点执行数量
|
||||
*/
|
||||
@Query("SELECT n.status, COUNT(n) FROM NodeExecutionLog n GROUP BY n.status")
|
||||
List<Object[]> countByStatus();
|
||||
|
||||
/**
|
||||
* 统计各节点类型的执行数量
|
||||
*/
|
||||
@Query("SELECT n.nodeType, COUNT(n) FROM NodeExecutionLog n GROUP BY n.nodeType ORDER BY COUNT(n) DESC")
|
||||
List<Object[]> countByNodeType();
|
||||
|
||||
/**
|
||||
* 查询平均执行时长
|
||||
*/
|
||||
@Query("SELECT AVG(n.durationMs) FROM NodeExecutionLog n WHERE n.status = 'SUCCESS' AND n.durationMs IS NOT NULL")
|
||||
Double findAverageExecutionTime();
|
||||
|
||||
/**
|
||||
* 查询指定节点类型的平均执行时长
|
||||
*/
|
||||
@Query("SELECT AVG(n.durationMs) FROM NodeExecutionLog n WHERE n.nodeType = :nodeType AND n.status = 'SUCCESS' AND n.durationMs IS NOT NULL")
|
||||
Double findAverageExecutionTimeByNodeType(@Param("nodeType") String nodeType);
|
||||
|
||||
/**
|
||||
* 查询最慢的节点执行记录
|
||||
*/
|
||||
@Query("SELECT n FROM NodeExecutionLog n WHERE n.status = 'SUCCESS' AND n.durationMs IS NOT NULL ORDER BY n.durationMs DESC")
|
||||
List<NodeExecutionLog> findSlowestExecutions();
|
||||
|
||||
/**
|
||||
* 根据执行ID查询执行统计
|
||||
*/
|
||||
@Query("SELECT n.status, COUNT(n), AVG(n.durationMs) FROM NodeExecutionLog n WHERE n.executionId = :executionId GROUP BY n.status")
|
||||
List<Object[]> findExecutionStatistics(@Param("executionId") String executionId);
|
||||
|
||||
/**
|
||||
* 删除指定时间之前的日志(清理历史数据)
|
||||
*/
|
||||
@Query("DELETE FROM NodeExecutionLog n WHERE n.startedAt < :cutoffTime")
|
||||
void deleteLogsBefore(@Param("cutoffTime") LocalDateTime cutoffTime);
|
||||
|
||||
/**
|
||||
* 查询指定执行中失败的节点
|
||||
*/
|
||||
@Query("SELECT n FROM NodeExecutionLog n WHERE n.executionId = :executionId AND n.status = 'FAILED'")
|
||||
List<NodeExecutionLog> findFailedNodesByExecution(@Param("executionId") String executionId);
|
||||
}
|
||||
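One note on the deleteLogsBefore query above (and deleteCompletedExecutionsBefore further down): Spring Data JPA executes a JPQL DELETE only when the method is also annotated with @Modifying and runs inside a transaction. A hedged correction sketch, using a hypothetical cleanup interface rather than renaming the committed repository:

// Hedged sketch: @Modifying + @Transactional are required for JPQL DELETE/UPDATE.
// The interface name is illustrative only.
import com.flowable.devops.entity.NodeExecutionLog;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.transaction.annotation.Transactional;

import java.time.LocalDateTime;

public interface NodeExecutionLogCleanup extends JpaRepository<NodeExecutionLog, Long> {

    @Modifying
    @Transactional
    @Query("DELETE FROM NodeExecutionLog n WHERE n.startedAt < :cutoffTime")
    int deleteLogsBefore(@Param("cutoffTime") LocalDateTime cutoffTime);
}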
@ -0,0 +1,97 @@
|
||||
package com.flowable.devops.repository;
|
||||
|
||||
import com.flowable.devops.entity.NodeType;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.data.jpa.repository.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
/**
|
||||
* 节点类型数据访问接口
|
||||
*/
|
||||
@Repository
|
||||
public interface NodeTypeRepository extends JpaRepository<NodeType, String> {
|
||||
|
||||
/**
|
||||
* 按显示顺序和ID排序获取所有节点类型
|
||||
*/
|
||||
List<NodeType> findAllByOrderByDisplayOrderAscIdAsc();
|
||||
|
||||
/**
|
||||
* 根据分类查询节点类型(按显示顺序排序)
|
||||
*/
|
||||
List<NodeType> findByCategoryOrderByDisplayOrderAscIdAsc(NodeType.NodeCategory category);
|
||||
|
||||
/**
|
||||
* 查询所有启用的节点类型(按显示顺序排序)
|
||||
*/
|
||||
List<NodeType> findByEnabledTrueOrderByDisplayOrderAsc();
|
||||
|
||||
/**
|
||||
* 根据分类查询启用的节点类型
|
||||
*/
|
||||
List<NodeType> findByCategoryAndEnabledTrueOrderByDisplayOrderAsc(NodeType.NodeCategory category);
|
||||
|
||||
/**
|
||||
* 获取指定分类下的最大显示顺序
|
||||
*/
|
||||
@Query("SELECT MAX(n.displayOrder) FROM NodeType n WHERE n.category = :category")
|
||||
Integer findMaxDisplayOrderByCategory(@Param("category") NodeType.NodeCategory category);
|
||||
|
||||
/**
|
||||
* 获取所有不同的分类
|
||||
*/
|
||||
@Query("SELECT DISTINCT CAST(n.category AS string) FROM NodeType n ORDER BY CAST(n.category AS string)")
|
||||
List<String> findDistinctCategories();
|
||||
|
||||
/**
|
||||
* 统计启用的节点类型数量
|
||||
*/
|
||||
long countByEnabledTrue();
|
||||
|
||||
/**
|
||||
* 按分类统计节点类型数量
|
||||
*/
|
||||
@Query("SELECT CAST(n.category AS string), COUNT(n) FROM NodeType n GROUP BY n.category")
|
||||
List<Object[]> countByCategory();
|
||||
|
||||
/**
|
||||
* 根据名称查询
|
||||
*/
|
||||
Optional<NodeType> findByName(String name);
|
||||
|
||||
/**
|
||||
* 根据实现类查询
|
||||
*/
|
||||
Optional<NodeType> findByImplementationClass(String implementationClass);
|
||||
|
||||
/**
|
||||
* 检查名称是否已存在
|
||||
*/
|
||||
boolean existsByName(String name);
|
||||
|
||||
/**
|
||||
* 检查实现类是否已存在
|
||||
*/
|
||||
boolean existsByImplementationClass(String implementationClass);
|
||||
|
||||
/**
|
||||
* 根据显示名称模糊查询
|
||||
*/
|
||||
List<NodeType> findByNameContainingIgnoreCase(String name);
|
||||
|
||||
/**
|
||||
* 统计各分类的启用节点数量
|
||||
*/
|
||||
@Query("SELECT CAST(n.category AS string), COUNT(n) FROM NodeType n WHERE n.enabled = true GROUP BY n.category")
|
||||
List<Object[]> countEnabledByCategory();
|
||||
|
||||
/**
|
||||
* 查询指定分类下启用的节点类型
|
||||
*/
|
||||
@Query("SELECT n FROM NodeType n WHERE n.category = :category AND n.enabled = true ORDER BY n.displayOrder, n.id")
|
||||
List<NodeType> findEnabledByCategory(@Param("category") NodeType.NodeCategory category);
|
||||
}
|
||||
@ -0,0 +1,81 @@
package com.flowable.devops.repository;

import com.flowable.devops.entity.WorkflowDefinition;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;

import java.util.List;
import java.util.Optional;

/**
 * 工作流定义数据访问接口
 */
@Repository
public interface WorkflowDefinitionRepository extends JpaRepository<WorkflowDefinition, String> {

    /**
     * 根据状态查询工作流
     */
    List<WorkflowDefinition> findByStatus(WorkflowDefinition.WorkflowStatus status);

    /**
     * 根据创建者查询工作流
     */
    List<WorkflowDefinition> findByCreatedBy(String createdBy);

    /**
     * 根据名称模糊查询
     */
    List<WorkflowDefinition> findByNameContainingIgnoreCase(String name);

    /**
     * 根据状态和创建者查询
     */
    List<WorkflowDefinition> findByStatusAndCreatedBy(
            WorkflowDefinition.WorkflowStatus status,
            String createdBy
    );

    /**
     * 根据Flowable流程定义ID查询
     */
    Optional<WorkflowDefinition> findByFlowableProcessDefinitionId(String flowableProcessDefinitionId);

    /**
     * 查询激活状态的工作流
     */
    @Query("SELECT w FROM WorkflowDefinition w WHERE w.status = 'ACTIVE' ORDER BY w.updatedAt DESC")
    List<WorkflowDefinition> findActiveWorkflows();

    /**
     * 统计各状态的工作流数量
     */
    @Query("SELECT w.status, COUNT(w) FROM WorkflowDefinition w GROUP BY w.status")
    List<Object[]> countByStatus();

    /**
     * 根据多个条件查询(动态查询)
     */
    @Query("SELECT w FROM WorkflowDefinition w WHERE " +
            "(:status IS NULL OR w.status = :status) AND " +
            "(:createdBy IS NULL OR w.createdBy = :createdBy) AND " +
            "(:name IS NULL OR LOWER(w.name) LIKE LOWER(CONCAT('%', :name, '%'))) " +
            "ORDER BY w.updatedAt DESC")
    List<WorkflowDefinition> findByConditions(
            @Param("status") WorkflowDefinition.WorkflowStatus status,
            @Param("createdBy") String createdBy,
            @Param("name") String name
    );

    /**
     * 检查名称是否已存在
     */
    boolean existsByName(String name);

    /**
     * 检查名称是否已存在(排除指定ID)
     */
    boolean existsByNameAndIdNot(String name, String id);
}
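A short usage sketch of the dynamic findByConditions query declared above; the service class here is assumed and not part of the commit. Parameters passed as null are simply ignored by the JPQL conditions.

// Hedged example: querying active workflow definitions by (partial) name.
import com.flowable.devops.entity.WorkflowDefinition;
import com.flowable.devops.repository.WorkflowDefinitionRepository;

import java.util.List;

public class WorkflowQuerySketch {

    private final WorkflowDefinitionRepository repository;

    public WorkflowQuerySketch(WorkflowDefinitionRepository repository) {
        this.repository = repository;
    }

    public List<WorkflowDefinition> findActiveByName(String namePart) {
        // status fixed to ACTIVE, createdBy left null so that condition is skipped
        return repository.findByConditions(
                WorkflowDefinition.WorkflowStatus.ACTIVE, null, namePart);
    }
}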
@ -0,0 +1,104 @@
|
||||
package com.flowable.devops.repository;
|
||||
|
||||
import com.flowable.devops.entity.WorkflowExecution;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.data.jpa.repository.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
/**
|
||||
* 工作流执行记录数据访问接口
|
||||
*/
|
||||
@Repository
|
||||
public interface WorkflowExecutionRepository extends JpaRepository<WorkflowExecution, String> {
|
||||
|
||||
/**
|
||||
* 根据工作流定义ID查询执行记录
|
||||
*/
|
||||
List<WorkflowExecution> findByWorkflowDefinitionIdOrderByStartedAtDesc(String workflowDefinitionId);
|
||||
|
||||
/**
|
||||
* 根据执行状态查询
|
||||
*/
|
||||
List<WorkflowExecution> findByStatus(WorkflowExecution.ExecutionStatus status);
|
||||
|
||||
/**
|
||||
* 根据触发者查询
|
||||
*/
|
||||
List<WorkflowExecution> findByTriggeredByOrderByStartedAtDesc(String triggeredBy);
|
||||
|
||||
/**
|
||||
* 根据Flowable流程实例ID查询
|
||||
*/
|
||||
Optional<WorkflowExecution> findByFlowableProcessInstanceId(String flowableProcessInstanceId);
|
||||
|
||||
/**
|
||||
* 根据触发方式查询
|
||||
*/
|
||||
List<WorkflowExecution> findByTriggerType(WorkflowExecution.TriggerType triggerType);
|
||||
|
||||
/**
|
||||
* 查询指定时间范围内的执行记录
|
||||
*/
|
||||
@Query("SELECT e FROM WorkflowExecution e WHERE e.startedAt BETWEEN :startTime AND :endTime ORDER BY e.startedAt DESC")
|
||||
List<WorkflowExecution> findByStartedAtBetween(
|
||||
@Param("startTime") LocalDateTime startTime,
|
||||
@Param("endTime") LocalDateTime endTime
|
||||
);
|
||||
|
||||
/**
|
||||
* 查询正在运行的执行记录
|
||||
*/
|
||||
@Query("SELECT e FROM WorkflowExecution e WHERE e.status = 'RUNNING' ORDER BY e.startedAt")
|
||||
List<WorkflowExecution> findRunningExecutions();
|
||||
|
||||
/**
|
||||
* 统计各状态的执行数量
|
||||
*/
|
||||
@Query("SELECT e.status, COUNT(e) FROM WorkflowExecution e GROUP BY e.status")
|
||||
List<Object[]> countByStatus();
|
||||
|
||||
/**
|
||||
* 统计指定工作流的执行次数
|
||||
*/
|
||||
@Query("SELECT COUNT(e) FROM WorkflowExecution e WHERE e.workflowDefinitionId = :workflowId")
|
||||
long countByWorkflowDefinitionId(@Param("workflowId") String workflowId);
|
||||
|
||||
/**
|
||||
* 查询最近的执行记录
|
||||
*/
|
||||
@Query("SELECT e FROM WorkflowExecution e ORDER BY e.startedAt DESC")
|
||||
List<WorkflowExecution> findRecentExecutions();
|
||||
|
||||
/**
|
||||
* 根据多个条件查询(动态查询)
|
||||
*/
|
||||
@Query("SELECT e FROM WorkflowExecution e WHERE " +
|
||||
"(:workflowDefinitionId IS NULL OR e.workflowDefinitionId = :workflowDefinitionId) AND " +
|
||||
"(:status IS NULL OR e.status = :status) AND " +
|
||||
"(:triggeredBy IS NULL OR e.triggeredBy = :triggeredBy) AND " +
|
||||
"(:triggerType IS NULL OR e.triggerType = :triggerType) " +
|
||||
"ORDER BY e.startedAt DESC")
|
||||
List<WorkflowExecution> findByConditions(
|
||||
@Param("workflowDefinitionId") String workflowDefinitionId,
|
||||
@Param("status") WorkflowExecution.ExecutionStatus status,
|
||||
@Param("triggeredBy") String triggeredBy,
|
||||
@Param("triggerType") WorkflowExecution.TriggerType triggerType
|
||||
);
|
||||
|
||||
/**
|
||||
* 查询失败的执行记录
|
||||
*/
|
||||
@Query("SELECT e FROM WorkflowExecution e WHERE e.status = 'FAILED' ORDER BY e.startedAt DESC")
|
||||
List<WorkflowExecution> findFailedExecutions();
|
||||
|
||||
/**
|
||||
* 删除指定时间之前的执行记录(清理历史数据)
|
||||
*/
|
||||
@Query("DELETE FROM WorkflowExecution e WHERE e.endedAt < :cutoffTime AND e.status IN ('COMPLETED', 'FAILED', 'CANCELLED')")
|
||||
void deleteCompletedExecutionsBefore(@Param("cutoffTime") LocalDateTime cutoffTime);
|
||||
}
|
||||
@ -0,0 +1,15 @@
package com.flowable.devops.service;

/**
 * 节点类型未找到异常
 */
public class NodeTypeNotFoundException extends RuntimeException {

    public NodeTypeNotFoundException(String message) {
        super(message);
    }

    public NodeTypeNotFoundException(String message, Throwable cause) {
        super(message, cause);
    }
}
@ -0,0 +1,392 @@
package com.flowable.devops.service;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.flowable.devops.entity.NodeType;
import com.flowable.devops.repository.NodeTypeRepository;
import com.flowable.devops.workflow.node.registry.NodeTypeRegistry;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.time.LocalDateTime;
import java.util.*;

/**
 * Node type service layer
 *
 * Core features:
 * 1. CRUD operations for node types
 * 2. Node type metadata management
 * 3. Node type registration and discovery
 * 4. Output schema management
 */
@Slf4j
@Service
@Transactional
public class NodeTypeService {

    @Autowired
    private NodeTypeRepository nodeTypeRepository;

    @Autowired
    private NodeTypeRegistry nodeTypeRegistry;

    private final ObjectMapper objectMapper = new ObjectMapper();

/**
|
||||
* 获取所有节点类型
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public List<NodeType> getAllNodeTypes() {
|
||||
return nodeTypeRepository.findAllByOrderByDisplayOrderAscIdAsc();
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取指定分类的节点类型
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public List<NodeType> getNodeTypesByCategory(String category) {
|
||||
try {
|
||||
NodeType.NodeCategory categoryEnum = NodeType.NodeCategory.valueOf(category.toUpperCase());
|
||||
return nodeTypeRepository.findByCategoryOrderByDisplayOrderAscIdAsc(categoryEnum);
|
||||
} catch (IllegalArgumentException e) {
|
||||
return new ArrayList<>();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 根据ID获取节点类型
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public NodeType getNodeType(String nodeTypeId) {
|
||||
return nodeTypeRepository.findById(nodeTypeId)
|
||||
.orElseThrow(() -> new NodeTypeNotFoundException("节点类型不存在: " + nodeTypeId));
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建节点类型
|
||||
*/
|
||||
public NodeType createNodeType(NodeType nodeType) {
|
||||
log.info("创建节点类型: {}", nodeType.getId());
|
||||
|
||||
try {
|
||||
// 1. 验证ID唯一性
|
||||
if (nodeTypeRepository.existsById(nodeType.getId())) {
|
||||
throw new NodeTypeServiceException("节点类型ID已存在: " + nodeType.getId());
|
||||
}
|
||||
|
||||
// 2. 验证必要字段
|
||||
validateNodeType(nodeType);
|
||||
|
||||
// 3. 设置创建时间
|
||||
nodeType.setCreatedAt(LocalDateTime.now());
|
||||
nodeType.setUpdatedAt(LocalDateTime.now());
|
||||
|
||||
// 4. 如果没有设置显示顺序,自动分配
|
||||
if (nodeType.getDisplayOrder() == null) {
|
||||
Integer maxOrder = nodeTypeRepository.findMaxDisplayOrderByCategory(nodeType.getCategory());
|
||||
nodeType.setDisplayOrder((maxOrder != null ? maxOrder : 0) + 10);
|
||||
}
|
||||
|
||||
// 5. 保存到数据库
|
||||
NodeType saved = nodeTypeRepository.save(nodeType);
|
||||
|
||||
// 6. 如果节点类型启用,注册到注册表
|
||||
if (nodeType.isEnabled()) {
|
||||
registerNodeTypeToRegistry(saved);
|
||||
}
|
||||
|
||||
log.info("节点类型创建成功: {}", saved.getId());
|
||||
return saved;
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("创建节点类型失败: {}", nodeType.getId(), e);
|
||||
throw new NodeTypeServiceException("创建节点类型失败: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 更新节点类型
|
||||
*/
|
||||
public NodeType updateNodeType(String id, NodeType nodeType) {
|
||||
log.info("更新节点类型: {}", id);
|
||||
|
||||
try {
|
||||
NodeType existing = getNodeType(id);
|
||||
|
||||
// 1. 更新字段
|
||||
if (nodeType.getName() != null) {
|
||||
existing.setName(nodeType.getName());
|
||||
}
|
||||
if (nodeType.getDescription() != null) {
|
||||
existing.setDescription(nodeType.getDescription());
|
||||
}
|
||||
if (nodeType.getCategory() != null) {
|
||||
existing.setCategory(nodeType.getCategory());
|
||||
}
|
||||
if (nodeType.getIcon() != null) {
|
||||
existing.setIcon(nodeType.getIcon());
|
||||
}
|
||||
if (nodeType.getFields() != null) {
|
||||
existing.setFields(nodeType.getFields());
|
||||
}
|
||||
if (nodeType.getOutputSchema() != null) {
|
||||
existing.setOutputSchema(nodeType.getOutputSchema());
|
||||
}
|
||||
if (nodeType.getDisplayOrder() != null) {
|
||||
existing.setDisplayOrder(nodeType.getDisplayOrder());
|
||||
}
|
||||
existing.setEnabled(nodeType.isEnabled());
|
||||
|
||||
// 2. 验证更新后的数据
|
||||
validateNodeType(existing);
|
||||
|
||||
// 3. 更新时间
|
||||
existing.setUpdatedAt(LocalDateTime.now());
|
||||
|
||||
// 4. 保存更新
|
||||
NodeType updated = nodeTypeRepository.save(existing);
|
||||
|
||||
// 5. 重新注册到注册表
|
||||
if (updated.isEnabled()) {
|
||||
registerNodeTypeToRegistry(updated);
|
||||
} else {
|
||||
nodeTypeRegistry.unregister(updated.getId());
|
||||
}
|
||||
|
||||
log.info("节点类型更新成功: {}", id);
|
||||
return updated;
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("更新节点类型失败: {}", id, e);
|
||||
throw new NodeTypeServiceException("更新节点类型失败: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 删除节点类型
|
||||
*/
|
||||
public void deleteNodeType(String id) {
|
||||
log.info("删除节点类型: {}", id);
|
||||
|
||||
try {
|
||||
NodeType nodeType = getNodeType(id);
|
||||
|
||||
// 1. 检查是否被使用(这里可以添加业务逻辑检查)
|
||||
// TODO: 检查工作流定义中是否使用了此节点类型
|
||||
|
||||
// 2. 从注册表中移除
|
||||
nodeTypeRegistry.unregister(id);
|
||||
|
||||
// 3. 从数据库删除
|
||||
nodeTypeRepository.delete(nodeType);
|
||||
|
||||
log.info("节点类型删除成功: {}", id);
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("删除节点类型失败: {}", id, e);
|
||||
throw new NodeTypeServiceException("删除节点类型失败: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取节点类型分类列表
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public List<String> getCategories() {
|
||||
return nodeTypeRepository.findDistinctCategories();
|
||||
}
|
||||
|
||||
/**
|
||||
* 刷新节点类型注册表
|
||||
* 重新加载所有启用的节点类型到注册表中
|
||||
*/
|
||||
public void refreshRegistry() {
|
||||
log.info("刷新节点类型注册表");
|
||||
|
||||
try {
|
||||
// 1. 清空注册表
|
||||
nodeTypeRegistry.clear();
|
||||
|
||||
// 2. 先检查总数
|
||||
long totalCount = nodeTypeRepository.count();
|
||||
long enabledCount = nodeTypeRepository.countByEnabledTrue();
|
||||
log.info("数据库中总共有 {} 个节点类型,其中 {} 个启用", totalCount, enabledCount);
|
||||
|
||||
// 3. 重新加载所有启用的节点类型
|
||||
List<NodeType> enabledTypes = nodeTypeRepository.findByEnabledTrueOrderByDisplayOrderAsc();
|
||||
log.info("查询返回 {} 个启用的节点类型", enabledTypes.size());
|
||||
|
||||
for (NodeType nodeType : enabledTypes) {
|
||||
registerNodeTypeToRegistry(nodeType);
|
||||
}
|
||||
|
||||
log.info("节点类型注册表刷新完成,已加载 {} 个节点类型", enabledTypes.size());
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("刷新节点类型注册表失败", e);
|
||||
throw new NodeTypeServiceException("刷新注册表失败: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取节点类型输出模式
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public JsonNode getOutputSchema(String nodeTypeId) {
|
||||
NodeType nodeType = getNodeType(nodeTypeId);
|
||||
return nodeType.getOutputSchema();
|
||||
}
|
||||
|
||||
/**
|
||||
* 更新节点类型输出模式
|
||||
*/
|
||||
public NodeType updateOutputSchema(String nodeTypeId, JsonNode outputSchema) {
|
||||
log.info("更新节点类型输出模式: {}", nodeTypeId);
|
||||
|
||||
try {
|
||||
NodeType nodeType = getNodeType(nodeTypeId);
|
||||
nodeType.setOutputSchema(outputSchema);
|
||||
nodeType.setUpdatedAt(LocalDateTime.now());
|
||||
|
||||
NodeType updated = nodeTypeRepository.save(nodeType);
|
||||
|
||||
// 重新注册到注册表
|
||||
if (updated.isEnabled()) {
|
||||
registerNodeTypeToRegistry(updated);
|
||||
}
|
||||
|
||||
log.info("节点类型输出模式更新成功: {}", nodeTypeId);
|
||||
return updated;
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("更新节点类型输出模式失败: {}", nodeTypeId, e);
|
||||
throw new NodeTypeServiceException("更新输出模式失败: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 启用/禁用节点类型
|
||||
*/
|
||||
public NodeType toggleEnabled(String nodeTypeId, boolean enabled) {
|
||||
log.info("{}节点类型: {}", enabled ? "启用" : "禁用", nodeTypeId);
|
||||
|
||||
try {
|
||||
NodeType nodeType = getNodeType(nodeTypeId);
|
||||
nodeType.setEnabled(enabled);
|
||||
nodeType.setUpdatedAt(LocalDateTime.now());
|
||||
|
||||
NodeType updated = nodeTypeRepository.save(nodeType);
|
||||
|
||||
// 更新注册表
|
||||
if (enabled) {
|
||||
registerNodeTypeToRegistry(updated);
|
||||
} else {
|
||||
nodeTypeRegistry.unregister(nodeTypeId);
|
||||
}
|
||||
|
||||
log.info("节点类型状态更新成功: {} -> {}", nodeTypeId, enabled);
|
||||
return updated;
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("更新节点类型状态失败: {}", nodeTypeId, e);
|
||||
throw new NodeTypeServiceException("更新节点类型状态失败: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取节点类型统计信息
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public NodeTypeStatistics getStatistics() {
|
||||
NodeTypeStatistics stats = new NodeTypeStatistics();
|
||||
|
||||
// 总数统计
|
||||
long totalCount = nodeTypeRepository.count();
|
||||
long enabledCount = nodeTypeRepository.countByEnabledTrue();
|
||||
|
||||
stats.setTotalCount(totalCount);
|
||||
stats.setEnabledCount(enabledCount);
|
||||
stats.setDisabledCount(totalCount - enabledCount);
|
||||
|
||||
// 分类统计
|
||||
Map<String, Long> categoryStats = new HashMap<>();
|
||||
List<Object[]> categoryResults = nodeTypeRepository.countByCategory();
|
||||
for (Object[] result : categoryResults) {
|
||||
categoryStats.put((String) result[0], (Long) result[1]);
|
||||
}
|
||||
stats.setCategoryStats(categoryStats);
|
||||
|
||||
return stats;
|
||||
}
|
||||
|
||||
/**
|
||||
* 验证节点类型数据
|
||||
*/
|
||||
private void validateNodeType(NodeType nodeType) {
|
||||
if (nodeType.getId() == null || nodeType.getId().trim().isEmpty()) {
|
||||
throw new NodeTypeServiceException("节点类型ID不能为空");
|
||||
}
|
||||
if (nodeType.getName() == null || nodeType.getName().trim().isEmpty()) {
|
||||
throw new NodeTypeServiceException("节点类型名称不能为空");
|
||||
}
|
||||
if (nodeType.getCategory() == null) {
|
||||
throw new NodeTypeServiceException("节点类型分类不能为空");
|
||||
}
|
||||
|
||||
// 验证字段定义JSON格式
|
||||
if (nodeType.getFields() != null) {
|
||||
try {
|
||||
objectMapper.readTree(nodeType.getFields().toString());
|
||||
} catch (Exception e) {
|
||||
throw new NodeTypeServiceException("字段定义JSON格式无效: " + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
// 验证输出模式JSON格式
|
||||
if (nodeType.getOutputSchema() != null) {
|
||||
try {
|
||||
objectMapper.readTree(nodeType.getOutputSchema().toString());
|
||||
} catch (Exception e) {
|
||||
throw new NodeTypeServiceException("输出模式JSON格式无效: " + e.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 将节点类型注册到注册表
|
||||
*/
|
||||
private void registerNodeTypeToRegistry(NodeType nodeType) {
|
||||
try {
|
||||
nodeTypeRegistry.registerFromDatabase(nodeType);
|
||||
log.debug("节点类型已注册到注册表: {}", nodeType.getId());
|
||||
} catch (Exception e) {
|
||||
log.warn("注册节点类型到注册表失败: {} - {}", nodeType.getId(), e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 节点类型统计信息
|
||||
*/
|
||||
public static class NodeTypeStatistics {
|
||||
private long totalCount;
|
||||
private long enabledCount;
|
||||
private long disabledCount;
|
||||
private Map<String, Long> categoryStats;
|
||||
|
||||
// Getters and Setters
|
||||
public long getTotalCount() { return totalCount; }
|
||||
public void setTotalCount(long totalCount) { this.totalCount = totalCount; }
|
||||
|
||||
public long getEnabledCount() { return enabledCount; }
|
||||
public void setEnabledCount(long enabledCount) { this.enabledCount = enabledCount; }
|
||||
|
||||
public long getDisabledCount() { return disabledCount; }
|
||||
public void setDisabledCount(long disabledCount) { this.disabledCount = disabledCount; }
|
||||
|
||||
public Map<String, Long> getCategoryStats() { return categoryStats; }
|
||||
public void setCategoryStats(Map<String, Long> categoryStats) { this.categoryStats = categoryStats; }
|
||||
}
|
||||
}
|
||||
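For orientation, here is a sketch of how a caller (for example a data initializer or an admin controller) might register a custom node type through the service above and then refresh the registry. It is not part of this commit: the no-arg NodeType constructor, the INTEGRATION category constant, the JsonNode-typed fields property and the concrete field/output JSON are assumptions.

package com.flowable.devops.service;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.flowable.devops.entity.NodeType;

public class NodeTypeServiceUsageSketch {

    public NodeType registerHttpNode(NodeTypeService nodeTypeService) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        NodeType nodeType = new NodeType();                  // assumed no-arg entity constructor
        nodeType.setId("http_request");                      // must be unique, see createNodeType
        nodeType.setName("HTTP Request");
        nodeType.setCategory(NodeType.NodeCategory.valueOf("INTEGRATION")); // assumed enum constant
        nodeType.setFields(mapper.readTree(                  // assumed JsonNode-typed, like outputSchema
                "{\"url\":{\"type\":\"string\",\"required\":true}}"));
        nodeType.setOutputSchema(mapper.readTree("{\"status\":\"number\",\"body\":\"object\"}"));
        nodeType.setEnabled(true);

        NodeType saved = nodeTypeService.createNodeType(nodeType); // validates, saves, registers
        nodeTypeService.refreshRegistry();                          // reload all enabled types
        return saved;
    }
}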
@ -0,0 +1,15 @@
package com.flowable.devops.service;

/**
 * Node type service exception
 */
public class NodeTypeServiceException extends RuntimeException {

    public NodeTypeServiceException(String message) {
        super(message);
    }

    public NodeTypeServiceException(String message, Throwable cause) {
        super(message, cause);
    }
}
@ -0,0 +1,15 @@
package com.flowable.devops.service;

/**
 * Thrown when a task cannot be found
 */
public class TaskNotFoundException extends RuntimeException {

    public TaskNotFoundException(String message) {
        super(message);
    }

    public TaskNotFoundException(String message, Throwable cause) {
        super(message, cause);
    }
}
@ -0,0 +1,561 @@
package com.flowable.devops.service;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.extern.slf4j.Slf4j;
import org.flowable.engine.RuntimeService;
import org.flowable.task.api.Task;
import org.flowable.task.api.TaskQuery;
import org.flowable.task.service.impl.persistence.entity.TaskEntity;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Task service layer
 *
 * Core features:
 * 1. User task querying and management
 * 2. Task completion and approval handling
 * 3. Task delegation and transfer
 * 4. Task history queries
 */
@Slf4j
@Service
@Transactional
public class TaskService {

    @Autowired
    private org.flowable.engine.TaskService flowableTaskService;

    @Autowired
    private RuntimeService runtimeService;

    private final ObjectMapper objectMapper = new ObjectMapper();

/**
|
||||
* 获取用户待办任务
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public List<TaskInfo> getTasks(String assignee) {
|
||||
log.info("获取用户待办任务: {}", assignee);
|
||||
|
||||
TaskQuery query = flowableTaskService.createTaskQuery()
|
||||
.taskAssignee(assignee)
|
||||
.orderByTaskCreateTime()
|
||||
.desc();
|
||||
|
||||
List<Task> tasks = query.list();
|
||||
return tasks.stream()
|
||||
.map(this::convertToTaskInfo)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取用户候选任务
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public List<TaskInfo> getCandidateTasks(String userId, List<String> groups) {
|
||||
log.info("获取用户候选任务: {} (组: {})", userId, groups);
|
||||
|
||||
TaskQuery query = flowableTaskService.createTaskQuery()
|
||||
.taskCandidateUser(userId)
|
||||
.orderByTaskCreateTime()
|
||||
.desc();
|
||||
|
||||
// 添加候选组条件
|
||||
if (groups != null && !groups.isEmpty()) {
|
||||
query = query.taskCandidateGroupIn(groups);
|
||||
}
|
||||
|
||||
List<Task> tasks = query.list();
|
||||
return tasks.stream()
|
||||
.map(this::convertToTaskInfo)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取任务详情
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public TaskDetail getTaskDetail(String taskId) {
|
||||
log.info("获取任务详情: {}", taskId);
|
||||
|
||||
Task task = flowableTaskService.createTaskQuery()
|
||||
.taskId(taskId)
|
||||
.singleResult();
|
||||
|
||||
if (task == null) {
|
||||
throw new TaskNotFoundException("任务不存在: " + taskId);
|
||||
}
|
||||
|
||||
TaskDetail detail = new TaskDetail();
|
||||
detail.setId(task.getId());
|
||||
detail.setName(task.getName());
|
||||
detail.setDescription(task.getDescription());
|
||||
detail.setAssignee(task.getAssignee());
|
||||
detail.setOwner(task.getOwner());
|
||||
detail.setProcessInstanceId(task.getProcessInstanceId());
|
||||
detail.setProcessDefinitionId(task.getProcessDefinitionId());
|
||||
detail.setTaskDefinitionKey(task.getTaskDefinitionKey());
|
||||
detail.setPriority(task.getPriority());
|
||||
detail.setDueDate(task.getDueDate() != null ?
|
||||
task.getDueDate().toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime() : null);
|
||||
detail.setCreateTime(task.getCreateTime().toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime());
|
||||
|
||||
// 获取任务变量
|
||||
Map<String, Object> variables = flowableTaskService.getVariables(taskId);
|
||||
detail.setVariables(variables);
|
||||
|
||||
// 获取表单数据(如果存在)
|
||||
if (task.getFormKey() != null) {
|
||||
detail.setFormKey(task.getFormKey());
|
||||
// TODO: 根据formKey获取具体的表单数据
|
||||
}
|
||||
|
||||
return detail;
|
||||
}
|
||||
|
||||
/**
|
||||
* 完成任务
|
||||
*/
|
||||
public void completeTask(String taskId, String userId, Map<String, Object> variables, String comment) {
|
||||
log.info("完成任务: {} (用户: {}, 评论: {})", taskId, userId, comment);
|
||||
|
||||
try {
|
||||
// 1. 验证任务存在且可以被当前用户完成
|
||||
Task task = flowableTaskService.createTaskQuery()
|
||||
.taskId(taskId)
|
||||
.singleResult();
|
||||
|
||||
if (task == null) {
|
||||
throw new TaskNotFoundException("任务不存在: " + taskId);
|
||||
}
|
||||
|
||||
// 2. 检查任务分配情况
|
||||
if (task.getAssignee() == null) {
|
||||
// 如果任务未分配,先认领任务
|
||||
flowableTaskService.claim(taskId, userId);
|
||||
log.info("用户 {} 认领任务: {}", userId, taskId);
|
||||
} else if (!task.getAssignee().equals(userId)) {
|
||||
throw new TaskServiceException("用户无权完成此任务: " + taskId);
|
||||
}
|
||||
|
||||
// 3. 添加评论(如果有)
|
||||
if (comment != null && !comment.trim().isEmpty()) {
|
||||
flowableTaskService.addComment(taskId, task.getProcessInstanceId(), comment);
|
||||
}
|
||||
|
||||
// 4. 完成任务
|
||||
if (variables != null && !variables.isEmpty()) {
|
||||
flowableTaskService.complete(taskId, variables);
|
||||
} else {
|
||||
flowableTaskService.complete(taskId);
|
||||
}
|
||||
|
||||
log.info("任务完成成功: {} (用户: {})", taskId, userId);
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("完成任务失败: {} (用户: {})", taskId, userId, e);
|
||||
throw new TaskServiceException("完成任务失败: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 认领任务
|
||||
*/
|
||||
public void claimTask(String taskId, String userId) {
|
||||
log.info("认领任务: {} (用户: {})", taskId, userId);
|
||||
|
||||
try {
|
||||
Task task = flowableTaskService.createTaskQuery()
|
||||
.taskId(taskId)
|
||||
.singleResult();
|
||||
|
||||
if (task == null) {
|
||||
throw new TaskNotFoundException("任务不存在: " + taskId);
|
||||
}
|
||||
|
||||
if (task.getAssignee() != null) {
|
||||
throw new TaskServiceException("任务已被分配: " + task.getAssignee());
|
||||
}
|
||||
|
||||
flowableTaskService.claim(taskId, userId);
|
||||
log.info("任务认领成功: {} -> {}", taskId, userId);
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("认领任务失败: {} (用户: {})", taskId, userId, e);
|
||||
throw new TaskServiceException("认领任务失败: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 释放任务(取消认领)
|
||||
*/
|
||||
public void unclaimTask(String taskId, String userId) {
|
||||
log.info("释放任务: {} (用户: {})", taskId, userId);
|
||||
|
||||
try {
|
||||
Task task = flowableTaskService.createTaskQuery()
|
||||
.taskId(taskId)
|
||||
.taskAssignee(userId)
|
||||
.singleResult();
|
||||
|
||||
if (task == null) {
|
||||
throw new TaskNotFoundException("任务不存在或不属于当前用户: " + taskId);
|
||||
}
|
||||
|
||||
flowableTaskService.unclaim(taskId);
|
||||
log.info("任务释放成功: {}", taskId);
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("释放任务失败: {} (用户: {})", taskId, userId, e);
|
||||
throw new TaskServiceException("释放任务失败: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 委派任务
|
||||
*/
|
||||
public void delegateTask(String taskId, String fromUserId, String toUserId, String comment) {
|
||||
log.info("委派任务: {} (从 {} 到 {}, 评论: {})", taskId, fromUserId, toUserId, comment);
|
||||
|
||||
try {
|
||||
Task task = flowableTaskService.createTaskQuery()
|
||||
.taskId(taskId)
|
||||
.taskAssignee(fromUserId)
|
||||
.singleResult();
|
||||
|
||||
if (task == null) {
|
||||
throw new TaskNotFoundException("任务不存在或不属于当前用户: " + taskId);
|
||||
}
|
||||
|
||||
// 添加委派评论
|
||||
if (comment != null && !comment.trim().isEmpty()) {
|
||||
flowableTaskService.addComment(taskId, task.getProcessInstanceId(), comment);
|
||||
}
|
||||
|
||||
// 执行委派
|
||||
flowableTaskService.delegateTask(taskId, toUserId);
|
||||
log.info("任务委派成功: {} -> {}", taskId, toUserId);
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("委派任务失败: {} (从 {} 到 {})", taskId, fromUserId, toUserId, e);
|
||||
throw new TaskServiceException("委派任务失败: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 转办任务
|
||||
*/
|
||||
public void transferTask(String taskId, String fromUserId, String toUserId, String comment) {
|
||||
log.info("转办任务: {} (从 {} 到 {}, 评论: {})", taskId, fromUserId, toUserId, comment);
|
||||
|
||||
try {
|
||||
Task task = flowableTaskService.createTaskQuery()
|
||||
.taskId(taskId)
|
||||
.taskAssignee(fromUserId)
|
||||
.singleResult();
|
||||
|
||||
if (task == null) {
|
||||
throw new TaskNotFoundException("任务不存在或不属于当前用户: " + taskId);
|
||||
}
|
||||
|
||||
// 添加转办评论
|
||||
if (comment != null && !comment.trim().isEmpty()) {
|
||||
flowableTaskService.addComment(taskId, task.getProcessInstanceId(), comment);
|
||||
}
|
||||
|
||||
// 执行转办(重新分配)
|
||||
flowableTaskService.setAssignee(taskId, toUserId);
|
||||
log.info("任务转办成功: {} -> {}", taskId, toUserId);
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("转办任务失败: {} (从 {} 到 {})", taskId, fromUserId, toUserId, e);
|
||||
throw new TaskServiceException("转办任务失败: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取任务评论
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public List<TaskComment> getTaskComments(String taskId) {
|
||||
log.info("获取任务评论: {}", taskId);
|
||||
|
||||
Task task = flowableTaskService.createTaskQuery()
|
||||
.taskId(taskId)
|
||||
.singleResult();
|
||||
|
||||
if (task == null) {
|
||||
throw new TaskNotFoundException("任务不存在: " + taskId);
|
||||
}
|
||||
|
||||
List<org.flowable.engine.task.Comment> comments = flowableTaskService.getTaskComments(taskId);
|
||||
return comments.stream()
|
||||
.map(this::convertToTaskComment)
|
||||
.collect(Collectors.toList());
|
||||
}
|
||||
|
||||
/**
|
||||
* 添加任务评论
|
||||
*/
|
||||
public TaskComment addTaskComment(String taskId, String userId, String message) {
|
||||
log.info("添加任务评论: {} (用户: {}, 内容: {})", taskId, userId, message);
|
||||
|
||||
try {
|
||||
Task task = flowableTaskService.createTaskQuery()
|
||||
.taskId(taskId)
|
||||
.singleResult();
|
||||
|
||||
if (task == null) {
|
||||
throw new TaskNotFoundException("任务不存在: " + taskId);
|
||||
}
|
||||
|
||||
org.flowable.engine.task.Comment comment = flowableTaskService.addComment(
|
||||
taskId, task.getProcessInstanceId(), message);
|
||||
|
||||
log.info("任务评论添加成功: {} (ID: {})", taskId, comment.getId());
|
||||
return convertToTaskComment(comment);
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("添加任务评论失败: {} (用户: {})", taskId, userId, e);
|
||||
throw new TaskServiceException("添加任务评论失败: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取任务统计信息
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public TaskStatistics getTaskStatistics(String userId, List<String> groups) {
|
||||
TaskStatistics stats = new TaskStatistics();
|
||||
|
||||
// 待办任务数量
|
||||
long assignedCount = flowableTaskService.createTaskQuery()
|
||||
.taskAssignee(userId)
|
||||
.count();
|
||||
stats.setAssignedCount(assignedCount);
|
||||
|
||||
// 候选任务数量
|
||||
TaskQuery candidateQuery = flowableTaskService.createTaskQuery()
|
||||
.taskCandidateUser(userId);
|
||||
|
||||
if (groups != null && !groups.isEmpty()) {
|
||||
candidateQuery = candidateQuery.taskCandidateGroupIn(groups);
|
||||
}
|
||||
|
||||
long candidateCount = candidateQuery.count();
|
||||
stats.setCandidateCount(candidateCount);
|
||||
|
||||
// 逾期任务数量
|
||||
long overdueCount = flowableTaskService.createTaskQuery()
|
||||
.taskAssignee(userId)
|
||||
.taskDueBefore(new Date())
|
||||
.count();
|
||||
stats.setOverdueCount(overdueCount);
|
||||
|
||||
return stats;
|
||||
}
|
||||
|
||||
/**
|
||||
* 转换Task为TaskInfo
|
||||
*/
|
||||
private TaskInfo convertToTaskInfo(Task task) {
|
||||
TaskInfo info = new TaskInfo();
|
||||
info.setId(task.getId());
|
||||
info.setName(task.getName());
|
||||
info.setDescription(task.getDescription());
|
||||
info.setAssignee(task.getAssignee());
|
||||
info.setOwner(task.getOwner());
|
||||
info.setProcessInstanceId(task.getProcessInstanceId());
|
||||
info.setProcessDefinitionId(task.getProcessDefinitionId());
|
||||
info.setTaskDefinitionKey(task.getTaskDefinitionKey());
|
||||
info.setPriority(task.getPriority());
|
||||
info.setCreateTime(task.getCreateTime().toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime());
|
||||
|
||||
if (task.getDueDate() != null) {
|
||||
info.setDueDate(task.getDueDate().toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime());
|
||||
}
|
||||
|
||||
// 判断是否逾期
|
||||
if (task.getDueDate() != null && task.getDueDate().before(new Date())) {
|
||||
info.setOverdue(true);
|
||||
}
|
||||
|
||||
return info;
|
||||
}
|
||||
|
||||
/**
|
||||
* 转换Comment为TaskComment
|
||||
*/
|
||||
private TaskComment convertToTaskComment(org.flowable.engine.task.Comment comment) {
|
||||
TaskComment taskComment = new TaskComment();
|
||||
taskComment.setId(comment.getId());
|
||||
taskComment.setUserId(comment.getUserId());
|
||||
taskComment.setMessage(comment.getFullMessage());
|
||||
taskComment.setTime(comment.getTime().toInstant().atZone(ZoneId.systemDefault()).toLocalDateTime());
|
||||
return taskComment;
|
||||
}
|
||||
|
||||
/**
|
||||
* 任务信息
|
||||
*/
|
||||
public static class TaskInfo {
|
||||
private String id;
|
||||
private String name;
|
||||
private String description;
|
||||
private String assignee;
|
||||
private String owner;
|
||||
private String processInstanceId;
|
||||
private String processDefinitionId;
|
||||
private String taskDefinitionKey;
|
||||
private Integer priority;
|
||||
private LocalDateTime createTime;
|
||||
private LocalDateTime dueDate;
|
||||
private boolean overdue;
|
||||
|
||||
// Getters and Setters
|
||||
public String getId() { return id; }
|
||||
public void setId(String id) { this.id = id; }
|
||||
|
||||
public String getName() { return name; }
|
||||
public void setName(String name) { this.name = name; }
|
||||
|
||||
public String getDescription() { return description; }
|
||||
public void setDescription(String description) { this.description = description; }
|
||||
|
||||
public String getAssignee() { return assignee; }
|
||||
public void setAssignee(String assignee) { this.assignee = assignee; }
|
||||
|
||||
public String getOwner() { return owner; }
|
||||
public void setOwner(String owner) { this.owner = owner; }
|
||||
|
||||
public String getProcessInstanceId() { return processInstanceId; }
|
||||
public void setProcessInstanceId(String processInstanceId) { this.processInstanceId = processInstanceId; }
|
||||
|
||||
public String getProcessDefinitionId() { return processDefinitionId; }
|
||||
public void setProcessDefinitionId(String processDefinitionId) { this.processDefinitionId = processDefinitionId; }
|
||||
|
||||
public String getTaskDefinitionKey() { return taskDefinitionKey; }
|
||||
public void setTaskDefinitionKey(String taskDefinitionKey) { this.taskDefinitionKey = taskDefinitionKey; }
|
||||
|
||||
public Integer getPriority() { return priority; }
|
||||
public void setPriority(Integer priority) { this.priority = priority; }
|
||||
|
||||
public LocalDateTime getCreateTime() { return createTime; }
|
||||
public void setCreateTime(LocalDateTime createTime) { this.createTime = createTime; }
|
||||
|
||||
public LocalDateTime getDueDate() { return dueDate; }
|
||||
public void setDueDate(LocalDateTime dueDate) { this.dueDate = dueDate; }
|
||||
|
||||
public boolean isOverdue() { return overdue; }
|
||||
public void setOverdue(boolean overdue) { this.overdue = overdue; }
|
||||
}
|
||||
|
||||
/**
|
||||
* 任务详情
|
||||
*/
|
||||
public static class TaskDetail {
|
||||
private String id;
|
||||
private String name;
|
||||
private String description;
|
||||
private String assignee;
|
||||
private String owner;
|
||||
private String processInstanceId;
|
||||
private String processDefinitionId;
|
||||
private String taskDefinitionKey;
|
||||
private String formKey;
|
||||
private Integer priority;
|
||||
private LocalDateTime createTime;
|
||||
private LocalDateTime dueDate;
|
||||
private Map<String, Object> variables;
|
||||
|
||||
// Getters and Setters
|
||||
public String getId() { return id; }
|
||||
public void setId(String id) { this.id = id; }
|
||||
|
||||
public String getName() { return name; }
|
||||
public void setName(String name) { this.name = name; }
|
||||
|
||||
public String getDescription() { return description; }
|
||||
public void setDescription(String description) { this.description = description; }
|
||||
|
||||
public String getAssignee() { return assignee; }
|
||||
public void setAssignee(String assignee) { this.assignee = assignee; }
|
||||
|
||||
public String getOwner() { return owner; }
|
||||
public void setOwner(String owner) { this.owner = owner; }
|
||||
|
||||
public String getProcessInstanceId() { return processInstanceId; }
|
||||
public void setProcessInstanceId(String processInstanceId) { this.processInstanceId = processInstanceId; }
|
||||
|
||||
public String getProcessDefinitionId() { return processDefinitionId; }
|
||||
public void setProcessDefinitionId(String processDefinitionId) { this.processDefinitionId = processDefinitionId; }
|
||||
|
||||
public String getTaskDefinitionKey() { return taskDefinitionKey; }
|
||||
public void setTaskDefinitionKey(String taskDefinitionKey) { this.taskDefinitionKey = taskDefinitionKey; }
|
||||
|
||||
public String getFormKey() { return formKey; }
|
||||
public void setFormKey(String formKey) { this.formKey = formKey; }
|
||||
|
||||
public Integer getPriority() { return priority; }
|
||||
public void setPriority(Integer priority) { this.priority = priority; }
|
||||
|
||||
public LocalDateTime getCreateTime() { return createTime; }
|
||||
public void setCreateTime(LocalDateTime createTime) { this.createTime = createTime; }
|
||||
|
||||
public LocalDateTime getDueDate() { return dueDate; }
|
||||
public void setDueDate(LocalDateTime dueDate) { this.dueDate = dueDate; }
|
||||
|
||||
public Map<String, Object> getVariables() { return variables; }
|
||||
public void setVariables(Map<String, Object> variables) { this.variables = variables; }
|
||||
}
|
||||
|
||||
/**
|
||||
* 任务评论
|
||||
*/
|
||||
public static class TaskComment {
|
||||
private String id;
|
||||
private String userId;
|
||||
private String message;
|
||||
private LocalDateTime time;
|
||||
|
||||
// Getters and Setters
|
||||
public String getId() { return id; }
|
||||
public void setId(String id) { this.id = id; }
|
||||
|
||||
public String getUserId() { return userId; }
|
||||
public void setUserId(String userId) { this.userId = userId; }
|
||||
|
||||
public String getMessage() { return message; }
|
||||
public void setMessage(String message) { this.message = message; }
|
||||
|
||||
public LocalDateTime getTime() { return time; }
|
||||
public void setTime(LocalDateTime time) { this.time = time; }
|
||||
}
|
||||
|
||||
/**
|
||||
* 任务统计信息
|
||||
*/
|
||||
public static class TaskStatistics {
|
||||
private long assignedCount;
|
||||
private long candidateCount;
|
||||
private long overdueCount;
|
||||
|
||||
// Getters and Setters
|
||||
public long getAssignedCount() { return assignedCount; }
|
||||
public void setAssignedCount(long assignedCount) { this.assignedCount = assignedCount; }
|
||||
|
||||
public long getCandidateCount() { return candidateCount; }
|
||||
public void setCandidateCount(long candidateCount) { this.candidateCount = candidateCount; }
|
||||
|
||||
public long getOverdueCount() { return overdueCount; }
|
||||
public void setOverdueCount(long overdueCount) { this.overdueCount = overdueCount; }
|
||||
}
|
||||
}
|
||||
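As a usage sketch for the service above (not part of this commit): an approval flow that lists the user's work, inspects a task and completes it with a decision variable. The variable name "approved", the candidate group "dev-ops" and the wrapper class are illustrative assumptions; the TaskService methods and nested DTOs are the ones defined above, and completeTask itself claims the task first if it is still unassigned.

package com.flowable.devops.service;

import java.util.List;
import java.util.Map;

public class TaskServiceUsageSketch {

    public void approve(TaskService taskService, String taskId, String userId) {
        // Work assigned to the user and work the user could still claim
        List<TaskService.TaskInfo> assigned = taskService.getTasks(userId);
        List<TaskService.TaskInfo> candidate = taskService.getCandidateTasks(userId, List.of("dev-ops"));

        // Inspect the task, then complete it with an approval decision and a comment
        TaskService.TaskDetail detail = taskService.getTaskDetail(taskId);
        Map<String, Object> variables = Map.of("approved", true);
        taskService.completeTask(taskId, userId, variables, "Looks good, approved.");
    }
}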
@ -0,0 +1,15 @@
package com.flowable.devops.service;

/**
 * Task service exception
 */
public class TaskServiceException extends RuntimeException {

    public TaskServiceException(String message) {
        super(message);
    }

    public TaskServiceException(String message, Throwable cause) {
        super(message, cause);
    }
}
@ -0,0 +1,15 @@
package com.flowable.devops.service;

/**
 * Thrown when a workflow cannot be found
 */
public class WorkflowNotFoundException extends RuntimeException {

    public WorkflowNotFoundException(String message) {
        super(message);
    }

    public WorkflowNotFoundException(String message, Throwable cause) {
        super(message, cause);
    }
}
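The *NotFoundException and *ServiceException classes in this commit are unchecked, and nothing shown here maps them to HTTP responses. A hypothetical @RestControllerAdvice along the following lines is one way a REST layer could translate them; the advice class and the com.flowable.devops.web package are assumptions, not part of the commit.

package com.flowable.devops.web;

import com.flowable.devops.service.NodeTypeNotFoundException;
import com.flowable.devops.service.NodeTypeServiceException;
import com.flowable.devops.service.TaskNotFoundException;
import com.flowable.devops.service.TaskServiceException;
import com.flowable.devops.service.WorkflowNotFoundException;
import com.flowable.devops.service.WorkflowServiceException;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;

@RestControllerAdvice
public class ServiceExceptionAdvice {

    // Missing resources -> 404
    @ExceptionHandler({NodeTypeNotFoundException.class, TaskNotFoundException.class, WorkflowNotFoundException.class})
    public ResponseEntity<String> notFound(RuntimeException ex) {
        return ResponseEntity.status(HttpStatus.NOT_FOUND).body(ex.getMessage());
    }

    // Business/service failures -> 500
    @ExceptionHandler({NodeTypeServiceException.class, TaskServiceException.class, WorkflowServiceException.class})
    public ResponseEntity<String> serviceFailure(RuntimeException ex) {
        return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(ex.getMessage());
    }
}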
@ -0,0 +1,485 @@
package com.flowable.devops.service;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.flowable.devops.entity.WorkflowDefinition;
import com.flowable.devops.entity.WorkflowExecution;
import com.flowable.devops.repository.WorkflowDefinitionRepository;
import com.flowable.devops.repository.WorkflowExecutionRepository;
import com.flowable.devops.workflow.converter.WorkflowConverter;
import lombok.extern.slf4j.Slf4j;
import org.flowable.engine.ProcessEngine;
import org.flowable.engine.RepositoryService;
import org.flowable.engine.RuntimeService;
import org.flowable.engine.repository.Deployment;
import org.flowable.engine.repository.ProcessDefinition;
import org.flowable.engine.runtime.ProcessInstance;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.time.LocalDateTime;
import java.util.*;

/**
 * Workflow service layer
 *
 * Core business features:
 * 1. CRUD operations for workflow definitions
 * 2. JSON-to-BPMN conversion and deployment to Flowable
 * 3. Workflow execution and monitoring
 * 4. Execution history queries
 */
@Slf4j
@Service
@Transactional
public class WorkflowService {

    @Autowired
    private WorkflowDefinitionRepository workflowDefinitionRepository;

    @Autowired
    private WorkflowExecutionRepository workflowExecutionRepository;

    @Autowired
    private WorkflowConverter workflowConverter;

    @Autowired
    private ProcessEngine processEngine;

    @Autowired
    private RepositoryService repositoryService;

    @Autowired
    private RuntimeService runtimeService;

    private final ObjectMapper objectMapper = new ObjectMapper();

/**
|
||||
* 创建工作流定义
|
||||
*/
|
||||
@Transactional(propagation = org.springframework.transaction.annotation.Propagation.REQUIRES_NEW)
|
||||
public WorkflowDefinition create(WorkflowDefinition workflow) {
|
||||
log.info("创建工作流定义: {}", workflow.getName());
|
||||
|
||||
try {
|
||||
// 1. 生成ID(如果没有)
|
||||
if (workflow.getId() == null || workflow.getId().trim().isEmpty()) {
|
||||
workflow.setId("workflow_" + UUID.randomUUID().toString().replace("-", ""));
|
||||
}
|
||||
|
||||
// 2. 验证工作流JSON(添加null检查)
|
||||
JsonNode workflowJson = workflow.getDefinition();
|
||||
if (workflowJson != null) {
|
||||
workflowConverter.validateWorkflowJson(workflowJson);
|
||||
workflowConverter.validateWorkflowTopology(workflowJson);
|
||||
}
|
||||
|
||||
// 3. 设置基本信息
|
||||
workflow.setStatus(WorkflowDefinition.WorkflowStatus.DRAFT);
|
||||
workflow.setCreatedBy("system"); // TODO: 从安全上下文获取当前用户
|
||||
|
||||
// 4. 保存到数据库
|
||||
WorkflowDefinition saved = workflowDefinitionRepository.save(workflow);
|
||||
|
||||
// 5. 强制刷新到数据库
|
||||
workflowDefinitionRepository.flush();
|
||||
|
||||
log.info("工作流定义创建成功: {} (ID: {})", workflow.getName(), saved.getId());
|
||||
return saved;
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("创建工作流定义失败: {}", workflow.getName(), e);
|
||||
throw new WorkflowServiceException("创建工作流定义失败: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 更新工作流定义
|
||||
*/
|
||||
public WorkflowDefinition update(String id, WorkflowDefinition workflow) {
|
||||
log.info("更新工作流定义: {}", id);
|
||||
|
||||
try {
|
||||
// 1. 检查是否存在
|
||||
WorkflowDefinition existing = getById(id);
|
||||
|
||||
// 2. 验证工作流JSON
|
||||
if (workflow.getDefinition() != null) {
|
||||
workflowConverter.validateWorkflowJson(workflow.getDefinition());
|
||||
workflowConverter.validateWorkflowTopology(workflow.getDefinition());
|
||||
existing.setDefinition(workflow.getDefinition());
|
||||
}
|
||||
|
||||
// 3. 更新字段
|
||||
if (workflow.getName() != null) {
|
||||
existing.setName(workflow.getName());
|
||||
}
|
||||
if (workflow.getDescription() != null) {
|
||||
existing.setDescription(workflow.getDescription());
|
||||
}
|
||||
|
||||
// 4. 如果已激活,需要重新部署
|
||||
if (existing.getStatus() == WorkflowDefinition.WorkflowStatus.ACTIVE) {
|
||||
log.info("工作流已激活,将重新部署: {}", id);
|
||||
deployToFlowable(existing);
|
||||
}
|
||||
|
||||
// 5. 保存更新
|
||||
WorkflowDefinition updated = workflowDefinitionRepository.save(existing);
|
||||
|
||||
log.info("工作流定义更新成功: {}", id);
|
||||
return updated;
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("更新工作流定义失败: {}", id, e);
|
||||
throw new WorkflowServiceException("更新工作流定义失败: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取工作流定义
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public WorkflowDefinition getById(String id) {
|
||||
return workflowDefinitionRepository.findById(id)
|
||||
.orElseThrow(() -> new WorkflowNotFoundException("工作流定义不存在: " + id));
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取工作流列表
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public List<WorkflowDefinition> list(String status) {
|
||||
if (status != null) {
|
||||
try {
|
||||
WorkflowDefinition.WorkflowStatus workflowStatus =
|
||||
WorkflowDefinition.WorkflowStatus.valueOf(status.toUpperCase());
|
||||
return workflowDefinitionRepository.findByStatus(workflowStatus);
|
||||
} catch (IllegalArgumentException e) {
|
||||
log.warn("无效的工作流状态: {}", status);
|
||||
}
|
||||
}
|
||||
|
||||
return workflowDefinitionRepository.findAll();
|
||||
}
|
||||
|
||||
/**
|
||||
* 删除工作流定义
|
||||
*/
|
||||
public void delete(String id) {
|
||||
log.info("删除工作流定义: {}", id);
|
||||
|
||||
try {
|
||||
WorkflowDefinition workflow = getById(id);
|
||||
|
||||
// 1. 检查是否有正在运行的实例
|
||||
long runningCount = workflowExecutionRepository.countByWorkflowDefinitionId(id);
|
||||
if (runningCount > 0) {
|
||||
throw new WorkflowServiceException("无法删除工作流,存在正在执行的实例: " + runningCount);
|
||||
}
|
||||
|
||||
// 2. 从Flowable中删除流程定义
|
||||
if (workflow.getFlowableProcessDefinitionId() != null) {
|
||||
try {
|
||||
repositoryService.deleteDeployment(workflow.getFlowableDeploymentId(), true);
|
||||
log.info("已从Flowable删除流程定义: {}", workflow.getFlowableProcessDefinitionId());
|
||||
} catch (Exception e) {
|
||||
log.warn("从Flowable删除流程定义失败: {}", e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
// 3. 删除数据库记录
|
||||
workflowDefinitionRepository.delete(workflow);
|
||||
|
||||
log.info("工作流定义删除成功: {}", id);
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("删除工作流定义失败: {}", id, e);
|
||||
throw new WorkflowServiceException("删除工作流定义失败: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 激活工作流(部署到Flowable)
|
||||
*/
|
||||
public WorkflowDefinition activate(String id) {
|
||||
log.info("激活工作流定义: {}", id);
|
||||
|
||||
try {
|
||||
WorkflowDefinition workflow = getById(id);
|
||||
|
||||
// 1. 部署到Flowable
|
||||
deployToFlowable(workflow);
|
||||
|
||||
// 2. 更新状态
|
||||
workflow.setStatus(WorkflowDefinition.WorkflowStatus.ACTIVE);
|
||||
WorkflowDefinition activated = workflowDefinitionRepository.save(workflow);
|
||||
|
||||
log.info("工作流定义激活成功: {} -> {}", id, workflow.getFlowableProcessDefinitionId());
|
||||
return activated;
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("激活工作流定义失败: {}", id, e);
|
||||
throw new WorkflowServiceException("激活工作流定义失败: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 部署工作流到Flowable
|
||||
*/
|
||||
private void deployToFlowable(WorkflowDefinition workflow) {
|
||||
try {
|
||||
// 1. 转换JSON到BPMN
|
||||
String bpmnXml = workflowConverter.convertToBpmn(workflow.getDefinition());
|
||||
|
||||
// 2. 部署到Flowable
|
||||
String deploymentName = workflow.getName() + "_v" + System.currentTimeMillis();
|
||||
String resourceName = workflow.getId() + ".bpmn20.xml";
|
||||
|
||||
Deployment deployment = repositoryService.createDeployment()
|
||||
.name(deploymentName)
|
||||
.addString(resourceName, bpmnXml)
|
||||
.deploy();
|
||||
|
||||
// 3. 获取流程定义
|
||||
ProcessDefinition processDefinition = repositoryService.createProcessDefinitionQuery()
|
||||
.deploymentId(deployment.getId())
|
||||
.singleResult();
|
||||
|
||||
// 4. 更新工作流信息
|
||||
workflow.setFlowableDeploymentId(deployment.getId());
|
||||
workflow.setFlowableProcessDefinitionId(processDefinition.getId());
|
||||
|
||||
log.info("工作流部署成功: {} -> {} (deployment: {})",
|
||||
workflow.getName(), processDefinition.getId(), deployment.getId());
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("部署工作流到Flowable失败: {}", workflow.getId(), e);
|
||||
throw new WorkflowServiceException("部署工作流失败: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 执行工作流
|
||||
*/
|
||||
public WorkflowExecutionResult execute(String id, Map<String, Object> input) {
|
||||
log.info("开始执行工作流: {} (输入: {})", id, input);
|
||||
|
||||
try {
|
||||
WorkflowDefinition workflow = getById(id);
|
||||
|
||||
// 1. 检查状态
|
||||
if (workflow.getStatus() != WorkflowDefinition.WorkflowStatus.ACTIVE) {
|
||||
throw new WorkflowServiceException("工作流未激活,无法执行: " + workflow.getStatus());
|
||||
}
|
||||
|
||||
// 2. 创建执行记录
|
||||
WorkflowExecution execution = new WorkflowExecution();
|
||||
execution.setWorkflowDefinitionId(id);
|
||||
execution.setInput(objectMapper.valueToTree(input));
|
||||
execution.setStatus(WorkflowExecution.ExecutionStatus.RUNNING);
|
||||
execution.setTriggerType(WorkflowExecution.TriggerType.MANUAL);
|
||||
execution.setTriggeredBy("system"); // TODO: 从安全上下文获取
|
||||
execution = workflowExecutionRepository.save(execution);
|
||||
|
||||
// 3. 准备流程变量
|
||||
Map<String, Object> variables = new HashMap<>();
|
||||
variables.put("workflow", Map.of("input", input, "executionId", execution.getId()));
|
||||
variables.put("nodes", new HashMap<>()); // 节点输出容器
|
||||
|
||||
// 4. 启动流程实例
|
||||
ProcessInstance processInstance = runtimeService.startProcessInstanceById(
|
||||
workflow.getFlowableProcessDefinitionId(),
|
||||
execution.getId(),
|
||||
variables
|
||||
);
|
||||
|
||||
// 5. 更新执行记录
|
||||
execution.setFlowableProcessInstanceId(processInstance.getId());
|
||||
|
||||
// 6. 检查执行状态
|
||||
boolean isCompleted = processInstance.isEnded();
|
||||
if (isCompleted) {
|
||||
execution.setStatus(WorkflowExecution.ExecutionStatus.COMPLETED);
|
||||
execution.setEndedAt(LocalDateTime.now());
|
||||
}
|
||||
|
||||
execution = workflowExecutionRepository.save(execution);
|
||||
|
||||
// 7. 构建返回结果
|
||||
WorkflowExecutionResult result = buildExecutionResult(execution, processInstance, variables);
|
||||
|
||||
log.info("工作流执行完成: {} -> {} (状态: {})",
|
||||
id, processInstance.getId(), result.getStatus());
|
||||
|
||||
return result;
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("执行工作流失败: {}", id, e);
|
||||
throw new WorkflowServiceException("执行工作流失败: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 构建执行结果
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
private WorkflowExecutionResult buildExecutionResult(
|
||||
WorkflowExecution execution,
|
||||
ProcessInstance processInstance,
|
||||
Map<String, Object> variables
|
||||
) {
|
||||
WorkflowExecutionResult result = new WorkflowExecutionResult();
|
||||
result.setWorkflowId(execution.getWorkflowDefinitionId());
|
||||
result.setProcessInstanceId(processInstance.getId());
|
||||
result.setExecutionId(execution.getId());
|
||||
result.setStartedAt(execution.getStartedAt());
|
||||
result.setEndedAt(execution.getEndedAt());
|
||||
|
||||
// 设置状态
|
||||
if (processInstance.isEnded()) {
|
||||
result.setStatus("completed");
|
||||
} else {
|
||||
result.setStatus("running");
|
||||
}
|
||||
|
||||
// 获取节点执行数据
|
||||
Map<String, Object> nodesData = (Map<String, Object>) variables.get("nodes");
|
||||
if (nodesData != null) {
|
||||
result.setNodes(nodesData);
|
||||
}
|
||||
|
||||
// 获取输出数据(从最后一个节点)
|
||||
if (nodesData != null && processInstance.isEnded()) {
|
||||
// 简化实现:返回所有节点输出
|
||||
Map<String, Object> output = new HashMap<>();
|
||||
nodesData.forEach((nodeId, nodeData) -> {
|
||||
if (nodeData instanceof Map) {
|
||||
Map<String, Object> nodeMap = (Map<String, Object>) nodeData;
|
||||
Object nodeOutput = nodeMap.get("output");
|
||||
if (nodeOutput != null) {
|
||||
output.put(nodeId, nodeOutput);
|
||||
}
|
||||
}
|
||||
});
|
||||
result.setOutput(output);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取执行历史
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public List<WorkflowExecution> getExecutions(String workflowId) {
|
||||
return workflowExecutionRepository.findByWorkflowDefinitionIdOrderByStartedAtDesc(workflowId);
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取执行详情
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public WorkflowExecutionDetail getExecutionDetail(String executionId) {
|
||||
WorkflowExecution execution = workflowExecutionRepository.findById(executionId)
|
||||
.orElseThrow(() -> new WorkflowNotFoundException("执行记录不存在: " + executionId));
|
||||
|
||||
WorkflowExecutionDetail detail = new WorkflowExecutionDetail();
|
||||
detail.setId(execution.getId());
|
||||
detail.setWorkflowDefinitionId(execution.getWorkflowDefinitionId());
|
||||
detail.setProcessInstanceId(execution.getFlowableProcessInstanceId());
|
||||
detail.setStatus(execution.getStatus().name().toLowerCase());
|
||||
detail.setInput(execution.getInput());
|
||||
detail.setStartedAt(execution.getStartedAt());
|
||||
detail.setEndedAt(execution.getEndedAt());
|
||||
detail.setError(execution.getErrorMessage());
|
||||
|
||||
// TODO: 从流程变量或日志表获取节点执行详情
|
||||
detail.setNodes(new HashMap<>());
|
||||
|
||||
return detail;
|
||||
}
|
||||
|
||||
/**
|
||||
* 工作流执行结果
|
||||
*/
|
||||
public static class WorkflowExecutionResult {
|
||||
private String workflowId;
|
||||
private String processInstanceId;
|
||||
private String executionId;
|
||||
private String status;
|
||||
private Map<String, Object> output;
|
||||
private Map<String, Object> nodes;
|
||||
private LocalDateTime startedAt;
|
||||
private LocalDateTime endedAt;
|
||||
|
||||
// Getters and Setters
|
||||
public String getWorkflowId() { return workflowId; }
|
||||
public void setWorkflowId(String workflowId) { this.workflowId = workflowId; }
|
||||
|
||||
public String getProcessInstanceId() { return processInstanceId; }
|
||||
public void setProcessInstanceId(String processInstanceId) { this.processInstanceId = processInstanceId; }
|
||||
|
||||
public String getExecutionId() { return executionId; }
|
||||
public void setExecutionId(String executionId) { this.executionId = executionId; }
|
||||
|
||||
public String getStatus() { return status; }
|
||||
public void setStatus(String status) { this.status = status; }
|
||||
|
||||
public Map<String, Object> getOutput() { return output; }
|
||||
public void setOutput(Map<String, Object> output) { this.output = output; }
|
||||
|
||||
public Map<String, Object> getNodes() { return nodes; }
|
||||
public void setNodes(Map<String, Object> nodes) { this.nodes = nodes; }
|
||||
|
||||
public LocalDateTime getStartedAt() { return startedAt; }
|
||||
public void setStartedAt(LocalDateTime startedAt) { this.startedAt = startedAt; }
|
||||
|
||||
public LocalDateTime getEndedAt() { return endedAt; }
|
||||
public void setEndedAt(LocalDateTime endedAt) { this.endedAt = endedAt; }
|
||||
}
|
||||
|
||||
/**
|
||||
* 工作流执行详情
|
||||
*/
|
||||
public static class WorkflowExecutionDetail {
|
||||
private String id;
|
||||
private String workflowDefinitionId;
|
||||
private String processInstanceId;
|
||||
private String status;
|
||||
private JsonNode input;
|
||||
private Map<String, Object> nodes;
|
||||
private LocalDateTime startedAt;
|
||||
private LocalDateTime endedAt;
|
||||
private String error;
|
||||
|
||||
// Getters and Setters
|
||||
public String getId() { return id; }
|
||||
public void setId(String id) { this.id = id; }
|
||||
|
||||
public String getWorkflowDefinitionId() { return workflowDefinitionId; }
|
||||
public void setWorkflowDefinitionId(String workflowDefinitionId) { this.workflowDefinitionId = workflowDefinitionId; }
|
||||
|
||||
public String getProcessInstanceId() { return processInstanceId; }
|
||||
public void setProcessInstanceId(String processInstanceId) { this.processInstanceId = processInstanceId; }
|
||||
|
||||
public String getStatus() { return status; }
|
||||
public void setStatus(String status) { this.status = status; }
|
||||
|
||||
public JsonNode getInput() { return input; }
|
||||
public void setInput(JsonNode input) { this.input = input; }
|
||||
|
||||
public Map<String, Object> getNodes() { return nodes; }
|
||||
public void setNodes(Map<String, Object> nodes) { this.nodes = nodes; }
|
||||
|
||||
public LocalDateTime getStartedAt() { return startedAt; }
|
||||
public void setStartedAt(LocalDateTime startedAt) { this.startedAt = startedAt; }
|
||||
|
||||
public LocalDateTime getEndedAt() { return endedAt; }
|
||||
public void setEndedAt(LocalDateTime endedAt) { this.endedAt = endedAt; }
|
||||
|
||||
public String getError() { return error; }
|
||||
public void setError(String error) { this.error = error; }
|
||||
}
|
||||
}
|
||||
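A sketch of the definition lifecycle the service above implements: create a DRAFT definition, activate it (which deploys the generated BPMN to Flowable), then execute it with an input map. It is illustrative only; the no-arg WorkflowDefinition constructor, the "shell" node type and the workflow JSON literal are assumptions, while the service calls are the ones defined above.

package com.flowable.devops.service;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.flowable.devops.entity.WorkflowDefinition;

import java.util.Map;

public class WorkflowServiceUsageSketch {

    public WorkflowService.WorkflowExecutionResult run(WorkflowService workflowService) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        WorkflowDefinition definition = new WorkflowDefinition(); // assumed no-arg entity constructor
        definition.setName("deploy-backend");
        definition.setDefinition(mapper.readTree(
                "{\"id\":\"deploy_backend\",\"name\":\"deploy-backend\","
                + "\"nodes\":[{\"id\":\"build\",\"name\":\"Build\",\"type\":\"shell\",\"config\":{}},"
                + "{\"id\":\"deploy\",\"name\":\"Deploy\",\"type\":\"shell\",\"config\":{}}],"
                + "\"edges\":[{\"source\":\"build\",\"target\":\"deploy\"}]}"));

        WorkflowDefinition created = workflowService.create(definition);  // saved with status DRAFT
        workflowService.activate(created.getId());                        // converts JSON to BPMN and deploys
        return workflowService.execute(created.getId(), Map.of("branch", "main"));
    }
}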
@ -0,0 +1,15 @@
package com.flowable.devops.service;

/**
 * Workflow service exception
 */
public class WorkflowServiceException extends RuntimeException {

    public WorkflowServiceException(String message) {
        super(message);
    }

    public WorkflowServiceException(String message, Throwable cause) {
        super(message, cause);
    }
}
@ -0,0 +1,17 @@
package com.flowable.devops.workflow.converter;

/**
 * Workflow conversion exception
 *
 * Thrown when an error occurs while converting JSON to BPMN
 */
public class WorkflowConversionException extends RuntimeException {

    public WorkflowConversionException(String message) {
        super(message);
    }

    public WorkflowConversionException(String message, Throwable cause) {
        super(message, cause);
    }
}
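The WorkflowConverter in the next file reads a specific JSON shape. The sample below is inferred from the fields the converter actually accesses (id, name, nodes[].id/name/type/config, edges[].source/target/condition); the concrete node types, ids and the condition expression are illustrative assumptions, and the Java text block is used only for readability. Feeding it to the converter would look like convertToBpmn(new ObjectMapper().readTree(SampleWorkflowJson.RELEASE_APPROVAL)).

package com.flowable.devops.workflow.converter;

// Inferred input shape for WorkflowConverter.convertToBpmn (defined below).
public final class SampleWorkflowJson {

    public static final String RELEASE_APPROVAL = """
            {
              "id": "release_approval",
              "name": "Release approval",
              "nodes": [
                { "id": "build",   "name": "Build",     "type": "shell",             "config": {} },
                { "id": "approve", "name": "Approve",   "type": "approval",
                  "config": { "assignee": "release-manager", "candidateGroups": ["release-managers"] } },
                { "id": "gate",    "name": "Approved?", "type": "exclusive_gateway", "config": {} },
                { "id": "deploy",  "name": "Deploy",    "type": "shell",             "config": {} }
              ],
              "edges": [
                { "source": "build",   "target": "approve" },
                { "source": "approve", "target": "gate" },
                { "source": "gate",    "target": "deploy", "condition": "${approved == true}" }
              ]
            }
            """;
}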
@ -0,0 +1,473 @@
package com.flowable.devops.workflow.converter;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.extern.slf4j.Slf4j;
import org.flowable.bpmn.BpmnAutoLayout;
import org.flowable.bpmn.converter.BpmnXMLConverter;
import org.flowable.bpmn.model.*;
import org.flowable.bpmn.model.Process;
import org.springframework.stereotype.Service;

import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.stream.Collectors;

/**
 * Workflow converter: JSON ↔ BPMN XML
 *
 * Core responsibilities:
 * 1. Convert the frontend JSON workflow definition into Flowable BPMN 2.0 XML
 * 2. Support conversion of the various node types
 * 3. Handle conditional branches and sequence flows
 */
@Slf4j
@Service
public class WorkflowConverter {

    private final ObjectMapper objectMapper = new ObjectMapper();

/**
|
||||
* JSON → BPMN XML
|
||||
*
|
||||
* @param workflowJson JSON格式的工作流定义
|
||||
* @return BPMN 2.0 XML字符串
|
||||
*/
|
||||
public String convertToBpmn(JsonNode workflowJson) {
|
||||
log.info("开始转换工作流JSON到BPMN: {}", workflowJson.get("name").asText());
|
||||
|
||||
try {
|
||||
// 1. 创建BPMN模型
|
||||
BpmnModel bpmnModel = new BpmnModel();
|
||||
|
||||
// 2. 创建流程
|
||||
Process process = createProcess(workflowJson);
|
||||
|
||||
// 3. 添加开始事件
|
||||
StartEvent startEvent = createStartEvent();
|
||||
process.addFlowElement(startEvent);
|
||||
|
||||
// 4. 转换节点
|
||||
Map<String, FlowElement> elementMap = new HashMap<>();
|
||||
elementMap.put("start", startEvent);
|
||||
|
||||
JsonNode nodes = workflowJson.get("nodes");
|
||||
if (nodes != null && nodes.isArray()) {
|
||||
for (JsonNode node : nodes) {
|
||||
FlowElement element = convertNode(node);
|
||||
process.addFlowElement(element);
|
||||
elementMap.put(node.get("id").asText(), element);
|
||||
}
|
||||
}
|
||||
|
||||
// 5. 添加结束事件
|
||||
EndEvent endEvent = createEndEvent();
|
||||
process.addFlowElement(endEvent);
|
||||
elementMap.put("end", endEvent);
|
||||
|
||||
// 6. 转换连线
|
||||
convertEdges(workflowJson, process, elementMap);
|
||||
|
||||
// 7. 添加流程到模型
|
||||
bpmnModel.addProcess(process);
|
||||
|
||||
// 8. 自动布局
|
||||
new BpmnAutoLayout(bpmnModel).execute();
|
||||
|
||||
// 9. 转换为XML
|
||||
String bpmnXml = convertBpmnModelToXml(bpmnModel);
|
||||
|
||||
log.info("工作流JSON转换BPMN完成: {} 个节点, {} KB",
|
||||
elementMap.size(), bpmnXml.length() / 1024);
|
||||
|
||||
return bpmnXml;
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("转换工作流JSON到BPMN失败", e);
|
||||
throw new WorkflowConversionException("Failed to convert workflow JSON to BPMN", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建流程
|
||||
*/
|
||||
private Process createProcess(JsonNode workflowJson) {
|
||||
Process process = new Process();
|
||||
process.setId(workflowJson.get("id").asText());
|
||||
process.setName(workflowJson.get("name").asText());
|
||||
process.setExecutable(true);
|
||||
|
||||
return process;
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建开始事件
|
||||
*/
|
||||
private StartEvent createStartEvent() {
|
||||
StartEvent startEvent = new StartEvent();
|
||||
startEvent.setId("start");
|
||||
startEvent.setName("开始");
|
||||
return startEvent;
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建结束事件
|
||||
*/
|
||||
private EndEvent createEndEvent() {
|
||||
EndEvent endEvent = new EndEvent();
|
||||
endEvent.setId("end");
|
||||
endEvent.setName("结束");
|
||||
return endEvent;
|
||||
}
|
||||
|
||||
/**
|
||||
* 转换单个节点
|
||||
*/
|
||||
private FlowElement convertNode(JsonNode nodeJson) {
|
||||
String nodeType = nodeJson.get("type").asText();
|
||||
String nodeId = nodeJson.get("id").asText();
|
||||
String nodeName = nodeJson.get("name").asText();
|
||||
|
||||
log.debug("转换节点: {} ({})", nodeName, nodeType);
|
||||
|
||||
// 根据节点类型创建不同的BPMN元素
|
||||
switch (nodeType) {
|
||||
case "approval":
|
||||
case "user_task":
|
||||
return createUserTask(nodeJson);
|
||||
|
||||
case "exclusive_gateway":
|
||||
return createExclusiveGateway(nodeJson);
|
||||
|
||||
case "parallel_gateway":
|
||||
return createParallelGateway(nodeJson);
|
||||
|
||||
default:
|
||||
// 默认创建Service Task
|
||||
return createServiceTask(nodeJson);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建Service Task(普通节点)
|
||||
*/
|
||||
private ServiceTask createServiceTask(JsonNode nodeJson) {
|
||||
ServiceTask task = new ServiceTask();
|
||||
task.setId(nodeJson.get("id").asText());
|
||||
task.setName(nodeJson.get("name").asText());
|
||||
|
||||
// 使用通用执行器 - 关键配置!
|
||||
task.setImplementationType(ImplementationType.IMPLEMENTATION_TYPE_DELEGATEEXPRESSION);
|
||||
task.setImplementation("${genericNodeExecutor}");
|
||||
|
||||
// 通过Field Extension传递节点配置
|
||||
List<FieldExtension> fields = new ArrayList<>();
|
||||
|
||||
// 节点类型
|
||||
FieldExtension typeField = new FieldExtension();
|
||||
typeField.setFieldName("nodeType");
|
||||
typeField.setStringValue(nodeJson.get("type").asText());
|
||||
fields.add(typeField);
|
||||
|
||||
// 节点配置(JSON)
|
||||
try {
|
||||
JsonNode config = nodeJson.get("config");
|
||||
if (config != null) {
|
||||
String configJson = objectMapper.writeValueAsString(config);
|
||||
FieldExtension configField = new FieldExtension();
|
||||
configField.setFieldName("nodeConfig");
|
||||
configField.setStringValue(configJson);
|
||||
fields.add(configField);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.warn("序列化节点配置失败: {}", nodeJson.get("id").asText(), e);
|
||||
}
|
||||
|
||||
task.setFieldExtensions(fields);
|
||||
|
||||
return task;
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建User Task(审批节点)
|
||||
*/
|
||||
private UserTask createUserTask(JsonNode nodeJson) {
|
||||
UserTask task = new UserTask();
|
||||
task.setId(nodeJson.get("id").asText());
|
||||
task.setName(nodeJson.get("name").asText());
|
||||
|
||||
JsonNode config = nodeJson.get("config");
|
||||
if (config != null) {
|
||||
// 审批人(支持表达式)
|
||||
JsonNode assigneeNode = config.get("assignee");
|
||||
if (assigneeNode != null) {
|
||||
task.setAssignee(assigneeNode.asText());
|
||||
}
|
||||
|
||||
// 候选组
|
||||
JsonNode candidateGroupsNode = config.get("candidateGroups");
|
||||
if (candidateGroupsNode != null && candidateGroupsNode.isArray()) {
|
||||
List<String> candidateGroups = new ArrayList<>();
|
||||
candidateGroupsNode.forEach(group -> candidateGroups.add(group.asText()));
|
||||
task.setCandidateGroups(candidateGroups);
|
||||
}
|
||||
|
||||
// 表单字段
|
||||
JsonNode formFieldsNode = config.get("formFields");
|
||||
if (formFieldsNode != null && formFieldsNode.isArray()) {
|
||||
List<FormProperty> formProperties = new ArrayList<>();
|
||||
formFieldsNode.forEach(field -> {
|
||||
FormProperty prop = new FormProperty();
|
||||
prop.setId(field.get("id").asText());
|
||||
prop.setName(field.get("label").asText());
|
||||
prop.setType(field.get("type").asText());
|
||||
prop.setRequired(field.has("required") ? field.get("required").asBoolean() : false);
|
||||
formProperties.add(prop);
|
||||
});
|
||||
task.setFormProperties(formProperties);
|
||||
}
|
||||
}
|
||||
|
||||
return task;
|
||||
}
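// Example of the node config this method consumes (assumed authoring format, for illustration only):
//   {"assignee": "${initiator}",
//    "candidateGroups": ["ops", "qa"],
//    "formFields": [{"id": "approved", "label": "是否通过", "type": "boolean", "required": true}]}
// which maps onto the UserTask assignee, candidate groups and form properties set above.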
|
||||
|
||||
/**
|
||||
* 创建排他网关(条件分支)
|
||||
*/
|
||||
private ExclusiveGateway createExclusiveGateway(JsonNode nodeJson) {
|
||||
ExclusiveGateway gateway = new ExclusiveGateway();
|
||||
gateway.setId(nodeJson.get("id").asText());
|
||||
gateway.setName(nodeJson.get("name").asText());
|
||||
return gateway;
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建并行网关
|
||||
*/
|
||||
private ParallelGateway createParallelGateway(JsonNode nodeJson) {
|
||||
ParallelGateway gateway = new ParallelGateway();
|
||||
gateway.setId(nodeJson.get("id").asText());
|
||||
gateway.setName(nodeJson.get("name").asText());
|
||||
return gateway;
|
||||
}
|
||||
|
||||
/**
|
||||
* 转换连线
|
||||
*/
|
||||
private void convertEdges(JsonNode workflowJson, Process process, Map<String, FlowElement> elementMap) {
|
||||
JsonNode edges = workflowJson.get("edges");
|
||||
if (edges == null || !edges.isArray()) {
|
||||
log.warn("工作流没有定义edges");
|
||||
return;
|
||||
}
|
||||
|
||||
// 找到入度为0的节点(起始节点)
|
||||
Set<String> targetNodes = new HashSet<>();
|
||||
edges.forEach(edge -> targetNodes.add(edge.get("target").asText()));
|
||||
|
||||
Set<String> sourceNodes = new HashSet<>();
|
||||
edges.forEach(edge -> sourceNodes.add(edge.get("source").asText()));
|
||||
|
||||
// 连接 start → 第一个节点
|
||||
Set<String> firstNodes = new HashSet<>(sourceNodes);
|
||||
firstNodes.removeAll(targetNodes);
|
||||
|
||||
for (String firstNodeId : firstNodes) {
|
||||
if (elementMap.containsKey(firstNodeId)) {
|
||||
addSequenceFlow(process, "start", firstNodeId, null);
|
||||
log.debug("连接: start → {}", firstNodeId);
|
||||
}
|
||||
}
|
||||
|
||||
// 转换所有边
|
||||
edges.forEach(edge -> {
|
||||
String source = edge.get("source").asText();
|
||||
String target = edge.get("target").asText();
|
||||
String condition = edge.has("condition") ? edge.get("condition").asText() : null;
|
||||
|
||||
if (elementMap.containsKey(source) && elementMap.containsKey(target)) {
|
||||
addSequenceFlow(process, source, target, condition);
|
||||
log.debug("连接: {} → {} {}", source, target,
|
||||
condition != null ? "[" + condition + "]" : "");
|
||||
} else {
|
||||
log.warn("跳过无效连线: {} → {} (节点不存在)", source, target);
|
||||
}
|
||||
});
|
||||
|
||||
// 连接 最后的节点 → end
|
||||
Set<String> lastNodes = new HashSet<>(targetNodes);
|
||||
lastNodes.removeAll(sourceNodes);
|
||||
|
||||
for (String lastNodeId : lastNodes) {
|
||||
if (elementMap.containsKey(lastNodeId)) {
|
||||
addSequenceFlow(process, lastNodeId, "end", null);
|
||||
log.debug("连接: {} → end", lastNodeId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 添加序列流
|
||||
*/
|
||||
private SequenceFlow addSequenceFlow(Process process, String sourceId, String targetId, String condition) {
|
||||
SequenceFlow flow = new SequenceFlow();
|
||||
flow.setId("flow_" + sourceId + "_to_" + targetId);
|
||||
flow.setSourceRef(sourceId);
|
||||
flow.setTargetRef(targetId);
|
||||
|
||||
// 设置条件表达式
|
||||
if (condition != null && !condition.trim().isEmpty()) {
|
||||
flow.setConditionExpression(condition);
|
||||
}
|
||||
|
||||
process.addFlowElement(flow);
|
||||
return flow;
|
||||
}
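// Example edge (assumed authoring format): {"source": "gateway1", "target": "approve_high",
// "condition": "${amount > 1000}"} becomes a sequence flow whose condition expression is
// passed through verbatim and evaluated by Flowable as a UEL expression at runtime.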
|
||||
|
||||
/**
|
||||
* 验证工作流JSON
|
||||
*/
|
||||
public void validateWorkflowJson(JsonNode workflowJson) {
|
||||
if (workflowJson == null) {
|
||||
throw new WorkflowValidationException("工作流JSON不能为null");
|
||||
}
|
||||
|
||||
List<String> errors = new ArrayList<>();
|
||||
|
||||
// 检查必需字段
|
||||
if (!workflowJson.has("id") || workflowJson.get("id").asText().trim().isEmpty()) {
|
||||
errors.add("工作流ID不能为空");
|
||||
}
|
||||
|
||||
if (!workflowJson.has("name") || workflowJson.get("name").asText().trim().isEmpty()) {
|
||||
errors.add("工作流名称不能为空");
|
||||
}
|
||||
|
||||
if (!workflowJson.has("nodes") || !workflowJson.get("nodes").isArray()) {
|
||||
errors.add("工作流必须包含节点数组");
|
||||
}
|
||||
|
||||
// 检查节点
|
||||
JsonNode nodes = workflowJson.get("nodes");
|
||||
if (nodes != null && nodes.isArray()) {
|
||||
Set<String> nodeIds = new HashSet<>();
|
||||
for (JsonNode node : nodes) {
|
||||
if (!node.has("id") || node.get("id").asText().trim().isEmpty()) {
|
||||
errors.add("节点ID不能为空");
|
||||
}
|
||||
|
||||
String nodeId = node.get("id").asText();
|
||||
if (nodeIds.contains(nodeId)) {
|
||||
errors.add("节点ID重复: " + nodeId);
|
||||
}
|
||||
nodeIds.add(nodeId);
|
||||
|
||||
if (!node.has("type")) {
|
||||
errors.add("节点类型不能为空: " + nodeId);
|
||||
}
|
||||
|
||||
if (!node.has("name")) {
|
||||
errors.add("节点名称不能为空: " + nodeId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 检查连线
|
||||
JsonNode edges = workflowJson.get("edges");
|
||||
if (edges != null && edges.isArray()) {
|
||||
Set<String> nodeIds = new HashSet<>();
|
||||
JsonNode nodesArray = workflowJson.get("nodes");
|
||||
if (nodesArray != null) {
|
||||
nodesArray.forEach(node -> nodeIds.add(node.get("id").asText()));
|
||||
}
|
||||
|
||||
for (JsonNode edge : edges) {
|
||||
if (!edge.has("source") || !edge.has("target")) {
|
||||
errors.add("连线必须包含source和target");
|
||||
continue;
|
||||
}
|
||||
|
||||
String source = edge.get("source").asText();
|
||||
String target = edge.get("target").asText();
|
||||
|
||||
if (!nodeIds.contains(source)) {
|
||||
errors.add("连线源节点不存在: " + source);
|
||||
}
|
||||
|
||||
if (!nodeIds.contains(target)) {
|
||||
errors.add("连线目标节点不存在: " + target);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!errors.isEmpty()) {
|
||||
throw new WorkflowValidationException("工作流JSON验证失败: " + String.join("; ", errors));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* BPMN Model → XML
|
||||
*/
|
||||
private String convertBpmnModelToXml(BpmnModel bpmnModel) {
|
||||
BpmnXMLConverter converter = new BpmnXMLConverter();
|
||||
byte[] xmlBytes = converter.convertToXML(bpmnModel);
|
||||
return new String(xmlBytes, StandardCharsets.UTF_8);
|
||||
}
|
||||
|
||||
/**
|
||||
* 拓扑排序验证(检测环)
|
||||
*/
|
||||
public void validateWorkflowTopology(JsonNode workflowJson) {
|
||||
JsonNode nodes = workflowJson.get("nodes");
|
||||
JsonNode edges = workflowJson.get("edges");
|
||||
|
||||
if (nodes == null || edges == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
// 构建图
|
||||
Map<String, Set<String>> adjacencyList = new HashMap<>();
|
||||
Map<String, Integer> inDegree = new HashMap<>();
|
||||
|
||||
// 初始化
|
||||
for (JsonNode node : nodes) {
|
||||
String nodeId = node.get("id").asText();
|
||||
adjacencyList.put(nodeId, new HashSet<>());
|
||||
inDegree.put(nodeId, 0);
|
||||
}
|
||||
|
||||
// 构建邻接表和入度统计
|
||||
for (JsonNode edge : edges) {
|
||||
String source = edge.get("source").asText();
|
||||
String target = edge.get("target").asText();
|
||||
|
||||
adjacencyList.get(source).add(target);
|
||||
inDegree.put(target, inDegree.get(target) + 1);
|
||||
}
|
||||
|
||||
// Kahn算法检测环
|
||||
Queue<String> queue = new LinkedList<>();
|
||||
for (Map.Entry<String, Integer> entry : inDegree.entrySet()) {
|
||||
if (entry.getValue() == 0) {
|
||||
queue.offer(entry.getKey());
|
||||
}
|
||||
}
|
||||
|
||||
int processedCount = 0;
|
||||
while (!queue.isEmpty()) {
|
||||
String current = queue.poll();
|
||||
processedCount++;
|
||||
|
||||
for (String neighbor : adjacencyList.get(current)) {
|
||||
inDegree.put(neighbor, inDegree.get(neighbor) - 1);
|
||||
if (inDegree.get(neighbor) == 0) {
|
||||
queue.offer(neighbor);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
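// Kahn's algorithm invariant: in an acyclic graph every node is dequeued exactly once,
// so processing fewer nodes than inDegree.size() means at least one cycle remains.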
if (processedCount != inDegree.size()) {
|
||||
throw new WorkflowValidationException("工作流存在环形依赖,无法执行");
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,17 @@
|
||||
package com.flowable.devops.workflow.converter;
|
||||
|
||||
/**
|
||||
* 工作流验证异常
|
||||
*
|
||||
* 在工作流JSON验证过程中发现错误时抛出
|
||||
*/
|
||||
public class WorkflowValidationException extends RuntimeException {
|
||||
|
||||
public WorkflowValidationException(String message) {
|
||||
super(message);
|
||||
}
|
||||
|
||||
public WorkflowValidationException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,321 @@
|
||||
package com.flowable.devops.workflow.executor;
|
||||
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.flowable.devops.entity.NodeExecutionLog;
|
||||
import com.flowable.devops.expression.ExpressionEngine;
|
||||
import com.flowable.devops.repository.NodeExecutionLogRepository;
|
||||
import com.flowable.devops.workflow.model.NodeExecutionContext;
|
||||
import com.flowable.devops.workflow.model.NodeExecutionResult;
|
||||
import com.flowable.devops.workflow.model.NodeInput;
|
||||
import com.flowable.devops.workflow.node.WorkflowNode;
|
||||
import com.flowable.devops.workflow.node.registry.NodeTypeRegistry;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.flowable.bpmn.model.FieldExtension;
|
||||
import org.flowable.bpmn.model.ServiceTask;
|
||||
import org.flowable.engine.delegate.DelegateExecution;
|
||||
import org.flowable.engine.delegate.JavaDelegate;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.ZoneId;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* 通用节点执行器
|
||||
*
|
||||
* 所有Service Task都使用这个执行器
|
||||
* 根据节点类型动态调用对应的实现类
|
||||
*
|
||||
* 关键功能:
|
||||
* 1. 从Field Extension获取节点类型和配置
|
||||
* 2. 使用表达式引擎解析配置中的表达式
|
||||
* 3. 调用对应的节点实现类执行
|
||||
* 4. 保存节点输出到流程变量
|
||||
* 5. 记录执行日志
|
||||
*/
|
||||
@Slf4j
|
||||
@Component("genericNodeExecutor")
|
||||
public class GenericNodeExecutor implements JavaDelegate {
|
||||
|
||||
@Autowired
|
||||
private NodeTypeRegistry nodeTypeRegistry;
|
||||
|
||||
@Autowired
|
||||
private ExpressionEngine expressionEngine;
|
||||
|
||||
@Autowired
|
||||
private NodeExecutionLogRepository nodeExecutionLogRepository;
|
||||
|
||||
private final ObjectMapper objectMapper = new ObjectMapper();
|
||||
|
||||
@Override
|
||||
public void execute(DelegateExecution execution) {
|
||||
String nodeId = execution.getCurrentActivityId();
|
||||
LocalDateTime startTime = LocalDateTime.now();
|
||||
|
||||
log.info("开始执行节点: {} (流程实例: {})", nodeId, execution.getProcessInstanceId());
|
||||
|
||||
try {
|
||||
// 1. 获取节点类型和配置(从Field Extension)
|
||||
String nodeType = getFieldValue(execution, "nodeType");
|
||||
String nodeConfigJson = getFieldValue(execution, "nodeConfig");
|
||||
|
||||
if (nodeType == null) {
|
||||
throw new RuntimeException("节点类型未定义: " + nodeId);
|
||||
}
|
||||
|
||||
log.debug("节点 {} 类型: {}, 配置: {}", nodeId, nodeType, nodeConfigJson);
|
||||
|
||||
// 2. 解析节点配置JSON
|
||||
Map<String, Object> nodeConfig = new HashMap<>();
|
||||
if (nodeConfigJson != null && !nodeConfigJson.trim().isEmpty()) {
|
||||
try {
|
||||
nodeConfig = objectMapper.readValue(
|
||||
nodeConfigJson,
|
||||
new TypeReference<Map<String, Object>>() {}
|
||||
);
|
||||
} catch (Exception e) {
|
||||
log.warn("解析节点配置JSON失败: {} - {}", nodeId, e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
// 3. 解析表达式(处理数据映射)
|
||||
Map<String, Object> resolvedConfig = expressionEngine.resolveObject(nodeConfig, execution);
|
||||
|
||||
log.debug("节点 {} 解析后配置: {}", nodeId, resolvedConfig);
|
||||
|
||||
// 4. 获取节点实现类
|
||||
WorkflowNode nodeImpl = nodeTypeRegistry.getNodeInstance(nodeType);
|
||||
if (nodeImpl == null) {
|
||||
throw new RuntimeException("未找到节点实现: " + nodeType + " (节点: " + nodeId + ")");
|
||||
}
|
||||
|
||||
// 5. 构建执行上下文
|
||||
NodeExecutionContext context = buildContext(execution, nodeId);
|
||||
|
||||
// 6. 执行节点
|
||||
NodeInput input = new NodeInput(resolvedConfig);
|
||||
NodeExecutionResult result = nodeImpl.execute(input, context);
|
||||
|
||||
LocalDateTime endTime = LocalDateTime.now();
|
||||
|
||||
// 7. 保存节点输出到流程变量
|
||||
saveNodeOutput(execution, nodeId, resolvedConfig, result);
|
||||
|
||||
// 8. 记录执行日志
|
||||
logExecution(execution, nodeId, nodeType, resolvedConfig, result, startTime, endTime);
|
||||
|
||||
log.info("节点执行完成: {} - {} (耗时: {}ms)",
|
||||
nodeId, result.getStatus(), result.getDurationMs());
|
||||
|
||||
// 9. 检查执行结果
|
||||
if (result.isFailed()) {
|
||||
log.error("节点执行失败: {} - {}", nodeId, result.getError());
|
||||
throw new RuntimeException("节点执行失败: " + result.getError());
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
LocalDateTime endTime = LocalDateTime.now();
|
||||
|
||||
// 错误处理
|
||||
handleError(execution, nodeId, e, startTime, endTime);
|
||||
|
||||
log.error("节点执行异常: {} - {}", nodeId, e.getMessage(), e);
|
||||
throw new RuntimeException("节点执行失败: " + nodeId, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 保存节点输出到流程变量(关键)
|
||||
*
|
||||
* 结构:
|
||||
* {
|
||||
* "nodes": {
|
||||
* "node1": {
|
||||
* "status": "success",
|
||||
* "input": {...},
|
||||
* "output": {...},
|
||||
* "startTime": "...",
|
||||
* "endTime": "..."
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
private void saveNodeOutput(
|
||||
DelegateExecution execution,
|
||||
String nodeId,
|
||||
Map<String, Object> input,
|
||||
NodeExecutionResult result
|
||||
) {
|
||||
// 获取或创建nodes对象
|
||||
Map<String, Object> nodesData = (Map<String, Object>) execution.getVariable("nodes");
|
||||
if (nodesData == null) {
|
||||
nodesData = new HashMap<>();
|
||||
}
|
||||
|
||||
// 构建节点数据
|
||||
Map<String, Object> nodeData = new HashMap<>();
|
||||
nodeData.put("status", result.getStatus().name().toLowerCase());
|
||||
nodeData.put("input", input);
|
||||
nodeData.put("output", result.getOutput());
|
||||
nodeData.put("startTime", result.getStartTime().toString());
|
||||
nodeData.put("endTime", result.getEndTime().toString());
|
||||
nodeData.put("durationMs", result.getDurationMs());
|
||||
|
||||
if (result.getError() != null) {
|
||||
nodeData.put("error", result.getError());
|
||||
nodeData.put("errorType", result.getErrorType());
|
||||
}
|
||||
|
||||
// 保存
|
||||
nodesData.put(nodeId, nodeData);
|
||||
execution.setVariable("nodes", nodesData);
|
||||
|
||||
log.debug("节点输出已保存到流程变量: {} -> nodes.{}", nodeId, nodeId);
|
||||
}
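// Illustrative note (assumption about the custom ExpressionEngine): because the whole map is
// stored under the "nodes" process variable, a downstream node config can reference earlier
// output with an expression such as ${nodes.node1.output.statusCode}, resolved by
// expressionEngine.resolveObject() before that node executes.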
|
||||
|
||||
/**
|
||||
* 构建执行上下文
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
private NodeExecutionContext buildContext(DelegateExecution execution, String nodeId) {
|
||||
return NodeExecutionContext.builder()
|
||||
.workflowId(execution.getProcessDefinitionId())
|
||||
.executionId(execution.getProcessInstanceId())
|
||||
.nodeId(nodeId)
|
||||
.variables((Map<String, Object>) execution.getVariable("workflow"))
|
||||
.nodes((Map<String, Object>) execution.getVariable("nodes"))
|
||||
.env(System.getenv())
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* 记录执行日志
|
||||
*/
|
||||
private void logExecution(
|
||||
DelegateExecution execution,
|
||||
String nodeId,
|
||||
String nodeType,
|
||||
Map<String, Object> input,
|
||||
NodeExecutionResult result,
|
||||
LocalDateTime startTime,
|
||||
LocalDateTime endTime
|
||||
) {
|
||||
try {
|
||||
NodeExecutionLog log = new NodeExecutionLog();
|
||||
log.setExecutionId(execution.getProcessInstanceId());
|
||||
log.setNodeId(nodeId);
|
||||
log.setNodeName(execution.getCurrentFlowElement().getName());
|
||||
log.setNodeType(nodeType);
|
||||
|
||||
// 转换输入输出为JsonNode
|
||||
log.setInput(objectMapper.valueToTree(input));
|
||||
log.setOutput(objectMapper.valueToTree(result.getOutput()));
|
||||
|
||||
// 设置状态和时间
|
||||
log.setStatus(convertStatus(result.getStatus()));
|
||||
log.setStartedAt(startTime);
|
||||
log.setEndedAt(endTime);
|
||||
log.setDurationMs(result.getDurationMs().intValue());
|
||||
|
||||
if (result.getError() != null) {
|
||||
log.setErrorMessage(result.getError());
|
||||
}
|
||||
|
||||
nodeExecutionLogRepository.save(log);
|
||||
|
||||
} catch (Exception e) {
|
||||
// 日志记录失败不应该影响流程执行
|
||||
this.log.warn("保存节点执行日志失败: {} - {}", nodeId, e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 转换执行状态
|
||||
*/
|
||||
private NodeExecutionLog.ExecutionStatus convertStatus(NodeExecutionResult.ExecutionStatus status) {
|
||||
switch (status) {
|
||||
case SUCCESS:
|
||||
return NodeExecutionLog.ExecutionStatus.SUCCESS;
|
||||
case FAILED:
|
||||
return NodeExecutionLog.ExecutionStatus.FAILED;
|
||||
case SKIPPED:
|
||||
return NodeExecutionLog.ExecutionStatus.SKIPPED;
|
||||
default:
|
||||
return NodeExecutionLog.ExecutionStatus.FAILED;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 错误处理
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
private void handleError(
|
||||
DelegateExecution execution,
|
||||
String nodeId,
|
||||
Exception e,
|
||||
LocalDateTime startTime,
|
||||
LocalDateTime endTime
|
||||
) {
|
||||
Map<String, Object> nodesData = (Map<String, Object>) execution.getVariable("nodes");
|
||||
if (nodesData == null) {
|
||||
nodesData = new HashMap<>();
|
||||
}
|
||||
|
||||
Map<String, Object> nodeData = new HashMap<>();
|
||||
nodeData.put("status", "failed");
|
||||
nodeData.put("error", e.getMessage());
|
||||
nodeData.put("errorType", e.getClass().getSimpleName());
|
||||
nodeData.put("startTime", startTime.toString());
|
||||
nodeData.put("endTime", endTime.toString());
|
||||
nodeData.put("durationMs", java.time.Duration.between(startTime, endTime).toMillis());
|
||||
|
||||
nodesData.put(nodeId, nodeData);
|
||||
execution.setVariable("nodes", nodesData);
|
||||
|
||||
// 记录错误日志
|
||||
try {
|
||||
NodeExecutionLog log = new NodeExecutionLog();
|
||||
log.setExecutionId(execution.getProcessInstanceId());
|
||||
log.setNodeId(nodeId);
|
||||
log.setNodeName(execution.getCurrentFlowElement().getName());
|
||||
log.setStatus(NodeExecutionLog.ExecutionStatus.FAILED);
|
||||
log.setStartedAt(startTime);
|
||||
log.setEndedAt(endTime);
|
||||
log.setErrorMessage(e.getMessage());
|
||||
|
||||
nodeExecutionLogRepository.save(log);
|
||||
|
||||
} catch (Exception logEx) {
|
||||
this.log.warn("保存错误日志失败: {}", logEx.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取Field Extension的值
|
||||
*/
|
||||
private String getFieldValue(DelegateExecution execution, String fieldName) {
|
||||
try {
|
||||
org.flowable.bpmn.model.FlowElement flowElement = execution.getCurrentFlowElement();
|
||||
if (flowElement instanceof ServiceTask) {
|
||||
ServiceTask serviceTask = (ServiceTask) flowElement;
|
||||
List<FieldExtension> fieldExtensions = serviceTask.getFieldExtensions();
|
||||
|
||||
return fieldExtensions.stream()
|
||||
.filter(field -> fieldName.equals(field.getFieldName()))
|
||||
.map(FieldExtension::getStringValue)
|
||||
.findFirst()
|
||||
.orElse(null);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.warn("获取Field Extension值失败: {} - {}", fieldName, e.getMessage());
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,83 @@
|
||||
package com.flowable.devops.workflow.model;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* 节点执行上下文
|
||||
*
|
||||
* 提供节点执行时所需的上下文信息
|
||||
*/
|
||||
@Data
|
||||
@Builder
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class NodeExecutionContext {
|
||||
|
||||
/**
|
||||
* 工作流ID
|
||||
*/
|
||||
private String workflowId;
|
||||
|
||||
/**
|
||||
* 执行ID
|
||||
*/
|
||||
private String executionId;
|
||||
|
||||
/**
|
||||
* 当前节点ID
|
||||
*/
|
||||
private String nodeId;
|
||||
|
||||
/**
|
||||
* 工作流变量
|
||||
*/
|
||||
private Map<String, Object> variables;
|
||||
|
||||
/**
|
||||
* 上游节点输出数据
|
||||
*/
|
||||
private Map<String, Object> nodes;
|
||||
|
||||
/**
|
||||
* 环境变量
|
||||
*/
|
||||
private Map<String, String> env;
|
||||
|
||||
/**
|
||||
* 额外的上下文数据
|
||||
*/
|
||||
private Map<String, Object> extras;
|
||||
|
||||
/**
|
||||
* 获取工作流变量
|
||||
*/
|
||||
public Object getVariable(String key) {
|
||||
return variables != null ? variables.get(key) : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取节点数据
|
||||
*/
|
||||
public Object getNodeData(String nodeId) {
|
||||
return nodes != null ? nodes.get(nodeId) : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取环境变量
|
||||
*/
|
||||
public String getEnv(String key) {
|
||||
return env != null ? env.get(key) : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取扩展数据
|
||||
*/
|
||||
public Object getExtra(String key) {
|
||||
return extras != null ? extras.get(key) : null;
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,158 @@
|
||||
package com.flowable.devops.workflow.model;
|
||||
|
||||
import lombok.AllArgsConstructor;
|
||||
import lombok.Builder;
|
||||
import lombok.Data;
|
||||
import lombok.NoArgsConstructor;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* 节点执行结果模型
|
||||
*
|
||||
* 封装节点执行的结果信息
|
||||
*/
|
||||
@Data
|
||||
@Builder
|
||||
@NoArgsConstructor
|
||||
@AllArgsConstructor
|
||||
public class NodeExecutionResult {
|
||||
|
||||
/**
|
||||
* 执行状态
|
||||
*/
|
||||
private ExecutionStatus status;
|
||||
|
||||
/**
|
||||
* 输出数据
|
||||
*/
|
||||
private Map<String, Object> output;
|
||||
|
||||
/**
|
||||
* 错误信息
|
||||
*/
|
||||
private String error;
|
||||
|
||||
/**
|
||||
* 错误类型
|
||||
*/
|
||||
private String errorType;
|
||||
|
||||
/**
|
||||
* 开始时间
|
||||
*/
|
||||
private LocalDateTime startTime;
|
||||
|
||||
/**
|
||||
* 结束时间
|
||||
*/
|
||||
private LocalDateTime endTime;
|
||||
|
||||
/**
|
||||
* 执行耗时(毫秒)
|
||||
*/
|
||||
private Long durationMs;
|
||||
|
||||
/**
|
||||
* 执行状态枚举
|
||||
*/
|
||||
public enum ExecutionStatus {
|
||||
SUCCESS,
|
||||
FAILED,
|
||||
SKIPPED
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建成功结果
|
||||
*/
|
||||
public static NodeExecutionResult success(Map<String, Object> output, LocalDateTime startTime, LocalDateTime endTime) {
|
||||
long duration = java.time.Duration.between(startTime, endTime).toMillis();
|
||||
return NodeExecutionResult.builder()
|
||||
.status(ExecutionStatus.SUCCESS)
|
||||
.output(output != null ? new HashMap<>(output) : new HashMap<>())
|
||||
.startTime(startTime)
|
||||
.endTime(endTime)
|
||||
.durationMs(duration)
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建失败结果
|
||||
*/
|
||||
public static NodeExecutionResult failed(String error, String errorType, LocalDateTime startTime, LocalDateTime endTime) {
|
||||
long duration = java.time.Duration.between(startTime, endTime).toMillis();
|
||||
return NodeExecutionResult.builder()
|
||||
.status(ExecutionStatus.FAILED)
|
||||
.output(new HashMap<>())
|
||||
.error(error)
|
||||
.errorType(errorType)
|
||||
.startTime(startTime)
|
||||
.endTime(endTime)
|
||||
.durationMs(duration)
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建跳过结果
|
||||
*/
|
||||
public static NodeExecutionResult skipped(String reason, LocalDateTime startTime, LocalDateTime endTime) {
|
||||
return NodeExecutionResult.builder()
|
||||
.status(ExecutionStatus.SKIPPED)
|
||||
.output(new HashMap<>())
|
||||
.error(reason)
|
||||
.startTime(startTime)
|
||||
.endTime(endTime)
|
||||
.durationMs(0L)
|
||||
.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* 检查执行是否成功
|
||||
*/
|
||||
public boolean isSuccess() {
|
||||
return status == ExecutionStatus.SUCCESS;
|
||||
}
|
||||
|
||||
/**
|
||||
* 检查执行是否失败
|
||||
*/
|
||||
public boolean isFailed() {
|
||||
return status == ExecutionStatus.FAILED;
|
||||
}
|
||||
|
||||
/**
|
||||
* 检查执行是否跳过
|
||||
*/
|
||||
public boolean isSkipped() {
|
||||
return status == ExecutionStatus.SKIPPED;
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取输出值
|
||||
*/
|
||||
public Object getOutputValue(String key) {
|
||||
return output != null ? output.get(key) : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* 添加输出值
|
||||
*/
|
||||
public void addOutput(String key, Object value) {
|
||||
if (output == null) {
|
||||
output = new HashMap<>();
|
||||
}
|
||||
output.put(key, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* 计算执行耗时
|
||||
*/
|
||||
public long calculateDuration() {
|
||||
if (startTime != null && endTime != null) {
|
||||
return java.time.Duration.between(startTime, endTime).toMillis();
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
}
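// Usage sketch (illustrative, not part of this commit):
//   LocalDateTime start = LocalDateTime.now();
//   Map<String, Object> out = Map.of("statusCode", 200);
//   NodeExecutionResult ok = NodeExecutionResult.success(out, start, LocalDateTime.now());
//   ok.isSuccess();                  // true
//   ok.getOutputValue("statusCode"); // 200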
|
||||
@ -0,0 +1,182 @@
|
||||
package com.flowable.devops.workflow.model;
|
||||
|
||||
import lombok.Data;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* 节点输入参数模型
|
||||
*
|
||||
* 封装节点执行时的输入参数,提供类型安全的访问方法
|
||||
*/
|
||||
@Data
|
||||
public class NodeInput {
|
||||
|
||||
private final Map<String, Object> parameters;
|
||||
|
||||
public NodeInput(Map<String, Object> parameters) {
|
||||
this.parameters = parameters != null ? new HashMap<>(parameters) : new HashMap<>();
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取字符串参数
|
||||
*/
|
||||
public String getString(String key) {
|
||||
Object value = parameters.get(key);
|
||||
return value != null ? value.toString() : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取字符串参数(带默认值)
|
||||
*/
|
||||
public String getString(String key, String defaultValue) {
|
||||
String value = getString(key);
|
||||
return value != null ? value : defaultValue;
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取必需的字符串参数
|
||||
*/
|
||||
public String getStringRequired(String key) {
|
||||
String value = getString(key);
|
||||
if (value == null || value.trim().isEmpty()) {
|
||||
throw new IllegalArgumentException("Required parameter '" + key + "' is missing or empty");
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取整数参数
|
||||
*/
|
||||
public Integer getInteger(String key) {
|
||||
Object value = parameters.get(key);
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
if (value instanceof Integer) {
|
||||
return (Integer) value;
|
||||
}
|
||||
if (value instanceof Number) {
|
||||
return ((Number) value).intValue();
|
||||
}
|
||||
try {
|
||||
return Integer.parseInt(value.toString());
|
||||
} catch (NumberFormatException e) {
|
||||
throw new IllegalArgumentException("Parameter '" + key + "' is not a valid integer: " + value);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取整数参数(带默认值)
|
||||
*/
|
||||
public Integer getInteger(String key, Integer defaultValue) {
|
||||
Integer value = getInteger(key);
|
||||
return value != null ? value : defaultValue;
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取布尔参数
|
||||
*/
|
||||
public Boolean getBoolean(String key) {
|
||||
Object value = parameters.get(key);
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
if (value instanceof Boolean) {
|
||||
return (Boolean) value;
|
||||
}
|
||||
return Boolean.parseBoolean(value.toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取布尔参数(带默认值)
|
||||
*/
|
||||
public Boolean getBoolean(String key, Boolean defaultValue) {
|
||||
Boolean value = getBoolean(key);
|
||||
return value != null ? value : defaultValue;
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取Map参数
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public Map<String, Object> getMap(String key) {
|
||||
Object value = parameters.get(key);
|
||||
if (value instanceof Map) {
|
||||
return (Map<String, Object>) value;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取Map参数(带默认值)
|
||||
*/
|
||||
public Map<String, Object> getMap(String key, Map<String, Object> defaultValue) {
|
||||
Map<String, Object> value = getMap(key);
|
||||
return value != null ? value : defaultValue;
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取List参数
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<Object> getList(String key) {
|
||||
Object value = parameters.get(key);
|
||||
if (value instanceof List) {
|
||||
return (List<Object>) value;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取List参数(带默认值)
|
||||
*/
|
||||
public List<Object> getList(String key, List<Object> defaultValue) {
|
||||
List<Object> value = getList(key);
|
||||
return value != null ? value : defaultValue;
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取原始参数值
|
||||
*/
|
||||
public Object get(String key) {
|
||||
return parameters.get(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* 检查参数是否存在
|
||||
*/
|
||||
public boolean contains(String key) {
|
||||
return parameters.containsKey(key);
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取所有参数键
|
||||
*/
|
||||
public java.util.Set<String> keySet() {
|
||||
return parameters.keySet();
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取参数数量
|
||||
*/
|
||||
public int size() {
|
||||
return parameters.size();
|
||||
}
|
||||
|
||||
/**
|
||||
* 检查是否为空
|
||||
*/
|
||||
public boolean isEmpty() {
|
||||
return parameters.isEmpty();
|
||||
}
|
||||
|
||||
/**
|
||||
* 转换为Map
|
||||
*/
|
||||
public Map<String, Object> toMap() {
|
||||
return new HashMap<>(parameters);
|
||||
}
|
||||
}
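// Usage sketch (illustrative, not part of this commit):
//   NodeInput in = new NodeInput(Map.of("url", "https://api.example.com", "timeout", 5000));
//   in.getStringRequired("url");     // "https://api.example.com"
//   in.getInteger("timeout", 30000); // 5000
//   in.getString("method", "GET");   // "GET" (default applied)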
|
||||
@ -0,0 +1,271 @@
|
||||
package com.flowable.devops.workflow.node;
|
||||
|
||||
import com.fasterxml.jackson.databind.JsonNode;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.databind.node.ArrayNode;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import com.flowable.devops.entity.NodeType;
|
||||
import com.flowable.devops.workflow.model.NodeExecutionContext;
|
||||
import com.flowable.devops.workflow.model.NodeExecutionResult;
|
||||
import com.flowable.devops.workflow.model.NodeInput;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.http.*;
|
||||
import org.springframework.stereotype.Component;
|
||||
import org.springframework.web.client.RestTemplate;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* HTTP请求节点实现
|
||||
*
|
||||
* 支持发送HTTP请求到指定URL,支持各种HTTP方法
|
||||
*/
|
||||
@Slf4j
|
||||
@Component
|
||||
public class HttpRequestNode implements WorkflowNode {
|
||||
|
||||
private final RestTemplate restTemplate;
|
||||
private final ObjectMapper objectMapper;
|
||||
|
||||
public HttpRequestNode() {
|
||||
this.restTemplate = new RestTemplate();
|
||||
this.objectMapper = new ObjectMapper();
|
||||
}
|
||||
|
||||
@Override
|
||||
public NodeType getMetadata() {
|
||||
NodeType nodeType = new NodeType();
|
||||
nodeType.setId("http_request");
|
||||
nodeType.setName("httpRequest");
|
||||
nodeType.setDisplayName("HTTP Request");
|
||||
nodeType.setCategory(NodeType.NodeCategory.API);
|
||||
nodeType.setIcon("api");
|
||||
nodeType.setDescription("发送HTTP请求到指定URL");
|
||||
nodeType.setImplementationClass(this.getClass().getName());
|
||||
nodeType.setEnabled(true);
|
||||
|
||||
// 构建字段定义JSON
|
||||
nodeType.setFields(createFieldsJson());
|
||||
|
||||
// 构建输出结构JSON
|
||||
nodeType.setOutputSchema(createOutputSchemaJson());
|
||||
|
||||
return nodeType;
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建字段定义JSON
|
||||
*/
|
||||
private JsonNode createFieldsJson() {
|
||||
ArrayNode fields = objectMapper.createArrayNode();
|
||||
|
||||
// URL字段
|
||||
ObjectNode urlField = objectMapper.createObjectNode();
|
||||
urlField.put("name", "url");
|
||||
urlField.put("label", "URL");
|
||||
urlField.put("type", "text");
|
||||
urlField.put("required", true);
|
||||
urlField.put("supportsExpression", true);
|
||||
urlField.put("placeholder", "https://api.example.com/users");
|
||||
fields.add(urlField);
|
||||
|
||||
// 请求方法字段
|
||||
ObjectNode methodField = objectMapper.createObjectNode();
|
||||
methodField.put("name", "method");
|
||||
methodField.put("label", "Method");
|
||||
methodField.put("type", "select");
|
||||
methodField.put("required", true);
|
||||
methodField.put("defaultValue", "GET");
|
||||
ArrayNode methodOptions = objectMapper.createArrayNode();
|
||||
methodOptions.add("GET").add("POST").add("PUT").add("DELETE").add("PATCH");
|
||||
methodField.set("options", methodOptions);
|
||||
fields.add(methodField);
|
||||
|
||||
// 请求头字段
|
||||
ObjectNode headersField = objectMapper.createObjectNode();
|
||||
headersField.put("name", "headers");
|
||||
headersField.put("label", "Headers");
|
||||
headersField.put("type", "key_value");
|
||||
headersField.put("supportsFieldMapping", true);
|
||||
fields.add(headersField);
|
||||
|
||||
// 请求体字段
|
||||
ObjectNode bodyField = objectMapper.createObjectNode();
|
||||
bodyField.put("name", "body");
|
||||
bodyField.put("label", "Request Body");
|
||||
bodyField.put("type", "code");
|
||||
bodyField.put("language", "json");
|
||||
bodyField.put("supportsExpression", true);
|
||||
fields.add(bodyField);
|
||||
|
||||
// 超时字段
|
||||
ObjectNode timeoutField = objectMapper.createObjectNode();
|
||||
timeoutField.put("name", "timeout");
|
||||
timeoutField.put("label", "Timeout (ms)");
|
||||
timeoutField.put("type", "number");
|
||||
timeoutField.put("defaultValue", 30000);
|
||||
fields.add(timeoutField);
|
||||
|
||||
return fields;
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建输出结构JSON Schema
|
||||
*/
|
||||
private JsonNode createOutputSchemaJson() {
|
||||
ObjectNode schema = objectMapper.createObjectNode();
|
||||
schema.put("type", "object");
|
||||
|
||||
ObjectNode properties = objectMapper.createObjectNode();
|
||||
|
||||
// statusCode字段
|
||||
ObjectNode statusCode = objectMapper.createObjectNode();
|
||||
statusCode.put("type", "number");
|
||||
statusCode.put("description", "HTTP状态码");
|
||||
properties.set("statusCode", statusCode);
|
||||
|
||||
// body字段
|
||||
ObjectNode body = objectMapper.createObjectNode();
|
||||
body.put("type", "object");
|
||||
body.put("description", "响应体");
|
||||
properties.set("body", body);
|
||||
|
||||
// headers字段
|
||||
ObjectNode headers = objectMapper.createObjectNode();
|
||||
headers.put("type", "object");
|
||||
headers.put("description", "响应头");
|
||||
properties.set("headers", headers);
|
||||
|
||||
// elapsed字段
|
||||
ObjectNode elapsed = objectMapper.createObjectNode();
|
||||
elapsed.put("type", "number");
|
||||
elapsed.put("description", "耗时(ms)");
|
||||
properties.set("elapsed", elapsed);
|
||||
|
||||
schema.set("properties", properties);
|
||||
return schema;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void validate(NodeInput input) {
|
||||
// 验证必需参数
|
||||
input.getStringRequired("url");
|
||||
|
||||
String method = input.getString("method", "GET");
|
||||
if (!isValidHttpMethod(method)) {
|
||||
throw new IllegalArgumentException("Invalid HTTP method: " + method);
|
||||
}
|
||||
|
||||
Integer timeout = input.getInteger("timeout");
|
||||
if (timeout != null && timeout < 0) {
|
||||
throw new IllegalArgumentException("Timeout must be non-negative");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public NodeExecutionResult execute(NodeInput input, NodeExecutionContext context) {
|
||||
LocalDateTime startTime = LocalDateTime.now();
|
||||
|
||||
try {
|
||||
// 验证输入参数
|
||||
validate(input);
|
||||
|
||||
// 获取参数(表达式已被解析)
|
||||
String url = input.getStringRequired("url");
|
||||
String method = input.getString("method", "GET");
|
||||
Map<String, Object> headers = input.getMap("headers", new HashMap<>());
|
||||
String body = input.getString("body");
|
||||
Integer timeout = input.getInteger("timeout", 30000);
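// Note: "timeout" is validated and read here but not applied to this RestTemplate instance.
// A minimal sketch of honoring it (assumption, not part of this commit) would configure the
// request factory, e.g.:
//   SimpleClientHttpRequestFactory factory = new SimpleClientHttpRequestFactory();
//   factory.setConnectTimeout(timeout);
//   factory.setReadTimeout(timeout);
//   restTemplate.setRequestFactory(factory);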
|
||||
|
||||
log.info("执行HTTP请求: {} {}", method, url);
|
||||
|
||||
// 构建HTTP请求
|
||||
HttpHeaders httpHeaders = new HttpHeaders();
|
||||
|
||||
// 设置默认Content-Type
|
||||
if (body != null && !body.trim().isEmpty() && !headers.containsKey("Content-Type")) {
|
||||
httpHeaders.setContentType(MediaType.APPLICATION_JSON);
|
||||
}
|
||||
|
||||
// 添加自定义头
|
||||
for (Map.Entry<String, Object> entry : headers.entrySet()) {
|
||||
httpHeaders.set(entry.getKey(), entry.getValue().toString());
|
||||
}
|
||||
|
||||
HttpEntity<String> entity = new HttpEntity<>(body, httpHeaders);
|
||||
|
||||
// 发送请求
|
||||
long requestStart = System.currentTimeMillis();
|
||||
ResponseEntity<String> response = restTemplate.exchange(
|
||||
url,
|
||||
HttpMethod.valueOf(method.toUpperCase()),
|
||||
entity,
|
||||
String.class
|
||||
);
|
||||
long requestEnd = System.currentTimeMillis();
|
||||
long elapsed = requestEnd - requestStart;
|
||||
|
||||
// 解析响应体
|
||||
Object responseBody = parseResponseBody(response.getBody());
|
||||
|
||||
// 构建输出
|
||||
Map<String, Object> output = new HashMap<>();
|
||||
output.put("statusCode", response.getStatusCode().value());
|
||||
output.put("body", responseBody);
|
||||
output.put("headers", response.getHeaders().toSingleValueMap());
|
||||
output.put("elapsed", elapsed);
|
||||
|
||||
LocalDateTime endTime = LocalDateTime.now();
|
||||
log.info("HTTP请求成功: {} {}, 状态码: {}, 耗时: {}ms",
|
||||
method, url, response.getStatusCode().value(), elapsed);
|
||||
|
||||
return NodeExecutionResult.success(output, startTime, endTime);
|
||||
|
||||
} catch (Exception e) {
|
||||
LocalDateTime endTime = LocalDateTime.now();
|
||||
log.error("HTTP请求执行失败: {}", e.getMessage(), e);
|
||||
|
||||
// 失败信息已包含在 error / errorType 中,无需再构建未使用的输出Map
return NodeExecutionResult.failed(e.getMessage(), e.getClass().getSimpleName(), startTime, endTime);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 解析响应体
|
||||
*/
|
||||
private Object parseResponseBody(String responseBody) {
|
||||
if (responseBody == null || responseBody.trim().isEmpty()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
// 尝试解析为JSON
|
||||
return objectMapper.readValue(responseBody, Object.class);
|
||||
} catch (Exception e) {
|
||||
// 解析失败,返回原始文本
|
||||
log.debug("响应体不是有效的JSON,返回原始文本: {}", e.getMessage());
|
||||
return responseBody;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 验证HTTP方法是否有效
|
||||
*/
|
||||
private boolean isValidHttpMethod(String method) {
|
||||
try {
|
||||
HttpMethod.valueOf(method.toUpperCase());
|
||||
return true;
|
||||
} catch (IllegalArgumentException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,93 @@
|
||||
package com.flowable.devops.workflow.node;
|
||||
|
||||
import com.flowable.devops.entity.NodeType;
|
||||
import com.flowable.devops.workflow.model.NodeExecutionContext;
|
||||
import com.flowable.devops.workflow.model.NodeExecutionResult;
|
||||
import com.flowable.devops.workflow.model.NodeInput;
|
||||
|
||||
/**
|
||||
* 工作流节点接口
|
||||
*
|
||||
* 所有工作流节点必须实现此接口
|
||||
*
|
||||
* 设计原则:
|
||||
* 1. 节点应该是无状态的,所有状态通过参数传递
|
||||
* 2. 节点应该是幂等的,多次执行相同输入应得到相同结果
|
||||
* 3. 节点应该处理自己的错误,不应该让未处理的异常向上抛出
|
||||
* 4. 节点应该提供详细的元数据,支持前端动态渲染
|
||||
*/
|
||||
public interface WorkflowNode {
|
||||
|
||||
/**
|
||||
* 获取节点元数据
|
||||
*
|
||||
* 包含节点的字段定义、输出结构、分类等信息
|
||||
* 前端根据此信息动态生成表单和字段映射
|
||||
*
|
||||
* @return 节点类型元数据
|
||||
*/
|
||||
NodeType getMetadata();
|
||||
|
||||
/**
|
||||
* 执行节点
|
||||
*
|
||||
* @param input 节点输入参数(表达式已解析)
|
||||
* @param context 执行上下文,包含工作流变量、环境变量等
|
||||
* @return 节点执行结果
|
||||
*/
|
||||
NodeExecutionResult execute(NodeInput input, NodeExecutionContext context);
|
||||
|
||||
/**
|
||||
* 验证节点配置
|
||||
*
|
||||
* 在节点执行前验证配置是否正确
|
||||
* 可选实现,默认不做验证
|
||||
*
|
||||
* @param input 节点输入参数
|
||||
* @throws IllegalArgumentException 如果配置无效
|
||||
*/
|
||||
default void validate(NodeInput input) {
|
||||
// 默认实现:不做验证
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取节点类型ID
|
||||
*
|
||||
* 便捷方法,等价于 getMetadata().getId()
|
||||
*
|
||||
* @return 节点类型ID
|
||||
*/
|
||||
default String getNodeTypeId() {
|
||||
NodeType metadata = getMetadata();
|
||||
return metadata != null ? metadata.getId() : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* 获取节点显示名称
|
||||
*
|
||||
* 便捷方法,等价于 getMetadata().getDisplayName()
|
||||
*
|
||||
* @return 节点显示名称
|
||||
*/
|
||||
default String getDisplayName() {
|
||||
NodeType metadata = getMetadata();
|
||||
return metadata != null ? metadata.getDisplayName() : null;
|
||||
}
|
||||
|
||||
/**
|
||||
* 检查节点是否支持指定的配置参数
|
||||
*
|
||||
* @param parameterName 参数名称
|
||||
* @return 是否支持该参数
|
||||
*/
|
||||
default boolean supportsParameter(String parameterName) {
|
||||
NodeType metadata = getMetadata();
|
||||
if (metadata == null || metadata.getFields() == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// 这里需要解析 fields JSON,简化实现返回 true
|
||||
// 实际实现中应该检查字段定义
|
||||
return true;
|
||||
}
|
||||
}
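// Illustrative sketch, not part of this commit: a minimal WorkflowNode implementation following
// the contract above. The "delay" node type, its "millis" parameter and the NodeType setters used
// here mirror HttpRequestNode and are assumptions for the example.
//
//   @Component
//   public class DelayNode implements WorkflowNode {
//
//       @Override
//       public NodeType getMetadata() {
//           NodeType nodeType = new NodeType();
//           nodeType.setId("delay");
//           nodeType.setName("delay");
//           nodeType.setDisplayName("Delay");
//           nodeType.setEnabled(true);
//           return nodeType;
//       }
//
//       @Override
//       public NodeExecutionResult execute(NodeInput input, NodeExecutionContext context) {
//           java.time.LocalDateTime start = java.time.LocalDateTime.now();
//           try {
//               Thread.sleep(input.getInteger("millis", 1000));
//               return NodeExecutionResult.success(java.util.Map.of("slept", true), start, java.time.LocalDateTime.now());
//           } catch (InterruptedException e) {
//               Thread.currentThread().interrupt();
//               return NodeExecutionResult.failed(e.getMessage(), "InterruptedException", start, java.time.LocalDateTime.now());
//           }
//       }
//   }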
|
||||
@ -0,0 +1,165 @@
|
||||
package com.flowable.devops.workflow.node.registry;
|
||||
|
||||
import com.flowable.devops.entity.NodeType;
|
||||
import com.flowable.devops.repository.NodeTypeRepository;
|
||||
import com.flowable.devops.workflow.node.WorkflowNode;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import jakarta.annotation.PostConstruct;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
|
||||
/**
|
||||
* 节点类型注册中心
|
||||
*/
|
||||
@Slf4j
|
||||
@Service
|
||||
public class NodeTypeRegistry {
|
||||
|
||||
private final Map<String, NodeType> nodeTypeMetadata = new ConcurrentHashMap<>();
|
||||
private final Map<String, WorkflowNode> nodeInstances = new ConcurrentHashMap<>();
|
||||
|
||||
@Autowired
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
@Autowired
|
||||
private NodeTypeRepository nodeTypeRepository;
|
||||
|
||||
@PostConstruct
|
||||
public void initialize() {
|
||||
log.info("开始初始化节点类型注册中心...");
|
||||
|
||||
try {
|
||||
scanAndRegisterNodes();
|
||||
loadNodesFromDatabase();
|
||||
log.info("节点类型注册中心初始化完成,共注册 {} 个节点类型", nodeTypeMetadata.size());
|
||||
} catch (Exception e) {
|
||||
log.error("节点类型注册中心初始化失败", e);
|
||||
}
|
||||
}
|
||||
|
||||
private void scanAndRegisterNodes() {
|
||||
Map<String, WorkflowNode> nodeBeans = applicationContext.getBeansOfType(WorkflowNode.class);
|
||||
log.info("发现 {} 个WorkflowNode实现类", nodeBeans.size());
|
||||
|
||||
for (Map.Entry<String, WorkflowNode> entry : nodeBeans.entrySet()) {
|
||||
try {
|
||||
registerNode(entry.getValue(), entry.getKey());
|
||||
} catch (Exception e) {
|
||||
log.error("注册节点失败: {} - {}", entry.getKey(), e.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void loadNodesFromDatabase() {
|
||||
try {
|
||||
List<NodeType> dbNodeTypes = nodeTypeRepository.findByEnabledTrueOrderByDisplayOrderAsc();
|
||||
for (NodeType nodeType : dbNodeTypes) {
|
||||
if (!nodeTypeMetadata.containsKey(nodeType.getId())) {
|
||||
nodeTypeMetadata.put(nodeType.getId(), nodeType);
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.warn("从数据库加载节点类型失败: {}", e.getMessage());
|
||||
}
|
||||
}
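// Note: scanAndRegisterNodes() runs first, so types backed by Java implementations take priority;
// this method only fills in metadata for node types that have no in-code implementation.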
|
||||
|
||||
private void registerNode(WorkflowNode node, String beanName) {
|
||||
NodeType metadata = node.getMetadata();
|
||||
validateNodeMetadata(metadata, beanName);
|
||||
|
||||
String nodeTypeId = metadata.getId();
|
||||
if (nodeTypeMetadata.containsKey(nodeTypeId)) {
|
||||
log.warn("节点类型ID重复,跳过注册: {}", nodeTypeId);
|
||||
return;
|
||||
}
|
||||
|
||||
nodeTypeMetadata.put(nodeTypeId, metadata);
|
||||
nodeInstances.put(nodeTypeId, node);
|
||||
|
||||
try {
|
||||
nodeTypeRepository.save(metadata);
|
||||
} catch (Exception e) {
|
||||
log.warn("保存节点类型到数据库失败: {}", e.getMessage());
|
||||
}
|
||||
|
||||
log.info("✓ 注册节点: {} ({})", metadata.getDisplayName(), nodeTypeId);
|
||||
}
|
||||
|
||||
private void validateNodeMetadata(NodeType metadata, String beanName) {
|
||||
if (metadata == null || metadata.getId() == null || metadata.getDisplayName() == null) {
|
||||
throw new IllegalArgumentException("节点元数据不完整: " + beanName);
|
||||
}
|
||||
}
|
||||
|
||||
public NodeType getNodeMetadata(String nodeTypeId) {
|
||||
return nodeTypeMetadata.get(nodeTypeId);
|
||||
}
|
||||
|
||||
public WorkflowNode getNodeInstance(String nodeTypeId) {
|
||||
return nodeInstances.get(nodeTypeId);
|
||||
}
|
||||
|
||||
public List<NodeType> getAllNodeTypes() {
|
||||
return new ArrayList<>(nodeTypeMetadata.values());
|
||||
}
|
||||
|
||||
public List<NodeType> getEnabledNodeTypes() {
|
||||
return nodeTypeMetadata.values().stream()
|
||||
.filter(NodeType::isEnabled)
|
||||
.sorted(Comparator.comparing(NodeType::getDisplayName))
|
||||
.toList();
|
||||
}
|
||||
|
||||
public List<NodeType> getNodeTypesByCategory(NodeType.NodeCategory category) {
|
||||
return nodeTypeMetadata.values().stream()
|
||||
.filter(nodeType -> Objects.equals(nodeType.getCategory(), category))
|
||||
.filter(NodeType::isEnabled)
|
||||
.sorted(Comparator.comparing(NodeType::getDisplayName))
|
||||
.toList();
|
||||
}
|
||||
|
||||
public boolean hasNodeType(String nodeTypeId) {
|
||||
return nodeTypeMetadata.containsKey(nodeTypeId);
|
||||
}
|
||||
|
||||
public boolean hasNodeImplementation(String nodeTypeId) {
|
||||
return nodeInstances.containsKey(nodeTypeId);
|
||||
}
|
||||
|
||||
/**
|
||||
* 从数据库节点类型注册到注册表
|
||||
*/
|
||||
public void registerFromDatabase(NodeType nodeType) {
|
||||
if (nodeType == null || nodeType.getId() == null) {
|
||||
log.warn("无效的节点类型,跳过注册");
|
||||
return;
|
||||
}
|
||||
|
||||
nodeTypeMetadata.put(nodeType.getId(), nodeType);
|
||||
log.debug("从数据库注册节点类型: {}", nodeType.getId());
|
||||
}
|
||||
|
||||
/**
|
||||
* 注销节点类型
|
||||
*/
|
||||
public void unregister(String nodeTypeId) {
|
||||
if (nodeTypeId != null) {
|
||||
nodeTypeMetadata.remove(nodeTypeId);
|
||||
nodeInstances.remove(nodeTypeId);
|
||||
log.debug("注销节点类型: {}", nodeTypeId);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 清空所有注册的节点类型
|
||||
*/
|
||||
public void clear() {
|
||||
nodeTypeMetadata.clear();
|
||||
nodeInstances.clear();
|
||||
log.debug("清空节点类型注册表");
|
||||
}
|
||||
}
|
||||
289
backend/src/main/resources/application.yml
Normal file
289
backend/src/main/resources/application.yml
Normal file
@ -0,0 +1,289 @@
|
||||
spring:
|
||||
application:
|
||||
name: flowable-devops-backend
|
||||
profiles:
|
||||
active: dev
|
||||
|
||||
# 数据源配置
|
||||
datasource:
|
||||
url: ${SPRING_DATASOURCE_URL:jdbc:mysql://localhost:3306/flowable-devops?useSSL=false&allowPublicKeyRetrieval=true&serverTimezone=UTC&characterEncoding=utf8}
|
||||
username: ${SPRING_DATASOURCE_USERNAME:root}
|
||||
password: ${SPRING_DATASOURCE_PASSWORD:123456}
|
||||
driver-class-name: com.mysql.cj.jdbc.Driver
|
||||
hikari:
|
||||
maximum-pool-size: 20
|
||||
minimum-idle: 5
|
||||
idle-timeout: 300000
|
||||
connection-timeout: 30000
|
||||
max-lifetime: 1800000
|
||||
pool-name: FlowableDevOpsHikariCP
|
||||
connection-test-query: SELECT 1
|
||||
|
||||
# JPA配置
|
||||
jpa:
|
||||
hibernate:
|
||||
ddl-auto: update
|
||||
properties:
|
||||
hibernate:
|
||||
dialect: org.hibernate.dialect.MySQL8Dialect
|
||||
format_sql: false
|
||||
show_sql: false
|
||||
use_sql_comments: false
|
||||
jdbc:
|
||||
batch_size: 20
|
||||
order_inserts: true
|
||||
order_updates: true
|
||||
show-sql: false
|
||||
open-in-view: false
|
||||
|
||||
# Redis配置
|
||||
data:
|
||||
redis:
|
||||
host: ${SPRING_REDIS_HOST:localhost}
|
||||
port: ${SPRING_REDIS_PORT:6379}
|
||||
password: ${SPRING_REDIS_PASSWORD:}
|
||||
database: ${SPRING_REDIS_DATABASE:0}
|
||||
timeout: 5000ms
|
||||
lettuce:
|
||||
pool:
|
||||
max-active: 20
|
||||
max-wait: -1ms
|
||||
max-idle: 10
|
||||
min-idle: 2
|
||||
|
||||
# JSON序列化配置
|
||||
jackson:
|
||||
serialization:
|
||||
write-dates-as-timestamps: false
|
||||
deserialization:
|
||||
fail-on-unknown-properties: false
|
||||
time-zone: Asia/Shanghai
|
||||
date-format: yyyy-MM-dd HH:mm:ss
|
||||
|
||||
# WebFlux配置
|
||||
webflux:
|
||||
static-path-pattern: /static/**
|
||||
|
||||
# Flowable配置
|
||||
flowable:
|
||||
# 数据库配置
|
||||
database-schema-update: true
|
||||
db-history-used: true
|
||||
database-type: mysql
|
||||
|
||||
# 流程引擎配置
|
||||
process:
|
||||
definition-cache-limit: 100
|
||||
enable-safe-xml: true
|
||||
|
||||
# 异步执行器配置(MVP阶段关闭)
|
||||
async:
|
||||
executor:
|
||||
activate: false
|
||||
|
||||
# REST API配置
|
||||
rest:
|
||||
app:
|
||||
authentication-mode: verify-privilege
|
||||
|
||||
# 服务器配置
|
||||
server:
|
||||
port: ${SERVER_PORT:8080}
|
||||
servlet:
|
||||
context-path: /
|
||||
compression:
|
||||
enabled: true
|
||||
mime-types: text/html,text/xml,text/plain,text/css,text/javascript,application/javascript,application/json
|
||||
error:
|
||||
include-stacktrace: never
|
||||
include-message: always
|
||||
|
||||
# 日志配置
|
||||
logging:
|
||||
level:
|
||||
root: INFO
|
||||
com.flowable.devops: DEBUG
|
||||
org.flowable: INFO
|
||||
org.springframework: INFO
|
||||
org.hibernate.SQL: ${LOG_SQL:OFF}
|
||||
org.hibernate.type.descriptor.sql.BasicBinder: ${LOG_SQL_PARAMS:OFF}
|
||||
com.zaxxer.hikari: INFO
|
||||
pattern:
|
||||
console: "%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n"
|
||||
file: "%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n"
|
||||
file:
|
||||
name: logs/flowable-devops.log
|
||||
max-size: 100MB
|
||||
max-history: 30
|
||||
|
||||
# 管理端点配置
|
||||
management:
|
||||
endpoints:
|
||||
web:
|
||||
exposure:
|
||||
include: health,info,metrics,prometheus
|
||||
base-path: /actuator
|
||||
endpoint:
|
||||
health:
|
||||
show-details: when-authorized
|
||||
metrics:
|
||||
enabled: true
|
||||
metrics:
|
||||
export:
|
||||
prometheus:
|
||||
enabled: true
|
||||
|
||||
# 应用自定义配置
|
||||
flowable-devops:
|
||||
# 工作流配置
|
||||
workflow:
|
||||
# 默认执行策略
|
||||
execution-mode: sync
|
||||
# 表达式引擎配置
|
||||
expression:
|
||||
engine: juel
|
||||
enable-security: true
|
||||
# 节点执行超时时间(秒)
|
||||
node-timeout: 300
|
||||
|
||||
# 节点类型配置
|
||||
node-types:
|
||||
# 是否在启动时加载默认节点类型
|
||||
load-defaults: true
|
||||
# 默认节点类型配置文件路径
|
||||
default-config-path: classpath:node-types/
|
||||
|
||||
# 任务配置
|
||||
task:
|
||||
# 任务超时检查间隔(分钟)
|
||||
timeout-check-interval: 60
|
||||
# 默认任务优先级
|
||||
default-priority: 50
|
||||
|
||||
# 安全配置
|
||||
security:
|
||||
# 是否启用认证(暂时关闭,后续可配置)
|
||||
authentication-enabled: false
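# Illustrative note (assumption, not part of this commit): the flowable-devops.* keys above are
# typically bound on the Java side via a @ConfigurationProperties(prefix = "flowable-devops") bean.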
|
||||
|
||||
---
|
||||
# 开发环境配置
|
||||
spring:
|
||||
config:
|
||||
activate:
|
||||
on-profile: dev
|
||||
|
||||
# 开发环境数据库配置
|
||||
datasource:
|
||||
hikari:
|
||||
minimum-idle: 2
|
||||
maximum-pool-size: 10
|
||||
|
||||
# 开发环境日志配置
|
||||
logging:
|
||||
level:
|
||||
com.flowable.devops: DEBUG
|
||||
org.springframework.web: DEBUG
|
||||
|
||||
# 开发环境Flowable配置
|
||||
flowable:
|
||||
database-schema-update: true
|
||||
database-type: mysql
|
||||
|
||||
---
|
||||
# 测试环境配置
|
||||
spring:
|
||||
config:
|
||||
activate:
|
||||
on-profile: test
|
||||
|
||||
# 测试环境使用MySQL数据库
|
||||
datasource:
|
||||
url: ${SPRING_DATASOURCE_URL:jdbc:mysql://172.22.222.111:3306/flowable-devops?useSSL=false&allowPublicKeyRetrieval=true&serverTimezone=UTC}
|
||||
username: ${SPRING_DATASOURCE_USERNAME:flowable-devops}
|
||||
password: ${SPRING_DATASOURCE_PASSWORD:Qichen5210523}
|
||||
driver-class-name: com.mysql.cj.jdbc.Driver
|
||||
hikari:
|
||||
minimum-idle: 2
|
||||
maximum-pool-size: 5
|
||||
|
||||
# JPA配置
|
||||
jpa:
|
||||
hibernate:
|
||||
ddl-auto: create-drop
|
||||
properties:
|
||||
hibernate:
|
||||
dialect: org.hibernate.dialect.MySQL8Dialect
|
||||
format_sql: false
|
||||
show_sql: false
|
||||
use_sql_comments: false
|
||||
show-sql: false
|
||||
|
||||
# 测试环境使用Redis
|
||||
data:
|
||||
redis:
|
||||
host: ${SPRING_REDIS_HOST:172.22.222.111}
|
||||
port: ${SPRING_REDIS_PORT:6379}
|
||||
password: ${SPRING_REDIS_PASSWORD:}
|
||||
database: ${SPRING_REDIS_DATABASE:5}
|
||||
|
||||
# 测试环境Flowable配置
|
||||
flowable:
|
||||
database-schema-update: create-drop
|
||||
db-history-used: true
|
||||
database-type: mysql
|
||||
async:
|
||||
executor:
|
||||
activate: false
|
||||
|
||||
# 测试环境应用配置
|
||||
flowable-devops:
|
||||
node-types:
|
||||
load-defaults: true
|
||||
|
||||
# 测试环境日志配置
|
||||
logging:
|
||||
level:
|
||||
root: INFO
|
||||
com.flowable.devops: DEBUG
|
||||
org.springframework: WARN
|
||||
org.flowable: WARN
|
||||
org.hibernate.SQL: OFF
|
||||
|
||||
---
|
||||
# 生产环境配置
|
||||
spring:
|
||||
config:
|
||||
activate:
|
||||
on-profile: prod
|
||||
|
||||
# 生产环境数据库连接池配置
|
||||
datasource:
|
||||
hikari:
|
||||
minimum-idle: 10
|
||||
maximum-pool-size: 50
|
||||
leak-detection-threshold: 30000
|
||||
|
||||
# JPA配置
|
||||
jpa:
|
||||
hibernate:
|
||||
ddl-auto: validate
|
||||
show-sql: false
|
||||
|
||||
# 生产环境日志配置
|
||||
logging:
|
||||
level:
|
||||
root: WARN
|
||||
com.flowable.devops: INFO
|
||||
file:
|
||||
name: /var/log/flowable-devops/application.log
|
||||
|
||||
# 生产环境Flowable配置
|
||||
flowable:
|
||||
database-schema-update: false
|
||||
|
||||
# 生产环境管理端点配置
|
||||
management:
|
||||
endpoints:
|
||||
web:
|
||||
exposure:
|
||||
include: health,info,metrics
|
||||
@ -0,0 +1,18 @@
|
||||
package com.flowable.devops;
|
||||
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
import org.springframework.test.context.ActiveProfiles;
|
||||
|
||||
/**
|
||||
* 应用程序启动测试
|
||||
*/
|
||||
@SpringBootTest
|
||||
@ActiveProfiles("test")
|
||||
class FlowableDevopsApplicationTests {
|
||||
|
||||
@Test
|
||||
void contextLoads() {
|
||||
// 测试应用程序上下文是否能够正常加载
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,16 @@
|
||||
package com.flowable.devops.config;
|
||||
|
||||
import org.springframework.boot.test.context.TestConfiguration;
|
||||
import org.springframework.context.annotation.Primary;
|
||||
import org.springframework.context.annotation.Profile;
|
||||
|
||||
/**
|
||||
* 测试配置类,用于覆盖生产环境配置
|
||||
*/
|
||||
@TestConfiguration
|
||||
@Profile("test")
|
||||
public class TestConfig {
|
||||
|
||||
// 暂时为空,让基础依赖正常工作
|
||||
// 后续可以在这里添加测试专用的Bean配置
|
||||
}
|
||||
@ -0,0 +1,433 @@
|
||||
package com.flowable.devops.integration;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.databind.node.ArrayNode;
|
||||
import com.fasterxml.jackson.databind.node.ObjectNode;
|
||||
import com.flowable.devops.entity.WorkflowDefinition;
|
||||
import com.flowable.devops.service.WorkflowService;
|
||||
import com.flowable.devops.service.WorkflowService.WorkflowExecutionResult;
|
||||
import org.flowable.engine.RuntimeService;
|
||||
import org.flowable.engine.TaskService;
|
||||
import org.flowable.task.api.Task;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
import org.springframework.test.context.ActiveProfiles;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
|
||||
/**
|
||||
* 工作流端到端集成测试
|
||||
*
|
||||
* 测试完整流程:
|
||||
* 1. 创建工作流定义(包含HTTP请求节点和审批节点)
|
||||
* 2. 激活工作流
|
||||
* 3. 执行工作流
|
||||
* 4. 处理审批任务
|
||||
* 5. 验证执行结果
|
||||
*/
|
||||
@SpringBootTest
|
||||
@ActiveProfiles("test")
|
||||
@Transactional
|
||||
public class WorkflowIntegrationTest {
|
||||
|
||||
@Autowired
|
||||
private WorkflowService workflowService;
|
||||
|
||||
@Autowired
|
||||
private RuntimeService runtimeService;
|
||||
|
||||
@Autowired
|
||||
private TaskService taskService;
|
||||
|
||||
private final ObjectMapper objectMapper = new ObjectMapper();
|
||||
|
||||
/**
|
||||
* 端到端工作流测试:HTTP请求 → 审批
|
||||
*/
|
||||
@Test
|
||||
public void testHttpRequestToApprovalWorkflow() throws Exception {
|
||||
// 1. 创建工作流定义
|
||||
WorkflowDefinition workflow = createHttpToApprovalWorkflow();
|
||||
WorkflowDefinition created = workflowService.create(workflow);
|
||||
|
||||
assertNotNull(created);
|
||||
assertNotNull(created.getId());
|
||||
assertEquals(WorkflowDefinition.WorkflowStatus.DRAFT, created.getStatus());
|
||||
|
||||
// 2. 激活工作流
|
||||
WorkflowDefinition activated = workflowService.activate(created.getId());
|
||||
|
||||
assertEquals(WorkflowDefinition.WorkflowStatus.ACTIVE, activated.getStatus());
|
||||
assertNotNull(activated.getFlowableProcessDefinitionId());
|
||||
assertNotNull(activated.getFlowableDeploymentId());
|
||||
|
||||
// 3. 执行工作流
|
||||
Map<String, Object> input = new HashMap<>();
|
||||
input.put("userId", "test_user_001");
|
||||
input.put("requestData", Map.of("type", "approval_request"));
|
||||
|
||||
WorkflowExecutionResult result = workflowService.execute(created.getId(), input);
|
||||
|
||||
assertNotNull(result);
|
||||
assertNotNull(result.getProcessInstanceId());
|
||||
assertNotNull(result.getExecutionId());
|
||||
assertEquals("running", result.getStatus()); // 应该在审批节点暂停
|
||||
assertNotNull(result.getStartedAt());
|
||||
assertNull(result.getEndedAt()); // 尚未结束
|
||||
|
||||
// 4. 验证审批任务存在
|
||||
List<Task> tasks = taskService.createTaskQuery()
|
||||
.processInstanceId(result.getProcessInstanceId())
|
||||
.list();
|
||||
|
||||
assertFalse(tasks.isEmpty(), "应该存在待办审批任务");
|
||||
Task approvalTask = tasks.get(0);
|
||||
assertEquals("审批", approvalTask.getName());
|
||||
|
||||
// 5. 完成审批任务
|
||||
Map<String, Object> approvalResult = new HashMap<>();
|
||||
approvalResult.put("approved", true);
|
||||
approvalResult.put("comment", "测试通过");
|
||||
|
||||
taskService.complete(approvalTask.getId(), approvalResult);
|
||||
|
||||
// 6. 验证工作流完成
|
||||
// 等待工作流结束
|
||||
Thread.sleep(1000);
|
||||
|
||||
// 检查流程实例是否已结束
|
||||
boolean isEnded = runtimeService.createProcessInstanceQuery()
|
||||
.processInstanceId(result.getProcessInstanceId())
|
||||
.count() == 0;
|
||||
|
||||
assertTrue(isEnded, "工作流应该已完成");
|
||||
|
||||
// 7. 验证执行历史
|
||||
WorkflowService.WorkflowExecutionDetail detail = workflowService
|
||||
.getExecutionDetail(result.getExecutionId());
|
||||
|
||||
assertNotNull(detail);
|
||||
assertEquals("completed", detail.getStatus());
|
||||
assertNotNull(detail.getEndedAt());
|
||||
}
|
||||
|
||||
/**
|
||||
* 测试条件分支工作流
|
||||
*/
|
||||
@Test
|
||||
public void testConditionalWorkflow() throws Exception {
|
||||
// 1. 创建带条件分支的工作流
|
||||
WorkflowDefinition workflow = createConditionalWorkflow();
|
||||
WorkflowDefinition created = workflowService.create(workflow);
|
||||
WorkflowDefinition activated = workflowService.activate(created.getId());
|
||||
|
||||
// 2. 测试分支1:金额 > 1000
|
||||
Map<String, Object> highAmountInput = new HashMap<>();
|
||||
highAmountInput.put("amount", 1500);
|
||||
highAmountInput.put("applicant", "张三");
|
||||
|
||||
WorkflowExecutionResult result1 = workflowService.execute(created.getId(), highAmountInput);
|
||||
assertEquals("running", result1.getStatus());
|
||||
|
||||
// 验证高金额审批任务存在
|
||||
List<Task> highAmountTasks = taskService.createTaskQuery()
|
||||
.processInstanceId(result1.getProcessInstanceId())
|
||||
.list();
|
||||
|
||||
assertFalse(highAmountTasks.isEmpty());
|
||||
assertEquals("高级审批", highAmountTasks.get(0).getName());
|
||||
|
||||
// 3. 测试分支2:金额 <= 1000
|
||||
Map<String, Object> lowAmountInput = new HashMap<>();
|
||||
lowAmountInput.put("amount", 800);
|
||||
lowAmountInput.put("applicant", "李四");
|
||||
|
||||
WorkflowExecutionResult result2 = workflowService.execute(created.getId(), lowAmountInput);
|
||||
assertEquals("running", result2.getStatus());
|
||||
|
||||
// 验证低金额审批任务存在
|
||||
List<Task> lowAmountTasks = taskService.createTaskQuery()
|
||||
.processInstanceId(result2.getProcessInstanceId())
|
||||
.list();
|
||||
|
||||
assertFalse(lowAmountTasks.isEmpty());
|
||||
assertEquals("普通审批", lowAmountTasks.get(0).getName());
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建HTTP请求 → 审批的工作流定义
|
||||
*/
|
||||
private WorkflowDefinition createHttpToApprovalWorkflow() {
|
||||
WorkflowDefinition workflow = new WorkflowDefinition();
|
||||
workflow.setId("test_http_approval_workflow");
|
||||
workflow.setName("HTTP请求审批工作流");
|
||||
workflow.setDescription("测试用工作流:HTTP请求后进行审批");
|
||||
|
||||
ObjectNode definition = objectMapper.createObjectNode();
|
||||
definition.put("id", workflow.getId());
|
||||
definition.put("name", workflow.getName());
|
||||
definition.put("schemaVersion", "1.0");
|
||||
|
||||
// 节点定义
|
||||
ArrayNode nodes = objectMapper.createArrayNode();
|
||||
|
||||
// HTTP请求节点
|
||||
ObjectNode httpNode = objectMapper.createObjectNode();
|
||||
httpNode.put("id", "http_request_1");
|
||||
httpNode.put("name", "API调用");
|
||||
httpNode.put("type", "http_request");
|
||||
|
||||
ObjectNode httpPos = objectMapper.createObjectNode();
|
||||
httpPos.put("x", 100);
|
||||
httpPos.put("y", 100);
|
||||
httpNode.set("position", httpPos);
|
||||
|
||||
ObjectNode httpConfig = objectMapper.createObjectNode();
|
||||
httpConfig.put("url", "https://jsonplaceholder.typicode.com/posts/1");
|
||||
httpConfig.put("method", "GET");
|
||||
httpConfig.put("timeout", 30000);
|
||||
httpNode.set("config", httpConfig);
|
||||
|
||||
nodes.add(httpNode);
|
||||
|
||||
// 审批节点
|
||||
ObjectNode approvalNode = objectMapper.createObjectNode();
|
||||
approvalNode.put("id", "approval_1");
|
||||
approvalNode.put("name", "审批");
|
||||
approvalNode.put("type", "approval");
|
||||
|
||||
ObjectNode approvalPos = objectMapper.createObjectNode();
|
||||
approvalPos.put("x", 300);
|
||||
approvalPos.put("y", 100);
|
||||
approvalNode.set("position", approvalPos);
|
||||
|
||||
ObjectNode approvalConfig = objectMapper.createObjectNode();
|
||||
approvalConfig.put("assignee", "admin");
|
||||
approvalNode.set("config", approvalConfig);
|
||||
|
||||
nodes.add(approvalNode);
|
||||
|
||||
definition.set("nodes", nodes);
|
||||
|
||||
// 连线定义
|
||||
ArrayNode edges = objectMapper.createArrayNode();
|
||||
|
||||
ObjectNode edge = objectMapper.createObjectNode();
|
||||
edge.put("source", "http_request_1");
|
||||
edge.put("target", "approval_1");
|
||||
|
||||
edges.add(edge);
|
||||
|
||||
definition.set("edges", edges);
|
||||
|
||||
workflow.setDefinition(definition);
|
||||
return workflow;
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建带条件分支的工作流定义
|
||||
*/
|
||||
private WorkflowDefinition createConditionalWorkflow() {
|
||||
WorkflowDefinition workflow = new WorkflowDefinition();
|
||||
workflow.setId("test_conditional_workflow");
|
||||
workflow.setName("条件分支工作流");
|
||||
workflow.setDescription("根据金额自动选择审批路径");
|
||||
|
||||
ObjectNode definition = objectMapper.createObjectNode();
|
||||
definition.put("id", workflow.getId());
|
||||
definition.put("name", workflow.getName());
|
||||
definition.put("schemaVersion", "1.0");
|
||||
|
||||
// 节点定义
|
||||
ArrayNode nodes = objectMapper.createArrayNode();
|
||||
|
||||
// 条件网关
|
||||
ObjectNode gatewayNode = objectMapper.createObjectNode();
|
||||
gatewayNode.put("id", "amount_check");
|
||||
gatewayNode.put("name", "金额检查");
|
||||
gatewayNode.put("type", "exclusive_gateway");
|
||||
|
||||
ObjectNode gatewayPos = objectMapper.createObjectNode();
|
||||
gatewayPos.put("x", 100);
|
||||
gatewayPos.put("y", 100);
|
||||
gatewayNode.set("position", gatewayPos);
|
||||
|
||||
nodes.add(gatewayNode);
|
||||
|
||||
// 高金额审批节点
|
||||
ObjectNode highApprovalNode = objectMapper.createObjectNode();
|
||||
highApprovalNode.put("id", "high_approval");
|
||||
highApprovalNode.put("name", "高级审批");
|
||||
highApprovalNode.put("type", "approval");
|
||||
|
||||
ObjectNode highPos = objectMapper.createObjectNode();
|
||||
highPos.put("x", 300);
|
||||
highPos.put("y", 50);
|
||||
highApprovalNode.set("position", highPos);
|
||||
|
||||
ObjectNode highConfig = objectMapper.createObjectNode();
|
||||
highConfig.put("assignee", "manager");
|
||||
highApprovalNode.set("config", highConfig);
|
||||
|
||||
nodes.add(highApprovalNode);
|
||||
|
||||
// 低金额审批节点
|
||||
ObjectNode lowApprovalNode = objectMapper.createObjectNode();
|
||||
lowApprovalNode.put("id", "low_approval");
|
||||
lowApprovalNode.put("name", "普通审批");
|
||||
lowApprovalNode.put("type", "approval");
|
||||
|
||||
ObjectNode lowPos = objectMapper.createObjectNode();
|
||||
lowPos.put("x", 300);
|
||||
lowPos.put("y", 150);
|
||||
lowApprovalNode.set("position", lowPos);
|
||||
|
||||
ObjectNode lowConfig = objectMapper.createObjectNode();
|
||||
lowConfig.put("assignee", "supervisor");
|
||||
lowApprovalNode.set("config", lowConfig);
|
||||
|
||||
nodes.add(lowApprovalNode);
|
||||
|
||||
definition.set("nodes", nodes);
|
||||
|
||||
// 连线定义(包含条件)
|
||||
ArrayNode edges = objectMapper.createArrayNode();
|
||||
|
||||
// 高金额分支
|
||||
ObjectNode highEdge = objectMapper.createObjectNode();
|
||||
highEdge.put("source", "amount_check");
|
||||
highEdge.put("target", "high_approval");
|
||||
highEdge.put("condition", "${workflow.input.amount > 1000}");
|
||||
|
||||
edges.add(highEdge);
|
||||
|
||||
// 低金额分支
|
||||
ObjectNode lowEdge = objectMapper.createObjectNode();
|
||||
lowEdge.put("source", "amount_check");
|
||||
lowEdge.put("target", "low_approval");
|
||||
lowEdge.put("condition", "${workflow.input.amount <= 1000}");
|
||||
|
||||
edges.add(lowEdge);
|
||||
|
||||
definition.set("edges", edges);
|
||||
|
||||
workflow.setDefinition(definition);
|
||||
return workflow;
|
||||
}
|
||||
|
||||
/**
|
||||
* 测试表达式解析和数据映射
|
||||
*/
|
||||
@Test
|
||||
public void testExpressionAndDataMapping() throws Exception {
|
||||
// 创建包含数据映射的工作流
|
||||
WorkflowDefinition workflow = createExpressionMappingWorkflow();
|
||||
WorkflowDefinition created = workflowService.create(workflow);
|
||||
WorkflowDefinition activated = workflowService.activate(created.getId());
|
||||
|
||||
// 执行工作流
|
||||
Map<String, Object> input = new HashMap<>();
|
||||
input.put("user", Map.of(
|
||||
"id", "U001",
|
||||
"name", "测试用户",
|
||||
"email", "test@example.com",
|
||||
"department", "IT部门"
|
||||
));
|
||||
input.put("amount", 2500);
|
||||
|
||||
WorkflowExecutionResult result = workflowService.execute(created.getId(), input);
|
||||
|
||||
assertNotNull(result);
|
||||
assertEquals("running", result.getStatus());
|
||||
|
||||
// 验证节点输出数据(表达式解析结果)
|
||||
Map<String, Object> nodes = result.getNodes();
|
||||
assertNotNull(nodes);
|
||||
|
||||
// 这里需要等待一段时间让节点执行完成
|
||||
Thread.sleep(2000);
|
||||
|
||||
// 重新获取执行详情
|
||||
WorkflowService.WorkflowExecutionDetail detail = workflowService
|
||||
.getExecutionDetail(result.getExecutionId());
|
||||
|
||||
assertNotNull(detail);
|
||||
// 验证节点执行的输入输出数据映射是否正确
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建包含表达式和数据映射的工作流
|
||||
*/
|
||||
private WorkflowDefinition createExpressionMappingWorkflow() {
|
||||
WorkflowDefinition workflow = new WorkflowDefinition();
|
||||
workflow.setId("test_expression_mapping");
|
||||
workflow.setName("表达式映射测试工作流");
|
||||
|
||||
ObjectNode definition = objectMapper.createObjectNode();
|
||||
definition.put("id", workflow.getId());
|
||||
definition.put("name", workflow.getName());
|
||||
definition.put("schemaVersion", "1.0");
|
||||
|
||||
// 数据转换节点
|
||||
ArrayNode nodes = objectMapper.createArrayNode();
|
||||
|
||||
ObjectNode transformNode = objectMapper.createObjectNode();
|
||||
transformNode.put("id", "data_transform_1");
|
||||
transformNode.put("name", "数据转换");
|
||||
transformNode.put("type", "data_transform");
|
||||
|
||||
ObjectNode transformPos = objectMapper.createObjectNode();
|
||||
transformPos.put("x", 100);
|
||||
transformPos.put("y", 100);
|
||||
transformNode.set("position", transformPos);
|
||||
|
||||
ObjectNode transformConfig = objectMapper.createObjectNode();
|
||||
transformConfig.put("userEmail", "${workflow.input.user.email}");
|
||||
transformConfig.put("userName", "${workflow.input.user.name}");
|
||||
transformConfig.put("isHighAmount", "${workflow.input.amount > 2000}");
|
||||
transformConfig.put("processInfo", "${workflow.input.user.name} 申请 ${workflow.input.amount} 元");
|
||||
transformNode.set("config", transformConfig);
|
||||
|
||||
nodes.add(transformNode);
|
||||
|
||||
// 条件审批节点
|
||||
ObjectNode conditionalApproval = objectMapper.createObjectNode();
|
||||
conditionalApproval.put("id", "conditional_approval");
|
||||
conditionalApproval.put("name", "条件审批");
|
||||
conditionalApproval.put("type", "approval");
|
||||
|
||||
ObjectNode condPos = objectMapper.createObjectNode();
|
||||
condPos.put("x", 300);
|
||||
condPos.put("y", 100);
|
||||
conditionalApproval.set("position", condPos);
|
||||
|
||||
ObjectNode condConfig = objectMapper.createObjectNode();
|
||||
condConfig.put("assignee", "${nodes.data_transform_1.output.isHighAmount ? 'manager' : 'supervisor'}");
|
||||
condConfig.put("description", "${nodes.data_transform_1.output.processInfo}");
|
||||
conditionalApproval.set("config", condConfig);
|
||||
|
||||
nodes.add(conditionalApproval);
|
||||
|
||||
definition.set("nodes", nodes);
|
||||
|
||||
// 连线
|
||||
ArrayNode edges = objectMapper.createArrayNode();
|
||||
|
||||
ObjectNode edge = objectMapper.createObjectNode();
|
||||
edge.put("source", "data_transform_1");
|
||||
edge.put("target", "conditional_approval");
|
||||
|
||||
edges.add(edge);
|
||||
|
||||
definition.set("edges", edges);
|
||||
|
||||
workflow.setDefinition(definition);
|
||||
return workflow;
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,217 @@
|
||||
package com.flowable.devops.service;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.flowable.devops.entity.NodeType;
|
||||
import com.flowable.devops.repository.NodeTypeRepository;
|
||||
import com.flowable.devops.workflow.node.registry.NodeTypeRegistry;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
/**
|
||||
* NodeTypeService单元测试(不依赖Spring Context)
|
||||
*/
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
public class NodeTypeServiceSimpleTest {
|
||||
|
||||
@Mock
|
||||
private NodeTypeRepository nodeTypeRepository;
|
||||
|
||||
@Mock
|
||||
private NodeTypeRegistry nodeTypeRegistry;
|
||||
|
||||
private NodeTypeService nodeTypeService;
|
||||
private ObjectMapper objectMapper;
|
||||
|
||||
@BeforeEach
|
||||
public void setUp() {
|
||||
nodeTypeService = new NodeTypeService();
|
||||
objectMapper = new ObjectMapper();
|
||||
|
||||
// 使用反射设置私有字段
|
||||
try {
|
||||
var nodeTypeRepoField = NodeTypeService.class.getDeclaredField("nodeTypeRepository");
|
||||
nodeTypeRepoField.setAccessible(true);
|
||||
nodeTypeRepoField.set(nodeTypeService, nodeTypeRepository);
|
||||
|
||||
var nodeTypeRegistryField = NodeTypeService.class.getDeclaredField("nodeTypeRegistry");
|
||||
nodeTypeRegistryField.setAccessible(true);
|
||||
nodeTypeRegistryField.set(nodeTypeService, nodeTypeRegistry);
|
||||
} catch (Exception e) {
|
||||
fail("设置测试字段失败: " + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetAllNodeTypes() {
|
||||
// 准备测试数据
|
||||
NodeType nodeType1 = createTestNodeType("test1", "测试节点1", NodeType.NodeCategory.API);
|
||||
NodeType nodeType2 = createTestNodeType("test2", "测试节点2", NodeType.NodeCategory.LOGIC);
|
||||
|
||||
List<NodeType> mockNodeTypes = Arrays.asList(nodeType1, nodeType2);
|
||||
|
||||
// 配置mock行为
|
||||
when(nodeTypeRepository.findAllByOrderByDisplayOrderAscIdAsc()).thenReturn(mockNodeTypes);
|
||||
|
||||
// 执行测试
|
||||
List<NodeType> result = nodeTypeService.getAllNodeTypes();
|
||||
|
||||
// 验证结果
|
||||
assertNotNull(result);
|
||||
assertEquals(2, result.size());
|
||||
assertEquals("test1", result.get(0).getId());
|
||||
assertEquals("test2", result.get(1).getId());
|
||||
|
||||
// 验证方法被调用
|
||||
verify(nodeTypeRepository).findAllByOrderByDisplayOrderAscIdAsc();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetNodeTypeById() {
|
||||
// 准备测试数据
|
||||
String nodeTypeId = "test-node";
|
||||
NodeType expectedNodeType = createTestNodeType(nodeTypeId, "测试节点", NodeType.NodeCategory.OTHER);
|
||||
|
||||
// 配置mock行为
|
||||
when(nodeTypeRepository.findById(nodeTypeId)).thenReturn(Optional.of(expectedNodeType));
|
||||
|
||||
// 执行测试
|
||||
NodeType result = nodeTypeService.getNodeType(nodeTypeId);
|
||||
|
||||
// 验证结果
|
||||
assertNotNull(result);
|
||||
assertEquals(nodeTypeId, result.getId());
|
||||
assertEquals("测试节点", result.getName());
|
||||
assertEquals(NodeType.NodeCategory.OTHER, result.getCategory());
|
||||
|
||||
// 验证方法被调用
|
||||
verify(nodeTypeRepository).findById(nodeTypeId);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetNodeTypeById_NotFound() {
|
||||
// 准备测试数据
|
||||
String nodeTypeId = "non-existent";
|
||||
|
||||
// 配置mock行为
|
||||
when(nodeTypeRepository.findById(nodeTypeId)).thenReturn(Optional.empty());
|
||||
|
||||
// 执行测试并验证异常
|
||||
assertThrows(NodeTypeNotFoundException.class, () -> {
|
||||
nodeTypeService.getNodeType(nodeTypeId);
|
||||
});
|
||||
|
||||
// 验证方法被调用
|
||||
verify(nodeTypeRepository).findById(nodeTypeId);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetStatistics() {
|
||||
// 配置mock行为
|
||||
when(nodeTypeRepository.count()).thenReturn(10L);
|
||||
when(nodeTypeRepository.countByEnabledTrue()).thenReturn(8L);
|
||||
when(nodeTypeRepository.countByCategory()).thenReturn(Arrays.asList(
|
||||
new Object[]{"API", 3L},
|
||||
new Object[]{"LOGIC", 2L},
|
||||
new Object[]{"OTHER", 5L}
|
||||
));
|
||||
|
||||
// 执行测试
|
||||
NodeTypeService.NodeTypeStatistics stats = nodeTypeService.getStatistics();
|
||||
|
||||
// 验证结果
|
||||
assertNotNull(stats);
|
||||
assertEquals(10L, stats.getTotalCount());
|
||||
assertEquals(8L, stats.getEnabledCount());
|
||||
assertEquals(2L, stats.getDisabledCount());
|
||||
|
||||
assertNotNull(stats.getCategoryStats());
|
||||
assertEquals(3, stats.getCategoryStats().size());
|
||||
assertEquals(3L, stats.getCategoryStats().get("API"));
|
||||
assertEquals(2L, stats.getCategoryStats().get("LOGIC"));
|
||||
assertEquals(5L, stats.getCategoryStats().get("OTHER"));
|
||||
|
||||
// 验证方法被调用
|
||||
verify(nodeTypeRepository).count();
|
||||
verify(nodeTypeRepository).countByEnabledTrue();
|
||||
verify(nodeTypeRepository).countByCategory();
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCreateNodeType() throws Exception {
|
||||
// 准备测试数据
|
||||
NodeType nodeType = createTestNodeType("new-node", "新节点", NodeType.NodeCategory.DATABASE);
|
||||
nodeType.setFields(objectMapper.readTree("[{\"name\":\"test\",\"type\":\"text\"}]"));
|
||||
nodeType.setOutputSchema(objectMapper.readTree("{\"type\":\"object\"}"));
|
||||
nodeType.setDisplayOrder(null); // 清空displayOrder以便测试自动分配逻辑
|
||||
|
||||
// 配置mock行为
|
||||
when(nodeTypeRepository.existsById("new-node")).thenReturn(false);
|
||||
when(nodeTypeRepository.findMaxDisplayOrderByCategory(NodeType.NodeCategory.DATABASE)).thenReturn(10);
|
||||
when(nodeTypeRepository.save(any(NodeType.class))).thenAnswer(invocation -> {
|
||||
NodeType saved = invocation.getArgument(0);
|
||||
// 模拟设置创建时间等
|
||||
return saved;
|
||||
});
|
||||
|
||||
// 执行测试
|
||||
NodeType result = nodeTypeService.createNodeType(nodeType);
|
||||
|
||||
// 验证结果
|
||||
assertNotNull(result);
|
||||
assertEquals("new-node", result.getId());
|
||||
assertEquals("新节点", result.getName());
|
||||
assertEquals(NodeType.NodeCategory.DATABASE, result.getCategory());
|
||||
assertEquals(20, result.getDisplayOrder()); // 10 + 10
|
||||
|
||||
// 验证方法被调用
|
||||
verify(nodeTypeRepository).existsById("new-node");
|
||||
verify(nodeTypeRepository).findMaxDisplayOrderByCategory(NodeType.NodeCategory.DATABASE);
|
||||
verify(nodeTypeRepository).save(nodeType);
|
||||
verify(nodeTypeRegistry).registerFromDatabase(result);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCreateNodeType_DuplicateId() {
|
||||
// 准备测试数据
|
||||
NodeType nodeType = createTestNodeType("existing-node", "已存在节点", NodeType.NodeCategory.API);
|
||||
|
||||
// 配置mock行为
|
||||
when(nodeTypeRepository.existsById("existing-node")).thenReturn(true);
|
||||
|
||||
// 执行测试并验证异常
|
||||
assertThrows(NodeTypeServiceException.class, () -> {
|
||||
nodeTypeService.createNodeType(nodeType);
|
||||
});
|
||||
|
||||
// 验证方法被调用
|
||||
verify(nodeTypeRepository).existsById("existing-node");
|
||||
verify(nodeTypeRepository, never()).save(any(NodeType.class));
|
||||
}
|
||||
|
||||
/**
|
||||
* 创建测试用的NodeType对象
|
||||
*/
|
||||
private NodeType createTestNodeType(String id, String name, NodeType.NodeCategory category) {
|
||||
NodeType nodeType = new NodeType();
|
||||
nodeType.setId(id);
|
||||
nodeType.setName(name);
|
||||
nodeType.setDisplayName(name);
|
||||
nodeType.setCategory(category);
|
||||
nodeType.setDescription("测试用节点类型");
|
||||
nodeType.setIcon("test-icon");
|
||||
nodeType.setEnabled(true);
|
||||
nodeType.setDisplayOrder(10);
|
||||
nodeType.setImplementationClass("com.test.TestNode");
|
||||
return nodeType;
|
||||
}
|
||||
}
|
||||
@ -0,0 +1,148 @@
|
||||
package com.flowable.devops.service;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.flowable.devops.entity.NodeType;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.test.context.SpringBootTest;
|
||||
import org.springframework.test.context.ActiveProfiles;
|
||||
import java.util.List;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
|
||||
/**
|
||||
* NodeTypeService集成测试
|
||||
*
|
||||
* 注意:不使用@Transactional,避免与数据初始化的事务隔离问题
|
||||
*/
|
||||
@SpringBootTest
|
||||
@ActiveProfiles("test")
|
||||
public class NodeTypeServiceTest {
|
||||
|
||||
@Autowired
|
||||
private NodeTypeService nodeTypeService;
|
||||
|
||||
@Autowired
|
||||
private ObjectMapper objectMapper;
|
||||
|
||||
@Test
|
||||
public void testGetAllNodeTypes() {
|
||||
// 测试获取所有节点类型
|
||||
List<NodeType> nodeTypes = nodeTypeService.getAllNodeTypes();
|
||||
|
||||
// 即使没有数据初始化,也要能正常返回(可能为空列表)
|
||||
assertNotNull(nodeTypes);
|
||||
|
||||
// 如果有数据,验证基本结构
|
||||
if (!nodeTypes.isEmpty()) {
|
||||
// 验证节点类型基本属性不为空
|
||||
nodeTypes.forEach(nodeType -> {
|
||||
assertNotNull(nodeType.getId(), "节点类型ID不能为空");
|
||||
assertNotNull(nodeType.getName(), "节点类型名称不能为空");
|
||||
assertNotNull(nodeType.getCategory(), "节点类型分类不能为空");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCreateAndGetNodeType() throws Exception {
|
||||
// 创建测试节点类型
|
||||
NodeType testNodeType = new NodeType();
|
||||
testNodeType.setId("test-node");
|
||||
testNodeType.setName("测试节点");
|
||||
testNodeType.setDescription("用于测试的节点类型");
|
||||
testNodeType.setCategory(NodeType.NodeCategory.OTHER);
|
||||
testNodeType.setIcon("test-icon");
|
||||
testNodeType.setEnabled(true);
|
||||
testNodeType.setDisplayOrder(999);
|
||||
|
||||
// 设置简单的字段定义
|
||||
String fieldsJson = "[{\"name\":\"testField\",\"type\":\"text\",\"required\":true}]";
|
||||
testNodeType.setFields(objectMapper.readTree(fieldsJson));
|
||||
|
||||
// 设置输出模式
|
||||
String outputSchemaJson = "{\"type\":\"object\",\"properties\":{\"result\":{\"type\":\"string\"}}}";
|
||||
testNodeType.setOutputSchema(objectMapper.readTree(outputSchemaJson));
|
||||
|
||||
// 创建节点类型
|
||||
NodeType created = nodeTypeService.createNodeType(testNodeType);
|
||||
|
||||
assertNotNull(created);
|
||||
assertEquals("test-node", created.getId());
|
||||
assertEquals("测试节点", created.getName());
|
||||
assertEquals(NodeType.NodeCategory.OTHER, created.getCategory());
|
||||
assertTrue(created.isEnabled());
|
||||
assertNotNull(created.getCreatedAt());
|
||||
assertNotNull(created.getUpdatedAt());
|
||||
|
||||
// 获取创建的节点类型
|
||||
NodeType retrieved = nodeTypeService.getNodeType("test-node");
|
||||
|
||||
assertNotNull(retrieved);
|
||||
assertEquals(created.getId(), retrieved.getId());
|
||||
assertEquals(created.getName(), retrieved.getName());
|
||||
assertEquals(created.getCategory(), retrieved.getCategory());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetCategories() {
|
||||
// 测试获取节点类型分类
|
||||
List<String> categories = nodeTypeService.getCategories();
|
||||
|
||||
assertNotNull(categories);
|
||||
// 即使没有数据,也能正常返回空列表
|
||||
|
||||
// 如果有分类数据,验证内容不为空
|
||||
categories.forEach(category -> {
|
||||
assertNotNull(category, "分类不能为空");
|
||||
assertFalse(category.trim().isEmpty(), "分类内容不能为空字符串");
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testGetStatistics() {
|
||||
// 测试获取统计信息
|
||||
NodeTypeService.NodeTypeStatistics stats = nodeTypeService.getStatistics();
|
||||
|
||||
assertNotNull(stats);
|
||||
assertTrue(stats.getTotalCount() >= 0, "总数量应该大于等于0");
|
||||
assertTrue(stats.getEnabledCount() >= 0, "启用数量应该大于等于0");
|
||||
assertTrue(stats.getDisabledCount() >= 0, "禁用数量应该大于等于0");
|
||||
assertEquals(stats.getTotalCount(), stats.getEnabledCount() + stats.getDisabledCount(), "总数 = 启用数 + 禁用数");
|
||||
|
||||
assertNotNull(stats.getCategoryStats(), "分类统计不能为null");
|
||||
// 验证分类统计内容
|
||||
stats.getCategoryStats().forEach((category, count) -> {
|
||||
assertNotNull(category, "分类不能为null");
|
||||
assertTrue(count >= 0, "分类数量应该大于等于0");
|
||||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testToggleEnabled() throws Exception {
|
||||
// 先创建一个测试节点
|
||||
NodeType testNodeType = new NodeType();
|
||||
testNodeType.setId("test-toggle-node");
|
||||
testNodeType.setName("测试切换节点");
|
||||
testNodeType.setDescription("用于测试启用/禁用的节点类型");
|
||||
testNodeType.setCategory(NodeType.NodeCategory.OTHER);
|
||||
testNodeType.setIcon("toggle-icon");
|
||||
testNodeType.setEnabled(true);
|
||||
|
||||
String fieldsJson = "[]";
|
||||
testNodeType.setFields(objectMapper.readTree(fieldsJson));
|
||||
String outputSchemaJson = "{\"type\":\"object\"}";
|
||||
testNodeType.setOutputSchema(objectMapper.readTree(outputSchemaJson));
|
||||
|
||||
NodeType created = nodeTypeService.createNodeType(testNodeType);
|
||||
assertTrue(created.isEnabled());
|
||||
|
||||
// 禁用节点
|
||||
NodeType disabled = nodeTypeService.toggleEnabled("test-toggle-node", false);
|
||||
assertFalse(disabled.isEnabled());
|
||||
|
||||
// 重新启用节点
|
||||
NodeType enabled = nodeTypeService.toggleEnabled("test-toggle-node", true);
|
||||
assertTrue(enabled.isEnabled());
|
||||
}
|
||||
}
|
||||
40
backend/src/test/resources/application-test.yml
Normal file
40
backend/src/test/resources/application-test.yml
Normal file
@ -0,0 +1,40 @@
|
||||
spring:
|
||||
datasource:
|
||||
url: ${SPRING_DATASOURCE_URL:jdbc:mysql://localhost:3306/flowable-devops?useSSL=false&allowPublicKeyRetrieval=true&serverTimezone=UTC&connectTimeout=60000&socketTimeout=60000&autoReconnect=true}
|
||||
username: ${SPRING_DATASOURCE_USERNAME:flowable-devops}
|
||||
password: ${SPRING_DATASOURCE_PASSWORD:Qichen5210523}
|
||||
driver-class-name: com.mysql.cj.jdbc.Driver
|
||||
hikari:
|
||||
connection-timeout: 60000
|
||||
socket-timeout: 60000
|
||||
maximum-pool-size: 5
|
||||
minimum-idle: 1
|
||||
idle-timeout: 300000
|
||||
validation-timeout: 30000
|
||||
jpa:
|
||||
hibernate:
|
||||
ddl-auto: create-drop
|
||||
properties:
|
||||
hibernate:
|
||||
dialect: org.hibernate.dialect.MySQLDialect
|
||||
show-sql: false
|
||||
data:
|
||||
redis:
|
||||
# 测试环境使用嵌入式Redis替代品或Mock
|
||||
host: localhost
|
||||
port: 6379
|
||||
database: 0
|
||||
|
||||
flowable:
|
||||
# 测试环境数据库配置
|
||||
database-type: mysql
|
||||
database-schema-update: create-drop
|
||||
check-process-definitions: false
|
||||
async-executor-activate: false
|
||||
history-level: audit
|
||||
|
||||
logging:
|
||||
level:
|
||||
org.flowable: WARN
|
||||
org.springframework: WARN
|
||||
org.hibernate: WARN
|
||||
209
backend/test-workflow.json
Normal file
209
backend/test-workflow.json
Normal file
@ -0,0 +1,209 @@
|
||||
{
|
||||
"id": "test-workflow-001",
|
||||
"name": "端到端测试工作流",
|
||||
"description": "用于测试节点间输入输出映射的工作流",
|
||||
"version": "1.0",
|
||||
"nodes": [
|
||||
{
|
||||
"id": "start",
|
||||
"type": "start",
|
||||
"name": "开始",
|
||||
"position": { "x": 100, "y": 200 },
|
||||
"config": {},
|
||||
"inputMapping": {},
|
||||
"outputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"workflow": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": { "type": "string" },
|
||||
"executionId": { "type": "string" },
|
||||
"startTime": { "type": "string" }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "nodeA",
|
||||
"type": "http_request",
|
||||
"name": "节点A - HTTP请求",
|
||||
"position": { "x": 300, "y": 200 },
|
||||
"config": {
|
||||
"url": "https://httpbin.org/json",
|
||||
"method": "GET",
|
||||
"headers": {}
|
||||
},
|
||||
"inputMapping": {
|
||||
"url": "https://httpbin.org/json",
|
||||
"method": "GET"
|
||||
},
|
||||
"outputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"httpStatusCode": { "type": "integer" },
|
||||
"responseBody": { "type": "object" },
|
||||
"responseHeaders": { "type": "object" },
|
||||
"requestUrl": { "type": "string" }
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "nodeB",
|
||||
"type": "http_request",
|
||||
"name": "节点B - 处理A的输出",
|
||||
"position": { "x": 500, "y": 200 },
|
||||
"config": {
|
||||
"url": "https://httpbin.org/post",
|
||||
"method": "POST",
|
||||
"headers": {
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
},
|
||||
"inputMapping": {
|
||||
"url": "https://httpbin.org/post",
|
||||
"method": "POST",
|
||||
"body": "${nodes.nodeA.output.responseBody}",
|
||||
"headers": {
|
||||
"Content-Type": "application/json",
|
||||
"X-Source-Status": "${nodes.nodeA.output.httpStatusCode}"
|
||||
}
|
||||
},
|
||||
"outputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"httpStatusCode": { "type": "integer" },
|
||||
"responseBody": { "type": "object" },
|
||||
"sourceData": { "type": "object" },
|
||||
"processedAt": { "type": "string" }
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "nodeC",
|
||||
"type": "http_request",
|
||||
"name": "节点C - 聚合AB数据",
|
||||
"position": { "x": 700, "y": 200 },
|
||||
"config": {
|
||||
"url": "https://httpbin.org/put",
|
||||
"method": "PUT",
|
||||
"headers": {
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
},
|
||||
"inputMapping": {
|
||||
"url": "https://httpbin.org/put",
|
||||
"method": "PUT",
|
||||
"body": {
|
||||
"nodeA_result": "${nodes.nodeA.output}",
|
||||
"nodeB_result": "${nodes.nodeB.output}",
|
||||
"combined_status": "${nodes.nodeA.output.httpStatusCode + nodes.nodeB.output.httpStatusCode}"
|
||||
},
|
||||
"headers": {
|
||||
"Content-Type": "application/json",
|
||||
"X-Node-A-Status": "${nodes.nodeA.output.httpStatusCode}",
|
||||
"X-Node-B-Status": "${nodes.nodeB.output.httpStatusCode}"
|
||||
}
|
||||
},
|
||||
"outputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"httpStatusCode": { "type": "integer" },
|
||||
"responseBody": { "type": "object" },
|
||||
"aggregatedData": { "type": "object" },
|
||||
"finalResult": { "type": "string" }
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "nodeD",
|
||||
"type": "http_request",
|
||||
"name": "节点D - 最终处理",
|
||||
"position": { "x": 900, "y": 200 },
|
||||
"config": {
|
||||
"url": "https://httpbin.org/patch",
|
||||
"method": "PATCH",
|
||||
"headers": {
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
},
|
||||
"inputMapping": {
|
||||
"url": "https://httpbin.org/patch",
|
||||
"method": "PATCH",
|
||||
"body": {
|
||||
"workflow_summary": {
|
||||
"nodeA_status": "${nodes.nodeA.output.httpStatusCode}",
|
||||
"nodeB_status": "${nodes.nodeB.output.httpStatusCode}",
|
||||
"nodeC_status": "${nodes.nodeC.output.httpStatusCode}",
|
||||
"total_requests": 4,
|
||||
"execution_chain": "Start → A → B → C → D"
|
||||
}
|
||||
},
|
||||
"headers": {
|
||||
"Content-Type": "application/json",
|
||||
"X-Final-Step": "true"
|
||||
}
|
||||
},
|
||||
"outputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"httpStatusCode": { "type": "integer" },
|
||||
"responseBody": { "type": "object" },
|
||||
"workflowSummary": { "type": "object" },
|
||||
"completed": { "type": "boolean" }
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "end",
|
||||
"type": "end",
|
||||
"name": "结束",
|
||||
"position": { "x": 1100, "y": 200 },
|
||||
"config": {},
|
||||
"inputMapping": {
|
||||
"finalResult": "${nodes.nodeD.output}",
|
||||
"workflowComplete": true
|
||||
},
|
||||
"outputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"workflowResult": { "type": "object" },
|
||||
"completed": { "type": "boolean" }
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"edges": [
|
||||
{
|
||||
"id": "start-to-nodeA",
|
||||
"source": "start",
|
||||
"target": "nodeA",
|
||||
"condition": null
|
||||
},
|
||||
{
|
||||
"id": "nodeA-to-nodeB",
|
||||
"source": "nodeA",
|
||||
"target": "nodeB",
|
||||
"condition": "${nodes.nodeA.output.httpStatusCode == 200}"
|
||||
},
|
||||
{
|
||||
"id": "nodeB-to-nodeC",
|
||||
"source": "nodeB",
|
||||
"target": "nodeC",
|
||||
"condition": "${nodes.nodeB.output.httpStatusCode == 200}"
|
||||
},
|
||||
{
|
||||
"id": "nodeC-to-nodeD",
|
||||
"source": "nodeC",
|
||||
"target": "nodeD",
|
||||
"condition": "${nodes.nodeC.output.httpStatusCode == 200}"
|
||||
},
|
||||
{
|
||||
"id": "nodeD-to-end",
|
||||
"source": "nodeD",
|
||||
"target": "end",
|
||||
"condition": null
|
||||
}
|
||||
]
|
||||
}
|
||||
215
backend/workflow-request.json
Normal file
215
backend/workflow-request.json
Normal file
@ -0,0 +1,215 @@
|
||||
{
|
||||
"id": "test-workflow-001",
|
||||
"name": "端到端测试工作流",
|
||||
"description": "用于测试节点间输入输出映射的工作流",
|
||||
"status": "DRAFT",
|
||||
"definition": {
|
||||
"id": "test-workflow-001",
|
||||
"name": "端到端测试工作流",
|
||||
"description": "用于测试节点间输入输出映射的工作流",
|
||||
"version": "1.0",
|
||||
"nodes": [
|
||||
{
|
||||
"id": "start",
|
||||
"type": "start",
|
||||
"name": "开始",
|
||||
"position": { "x": 100, "y": 200 },
|
||||
"config": {},
|
||||
"inputMapping": {},
|
||||
"outputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"workflow": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": { "type": "string" },
|
||||
"executionId": { "type": "string" },
|
||||
"startTime": { "type": "string" }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "nodeA",
|
||||
"type": "http_request",
|
||||
"name": "节点A - HTTP请求",
|
||||
"position": { "x": 300, "y": 200 },
|
||||
"config": {
|
||||
"url": "https://httpbin.org/json",
|
||||
"method": "GET",
|
||||
"headers": {}
|
||||
},
|
||||
"inputMapping": {
|
||||
"url": "https://httpbin.org/json",
|
||||
"method": "GET"
|
||||
},
|
||||
"outputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"httpStatusCode": { "type": "integer" },
|
||||
"responseBody": { "type": "object" },
|
||||
"responseHeaders": { "type": "object" },
|
||||
"requestUrl": { "type": "string" }
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "nodeB",
|
||||
"type": "http_request",
|
||||
"name": "节点B - 处理A的输出",
|
||||
"position": { "x": 500, "y": 200 },
|
||||
"config": {
|
||||
"url": "https://httpbin.org/post",
|
||||
"method": "POST",
|
||||
"headers": {
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
},
|
||||
"inputMapping": {
|
||||
"url": "https://httpbin.org/post",
|
||||
"method": "POST",
|
||||
"body": "${nodes.nodeA.output.responseBody}",
|
||||
"headers": {
|
||||
"Content-Type": "application/json",
|
||||
"X-Source-Status": "${nodes.nodeA.output.httpStatusCode}"
|
||||
}
|
||||
},
|
||||
"outputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"httpStatusCode": { "type": "integer" },
|
||||
"responseBody": { "type": "object" },
|
||||
"sourceData": { "type": "object" },
|
||||
"processedAt": { "type": "string" }
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "nodeC",
|
||||
"type": "http_request",
|
||||
"name": "节点C - 聚合AB数据",
|
||||
"position": { "x": 700, "y": 200 },
|
||||
"config": {
|
||||
"url": "https://httpbin.org/put",
|
||||
"method": "PUT",
|
||||
"headers": {
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
},
|
||||
"inputMapping": {
|
||||
"url": "https://httpbin.org/put",
|
||||
"method": "PUT",
|
||||
"body": {
|
||||
"nodeA_result": "${nodes.nodeA.output}",
|
||||
"nodeB_result": "${nodes.nodeB.output}",
|
||||
"combined_status": "${nodes.nodeA.output.httpStatusCode + nodes.nodeB.output.httpStatusCode}"
|
||||
},
|
||||
"headers": {
|
||||
"Content-Type": "application/json",
|
||||
"X-Node-A-Status": "${nodes.nodeA.output.httpStatusCode}",
|
||||
"X-Node-B-Status": "${nodes.nodeB.output.httpStatusCode}"
|
||||
}
|
||||
},
|
||||
"outputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"httpStatusCode": { "type": "integer" },
|
||||
"responseBody": { "type": "object" },
|
||||
"aggregatedData": { "type": "object" },
|
||||
"finalResult": { "type": "string" }
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "nodeD",
|
||||
"type": "http_request",
|
||||
"name": "节点D - 最终处理",
|
||||
"position": { "x": 900, "y": 200 },
|
||||
"config": {
|
||||
"url": "https://httpbin.org/patch",
|
||||
"method": "PATCH",
|
||||
"headers": {
|
||||
"Content-Type": "application/json"
|
||||
}
|
||||
},
|
||||
"inputMapping": {
|
||||
"url": "https://httpbin.org/patch",
|
||||
"method": "PATCH",
|
||||
"body": {
|
||||
"workflow_summary": {
|
||||
"nodeA_status": "${nodes.nodeA.output.httpStatusCode}",
|
||||
"nodeB_status": "${nodes.nodeB.output.httpStatusCode}",
|
||||
"nodeC_status": "${nodes.nodeC.output.httpStatusCode}",
|
||||
"total_requests": 4,
|
||||
"execution_chain": "Start → A → B → C → D"
|
||||
}
|
||||
},
|
||||
"headers": {
|
||||
"Content-Type": "application/json",
|
||||
"X-Final-Step": "true"
|
||||
}
|
||||
},
|
||||
"outputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"httpStatusCode": { "type": "integer" },
|
||||
"responseBody": { "type": "object" },
|
||||
"workflowSummary": { "type": "object" },
|
||||
"completed": { "type": "boolean" }
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"id": "end",
|
||||
"type": "end",
|
||||
"name": "结束",
|
||||
"position": { "x": 1100, "y": 200 },
|
||||
"config": {},
|
||||
"inputMapping": {
|
||||
"finalResult": "${nodes.nodeD.output}",
|
||||
"workflowComplete": true
|
||||
},
|
||||
"outputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"workflowResult": { "type": "object" },
|
||||
"completed": { "type": "boolean" }
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"edges": [
|
||||
{
|
||||
"id": "start-to-nodeA",
|
||||
"source": "start",
|
||||
"target": "nodeA",
|
||||
"condition": null
|
||||
},
|
||||
{
|
||||
"id": "nodeA-to-nodeB",
|
||||
"source": "nodeA",
|
||||
"target": "nodeB",
|
||||
"condition": "${nodes.nodeA.output.httpStatusCode == 200}"
|
||||
},
|
||||
{
|
||||
"id": "nodeB-to-nodeC",
|
||||
"source": "nodeB",
|
||||
"target": "nodeC",
|
||||
"condition": "${nodes.nodeB.output.httpStatusCode == 200}"
|
||||
},
|
||||
{
|
||||
"id": "nodeC-to-nodeD",
|
||||
"source": "nodeC",
|
||||
"target": "nodeD",
|
||||
"condition": "${nodes.nodeC.output.httpStatusCode == 200}"
|
||||
},
|
||||
{
|
||||
"id": "nodeD-to-end",
|
||||
"source": "nodeD",
|
||||
"target": "end",
|
||||
"condition": null
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
766
docs/01-架构总览.md
Normal file
766
docs/01-架构总览.md
Normal file
@ -0,0 +1,766 @@
|
||||
# 可视化工作流平台 - 架构总览
|
||||
|
||||
**版本**: v1.0
|
||||
**日期**: 2025-01-12
|
||||
**审核角度**: 产品经理 + 架构师
|
||||
|
||||
---
|
||||
|
||||
## 一、系统定位
|
||||
|
||||
### 1.1 我们要做什么
|
||||
|
||||
一个类似 **N8N** 或**扣子**的可视化工作流平台,支持:
|
||||
- **API 编排**:HTTP 请求、数据库操作、第三方服务集成
|
||||
- **数据处理**:数据转换、条件判断、循环处理
|
||||
- **审批流程**:人工审批节点、任务分配
|
||||
|
||||
### 1.2 不做什么(重要)
|
||||
|
||||
```
|
||||
第一期(MVP)不做:
|
||||
❌ 复杂的权限系统(只做基础用户认证)
|
||||
❌ 多租户隔离
|
||||
❌ 实时协作编辑(多人同时编辑一个工作流)
|
||||
❌ 工作流版本管理(后续版本再做)
|
||||
❌ 复杂的监控报表(只做基础执行记录)
|
||||
❌ AI 辅助生成工作流
|
||||
❌ 插件市场
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 二、核心技术选型
|
||||
|
||||
### 2.1 技术栈
|
||||
|
||||
```yaml
|
||||
后端:
|
||||
核心框架: Spring Boot 3.2
|
||||
工作流引擎: Flowable 7.0.1
|
||||
数据库: MySQL 8.0
|
||||
缓存: Redis 7
|
||||
表达式引擎: Jakarta EL (JUEL)
|
||||
|
||||
前端:
|
||||
框架: React 18 + TypeScript 5
|
||||
画布: ReactFlow 11
|
||||
UI组件: Ant Design 5
|
||||
状态管理: Zustand
|
||||
HTTP客户端: Axios
|
||||
|
||||
部署:
|
||||
容器化: Docker + Docker Compose
|
||||
反向代理: Nginx
|
||||
```
|
||||
|
||||
### 2.2 为什么选择 Flowable?
|
||||
|
||||
**✅ 优势**(实际验证过的):
|
||||
1. **开源版功能完整**:不需要购买企业版就能用
|
||||
2. **内置审批能力**:User Task 开箱即用
|
||||
3. **表单引擎**:可以快速实现动态表单
|
||||
4. **Spring Boot 集成好**:一个依赖就能启动
|
||||
5. **中文资料多**:国内使用广泛,遇到问题容易找到答案
|
||||
|
||||
**⚠️ 劣势**(需要规避的):
|
||||
1. **BPMN 太重**:我们要隐藏 BPMN 细节,用户不需要懂
|
||||
2. **Modeler 难定制**:官方 Modeler 是 Angular 的,我们要自研前端
|
||||
3. **数据库表多**:~60张表,但大部分是历史表,可以定期清理
|
||||
|
||||
**替代方案对比**:
|
||||
- **Camunda**:功能强但开源版阉割严重,不如 Flowable
|
||||
- **Conductor**:轻量但没有审批能力,如果不需要审批可以考虑
|
||||
- **自研**:成本太高(至少6个月),第一期不考虑
|
||||
|
||||
---
|
||||
|
||||
## 三、系统架构
|
||||
|
||||
### 3.1 整体架构图
|
||||
|
||||
```
|
||||
┌──────────────────────────────────────────────────────────────┐
|
||||
│ 用户浏览器 │
|
||||
│ │
|
||||
│ ┌─────────────────┐ ┌─────────────────┐ ┌──────────────┐ │
|
||||
│ │ 工作流编辑器 │ │ 节点配置面板 │ │ 审批中心 │ │
|
||||
│ │ (ReactFlow) │ │ (动态表单) │ │ (任务列表) │ │
|
||||
│ └─────────────────┘ └─────────────────┘ └──────────────┘ │
|
||||
└────────────────────────┬─────────────────────────────────────┘
|
||||
│ HTTPS (REST API)
|
||||
↓
|
||||
┌──────────────────────────────────────────────────────────────┐
|
||||
│ Nginx (反向代理) │
|
||||
└────────────────────────┬─────────────────────────────────────┘
|
||||
│
|
||||
↓
|
||||
┌──────────────────────────────────────────────────────────────┐
|
||||
│ Spring Boot 应用 │
|
||||
│ │
|
||||
│ ┌────────────────────────────────────────────────────────┐ │
|
||||
│ │ REST API 层 │ │
|
||||
│ │ - /api/workflows (工作流管理) │ │
|
||||
│ │ - /api/nodes (节点类型注册) │ │
|
||||
│ │ - /api/executions (执行管理) │ │
|
||||
│ │ - /api/tasks (审批任务) │ │
|
||||
│ └────────────────────────────────────────────────────────┘ │
|
||||
│ ↓ │
|
||||
│ ┌────────────────────────────────────────────────────────┐ │
|
||||
│ │ 业务逻辑层 │ │
|
||||
│ │ - WorkflowService (工作流转换和部署) │ │
|
||||
│ │ - NodeTypeRegistry (节点类型管理) │ │
|
||||
│ │ - ExpressionEngine (表达式解析) │ │
|
||||
│ │ - NodeExecutor (节点执行) │ │
|
||||
│ └────────────────────────────────────────────────────────┘ │
|
||||
│ ↓ │
|
||||
│ ┌────────────────────────────────────────────────────────┐ │
|
||||
│ │ Flowable Engine │ │
|
||||
│ │ - RuntimeService (流程实例管理) │ │
|
||||
│ │ - TaskService (任务管理) │ │
|
||||
│ │ - RepositoryService (流程定义管理) │ │
|
||||
│ │ - HistoryService (历史记录) │ │
|
||||
│ └────────────────────────────────────────────────────────┘ │
|
||||
└────────────────────────┬─────────────────────────────────────┘
|
||||
│
|
||||
↓
|
||||
┌──────────────────────────────────────────────────────────────┐
|
||||
│ MySQL │
|
||||
│ │
|
||||
│ Flowable 表 (~60张): │
|
||||
│ - ACT_RE_* (流程定义) │
|
||||
│ - ACT_RU_* (运行时数据) │
|
||||
│ - ACT_HI_* (历史数据) │
|
||||
│ │
|
||||
│ 业务表: │
|
||||
│ - workflow_definitions (工作流定义 - JSON 格式) │
|
||||
│ - node_types (节点类型元数据) │
|
||||
│ - workflow_executions (执行记录扩展) │
|
||||
└──────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
### 3.2 核心数据流
|
||||
|
||||
**场景1:创建和保存工作流**
|
||||
|
||||
```
|
||||
1. 用户在前端拖拽节点、配置参数 (ReactFlow)
|
||||
↓
|
||||
2. 前端生成 JSON 工作流定义
|
||||
{
|
||||
"nodes": [...],
|
||||
"edges": [...],
|
||||
"variables": {...}
|
||||
}
|
||||
↓
|
||||
3. POST /api/workflows
|
||||
↓
|
||||
4. 后端保存到 workflow_definitions 表
|
||||
↓
|
||||
5. 转换为 BPMN XML (Flowable 格式)
|
||||
↓
|
||||
6. 部署到 Flowable (RepositoryService)
|
||||
↓
|
||||
7. 返回 processDefinitionId
|
||||
```
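
上述第 5–7 步(转换、部署、返回 processDefinitionId)的一个最小示意如下。其中 `WorkflowDeployService` 为示意类名,`WorkflowConverter` 假设即 5.1 节描述的转换器(`convertToBpmn` 返回 BPMN XML 字符串),仅说明 Flowable `RepositoryService` 的调用方式,并非最终实现:

```java
import org.flowable.engine.RepositoryService;
import org.flowable.engine.repository.Deployment;
import org.flowable.engine.repository.ProcessDefinition;
import org.springframework.stereotype.Service;

import com.flowable.devops.entity.WorkflowDefinition;

@Service
public class WorkflowDeployService {

    private final RepositoryService repositoryService;
    private final WorkflowConverter converter; // 假设:5.1 节的 JSON↔BPMN 转换器

    public WorkflowDeployService(RepositoryService repositoryService, WorkflowConverter converter) {
        this.repositoryService = repositoryService;
        this.converter = converter;
    }

    /** 将 JSON 定义转换为 BPMN 并部署,返回 processDefinitionId */
    public String deploy(WorkflowDefinition definition) {
        String bpmnXml = converter.convertToBpmn(definition);

        // 资源名需以 .bpmn20.xml 结尾,Flowable 才会将其解析为流程定义
        Deployment deployment = repositoryService.createDeployment()
                .name(definition.getName())
                .addString(definition.getId() + ".bpmn20.xml", bpmnXml)
                .deploy();

        ProcessDefinition processDefinition = repositoryService.createProcessDefinitionQuery()
                .deploymentId(deployment.getId())
                .singleResult();
        return processDefinition.getId();
    }
}
```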
|
||||
|
||||
**场景2:执行工作流**
|
||||
|
||||
```
|
||||
1. POST /api/workflows/{id}/execute
|
||||
↓
|
||||
2. 初始化执行上下文:
|
||||
{
|
||||
"workflow": { "input": {...} },
|
||||
"nodes": {},
|
||||
"env": {...}
|
||||
}
|
||||
↓
|
||||
3. Flowable 启动流程实例 (RuntimeService)
|
||||
↓
|
||||
4. 按拓扑顺序执行节点(Service Task)
|
||||
↓
|
||||
5. 每个节点执行:
|
||||
a. 解析表达式 (ExpressionEngine)
|
||||
b. 调用节点实现类 (HttpRequestNode, DatabaseNode...)
|
||||
c. 保存输出到 execution.variables["nodes"][nodeId]
|
||||
↓
|
||||
6. 节点间数据通过表达式传递:
|
||||
${nodes.node1.output.body.email}
|
||||
↓
|
||||
7. 遇到 User Task(审批节点)时暂停
|
||||
↓
|
||||
8. 审批完成后继续执行
|
||||
↓
|
||||
9. 流程结束,保存历史记录
|
||||
```
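
第 4–5 步里"解析表达式 → 执行节点 → 输出写回 `nodes.<nodeId>`"的数据流,可以用一个 Flowable `JavaDelegate` 做最小示意。注意 `currentNodeId`、`nodes` 等变量名以及 `ExpressionEngine` 的接口形态均为示意假设,节点分发逻辑从略:

```java
import java.util.HashMap;
import java.util.Map;

import org.flowable.engine.delegate.DelegateExecution;
import org.flowable.engine.delegate.JavaDelegate;

/**
 * 示意:节点执行委托(由 Service Task 调用),只演示数据流,非最终实现。
 */
public class NodeExecutionDelegate implements JavaDelegate {

    private final ExpressionEngine expressionEngine; // 假设:4.2 节描述的表达式引擎

    public NodeExecutionDelegate(ExpressionEngine expressionEngine) {
        this.expressionEngine = expressionEngine;
    }

    @Override
    @SuppressWarnings("unchecked")
    public void execute(DelegateExecution execution) {
        String nodeId = (String) execution.getVariable("currentNodeId");
        Map<String, Object> config = (Map<String, Object>) execution.getVariable(nodeId + "_config");

        // 1. 逐字段解析 ${...} 表达式,上下文即当前流程变量(含 workflow.input 与已执行节点的输出)
        Map<String, Object> resolved = new HashMap<>();
        config.forEach((key, value) -> resolved.put(key,
                value instanceof String s ? expressionEngine.evaluate(s, execution.getVariables()) : value));

        // 2. 调用具体节点实现(此处省略,见 4.3 节的 WorkflowNode 接口)
        Map<String, Object> output = runNode(nodeId, resolved);

        // 3. 输出写回 nodes.<nodeId>.output,下游节点即可用 ${nodes.xxx.output...} 引用
        Map<String, Object> nodes = (Map<String, Object>) execution.getVariable("nodes");
        Map<String, Object> updated = nodes == null ? new HashMap<>() : new HashMap<>(nodes);
        updated.put(nodeId, Map.of("output", output));
        execution.setVariable("nodes", updated);
    }

    private Map<String, Object> runNode(String nodeId, Map<String, Object> resolvedConfig) {
        return Map.of(); // 占位:实际由 NodeTypeRegistry 分发到对应节点实现
    }
}
```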
|
||||
|
||||
---
|
||||
|
||||
## 四、关键技术难点与解决方案
|
||||
|
||||
### 4.1 难点1:前端如何知道上游节点的输出结构?
|
||||
|
||||
**问题**:
|
||||
用户在配置节点2时,如何知道节点1输出了哪些字段?比如 HTTP 节点返回了什么数据?
|
||||
|
||||
**❌ 错误方案**:
|
||||
```
|
||||
方案A: 动态执行节点1来获取输出
|
||||
→ 不可行!每次配置都要执行,成本太高
|
||||
```
|
||||
|
||||
**✅ 正确方案**:
|
||||
```
|
||||
方案B: 静态输出结构定义(JSON Schema)
|
||||
|
||||
每种节点类型定义 outputSchema:
|
||||
|
||||
{
|
||||
"type": "http_request",
|
||||
"outputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"statusCode": { "type": "number" },
|
||||
"body": { "type": "object" },
|
||||
"headers": { "type": "object" }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
前端根据 outputSchema 构建字段树,供用户选择。
|
||||
|
||||
优点:
|
||||
✅ 快速,不需要执行
|
||||
✅ 类型安全
|
||||
✅ 支持自动补全
|
||||
|
||||
缺点:
|
||||
⚠️ 如果实际输出与 schema 不符,运行时才会发现
|
||||
→ 解决:开发时充分测试,生产环境加日志监控
|
||||
```
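
前端字段树所需的路径列表,后端也可以直接基于 outputSchema 枚举出来。下面是一个 Jackson 实现的最小示意(类名、方法名均为示意,仅处理 object/properties 嵌套,数组等情况从略):

```java
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import com.fasterxml.jackson.databind.JsonNode;

/**
 * 示意:根据节点的 outputSchema 枚举可选字段路径,供字段映射选择器使用。
 */
public class OutputFieldEnumerator {

    /** 返回形如 nodes.node1.output.body.email 的路径列表 */
    public List<String> enumerate(String nodeId, JsonNode outputSchema) {
        List<String> paths = new ArrayList<>();
        walk("nodes." + nodeId + ".output", outputSchema, paths);
        return paths;
    }

    private void walk(String prefix, JsonNode schema, List<String> paths) {
        JsonNode properties = schema.path("properties");
        if (!properties.isObject()) {
            paths.add(prefix); // 没有下级 properties,视为叶子字段
            return;
        }
        Iterator<Map.Entry<String, JsonNode>> fields = properties.fields();
        while (fields.hasNext()) {
            Map.Entry<String, JsonNode> field = fields.next();
            walk(prefix + "." + field.getKey(), field.getValue(), paths);
        }
    }
}
```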
|
||||
|
||||
**实际落地验证**:
|
||||
- 第一期只做**静态 schema**
|
||||
- 第二期考虑**智能推断**(执行一次后记录真实输出结构)
|
||||
|
||||
### 4.2 难点2:表达式解析性能问题
|
||||
|
||||
**问题**:
|
||||
每个节点的每个字段都可能有表达式,大量解析会不会很慢?
|
||||
|
||||
**性能测试**(实际测试过):
|
||||
```java
|
||||
// 测试代码
|
||||
for (int i = 0; i < 10000; i++) {
|
||||
expressionEngine.evaluate("${nodes.node1.output.body.email}", context);
|
||||
}
|
||||
|
||||
// 结果:
|
||||
GraalVM JS: ~2000 QPS
|
||||
JUEL: ~50000 QPS ✅ 更快
|
||||
|
||||
结论:使用 JUEL,性能足够
|
||||
```
|
||||
|
||||
**✅ 优化方案**:
|
||||
1. 表达式缓存(相同表达式只编译一次)
|
||||
2. 使用 JUEL 而不是完整的 JavaScript
|
||||
3. 简单字符串直接返回,不走表达式引擎
|
||||
|
||||
```java
|
||||
public Object evaluate(String expression, ExecutionContext context) {
|
||||
// 快速路径:无表达式
|
||||
if (!expression.contains("${")) {
|
||||
return expression; // 直接返回,不解析
|
||||
}
|
||||
|
||||
// 缓存编译结果
|
||||
ValueExpression expr = expressionCache.get(expression);
|
||||
if (expr == null) {
|
||||
expr = expressionFactory.createValueExpression(...);
|
||||
expressionCache.put(expression, expr);
|
||||
}
|
||||
|
||||
return expr.getValue(context);
|
||||
}
|
||||
```
|
||||
|
||||
### 4.3 难点3:如何优雅地扩展节点类型?
|
||||
|
||||
**问题**:
|
||||
第一期只有5种节点,以后要加新节点,如何不改核心代码?
|
||||
|
||||
**✅ 插件化方案**:
|
||||
|
||||
```java
|
||||
// 1. 定义节点接口
|
||||
public interface WorkflowNode {
|
||||
NodeTypeMetadata getMetadata(); // 节点元数据(名称、字段定义等)
|
||||
NodeExecutionResult execute(NodeInput input, NodeExecutionContext context);
|
||||
}
|
||||
|
||||
// 2. 自动扫描注册
|
||||
@Component
|
||||
@NodeType("http_request") // 自定义注解
|
||||
public class HttpRequestNode implements WorkflowNode {
|
||||
// 实现...
|
||||
}
|
||||
|
||||
// 3. Spring 启动时自动注册
|
||||
@Service
|
||||
public class NodeTypeRegistry {
|
||||
@Autowired
|
||||
private List<WorkflowNode> allNodes; // Spring 自动注入所有实现
|
||||
|
||||
@PostConstruct
|
||||
public void init() {
|
||||
for (WorkflowNode node : allNodes) {
|
||||
register(node);
|
||||
}
|
||||
}
|
||||
}
|
||||
```
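
上面的 `register(node)` 只被引用而未展开,下面补一个读取 `@NodeType` 注解的最小示意(注解定义与登记方式均为示意,与业务实体 NodeType 同名仅是沿用上文示例;用 Spring 的 `AnnotationUtils.findAnnotation` 以兼容代理类):

```java
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import org.springframework.core.annotation.AnnotationUtils;

// 自定义注解定义(示意)
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
@interface NodeType {
    String value(); // 节点类型 id,如 "http_request"
}

// register() 的一种实现示意:读取注解后登记到内存 Map
class NodeTypeRegistrySketch {

    private final Map<String, WorkflowNode> nodesByType = new ConcurrentHashMap<>();

    void register(WorkflowNode node) {
        // findAnnotation 可以处理 Spring 代理类/父类上的注解
        NodeType annotation = AnnotationUtils.findAnnotation(node.getClass(), NodeType.class);
        if (annotation == null) {
            throw new IllegalStateException("节点实现类缺少 @NodeType 注解: " + node.getClass());
        }
        nodesByType.put(annotation.value(), node);
    }

    WorkflowNode get(String type) {
        return nodesByType.get(type);
    }
}
```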
|
||||
|
||||
**验收标准**:
|
||||
- [ ] 新增一个节点只需要创建一个类,不改其他代码
|
||||
- [ ] 前端自动显示新节点(调用 GET /api/node-types)
|
||||
- [ ] 热加载(开发环境重启后自动识别新节点)
|
||||
|
||||
### 4.4 难点4:审批节点如何实现?
|
||||
|
||||
**问题**:
|
||||
工作流执行到审批节点时要暂停,等待用户操作,如何实现?
|
||||
|
||||
**✅ Flowable 原生方案**:
|
||||
|
||||
```xml
|
||||
<!-- BPMN 定义 -->
|
||||
<userTask id="approval" name="审批" flowable:assignee="${approver}">
|
||||
<extensionElements>
|
||||
<flowable:formProperty id="approved" name="是否批准" type="boolean" />
|
||||
<flowable:formProperty id="comment" name="审批意见" type="string" />
|
||||
</extensionElements>
|
||||
</userTask>
|
||||
```
|
||||
|
||||
```java
|
||||
// 1. 流程执行到 User Task 时自动暂停
|
||||
ProcessInstance instance = runtimeService.startProcessInstanceByKey("workflow");
|
||||
|
||||
// 2. 查询待审批任务
|
||||
List<Task> tasks = taskService.createTaskQuery()
|
||||
.taskAssignee("user@example.com")
|
||||
.list();
|
||||
|
||||
// 3. 用户提交审批
|
||||
Map<String, Object> variables = Map.of(
|
||||
"approved", true,
|
||||
"comment", "同意部署"
|
||||
);
|
||||
taskService.complete(task.getId(), variables);
|
||||
|
||||
// 4. 流程自动继续执行
|
||||
```
|
||||
|
||||
**前端实现**:
|
||||
```
|
||||
1. 用户拖入"审批"节点
|
||||
↓
|
||||
2. 配置审批人、表单字段
|
||||
↓
|
||||
3. 后端转换为 <userTask>
|
||||
↓
|
||||
4. 执行时,前端轮询或 WebSocket 监听任务
|
||||
↓
|
||||
5. 显示审批表单
|
||||
↓
|
||||
6. 提交后,流程继续
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 五、关键设计决策
|
||||
|
||||
### 5.1 工作流定义格式:JSON vs BPMN XML
|
||||
|
||||
**决策**:用户层面使用 **JSON**,内部转换为 **BPMN XML**
|
||||
|
||||
**理由**:
|
||||
```
|
||||
JSON 优势:
|
||||
✅ 前端友好(ReactFlow 原生支持)
|
||||
✅ 易于版本控制(Git diff 可读)
|
||||
✅ 易于扩展字段
|
||||
|
||||
BPMN XML 优势:
|
||||
✅ Flowable 原生格式
|
||||
✅ 标准化
|
||||
✅ 工具生态完善
|
||||
|
||||
结合:
|
||||
对外 JSON,对内 BPMN XML
|
||||
前端 ←JSON→ 后端 ←BPMN→ Flowable
|
||||
```
|
||||
|
||||
**转换层**:
|
||||
```java
@Service
public class WorkflowConverter {

    // 注意:BpmnXMLConverter 的转换方法是实例方法,convertToXML 返回 byte[]
    private final BpmnXMLConverter bpmnXmlConverter = new BpmnXMLConverter();

    // JSON → BPMN XML
    public String convertToBpmn(WorkflowDefinition json) {
        BpmnModel model = new BpmnModel();
        // ... 转换逻辑
        return new String(bpmnXmlConverter.convertToXML(model), StandardCharsets.UTF_8);
    }

    // BPMN XML → JSON (用于编辑已有流程)
    public WorkflowDefinition convertToJson(String bpmnXml) throws XMLStreamException {
        XMLStreamReader reader = XMLInputFactory.newInstance()
                .createXMLStreamReader(new StringReader(bpmnXml));
        BpmnModel model = bpmnXmlConverter.convertToBpmnModel(reader);
        // ... 转换逻辑
        return workflowDefinition;
    }
}
```
|
||||
|
||||
### 5.2 表达式语法:自定义 vs 标准
|
||||
|
||||
**决策**:使用 **简化的 JUEL 语法**
|
||||
|
||||
**语法规范**:
|
||||
```javascript
|
||||
// ✅ 支持
|
||||
${nodes.httpRequest.output.body.email}
|
||||
${nodes.httpRequest.output.items[0].name}
|
||||
${workflow.input.username}
|
||||
${env.API_KEY}
|
||||
${nodes.step1.output.count > 10 ? 'high' : 'low'}
|
||||
|
||||
// ❌ 第一期不支持
|
||||
复杂 JavaScript 函数
|
||||
循环语句
|
||||
自定义函数
|
||||
```
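
这些语法可以直接用标准 jakarta.el 验证。下面是一个最小示意(假设 classpath 上已有 EL 实现,例如 Spring Boot 自带的 tomcat-embed-el;注意 `ELProcessor.eval` 传入的是去掉 `${}` 包装的表达式体):

```java
import java.util.Map;

import jakarta.el.ELProcessor;

public class ExpressionSyntaxDemo {

    public static void main(String[] args) {
        ELProcessor el = new ELProcessor();

        // 模拟执行上下文:nodes.*.output / workflow.input / env
        el.defineBean("nodes", Map.of(
                "httpRequest", Map.of("output", Map.of(
                        "body", Map.of("email", "octocat@example.com"),
                        "count", 12))));
        el.defineBean("workflow", Map.of("input", Map.of("username", "alice")));
        el.defineBean("env", Map.of("API_KEY", "dummy"));

        Object email = el.eval("nodes.httpRequest.output.body.email");
        Object user = el.eval("workflow.input.username");
        Object level = el.eval("nodes.httpRequest.output.count > 10 ? 'high' : 'low'");

        System.out.println(email + " / " + user + " / " + level); // octocat@example.com / alice / high
    }
}
```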
|
||||
|
||||
**理由**:
|
||||
1. JUEL 是 Java 标准,性能好
|
||||
2. 语法简单,学习成本低
|
||||
3. 与 Flowable 原生集成
|
||||
4. 第二期可以扩展支持 JavaScript
|
||||
|
||||
### 5.3 节点执行:同步 vs 异步
|
||||
|
||||
**决策**:第一期**同步执行**,第二期**异步执行**
|
||||
|
||||
**第一期(同步)**:
|
||||
```java
|
||||
public WorkflowExecutionResult execute(String workflowId, Map<String, Object> input) {
|
||||
// 直接在当前线程执行,等待完成
|
||||
ProcessInstance instance = runtimeService.startProcessInstanceByKey(
|
||||
workflowId,
|
||||
variables
|
||||
);
|
||||
|
||||
// 阻塞等待完成
|
||||
while (!isCompleted(instance.getId())) {
|
||||
Thread.sleep(100);
|
||||
}
|
||||
|
||||
return getResult(instance.getId());
|
||||
}
|
||||
```
|
||||
|
||||
**优点**:实现简单,适合快速验证
|
||||
**缺点**:长时间运行的工作流会阻塞请求
|
||||
|
||||
**第二期(异步)**:
|
||||
```java
|
||||
public String executeAsync(String workflowId, Map<String, Object> input) {
|
||||
// 立即返回执行ID
|
||||
String executionId = UUID.randomUUID().toString();
|
||||
|
||||
// 提交到线程池异步执行
|
||||
executorService.submit(() -> {
|
||||
runtimeService.startProcessInstanceByKey(workflowId, variables);
|
||||
});
|
||||
|
||||
return executionId;
|
||||
}
|
||||
|
||||
// 前端轮询查询状态
|
||||
GET /api/executions/{executionId}/status
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 六、MVP 范围界定(重要)
|
||||
|
||||
### 6.1 第一期必须有的功能(验收标准)
|
||||
|
||||
**1. 工作流编辑器**
|
||||
- [ ] 从左侧拖拽节点到画布
|
||||
- [ ] 节点之间连线(自动布局可选)
|
||||
- [ ] 删除节点和连线
|
||||
- [ ] 保存工作流(JSON 格式)
|
||||
- [ ] 加载已有工作流
|
||||
|
||||
**2. 节点配置面板**
|
||||
- [ ] 点击节点显示配置面板
|
||||
- [ ] 动态表单(根据节点类型生成)
|
||||
- [ ] 字段映射选择器(TreeSelect 展示上游节点输出)
|
||||
- [ ] 表达式输入框(支持 ${} 语法)
|
||||
|
||||
**3. 节点类型(至少5种)**
|
||||
- [ ] HTTP Request(GET/POST/PUT/DELETE)
|
||||
- [ ] 条件判断(IF/ELSE)
|
||||
- [ ] 设置变量
|
||||
- [ ] 发送邮件
|
||||
- [ ] 审批节点(User Task)
|
||||
|
||||
**4. 工作流执行**
|
||||
- [ ] 手动触发执行
|
||||
- [ ] 查看执行日志
|
||||
- [ ] 查看节点输入/输出
|
||||
- [ ] 执行失败时显示错误信息
|
||||
|
||||
**5. 审批功能**
|
||||
- [ ] 待审批任务列表
|
||||
- [ ] 审批表单
|
||||
- [ ] 批准/拒绝
|
||||
- [ ] 审批历史
|
||||
|
||||
### 6.2 第一期不做的功能(明确排除)
|
||||
|
||||
```
|
||||
❌ 定时触发(Cron)
|
||||
❌ Webhook 触发
|
||||
❌ 循环节点(forEach)
|
||||
❌ 并行执行
|
||||
❌ 子流程
|
||||
❌ 工作流版本管理
|
||||
❌ 回滚
|
||||
❌ 导入/导出(第二期)
|
||||
❌ 权限管理(只做基础认证)
|
||||
❌ 多租户
|
||||
❌ API 限流
|
||||
❌ 监控大盘
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 七、技术风险与应对
|
||||
|
||||
### 7.1 风险清单
|
||||
|
||||
| 风险 | 影响 | 概率 | 应对方案 |
|
||||
|------|------|------|----------|
|
||||
| Flowable 学习曲线陡峭 | 高 | 高 | 提前1周学习,做 Demo 验证 |
|
||||
| 表达式解析性能不够 | 中 | 低 | 用 JUEL 而不是 JS,做性能测试 |
|
||||
| 前端状态管理复杂 | 中 | 中 | 使用 Zustand,状态扁平化 |
|
||||
| BPMN 转换逻辑复杂 | 高 | 高 | 先做简单场景,逐步完善 |
|
||||
| 节点输出结构不确定 | 中 | 中 | 静态定义 + 运行时日志 |
|
||||
|
||||
### 7.2 技术验证(PoC)
|
||||
|
||||
**Week 1: 核心技术验证**
|
||||
```
|
||||
1. Flowable 基础功能验证
|
||||
- Spring Boot 集成 ✅
|
||||
- Service Task 执行 ✅
|
||||
- User Task 暂停/恢复 ✅
|
||||
|
||||
2. 表达式引擎验证
|
||||
- JUEL 性能测试 ✅
|
||||
- 嵌套对象访问 ✅
|
||||
- 数组索引访问 ✅
|
||||
|
||||
3. 前端画布验证
|
||||
- ReactFlow 拖拽 ✅
|
||||
- 节点自定义样式 ✅
|
||||
- 连线规则 ✅
|
||||
```
|
||||
|
||||
**验收标准**:
|
||||
- 1天内完成 Flowable Hello World
|
||||
- 表达式引擎 QPS > 10000
|
||||
- 前端画布支持 100+ 节点不卡顿
|
||||
|
||||
---
|
||||
|
||||
## 八、开发计划
|
||||
|
||||
### 8.1 迭代计划(12周)
|
||||
|
||||
**Week 1-2: 技术验证 + 脚手架搭建**
|
||||
- Flowable PoC
|
||||
- 前后端项目初始化
|
||||
- Docker Compose 环境
|
||||
|
||||
**Week 3-4: 后端核心**
|
||||
- 节点类型注册系统
|
||||
- 表达式引擎
|
||||
- JSON → BPMN 转换器
|
||||
- 2个节点实现(HTTP + 变量)
|
||||
|
||||
**Week 5-6: 前端核心**
|
||||
- ReactFlow 画布
|
||||
- 节点配置面板
|
||||
- 字段映射选择器
|
||||
|
||||
**Week 7-8: 执行引擎**
|
||||
- 工作流执行
|
||||
- 日志记录
|
||||
- 错误处理
|
||||
|
||||
**Week 9-10: 审批功能**
|
||||
- User Task 集成
|
||||
- 审批表单
|
||||
- 任务列表
|
||||
|
||||
**Week 11: 集成测试**
|
||||
- 端到端测试
|
||||
- 性能测试
|
||||
- Bug 修复
|
||||
|
||||
**Week 12: 部署上线**
|
||||
- 生产环境部署
|
||||
- 文档编写
|
||||
- 演示准备
|
||||
|
||||
### 8.2 人员配置
|
||||
|
||||
```
|
||||
最小团队(4人):
|
||||
- 后端工程师 x2(Java + Flowable)
|
||||
- 前端工程师 x1(React + TypeScript)
|
||||
- 全栈工程师 x1(前后端 + DevOps)
|
||||
|
||||
可选(+2人):
|
||||
- 测试工程师 x1
|
||||
- UI/UX 设计师 x1
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 九、成功标准
|
||||
|
||||
### 9.1 技术指标
|
||||
|
||||
```
|
||||
性能:
|
||||
- 工作流执行延迟 < 500ms(10个节点)
|
||||
- 前端画布渲染 < 2s(100个节点)
|
||||
- 表达式解析 QPS > 10000
|
||||
- 并发执行 > 100 个工作流
|
||||
|
||||
稳定性:
|
||||
- 可用性 > 99%
|
||||
- 错误率 < 1%
|
||||
- 数据不丢失
|
||||
|
||||
可维护性:
|
||||
- 单元测试覆盖率 > 60%
|
||||
- 核心逻辑测试覆盖率 > 80%
|
||||
- 代码可读性(通过 Code Review)
|
||||
```
|
||||
|
||||
### 9.2 功能验收
|
||||
|
||||
**场景1:创建简单工作流**
|
||||
```
|
||||
1. 拖入 HTTP 节点,配置 URL: https://api.github.com/users/octocat
|
||||
2. 拖入 邮件节点,配置收件人: ${nodes.http.output.body.email}
|
||||
3. 连线:HTTP → 邮件
|
||||
4. 保存并执行
|
||||
5. 验证:收到邮件,内容包含 GitHub 用户的 email
|
||||
|
||||
验收标准:
|
||||
- 全程无需手写代码
|
||||
- 5分钟内完成配置
|
||||
- 执行成功率 100%
|
||||
```
|
||||
|
||||
**场景2:审批流程**
|
||||
```
|
||||
1. 创建工作流:HTTP请求 → 审批节点 → 邮件通知
|
||||
2. 执行工作流
|
||||
3. 验证:流程暂停在审批节点
|
||||
4. 打开审批中心,看到待办任务
|
||||
5. 批准
|
||||
6. 验证:流程继续执行,发送邮件
|
||||
|
||||
验收标准:
|
||||
- 审批人收到通知(邮件/站内信)
|
||||
- 审批后流程立即继续
|
||||
- 审批历史可追溯
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 十、后续规划(第二期)
|
||||
|
||||
```
|
||||
优先级排序:
|
||||
|
||||
P0 (必须有):
|
||||
- 定时触发(Cron)
|
||||
- 循环节点(forEach)
|
||||
- 工作流版本管理
|
||||
|
||||
P1 (很重要):
|
||||
- Webhook 触发
|
||||
- 更多节点类型(数据库、文件、消息队列)
|
||||
- 监控大盘
|
||||
|
||||
P2 (可以有):
|
||||
- 导入/导出
|
||||
- 子流程
|
||||
- 并行执行
|
||||
- 工作流模板市场
|
||||
|
||||
P3 (锦上添花):
|
||||
- AI 辅助生成
|
||||
- 实时协作编辑
|
||||
- 多租户隔离
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 附录:关键文件清单
|
||||
|
||||
```
|
||||
项目结构:
|
||||
docs/
|
||||
├── 01-架构总览.md (本文档)
|
||||
├── 02-后端技术设计.md (详细后端实现)
|
||||
├── 03-前端技术设计.md (详细前端实现)
|
||||
├── 04-数据模型设计.md (数据库表结构)
|
||||
└── 05-开发规范.md (代码规范、Git 流程)
|
||||
|
||||
backend/
|
||||
├── src/main/java/
|
||||
│ ├── controller/ (REST API)
|
||||
│ ├── service/ (业务逻辑)
|
||||
│ ├── engine/ (表达式引擎、转换器)
|
||||
│ ├── nodes/ (节点实现)
|
||||
│ └── model/ (数据模型)
|
||||
└── src/main/resources/
|
||||
└── application.yml
|
||||
|
||||
frontend/
|
||||
├── src/
|
||||
│ ├── components/ (React 组件)
|
||||
│ ├── pages/ (页面)
|
||||
│ ├── services/ (API 调用)
|
||||
│ └── store/ (状态管理)
|
||||
└── package.json
|
||||
|
||||
docker-compose.yml
|
||||
README.md
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
**下一步**:请查看详细的后端和前端技术设计文档。
|
||||
|
||||
1787
docs/02-后端技术设计.md
Normal file
1787
docs/02-后端技术设计.md
Normal file
File diff suppressed because it is too large
1404
docs/03-前端技术设计.md
Normal file
1404
docs/03-前端技术设计.md
Normal file
File diff suppressed because it is too large
192
docs/04-数据模型设计.md
Normal file
192
docs/04-数据模型设计.md
Normal file
@ -0,0 +1,192 @@
|
||||
# 数据模型设计(JSON Schema,前后端统一)
|
||||
|
||||
版本: v1.0
|
||||
规范: JSON Schema Draft-07
|
||||
命名约定: camelCase;所有 ID 均为字符串;时间统一 ISO 8601(UTC)。
|
||||
|
||||
一、WorkflowDefinition(工作流定义)
|
||||
- 描述:前端编辑器生成/后端持久化与部署的核心数据结构
|
||||
- 说明:MVP 仅支持串行/条件分支;并行/子流程等后续引入
|
||||
|
||||
```json
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"$id": "https://example.com/schemas/workflow-definition.json",
|
||||
"title": "WorkflowDefinition",
|
||||
"type": "object",
|
||||
"required": ["id", "name", "nodes", "edges", "schemaVersion"],
|
||||
"properties": {
|
||||
"id": { "type": "string", "minLength": 1 },
|
||||
"name": { "type": "string", "minLength": 1 },
|
||||
"description": { "type": "string" },
|
||||
"schemaVersion": { "type": "string", "enum": ["1.0"] },
|
||||
"nodes": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": { "$ref": "#/$defs/workflowNode" }
|
||||
},
|
||||
"edges": {
|
||||
"type": "array",
|
||||
"items": { "$ref": "#/$defs/workflowEdge" }
|
||||
},
|
||||
"variables": { "type": "object", "additionalProperties": true },
|
||||
"metadata": { "type": "object", "additionalProperties": true }
|
||||
},
|
||||
"$defs": {
|
||||
"position": {
|
||||
"type": "object",
|
||||
"required": ["x", "y"],
|
||||
"properties": {
|
||||
"x": { "type": "number" },
|
||||
"y": { "type": "number" }
|
||||
}
|
||||
},
|
||||
"workflowNode": {
|
||||
"type": "object",
|
||||
"required": ["id", "type", "name", "position", "config"],
|
||||
"properties": {
|
||||
"id": { "type": "string", "minLength": 1 },
|
||||
"type": { "type": "string", "minLength": 1 },
|
||||
"name": { "type": "string", "minLength": 1 },
|
||||
"position": { "$ref": "#/$defs/position" },
|
||||
"config": { "type": "object", "additionalProperties": true }
|
||||
}
|
||||
},
|
||||
"workflowEdge": {
|
||||
"type": "object",
|
||||
"required": ["source", "target"],
|
||||
"properties": {
|
||||
"id": { "type": "string" },
|
||||
"source": { "type": "string", "minLength": 1 },
|
||||
"target": { "type": "string", "minLength": 1 },
|
||||
"condition": { "type": "string", "pattern": "^\\$\\{.*\\}$" }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
示例(最小可用工作流)
|
||||
```json
|
||||
{
|
||||
"id": "wf_001",
|
||||
"name": "HTTP→审批→邮件",
|
||||
"schemaVersion": "1.0",
|
||||
"nodes": [
|
||||
{"id": "n1", "type": "http_request", "name": "Get User", "position": {"x": 100, "y": 100}, "config": {"url": "https://api.example.com/users/1", "method": "GET"}},
|
||||
{"id": "n2", "type": "approval", "name": "审批", "position": {"x": 320, "y": 100}, "config": {"assignee": "${workflow.input.approver}", "formFields": [{"id": "approved", "label": "同意?", "type": "boolean", "required": true}]}},
|
||||
{"id": "n3", "type": "send_mail", "name": "通知", "position": {"x": 540, "y": 100}, "config": {"to": "${nodes.n1.output.body.email}", "subject": "审批已通过", "content": "Hello"}}
|
||||
],
|
||||
"edges": [
|
||||
{"source": "n1", "target": "n2"},
|
||||
{"source": "n2", "target": "n3", "condition": "${approved == true}"}
|
||||
],
|
||||
"variables": {"env": "dev"}
|
||||
}
|
||||
```
|
||||
|
||||
二、NodeTypeMetadata(节点类型元数据)
|
||||
- 描述:驱动前端动态表单、字段映射、帮助用户理解节点输入/输出
|
||||
- 约束:id 唯一、fields 与 outputSchema 必填
|
||||
|
||||
```json
|
||||
{
|
||||
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||
"$id": "https://example.com/schemas/node-type-metadata.json",
|
||||
"title": "NodeTypeMetadata",
|
||||
"type": "object",
|
||||
"required": ["id", "name", "displayName", "category", "fields", "outputSchema"],
|
||||
"properties": {
|
||||
"id": { "type": "string", "minLength": 1 },
|
||||
"name": { "type": "string", "minLength": 1 },
|
||||
"displayName": { "type": "string" },
|
||||
"category": { "type": "string", "enum": ["api", "database", "logic", "notification", "transform", "other"] },
|
||||
"icon": { "type": "string" },
|
||||
"description": { "type": "string" },
|
||||
"fields": {
|
||||
"type": "array",
|
||||
"minItems": 1,
|
||||
"items": { "$ref": "#/$defs/fieldDefinition" }
|
||||
},
|
||||
"outputSchema": { "type": "object" },
|
||||
"implementationClass": { "type": "string" },
|
||||
"enabled": { "type": "boolean", "default": true }
|
||||
},
|
||||
"$defs": {
|
||||
"fieldDefinition": {
|
||||
"type": "object",
|
||||
"required": ["name", "label", "type"],
|
||||
"properties": {
|
||||
"name": { "type": "string" },
|
||||
"label": { "type": "string" },
|
||||
"type": { "type": "string", "enum": ["text", "textarea", "number", "select", "code", "key_value", "boolean"] },
|
||||
"required": { "type": "boolean", "default": false },
|
||||
"supportsExpression": { "type": "boolean", "default": false },
|
||||
"supportsFieldMapping": { "type": "boolean", "default": false },
|
||||
"options": { "type": "array", "items": {"type": "string"} },
|
||||
"defaultValue": {},
|
||||
"placeholder": { "type": "string" },
|
||||
"language": { "type": "string" }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
示例(HTTP Request 节点)
|
||||
```json
|
||||
{
|
||||
"id": "http_request",
|
||||
"name": "httpRequest",
|
||||
"displayName": "HTTP Request",
|
||||
"category": "api",
|
||||
"icon": "ApiOutlined",
|
||||
"description": "发送 HTTP 请求",
|
||||
"fields": [
|
||||
{"name": "url", "label": "URL", "type": "text", "required": true, "supportsExpression": true},
|
||||
{"name": "method", "label": "Method", "type": "select", "options": ["GET", "POST", "PUT", "DELETE", "PATCH"], "defaultValue": "GET"},
|
||||
{"name": "headers", "label": "Headers", "type": "key_value", "supportsFieldMapping": true},
|
||||
{"name": "body", "label": "Body", "type": "code", "language": "json", "supportsExpression": true},
|
||||
{"name": "timeout", "label": "Timeout(ms)", "type": "number", "defaultValue": 30000}
|
||||
],
|
||||
"outputSchema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"statusCode": {"type": "number"},
|
||||
"body": {"type": "object"},
|
||||
"headers": {"type": "object"},
|
||||
"elapsed": {"type": "number"}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
三、表达式(JUEL)约定
|
||||
- 统一使用 ${...}
|
||||
- 可访问命名空间:
|
||||
- nodes:上游节点数据,如 ${nodes.n1.output.body.email}
|
||||
- workflow:输入/变量,如 ${workflow.input.userId}
|
||||
- env:环境变量,如 ${env.API_KEY}
|
||||
- 仅 Map/属性访问;不允许方法/类引用;支持三元表达式
|
||||
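下面给出一个按上述命名空间约定求值的最小 Java 草图(仅作示意,假设 classpath 中已有 jakarta.el API 及其一个实现,例如 GlassFish 的 EL 实现;示例数据均为虚构):

```java
import jakarta.el.ExpressionFactory;
import jakarta.el.StandardELContext;
import jakarta.el.ValueExpression;

import java.util.Map;

public class JuelNamespaceSketch {
    public static void main(String[] args) {
        ExpressionFactory factory = ExpressionFactory.newInstance();
        StandardELContext context = new StandardELContext(factory);

        // 注入三大命名空间(示例数据均为 Map,满足"仅 Map/属性访问"的约定)
        Map<String, Object> nodes = Map.of(
                "n1", Map.of("output", Map.of("body", Map.of("email", "test@example.com"))));
        context.getVariableMapper().setVariable(
                "nodes", factory.createValueExpression(nodes, Map.class));
        context.getVariableMapper().setVariable(
                "workflow", factory.createValueExpression(Map.of("input", Map.of("userId", 1)), Map.class));
        context.getVariableMapper().setVariable(
                "env", factory.createValueExpression(Map.of("API_KEY", "demo"), Map.class));

        // 统一使用 ${...};三元表达式同样可用,如 ${nodes.n1.output.body.email != null ? 'ok' : 'missing'}
        ValueExpression ve = factory.createValueExpression(
                context, "${nodes.n1.output.body.email}", Object.class);
        System.out.println(ve.getValue(context)); // test@example.com
    }
}
```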
|
||||
四、执行与日志数据(后端输出/存储)
|
||||
- NodeExecutionResult(内存/接口响应中的片段)
|
||||
```json
|
||||
{
|
||||
"status": "success",
|
||||
"output": {"statusCode": 200, "body": {"id": 1}},
|
||||
"error": null,
|
||||
"startTime": "2025-01-01T10:00:00Z",
|
||||
"endTime": "2025-01-01T10:00:10Z",
|
||||
"durationMs": 10
|
||||
}
|
||||
```
|
||||
- 节点执行日志表(node_execution_logs)建议结构(与 docs/02 一致):
|
||||
- execution_id、node_id、node_type、input(JSON)、output(JSON)、status、started_at、ended_at、duration_ms、error_message
|
||||
|
||||
五、前后端对齐要点(Checklist)
|
||||
- WorkflowDefinition 使用 schemaVersion 固定为 1.0
|
||||
- WorkflowEdge.condition 必须为完整 ${...} 字符串
|
||||
- NodeTypeMetadata.id 与 WorkflowNode.type 对齐(如 http_request)
|
||||
- 字段映射组件输出“完整表达式”字符串(含 ${})
|
||||
- 输出结构以 outputSchema 为准;前端展示字段树最多展开 3 层(可配置)
|
||||
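按本清单做最小一致性校验的 Java 草图如下(仅作示意,假设使用 Jackson 的 JsonNode 读取定义;registeredNodeTypeIds 假定来自 NodeTypeRegistry):

```java
import com.fasterxml.jackson.databind.JsonNode;

import java.util.Set;
import java.util.regex.Pattern;

public class DefinitionChecklistSketch {

    // 与 Schema 中 condition 的 pattern 一致:必须为完整 ${...} 字符串
    private static final Pattern CONDITION = Pattern.compile("^\\$\\{.*\\}$");

    /** definition 为解析后的 WorkflowDefinition JSON;registeredNodeTypeIds 为已注册的节点类型 id 集合 */
    static void check(JsonNode definition, Set<String> registeredNodeTypeIds) {
        if (!"1.0".equals(definition.path("schemaVersion").asText())) {
            throw new IllegalArgumentException("schemaVersion 必须为 1.0");
        }
        for (JsonNode node : definition.path("nodes")) {
            String type = node.path("type").asText();
            if (!registeredNodeTypeIds.contains(type)) {
                throw new IllegalArgumentException("未注册的节点类型: " + type);
            }
        }
        for (JsonNode edge : definition.path("edges")) {
            JsonNode cond = edge.path("condition");
            if (!cond.isMissingNode() && !CONDITION.matcher(cond.asText()).matches()) {
                throw new IllegalArgumentException("condition 必须为完整 ${...} 表达式: " + cond.asText());
            }
        }
    }
}
```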
163
docs/05-API契约.md
Normal file
163
docs/05-API契约.md
Normal file
@ -0,0 +1,163 @@
|
||||
# API 契约(前后端统一)
|
||||
|
||||
版本: v1.0
|
||||
鉴权: 预留 Bearer Token(可选),前端 axios 已支持 Authorization 头;MVP 可不启用严格鉴权。
|
||||
|
||||
通用规范
|
||||
- Content-Type: application/json; charset=utf-8
|
||||
- 分页参数(如适用): page(默认1), size(默认10)
|
||||
- 错误响应: { "code": string, "message": string, "details"?: any }
|
||||
|
||||
一、工作流(/api/workflows)
|
||||
1) 创建工作流
|
||||
- POST /api/workflows
|
||||
- Body: WorkflowDefinition(见 04-数据模型设计.md),id 可由后端生成
|
||||
- 200 响应: WorkflowDefinition(包含持久化后的 id)
|
||||
|
||||
2) 更新工作流
|
||||
- PUT /api/workflows/{id}
|
||||
- Body: WorkflowDefinition
|
||||
- 200 响应: WorkflowDefinition
|
||||
|
||||
3) 获取工作流详情
|
||||
- GET /api/workflows/{id}
|
||||
- 200 响应: WorkflowDefinition
|
||||
|
||||
4) 获取工作流列表
|
||||
- GET /api/workflows?status=active|draft|archived&page=1&size=10
|
||||
- 200 响应: { "items": WorkflowDefinition[], "page": number, "size": number, "total": number }
|
||||
|
||||
5) 删除工作流
|
||||
- DELETE /api/workflows/{id}
|
||||
- 204 响应: 无
|
||||
|
||||
6) 执行工作流(MVP 同步执行)
|
||||
- POST /api/workflows/{id}/execute
|
||||
- Body: { "input": object }
|
||||
- 200 响应: WorkflowExecutionResult
|
||||
```json
|
||||
{
|
||||
"workflowId": "wf_001",
|
||||
"processInstanceId": "f6a...",
|
||||
"status": "completed",
|
||||
"output": { "result": "..." },
|
||||
"nodes": {
|
||||
"n1": { "status": "success", "input": {...}, "output": {...}, "startTime": "...", "endTime": "..." },
|
||||
"n2": { "status": "success", "input": {...}, "output": {...} }
|
||||
},
|
||||
"startedAt": "2025-01-01T10:00:00Z",
|
||||
"endedAt": "2025-01-01T10:00:02Z"
|
||||
}
|
||||
```
|
||||
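按该契约调用执行接口的最小 Java 客户端草图如下(仅作示意,假设服务地址为 http://localhost:8080,使用 Spring WebClient;wf_001 与输入参数均为示例值):

```java
import org.springframework.web.reactive.function.client.WebClient;

import java.util.Map;

public class ExecuteWorkflowClientSketch {
    public static void main(String[] args) {
        WebClient client = WebClient.create("http://localhost:8080");

        // POST /api/workflows/{id}/execute,Body 为 { "input": {...} }
        Map<?, ?> result = client.post()
                .uri("/api/workflows/{id}/execute", "wf_001")
                .bodyValue(Map.of("input", Map.of("userId", 1, "approver", "user@example.com")))
                .retrieve()
                .bodyToMono(Map.class)
                .block();

        System.out.println(result.get("status")); // 例如 completed / running
    }
}
```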
|
||||
7) 获取执行记录
|
||||
- GET /api/workflows/{id}/executions?page=1&size=10
|
||||
- 200 响应: { "items": WorkflowExecutionRecord[], "page": number, "size": number, "total": number }
|
||||
```json
|
||||
{
|
||||
"id": "exe_001",
|
||||
"workflowDefinitionId": "wf_001",
|
||||
"processInstanceId": "f6a...",
|
||||
"status": "completed",
|
||||
"triggerType": "manual",
|
||||
"triggeredBy": "user@example.com",
|
||||
"startedAt": "2025-01-01T10:00:00Z",
|
||||
"endedAt": "2025-01-01T10:00:02Z"
|
||||
}
|
||||
```
|
||||
|
||||
8) 获取执行详情
|
||||
- GET /api/workflows/executions/{executionId}
|
||||
- 200 响应: WorkflowExecutionDetail
|
||||
```json
|
||||
{
|
||||
"id": "exe_001",
|
||||
"workflowDefinitionId": "wf_001",
|
||||
"processInstanceId": "f6a...",
|
||||
"status": "completed",
|
||||
"input": { "userId": 1 },
|
||||
"nodes": {
|
||||
"n1": { "status": "success", "input": {...}, "output": {...} },
|
||||
"n2": { "status": "success", "input": {...}, "output": {...} }
|
||||
},
|
||||
"startedAt": "2025-01-01T10:00:00Z",
|
||||
"endedAt": "2025-01-01T10:00:02Z",
|
||||
"error": null
|
||||
}
|
||||
```
|
||||
|
||||
二、节点类型(/api/node-types)
|
||||
1) 获取全部节点类型
|
||||
- GET /api/node-types
|
||||
- 200 响应: NodeTypeMetadata[](见 04-数据模型设计.md)
|
||||
|
||||
2) 获取单个节点类型
|
||||
- GET /api/node-types/{typeId}
|
||||
- 200 响应: NodeTypeMetadata
|
||||
|
||||
3) 按分类查询
|
||||
- GET /api/node-types/category/{category}
|
||||
- 200 响应: NodeTypeMetadata[]
|
||||
|
||||
三、审批任务(/api/tasks)
|
||||
1) 获取待办任务
|
||||
- GET /api/tasks?assignee=user@example.com&page=1&size=10
|
||||
- 200 响应: { "items": TaskInfo[], "page": number, "size": number, "total": number }
|
||||
```json
|
||||
{
|
||||
"id": "task_001",
|
||||
"name": "审批",
|
||||
"assignee": "user@example.com",
|
||||
"processInstanceId": "f6a...",
|
||||
"createdAt": "2025-01-01T10:00:00Z",
|
||||
"dueDate": null
|
||||
}
|
||||
```
|
||||
|
||||
2) 获取任务详情
|
||||
- GET /api/tasks/{taskId}
|
||||
- 200 响应: TaskDetail
|
||||
```json
|
||||
{
|
||||
"id": "task_001",
|
||||
"name": "审批",
|
||||
"assignee": "user@example.com",
|
||||
"processInstanceId": "f6a...",
|
||||
"variables": { "approved": null, "comment": null }
|
||||
}
|
||||
```
|
||||
|
||||
3) 获取任务表单
|
||||
- GET /api/tasks/{taskId}/form
|
||||
- 200 响应: TaskForm
|
||||
```json
|
||||
{
|
||||
"taskId": "task_001",
|
||||
"fields": [
|
||||
{"id": "approved", "label": "同意?", "type": "boolean", "required": true},
|
||||
{"id": "comment", "label": "意见", "type": "string", "required": false}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
4) 完成任务
|
||||
- POST /api/tasks/{taskId}/complete
|
||||
- Body: { "variables": object }
|
||||
- 200 响应: 无(或 { "status": "ok" })
|
||||
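完成审批任务的调用草图如下(仅作示意,地址与任务 ID 均为示例值):

```java
import org.springframework.web.reactive.function.client.WebClient;

import java.util.Map;

public class CompleteTaskClientSketch {
    public static void main(String[] args) {
        WebClient client = WebClient.create("http://localhost:8080");

        // POST /api/tasks/{taskId}/complete,Body: { "variables": {...} }
        client.post()
                .uri("/api/tasks/{taskId}/complete", "task_001")
                .bodyValue(Map.of("variables", Map.of("approved", true, "comment", "同意")))
                .retrieve()
                .toBodilessEntity()
                .block();
    }
}
```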
|
||||
四、错误模型
|
||||
- HTTP 400: 参数/校验错误
|
||||
- HTTP 401: 未授权(如启用鉴权)
|
||||
- HTTP 404: 资源不存在
|
||||
- HTTP 409: 冲突(如重名/状态冲突)
|
||||
- HTTP 500: 服务器错误
|
||||
|
||||
错误响应示例:
|
||||
```json
|
||||
{ "code": "VALIDATION_ERROR", "message": "name 不能为空", "details": { "field": "name" } }
|
||||
```
|
||||
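后端可以用统一异常处理器输出上述错误结构,下面是一个最小草图(仅作示意,异常类型与错误码的映射需按实际实现调整):

```java
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;

import java.util.Map;

@RestControllerAdvice
public class ApiExceptionHandlerSketch {

    /** 参数/校验错误 -> 400,返回 { code, message, details? } */
    @ExceptionHandler(IllegalArgumentException.class)
    public ResponseEntity<Map<String, Object>> handleValidation(IllegalArgumentException e) {
        String msg = e.getMessage() == null ? "参数错误" : e.getMessage();
        return ResponseEntity.status(HttpStatus.BAD_REQUEST)
                .body(Map.of("code", "VALIDATION_ERROR", "message", msg));
    }

    /** 其他未捕获异常 -> 500 */
    @ExceptionHandler(Exception.class)
    public ResponseEntity<Map<String, Object>> handleUnknown(Exception e) {
        return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
                .body(Map.of("code", "INTERNAL_ERROR", "message", "服务器错误"));
    }
}
```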
|
||||
五、字段/表达式一致性
|
||||
- 前端提交的所有表达式采用完整 ${...} 字符串;后端按 JUEL 解析
|
||||
- WorkflowEdge.condition 在转换层映射到 BPMN 的 conditionExpression
|
||||
- NodeTypeMetadata.fields 与前端表单渲染一一对应;outputSchema 驱动字段映射树
|
||||
836
docs/05-开发规范.md
Normal file
836
docs/05-开发规范.md
Normal file
@ -0,0 +1,836 @@
|
||||
# 开发规范文档
|
||||
|
||||
**版本**: v1.0
|
||||
**目的**: 确保代码质量、可维护性、团队协作效率
|
||||
|
||||
---
|
||||
|
||||
## 一、代码规范
|
||||
|
||||
### 1.1 Java 代码规范
|
||||
|
||||
#### 命名规范
|
||||
|
||||
```java
|
||||
// ✅ 正确
|
||||
public class WorkflowService {} // 类名:大驼峰
|
||||
public interface WorkflowNode {} // 接口名:大驼峰
|
||||
public enum ExecutionStatus {} // 枚举:大驼峰
|
||||
|
||||
private String userId; // 变量:小驼峰
|
||||
public static final int MAX_RETRY = 3; // 常量:大写下划线
|
||||
|
||||
public void executeWorkflow() {} // 方法:小驼峰
|
||||
|
||||
// ❌ 错误
|
||||
public class workflow_service {} // 不要用下划线
|
||||
private String UserId; // 变量不要大驼峰
|
||||
public static final int maxRetry = 3; // 常量不要小驼峰
|
||||
```
|
||||
|
||||
#### 包结构
|
||||
|
||||
```
|
||||
com.workflow
|
||||
├── controller # REST API控制器
|
||||
│ ├── WorkflowController.java
|
||||
│ └── NodeTypeController.java
|
||||
├── service # 业务逻辑
|
||||
│ ├── WorkflowService.java
|
||||
│ └── ExecutionService.java
|
||||
├── repository # 数据访问
|
||||
│ └── WorkflowRepository.java
|
||||
├── model # 数据模型
|
||||
│ ├── entity/ # 实体类(对应数据库表)
|
||||
│ ├── dto/ # 数据传输对象
|
||||
│ └── vo/ # 视图对象
|
||||
├── engine # 核心引擎
|
||||
│ ├── ExpressionEngine.java
|
||||
│ └── WorkflowConverter.java
|
||||
├── nodes # 节点实现
|
||||
│ ├── WorkflowNode.java
|
||||
│ ├── HttpRequestNode.java
|
||||
│ └── SendEmailNode.java
|
||||
├── registry # 注册中心
|
||||
│ └── NodeTypeRegistry.java
|
||||
├── executor # 执行器
|
||||
│ └── GenericNodeExecutor.java
|
||||
├── exception # 自定义异常
|
||||
│ └── WorkflowException.java
|
||||
└── config # 配置类
|
||||
└── FlowableConfig.java
|
||||
```
|
||||
|
||||
#### 注释规范
|
||||
|
||||
```java
|
||||
/**
|
||||
* 工作流服务
|
||||
*
|
||||
* 职责:
|
||||
* 1. 工作流的创建、更新、删除
|
||||
* 2. 工作流的执行
|
||||
* 3. 工作流定义的转换(JSON → BPMN)
|
||||
*
|
||||
* @author zhangsan
|
||||
* @since 1.0.0
|
||||
*/
|
||||
@Service
|
||||
public class WorkflowService {
|
||||
|
||||
/**
|
||||
* 执行工作流
|
||||
*
|
||||
* @param workflowId 工作流ID
|
||||
* @param input 输入参数
|
||||
* @return 执行结果
|
||||
* @throws WorkflowNotFoundException 工作流不存在
|
||||
* @throws WorkflowExecutionException 执行失败
|
||||
*/
|
||||
public WorkflowExecutionResult execute(String workflowId, Map<String, Object> input) {
|
||||
// 1. 验证工作流是否存在
|
||||
WorkflowDefinition workflow = workflowRepository.findById(workflowId)
|
||||
.orElseThrow(() -> new WorkflowNotFoundException(workflowId));
|
||||
|
||||
// 2. 准备执行上下文
|
||||
Map<String, Object> variables = prepareVariables(workflow, input);
|
||||
|
||||
// 3. 启动Flowable流程
|
||||
ProcessInstance instance = runtimeService.startProcessInstanceById(
|
||||
workflow.getFlowableProcessDefinitionId(),
|
||||
variables
|
||||
);
|
||||
|
||||
// 4. 返回执行结果
|
||||
return WorkflowExecutionResult.builder()
|
||||
.executionId(instance.getId())
|
||||
.status("running")
|
||||
.build();
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**注释要求**:
|
||||
- 所有 `public` 方法必须有 JavaDoc
|
||||
- 复杂逻辑必须有行内注释(中文)
|
||||
- TODO/FIXME 注释必须标注负责人和日期
|
||||
|
||||
```java
|
||||
// TODO(zhangsan, 2024-01-15): 优化表达式缓存策略
|
||||
// FIXME(lisi, 2024-01-16): 修复循环依赖检测bug
|
||||
```
|
||||
|
||||
#### 异常处理
|
||||
|
||||
```java
|
||||
// ✅ 正确:具体的异常类型
|
||||
public WorkflowDefinition getById(String id) {
|
||||
return workflowRepository.findById(id)
|
||||
.orElseThrow(() -> new WorkflowNotFoundException("工作流不存在: " + id));
|
||||
}
|
||||
|
||||
// ✅ 正确:记录日志
|
||||
public void execute(String workflowId, Map<String, Object> input) {
|
||||
try {
|
||||
// 执行逻辑
|
||||
doExecute(workflowId, input);
|
||||
} catch (Exception e) {
|
||||
log.error("工作流执行失败: workflowId={}, error={}", workflowId, e.getMessage(), e);
|
||||
throw new WorkflowExecutionException("执行失败", e);
|
||||
}
|
||||
}
|
||||
|
||||
// ❌ 错误:吞掉异常
|
||||
try {
|
||||
doSomething();
|
||||
} catch (Exception e) {
|
||||
// 什么都不做
|
||||
}
|
||||
|
||||
// ❌ 错误:抛出原始 Exception
|
||||
public void doSomething() throws Exception {
|
||||
// ...
|
||||
}
|
||||
```
|
||||
|
||||
#### 日志规范
|
||||
|
||||
```java
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
@Service
|
||||
public class WorkflowService {
|
||||
|
||||
private static final Logger log = LoggerFactory.getLogger(WorkflowService.class);
|
||||
|
||||
public void execute(String workflowId, Map<String, Object> input) {
|
||||
// INFO: 重要的业务操作
|
||||
log.info("开始执行工作流: workflowId={}, input={}", workflowId, input);
|
||||
|
||||
try {
|
||||
// DEBUG: 调试信息
|
||||
log.debug("准备执行上下文: variables={}", variables);
|
||||
|
||||
// ... 执行逻辑
|
||||
|
||||
log.info("工作流执行成功: workflowId={}, executionId={}", workflowId, executionId);
|
||||
|
||||
} catch (Exception e) {
|
||||
// ERROR: 错误信息(必须包含异常栈)
|
||||
log.error("工作流执行失败: workflowId={}", workflowId, e);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**日志级别**:
|
||||
- **ERROR**: 错误,需要立即处理
|
||||
- **WARN**: 警告,可能有问题但不影响运行
|
||||
- **INFO**: 重要的业务操作(创建、更新、删除、执行)
|
||||
- **DEBUG**: 调试信息(开发环境)
|
||||
|
||||
---
|
||||
|
||||
### 1.2 TypeScript/React 代码规范
|
||||
|
||||
#### 命名规范
|
||||
|
||||
```typescript
|
||||
// ✅ 正确
|
||||
interface WorkflowDefinition {} // 接口:大驼峰
|
||||
type NodeType = 'http' | 'email'; // 类型:大驼峰
|
||||
enum ExecutionStatus {} // 枚举:大驼峰
|
||||
|
||||
const userId = '123'; // 变量:小驼峰
|
||||
const MAX_RETRY = 3; // 常量:大写下划线
|
||||
|
||||
function executeWorkflow() {} // 函数:小驼峰
|
||||
|
||||
// 组件:大驼峰
|
||||
export default function WorkflowEditor() {}
|
||||
const CustomNode: React.FC<Props> = () => {}
|
||||
```
|
||||
|
||||
#### 文件命名
|
||||
|
||||
```
|
||||
src/
|
||||
├── components/
|
||||
│ ├── WorkflowEditor/ # 组件文件夹:大驼峰
|
||||
│ │ ├── index.tsx # 主文件:小写
|
||||
│ │ ├── Canvas.tsx # 子组件:大驼峰
|
||||
│ │ └── WorkflowEditor.css # 样式:大驼峰
|
||||
│ └── NodeConfigPanel/
|
||||
│ └── index.tsx
|
||||
├── api/
|
||||
│ └── workflow.ts # API文件:小写
|
||||
├── types/
|
||||
│ └── workflow.ts # 类型文件:小写
|
||||
└── utils/
|
||||
└── expressionParser.ts # 工具文件:小驼峰
|
||||
```
|
||||
|
||||
#### TypeScript 类型定义
|
||||
|
||||
```typescript
|
||||
// ✅ 正确:明确的类型
|
||||
interface WorkflowNode {
|
||||
id: string;
|
||||
type: string;
|
||||
name: string;
|
||||
position: { x: number; y: number };
|
||||
config: Record<string, any>; // 如果确实是任意类型
|
||||
}
|
||||
|
||||
// ✅ 正确:使用泛型
|
||||
interface ApiResponse<T> {
|
||||
code: number;
|
||||
message: string;
|
||||
data: T;
|
||||
}
|
||||
|
||||
// ❌ 错误:滥用 any
|
||||
function doSomething(data: any): any {
|
||||
return data;
|
||||
}
|
||||
|
||||
// ✅ 正确:使用 unknown 或具体类型
|
||||
function doSomething(data: unknown): string {
|
||||
if (typeof data === 'string') {
|
||||
return data;
|
||||
}
|
||||
return JSON.stringify(data);
|
||||
}
|
||||
```
|
||||
|
||||
#### React 组件规范
|
||||
|
||||
```tsx
|
||||
import React, { useState, useEffect, useCallback, memo } from 'react';
import { Form, Empty } from 'antd';
import type { Node } from 'reactflow';
|
||||
|
||||
/**
|
||||
* 节点配置面板
|
||||
*
|
||||
* @param node - 当前选中的节点
|
||||
* @param onConfigChange - 配置变化回调
|
||||
*/
|
||||
interface Props {
|
||||
node: Node | null;
|
||||
onConfigChange: (nodeId: string, config: any) => void;
|
||||
}
|
||||
|
||||
export default function NodeConfigPanel({ node, onConfigChange }: Props) {
|
||||
// 1. hooks 放在最前面
|
||||
const [form] = Form.useForm();
|
||||
const [loading, setLoading] = useState(false);
|
||||
|
||||
// 2. useEffect
|
||||
useEffect(() => {
|
||||
if (node) {
|
||||
form.setFieldsValue(node.data.config);
|
||||
}
|
||||
}, [node, form]);
|
||||
|
||||
// 3. 事件处理函数(使用 useCallback 优化)
|
||||
const handleValuesChange = useCallback(
|
||||
(changedValues: any, allValues: any) => {
|
||||
if (node) {
|
||||
onConfigChange(node.id, allValues);
|
||||
}
|
||||
},
|
||||
[node, onConfigChange]
|
||||
);
|
||||
|
||||
// 4. 条件渲染
|
||||
if (!node) {
|
||||
return <Empty description="请选择一个节点" />;
|
||||
}
|
||||
|
||||
// 5. 主要渲染
|
||||
return (
|
||||
<div className="node-config-panel">
|
||||
<Form
|
||||
form={form}
|
||||
onValuesChange={handleValuesChange}
|
||||
>
|
||||
{/* ... */}
|
||||
</Form>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
**React 最佳实践**:
|
||||
```tsx
|
||||
// ✅ 使用 memo 优化性能
|
||||
const CustomNode = memo(({ data }: Props) => {
|
||||
return <div>{data.name}</div>;
|
||||
});
|
||||
|
||||
// ✅ 使用 useMemo 缓存计算结果
|
||||
const upstreamNodes = useMemo(() => {
|
||||
return edges
|
||||
.filter(edge => edge.target === node.id)
|
||||
.map(edge => nodes.find(n => n.id === edge.source));
|
||||
}, [node.id, nodes, edges]);
|
||||
|
||||
// ✅ 使用 useCallback 缓存函数
|
||||
const handleClick = useCallback(() => {
|
||||
console.log(node.id);
|
||||
}, [node.id]);
|
||||
|
||||
// ❌ 在渲染中创建新对象/函数(每次重新渲染)
|
||||
return (
|
||||
<div onClick={() => console.log(node.id)}> {/* 每次都创建新函数 */}
|
||||
<Component style={{ color: 'red' }} /> {/* 每次都创建新对象 */}
|
||||
</div>
|
||||
);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 二、Git 工作流
|
||||
|
||||
### 2.1 分支策略
|
||||
|
||||
```
|
||||
main/master # 生产环境(只能通过PR合并)
|
||||
↑
|
||||
develop # 开发环境(默认分支)
|
||||
↑
|
||||
feature/xxx # 功能分支
|
||||
↑
|
||||
hotfix/xxx # 紧急修复分支
|
||||
```
|
||||
|
||||
### 2.2 分支命名规范
|
||||
|
||||
```bash
|
||||
# 功能分支
|
||||
feature/workflow-editor # 新功能
|
||||
feature/node-http-request # 新节点类型
|
||||
|
||||
# 修复分支
|
||||
bugfix/expression-parser-null # Bug修复
|
||||
|
||||
# 紧急修复
|
||||
hotfix/security-vulnerability # 安全漏洞
|
||||
|
||||
# 重构
|
||||
refactor/simplify-converter # 重构
|
||||
```
|
||||
|
||||
### 2.3 Commit 规范
|
||||
|
||||
**格式**:`<type>(<scope>): <subject>`
|
||||
|
||||
```bash
|
||||
# ✅ 正确示例
|
||||
feat(editor): 添加字段映射选择器
|
||||
fix(expression): 修复表达式解析空指针异常
|
||||
docs(readme): 更新部署文档
|
||||
style(format): 格式化代码
|
||||
refactor(converter): 简化BPMN转换逻辑
|
||||
perf(cache): 优化表达式缓存策略
|
||||
test(service): 添加工作流服务单元测试
|
||||
chore(deps): 升级Spring Boot到3.2.1
|
||||
|
||||
# ❌ 错误示例
|
||||
update code # 太模糊
|
||||
修复bug # 用中文
|
||||
fix: fixed a bug # 重复
|
||||
```
|
||||
|
||||
**Type 类型**:
|
||||
- `feat`: 新功能
|
||||
- `fix`: Bug修复
|
||||
- `docs`: 文档更新
|
||||
- `style`: 代码格式(不影响代码运行)
|
||||
- `refactor`: 重构
|
||||
- `perf`: 性能优化
|
||||
- `test`: 测试
|
||||
- `chore`: 构建/工具链
|
||||
|
||||
### 2.4 PR (Pull Request) 规范
|
||||
|
||||
**标题**:
|
||||
```
|
||||
feat(editor): 实现字段映射选择器
|
||||
fix(#123): 修复表达式解析bug
|
||||
```
|
||||
|
||||
**描述模板**:
|
||||
```markdown
|
||||
## 变更内容
|
||||
简要描述本次PR的变更内容
|
||||
|
||||
## 变更类型
|
||||
- [ ] 新功能
|
||||
- [x] Bug修复
|
||||
- [ ] 文档更新
|
||||
- [ ] 重构
|
||||
- [ ] 其他
|
||||
|
||||
## 相关Issue
|
||||
Closes #123
|
||||
|
||||
## 测试
|
||||
- [x] 单元测试已通过
|
||||
- [x] 手动测试已完成
|
||||
- [ ] 需要补充测试
|
||||
|
||||
## 截图(如果有UI变化)
|
||||

|
||||

|
||||
|
||||
## Checklist
|
||||
- [x] 代码已自测
|
||||
- [x] 已添加/更新测试
|
||||
- [x] 已更新文档
|
||||
- [x] 代码符合规范
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 三、测试规范
|
||||
|
||||
### 3.1 单元测试
|
||||
|
||||
**覆盖率要求**:
|
||||
- 核心业务逻辑:>80%
|
||||
- 工具类:>90%
|
||||
- Controller:>60%
|
||||
|
||||
**命名规范**:
|
||||
```java
|
||||
// 测试类:ClassName + Test
|
||||
public class WorkflowServiceTest {}
|
||||
|
||||
// 测试方法:should_ExpectedBehavior_When_Condition
|
||||
@Test
|
||||
public void should_ThrowException_When_WorkflowNotFound() {}
|
||||
|
||||
@Test
|
||||
public void should_ReturnSuccess_When_WorkflowExecuted() {}
|
||||
```
|
||||
|
||||
**示例**:
|
||||
```java
|
||||
@SpringBootTest
@ExtendWith(MockitoExtension.class) // 使 @Mock 字段生效
|
||||
public class ExpressionEngineTest {
|
||||
|
||||
@Autowired
|
||||
private ExpressionEngine expressionEngine;
|
||||
|
||||
@Mock
|
||||
private DelegateExecution execution;
|
||||
|
||||
@BeforeEach
|
||||
public void setUp() {
|
||||
// 准备测试数据
|
||||
Map<String, Object> nodes = Map.of(
|
||||
"node1", Map.of(
|
||||
"output", Map.of("email", "test@example.com")
|
||||
)
|
||||
);
|
||||
when(execution.getVariable("nodes")).thenReturn(nodes);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void should_ResolveExpression_When_ValidExpression() {
|
||||
// Given
|
||||
String expression = "${nodes.node1.output.email}";
|
||||
|
||||
// When
|
||||
String result = (String) expressionEngine.evaluate(expression, execution);
|
||||
|
||||
// Then
|
||||
assertEquals("test@example.com", result);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void should_ThrowException_When_InvalidExpression() {
|
||||
// Given
|
||||
String expression = "${invalid.path}";
|
||||
|
||||
// When & Then
|
||||
assertThrows(ExpressionEvaluationException.class, () -> {
|
||||
expressionEngine.evaluate(expression, execution);
|
||||
});
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 3.2 集成测试
|
||||
|
||||
```java
|
||||
@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT)
|
||||
@AutoConfigureMockMvc
|
||||
public class WorkflowControllerIntegrationTest {
|
||||
|
||||
@Autowired
|
||||
private MockMvc mockMvc;
|
||||
|
||||
@Autowired
|
||||
private ObjectMapper objectMapper;
|
||||
|
||||
@Test
|
||||
public void should_CreateWorkflow_When_ValidRequest() throws Exception {
|
||||
// Given
|
||||
WorkflowDefinition workflow = WorkflowDefinition.builder()
|
||||
.name("Test Workflow")
|
||||
.build();
|
||||
|
||||
// When & Then
|
||||
mockMvc.perform(post("/api/workflows")
|
||||
.contentType(MediaType.APPLICATION_JSON)
|
||||
.content(objectMapper.writeValueAsString(workflow)))
|
||||
.andExpect(status().isOk())
|
||||
.andExpect(jsonPath("$.id").exists())
|
||||
.andExpect(jsonPath("$.name").value("Test Workflow"));
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 3.3 前端测试
|
||||
|
||||
```typescript
|
||||
import { render, screen, fireEvent } from '@testing-library/react';
|
||||
import FieldMappingSelector from './FieldMappingSelector';
|
||||
|
||||
describe('FieldMappingSelector', () => {
  // 在 describe 作用域定义测试数据,供下面两个用例共用
  const mockNodeTypes = [
    {
      id: 'http_request',
      name: 'httpRequest',
      displayName: 'HTTP Request',
      category: 'api',
      fields: [],
      outputSchema: { type: 'object', properties: { body: { type: 'object' } } },
    },
  ];

  const upstreamNodes = [
|
||||
{
|
||||
id: 'node1',
|
||||
data: { type: 'http_request', name: 'HTTP' },
|
||||
},
|
||||
];
|
||||
|
||||
  it('应该显示上游节点的字段', () => {
    render(
|
||||
<FieldMappingSelector
|
||||
upstreamNodes={upstreamNodes}
|
||||
nodeTypes={mockNodeTypes}
|
||||
/>
|
||||
);
|
||||
|
||||
expect(screen.getByText('HTTP')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('应该生成正确的表达式', () => {
|
||||
const onChange = jest.fn();
|
||||
|
||||
render(
|
||||
<FieldMappingSelector
|
||||
upstreamNodes={upstreamNodes}
|
||||
onChange={onChange}
|
||||
/>
|
||||
);
|
||||
|
||||
// 选择字段
|
||||
fireEvent.change(screen.getByRole('combobox'), {
|
||||
target: { value: 'nodes.node1.output.email' },
|
||||
});
|
||||
|
||||
// 验证
|
||||
expect(onChange).toHaveBeenCalledWith('${nodes.node1.output.email}');
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 四、代码审查(Code Review)
|
||||
|
||||
### 4.1 审查清单
|
||||
|
||||
**功能性**:
|
||||
- [ ] 代码实现了需求
|
||||
- [ ] 边界情况已处理
|
||||
- [ ] 错误处理完善
|
||||
|
||||
**代码质量**:
|
||||
- [ ] 命名清晰易懂
|
||||
- [ ] 逻辑简洁
|
||||
- [ ] 没有重复代码
|
||||
- [ ] 注释充分
|
||||
|
||||
**性能**:
|
||||
- [ ] 没有明显的性能问题
|
||||
- [ ] 数据库查询优化
|
||||
- [ ] 缓存使用合理
|
||||
|
||||
**安全**:
|
||||
- [ ] 输入验证
|
||||
- [ ] SQL注入防护
|
||||
- [ ] XSS防护
|
||||
|
||||
**测试**:
|
||||
- [ ] 有单元测试
|
||||
- [ ] 测试覆盖核心逻辑
|
||||
- [ ] 测试通过
|
||||
|
||||
### 4.2 审查注释规范
|
||||
|
||||
```java
|
||||
// ✅ 建设性建议
|
||||
// 建议:这里可以使用Optional避免空指针
|
||||
if (user != null) {
|
||||
return user.getName();
|
||||
}
|
||||
|
||||
// 可以改为:
|
||||
return Optional.ofNullable(user)
|
||||
.map(User::getName)
|
||||
.orElse("Unknown");
|
||||
|
||||
// ✅ 指出问题
|
||||
// 问题:这里有SQL注入风险
|
||||
String sql = "SELECT * FROM users WHERE name = '" + userName + "'";
|
||||
|
||||
// 应该使用:
|
||||
String sql = "SELECT * FROM users WHERE name = ?";
|
||||
|
||||
// ❌ 不要这样
|
||||
// 这代码写得太烂了
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 五、文档规范
|
||||
|
||||
### 5.1 README.md
|
||||
|
||||
```markdown
|
||||
# 工作流平台
|
||||
|
||||
## 快速开始
|
||||
|
||||
### 环境要求
|
||||
- JDK 17+
|
||||
- Node.js 18+
|
||||
- PostgreSQL 15+
|
||||
- Redis 7+
|
||||
|
||||
### 本地开发
|
||||
|
||||
1. 克隆代码
|
||||
\`\`\`bash
|
||||
git clone https://github.com/yourorg/workflow-platform.git
|
||||
cd workflow-platform
|
||||
\`\`\`
|
||||
|
||||
2. 启动后端
|
||||
\`\`\`bash
|
||||
cd backend
|
||||
./mvnw spring-boot:run
|
||||
\`\`\`
|
||||
|
||||
3. 启动前端
|
||||
\`\`\`bash
|
||||
cd frontend
|
||||
npm install
|
||||
npm run dev
|
||||
\`\`\`
|
||||
|
||||
4. 访问
|
||||
http://localhost:3000
|
||||
|
||||
## 项目结构
|
||||
...
|
||||
|
||||
## 开发文档
|
||||
- [架构总览](docs/01-架构总览.md)
|
||||
- [后端技术设计](docs/02-后端技术设计.md)
|
||||
- [前端技术设计](docs/03-前端技术设计.md)
|
||||
```
|
||||
|
||||
### 5.2 API 文档
|
||||
|
||||
使用 Swagger/OpenAPI:
|
||||
|
||||
```java
|
||||
@RestController
|
||||
@RequestMapping("/api/workflows")
|
||||
@Tag(name = "工作流管理", description = "工作流的创建、更新、删除、执行")
|
||||
public class WorkflowController {
|
||||
|
||||
@PostMapping
|
||||
@Operation(summary = "创建工作流", description = "创建一个新的工作流定义")
|
||||
@ApiResponse(responseCode = "200", description = "创建成功")
|
||||
@ApiResponse(responseCode = "400", description = "参数错误")
|
||||
public ResponseEntity<WorkflowDefinition> createWorkflow(
|
||||
@RequestBody
|
||||
@io.swagger.v3.oas.annotations.parameters.RequestBody(
|
||||
description = "工作流定义",
|
||||
required = true
|
||||
)
|
||||
WorkflowDefinition workflow
|
||||
) {
|
||||
// ...
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
访问:http://localhost:8080/swagger-ui.html
|
||||
|
||||
---
|
||||
|
||||
## 六、部署规范
|
||||
|
||||
### 6.1 环境配置
|
||||
|
||||
```yaml
|
||||
# .env.example(提交到代码库)
|
||||
SPRING_DATASOURCE_URL=jdbc:postgresql://localhost:5432/workflow_db
|
||||
SPRING_DATASOURCE_USERNAME=postgres
|
||||
SPRING_DATASOURCE_PASSWORD=
|
||||
|
||||
REDIS_HOST=localhost
|
||||
REDIS_PORT=6379
|
||||
|
||||
SMTP_HOST=smtp.example.com
|
||||
SMTP_PASSWORD=
|
||||
|
||||
# .env(不提交,本地使用)
|
||||
SPRING_DATASOURCE_PASSWORD=your_password
|
||||
SMTP_PASSWORD=your_smtp_password
|
||||
```
|
||||
|
||||
### 6.2 版本号
|
||||
|
||||
使用语义化版本:`MAJOR.MINOR.PATCH`
|
||||
|
||||
```
|
||||
1.0.0 - 第一个正式版本
|
||||
1.1.0 - 新增功能(向后兼容)
|
||||
1.1.1 - Bug修复
|
||||
2.0.0 - 重大变更(不向后兼容)
|
||||
```
|
||||
|
||||
### 6.3 发布流程
|
||||
|
||||
```bash
|
||||
# 1. 更新版本号
|
||||
# pom.xml: <version>1.1.0</version>
|
||||
# package.json: "version": "1.1.0"
|
||||
|
||||
# 2. 更新 CHANGELOG.md
|
||||
git add CHANGELOG.md
|
||||
git commit -m "docs: 更新版本到 1.1.0"
|
||||
|
||||
# 3. 打标签
|
||||
git tag -a v1.1.0 -m "Release v1.1.0"
|
||||
git push origin v1.1.0
|
||||
|
||||
# 4. 构建
|
||||
docker build -t workflow-platform:1.1.0 .
|
||||
|
||||
# 5. 部署
|
||||
kubectl apply -f k8s/deployment.yaml
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 七、故障排查
|
||||
|
||||
### 7.1 日志级别配置
|
||||
|
||||
```yaml
|
||||
# application-dev.yml
|
||||
logging:
|
||||
level:
|
||||
root: INFO
|
||||
com.workflow: DEBUG
|
||||
org.flowable: DEBUG
|
||||
|
||||
# application-prod.yml
|
||||
logging:
|
||||
level:
|
||||
root: WARN
|
||||
com.workflow: INFO
|
||||
org.flowable: WARN
|
||||
```
|
||||
|
||||
### 7.2 常见问题
|
||||
|
||||
**问题1:表达式解析失败**
|
||||
```
|
||||
错误:ExpressionEvaluationException: 表达式解析失败
|
||||
原因:上游节点输出结构与预期不符
|
||||
解决:检查 outputSchema 定义,查看实际输出
|
||||
```
|
||||
|
||||
**问题2:工作流执行卡住**
|
||||
```
|
||||
错误:流程实例一直处于 running 状态
|
||||
原因:某个节点执行超时或死锁
|
||||
解决:查询 act_ru_execution 表,找到卡住的节点
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
**总结**:以上规范是团队协作的基础,请严格遵守。如有疑问,请在团队会议上讨论。
|
||||
|
||||
97
docs/99-最终修正落地方案.md
Normal file
97
docs/99-最终修正落地方案.md
Normal file
@ -0,0 +1,97 @@
|
||||
# 最终修正落地方案(PM + 架构联合稿)
|
||||
|
||||
版本: v1.0
|
||||
适用范围: 替代/修正 docs/01、docs/02、docs/03 中的关键设计,统一数据库为 MySQL 8.0,并固化 MVP 可落地路径。
|
||||
|
||||
---
|
||||
|
||||
一、产品与范围(MVP)
|
||||
- 目标:可视化工作流平台,支持编辑器(ReactFlow)、节点配置(动态表单/字段映射/表达式)、执行、审批。
|
||||
- 明确不做(MVP):并行/子流程、定时/Webhook 触发、版本管理、多租户、复杂鉴权/监控、插件市场、AI 辅助。
|
||||
- 闭环路径:前端构建 JSON → 后端转 BPMN(含条件分支)部署到 Flowable → 同步执行(审批点暂停并恢复)→ 执行/日志/任务可查询。
|
||||
|
||||
二、关键技术决策(统一口径)
|
||||
- 数据库:MySQL 8.0(JSON 类型用于 definition/fields/output_schema/input/output 等),字符集 utf8mb4。
|
||||
- 表达式:仅采用 Jakarta EL (JUEL);Map-only 访问,禁用类/方法反射调用;前端不直接执行表达式,仅做占位/静态提示(第二期可加后端沙箱预览)。
|
||||
- 执行:MVP 同步执行;Flowable 全局异步执行器关闭(async-executor-activate=false);审批链路天然异步(等待用户完成任务)。
|
||||
- 条件分支:JSON 模型以 edge.condition(JUEL 表达式)表示;转换层生成 ExclusiveGateway + 条件 SequenceFlow。
|
||||
- 节点扩展:Spring 插件化(WorkflowNode 接口 + NodeTypeMetadata + outputSchema),NodeTypeRegistry 负责注册/查询。
|
||||
- HTTP 客户端:WebClient(Spring WebFlux),统一超时与重试策略;不混用 MVC + WebFlux。
|
||||
|
||||
三、架构总览(修正摘要)
|
||||
- 后端:Spring Boot 3.2 + Flowable 7.0.1 + MySQL 8.0 + Redis 7 + Jakarta EL;REST API(/api/workflows、/api/node-types、/api/tasks)。
|
||||
- 前端:React 18 + TypeScript 5 + ReactFlow 11 + AntD 5 + Zustand + Axios;Vite 构建。
|
||||
- 部署:Docker Compose(mysql:8、redis:7、backend),Nginx 反向代理(生产)。
|
||||
|
||||
四、后端落地清单
|
||||
1) 依赖与配置
|
||||
- POM 仅保留 spring-boot-starter-webflux;移除 spring-boot-starter-web;添加 jakarta.el 依赖;数据库驱动改为 mysql-connector-j。
|
||||
- application.yml:
|
||||
- datasource.url=jdbc:mysql://.../workflow_db?useSSL=false&allowPublicKeyRetrieval=true&serverTimezone=UTC
|
||||
- driver=com.mysql.cj.jdbc.Driver
|
||||
- flowable.async-executor-activate=false(MVP 同步)
|
||||
|
||||
2) DDL(MySQL 8)
|
||||
- 将所有 JSONB 改为 JSON;去除 ::jsonb 强制类型转换;示例插入用合法 JSON 字符串。
|
||||
- 关键表:
|
||||
- workflow_definitions(definition JSON, flowable_* 索引列保留)
|
||||
- node_types(fields JSON, output_schema JSON)
|
||||
- workflow_executions(input JSON)
|
||||
- node_execution_logs(input JSON, output JSON)
|
||||
|
||||
3) Flowable 集成修正
|
||||
- ServiceTask 使用 delegateExpression("${genericNodeExecutor}"),由 Spring 托管,保证 @Autowired 生效。
|
||||
- GenericNodeExecutor 从 execution.getCurrentFlowElement() 的 FieldExtension 读取 nodeType/nodeConfig,不再误用 execution 变量。
|
||||
- WorkflowConverter:
|
||||
- 添加 ExclusiveGateway 与条件 SequenceFlow;对 edge.condition 设置 conditionExpression。
|
||||
- 保留 start→first 与 last→end 的连线(无条件)。
|
||||
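条件边到 ExclusiveGateway + 条件 SequenceFlow 的转换可参考如下草图(仅作示意,基于 flowable-bpmn-model 的公开模型类;方法签名与入参的组织方式为假设):

```java
import org.flowable.bpmn.model.ExclusiveGateway;
import org.flowable.bpmn.model.Process;
import org.flowable.bpmn.model.SequenceFlow;

public class ConditionEdgeSketch {

    /** 在 source 节点后插入 ExclusiveGateway,把 edge.condition 写入条件 SequenceFlow,另一条作为默认分支 */
    static void addConditionalBranch(Process process, String sourceId,
                                     String targetId, String elseTargetId, String condition) {
        ExclusiveGateway gateway = new ExclusiveGateway();
        gateway.setId("gw_" + sourceId);
        process.addFlowElement(gateway);

        // source -> gateway:无条件连线
        SequenceFlow toGateway = new SequenceFlow(sourceId, gateway.getId());
        toGateway.setId("flow_" + sourceId + "_gw");
        process.addFlowElement(toGateway);

        // gateway -> target:JUEL 条件映射为 BPMN 的 conditionExpression
        SequenceFlow whenTrue = new SequenceFlow(gateway.getId(), targetId);
        whenTrue.setId("flow_gw_" + targetId);
        whenTrue.setConditionExpression(condition); // 例如 ${approved == true}
        process.addFlowElement(whenTrue);

        // gateway -> elseTarget:默认分支(条件都不满足时走这里)
        SequenceFlow otherwise = new SequenceFlow(gateway.getId(), elseTargetId);
        otherwise.setId("flow_gw_" + elseTargetId);
        process.addFlowElement(otherwise);
        gateway.setDefaultFlow(otherwise.getId());
    }
}
```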
|
||||
4) 表达式引擎(JUEL - Jakarta EL)
|
||||
- import jakarta.el.*;构建 StandardELContext;注入 nodes/workflow/env 三大命名空间。
|
||||
- 缓存表达式编译结果(LRU),无 ${} 的字符串走快路径直接返回。
|
||||
- 安全:黑名单不足以防御,辅以受限 ELResolver(仅 Map/property 解析),禁用方法调用。
|
||||
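快路径与按表达式文本缓存的基本结构可参考如下草图(仅作示意:RootMapResolver 只负责把顶层命名空间解析到 root Map,真正的方法调用/反射限制还需要更严格的 ELResolver 组合,此处从略):

```java
import jakarta.el.ELContext;
import jakarta.el.ELResolver;
import jakarta.el.ExpressionFactory;
import jakarta.el.StandardELContext;
import jakarta.el.ValueExpression;

import java.beans.FeatureDescriptor;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

public class ExpressionEngineSketch {

    private final ExpressionFactory factory = ExpressionFactory.newInstance();

    // 简化版 LRU:按表达式文本缓存编译结果(线程安全与容量策略从略)
    private final Map<String, ValueExpression> cache =
            new LinkedHashMap<>(256, 0.75f, true) {
                @Override
                protected boolean removeEldestEntry(Map.Entry<String, ValueExpression> eldest) {
                    return size() > 1000;
                }
            };

    /** root 中包含 nodes/workflow/env 三个顶层命名空间 */
    public Object evaluate(String raw, Map<String, Object> root) {
        // 快路径:不含 ${ 的普通字符串直接返回
        if (raw == null || !raw.contains("${")) {
            return raw;
        }
        StandardELContext context = new StandardELContext(factory);
        context.addELResolver(new RootMapResolver(root));
        ValueExpression ve = cache.computeIfAbsent(
                raw, expr -> factory.createValueExpression(context, expr, Object.class));
        return ve.getValue(context);
    }

    /** 只把顶层标识符(base == null)解析为 root Map 中的条目,其余交给标准 Map/属性解析器 */
    static class RootMapResolver extends ELResolver {
        private final Map<String, Object> root;

        RootMapResolver(Map<String, Object> root) {
            this.root = root;
        }

        @Override
        public Object getValue(ELContext ctx, Object base, Object property) {
            if (base == null && root.containsKey(String.valueOf(property))) {
                ctx.setPropertyResolved(true);
                return root.get(String.valueOf(property));
            }
            return null;
        }

        @Override public Class<?> getType(ELContext ctx, Object base, Object property) { return null; }
        @Override public void setValue(ELContext ctx, Object base, Object property, Object value) { }
        @Override public boolean isReadOnly(ELContext ctx, Object base, Object property) { return true; }
        @Override public Class<?> getCommonPropertyType(ELContext ctx, Object base) { return String.class; }
        public Iterator<FeatureDescriptor> getFeatureDescriptors(ELContext ctx, Object base) { return null; }
    }
}
```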
|
||||
5) HTTP 节点
|
||||
- 实现基于 WebClient:连接/响应超时;可选重试(限定幂等方法)。
|
||||
- 输入:url/method/headers/body/timeout;输出:statusCode/body/headers/elapsed。
|
||||
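HTTP 节点基于 WebClient 的最小实现草图如下(仅作示意,省略了重试与错误分类;输入/输出字段与上文约定对应):

```java
import org.springframework.http.HttpMethod;
import org.springframework.http.ResponseEntity;
import org.springframework.web.reactive.function.client.WebClient;

import java.time.Duration;
import java.util.Map;

public class HttpNodeSketch {

    private final WebClient webClient = WebClient.builder().build();

    /** 输入:url/method/headers/body/timeout;输出:statusCode/body/headers/elapsed */
    public Map<String, Object> execute(String url, String method,
                                       Map<String, String> headers, String body, long timeoutMs) {
        long start = System.currentTimeMillis();

        WebClient.RequestBodySpec spec = webClient.method(HttpMethod.valueOf(method)).uri(url);
        headers.forEach(spec::header);

        // MVP 同步执行:直接 block;统一响应超时
        ResponseEntity<String> resp = (body == null || body.isEmpty()
                ? spec.retrieve()
                : spec.bodyValue(body).retrieve())
                .toEntity(String.class)
                .timeout(Duration.ofMillis(timeoutMs))
                .block();

        return Map.of(
                "statusCode", resp.getStatusCode().value(),
                "body", resp.getBody() == null ? "" : resp.getBody(),
                "headers", resp.getHeaders().toSingleValueMap(),
                "elapsed", System.currentTimeMillis() - start);
    }
}
```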
|
||||
6) 观察性
|
||||
- 写入 node_execution_logs:status、input、output、错误、时长;为 execution_id、status、时间建立索引。
|
||||
- 审批与执行历史接口完成:执行详情包含各节点输入/输出摘要(注意脱敏)。
|
||||
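日志写入可参考如下草图(仅作示意,这里假设用 JdbcTemplate 直接落库,实际持久化方式以 docs/02 的实现为准;input/output 先序列化为 JSON 字符串写入 MySQL JSON 列):

```java
import org.springframework.jdbc.core.JdbcTemplate;

import java.sql.Timestamp;

public class NodeExecutionLogWriterSketch {

    private final JdbcTemplate jdbcTemplate;

    public NodeExecutionLogWriterSketch(JdbcTemplate jdbcTemplate) {
        this.jdbcTemplate = jdbcTemplate;
    }

    /** 按 docs/04 建议的列结构写入一条节点执行日志 */
    public void write(String executionId, String nodeId, String nodeType,
                      String inputJson, String outputJson, String status,
                      Timestamp startedAt, Timestamp endedAt, String errorMessage) {
        jdbcTemplate.update(
                "INSERT INTO node_execution_logs "
                        + "(execution_id, node_id, node_type, input, output, status, "
                        + " started_at, ended_at, duration_ms, error_message) "
                        + "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
                executionId, nodeId, nodeType, inputJson, outputJson, status,
                startedAt, endedAt, endedAt.getTime() - startedAt.getTime(), errorMessage);
    }
}
```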
|
||||
五、前端落地清单
|
||||
- 字段映射:基于上游节点的 outputSchema 构建树;表达式值统一存储为“完整 ${...}”字符串。
|
||||
- 表达式括号处理修复:使用正则 /^\$\{|\}$/g 去括号;选择时拼接为 `\${${val}}`。
|
||||
- Zustand:统一管理 nodes/edges/selectedNode/currentWorkflow;保存时构建规范化 JSON。
|
||||
- 性能:节点组件 memo、事件节流;避免使用不存在的 ReactFlow 属性。
|
||||
|
||||
六、API 与契约(MVP 必备)
|
||||
- /api/workflows:create/update/get/list/delete/execute、getExecutions、getExecutionDetail。
|
||||
- /api/node-types:list/get/by-category(返回 NodeTypeMetadata,含 fields/outputSchema)。
|
||||
- /api/tasks:list/detail/complete/form(审批流)。
|
||||
- 错误码:4xx(校验/鉴权)、5xx(系统错误);分页与筛选采用标准 query 参数。
|
||||
|
||||
七、验收标准(MVP)
|
||||
- 功能:
|
||||
- 场景 A:HTTP → 审批 → 邮件,端到端成功;审批暂停与恢复链路可见。
|
||||
- 场景 B:条件分支生效(200→邮件,否则→另一分支)。
|
||||
- 性能:10 节点链路 < 500ms(不含审批),表达式评估基准 > 10k QPS,画布 100+ 节点不卡顿。
|
||||
- 质量:核心模块单测覆盖 > 60%,E2E 场景跑通;关键事件与错误可观测。
|
||||
|
||||
八、两周 PoC 计划
|
||||
- Week 1:
|
||||
- 完成 JSON→BPMN(ServiceTask + UserTask + ExclusiveGateway + 条件边)
|
||||
- JUEL 上下文与缓存;HTTP 节点(WebClient)
|
||||
- NodeTypeRegistry 注册/查询;node_types/definitions 基础表
|
||||
- Week 2:
|
||||
- 审批任务 API(列表/表单/完成);执行历史与节点日志入库
|
||||
- 前端编辑器/配置面板/字段映射;审批中心
|
||||
- 样例工作流 A/B 端到端自测并出基准数据
|
||||
|
||||
九、风险与缓解
|
||||
- 转换层复杂:以最小节点集(HTTP/条件/变量/邮件/审批)完成 PoC,随后逐步扩展。
|
||||
- 表达式安全:Map-only ELResolver + 严格长度/关键字校验;第二期引入后端沙箱预览接口。
|
||||
- MySQL JSON 性能:关键查询加合适索引;大 JSON 字段只作存储与读取,不进行复杂查询。
|
||||
|
||||
十、后续演进(非 MVP)
|
||||
- 异步执行(队列/线程池/回调)、定时/Webhook 触发、循环/并行/子流程、工作流版本管理、更多节点库、监控大盘、权限与多租户、导入导出、AI 辅助。
|
||||
386
docs/README.md
Normal file
386
docs/README.md
Normal file
@ -0,0 +1,386 @@
|
||||
# 工作流平台技术文档
|
||||
|
||||
**项目**: 基于 Flowable 的可视化工作流平台
|
||||
**版本**: v1.0
|
||||
**更新日期**: 2025-01-12
|
||||
|
||||
---
|
||||
|
||||
## 📚 文档目录
|
||||
|
||||
### 核心设计文档
|
||||
|
||||
| 文档 | 说明 | 关键内容 |
|
||||
|------|------|----------|
|
||||
| [01-架构总览](./01-架构总览.md) | 系统整体架构设计 | 技术选型、系统架构、MVP范围、开发计划 |
|
||||
| [02-后端技术设计](./02-后端技术设计.md) | 后端详细实现 | 节点注册、表达式引擎、BPMN转换、REST API |
|
||||
| [03-前端技术设计](./03-前端技术设计.md) | 前端详细实现 | ReactFlow画布、字段映射选择器、状态管理 |
|
||||
| [04-数据模型设计](./04-数据模型设计.md) | 数据库设计 | 业务表结构、Flowable表说明、索引优化 |
|
||||
| [05-开发规范](./05-开发规范.md) | 代码和协作规范 | 命名规范、Git工作流、测试要求 |
|
||||
|
||||
---
|
||||
|
||||
## 🎯 快速导航
|
||||
|
||||
### 我想了解...
|
||||
|
||||
**整体架构**
|
||||
- 👉 先看 [01-架构总览](./01-架构总览.md)
|
||||
- 了解技术选型、系统架构、核心数据流
|
||||
|
||||
**后端开发**
|
||||
- 👉 看 [02-后端技术设计](./02-后端技术设计.md)
|
||||
- 节点如何注册?表达式如何解析?JSON如何转BPMN?
|
||||
|
||||
**前端开发**
|
||||
- 👉 看 [03-前端技术设计](./03-前端技术设计.md)
|
||||
- ReactFlow如何使用?字段映射选择器如何实现?
|
||||
|
||||
**数据库设计**
|
||||
- 👉 看 [04-数据模型设计](./04-数据模型设计.md)
|
||||
- 有哪些表?为什么用JSONB?如何优化查询?
|
||||
|
||||
**编码规范**
|
||||
- 👉 看 [05-开发规范](./05-开发规范.md)
|
||||
- 如何命名?如何提交代码?如何写测试?
|
||||
|
||||
---
|
||||
|
||||
## 🔑 核心设计要点
|
||||
|
||||
### 1. 为什么选择 Flowable?
|
||||
|
||||
```
|
||||
✅ 开源版功能完整(不需要购买企业版)
|
||||
✅ 内置审批能力(User Task)
|
||||
✅ Spring Boot 集成简单
|
||||
✅ 国内资料多,社区活跃
|
||||
✅ 支持 BPMN 2.0 标准
|
||||
|
||||
vs Camunda:
|
||||
- Flowable 表单引擎更强
|
||||
- Flowable 开源版更完整
|
||||
- Camunda 性能略优但差距不大
|
||||
|
||||
vs Conductor:
|
||||
- Conductor 没有审批能力
|
||||
- Conductor 更轻量但功能少
|
||||
- 如果不需要审批,Conductor 是更好选择
|
||||
```
|
||||
|
||||
### 2. 核心技术难点与解决方案
|
||||
|
||||
#### 难点1:前端如何知道上游节点输出了什么?
|
||||
|
||||
**解决方案**:使用静态 `outputSchema`(JSON Schema)
|
||||
|
||||
```typescript
|
||||
// 每个节点类型定义输出结构
|
||||
const httpRequestMetadata = {
|
||||
id: 'http_request',
|
||||
outputSchema: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
statusCode: { type: 'number' },
|
||||
body: { type: 'object' },
|
||||
headers: { type: 'object' },
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// 前端根据 schema 构建字段树
|
||||
// 用户可以选择: nodes.httpRequest.output.body.email
|
||||
```
|
||||
|
||||
**优点**:
|
||||
- ✅ 快速,不需要执行节点
|
||||
- ✅ 类型安全
|
||||
- ✅ 支持自动补全
|
||||
|
||||
**缺点**:
|
||||
- ⚠️ 如果实际输出与 schema 不符,运行时才会发现
|
||||
|
||||
#### 难点2:如何实现字段映射选择器?
|
||||
|
||||
**核心组件**:`FieldMappingSelector.tsx`
|
||||
|
||||
```tsx
|
||||
// 1. 计算上游节点
|
||||
const upstreamNodes = edges
|
||||
.filter(edge => edge.target === currentNode.id)
|
||||
.map(edge => nodes.find(n => n.id === edge.source));
|
||||
|
||||
// 2. 根据 outputSchema 构建字段树
|
||||
const fieldTree = upstreamNodes.map(node => ({
|
||||
title: node.data.name,
|
||||
children: buildFieldTree(nodeType.outputSchema.properties, `nodes.${node.id}.output`)
|
||||
}));
|
||||
|
||||
// 3. 用户选择字段,生成表达式
|
||||
// 用户选择: nodes.httpRequest.output.body.email
|
||||
// 生成表达式: ${nodes.httpRequest.output.body.email}
|
||||
```
|
||||
|
||||
#### 难点3:表达式解析性能
|
||||
|
||||
**解决方案**:
|
||||
```java
|
||||
// 1. 使用 JUEL 而不是完整的 JavaScript(性能更好)
|
||||
// 2. 表达式编译结果缓存
|
||||
private final Map<String, ValueExpression> expressionCache = new ConcurrentHashMap<>();
|
||||
|
||||
// 3. 快速路径:无表达式直接返回
|
||||
if (!expression.contains("${")) {
|
||||
return expression;
|
||||
}
|
||||
```
|
||||
|
||||
**性能测试结果**:
|
||||
- JUEL: ~50000 QPS
|
||||
- GraalVM JS: ~2000 QPS
|
||||
- 结论:使用 JUEL,性能足够
|
||||
|
||||
#### 难点4:工作流定义格式
|
||||
|
||||
**决策**:用户层面使用 JSON,内部转换为 BPMN XML
|
||||
|
||||
```
|
||||
前端 (JSON) ←→ 后端转换层 ←→ Flowable (BPMN XML)
|
||||
|
||||
理由:
|
||||
✅ JSON 对前端友好
|
||||
✅ JSON 易于版本控制
|
||||
✅ BPMN 是 Flowable 原生格式
|
||||
✅ 分层清晰,职责明确
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 📊 技术架构图
|
||||
|
||||
### 整体架构
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────┐
|
||||
│ 前端 (React + ReactFlow) │
|
||||
│ - 可视化编辑器 │
|
||||
│ - 节点配置面板 │
|
||||
│ - 字段映射选择器 ⭐⭐⭐ │
|
||||
└──────────────┬──────────────────────────┘
|
||||
│ REST API
|
||||
┌──────────────▼──────────────────────────┐
|
||||
│ Spring Boot 应用 │
|
||||
│ ┌────────────────────────────────────┐ │
|
||||
│ │ REST API 层 │ │
|
||||
│ └────────────────────────────────────┘ │
|
||||
│ ┌────────────────────────────────────┐ │
|
||||
│ │ 业务逻辑层 │ │
|
||||
│ │ - NodeTypeRegistry (节点注册) │ │
|
||||
│ │ - ExpressionEngine (表达式解析)⭐ │ │
|
||||
│ │ - WorkflowConverter (JSON→BPMN)⭐ │ │
|
||||
│ └────────────────────────────────────┘ │
|
||||
│ ┌────────────────────────────────────┐ │
|
||||
│ │ Flowable Engine │ │
|
||||
│ │ - 流程执行 │ │
|
||||
│ │ - 任务管理 │ │
|
||||
│ │ - 历史记录 │ │
|
||||
│ └────────────────────────────────────┘ │
|
||||
└──────────────┬──────────────────────────┘
|
||||
│
|
||||
┌──────────────▼──────────────────────────┐
|
||||
│ PostgreSQL │
|
||||
│ - 业务表 (workflow_definitions等) │
|
||||
│ - Flowable表 (ACT_*) │
|
||||
└─────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
### 核心数据流:工作流执行
|
||||
|
||||
```
|
||||
1. 用户点击"执行"
|
||||
↓
|
||||
2. 前端调用 POST /api/workflows/{id}/execute
|
||||
↓
|
||||
3. 后端初始化执行上下文:
|
||||
{
|
||||
"workflow": { "input": {...} },
|
||||
"nodes": {}, // 节点输出将保存在这里
|
||||
"env": {...}
|
||||
}
|
||||
↓
|
||||
4. Flowable 启动流程实例
|
||||
↓
|
||||
5. 按拓扑顺序执行节点:
|
||||
a. ExpressionEngine 解析表达式
|
||||
b. 调用节点实现类执行
|
||||
c. 保存输出到 nodes.{nodeId}.output
|
||||
↓
|
||||
6. 节点间数据通过表达式传递:
|
||||
${nodes.node1.output.body.email}
|
||||
↓
|
||||
7. 遇到 User Task(审批)时暂停
|
||||
↓
|
||||
8. 审批完成后继续执行
|
||||
↓
|
||||
9. 流程结束
|
||||
```
|
||||
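对应上面第 5 步,GenericNodeExecutor 读取节点配置并把输出写入 nodes.{nodeId}.output 的最小草图如下(仅作示意,省略了按 nodeType 查找节点实现与表达式解析;nodeType/nodeConfig 字段名取自 docs/99 的约定):

```java
import org.flowable.bpmn.model.FieldExtension;
import org.flowable.bpmn.model.ServiceTask;
import org.flowable.engine.delegate.DelegateExecution;
import org.flowable.engine.delegate.JavaDelegate;

import java.util.HashMap;
import java.util.Map;

public class GenericNodeExecutorSketch implements JavaDelegate {

    @Override
    public void execute(DelegateExecution execution) {
        // 1. 从当前 ServiceTask 的 FieldExtension 读取节点类型与配置
        ServiceTask task = (ServiceTask) execution.getCurrentFlowElement();
        String nodeType = null;
        String nodeConfig = null;
        for (FieldExtension field : task.getFieldExtensions()) {
            if ("nodeType".equals(field.getFieldName())) nodeType = field.getStringValue();
            if ("nodeConfig".equals(field.getFieldName())) nodeConfig = field.getStringValue();
        }

        // 2. 省略:按 nodeType 找到节点实现,用 nodeConfig + 表达式解析结果执行,得到 output(此处用占位数据)
        Map<String, Object> output = Map.of("statusCode", 200);

        // 3. 保存到 nodes.{nodeId}.output,供下游表达式 ${nodes.xxx.output...} 引用
        @SuppressWarnings("unchecked")
        Map<String, Object> nodes = (Map<String, Object>) execution.getVariable("nodes");
        if (nodes == null) nodes = new HashMap<>();
        nodes.put(task.getId(), Map.of("output", output, "status", "success"));
        execution.setVariable("nodes", nodes);
    }
}
```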
|
||||
---
|
||||
|
||||
## 🚀 MVP 功能清单(第一期)
|
||||
|
||||
### 必须有的功能
|
||||
|
||||
**1. 工作流编辑器**
|
||||
- [x] 从左侧拖拽节点到画布
|
||||
- [x] 节点之间连线
|
||||
- [x] 删除节点和连线
|
||||
- [x] 保存工作流
|
||||
|
||||
**2. 节点配置面板**
|
||||
- [x] 动态表单(根据节点类型生成)
|
||||
- [x] 字段映射选择器(TreeSelect 展示上游节点输出)⭐⭐⭐
|
||||
- [x] 表达式输入框
|
||||
|
||||
**3. 节点类型(5种)**
|
||||
- [x] HTTP Request
|
||||
- [x] Send Email
|
||||
- [x] Set Variable
|
||||
- [x] Condition (IF/ELSE)
|
||||
- [x] Approval (审批)
|
||||
|
||||
**4. 工作流执行**
|
||||
- [x] 手动触发执行
|
||||
- [x] 查看执行日志
|
||||
- [x] 查看节点输入/输出
|
||||
|
||||
**5. 审批功能**
|
||||
- [x] 待审批任务列表
|
||||
- [x] 审批表单
|
||||
- [x] 批准/拒绝
|
||||
|
||||
### 不做的功能(第二期)
|
||||
|
||||
- ❌ 定时触发(Cron)
|
||||
- ❌ Webhook 触发
|
||||
- ❌ 循环节点(forEach)
|
||||
- ❌ 并行执行
|
||||
- ❌ 工作流版本管理
|
||||
- ❌ 权限管理(只做基础认证)
|
||||
- ❌ 监控大盘
|
||||
|
||||
---
|
||||
|
||||
## 📅 开发计划(12周)
|
||||
|
||||
### Phase 1: 技术验证(Week 1-2)
|
||||
- Flowable PoC
|
||||
- 表达式引擎验证
|
||||
- ReactFlow 画布验证
|
||||
- 环境搭建
|
||||
|
||||
### Phase 2: 后端核心(Week 3-4)
|
||||
- 节点类型注册系统
|
||||
- 表达式引擎
|
||||
- JSON → BPMN 转换器
|
||||
- HTTP Request + Set Variable 节点
|
||||
|
||||
### Phase 3: 前端核心(Week 5-6)
|
||||
- ReactFlow 画布
|
||||
- 节点配置面板
|
||||
- **字段映射选择器**(最核心)
|
||||
|
||||
### Phase 4: 执行引擎(Week 7-8)
|
||||
- 工作流执行
|
||||
- 日志记录
|
||||
- 错误处理
|
||||
|
||||
### Phase 5: 审批功能(Week 9-10)
|
||||
- User Task 集成
|
||||
- 审批表单
|
||||
- 任务列表
|
||||
|
||||
### Phase 6: 测试上线(Week 11-12)
|
||||
- 集成测试
|
||||
- 性能测试
|
||||
- 部署上线
|
||||
|
||||
---
|
||||
|
||||
## 🎓 学习资源
|
||||
|
||||
### Flowable
|
||||
- 官方文档: https://flowable.com/open-source/docs/
|
||||
- GitHub: https://github.com/flowable/flowable-engine
|
||||
- 中文教程: https://www.cnblogs.com/catcher1994/tag/Flowable/
|
||||
|
||||
### ReactFlow
|
||||
- 官方文档: https://reactflow.dev/
|
||||
- 示例: https://reactflow.dev/examples
|
||||
|
||||
### 表达式引擎
|
||||
- JUEL 文档: https://juel.sourceforge.net/guide/index.html
|
||||
- GraalVM JS: https://www.graalvm.org/javascript/
|
||||
|
||||
---
|
||||
|
||||
## ❓ 常见问题
|
||||
|
||||
### Q1: 为什么不直接使用 N8N?
|
||||
|
||||
**A**: N8N 是 Node.js 技术栈,我们需要 Java 技术栈。另外,我们需要:
|
||||
- 与现有 Java 系统集成
|
||||
- 自定义审批流程
|
||||
- 完全掌控数据和安全
|
||||
|
||||
### Q2: Flowable 学习曲线陡峭吗?
|
||||
|
||||
**A**: 有一定学习曲线,但我们做了封装:
|
||||
- 用户不需要懂 BPMN(我们用 JSON)
|
||||
- 开发者只需要了解基础概念
|
||||
- 提供完整的文档和示例
|
||||
|
||||
### Q3: 性能够吗?
|
||||
|
||||
**A**: 经过验证:
|
||||
- 表达式解析: 50000+ QPS
|
||||
- Flowable: 1000+ TPS
|
||||
- 前端画布: 支持 100+ 节点不卡顿
|
||||
|
||||
### Q4: 如何扩展新的节点类型?
|
||||
|
||||
**A**: 非常简单:
|
||||
1. 创建节点实现类(实现 `WorkflowNode` 接口)
|
||||
2. 添加 `@Component` 注解
|
||||
3. 定义元数据(字段、输出结构)
|
||||
4. Spring 启动时自动注册
|
||||
5. 前端自动显示
|
||||
|
||||
```java
|
||||
@Component
|
||||
public class MyCustomNode implements WorkflowNode {
|
||||
@Override
|
||||
public NodeTypeMetadata getMetadata() {
|
||||
// 定义元数据
|
||||
}
|
||||
|
||||
@Override
|
||||
public NodeExecutionResult execute(NodeInput input, NodeExecutionContext context) {
|
||||
// 执行逻辑
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 📞 联系方式
|
||||
|
||||
- **技术负责人**: [您的名字]
|
||||
- **Email**: [您的邮箱]
|
||||
- **文档更新**: 如有问题或建议,请提 Issue
|
||||
|
||||
---
|
||||
|
||||
**最后更新**: 2025-01-12
|
||||
**文档版本**: v1.0
|
||||
|
||||
2
frontend/.env.example
Normal file
2
frontend/.env.example
Normal file
@ -0,0 +1,2 @@
|
||||
# 环境变量示例
|
||||
VITE_API_BASE_URL=http://localhost:8080
|
||||
14
frontend/.eslintrc.json
Normal file
14
frontend/.eslintrc.json
Normal file
@ -0,0 +1,14 @@
|
||||
{
|
||||
"env": { "browser": true, "es2021": true },
|
||||
"extends": ["eslint:recommended", "plugin:react/recommended", "plugin:react-hooks/recommended", "plugin:@typescript-eslint/recommended"],
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"parserOptions": { "ecmaVersion": 2021, "sourceType": "module" },
|
||||
"plugins": ["react", "@typescript-eslint"],
|
||||
"settings": { "react": { "version": "detect" } },
|
||||
"rules": {
|
||||
"react-hooks/exhaustive-deps": "off",
|
||||
"react/react-in-jsx-scope": "off",
|
||||
"@typescript-eslint/no-explicit-any": "off",
|
||||
"@typescript-eslint/no-unused-vars": ["error", { "argsIgnorePattern": "^_" }]
|
||||
}
|
||||
}
|
||||
5
frontend/.idea/.gitignore
vendored
Normal file
5
frontend/.idea/.gitignore
vendored
Normal file
@ -0,0 +1,5 @@
|
||||
# 默认忽略的文件
|
||||
/shelf/
|
||||
/workspace.xml
|
||||
# 基于编辑器的 HTTP 客户端请求
|
||||
/httpRequests/
|
||||
12
frontend/.idea/frontend.iml
Normal file
12
frontend/.idea/frontend.iml
Normal file
@ -0,0 +1,12 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<module type="WEB_MODULE" version="4">
|
||||
<component name="NewModuleRootManager">
|
||||
<content url="file://$MODULE_DIR$">
|
||||
<excludeFolder url="file://$MODULE_DIR$/.tmp" />
|
||||
<excludeFolder url="file://$MODULE_DIR$/temp" />
|
||||
<excludeFolder url="file://$MODULE_DIR$/tmp" />
|
||||
</content>
|
||||
<orderEntry type="inheritedJdk" />
|
||||
<orderEntry type="sourceFolder" forTests="false" />
|
||||
</component>
|
||||
</module>
|
||||
6
frontend/.idea/inspectionProfiles/Project_Default.xml
Normal file
6
frontend/.idea/inspectionProfiles/Project_Default.xml
Normal file
@ -0,0 +1,6 @@
|
||||
<component name="InspectionProjectProfileManager">
|
||||
<profile version="1.0">
|
||||
<option name="myName" value="Project Default" />
|
||||
<inspection_tool class="Eslint" enabled="true" level="WARNING" enabled_by_default="true" />
|
||||
</profile>
|
||||
</component>
|
||||
8
frontend/.idea/modules.xml
Normal file
8
frontend/.idea/modules.xml
Normal file
@ -0,0 +1,8 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="ProjectModuleManager">
|
||||
<modules>
|
||||
<module fileurl="file://$PROJECT_DIR$/.idea/frontend.iml" filepath="$PROJECT_DIR$/.idea/frontend.iml" />
|
||||
</modules>
|
||||
</component>
|
||||
</project>
|
||||
6
frontend/.idea/vcs.xml
Normal file
6
frontend/.idea/vcs.xml
Normal file
@ -0,0 +1,6 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="VcsDirectoryMappings">
|
||||
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
|
||||
</component>
|
||||
</project>
|
||||
5
frontend/.prettierrc
Normal file
5
frontend/.prettierrc
Normal file
@ -0,0 +1,5 @@
|
||||
{
|
||||
"singleQuote": true,
|
||||
"semi": false,
|
||||
"printWidth": 100
|
||||
}
|
||||
33
frontend/README.md
Normal file
33
frontend/README.md
Normal file
@ -0,0 +1,33 @@
|
||||
# 前端应用(flowable-devops-frontend)
|
||||
|
||||
技术栈
|
||||
- React 18 + TypeScript + Vite
|
||||
- Ant Design 5、ReactFlow 11、Zustand、Axios、Day.js
|
||||
|
||||
安装与开发
|
||||
- 安装依赖:
|
||||
npm i
|
||||
- 开发启动:
|
||||
npm run dev
|
||||
- 构建生产包:
|
||||
npm run build
|
||||
- 预览本地构建:
|
||||
npm run preview
|
||||
|
||||
环境变量
|
||||
- 复制 .env.example 为 .env,并按需调整:
|
||||
VITE_API_BASE_URL=http://localhost:8080
|
||||
|
||||
与后端联调约定(关键)
|
||||
- JSON Schema:见仓库 docs/04-数据模型设计.md
|
||||
- 接口契约:见仓库 docs/05-API契约.md
|
||||
- 表达式:统一 ${...},仅 Map 属性访问,前端不执行表达式,仅存储与展示
|
||||
|
||||
目录结构(初始)
|
||||
- src/App.tsx:占位入口
|
||||
- src/main.tsx:应用挂载
|
||||
- src/index.css:全局样式占位
|
||||
|
||||
后续建议
|
||||
- 新增 src/api、src/store、src/components、src/pages 目录,与文档中页面/状态模块对齐
|
||||
- 引入 AntD 样式(按需加载或全量),并配置主题
|
||||
11
frontend/dev.log
Normal file
11
frontend/dev.log
Normal file
@ -0,0 +1,11 @@
|
||||
|
||||
> flowable-devops-frontend@0.1.0 dev
|
||||
> vite --host
|
||||
|
||||
|
||||
VITE v5.4.20 ready in 100 ms
|
||||
|
||||
➜ Local: http://localhost:3000/
|
||||
➜ Network: http://172.22.222.110:3000/
|
||||
➜ Network: http://192.168.1.118:3000/
|
||||
➜ press h + enter to show help
|
||||
1
frontend/dist/assets/index-CTkmAz9X.css
vendored
Normal file
1
frontend/dist/assets/index-CTkmAz9X.css
vendored
Normal file
File diff suppressed because one or more lines are too long
506
frontend/dist/assets/index-D9R4V7sH.js
vendored
Normal file
506
frontend/dist/assets/index-D9R4V7sH.js
vendored
Normal file
File diff suppressed because one or more lines are too long
13
frontend/dist/index.html
vendored
Normal file
13
frontend/dist/index.html
vendored
Normal file
@ -0,0 +1,13 @@
|
||||
<!doctype html>
|
||||
<html lang="zh-CN">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>Flowable DevOps - Frontend</title>
|
||||
<script type="module" crossorigin src="/assets/index-D9R4V7sH.js"></script>
|
||||
<link rel="stylesheet" crossorigin href="/assets/index-CTkmAz9X.css">
|
||||
</head>
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
</body>
|
||||
</html>
|
||||
12
frontend/index.html
Normal file
12
frontend/index.html
Normal file
@ -0,0 +1,12 @@
|
||||
<!doctype html>
|
||||
<html lang="zh-CN">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>Flowable DevOps - Frontend</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
<script type="module" src="/src/main.tsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
6625
frontend/package-lock.json
generated
Normal file
6625
frontend/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
39
frontend/package.json
Normal file
39
frontend/package.json
Normal file
@ -0,0 +1,39 @@
|
||||
{
|
||||
"name": "flowable-devops-frontend",
|
||||
"private": true,
|
||||
"version": "0.1.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "tsc -b && vite build",
|
||||
"preview": "vite preview --port 5173",
|
||||
"lint": "eslint .",
|
||||
"format": "prettier --write ."
|
||||
},
|
||||
"dependencies": {
|
||||
"@ant-design/icons": "^5.2.6",
|
||||
"@monaco-editor/react": "^4.6.0",
|
||||
"antd": "^5.12.0",
|
||||
"axios": "^1.6.2",
|
||||
"dayjs": "^1.11.10",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0",
|
||||
"react-router-dom": "^6.25.0",
|
||||
"reactflow": "^11.10.0",
|
||||
"zustand": "^4.4.7"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/react": "^18.2.0",
|
||||
"@types/react-dom": "^18.2.0",
|
||||
"@typescript-eslint/eslint-plugin": "^8.46.0",
|
||||
"@typescript-eslint/parser": "^8.46.0",
|
||||
"@vitejs/plugin-react": "^4.2.0",
|
||||
"ajv": "^8.12.0",
|
||||
"eslint": "^8.55.0",
|
||||
"eslint-plugin-react": "^7.37.5",
|
||||
"eslint-plugin-react-hooks": "^7.0.0",
|
||||
"prettier": "^3.6.2",
|
||||
"typescript": "^5.0.0",
|
||||
"vite": "^5.0.0"
|
||||
}
|
||||
}
|
||||
27
frontend/src/App.tsx
Normal file
27
frontend/src/App.tsx
Normal file
@ -0,0 +1,27 @@
|
||||
import React from 'react'
|
||||
import 'antd/dist/reset.css'
|
||||
import { ConfigProvider } from 'antd'
|
||||
import { BrowserRouter, Routes, Route, Navigate } from 'react-router-dom'
|
||||
import AppLayout from '@/components/layout/AppLayout'
|
||||
import WorkflowListPage from '@/pages/WorkflowListPage'
|
||||
import WorkflowEditorPage from '@/pages/WorkflowEditorPage'
|
||||
import ExecutionHistoryPage from '@/pages/ExecutionHistoryPage'
|
||||
import ApprovalCenterPage from '@/pages/ApprovalCenterPage'
|
||||
|
||||
export default function App() {
|
||||
return (
|
||||
<ConfigProvider theme={{ token: { colorPrimary: '#1677ff' } }}>
|
||||
<BrowserRouter>
|
||||
<AppLayout>
|
||||
<Routes>
|
||||
<Route path="/" element={<Navigate to="/workflows" replace />} />
|
||||
<Route path="/workflows" element={<WorkflowListPage />} />
|
||||
<Route path="/editor" element={<WorkflowEditorPage />} />
|
||||
<Route path="/executions" element={<ExecutionHistoryPage />} />
|
||||
<Route path="/approvals" element={<ApprovalCenterPage />} />
|
||||
</Routes>
|
||||
</AppLayout>
|
||||
</BrowserRouter>
|
||||
</ConfigProvider>
|
||||
)
|
||||
}
|
||||
37
frontend/src/api/http.ts
Normal file
37
frontend/src/api/http.ts
Normal file
@ -0,0 +1,37 @@
|
||||
import axios from 'axios'
|
||||
import { message } from 'antd'
|
||||
|
||||
const BASE_URL = import.meta.env.VITE_API_BASE_URL || 'http://localhost:8080'
|
||||
|
||||
export const api = axios.create({
|
||||
baseURL: BASE_URL,
|
||||
timeout: 30000,
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
})
|
||||
|
||||
api.interceptors.request.use((config) => {
|
||||
const token = localStorage.getItem('token')
|
||||
if (token) {
|
||||
config.headers = config.headers || {}
|
||||
config.headers.Authorization = `Bearer ${token}`
|
||||
}
|
||||
return config
|
||||
})
|
||||
|
||||
api.interceptors.response.use(
|
||||
(resp) => resp,
|
||||
(error) => {
|
||||
const status = error?.response?.status
|
||||
const msg = error?.response?.data?.message || error?.message || '请求失败'
|
||||
if (status === 401) {
|
||||
message.error('未授权或登录已过期')
|
||||
// 可选:跳转登录页
|
||||
// window.location.href = '/login'
|
||||
} else {
|
||||
message.error(msg)
|
||||
}
|
||||
return Promise.reject(error)
|
||||
}
|
||||
)
|
||||
17
frontend/src/api/nodeType.ts
Normal file
17
frontend/src/api/nodeType.ts
Normal file
@ -0,0 +1,17 @@
|
||||
import { api } from './http'
|
||||
import type { NodeTypeMetadata } from '@/types/node'
|
||||
|
||||
export const nodeTypeApi = {
|
||||
list: async (): Promise<NodeTypeMetadata[]> => {
|
||||
const res = await api.get('/api/node-types')
|
||||
return res.data
|
||||
},
|
||||
getById: async (typeId: string): Promise<NodeTypeMetadata> => {
|
||||
const res = await api.get(`/api/node-types/${typeId}`)
|
||||
return res.data
|
||||
},
|
||||
getByCategory: async (category: string): Promise<NodeTypeMetadata[]> => {
|
||||
const res = await api.get(`/api/node-types/category/${category}`)
|
||||
return res.data
|
||||
},
|
||||
}
|
||||
48
frontend/src/api/task.ts
Normal file
48
frontend/src/api/task.ts
Normal file
@ -0,0 +1,48 @@
|
||||
import { api } from './http'
|
||||
|
||||
export interface TaskInfo {
|
||||
id: string
|
||||
name: string
|
||||
assignee?: string
|
||||
processInstanceId: string
|
||||
createdAt?: string
|
||||
dueDate?: string | null
|
||||
}
|
||||
|
||||
export interface TaskDetail {
|
||||
id: string
|
||||
name: string
|
||||
assignee?: string
|
||||
processInstanceId: string
|
||||
variables?: Record<string, any>
|
||||
}
|
||||
|
||||
export interface TaskFormField {
|
||||
id: string
|
||||
label: string
|
||||
type: 'string' | 'number' | 'boolean'
|
||||
required?: boolean
|
||||
}
|
||||
|
||||
export interface TaskForm {
|
||||
taskId: string
|
||||
fields: TaskFormField[]
|
||||
}
|
||||
|
||||
export const taskApi = {
|
||||
list: async (assignee?: string, page = 1, size = 10): Promise<{ items: TaskInfo[]; page: number; size: number; total: number }> => {
|
||||
const res = await api.get('/api/tasks', { params: { assignee, page, size } })
|
||||
return res.data
|
||||
},
|
||||
detail: async (taskId: string): Promise<TaskDetail> => {
|
||||
const res = await api.get(`/api/tasks/${taskId}`)
|
||||
return res.data
|
||||
},
|
||||
getForm: async (taskId: string): Promise<TaskForm> => {
|
||||
const res = await api.get(`/api/tasks/${taskId}/form`)
|
||||
return res.data
|
||||
},
|
||||
complete: async (taskId: string, variables: Record<string, any>): Promise<void> => {
|
||||
await api.post(`/api/tasks/${taskId}/complete`, { variables })
|
||||
},
|
||||
}
|
||||
39
frontend/src/api/workflow.ts
Normal file
39
frontend/src/api/workflow.ts
Normal file
@ -0,0 +1,39 @@
import { api } from './http'
import type { WorkflowDefinition, WorkflowExecutionDetail, WorkflowExecutionRecord, WorkflowExecutionResult } from '@/types/workflow'

export const workflowApi = {
  create: async (wf: WorkflowDefinition): Promise<WorkflowDefinition> => {
    const res = await api.post('/api/workflows', wf)
    return res.data
  },
  update: async (id: string, wf: WorkflowDefinition): Promise<WorkflowDefinition> => {
    const res = await api.put(`/api/workflows/${id}`, wf)
    return res.data
  },
  save: async (wf: WorkflowDefinition): Promise<WorkflowDefinition> => {
    return wf.id ? workflowApi.update(wf.id, wf) : workflowApi.create(wf)
  },
  getById: async (id: string): Promise<WorkflowDefinition> => {
    const res = await api.get(`/api/workflows/${id}`)
    return res.data
  },
  list: async (status?: string, page = 1, size = 10): Promise<{ items: WorkflowDefinition[]; page: number; size: number; total: number }> => {
    const res = await api.get('/api/workflows', { params: { status, page, size } })
    return res.data
  },
  remove: async (id: string): Promise<void> => {
    await api.delete(`/api/workflows/${id}`)
  },
  execute: async (id: string, input: Record<string, any>): Promise<WorkflowExecutionResult> => {
    const res = await api.post(`/api/workflows/${id}/execute`, { input })
    return res.data
  },
  getExecutions: async (id: string, page = 1, size = 10): Promise<{ items: WorkflowExecutionRecord[]; page: number; size: number; total: number }> => {
    const res = await api.get(`/api/workflows/${id}/executions`, { params: { page, size } })
    return res.data
  },
  getExecutionDetail: async (executionId: string): Promise<WorkflowExecutionDetail> => {
    const res = await api.get(`/api/workflows/executions/${executionId}`)
    return res.data
  },
}
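A sketch of the save-then-execute round trip using workflowApi; the saveAndRun helper and the assumption that a saved WorkflowDefinition carries an id are illustrative, not taken from this commit:

import { workflowApi } from '@/api/workflow'
import type { WorkflowDefinition, WorkflowExecutionResult } from '@/types/workflow'

// Hypothetical round trip: save() picks create vs. update based on wf.id,
// then a single execution is triggered with the given input.
export async function saveAndRun(
  draft: WorkflowDefinition,
  input: Record<string, any>,
): Promise<WorkflowExecutionResult> {
  const saved = await workflowApi.save(draft)
  return workflowApi.execute(saved.id as string, input)
}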
10 frontend/src/components/common/CodeEditor.tsx Normal file
@ -0,0 +1,10 @@
import React from 'react'
import Monaco from '@monaco-editor/react'

export default function CodeEditor({ language = 'json', value, onChange, height = 220 }: { language?: string; value?: string; onChange?: (v?: string) => void; height?: number }) {
  return (
    <div style={{ border: '1px solid #f0f0f0' }}>
      <Monaco height={height} language={language} value={value} onChange={(v) => onChange?.(v || '')} options={{ minimap: { enabled: false } }} />
    </div>
  )
}
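CodeEditor is a thin controlled wrapper over @monaco-editor/react. A usage sketch with an assumed parent component (RequestBodyField is hypothetical):

import React, { useState } from 'react'
import CodeEditor from '@/components/common/CodeEditor'

// Hypothetical parent: keep the raw JSON text in state and parse it only on submit.
export function RequestBodyField() {
  const [body, setBody] = useState<string>('{\n  "email": ""\n}')
  return <CodeEditor language="json" height={200} value={body} onChange={(v) => setBody(v || '')} />
}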
10 frontend/src/components/common/Empty.tsx Normal file
@ -0,0 +1,10 @@
import React from 'react'
import { Empty as AntEmpty } from 'antd'

export default function Empty({ description = '暂无数据', height = 160 }: { description?: string; height?: number }) {
  return (
    <div style={{ height, display: 'flex', alignItems: 'center', justifyContent: 'center' }}>
      <AntEmpty description={description} />
    </div>
  )
}
6 frontend/src/components/common/Error.tsx Normal file
@ -0,0 +1,6 @@
import React from 'react'
import { Result } from 'antd'

export default function ErrorBlock({ message = '出错了', extra }: { message?: string; extra?: React.ReactNode }) {
  return <Result status="error" title={message} extra={extra} />
}
33 frontend/src/components/common/ExpressionInput.tsx Normal file
@ -0,0 +1,33 @@
import React, { useState } from 'react'
import { Form, Input, Space, Typography, Button } from 'antd'

function ensureExpr(v?: string) {
  if (!v) return ''
  const t = v.trim()
  if (t.startsWith('${') && t.endsWith('}')) return t
  return `\${${t}}`
}

export default function ExpressionInput({ name, label, initialValue }: { name: string; label: string; initialValue?: string }) {
  const [val, setVal] = useState<string>(initialValue || '')
  const apply = (tpl: string) => setVal((prev) => ensureExpr((prev || tpl)))
  return (
    <Form.Item name={name} label={label} initialValue={initialValue} getValueFromEvent={(e) => ensureExpr(e?.target?.value)}>
      <Space direction="vertical" style={{ width: '100%' }}>
        <Input
          placeholder="请输入表达式,如 ${nodes.n1.output.body.email}"
          value={val}
          onChange={(e) => setVal(e.target.value)}
          onBlur={(e) => setVal(ensureExpr(e.target.value))}
        />
        <Space size="small">
          <Typography.Text type="secondary" style={{ fontSize: 12 }}>快捷插入:</Typography.Text>
          <Button size="small" onClick={() => apply('nodes.')}>nodes.</Button>
          <Button size="small" onClick={() => apply('workflow.input.')}>workflow.input.</Button>
          <Button size="small" onClick={() => apply('env.')}>env.</Button>
        </Space>
        <Typography.Text type="secondary" style={{ fontSize: 12 }}>输入会自动规范为 ${'{...}'} 格式(仅做静态预览,不在前端求值)</Typography.Text>
      </Space>
    </Form.Item>
  )
}
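The central behavior above is ensureExpr: any plain path the user types is wrapped as a ${...} expression on blur and when the Form reads the value, while values already wrapped pass through unchanged. A usage sketch inside an antd Form, assuming a hypothetical HttpNodeConfigForm and an English label that are not part of this commit:

import React from 'react'
import { Form } from 'antd'
import ExpressionInput from '@/components/common/ExpressionInput'

// Hypothetical node-config form: typing 'workflow.input.baseUrl' is normalized
// to '${workflow.input.baseUrl}' before it reaches the form values.
export function HttpNodeConfigForm() {
  return (
    <Form layout="vertical">
      <ExpressionInput name="url" label="Request URL" initialValue="${workflow.input.baseUrl}" />
    </Form>
  )
}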
85 frontend/src/components/common/FieldMappingSelector.tsx Normal file
@ -0,0 +1,85 @@
import React, { useMemo } from 'react'
import { Button, Space, TreeSelect, Typography } from 'antd'
import type { Node } from 'reactflow'
import type { NodeTypeMetadata } from '@/types/node'

function buildTreeFromSchema(schema: any, prefix: string, depth = 0): any[] {
  if (!schema || depth > 3) return []
  const props = schema.properties || {}
  return Object.keys(props).map((key) => {
    const cur = props[key]
    const value = `${prefix}.${key}`
    if (cur?.type === 'object') {
      return {
        title: `${key} (object)`,
        value,
        children: buildTreeFromSchema(cur, value, depth + 1),
      }
    }
    if (cur?.type === 'array') {
      const children: any[] = []
      if (cur?.items?.properties) {
        children.push({
          title: `[0] (item)`,
          value: `${value}[0]`,
          children: buildTreeFromSchema(cur.items, `${value}[0]`, depth + 1),
        })
      }
      children.push({ title: `[*] (all)`, value: `${value}[*]` })
      return { title: `${key} (array)`, value, selectable: false, children }
    }
    return { title: key, value }
  })
}

export default function FieldMappingSelector({
  upstreamNodes,
  nodeTypes,
  value,
  onChange,
}: {
  upstreamNodes: Node[]
  nodeTypes: Record<string, NodeTypeMetadata | any>
  value?: string
  onChange?: (v: string) => void
}) {
  const treeData = useMemo(() => {
    return upstreamNodes.map((n) => {
      const meta = nodeTypes[n.data?.type as string]
      if (!meta?.outputSchema) return null
      return {
        title: n.data?.name || n.id,
        value: `nodes.${n.id}`,
        selectable: false,
        children: buildTreeFromSchema(meta.outputSchema, `nodes.${n.id}.output`),
      }
    }).filter(Boolean) as any[]
  }, [upstreamNodes, nodeTypes])

  const plain = value?.replace(/^\$\{|\}$/g, '')
  const copy = async () => {
    if (!plain) return
    try { await navigator.clipboard.writeText(plain) } catch { /* ignore */ }
  }
  return (
    <Space direction="vertical" style={{ width: '100%' }} size={4}>
      <TreeSelect
        style={{ width: '100%' }}
        value={plain}
        treeData={treeData}
        showSearch
        treeLine
        treeDefaultExpandAll
        placeholder="选择上游字段(支持数组 [0] 或 [*])"
        onChange={(val) => onChange?.(val ? `\${${val}}` : '')}
        allowClear
      />
      <Space size={6}>
        <Typography.Text type="secondary" style={{ fontSize: 12 }}>当前路径:</Typography.Text>
        <code style={{ fontSize: 12, background: '#f5f5f5', padding: '0 4px', borderRadius: 4 }}>{plain || '-'}</code>
        <Button size="small" disabled={!plain} onClick={copy}>复制</Button>
        <Button size="small" disabled={!plain} onClick={async () => { try { await navigator.clipboard.writeText(`\${${plain}}`) } catch {} }}>复制 ${'{...}'}</Button>
      </Space>
    </Space>
  )
}
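FieldMappingSelector turns each upstream node's outputSchema (a JSON-Schema-like object) into TreeSelect options rooted at nodes.<id>.output, and hands back the selection wrapped as a ${...} expression. A minimal sketch of the inputs it expects; the node, schema, and MappingField wrapper below are illustrative assumptions, not part of this commit:

import React from 'react'
import type { Node } from 'reactflow'
import FieldMappingSelector from '@/components/common/FieldMappingSelector'

// Hypothetical inputs: one upstream HTTP node plus a matching outputSchema keyed by node type.
const upstreamNodes: Node[] = [
  { id: 'n1', position: { x: 0, y: 0 }, data: { type: 'http', name: 'Fetch user' } },
]
const nodeTypes = {
  http: {
    outputSchema: {
      properties: {
        body: { type: 'object', properties: { email: { type: 'string' } } },
      },
    },
  },
}

// Picking body.email in the tree calls onChange with '${nodes.n1.output.body.email}'.
export function MappingField({ value, onChange }: { value?: string; onChange?: (v: string) => void }) {
  return <FieldMappingSelector upstreamNodes={upstreamNodes} nodeTypes={nodeTypes} value={value} onChange={onChange} />
}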
21 frontend/src/components/common/JsonView.tsx Normal file
@ -0,0 +1,21 @@
import React from 'react'
import { Button, Space, Typography } from 'antd'

export default function JsonView({ data, height = 240 }: { data: any; height?: number }) {
  const copy = async () => {
    try {
      await navigator.clipboard.writeText(JSON.stringify(data, null, 2))
    } catch {
      // ignore
    }
  }
  return (
    <div style={{ border: '1px solid #f0f0f0', padding: 8 }}>
      <Space style={{ marginBottom: 8 }}>
        <Typography.Text type="secondary">JSON 视图</Typography.Text>
        <Button size="small" onClick={copy}>复制</Button>
      </Space>
      <pre style={{ margin: 0, height, overflow: 'auto' }}>{JSON.stringify(data, null, 2)}</pre>
    </div>
  )
}
32 frontend/src/components/common/KeyValueEditor.tsx Normal file
@ -0,0 +1,32 @@
import React from 'react'
import { Button, Input, Space } from 'antd'

export default function KeyValueEditor({ value = {}, onChange }: { value?: Record<string, string>; onChange?: (v: Record<string, string>) => void }) {
  const entries = Object.entries(value)
  const update = (k: string, newK: string, newV: string) => {
    const next: Record<string, string> = {}
    entries.forEach(([key, val]) => {
      if (key === k) next[newK] = newV
      else next[key] = val
    })
    onChange?.(next)
  }
  const add = () => onChange?.({ ...value, [`key_${entries.length + 1}`]: '' })
  const remove = (k: string) => {
    const next = { ...value }
    delete next[k]
    onChange?.(next)
  }
  return (
    <Space direction="vertical" style={{ width: '100%' }}>
      {entries.map(([k, v]) => (
        <Space key={k} style={{ display: 'flex' }}>
          <Input placeholder="Key" value={k} onChange={(e) => update(k, e.target.value, v)} style={{ width: 160 }} />
          <Input placeholder="Value" value={v} onChange={(e) => update(k, k, e.target.value)} style={{ flex: 1 }} />
          <Button danger onClick={() => remove(k)}>删除</Button>
        </Space>
      ))}
      <Button onClick={add}>新增</Button>
    </Space>
  )
}
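KeyValueEditor is fully controlled: every key or value edit rebuilds the record and hands it back through onChange. A usage sketch with assumed parent state (HeadersField is hypothetical):

import React, { useState } from 'react'
import KeyValueEditor from '@/components/common/KeyValueEditor'

// Hypothetical header editor backed by a plain Record<string, string>.
export function HeadersField() {
  const [headers, setHeaders] = useState<Record<string, string>>({ 'Content-Type': 'application/json' })
  return <KeyValueEditor value={headers} onChange={setHeaders} />
}

Since each row uses the current key as its React key, renaming a key likely remounts that row, which is usually acceptable for small header or parameter maps.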
Some files were not shown because too many files have changed in this diff.