Add scheduled task (timer) management

dengqichen 2025-10-29 16:29:47 +08:00
parent 6eb4743176
commit 6d882dd6e3
55 changed files with 2868 additions and 295 deletions

View File

@ -64,6 +64,18 @@
<artifactId>spring-boot-starter-actuator</artifactId>
</dependency>
<!-- Redis -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
<!-- Quartz -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-quartz</artifactId>
</dependency>
<!-- Database -->
<dependency>
<groupId>com.mysql</groupId>

View File

@ -25,6 +25,30 @@ import java.util.List;
@Tag(name = "Git仓库分支管理", description = "Git仓库分支管理相关接口")
public class RepositoryBranchApiController extends BaseController<RepositoryBranch, RepositoryBranchDTO, Long, RepositoryBranchQuery> {
@Resource
private IRepositoryBranchService repositoryBranchService;
@Operation(summary = "同步Git分支", description = "支持三种同步模式1)只传externalSystemId-全量同步 2)传externalSystemId+repoGroupId-同步仓库组 3)传externalSystemId+repoGroupId+repoProjectId-同步单个项目")
@PostMapping("/sync")
public Response<Integer> sync(
@Parameter(description = "外部系统ID", required = true) @RequestParam Long externalSystemId,
@Parameter(description = "仓库组ID可选", required = false) @RequestParam(required = false) Long repoGroupId,
@Parameter(description = "仓库项目ID可选传此参数时repoGroupId必传", required = false) @RequestParam(required = false) Long repoProjectId
) {
Integer count;
if (repoProjectId != null) {
// 同步单个项目
count = repositoryBranchService.syncBranches(externalSystemId, repoGroupId, repoProjectId);
} else if (repoGroupId != null) {
// 同步仓库组
count = repositoryBranchService.syncBranches(externalSystemId, repoGroupId);
} else {
// 全量同步
count = repositoryBranchService.syncBranches(externalSystemId);
}
return Response.success(count);
}
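
For illustration, the three modes described above map directly onto the IRepositoryBranchService overloads introduced later in this commit; a minimal sketch with hypothetical IDs (not part of the committed code, imports omitted):

// Hypothetical IDs, shown only to illustrate the three dispatch paths.
Integer all = repositoryBranchService.syncBranches(1L);              // full sync of external system 1
Integer byGroup = repositoryBranchService.syncBranches(1L, 10L);     // every project in repo group 10
Integer byProject = repositoryBranchService.syncBranches(1L, 10L, 100L); // single project 100 (group id still required)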
@Override
protected void exportData(HttpServletResponse response, List<RepositoryBranchDTO> data) {

View File

@ -3,8 +3,13 @@ package com.qqchen.deploy.backend.deploy.api;
import com.qqchen.deploy.backend.deploy.entity.RepositoryGroup;
import com.qqchen.deploy.backend.deploy.dto.RepositoryGroupDTO;
import com.qqchen.deploy.backend.deploy.query.RepositoryGroupQuery;
import com.qqchen.deploy.backend.deploy.service.IRepositoryGroupService;
import com.qqchen.deploy.backend.framework.api.Response;
import com.qqchen.deploy.backend.framework.controller.BaseController;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.annotation.Resource;
import jakarta.servlet.http.HttpServletResponse;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.*;
@ -20,6 +25,17 @@ import java.util.List;
@Tag(name = "Git仓库组管理", description = "Git仓库组管理相关接口")
public class RepositoryGroupApiController extends BaseController<RepositoryGroup, RepositoryGroupDTO, Long, RepositoryGroupQuery> {
@Resource
private IRepositoryGroupService repositoryGroupService;
@Operation(summary = "同步Git仓库组", description = "同步指定外部系统的所有仓库组")
@PostMapping("/sync")
public Response<Integer> sync(
@Parameter(description = "外部系统ID", required = true) @RequestParam Long externalSystemId
) {
Integer count = repositoryGroupService.syncGroups(externalSystemId);
return Response.success(count);
}
@Override
protected void exportData(HttpServletResponse response, List<RepositoryGroupDTO> data) {

View File

@ -1,59 +0,0 @@
package com.qqchen.deploy.backend.deploy.api;
import com.qqchen.deploy.backend.deploy.dto.GitInstanceDTO;
import com.qqchen.deploy.backend.deploy.service.IRepositoryManagerService;
import com.qqchen.deploy.backend.framework.api.Response;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.*;
/**
* Git仓库管理API控制器
*/
@Slf4j
@RestController
@RequestMapping("/api/v1/repository-manager")
@Tag(name = "Git仓库管理", description = "Git仓库管理相关接口")
public class RepositoryManagerApiController {
@Resource
private IRepositoryManagerService repositoryManagerService;
@Operation(summary = "同步Git仓库组", description = "同步指定外部系统的所有仓库组")
@PostMapping("/{externalSystemId}/sync-groups")
public Response<Void> syncGroups(
@Parameter(description = "外部系统ID", required = true) @PathVariable Long externalSystemId
) {
repositoryManagerService.syncGroups(externalSystemId);
return Response.success();
}
@Operation(summary = "同步Git项目", description = "同步指定外部系统下指定仓库组的所有项目")
@PostMapping("/{externalSystemId}/groups/sync-projects")
public Response<Void> syncProjects(
@Parameter(description = "外部系统ID", required = true) @PathVariable Long externalSystemId
) {
repositoryManagerService.syncProjects(externalSystemId);
return Response.success();
}
@Operation(summary = "同步Git分支", description = "同步指定外部系统下指定项目的所有分支")
@PostMapping("/{externalSystemId}/projects/sync-branches")
public Response<Void> syncBranches(
@Parameter(description = "外部系统ID", required = true) @PathVariable Long externalSystemId
) {
repositoryManagerService.syncBranches(externalSystemId);
return Response.success();
}
@Operation(summary = "获取Git实例信息", description = "获取指定外部系统的Git实例信息包括仓库组、项目和分支的数量及最后同步时间")
@GetMapping("/{externalSystemId}/instance")
public Response<GitInstanceDTO> instance(
@Parameter(description = "外部系统ID", required = true) @PathVariable Long externalSystemId
) {
return Response.success(repositoryManagerService.instance(externalSystemId));
}
}

View File

@ -3,8 +3,13 @@ package com.qqchen.deploy.backend.deploy.api;
import com.qqchen.deploy.backend.deploy.entity.RepositoryProject;
import com.qqchen.deploy.backend.deploy.dto.RepositoryProjectDTO;
import com.qqchen.deploy.backend.deploy.query.RepositoryProjectQuery;
import com.qqchen.deploy.backend.deploy.service.IRepositoryProjectService;
import com.qqchen.deploy.backend.framework.api.Response;
import com.qqchen.deploy.backend.framework.controller.BaseController;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.annotation.Resource;
import jakarta.servlet.http.HttpServletResponse;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.*;
@ -20,6 +25,26 @@ import java.util.List;
@Tag(name = "Git仓库项目管理", description = "Git仓库项目管理相关接口")
public class RepositoryProjectApiController extends BaseController<RepositoryProject, RepositoryProjectDTO, Long, RepositoryProjectQuery> {
@Resource
private IRepositoryProjectService repositoryProjectService;
@Operation(summary = "同步Git项目", description = "支持两种同步模式1)只传externalSystemId-全量同步 2)传externalSystemId+repoGroupId-同步单个仓库组")
@PostMapping("/sync")
public Response<Integer> sync(
@Parameter(description = "外部系统ID", required = true) @RequestParam Long externalSystemId,
@Parameter(description = "仓库组ID可选", required = false) @RequestParam(required = false) Long repoGroupId
) {
Integer count;
if (repoGroupId != null) {
// 同步单个仓库组
count = repositoryProjectService.syncProjects(externalSystemId, repoGroupId);
} else {
// 全量同步
count = repositoryProjectService.syncProjects(externalSystemId);
}
return Response.success(count);
}
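
Likewise, the two project-sync modes correspond to the IRepositoryProjectService overloads; a hypothetical sketch (IDs are examples only):

// Hypothetical IDs, for illustration of the two dispatch paths.
Integer allProjects = repositoryProjectService.syncProjects(1L);      // full sync of external system 1
Integer groupProjects = repositoryProjectService.syncProjects(1L, 10L); // only repo group 10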
@Override
protected void exportData(HttpServletResponse response, List<RepositoryProjectDTO> data) {

View File

@ -33,5 +33,14 @@ public interface IRepositoryProjectRepository extends IBaseRepository<Repository
List<RepositoryProject> findByExternalSystemIdAndDeletedFalse(Long externalSystemId);
/**
* 根据外部系统ID和仓库组ID查找未删除的项目列表
*
* @param externalSystemId 外部系统ID
* @param repoGroupId 仓库组ID
* @return 项目列表
*/
List<RepositoryProject> findByExternalSystemIdAndRepoGroupIdAndDeletedFalse(Long externalSystemId, Long repoGroupId);
Optional<RepositoryProject> findByRepoProjectId(Long repoProjectId);
}
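
The new finder is a Spring Data JPA derived query; assuming the obvious field names on RepositoryProject, it resolves to roughly the JPQL below (illustrative only, not part of the commit):

// Illustrative JPQL equivalent of the derived method name:
// select p from RepositoryProject p
//  where p.externalSystemId = :externalSystemId
//    and p.repoGroupId = :repoGroupId
//    and p.deleted = false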

View File

@ -11,13 +11,32 @@ import com.qqchen.deploy.backend.framework.service.IBaseService;
public interface IRepositoryBranchService extends IBaseService<RepositoryBranch, RepositoryBranchDTO, RepositoryBranchQuery, Long> {
/**
* 同步指定外部系统下指定项目的分支
* 同步指定外部系统下所有分支全量同步
*
* @param externalSystemId 外部系统ID
* @return 同步的分支数量
*/
Integer syncBranches(Long externalSystemId);
/**
* 同步指定仓库组下所有项目的分支
*
* @param externalSystemId 外部系统ID
* @param repoGroupId 仓库组ID
* @return 同步的分支数量
*/
Integer syncBranches(Long externalSystemId, Long repoGroupId);
/**
* 同步指定项目的分支
*
* @param externalSystemId 外部系统ID
* @param repoGroupId 仓库组ID
* @param repoProjectId 仓库项目ID
* @return 同步的分支数量
*/
Integer syncBranches(Long externalSystemId, Long repoGroupId, Long repoProjectId);
/**
* 统计指定外部系统下的分支数量
*

View File

@ -13,7 +13,7 @@ import java.util.List;
public interface IRepositoryGroupService extends IBaseService<RepositoryGroup, RepositoryGroupDTO, RepositoryGroupQuery, Long> {
/**
* 同步指定外部系统的所有仓库组
* 同步指定外部系统的所有仓库组包含同步历史记录
*
* @param externalSystemId 外部系统ID
* @return 同步的仓库组数量

View File

@ -1,38 +0,0 @@
package com.qqchen.deploy.backend.deploy.service;
import com.qqchen.deploy.backend.deploy.dto.GitInstanceDTO;
/**
* Git管理服务接口
*/
public interface IRepositoryManagerService {
/**
* 同步Git组
*
* @param externalSystemId 外部系统ID
*/
void syncGroups(Long externalSystemId);
/**
* 同步Git项目
*
* @param externalSystemId 外部系统ID
*/
void syncProjects(Long externalSystemId);
/**
* 同步Git分支
*
* @param externalSystemId 外部系统ID
*/
void syncBranches(Long externalSystemId);
/**
* 获取Git实例信息
*
* @param externalSystemId 外部系统ID
* @return Git实例信息
*/
GitInstanceDTO instance(Long externalSystemId);
}

View File

@ -13,13 +13,22 @@ import java.util.List;
public interface IRepositoryProjectService extends IBaseService<RepositoryProject, RepositoryProjectDTO, RepositoryProjectQuery, Long> {
/**
* 同步指定外部系统下指定仓库组的项目
* 同步指定外部系统下所有项目全量同步
*
* @param externalSystemId 外部系统ID
* @return 同步的项目数量
*/
Integer syncProjects(Long externalSystemId);
/**
* 同步指定仓库组下的所有项目
*
* @param externalSystemId 外部系统ID
* @param repoGroupId 仓库组ID
* @return 同步的项目数量
*/
Integer syncProjects(Long externalSystemId, Long repoGroupId);
/**
* 统计指定外部系统下的项目数量
*

View File

@ -189,8 +189,32 @@ public class RepositoryBranchServiceImpl extends BaseServiceImpl<RepositoryBranc
}
@Override
@Transactional
@Transactional(rollbackFor = Exception.class)
public Integer syncBranches(Long externalSystemId) {
return doSyncBranches(externalSystemId, null, null);
}
@Override
@Transactional(rollbackFor = Exception.class)
public Integer syncBranches(Long externalSystemId, Long repoGroupId) {
return doSyncBranches(externalSystemId, repoGroupId, null);
}
@Override
@Transactional(rollbackFor = Exception.class)
public Integer syncBranches(Long externalSystemId, Long repoGroupId, Long repoProjectId) {
return doSyncBranches(externalSystemId, repoGroupId, repoProjectId);
}
/**
* 执行分支同步的核心方法
*
* @param externalSystemId 外部系统ID
* @param repoGroupId 仓库组ID可选
* @param repoProjectId 仓库项目ID可选
* @return 同步的分支数量
*/
private Integer doSyncBranches(Long externalSystemId, Long repoGroupId, Long repoProjectId) {
// 1. 创建同步历史记录
RepositorySyncHistoryDTO syncHistory = repositorySyncHistoryService.createSyncHistory(externalSystemId, RepositorySyncType.BRANCH);
@ -198,12 +222,23 @@ public class RepositoryBranchServiceImpl extends BaseServiceImpl<RepositoryBranc
// 2. 获取外部系统信息
ExternalSystem externalSystem = externalSystemRepository.findById(externalSystemId)
.orElseThrow(() -> new BusinessException(ResponseCode.EXTERNAL_SYSTEM_NOT_FOUND));
log.info("Starting branch sync for external system: {} (ID: {})", externalSystem.getName(), externalSystemId);
// 3. 获取所有项目
List<RepositoryProject> projects = repositoryProjectRepository.findByExternalSystemIdAndDeletedFalse(externalSystemId);
// 记录同步模式
if (repoProjectId != null) {
log.info("Starting branch sync for project (external system: {}, repoGroupId: {}, repoProjectId: {})",
externalSystemId, repoGroupId, repoProjectId);
} else if (repoGroupId != null) {
log.info("Starting branch sync for group (external system: {}, repoGroupId: {})",
externalSystemId, repoGroupId);
} else {
log.info("Starting full branch sync for external system: {} (ID: {})",
externalSystem.getName(), externalSystemId);
}
// 3. 根据参数获取需要同步的项目列表
List<RepositoryProject> projects = getProjectsToSync(externalSystemId, repoGroupId, repoProjectId);
if (projects.isEmpty()) {
log.info("No projects found for external system: {}", externalSystem.getName());
log.info("No projects found to sync");
repositorySyncHistoryService.updateSyncHistory(syncHistory.getId(), ExternalSystemSyncStatus.SUCCESS, null);
return 0;
}
@ -313,6 +348,38 @@ public class RepositoryBranchServiceImpl extends BaseServiceImpl<RepositoryBranc
}
}
/**
* 根据参数获取需要同步的项目列表
*
* @param externalSystemId 外部系统ID
* @param repoGroupId 仓库组ID可选
* @param repoProjectId 仓库项目ID可选
* @return 项目列表
*/
private List<RepositoryProject> getProjectsToSync(Long externalSystemId, Long repoGroupId, Long repoProjectId) {
if (repoProjectId != null) {
// 同步单个项目
RepositoryProject project = repositoryProjectRepository.findById(repoProjectId)
.orElseThrow(() -> new BusinessException(ResponseCode.DATA_NOT_FOUND));
// 验证项目属于指定的外部系统和仓库组
if (!project.getExternalSystemId().equals(externalSystemId)) {
throw new BusinessException(ResponseCode.DATA_NOT_FOUND);
}
if (repoGroupId != null && !project.getRepoGroupId().equals(repoGroupId)) {
throw new BusinessException(ResponseCode.DATA_NOT_FOUND);
}
return List.of(project);
} else if (repoGroupId != null) {
// 同步仓库组下所有项目
return repositoryProjectRepository.findByExternalSystemIdAndRepoGroupIdAndDeletedFalse(externalSystemId, repoGroupId);
} else {
// 全量同步
return repositoryProjectRepository.findByExternalSystemIdAndDeletedFalse(externalSystemId);
}
}
/**
* 检查分支信息是否有更新
*/

View File

@ -1,15 +1,19 @@
package com.qqchen.deploy.backend.deploy.service.impl;
import com.qqchen.deploy.backend.deploy.converter.RepositoryGroupConverter;
import com.qqchen.deploy.backend.deploy.dto.RepositorySyncHistoryDTO;
import com.qqchen.deploy.backend.deploy.entity.ExternalSystem;
import com.qqchen.deploy.backend.deploy.entity.RepositoryGroup;
import com.qqchen.deploy.backend.deploy.dto.RepositoryGroupDTO;
import com.qqchen.deploy.backend.deploy.enums.ExternalSystemSyncStatus;
import com.qqchen.deploy.backend.deploy.enums.RepositorySyncType;
import com.qqchen.deploy.backend.deploy.integration.IGitServiceIntegration;
import com.qqchen.deploy.backend.deploy.integration.response.GitGroupResponse;
import com.qqchen.deploy.backend.deploy.query.RepositoryGroupQuery;
import com.qqchen.deploy.backend.deploy.repository.IExternalSystemRepository;
import com.qqchen.deploy.backend.deploy.repository.IRepositoryGroupRepository;
import com.qqchen.deploy.backend.deploy.service.IRepositoryGroupService;
import com.qqchen.deploy.backend.deploy.service.IRepositorySyncHistoryService;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.framework.exception.BusinessException;
import com.qqchen.deploy.backend.framework.service.impl.BaseServiceImpl;
@ -44,27 +48,34 @@ public class RepositoryGroupServiceImpl extends BaseServiceImpl<RepositoryGroup,
@Resource
private RepositoryGroupConverter repositoryGroupConverter;
@Resource
private IRepositorySyncHistoryService repositorySyncHistoryService;
@Override
@Transactional
@Transactional(rollbackFor = Exception.class)
public Integer syncGroups(Long externalSystemId) {
// 1. 获取外部系统信息
// 1. 创建同步历史记录
RepositorySyncHistoryDTO groupHistory = repositorySyncHistoryService.createSyncHistory(externalSystemId, RepositorySyncType.GROUP);
try {
// 2. 获取外部系统信息
ExternalSystem externalSystem = externalSystemRepository.findById(externalSystemId)
.orElseThrow(() -> new BusinessException(ResponseCode.EXTERNAL_SYSTEM_NOT_FOUND));
// 2. 从Git API获取所有仓库组信息
// 3. 从Git API获取所有仓库组信息
List<GitGroupResponse> remoteGroups = gitServiceIntegration.groups(externalSystem);
if (remoteGroups.isEmpty()) {
log.info("No groups found in remote git system: {}", externalSystem.getName());
repositorySyncHistoryService.updateSyncHistory(groupHistory.getId(), ExternalSystemSyncStatus.SUCCESS, null);
return 0;
}
try {
// 3. 获取本地已存在的仓库组
// 4. 获取本地已存在的仓库组
List<RepositoryGroup> existingGroups = repositoryGroupRepository.findByExternalSystemIdAndDeletedFalse(externalSystemId);
Map<Long, RepositoryGroup> existingGroupMap = existingGroups.stream()
.collect(Collectors.toMap(RepositoryGroup::getRepoGroupId, Function.identity()));
// 4. 处理每个远程仓库组
// 5. 处理每个远程仓库组
List<RepositoryGroup> groupsToSave = new ArrayList<>();
for (GitGroupResponse remoteGroup : remoteGroups) {
RepositoryGroup group = existingGroupMap.getOrDefault(remoteGroup.getId(), new RepositoryGroup());
@ -87,16 +98,21 @@ public class RepositoryGroupServiceImpl extends BaseServiceImpl<RepositoryGroup,
groupsToSave.add(group);
}
// 5. 保存所有仓库组
// 6. 保存所有仓库组
List<RepositoryGroup> savedGroups = repositoryGroupRepository.saveAll(groupsToSave);
// 7. 更新同步历史记录为成功
repositorySyncHistoryService.updateSyncHistory(groupHistory.getId(), ExternalSystemSyncStatus.SUCCESS, null);
log.info("Successfully synchronized {} groups for external system: {}",
savedGroups.size(), externalSystem.getName());
return savedGroups.size();
} catch (Exception e) {
// 8. 更新同步历史记录为失败
repositorySyncHistoryService.updateSyncHistory(groupHistory.getId(), ExternalSystemSyncStatus.FAILED, e.getMessage());
log.error("Failed to sync repository groups for external system: {}", externalSystemId, e);
throw new BusinessException(ResponseCode.REPOSITORY_SYNC_FAILED);
throw new BusinessException(ResponseCode.REPOSITORY_GROUP_SYNC_FAILED);
}
}

View File

@ -1,164 +0,0 @@
package com.qqchen.deploy.backend.deploy.service.impl;
import com.qqchen.deploy.backend.deploy.dto.GitInstanceDTO;
import com.qqchen.deploy.backend.deploy.dto.RepositoryGroupDTO;
import com.qqchen.deploy.backend.deploy.dto.RepositoryProjectDTO;
import com.qqchen.deploy.backend.deploy.dto.RepositorySyncHistoryDTO;
import com.qqchen.deploy.backend.deploy.entity.RepositorySyncHistory;
import com.qqchen.deploy.backend.deploy.enums.ExternalSystemSyncStatus;
import com.qqchen.deploy.backend.deploy.enums.RepositorySyncType;
import com.qqchen.deploy.backend.deploy.repository.IRepositorySyncHistoryRepository;
import com.qqchen.deploy.backend.deploy.service.*;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.framework.exception.BusinessException;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
import static com.qqchen.deploy.backend.framework.enums.ResponseCode.REPOSITORY_BRANCH_SYNC_FAILED;
import static com.qqchen.deploy.backend.framework.enums.ResponseCode.REPOSITORY_PROJECT_SYNC_FAILED;
/**
* Git管理服务实现
*/
@Slf4j
@Service
public class RepositoryManagerServiceImpl implements IRepositoryManagerService {
@Resource
private IRepositoryGroupService repositoryGroupService;
@Resource
private IRepositoryProjectService repositoryProjectService;
@Resource
private IRepositoryBranchService repositoryBranchService;
@Resource
private IRepositorySyncHistoryService repositorySyncHistoryService;
@Resource
private IRepositorySyncHistoryRepository repositorySyncHistoryRepository;
@Override
@Transactional(rollbackFor = Exception.class)
public void syncGroups(Long externalSystemId) {
try {
// 1. 创建同步历史记录
RepositorySyncHistoryDTO groupHistory = repositorySyncHistoryService.createSyncHistory(externalSystemId, RepositorySyncType.GROUP);
try {
// 2. 同步组
Integer groupCount = repositoryGroupService.syncGroups(externalSystemId);
// 3. 更新同步历史记录为成功
repositorySyncHistoryService.updateSyncHistory(groupHistory.getId(), ExternalSystemSyncStatus.SUCCESS, null);
log.info("Successfully synchronized {} groups for external system: {}", groupCount, externalSystemId);
} catch (Exception e) {
// 4. 更新同步历史记录为失败
repositorySyncHistoryService.updateSyncHistory(groupHistory.getId(), ExternalSystemSyncStatus.FAILED, e.getMessage());
log.error("Failed to synchronize groups for external system: {}", externalSystemId, e);
throw e;
}
} catch (Exception e) {
log.error("Failed to create sync history for external system: {}", externalSystemId, e);
throw new BusinessException(ResponseCode.REPOSITORY_GROUP_SYNC_FAILED);
}
}
@Override
@Transactional(rollbackFor = Exception.class)
public void syncProjects(Long externalSystemId) {
try {
// 1. 创建同步历史记录
RepositorySyncHistoryDTO projectHistory = repositorySyncHistoryService.createSyncHistory(externalSystemId, RepositorySyncType.PROJECT);
try {
// 2. 同步项目
Integer projectCount = repositoryProjectService.syncProjects(externalSystemId);
// 3. 更新同步历史记录为成功
repositorySyncHistoryService.updateSyncHistory(projectHistory.getId(), ExternalSystemSyncStatus.SUCCESS, null);
log.info("Successfully synchronized {} projects in external system: {}", projectCount, externalSystemId);
} catch (Exception e) {
// 4. 更新同步历史记录为失败
repositorySyncHistoryService.updateSyncHistory(projectHistory.getId(), ExternalSystemSyncStatus.FAILED, e.getMessage());
log.error("Failed to synchronize projects in external system: {}", externalSystemId, e);
throw e;
}
} catch (Exception e) {
log.error("Failed to create sync history for external system: {}", externalSystemId, e);
throw new BusinessException(REPOSITORY_PROJECT_SYNC_FAILED);
}
}
@Override
@Transactional(rollbackFor = Exception.class)
public void syncBranches(Long externalSystemId) {
try {
// 1. 创建同步历史记录
RepositorySyncHistoryDTO branchHistory = repositorySyncHistoryService.createSyncHistory(externalSystemId, RepositorySyncType.BRANCH);
try {
// 2. 同步分支
Integer branchCount = repositoryBranchService.syncBranches(externalSystemId);
// 3. 更新同步历史记录为成功
repositorySyncHistoryService.updateSyncHistory(branchHistory.getId(), ExternalSystemSyncStatus.SUCCESS, null);
log.info("Successfully synchronized {} branches for external system: {}", branchCount, externalSystemId);
} catch (Exception e) {
// 4. 更新同步历史记录为失败
repositorySyncHistoryService.updateSyncHistory(branchHistory.getId(), ExternalSystemSyncStatus.FAILED, e.getMessage());
log.error("Failed to synchronize branches for external system: {}", externalSystemId, e);
throw e;
}
} catch (Exception e) {
log.error("Failed to create sync history for external system: {}", externalSystemId, e);
throw new BusinessException(REPOSITORY_BRANCH_SYNC_FAILED);
}
}
@Override
public GitInstanceDTO instance(Long externalSystemId) {
// 1. 创建返回对象
GitInstanceDTO instanceDTO = new GitInstanceDTO();
// 2. 获取组列表
List<RepositoryGroupDTO> groups = repositoryGroupService.findByExternalSystemId(externalSystemId);
instanceDTO.setRepositoryGroupList(groups);
instanceDTO.setTotalGroups(groups.size());
// 3. 获取项目列表
List<RepositoryProjectDTO> projects = repositoryProjectService.findByExternalSystemId(externalSystemId);
instanceDTO.setRepositoryProjectList(projects);
instanceDTO.setTotalProjects(projects.size());
// 4. 获取分支总数
Long totalBranches = repositoryBranchService.countByExternalSystemId(externalSystemId);
instanceDTO.setTotalBranches(totalBranches.intValue());
// 5. 获取最后同步时间
RepositorySyncHistory lastGroupSync = repositorySyncHistoryRepository.findTopByExternalSystemIdAndSyncTypeAndStatusOrderByStartTimeDesc(
externalSystemId, RepositorySyncType.GROUP, ExternalSystemSyncStatus.SUCCESS);
if (lastGroupSync != null) {
instanceDTO.setLastSyncGroupsTime(lastGroupSync.getEndTime());
}
RepositorySyncHistory lastProjectSync = repositorySyncHistoryRepository.findTopByExternalSystemIdAndSyncTypeAndStatusOrderByStartTimeDesc(
externalSystemId, RepositorySyncType.PROJECT, ExternalSystemSyncStatus.SUCCESS);
if (lastProjectSync != null) {
instanceDTO.setLastSyncProjectsTime(lastProjectSync.getEndTime());
}
RepositorySyncHistory lastBranchSync = repositorySyncHistoryRepository.findTopByExternalSystemIdAndSyncTypeAndStatusOrderByStartTimeDesc(
externalSystemId, RepositorySyncType.BRANCH, ExternalSystemSyncStatus.SUCCESS);
if (lastBranchSync != null) {
instanceDTO.setLastSyncBranchesTime(lastBranchSync.getEndTime());
}
return instanceDTO;
}
}

View File

@ -1,10 +1,13 @@
package com.qqchen.deploy.backend.deploy.service.impl;
import com.qqchen.deploy.backend.deploy.converter.RepositoryProjectConverter;
import com.qqchen.deploy.backend.deploy.dto.RepositorySyncHistoryDTO;
import com.qqchen.deploy.backend.deploy.entity.ExternalSystem;
import com.qqchen.deploy.backend.deploy.entity.RepositoryGroup;
import com.qqchen.deploy.backend.deploy.entity.RepositoryProject;
import com.qqchen.deploy.backend.deploy.dto.RepositoryProjectDTO;
import com.qqchen.deploy.backend.deploy.enums.ExternalSystemSyncStatus;
import com.qqchen.deploy.backend.deploy.enums.RepositorySyncType;
import com.qqchen.deploy.backend.deploy.integration.IGitServiceIntegration;
import com.qqchen.deploy.backend.deploy.integration.response.GitProjectResponse;
import com.qqchen.deploy.backend.deploy.query.RepositoryProjectQuery;
@ -12,6 +15,7 @@ import com.qqchen.deploy.backend.deploy.repository.IExternalSystemRepository;
import com.qqchen.deploy.backend.deploy.repository.IRepositoryGroupRepository;
import com.qqchen.deploy.backend.deploy.repository.IRepositoryProjectRepository;
import com.qqchen.deploy.backend.deploy.service.IRepositoryProjectService;
import com.qqchen.deploy.backend.deploy.service.IRepositorySyncHistoryService;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.framework.exception.BusinessException;
import com.qqchen.deploy.backend.framework.service.impl.BaseServiceImpl;
@ -59,6 +63,9 @@ public class RepositoryProjectServiceImpl extends BaseServiceImpl<RepositoryProj
@Resource(name = "repositoryProjectExecutor")
private ThreadPoolTaskExecutor executor;
@Resource
private IRepositorySyncHistoryService repositorySyncHistoryService;
private static final int BATCH_SIZE = 100;
private static final int MAX_RETRIES = 3;
private static final long RETRY_DELAY = 100L;
@ -91,18 +98,47 @@ public class RepositoryProjectServiceImpl extends BaseServiceImpl<RepositoryProj
}
@Override
@Transactional
@Transactional(rollbackFor = Exception.class)
public Integer syncProjects(Long externalSystemId) {
return doSyncProjects(externalSystemId, null);
}
@Override
@Transactional(rollbackFor = Exception.class)
public Integer syncProjects(Long externalSystemId, Long repoGroupId) {
return doSyncProjects(externalSystemId, repoGroupId);
}
/**
* 执行项目同步的核心方法
*
* @param externalSystemId 外部系统ID
* @param repoGroupId 仓库组ID可选
* @return 同步的项目数量
*/
private Integer doSyncProjects(Long externalSystemId, Long repoGroupId) {
// 1. 创建同步历史记录
RepositorySyncHistoryDTO projectHistory = repositorySyncHistoryService.createSyncHistory(externalSystemId, RepositorySyncType.PROJECT);
try {
// 1. 获取外部系统信息
// 2. 获取外部系统信息
ExternalSystem externalSystem = externalSystemRepository.findById(externalSystemId)
.orElseThrow(() -> new BusinessException(ResponseCode.EXTERNAL_SYSTEM_NOT_FOUND));
log.info("Start syncing projects for external system: {} (ID: {})", externalSystem.getName(), externalSystemId);
// 2. 获取所有组
List<RepositoryGroup> groups = repositoryGroupRepository.findByExternalSystemIdAndDeletedFalse(externalSystemId);
// 记录同步模式
if (repoGroupId != null) {
log.info("Start syncing projects for external system: {} (ID: {}), repoGroupId: {}",
externalSystem.getName(), externalSystemId, repoGroupId);
} else {
log.info("Start syncing all projects for external system: {} (ID: {})",
externalSystem.getName(), externalSystemId);
}
// 3. 获取需要同步的仓库组列表
List<RepositoryGroup> groups = getGroupsToSync(externalSystemId, repoGroupId);
if (groups.isEmpty()) {
log.info("No groups found for external system: {}", externalSystem.getName());
repositorySyncHistoryService.updateSyncHistory(projectHistory.getId(), ExternalSystemSyncStatus.SUCCESS, null);
return 0;
}
log.info("Found {} groups to sync", groups.size());
@ -210,15 +246,45 @@ public class RepositoryProjectServiceImpl extends BaseServiceImpl<RepositoryProj
}
}
// 7. 更新同步历史记录为成功
repositorySyncHistoryService.updateSyncHistory(projectHistory.getId(), ExternalSystemSyncStatus.SUCCESS, null);
log.info("Successfully synced projects. Added {} new projects, processed {} total projects",
totalCount.get(), processedProjectIds.size());
return totalCount.get();
} catch (Exception e) {
// 8. 更新同步历史记录为失败
repositorySyncHistoryService.updateSyncHistory(projectHistory.getId(), ExternalSystemSyncStatus.FAILED, e.getMessage());
log.error("Failed to sync repository projects for external system: {}", externalSystemId, e);
throw new BusinessException(ResponseCode.REPOSITORY_PROJECT_SYNC_FAILED);
}
}
/**
* 根据参数获取需要同步的仓库组列表
*
* @param externalSystemId 外部系统ID
* @param repoGroupId 仓库组ID可选
* @return 仓库组列表
*/
private List<RepositoryGroup> getGroupsToSync(Long externalSystemId, Long repoGroupId) {
if (repoGroupId != null) {
// 同步单个仓库组
RepositoryGroup group = repositoryGroupRepository.findById(repoGroupId)
.orElseThrow(() -> new BusinessException(ResponseCode.DATA_NOT_FOUND));
// 验证仓库组属于指定的外部系统
if (!group.getExternalSystemId().equals(externalSystemId)) {
throw new BusinessException(ResponseCode.DATA_NOT_FOUND);
}
return List.of(group);
} else {
// 全量同步
return repositoryGroupRepository.findByExternalSystemIdAndDeletedFalse(externalSystemId);
}
}
@Override
public Long countByExternalSystemId(Long externalSystemId) {
return repositoryProjectRepository.countByExternalSystemId(externalSystemId);

View File

@ -0,0 +1,54 @@
package com.qqchen.deploy.backend.framework.config;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.jsontype.impl.LaissezFaireSubTypeValidator;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.serializer.Jackson2JsonRedisSerializer;
import org.springframework.data.redis.serializer.StringRedisSerializer;
/**
* Redis配置类
*
* @author qichen
*/
@Configuration
public class RedisConfig {
@Bean
public RedisTemplate<String, Object> redisTemplate(RedisConnectionFactory factory) {
RedisTemplate<String, Object> template = new RedisTemplate<>();
template.setConnectionFactory(factory);
// 使用Jackson2JsonRedisSerializer来序列化和反序列化redis的value值
Jackson2JsonRedisSerializer<Object> serializer = new Jackson2JsonRedisSerializer<>(Object.class);
ObjectMapper mapper = new ObjectMapper();
mapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);
mapper.activateDefaultTyping(LaissezFaireSubTypeValidator.instance, ObjectMapper.DefaultTyping.NON_FINAL);
// 支持Java 8时间类型
mapper.registerModule(new JavaTimeModule());
serializer.setObjectMapper(mapper);
// 使用StringRedisSerializer来序列化和反序列化redis的key值
StringRedisSerializer stringRedisSerializer = new StringRedisSerializer();
// key采用String的序列化方式
template.setKeySerializer(stringRedisSerializer);
// hash的key也采用String的序列化方式
template.setHashKeySerializer(stringRedisSerializer);
// value序列化方式采用jackson
template.setValueSerializer(serializer);
// hash的value序列化方式采用jackson
template.setHashValueSerializer(serializer);
template.afterPropertiesSet();
return template;
}
}
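
A minimal usage sketch of the configured template (hypothetical key and value, not part of this commit): string keys stay readable in Redis, values are written as JSON, and the registered JavaTimeModule lets java.time fields such as LocalDateTime round-trip.

@Resource
private RedisTemplate<String, Object> redisTemplate;

public void demo() {
    // Hypothetical key; value stored as JSON with a 60-second TTL.
    redisTemplate.opsForValue().set("demo:key", "hello", 60, TimeUnit.SECONDS);
    Object cached = redisTemplate.opsForValue().get("demo:key");
}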

View File

@ -28,10 +28,12 @@ public enum ResponseCode {
// 业务异常 (2开头)
TENANT_NOT_FOUND(2001, "tenant.not.found"),
DATA_NOT_FOUND(2002, "data.not.found"),
USER_NOT_FOUND(2003, "user.not.found"),
USERNAME_EXISTS(2004, "user.username.exists"),
EMAIL_EXISTS(2005, "user.email.exists"),
LOGIN_ERROR(2006, "user.login.error"),
DATA_ALREADY_EXISTS(2003, "data.already.exists"),
DATA_IN_USE(2004, "data.in.use"),
USER_NOT_FOUND(2005, "user.not.found"),
USERNAME_EXISTS(2006, "user.username.exists"),
EMAIL_EXISTS(2007, "user.email.exists"),
LOGIN_ERROR(2008, "user.login.error"),
// 角色相关错误码 (2100-2199)
ROLE_NOT_FOUND(2100, "role.not.found"),
@ -170,7 +172,17 @@ public enum ResponseCode {
TEAM_MEMBER_NOT_FOUND(2924, "team.member.not.found"),
TEAM_MEMBER_ALREADY_EXISTS(2925, "team.member.already.exists"),
TEAM_APPLICATION_NOT_FOUND(2926, "team.application.not.found"),
TEAM_APPLICATION_ALREADY_EXISTS(2927, "team.application.already.exists");
TEAM_APPLICATION_ALREADY_EXISTS(2927, "team.application.already.exists"),
// 定时任务相关错误码 (2950-2979)
SCHEDULE_JOB_NOT_FOUND(2950, "schedule.job.not.found"),
SCHEDULE_JOB_NAME_EXISTS(2951, "schedule.job.name.exists"),
SCHEDULE_JOB_CATEGORY_NOT_FOUND(2952, "schedule.job.category.not.found"),
SCHEDULE_JOB_CATEGORY_CODE_EXISTS(2953, "schedule.job.category.code.exists"),
SCHEDULE_JOB_CATEGORY_HAS_JOBS(2954, "schedule.job.category.has.jobs"),
SCHEDULE_JOB_LOG_NOT_FOUND(2955, "schedule.job.log.not.found"),
SCHEDULE_JOB_EXECUTOR_NOT_FOUND(2956, "schedule.job.executor.not.found"),
SCHEDULE_JOB_CRON_INVALID(2957, "schedule.job.cron.invalid");
private final int code;
private final String messageKey; // 国际化消息key

View File

@ -0,0 +1,537 @@
package com.qqchen.deploy.backend.framework.utils;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;
import java.util.*;
import java.util.concurrent.TimeUnit;
/**
* Redis工具类
* 封装常用的Redis操作支持StringHashListSet等数据结构
*
* @author qichen
*/
@Slf4j
@Component
public class RedisUtil {
@Resource
private RedisTemplate<String, Object> redisTemplate;
// =============================Common============================
/**
* 指定缓存失效时间
* @param key
* @param time 时间()
*/
public boolean expire(String key, long time) {
try {
if (time > 0) {
redisTemplate.expire(key, time, TimeUnit.SECONDS);
}
return true;
} catch (Exception e) {
log.error("Redis expire error: key={}, time={}", key, time, e);
return false;
}
}
/**
* 根据key获取过期时间
* @param key 不能为null
* @return 时间() 返回0代表为永久有效
*/
public long getExpire(String key) {
Long expire = redisTemplate.getExpire(key, TimeUnit.SECONDS);
return expire != null ? expire : -2;
}
/**
* 判断key是否存在
* @param key
* @return true 存在 false不存在
*/
public boolean hasKey(String key) {
try {
return Boolean.TRUE.equals(redisTemplate.hasKey(key));
} catch (Exception e) {
log.error("Redis hasKey error: key={}", key, e);
return false;
}
}
/**
* 删除缓存
* @param keys 可以传一个值 或多个
*/
public void del(String... keys) {
if (keys != null && keys.length > 0) {
if (keys.length == 1) {
redisTemplate.delete(keys[0]);
} else {
redisTemplate.delete(Arrays.asList(keys));
}
}
}
/**
* 删除缓存
* @param keys 集合
*/
public void del(Collection<String> keys) {
if (keys != null && !keys.isEmpty()) {
redisTemplate.delete(keys);
}
}
// ============================String=============================
/**
* 普通缓存获取
* @param key
* @return
*/
public Object get(String key) {
return key == null ? null : redisTemplate.opsForValue().get(key);
}
/**
* 普通缓存放入
* @param key
* @param value
* @return true成功 false失败
*/
public boolean set(String key, Object value) {
try {
redisTemplate.opsForValue().set(key, value);
return true;
} catch (Exception e) {
log.error("Redis set error: key={}, value={}", key, value, e);
return false;
}
}
/**
* 普通缓存放入并设置时间
* @param key
* @param value
* @param time 时间() time要大于0 如果time小于等于0 将设置无限期
* @return true成功 false 失败
*/
public boolean set(String key, Object value, long time) {
try {
if (time > 0) {
redisTemplate.opsForValue().set(key, value, time, TimeUnit.SECONDS);
} else {
set(key, value);
}
return true;
} catch (Exception e) {
log.error("Redis set with expire error: key={}, value={}, time={}", key, value, time, e);
return false;
}
}
/**
* 递增
* @param key
* @param delta 要增加几(大于0)
*/
public long incr(String key, long delta) {
if (delta < 0) {
throw new IllegalArgumentException("递增因子必须大于0");
}
Long result = redisTemplate.opsForValue().increment(key, delta);
return result != null ? result : 0;
}
/**
* 递减
* @param key
* @param delta 要减少几(小于0)
*/
public long decr(String key, long delta) {
if (delta < 0) {
throw new IllegalArgumentException("递减因子必须大于0");
}
Long result = redisTemplate.opsForValue().increment(key, -delta);
return result != null ? result : 0;
}
// ================================Hash=================================
/**
* HashGet
* @param key 不能为null
* @param field 不能为null
*/
public Object hget(String key, String field) {
return redisTemplate.opsForHash().get(key, field);
}
/**
* 获取hashKey对应的所有键值
* @param key
* @return 对应的多个键值
*/
public Map<Object, Object> hmget(String key) {
return redisTemplate.opsForHash().entries(key);
}
/**
* HashSet
* @param key
* @param map 对应多个键值
*/
public boolean hmset(String key, Map<String, Object> map) {
try {
redisTemplate.opsForHash().putAll(key, map);
return true;
} catch (Exception e) {
log.error("Redis hmset error: key={}", key, e);
return false;
}
}
/**
* HashSet 并设置时间
* @param key
* @param map 对应多个键值
* @param time 时间()
*/
public boolean hmset(String key, Map<String, Object> map, long time) {
try {
redisTemplate.opsForHash().putAll(key, map);
if (time > 0) {
expire(key, time);
}
return true;
} catch (Exception e) {
log.error("Redis hmset with expire error: key={}, time={}", key, time, e);
return false;
}
}
/**
* 向一张hash表中放入数据,如果不存在将创建
* @param key
* @param field
* @param value
*/
public boolean hset(String key, String field, Object value) {
try {
redisTemplate.opsForHash().put(key, field, value);
return true;
} catch (Exception e) {
log.error("Redis hset error: key={}, field={}, value={}", key, field, value, e);
return false;
}
}
/**
* 向一张hash表中放入数据,如果不存在将创建
* @param key
* @param field
* @param value
* @param time 时间() 注意:如果已存在的hash表有时间,这里将会替换原有的时间
*/
public boolean hset(String key, String field, Object value, long time) {
try {
redisTemplate.opsForHash().put(key, field, value);
if (time > 0) {
expire(key, time);
}
return true;
} catch (Exception e) {
log.error("Redis hset with expire error: key={}, field={}, value={}, time={}",
key, field, value, time, e);
return false;
}
}
/**
* 删除hash表中的值
* @param key 不能为null
* @param fields 可以使多个 不能为null
*/
public void hdel(String key, Object... fields) {
redisTemplate.opsForHash().delete(key, fields);
}
/**
* 判断hash表中是否有该项的值
* @param key 不能为null
* @param field 不能为null
* @return true 存在 false不存在
*/
public boolean hHasKey(String key, String field) {
return redisTemplate.opsForHash().hasKey(key, field);
}
/**
* hash递增 如果不存在,就会创建一个 并把新增后的值返回
* @param key
* @param field
* @param delta 要增加几(大于0)
*/
public double hincr(String key, String field, double delta) {
return redisTemplate.opsForHash().increment(key, field, delta);
}
/**
* hash递减
* @param key
* @param field
* @param delta 要减少记(小于0)
*/
public double hdecr(String key, String field, double delta) {
return redisTemplate.opsForHash().increment(key, field, -delta);
}
// ============================Set=============================
/**
* 根据key获取Set中的所有值
* @param key
*/
public Set<Object> sGet(String key) {
try {
return redisTemplate.opsForSet().members(key);
} catch (Exception e) {
log.error("Redis sGet error: key={}", key, e);
return new HashSet<>();
}
}
/**
* 根据value从一个set中查询,是否存在
* @param key
* @param value
* @return true 存在 false不存在
*/
public boolean sHasKey(String key, Object value) {
try {
return Boolean.TRUE.equals(redisTemplate.opsForSet().isMember(key, value));
} catch (Exception e) {
log.error("Redis sHasKey error: key={}, value={}", key, value, e);
return false;
}
}
/**
* 将数据放入set缓存
* @param key
* @param values 可以是多个
* @return 成功个数
*/
public long sSet(String key, Object... values) {
try {
Long count = redisTemplate.opsForSet().add(key, values);
return count != null ? count : 0;
} catch (Exception e) {
log.error("Redis sSet error: key={}", key, e);
return 0;
}
}
/**
* 将set数据放入缓存
* @param key
* @param time 时间()
* @param values 可以是多个
* @return 成功个数
*/
public long sSetAndTime(String key, long time, Object... values) {
try {
Long count = redisTemplate.opsForSet().add(key, values);
if (time > 0) {
expire(key, time);
}
return count != null ? count : 0;
} catch (Exception e) {
log.error("Redis sSetAndTime error: key={}, time={}", key, time, e);
return 0;
}
}
/**
* 获取set缓存的长度
* @param key
*/
public long sGetSetSize(String key) {
try {
Long size = redisTemplate.opsForSet().size(key);
return size != null ? size : 0;
} catch (Exception e) {
log.error("Redis sGetSetSize error: key={}", key, e);
return 0;
}
}
/**
* 移除值为value的
* @param key
* @param values 可以是多个
* @return 移除的个数
*/
public long setRemove(String key, Object... values) {
try {
Long count = redisTemplate.opsForSet().remove(key, values);
return count != null ? count : 0;
} catch (Exception e) {
log.error("Redis setRemove error: key={}", key, e);
return 0;
}
}
// ===============================List=================================
/**
* 获取list缓存的内容
* @param key
* @param start 开始
* @param end 结束 0 -1代表所有值
*/
public List<Object> lGet(String key, long start, long end) {
try {
return redisTemplate.opsForList().range(key, start, end);
} catch (Exception e) {
log.error("Redis lGet error: key={}, start={}, end={}", key, start, end, e);
return new ArrayList<>();
}
}
/**
* 获取list缓存的长度
* @param key
*/
public long lGetListSize(String key) {
try {
Long size = redisTemplate.opsForList().size(key);
return size != null ? size : 0;
} catch (Exception e) {
log.error("Redis lGetListSize error: key={}", key, e);
return 0;
}
}
/**
* 通过索引 获取list中的值
* @param key
* @param index 索引 index>=0时 0 表头1 第二个元素依次类推index<0时-1表尾-2倒数第二个元素依次类推
*/
public Object lGetIndex(String key, long index) {
try {
return redisTemplate.opsForList().index(key, index);
} catch (Exception e) {
log.error("Redis lGetIndex error: key={}, index={}", key, index, e);
return null;
}
}
/**
* 将list放入缓存
* @param key
* @param value
*/
public boolean lSet(String key, Object value) {
try {
redisTemplate.opsForList().rightPush(key, value);
return true;
} catch (Exception e) {
log.error("Redis lSet error: key={}, value={}", key, value, e);
return false;
}
}
/**
* 将list放入缓存
* @param key
* @param value
* @param time 时间()
*/
public boolean lSet(String key, Object value, long time) {
try {
redisTemplate.opsForList().rightPush(key, value);
if (time > 0) {
expire(key, time);
}
return true;
} catch (Exception e) {
log.error("Redis lSet with expire error: key={}, value={}, time={}", key, value, time, e);
return false;
}
}
/**
* 将list放入缓存
* @param key
* @param value
*/
public boolean lSet(String key, List<Object> value) {
try {
redisTemplate.opsForList().rightPushAll(key, value);
return true;
} catch (Exception e) {
log.error("Redis lSet list error: key={}", key, e);
return false;
}
}
/**
* 将list放入缓存
* @param key
* @param value
* @param time 时间()
*/
public boolean lSet(String key, List<Object> value, long time) {
try {
redisTemplate.opsForList().rightPushAll(key, value);
if (time > 0) {
expire(key, time);
}
return true;
} catch (Exception e) {
log.error("Redis lSet list with expire error: key={}, time={}", key, time, e);
return false;
}
}
/**
* 根据索引修改list中的某条数据
* @param key
* @param index 索引
* @param value
*/
public boolean lUpdateIndex(String key, long index, Object value) {
try {
redisTemplate.opsForList().set(key, index, value);
return true;
} catch (Exception e) {
log.error("Redis lUpdateIndex error: key={}, index={}, value={}", key, index, value, e);
return false;
}
}
/**
* 移除N个值为value
* @param key
* @param count 移除多少个
* @param value
* @return 移除的个数
*/
public long lRemove(String key, long count, Object value) {
try {
Long remove = redisTemplate.opsForList().remove(key, count, value);
return remove != null ? remove : 0;
} catch (Exception e) {
log.error("Redis lRemove error: key={}, count={}, value={}", key, count, value, e);
return 0;
}
}
}
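
A short usage sketch of the helper, assuming it is injected into a Spring bean (keys and values below are hypothetical):

@Resource
private RedisUtil redisUtil;

public void cacheJobSnapshot() {
    // String value with a 5-minute TTL
    redisUtil.set("schedule:job:1:last-result", "SUCCESS", 300);
    // Hash per job: individual fields can be read back without fetching the whole entry
    redisUtil.hset("schedule:job:1:status", "progress", 40, 300);
    Object progress = redisUtil.hget("schedule:job:1:status", "progress");
    // Clean up both keys
    redisUtil.del("schedule:job:1:last-result", "schedule:job:1:status");
}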

View File

@ -0,0 +1,26 @@
package com.qqchen.deploy.backend.schedule.annotation;
import java.lang.annotation.*;
/**
* 定时任务监控注解
* 标记需要自动监控执行状态的任务方法
*
* @author qichen
*/
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface MonitoredJob {
/**
* 任务ID对应数据库中的schedule_job.id
*/
long jobId();
/**
* 任务名称
*/
String jobName();
}
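
A hypothetical job bean showing the intended usage (the jobId must reference an existing schedule_job row; the class name and ids below are illustrative, imports omitted):

@Component
public class WorkflowCleanJob {

    // Runs daily at 02:00; JobMonitorAspect (further below) records the Redis status and a ScheduleJobLog row.
    @Scheduled(cron = "0 0 2 * * ?")
    @MonitoredJob(jobId = 1L, jobName = "工作流数据清理")
    public void execute() {
        // business logic ...
    }
}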

View File

@ -0,0 +1,31 @@
package com.qqchen.deploy.backend.schedule.api;
import com.qqchen.deploy.backend.framework.controller.BaseController;
import com.qqchen.deploy.backend.schedule.dto.ScheduleJobDTO;
import com.qqchen.deploy.backend.schedule.entity.ScheduleJob;
import com.qqchen.deploy.backend.schedule.query.ScheduleJobQuery;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.servlet.http.HttpServletResponse;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
* 定时任务API控制器
*
* @author qichen
*/
@Slf4j
@RestController
@RequestMapping("/api/v1/schedule/jobs")
@Tag(name = "定时任务管理", description = "定时任务的增删改查接口")
public class ScheduleJobApiController extends BaseController<ScheduleJob, ScheduleJobDTO, Long, ScheduleJobQuery> {
@Override
protected void exportData(HttpServletResponse response, List<ScheduleJobDTO> data) {
// TODO: 实现导出功能
}
}

View File

@ -0,0 +1,31 @@
package com.qqchen.deploy.backend.schedule.api;
import com.qqchen.deploy.backend.framework.controller.BaseController;
import com.qqchen.deploy.backend.schedule.dto.ScheduleJobCategoryDTO;
import com.qqchen.deploy.backend.schedule.entity.ScheduleJobCategory;
import com.qqchen.deploy.backend.schedule.query.ScheduleJobCategoryQuery;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.servlet.http.HttpServletResponse;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
* 定时任务分类API控制器
*
* @author qichen
*/
@Slf4j
@RestController
@RequestMapping("/api/v1/schedule/job-categories")
@Tag(name = "定时任务分类管理", description = "定时任务分类的增删改查接口")
public class ScheduleJobCategoryApiController extends BaseController<ScheduleJobCategory, ScheduleJobCategoryDTO, Long, ScheduleJobCategoryQuery> {
@Override
protected void exportData(HttpServletResponse response, List<ScheduleJobCategoryDTO> data) {
// TODO: 实现导出功能
}
}

View File

@ -0,0 +1,29 @@
package com.qqchen.deploy.backend.schedule.api;
import com.qqchen.deploy.backend.framework.controller.BaseController;
import com.qqchen.deploy.backend.schedule.dto.ScheduleJobLogDTO;
import com.qqchen.deploy.backend.schedule.entity.ScheduleJobLog;
import com.qqchen.deploy.backend.schedule.query.ScheduleJobLogQuery;
import io.swagger.v3.oas.annotations.tags.Tag;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
* 定时任务执行日志API控制器
*
* @author qichen
*/
@Slf4j
@RestController
@RequestMapping("/api/v1/schedule/job-logs")
@Tag(name = "定时任务执行日志", description = "查询任务执行历史日志")
public class ScheduleJobLogApiController extends BaseController<ScheduleJobLog, ScheduleJobLogDTO, Long, ScheduleJobLogQuery> {
@Override
protected void exportData(jakarta.servlet.http.HttpServletResponse response, java.util.List<ScheduleJobLogDTO> data) {
// TODO: 实现日志导出功能
throw new UnsupportedOperationException("日志导出功能暂未实现");
}
}

View File

@ -0,0 +1,60 @@
package com.qqchen.deploy.backend.schedule.api;
import com.qqchen.deploy.backend.framework.api.Response;
import com.qqchen.deploy.backend.schedule.dto.JobStatusDTO;
import com.qqchen.deploy.backend.schedule.service.JobStatusRedisService;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.tags.Tag;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.*;
import java.util.Map;
/**
* 定时任务状态查询API控制器
*
* @author qichen
*/
@Slf4j
@RestController
@RequestMapping("/api/v1/schedule/jobs")
@Tag(name = "定时任务状态查询", description = "查询任务实时执行状态")
public class ScheduleJobStatusApiController {
@Resource
private JobStatusRedisService jobStatusRedisService;
/**
* 获取单个任务状态
*/
@Operation(summary = "获取任务执行状态", description = "获取指定任务的实时执行状态")
@GetMapping("/{jobId}/status")
public Response<JobStatusDTO> getJobStatus(
@Parameter(description = "任务ID") @PathVariable Long jobId) {
JobStatusDTO status = jobStatusRedisService.getJobStatus(jobId);
return Response.success(status);
}
/**
* 批量获取所有任务状态
*/
@Operation(summary = "批量获取任务状态", description = "获取所有任务的执行状态,用于列表页展示")
@GetMapping("/status/all")
public Response<Map<String, JobStatusDTO>> getAllJobStatus() {
Map<String, JobStatusDTO> statusMap = jobStatusRedisService.getAllJobStatus();
return Response.success(statusMap);
}
/**
* 获取状态版本号
*/
@Operation(summary = "获取状态版本号", description = "获取当前状态版本号,用于判断状态是否有变化")
@GetMapping("/status/version")
public Response<Long> getStatusVersion() {
Long version = jobStatusRedisService.getStatusVersion();
return Response.success(version);
}
}
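
The version endpoint is meant to make polling cheap: clients re-fetch the full status map only when the version changes. A server-side sketch of that pattern (lastSeenVersion is a hypothetical field):

// Hypothetical polling step built on the same service methods used above.
Long current = jobStatusRedisService.getStatusVersion();
if (current != null && !current.equals(lastSeenVersion)) {
    Map<String, JobStatusDTO> statuses = jobStatusRedisService.getAllJobStatus();
    // refresh the UI / local cache with the new statuses
    lastSeenVersion = current;
}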

View File

@ -0,0 +1,161 @@
package com.qqchen.deploy.backend.schedule.aspect;
import com.qqchen.deploy.backend.schedule.annotation.MonitoredJob;
import com.qqchen.deploy.backend.schedule.dto.JobStatusDTO;
import com.qqchen.deploy.backend.schedule.entity.ScheduleJobLog;
import com.qqchen.deploy.backend.schedule.enums.JobStatusEnum;
import com.qqchen.deploy.backend.schedule.repository.IScheduleJobLogRepository;
import com.qqchen.deploy.backend.schedule.service.JobProgressReporter;
import com.qqchen.deploy.backend.schedule.service.JobStatusRedisService;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.reflect.MethodSignature;
import org.springframework.stereotype.Component;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.net.InetAddress;
import java.time.Duration;
import java.time.LocalDateTime;
import java.util.Arrays;
import java.util.stream.Collectors;
/**
* 定时任务监控切面
* 自动拦截带有@MonitoredJob注解的方法记录执行状态
*
* @author qichen
*/
@Slf4j
@Aspect
@Component
public class JobMonitorAspect {
@Resource
private JobStatusRedisService jobStatusRedisService;
@Resource
private JobProgressReporter jobProgressReporter;
@Resource
private IScheduleJobLogRepository jobLogRepository;
@Around("@annotation(monitoredJob)")
public Object monitor(ProceedingJoinPoint joinPoint, MonitoredJob monitoredJob) throws Throwable {
Long jobId = monitoredJob.jobId();
String jobName = monitoredJob.jobName();
LocalDateTime startTime = LocalDateTime.now();
// 获取执行器信息
MethodSignature signature = (MethodSignature) joinPoint.getSignature();
String beanName = joinPoint.getTarget().getClass().getSimpleName();
String methodName = signature.getName();
String methodParams = Arrays.stream(joinPoint.getArgs())
.map(Object::toString)
.collect(Collectors.joining(", "));
// 设置ThreadLocal供进度报告使用
jobProgressReporter.setCurrentJob(jobId, jobName, startTime, beanName, methodName, methodParams);
try {
// 1. 记录开始状态到Redis
log.info("任务开始执行: jobId={}, jobName={}", jobId, jobName);
saveStatus(jobId, jobName, "RUNNING", 0, "任务执行中", startTime);
// 2. 执行业务逻辑
Object result = joinPoint.proceed();
// 3. 记录成功状态到Redis
log.info("任务执行成功: jobId={}", jobId);
saveStatus(jobId, jobName, "SUCCESS", 100, "任务执行成功", startTime);
// 4. 保存成功日志到数据库
saveLog(jobId, jobName, beanName, methodName, startTime, JobStatusEnum.SUCCESS, "任务执行成功", null);
return result;
} catch (Throwable e) {
// 5. 记录失败状态到Redis
log.error("任务执行失败: jobId={}", jobId, e);
saveStatus(jobId, jobName, "FAIL", null, "任务执行失败: " + e.getMessage(), startTime);
// 6. 保存失败日志到数据库
saveLog(jobId, jobName, beanName, methodName, startTime, JobStatusEnum.FAIL, "任务执行失败", e);
throw e;
} finally {
// 清除ThreadLocal
jobProgressReporter.clearCurrentJob();
}
}
/**
* 保存任务状态到Redis
*/
private void saveStatus(Long jobId, String jobName, String status, Integer progress,
String message, LocalDateTime startTime) {
JobStatusDTO statusDTO = JobStatusDTO.builder()
.jobId(jobId)
.jobName(jobName)
.status(status)
.progress(progress != null ? progress : 0)
.currentStep(message)
.message(message)
.startTime(startTime)
.build();
jobStatusRedisService.saveJobStatus(jobId, statusDTO);
}
/**
* 保存任务日志到数据库
*/
private void saveLog(Long jobId, String jobName, String beanName, String methodName,
LocalDateTime startTime, JobStatusEnum status, String message, Throwable exception) {
try {
ScheduleJobLog log = new ScheduleJobLog();
log.setJobId(jobId);
log.setJobName(jobName);
log.setBeanName(beanName);
log.setMethodName(methodName);
log.setExecuteTime(startTime);
log.setFinishTime(LocalDateTime.now());
log.setDuration(Duration.between(startTime, LocalDateTime.now()).toMillis());
log.setStatus(status);
log.setResultMessage(message);
if (exception != null) {
log.setExceptionInfo(getStackTrace(exception));
}
// 获取服务器信息
try {
InetAddress addr = InetAddress.getLocalHost();
log.setServerIp(addr.getHostAddress());
log.setServerHost(addr.getHostName());
} catch (Exception e) {
this.log.warn("获取服务器信息失败", e);
}
jobLogRepository.save(log);
} catch (Exception e) {
this.log.error("保存任务日志失败", e);
}
}
/**
* 获取异常堆栈信息
*/
private String getStackTrace(Throwable throwable) {
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
throwable.printStackTrace(pw);
return sw.toString();
}
}

View File

@ -0,0 +1,20 @@
package com.qqchen.deploy.backend.schedule.config;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
/**
* 定时任务配置
* 启用Spring调度功能
*
* @author qichen
*/
@Configuration
@EnableScheduling
@EnableAsync
public class ScheduleConfig {
// Spring @Scheduled 会自动处理定时任务
// 配合 @MonitoredJob 注解实现自动监控
}

View File

@ -0,0 +1,16 @@
package com.qqchen.deploy.backend.schedule.converter;
import com.qqchen.deploy.backend.framework.converter.BaseConverter;
import com.qqchen.deploy.backend.schedule.dto.ScheduleJobCategoryDTO;
import com.qqchen.deploy.backend.schedule.entity.ScheduleJobCategory;
import org.mapstruct.Mapper;
/**
* 定时任务分类Converter
*
* @author qichen
*/
@Mapper(config = BaseConverter.class)
public interface ScheduleJobCategoryConverter extends BaseConverter<ScheduleJobCategory, ScheduleJobCategoryDTO> {
}

View File

@ -0,0 +1,16 @@
package com.qqchen.deploy.backend.schedule.converter;
import com.qqchen.deploy.backend.framework.converter.BaseConverter;
import com.qqchen.deploy.backend.schedule.dto.ScheduleJobDTO;
import com.qqchen.deploy.backend.schedule.entity.ScheduleJob;
import org.mapstruct.Mapper;
/**
* 定时任务Converter
*
* @author qichen
*/
@Mapper(config = BaseConverter.class)
public interface ScheduleJobConverter extends BaseConverter<ScheduleJob, ScheduleJobDTO> {
}

View File

@ -0,0 +1,16 @@
package com.qqchen.deploy.backend.schedule.converter;
import com.qqchen.deploy.backend.framework.converter.BaseConverter;
import com.qqchen.deploy.backend.schedule.dto.ScheduleJobLogDTO;
import com.qqchen.deploy.backend.schedule.entity.ScheduleJobLog;
import org.mapstruct.Mapper;
/**
* 定时任务执行日志Converter
*
* @author qichen
*/
@Mapper(config = BaseConverter.class)
public interface ScheduleJobLogConverter extends BaseConverter<ScheduleJobLog, ScheduleJobLogDTO> {
}

View File

@ -0,0 +1,47 @@
package com.qqchen.deploy.backend.schedule.dto;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.time.LocalDateTime;
/**
* 任务执行状态DTO用于Redis存储和前端展示
*
* @author qichen
*/
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
@Schema(description = "任务执行状态")
public class JobStatusDTO {
@Schema(description = "任务ID")
private Long jobId;
@Schema(description = "任务名称")
private String jobName;
@Schema(description = "执行状态WAITING-等待、RUNNING-运行中、SUCCESS-成功、FAIL-失败、TIMEOUT-超时")
private String status;
@Schema(description = "执行进度0-100")
private Integer progress;
@Schema(description = "当前步骤描述")
private String currentStep;
@Schema(description = "详细消息")
private String message;
@Schema(description = "开始时间")
private LocalDateTime startTime;
@Schema(description = "更新时间Redis中存储为字符串")
private String updateTime;
}

View File

@ -0,0 +1,48 @@
package com.qqchen.deploy.backend.schedule.dto;
import com.qqchen.deploy.backend.framework.dto.BaseDTO;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import lombok.Data;
import lombok.EqualsAndHashCode;
/**
* 定时任务分类DTO
*
* @author qichen
*/
@Data
@EqualsAndHashCode(callSuper = true)
@Schema(description = "定时任务分类信息")
public class ScheduleJobCategoryDTO extends BaseDTO {
@Schema(description = "分类编码", example = "DATA_CLEAN")
@NotBlank(message = "分类编码不能为空")
private String code;
@Schema(description = "分类名称", example = "数据清理")
@NotBlank(message = "分类名称不能为空")
private String name;
@Schema(description = "描述")
private String description;
@Schema(description = "图标")
private String icon;
@Schema(description = "颜色")
private String color;
@Schema(description = "是否启用")
@NotNull(message = "启用状态不能为空")
private Boolean enabled;
@Schema(description = "排序号")
@NotNull(message = "排序号不能为空")
private Integer sort;
@Schema(description = "该分类下的任务数量")
private Long jobCount;
}

View File

@ -0,0 +1,98 @@
package com.qqchen.deploy.backend.schedule.dto;
import com.qqchen.deploy.backend.framework.dto.BaseDTO;
import com.qqchen.deploy.backend.schedule.enums.ScheduleJobStatusEnum;
import com.qqchen.deploy.backend.workflow.dto.FormDefinitionDTO;
import io.swagger.v3.oas.annotations.media.Schema;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.time.LocalDateTime;
/**
* 定时任务DTO
*
* @author qichen
*/
@Data
@EqualsAndHashCode(callSuper = true)
@Schema(description = "定时任务信息")
public class ScheduleJobDTO extends BaseDTO {
@Schema(description = "任务名称", example = "工作流数据清理")
@NotBlank(message = "任务名称不能为空")
private String jobName;
@Schema(description = "任务描述")
private String jobDescription;
@Schema(description = "任务分类ID")
@NotNull(message = "任务分类ID不能为空")
private Long categoryId;
@Schema(description = "任务分类信息")
private ScheduleJobCategoryDTO category;
// ==================== 执行配置 ====================
@Schema(description = "Spring Bean名称", example = "workflowCleanJob")
@NotBlank(message = "Bean名称不能为空")
private String beanName;
@Schema(description = "方法名称", example = "execute")
@NotBlank(message = "方法名称不能为空")
private String methodName;
@Schema(description = "参数表单ID")
private Long formDefinitionId;
@Schema(description = "参数表单信息")
private FormDefinitionDTO formDefinition;
@Schema(description = "方法参数JSON")
private String methodParams;
// ==================== 调度配置 ====================
@Schema(description = "Cron表达式", example = "0 0 2 * * ?")
@NotBlank(message = "Cron表达式不能为空")
private String cronExpression;
@Schema(description = "任务状态")
@NotNull(message = "任务状态不能为空")
private ScheduleJobStatusEnum status;
@Schema(description = "是否允许并发执行")
private Boolean concurrent;
// ==================== 统计信息 ====================
@Schema(description = "上次执行时间")
private LocalDateTime lastExecuteTime;
@Schema(description = "下次执行时间")
private LocalDateTime nextExecuteTime;
@Schema(description = "执行次数")
private Integer executeCount;
@Schema(description = "成功次数")
private Integer successCount;
@Schema(description = "失败次数")
private Integer failCount;
// ==================== 高级配置 ====================
@Schema(description = "超时时间(秒)")
private Integer timeoutSeconds;
@Schema(description = "失败重试次数")
private Integer retryCount;
@Schema(description = "告警邮箱(多个用逗号分隔)")
private String alertEmail;
}

View File

@ -0,0 +1,68 @@
package com.qqchen.deploy.backend.schedule.dto;
import com.qqchen.deploy.backend.framework.dto.BaseDTO;
import com.qqchen.deploy.backend.schedule.enums.JobStatusEnum;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.time.LocalDateTime;
/**
* 定时任务执行日志DTO
*
* @author qichen
*/
@Data
@EqualsAndHashCode(callSuper = true)
@Schema(description = "定时任务执行日志")
public class ScheduleJobLogDTO extends BaseDTO {
@Schema(description = "任务ID")
private Long jobId;
@Schema(description = "任务名称")
private String jobName;
// ==================== 执行信息 ====================
@Schema(description = "执行器Bean名称")
private String beanName;
@Schema(description = "执行方法名称")
private String methodName;
@Schema(description = "方法参数")
private String methodParams;
// ==================== 时间统计 ====================
@Schema(description = "开始执行时间")
private LocalDateTime executeTime;
@Schema(description = "完成时间")
private LocalDateTime finishTime;
@Schema(description = "执行耗时(毫秒)")
private Long duration;
// ==================== 状态信息 ====================
@Schema(description = "执行状态")
private JobStatusEnum status;
@Schema(description = "执行结果消息")
private String resultMessage;
@Schema(description = "异常堆栈信息")
private String exceptionInfo;
// ==================== 服务器信息 ====================
@Schema(description = "执行服务器IP")
private String serverIp;
@Schema(description = "执行服务器主机名")
private String serverHost;
}

View File

@ -0,0 +1,144 @@
package com.qqchen.deploy.backend.schedule.entity;
import com.qqchen.deploy.backend.framework.annotation.LogicDelete;
import com.qqchen.deploy.backend.framework.domain.Entity;
import com.qqchen.deploy.backend.schedule.enums.ScheduleJobStatusEnum;
import jakarta.persistence.Column;
import jakarta.persistence.EnumType;
import jakarta.persistence.Enumerated;
import jakarta.persistence.Table;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.time.LocalDateTime;
/**
* 定时任务
*
* @author qichen
*/
@Data
@EqualsAndHashCode(callSuper = true)
@jakarta.persistence.Entity
@Table(name = "schedule_job")
@LogicDelete
public class ScheduleJob extends Entity<Long> {
/**
* 任务名称
*/
@Column(name = "job_name", nullable = false, length = 100)
private String jobName;
/**
* 任务描述
*/
@Column(name = "job_description", length = 500)
private String jobDescription;
/**
* 任务分类ID
*/
@Column(name = "category_id", nullable = false)
private Long categoryId;
// ==================== 执行配置 ====================
/**
* Spring Bean名称
*/
@Column(name = "bean_name", nullable = false, length = 100)
private String beanName;
/**
* 方法名称
*/
@Column(name = "method_name", nullable = false, length = 100)
private String methodName;
/**
* 参数表单ID
*/
@Column(name = "form_definition_id")
private Long formDefinitionId;
/**
* 方法参数JSON
*/
@Column(name = "method_params", columnDefinition = "TEXT")
private String methodParams;
// ==================== 调度配置 ====================
/**
* Cron表达式
*/
@Column(name = "cron_expression", nullable = false, length = 100)
private String cronExpression;
/**
* 任务状态
*/
@Enumerated(EnumType.STRING)
@Column(name = "status", nullable = false, length = 20)
private ScheduleJobStatusEnum status = ScheduleJobStatusEnum.ENABLED;
/**
* 是否允许并发执行
*/
@Column(name = "concurrent", nullable = false)
private Boolean concurrent = false;
// ==================== 统计信息 ====================
/**
* 上次执行时间
*/
@Column(name = "last_execute_time")
private LocalDateTime lastExecuteTime;
/**
* 下次执行时间
*/
@Column(name = "next_execute_time")
private LocalDateTime nextExecuteTime;
/**
* 执行次数
*/
@Column(name = "execute_count", nullable = false)
private Integer executeCount = 0;
/**
* 成功次数
*/
@Column(name = "success_count", nullable = false)
private Integer successCount = 0;
/**
* 失败次数
*/
@Column(name = "fail_count", nullable = false)
private Integer failCount = 0;
// ==================== 高级配置 ====================
/**
* 超时时间(秒)
*/
@Column(name = "timeout_seconds")
private Integer timeoutSeconds;
/**
* 失败重试次数
*/
@Column(name = "retry_count", nullable = false)
private Integer retryCount = 0;
/**
* 告警邮箱(多个用逗号分隔)
*/
@Column(name = "alert_email", length = 500)
private String alertEmail;
}
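The dispatcher that turns a ScheduleJob row into an actual method call is not shown in this excerpt. Purely as an illustration, the stored beanName / methodName / methodParams could be resolved through the Spring context roughly as below; the ScheduleJobInvoker class name, its package, and the "one JSON field per parameter name" convention are assumptions for the sketch, not the commit's actual implementation, and it assumes the code is compiled with -parameters so parameter names are retained.

package com.qqchen.deploy.backend.schedule.support; // hypothetical package, for illustration only

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.qqchen.deploy.backend.schedule.entity.ScheduleJob;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Component;

import java.lang.reflect.Method;
import java.lang.reflect.Parameter;

@Component
public class ScheduleJobInvoker {

    private final ApplicationContext applicationContext;
    private final ObjectMapper objectMapper;

    public ScheduleJobInvoker(ApplicationContext applicationContext, ObjectMapper objectMapper) {
        this.applicationContext = applicationContext;
        this.objectMapper = objectMapper;
    }

    /**
     * Looks up the configured bean, finds the configured method by name, and maps
     * each declared parameter to a field of the same name in methodParams,
     * e.g. execute(Integer retentionDays) with {"retentionDays": 90}.
     */
    public void invoke(ScheduleJob job) throws Exception {
        Object bean = applicationContext.getBean(job.getBeanName());
        Method target = null;
        for (Method candidate : bean.getClass().getMethods()) {
            if (candidate.getName().equals(job.getMethodName())) {
                target = candidate;
                break;
            }
        }
        if (target == null) {
            throw new NoSuchMethodException(job.getBeanName() + "#" + job.getMethodName());
        }
        JsonNode params = job.getMethodParams() != null
                ? objectMapper.readTree(job.getMethodParams())
                : objectMapper.createObjectNode();
        Parameter[] declared = target.getParameters();
        Object[] args = new Object[declared.length];
        for (int i = 0; i < declared.length; i++) {
            JsonNode value = params.get(declared[i].getName());
            args[i] = value == null ? null : objectMapper.treeToValue(value, declared[i].getType());
        }
        target.invoke(bean, args);
    }
}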

View File

@ -0,0 +1,64 @@
package com.qqchen.deploy.backend.schedule.entity;
import com.qqchen.deploy.backend.framework.annotation.LogicDelete;
import com.qqchen.deploy.backend.framework.domain.Entity;
import jakarta.persistence.Column;
import jakarta.persistence.Table;
import lombok.Data;
import lombok.EqualsAndHashCode;
/**
* 定时任务分类
*
* @author qichen
*/
@Data
@EqualsAndHashCode(callSuper = true)
@jakarta.persistence.Entity
@Table(name = "schedule_job_category")
@LogicDelete
public class ScheduleJobCategory extends Entity<Long> {
/**
* 分类编码
*/
@Column(name = "code", nullable = false, length = 50)
private String code;
/**
* 分类名称
*/
@Column(name = "name", nullable = false, length = 100)
private String name;
/**
* 描述
*/
@Column(name = "description", length = 500)
private String description;
/**
* 图标
*/
@Column(name = "icon", length = 50)
private String icon;
/**
* 颜色
*/
@Column(name = "color", length = 20)
private String color;
/**
* 是否启用
*/
@Column(name = "enabled", nullable = false)
private Boolean enabled = true;
/**
* 排序号
*/
@Column(name = "sort", nullable = false)
private Integer sort = 0;
}

View File

@ -0,0 +1,114 @@
package com.qqchen.deploy.backend.schedule.entity;
import com.qqchen.deploy.backend.framework.annotation.LogicDelete;
import com.qqchen.deploy.backend.framework.domain.Entity;
import com.qqchen.deploy.backend.schedule.enums.JobStatusEnum;
import jakarta.persistence.Column;
import jakarta.persistence.EnumType;
import jakarta.persistence.Enumerated;
import jakarta.persistence.Table;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.time.LocalDateTime;
/**
* 定时任务执行日志
*
* @author qichen
*/
@Data
@EqualsAndHashCode(callSuper = true)
@jakarta.persistence.Entity
@Table(name = "schedule_job_log")
@LogicDelete
public class ScheduleJobLog extends Entity<Long> {
/**
* 任务ID
*/
@Column(name = "job_id", nullable = false)
private Long jobId;
/**
* 任务名称
*/
@Column(name = "job_name", nullable = false, length = 100)
private String jobName;
// ==================== 执行信息 ====================
/**
* 执行器Bean名称
*/
@Column(name = "bean_name", nullable = false, length = 100)
private String beanName;
/**
* 执行方法名称
*/
@Column(name = "method_name", nullable = false, length = 100)
private String methodName;
/**
* 方法参数
*/
@Column(name = "method_params", columnDefinition = "TEXT")
private String methodParams;
// ==================== 时间统计 ====================
/**
* 开始执行时间
*/
@Column(name = "execute_time", nullable = false)
private LocalDateTime executeTime;
/**
* 完成时间
*/
@Column(name = "finish_time")
private LocalDateTime finishTime;
/**
* 执行耗时(毫秒)
*/
@Column(name = "duration")
private Long duration;
// ==================== 状态信息 ====================
/**
* 执行状态
*/
@Enumerated(EnumType.STRING)
@Column(name = "status", nullable = false, length = 20)
private JobStatusEnum status;
/**
* 执行结果消息
*/
@Column(name = "result_message", columnDefinition = "TEXT")
private String resultMessage;
/**
* 异常堆栈信息
*/
@Column(name = "exception_info", columnDefinition = "TEXT")
private String exceptionInfo;
// ==================== 服务器信息 ====================
/**
* 执行服务器IP
*/
@Column(name = "server_ip", length = 50)
private String serverIp;
/**
* 执行服务器主机名
*/
@Column(name = "server_host", length = 100)
private String serverHost;
}

View File

@ -0,0 +1,45 @@
package com.qqchen.deploy.backend.schedule.enums;
/**
* 任务执行状态枚举
*
* @author qichen
*/
public enum JobStatusEnum {
/**
* 等待执行
*/
WAITING("等待执行"),
/**
* 运行中
*/
RUNNING("运行中"),
/**
* 执行成功
*/
SUCCESS("执行成功"),
/**
* 执行失败
*/
FAIL("执行失败"),
/**
* 执行超时
*/
TIMEOUT("执行超时");
private final String description;
JobStatusEnum(String description) {
this.description = description;
}
public String getDescription() {
return description;
}
}

View File

@ -0,0 +1,35 @@
package com.qqchen.deploy.backend.schedule.enums;
/**
* 定时任务状态枚举
*
* @author qichen
*/
public enum ScheduleJobStatusEnum {
/**
* 启用
*/
ENABLED("启用"),
/**
* 禁用
*/
DISABLED("禁用"),
/**
* 暂停
*/
PAUSED("暂停");
private final String description;
ScheduleJobStatusEnum(String description) {
this.description = description;
}
public String getDescription() {
return description;
}
}

View File

@ -0,0 +1,106 @@
package com.qqchen.deploy.backend.schedule.job;
import com.qqchen.deploy.backend.schedule.annotation.MonitoredJob;
import com.qqchen.deploy.backend.schedule.service.JobProgressReporter;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.time.LocalDateTime;
/**
* 工作流历史数据清理任务
* 示例定时任务:演示如何使用 @MonitoredJob 注解和 JobProgressReporter 进行进度报告
*
* @author qichen
*/
@Slf4j
@Component("workflowCleanJob")
public class WorkflowCleanJob {
@Resource
private JobProgressReporter progressReporter;
/**
* 执行工作流历史数据清理
*
* @param retentionDays 保留天数
*/
@MonitoredJob(jobId = 1, jobName = "工作流历史数据清理任务")
public void execute(Integer retentionDays) {
log.info("开始执行工作流历史数据清理任务,保留天数: {}", retentionDays);
try {
// 步骤1: 查询需要清理的数据
progressReporter.updateProgress(10, "正在查询需要清理的数据...");
LocalDateTime cutoffDate = LocalDateTime.now().minusDays(retentionDays != null ? retentionDays : 90);
log.info("清理截止日期: {}", cutoffDate);
// 模拟耗时操作
Thread.sleep(1000);
// 步骤2: 清理工作流实例数据
progressReporter.updateProgress(30, "正在清理工作流实例数据...");
int deletedInstances = cleanWorkflowInstances(cutoffDate);
log.info("已清理工作流实例数: {}", deletedInstances);
Thread.sleep(1000);
// 步骤3: 清理节点实例数据
progressReporter.updateProgress(60, "正在清理节点实例数据...");
int deletedNodes = cleanNodeInstances(cutoffDate);
log.info("已清理节点实例数: {}", deletedNodes);
Thread.sleep(1000);
// 步骤4: 清理表单数据
progressReporter.updateProgress(80, "正在清理表单数据...");
int deletedForms = cleanFormData(cutoffDate);
log.info("已清理表单数据数: {}", deletedForms);
Thread.sleep(1000);
// 步骤5: 完成
progressReporter.updateProgress(100, "清理任务完成");
log.info("工作流历史数据清理任务执行完成,共清理实例: {}, 节点: {}, 表单: {}",
deletedInstances, deletedNodes, deletedForms);
} catch (InterruptedException e) {
log.error("任务执行被中断", e);
Thread.currentThread().interrupt();
throw new RuntimeException("任务执行被中断", e);
} catch (Exception e) {
log.error("工作流历史数据清理任务执行失败", e);
throw new RuntimeException("工作流历史数据清理任务执行失败", e);
}
}
/**
* 清理工作流实例数据
*/
private int cleanWorkflowInstances(LocalDateTime cutoffDate) {
// TODO: 实现实际的清理逻辑
// 示例DELETE FROM workflow_instance WHERE deleted = true AND update_time < cutoffDate
log.debug("清理工作流实例,截止日期: {}", cutoffDate);
return 100; // 模拟清理数量
}
/**
* 清理节点实例数据
*/
private int cleanNodeInstances(LocalDateTime cutoffDate) {
// TODO: 实现实际的清理逻辑
log.debug("清理节点实例,截止日期: {}", cutoffDate);
return 500; // 模拟清理数量
}
/**
* 清理表单数据
*/
private int cleanFormData(LocalDateTime cutoffDate) {
// TODO: 实现实际的清理逻辑
log.debug("清理表单数据,截止日期: {}", cutoffDate);
return 50; // 模拟清理数量
}
}
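The @MonitoredJob annotation used above is defined elsewhere in this commit and does not appear in this excerpt. A minimal sketch consistent with the usage @MonitoredJob(jobId = 1, jobName = "工作流历史数据清理任务") might look like this; the real definition may carry additional attributes.

package com.qqchen.deploy.backend.schedule.annotation;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Marks a job method so that the surrounding infrastructure (AOP advice plus
 * Redis status reporting) can identify which schedule_job record it belongs to.
 * Sketch only; the actual annotation in this commit may differ.
 */
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface MonitoredJob {

    /** ID of the schedule_job row this method implements. */
    long jobId();

    /** Display name used in logs and status records. */
    String jobName();
}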

View File

@ -0,0 +1,32 @@
package com.qqchen.deploy.backend.schedule.query;
import com.qqchen.deploy.backend.framework.annotation.QueryField;
import com.qqchen.deploy.backend.framework.enums.QueryType;
import com.qqchen.deploy.backend.framework.query.BaseQuery;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.EqualsAndHashCode;
/**
* 定时任务分类查询条件
*
* @author qichen
*/
@Data
@EqualsAndHashCode(callSuper = true)
@Schema(description = "定时任务分类查询条件")
public class ScheduleJobCategoryQuery extends BaseQuery {
@QueryField(field = "name", type = QueryType.LIKE)
@Schema(description = "分类名称(模糊查询)")
private String name;
@QueryField(field = "code", type = QueryType.EQUAL)
@Schema(description = "分类编码(精确查询)")
private String code;
@QueryField(field = "enabled", type = QueryType.EQUAL)
@Schema(description = "是否启用")
private Boolean enabled;
}

View File

@ -0,0 +1,43 @@
package com.qqchen.deploy.backend.schedule.query;
import com.qqchen.deploy.backend.framework.annotation.QueryField;
import com.qqchen.deploy.backend.framework.enums.QueryType;
import com.qqchen.deploy.backend.framework.query.BaseQuery;
import com.qqchen.deploy.backend.schedule.enums.JobStatusEnum;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.time.LocalDateTime;
/**
* 定时任务执行日志查询条件
*
* @author qichen
*/
@Data
@EqualsAndHashCode(callSuper = true)
@Schema(description = "定时任务执行日志查询条件")
public class ScheduleJobLogQuery extends BaseQuery {
@QueryField(field = "jobId", type = QueryType.EQUAL)
@Schema(description = "任务ID")
private Long jobId;
@QueryField(field = "jobName", type = QueryType.LIKE)
@Schema(description = "任务名称(模糊查询)")
private String jobName;
@QueryField(field = "status", type = QueryType.EQUAL)
@Schema(description = "执行状态")
private JobStatusEnum status;
@QueryField(field = "executeTime", type = QueryType.GREATER_EQUAL)
@Schema(description = "执行时间起始")
private LocalDateTime executeTimeStart;
@QueryField(field = "executeTime", type = QueryType.LESS_EQUAL)
@Schema(description = "执行时间结束")
private LocalDateTime executeTimeEnd;
}

View File

@ -0,0 +1,37 @@
package com.qqchen.deploy.backend.schedule.query;
import com.qqchen.deploy.backend.framework.annotation.QueryField;
import com.qqchen.deploy.backend.framework.enums.QueryType;
import com.qqchen.deploy.backend.framework.query.BaseQuery;
import com.qqchen.deploy.backend.schedule.enums.ScheduleJobStatusEnum;
import io.swagger.v3.oas.annotations.media.Schema;
import lombok.Data;
import lombok.EqualsAndHashCode;
/**
* 定时任务查询条件
*
* @author qichen
*/
@Data
@EqualsAndHashCode(callSuper = true)
@Schema(description = "定时任务查询条件")
public class ScheduleJobQuery extends BaseQuery {
@QueryField(field = "jobName", type = QueryType.LIKE)
@Schema(description = "任务名称(模糊查询)")
private String jobName;
@QueryField(field = "categoryId", type = QueryType.EQUAL)
@Schema(description = "任务分类ID")
private Long categoryId;
@QueryField(field = "status", type = QueryType.EQUAL)
@Schema(description = "任务状态")
private ScheduleJobStatusEnum status;
@QueryField(field = "beanName", type = QueryType.LIKE)
@Schema(description = "Bean名称模糊查询")
private String beanName;
}

View File

@ -0,0 +1,20 @@
package com.qqchen.deploy.backend.schedule.repository;
import com.qqchen.deploy.backend.framework.repository.IBaseRepository;
import com.qqchen.deploy.backend.schedule.entity.ScheduleJobCategory;
import org.springframework.stereotype.Repository;
/**
* 定时任务分类Repository
*
* @author qichen
*/
@Repository
public interface IScheduleJobCategoryRepository extends IBaseRepository<ScheduleJobCategory, Long> {
/**
* 检查分类编码是否存在
*/
boolean existsByCodeAndDeletedFalse(String code);
}

View File

@ -0,0 +1,20 @@
package com.qqchen.deploy.backend.schedule.repository;
import com.qqchen.deploy.backend.framework.repository.IBaseRepository;
import com.qqchen.deploy.backend.schedule.entity.ScheduleJobLog;
import org.springframework.stereotype.Repository;
/**
* 定时任务执行日志Repository
*
* @author qichen
*/
@Repository
public interface IScheduleJobLogRepository extends IBaseRepository<ScheduleJobLog, Long> {
/**
* 统计任务的执行日志数量(排除已删除)
*/
Long countByJobIdAndDeletedFalse(Long jobId);
}

View File

@ -0,0 +1,25 @@
package com.qqchen.deploy.backend.schedule.repository;
import com.qqchen.deploy.backend.framework.repository.IBaseRepository;
import com.qqchen.deploy.backend.schedule.entity.ScheduleJob;
import org.springframework.stereotype.Repository;
/**
* 定时任务Repository
*
* @author qichen
*/
@Repository
public interface IScheduleJobRepository extends IBaseRepository<ScheduleJob, Long> {
/**
* 检查任务名称是否存在
*/
boolean existsByJobNameAndDeletedFalse(String jobName);
/**
* 统计分类下的任务数量
*/
Long countByCategoryIdAndDeletedFalse(Long categoryId);
}

View File

@ -0,0 +1,15 @@
package com.qqchen.deploy.backend.schedule.service;
import com.qqchen.deploy.backend.framework.service.IBaseService;
import com.qqchen.deploy.backend.schedule.dto.ScheduleJobCategoryDTO;
import com.qqchen.deploy.backend.schedule.entity.ScheduleJobCategory;
import com.qqchen.deploy.backend.schedule.query.ScheduleJobCategoryQuery;
/**
* 定时任务分类Service
*
* @author qichen
*/
public interface IScheduleJobCategoryService extends IBaseService<ScheduleJobCategory, ScheduleJobCategoryDTO, ScheduleJobCategoryQuery, Long> {
}

View File

@ -0,0 +1,15 @@
package com.qqchen.deploy.backend.schedule.service;
import com.qqchen.deploy.backend.framework.service.IBaseService;
import com.qqchen.deploy.backend.schedule.dto.ScheduleJobLogDTO;
import com.qqchen.deploy.backend.schedule.entity.ScheduleJobLog;
import com.qqchen.deploy.backend.schedule.query.ScheduleJobLogQuery;
/**
* 定时任务执行日志Service
*
* @author qichen
*/
public interface IScheduleJobLogService extends IBaseService<ScheduleJobLog, ScheduleJobLogDTO, ScheduleJobLogQuery, Long> {
}

View File

@ -0,0 +1,15 @@
package com.qqchen.deploy.backend.schedule.service;
import com.qqchen.deploy.backend.framework.service.IBaseService;
import com.qqchen.deploy.backend.schedule.dto.ScheduleJobDTO;
import com.qqchen.deploy.backend.schedule.entity.ScheduleJob;
import com.qqchen.deploy.backend.schedule.query.ScheduleJobQuery;
/**
* 定时任务Service
*
* @author qichen
*/
public interface IScheduleJobService extends IBaseService<ScheduleJob, ScheduleJobDTO, ScheduleJobQuery, Long> {
}

View File

@ -0,0 +1,82 @@
package com.qqchen.deploy.backend.schedule.service;
import com.qqchen.deploy.backend.schedule.dto.JobStatusDTO;
import jakarta.annotation.Resource;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.time.LocalDateTime;
/**
* 任务进度报告工具
* 供长时间执行的任务手动报告进度
*
* @author qichen
*/
@Slf4j
@Component
public class JobProgressReporter {
@Resource
private JobStatusRedisService jobStatusRedisService;
// 线程本地变量,存储当前任务信息
private static final ThreadLocal<JobInfo> CURRENT_JOB = new ThreadLocal<>();
/**
* 设置当前任务(由AOP调用)
*/
public void setCurrentJob(Long jobId, String jobName, LocalDateTime startTime,
String beanName, String methodName, String methodParams) {
CURRENT_JOB.set(new JobInfo(jobId, jobName, startTime, beanName, methodName, methodParams));
}
/**
* 清除当前任务(由AOP调用)
*/
public void clearCurrentJob() {
CURRENT_JOB.remove();
}
/**
* 更新进度(业务代码调用)
*/
public void updateProgress(Integer progress, String message) {
JobInfo jobInfo = CURRENT_JOB.get();
if (jobInfo == null) {
log.warn("当前线程没有任务信息,无法更新进度");
return;
}
JobStatusDTO statusDTO = JobStatusDTO.builder()
.jobId(jobInfo.getJobId())
.jobName(jobInfo.getJobName())
.status("RUNNING")
.progress(progress)
.currentStep(message)
.message(message)
.startTime(jobInfo.getStartTime())
.build();
jobStatusRedisService.saveJobStatus(jobInfo.getJobId(), statusDTO);
log.debug("任务进度更新: jobId={}, progress={}, message={}",
jobInfo.getJobId(), progress, message);
}
/**
* 任务信息
*/
@Data
@AllArgsConstructor
private static class JobInfo {
private Long jobId;
private String jobName;
private LocalDateTime startTime;
private String beanName;
private String methodName;
private String methodParams;
}
}
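setCurrentJob and clearCurrentJob are documented above as being called by AOP, but the aspect itself is not part of this excerpt. Below is a minimal sketch of such advice, assuming it keys off @MonitoredJob; the MonitoredJobAspect class name and package are hypothetical, and the bean name is approximated here by the target class's simple name.

package com.qqchen.deploy.backend.schedule.aspect; // hypothetical package

import com.qqchen.deploy.backend.schedule.annotation.MonitoredJob;
import com.qqchen.deploy.backend.schedule.service.JobProgressReporter;
import jakarta.annotation.Resource;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.springframework.stereotype.Component;

import java.time.LocalDateTime;

@Aspect
@Component
public class MonitoredJobAspect {

    @Resource
    private JobProgressReporter progressReporter;

    @Around("@annotation(monitoredJob)")
    public Object around(ProceedingJoinPoint joinPoint, MonitoredJob monitoredJob) throws Throwable {
        // Bind the job to the current thread so that updateProgress() calls
        // inside the job body can find it.
        progressReporter.setCurrentJob(
                monitoredJob.jobId(),
                monitoredJob.jobName(),
                LocalDateTime.now(),
                joinPoint.getTarget().getClass().getSimpleName(), // approximation of the bean name
                joinPoint.getSignature().getName(),
                null);
        try {
            return joinPoint.proceed();
        } finally {
            // Always unbind to avoid leaking state across pooled scheduler threads.
            progressReporter.clearCurrentJob();
        }
    }
}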

View File

@ -0,0 +1,149 @@
package com.qqchen.deploy.backend.schedule.service;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.qqchen.deploy.backend.framework.utils.RedisUtil;
import com.qqchen.deploy.backend.schedule.dto.JobStatusDTO;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import java.time.LocalDateTime;
import java.util.HashMap;
import java.util.Map;
/**
* 任务状态Redis服务
*
* @author qichen
*/
@Slf4j
@Service
public class JobStatusRedisService {
@Resource
private RedisUtil redisUtil;
@Resource
private ObjectMapper objectMapper;
private static final String JOB_STATUS_KEY_PREFIX = "schedule:job:status:";
private static final String ALL_STATUS_KEY = "schedule:job:status:all";
private static final String VERSION_KEY = "schedule:job:status:version";
private static final long STATUS_EXPIRE_SECONDS = 3600; // 1小时
/**
* 保存任务状态到Redis
*/
public void saveJobStatus(Long jobId, JobStatusDTO status) {
try {
String key = JOB_STATUS_KEY_PREFIX + jobId;
Map<String, Object> statusMap = new HashMap<>();
statusMap.put("jobId", jobId);
statusMap.put("jobName", status.getJobName() != null ? status.getJobName() : "");
statusMap.put("status", status.getStatus());
statusMap.put("progress", status.getProgress() != null ? status.getProgress() : 0);
statusMap.put("currentStep", status.getCurrentStep() != null ? status.getCurrentStep() : "");
statusMap.put("message", status.getMessage() != null ? status.getMessage() : "");
statusMap.put("startTime", status.getStartTime() != null ? status.getStartTime().toString() : "");
statusMap.put("updateTime", LocalDateTime.now().toString());
// 保存到单独的key,1小时过期
redisUtil.hmset(key, statusMap, STATUS_EXPIRE_SECONDS);
// 同时更新到all hash
String statusJson = objectMapper.writeValueAsString(statusMap);
redisUtil.hset(ALL_STATUS_KEY, jobId.toString(), statusJson);
// 增加版本号
redisUtil.incr(VERSION_KEY, 1);
log.debug("保存任务状态到Redis: jobId={}, status={}", jobId, status.getStatus());
} catch (Exception e) {
log.error("保存任务状态到Redis失败: jobId={}", jobId, e);
}
}
/**
* 获取单个任务状态
*/
public JobStatusDTO getJobStatus(Long jobId) {
try {
String key = JOB_STATUS_KEY_PREFIX + jobId;
Map<Object, Object> statusMap = redisUtil.hmget(key);
if (statusMap.isEmpty()) {
return null;
}
return JobStatusDTO.builder()
.jobId(Long.parseLong(statusMap.get("jobId").toString()))
.jobName((String) statusMap.get("jobName"))
.status((String) statusMap.get("status"))
.progress(Integer.parseInt(statusMap.get("progress").toString()))
.currentStep((String) statusMap.get("currentStep"))
.message((String) statusMap.get("message"))
.updateTime((String) statusMap.get("updateTime"))
.build();
} catch (Exception e) {
log.error("从Redis获取任务状态失败: jobId={}", jobId, e);
return null;
}
}
/**
* 批量获取所有任务状态
*/
public Map<String, JobStatusDTO> getAllJobStatus() {
try {
Map<Object, Object> allStatus = redisUtil.hmget(ALL_STATUS_KEY);
Map<String, JobStatusDTO> result = new HashMap<>();
for (Map.Entry<Object, Object> entry : allStatus.entrySet()) {
String jobId = entry.getKey().toString();
String statusJson = entry.getValue().toString();
@SuppressWarnings("unchecked")
Map<String, Object> statusMap = objectMapper.readValue(statusJson, Map.class);
JobStatusDTO dto = JobStatusDTO.builder()
.jobId(Long.parseLong(jobId))
.jobName((String) statusMap.get("jobName"))
.status((String) statusMap.get("status"))
.progress((Integer) statusMap.get("progress"))
.currentStep((String) statusMap.get("currentStep"))
.message((String) statusMap.get("message"))
.updateTime((String) statusMap.get("updateTime"))
.build();
result.put(jobId, dto);
}
return result;
} catch (Exception e) {
log.error("从Redis批量获取任务状态失败", e);
return new HashMap<>();
}
}
/**
* 获取当前状态版本号
*/
public Long getStatusVersion() {
Object version = redisUtil.get(VERSION_KEY);
return version != null ? Long.parseLong(version.toString()) : 0L;
}
/**
* 删除任务状态
*/
public void deleteJobStatus(Long jobId) {
String key = JOB_STATUS_KEY_PREFIX + jobId;
redisUtil.del(key);
redisUtil.hdel(ALL_STATUS_KEY, jobId.toString());
}
}
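One plausible way for a caller to use the version counter above is to re-read the full status hash only when the counter has moved since the last poll. The JobStatusPoller class below is purely illustrative and not part of the commit.

package com.qqchen.deploy.backend.schedule.support; // hypothetical package

import com.qqchen.deploy.backend.schedule.dto.JobStatusDTO;
import com.qqchen.deploy.backend.schedule.service.JobStatusRedisService;

import java.util.Collections;
import java.util.Map;

/**
 * Illustrative poller: re-reads the full status hash only when the
 * schedule:job:status:version counter has changed.
 */
public class JobStatusPoller {

    private final JobStatusRedisService statusService;
    private long lastSeenVersion = 0L;

    public JobStatusPoller(JobStatusRedisService statusService) {
        this.statusService = statusService;
    }

    public Map<String, JobStatusDTO> pollIfChanged() {
        long current = statusService.getStatusVersion();
        if (current == lastSeenVersion) {
            return Collections.emptyMap(); // no status writes since the last poll
        }
        lastSeenVersion = current;
        return statusService.getAllJobStatus();
    }
}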

View File

@ -0,0 +1,74 @@
package com.qqchen.deploy.backend.schedule.service.impl;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.framework.exception.BusinessException;
import com.qqchen.deploy.backend.framework.service.impl.BaseServiceImpl;
import com.qqchen.deploy.backend.schedule.dto.ScheduleJobCategoryDTO;
import com.qqchen.deploy.backend.schedule.entity.ScheduleJobCategory;
import com.qqchen.deploy.backend.schedule.query.ScheduleJobCategoryQuery;
import com.qqchen.deploy.backend.schedule.repository.IScheduleJobCategoryRepository;
import com.qqchen.deploy.backend.schedule.repository.IScheduleJobRepository;
import com.qqchen.deploy.backend.schedule.service.IScheduleJobCategoryService;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
import java.util.stream.Collectors;
/**
* 定时任务分类Service实现
*
* @author qichen
*/
@Slf4j
@Service
public class ScheduleJobCategoryServiceImpl extends BaseServiceImpl<ScheduleJobCategory, ScheduleJobCategoryDTO, ScheduleJobCategoryQuery, Long>
implements IScheduleJobCategoryService {
@Resource
private IScheduleJobCategoryRepository categoryRepository;
@Resource
private IScheduleJobRepository jobRepository;
@Override
@Transactional
public ScheduleJobCategoryDTO create(ScheduleJobCategoryDTO dto) {
// 检查编码唯一性
if (categoryRepository.existsByCodeAndDeletedFalse(dto.getCode())) {
throw new BusinessException(ResponseCode.DATA_ALREADY_EXISTS, new Object[]{"分类编码", dto.getCode()});
}
return super.create(dto);
}
@Override
@Transactional
public void delete(Long id) {
// 检查是否有关联的任务
Long jobCount = jobRepository.countByCategoryIdAndDeletedFalse(id);
if (jobCount > 0) {
throw new BusinessException(ResponseCode.DATA_IN_USE, new Object[]{"该分类下存在任务"});
}
super.delete(id);
}
@Override
public Page<ScheduleJobCategoryDTO> page(ScheduleJobCategoryQuery query) {
Page<ScheduleJobCategoryDTO> page = super.page(query);
// 统计每个分类的任务数量
List<ScheduleJobCategoryDTO> content = page.getContent().stream()
.peek(category -> {
Long count = jobRepository.countByCategoryIdAndDeletedFalse(category.getId());
category.setJobCount(count);
})
.collect(Collectors.toList());
return new PageImpl<>(content, page.getPageable(), page.getTotalElements());
}
}

View File

@ -0,0 +1,21 @@
package com.qqchen.deploy.backend.schedule.service.impl;
import com.qqchen.deploy.backend.framework.service.impl.BaseServiceImpl;
import com.qqchen.deploy.backend.schedule.dto.ScheduleJobLogDTO;
import com.qqchen.deploy.backend.schedule.entity.ScheduleJobLog;
import com.qqchen.deploy.backend.schedule.query.ScheduleJobLogQuery;
import com.qqchen.deploy.backend.schedule.service.IScheduleJobLogService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
/**
* 定时任务执行日志Service实现
*
* @author qichen
*/
@Slf4j
@Service
public class ScheduleJobLogServiceImpl extends BaseServiceImpl<ScheduleJobLog, ScheduleJobLogDTO, ScheduleJobLogQuery, Long>
implements IScheduleJobLogService {
}

View File

@ -0,0 +1,87 @@
package com.qqchen.deploy.backend.schedule.service.impl;
import com.qqchen.deploy.backend.framework.enums.ResponseCode;
import com.qqchen.deploy.backend.framework.exception.BusinessException;
import com.qqchen.deploy.backend.framework.service.impl.BaseServiceImpl;
import com.qqchen.deploy.backend.schedule.converter.ScheduleJobCategoryConverter;
import com.qqchen.deploy.backend.schedule.dto.ScheduleJobDTO;
import com.qqchen.deploy.backend.schedule.entity.ScheduleJob;
import com.qqchen.deploy.backend.schedule.entity.ScheduleJobCategory;
import com.qqchen.deploy.backend.schedule.query.ScheduleJobQuery;
import com.qqchen.deploy.backend.schedule.repository.IScheduleJobCategoryRepository;
import com.qqchen.deploy.backend.schedule.repository.IScheduleJobRepository;
import com.qqchen.deploy.backend.schedule.service.IScheduleJobService;
import com.qqchen.deploy.backend.workflow.converter.FormDefinitionConverter;
import com.qqchen.deploy.backend.workflow.entity.FormDefinition;
import com.qqchen.deploy.backend.workflow.repository.IFormDefinitionRepository;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
/**
* 定时任务Service实现
*
* @author qichen
*/
@Slf4j
@Service
public class ScheduleJobServiceImpl extends BaseServiceImpl<ScheduleJob, ScheduleJobDTO, ScheduleJobQuery, Long>
implements IScheduleJobService {
@Resource
private IScheduleJobRepository jobRepository;
@Resource
private IScheduleJobCategoryRepository categoryRepository;
@Resource
private ScheduleJobCategoryConverter categoryConverter;
@Resource
private IFormDefinitionRepository formDefinitionRepository;
@Resource
private FormDefinitionConverter formDefinitionConverter;
@Override
@Transactional
public ScheduleJobDTO create(ScheduleJobDTO dto) {
// 检查任务名称唯一性
if (jobRepository.existsByJobNameAndDeletedFalse(dto.getJobName())) {
throw new BusinessException(ResponseCode.DATA_ALREADY_EXISTS, new Object[]{"任务名称", dto.getJobName()});
}
return super.create(dto);
}
@Override
public Page<ScheduleJobDTO> page(ScheduleJobQuery query) {
Page<ScheduleJobDTO> page = super.page(query);
// 填充关联信息
List<ScheduleJobDTO> content = page.getContent().stream()
.peek(job -> {
// 填充分类信息
if (job.getCategoryId() != null) {
Optional<ScheduleJobCategory> categoryOptional = categoryRepository.findById(job.getCategoryId());
categoryOptional.ifPresent(category -> job.setCategory(categoryConverter.toDto(category)));
}
// 填充表单信息
if (job.getFormDefinitionId() != null) {
Optional<FormDefinition> formOptional = formDefinitionRepository.findById(job.getFormDefinitionId());
formOptional.ifPresent(form -> job.setFormDefinition(formDefinitionConverter.toDto(form)));
}
})
.collect(Collectors.toList());
return new PageImpl<>(content, page.getPageable(), page.getTotalElements());
}
}

View File

@ -61,6 +61,18 @@ spring:
drop-first: false
default-schema: deploy-ease-platform
contexts: default
redis:
host: 172.22.222.111
port: 6379
password: Qichen5210523...
database: 7
timeout: 6000ms
lettuce:
pool:
max-active: 8 # 连接池最大连接数
max-idle: 8 # 连接池最大空闲连接数
min-idle: 0 # 连接池最小空闲连接数
max-wait: -1ms # 连接池最大阻塞等待时间
flowable:
database-schema-update: true
# id-generator: org.flowable.common.engine.impl.db.DbIdGenerator

View File

@ -68,6 +68,8 @@ VALUES
(201, '团队管理', '/deploy/teams', '/src/pages/Deploy/Team/List/index', 'TeamOutlined', 2, 200, 1, FALSE, TRUE, 'system', '2024-01-01 00:00:00', 0, FALSE),
-- 应用管理
(202, '应用管理', '/deploy/applications', '/src/pages/Deploy/Application/List/index', 'AppstoreOutlined', 2, 200, 2, FALSE, TRUE, 'system', '2024-01-01 00:00:00', 0, FALSE),
-- 定时任务管理
(203, '定时任务管理', '/deploy/schedule-jobs', '/src/pages/Deploy/ScheduleJob/List/index', 'ClockCircleOutlined', 2, 200, 3, FALSE, TRUE, 'system', '2024-01-01 00:00:00', 0, FALSE),
-- 资源管理
(300, '资源管理', '/resource', 'Layout', 'DatabaseOutlined', 1, NULL, 3, FALSE, TRUE, 'system', '2024-01-01 00:00:00', 0, FALSE),
@ -868,3 +870,25 @@ VALUES
-- DevOps团队成员
(2, 4, 'ops_manager', '负责人', NOW(), 'admin', NOW(), 'admin', NOW(), 1, 0),
(2, 2, 'it_manager', '运维', NOW(), 'admin', NOW(), 'admin', NOW(), 1, 0);
-- ====================================================================
-- 定时任务初始化数据
-- ====================================================================
-- 初始化定时任务分类数据
INSERT INTO schedule_job_category (id, code, name, description, icon, color, enabled, sort, create_by, create_time, update_by, update_time, version, deleted)
VALUES
(1, 'DATA_CLEAN', '数据清理', '定期清理系统历史数据和临时文件', 'DeleteOutlined', '#ff4d4f', 1, 1, 'system', '2024-01-01 00:00:00', 'system', '2024-01-01 00:00:00', 1, 0),
(2, 'DATA_SYNC', '数据同步', '同步外部系统数据到本地', 'SyncOutlined', '#1890ff', 1, 2, 'system', '2024-01-01 00:00:00', 'system', '2024-01-01 00:00:00', 1, 0),
(3, 'REPORT', '报表生成', '定期生成和发送各类统计报表', 'BarChartOutlined', '#52c41a', 1, 3, 'system', '2024-01-01 00:00:00', 'system', '2024-01-01 00:00:00', 1, 0),
(4, 'MONITOR', '系统监控', '监控系统健康状态和性能指标', 'EyeOutlined', '#faad14', 1, 4, 'system', '2024-01-01 00:00:00', 'system', '2024-01-01 00:00:00', 1, 0),
(5, 'BACKUP', '备份任务', '定期备份数据库和重要文件', 'DatabaseOutlined', '#722ed1', 1, 5, 'system', '2024-01-01 00:00:00', 'system', '2024-01-01 00:00:00', 1, 0);
-- 初始化定时任务数据
INSERT INTO schedule_job (id, job_name, job_description, category_id, bean_name, method_name, method_params,
cron_expression, status, concurrent, timeout_seconds, retry_count,
create_by, create_time, update_by, update_time, version, deleted)
VALUES
(1, '工作流历史数据清理', '清理90天前已完成或已取消的工作流实例数据', 1, 'workflowCleanJob', 'execute',
'{"retentionDays": 90}', '0 0 2 * * ?', 'ENABLED', 0, 3600, 2,
'system', '2024-01-01 00:00:00', 'system', '2024-01-01 00:00:00', 1, 0);
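The seeded expression 0 0 2 * * ? fires every day at 02:00. Given that this commit also adds the spring-boot-starter-quartz dependency and a schedule.job.cron.invalid message, one way to sanity-check such expressions is Quartz's CronExpression; a minimal, self-contained sketch (the CronCheck class is illustrative):

package com.qqchen.deploy.backend.schedule.support; // hypothetical package

import org.quartz.CronExpression;

import java.text.ParseException;
import java.util.Date;

public class CronCheck {

    public static void main(String[] args) throws ParseException {
        String cron = "0 0 2 * * ?";
        // Quartz can both validate the expression and compute the next fire time.
        System.out.println("valid: " + CronExpression.isValidExpression(cron));
        System.out.println("next fire: " + new CronExpression(cron).getNextValidTimeAfter(new Date()));
    }
}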

View File

@ -889,3 +889,112 @@ CREATE TABLE sys_notification_channel (
INDEX idx_deleted (deleted)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='通知渠道配置表';
-- --------------------------------------------------------------------------------------
-- 定时任务管理表
-- --------------------------------------------------------------------------------------
-- 定时任务分类表
CREATE TABLE schedule_job_category (
id BIGINT AUTO_INCREMENT PRIMARY KEY COMMENT '主键ID',
create_by VARCHAR(100) NULL COMMENT '创建人',
create_time DATETIME(6) NULL COMMENT '创建时间',
update_by VARCHAR(100) NULL COMMENT '更新人',
update_time DATETIME(6) NULL COMMENT '更新时间',
version INT NOT NULL DEFAULT 1 COMMENT '版本号',
deleted BIT NOT NULL DEFAULT 0 COMMENT '是否删除',
code VARCHAR(50) NOT NULL COMMENT '分类编码',
name VARCHAR(100) NOT NULL COMMENT '分类名称',
description VARCHAR(500) NULL COMMENT '描述',
icon VARCHAR(50) NULL COMMENT '图标',
color VARCHAR(20) NULL COMMENT '颜色',
enabled BIT NOT NULL DEFAULT 1 COMMENT '是否启用',
sort INT NOT NULL DEFAULT 0 COMMENT '排序号',
UNIQUE INDEX uk_code (code, deleted),
INDEX idx_enabled (enabled)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='定时任务分类表';
-- 定时任务表
CREATE TABLE schedule_job (
id BIGINT AUTO_INCREMENT PRIMARY KEY COMMENT '主键ID',
create_by VARCHAR(100) NULL COMMENT '创建人',
create_time DATETIME(6) NULL COMMENT '创建时间',
update_by VARCHAR(100) NULL COMMENT '更新人',
update_time DATETIME(6) NULL COMMENT '更新时间',
version INT NOT NULL DEFAULT 1 COMMENT '版本号',
deleted BIT NOT NULL DEFAULT 0 COMMENT '是否删除',
job_name VARCHAR(100) NOT NULL COMMENT '任务名称',
job_description VARCHAR(500) NULL COMMENT '任务描述',
category_id BIGINT NOT NULL COMMENT '任务分类ID',
-- 执行配置
bean_name VARCHAR(100) NOT NULL COMMENT 'Spring Bean名称',
method_name VARCHAR(100) NOT NULL COMMENT '方法名称',
form_definition_id BIGINT NULL COMMENT '参数表单ID',
method_params TEXT NULL COMMENT '方法参数JSON',
-- 调度配置
cron_expression VARCHAR(100) NOT NULL COMMENT 'Cron表达式',
status VARCHAR(20) NOT NULL DEFAULT 'ENABLED' COMMENT '任务状态:ENABLED-启用、DISABLED-禁用、PAUSED-暂停',
concurrent BIT NOT NULL DEFAULT 0 COMMENT '是否允许并发执行',
-- 统计信息
last_execute_time DATETIME(6) NULL COMMENT '上次执行时间',
next_execute_time DATETIME(6) NULL COMMENT '下次执行时间',
execute_count INT NOT NULL DEFAULT 0 COMMENT '执行次数',
success_count INT NOT NULL DEFAULT 0 COMMENT '成功次数',
fail_count INT NOT NULL DEFAULT 0 COMMENT '失败次数',
-- 高级配置
timeout_seconds INT NULL COMMENT '超时时间(秒)',
retry_count INT NOT NULL DEFAULT 0 COMMENT '失败重试次数',
alert_email VARCHAR(500) NULL COMMENT '告警邮箱(多个用逗号分隔)',
UNIQUE INDEX uk_job_name (job_name, deleted),
INDEX idx_category (category_id),
INDEX idx_status (status),
INDEX idx_next_execute_time (next_execute_time),
CONSTRAINT fk_job_category FOREIGN KEY (category_id) REFERENCES schedule_job_category(id),
CONSTRAINT fk_job_form FOREIGN KEY (form_definition_id) REFERENCES form_definition(id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='定时任务表';
-- 定时任务执行日志表
CREATE TABLE schedule_job_log (
id BIGINT AUTO_INCREMENT PRIMARY KEY COMMENT '主键ID',
create_by VARCHAR(100) NULL COMMENT '创建人',
create_time DATETIME(6) NULL COMMENT '创建时间',
update_by VARCHAR(100) NULL COMMENT '更新人',
update_time DATETIME(6) NULL COMMENT '更新时间',
version INT NOT NULL DEFAULT 1 COMMENT '版本号',
deleted BIT NOT NULL DEFAULT 0 COMMENT '是否删除',
job_id BIGINT NOT NULL COMMENT '任务ID',
job_name VARCHAR(100) NOT NULL COMMENT '任务名称',
-- 执行信息
bean_name VARCHAR(100) NOT NULL COMMENT '执行器Bean名称',
method_name VARCHAR(100) NOT NULL COMMENT '执行方法名称',
method_params TEXT NULL COMMENT '方法参数',
-- 时间统计
execute_time DATETIME(6) NOT NULL COMMENT '开始执行时间',
finish_time DATETIME(6) NULL COMMENT '完成时间',
duration BIGINT NULL COMMENT '执行耗时(毫秒)',
-- 状态信息
status VARCHAR(20) NOT NULL COMMENT '执行状态:WAITING-等待、RUNNING-运行中、SUCCESS-成功、FAIL-失败、TIMEOUT-超时',
result_message TEXT NULL COMMENT '执行结果消息',
exception_info TEXT NULL COMMENT '异常堆栈信息',
-- 服务器信息
server_ip VARCHAR(50) NULL COMMENT '执行服务器IP',
server_host VARCHAR(100) NULL COMMENT '执行服务器主机名',
INDEX idx_job_id (job_id),
INDEX idx_execute_time (execute_time),
INDEX idx_status (status),
CONSTRAINT fk_log_job FOREIGN KEY (job_id) REFERENCES schedule_job(id)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci COMMENT='定时任务执行日志表';

View File

@ -37,6 +37,8 @@ dependency.injection.entitypath.failed=初始化实体 {0} 的EntityPath失败:
# 通用业务错误 (2000-2099)
tenant.not.found=租户不存在
data.not.found=找不到ID为{0}的{1}
data.already.exists={0}已存在:{1}
data.in.use=数据正在使用中,无法删除
entity.not.found.id=找不到ID为{0}的实体
entity.not.found.message={0}
entity.not.found.name.id=找不到ID为{1}的{0}
@ -185,3 +187,14 @@ team.member.not.found=团队成员不存在或已删除
team.member.already.exists=该用户已是团队成员
team.application.not.found=团队应用关联不存在或已删除
team.application.already.exists=该应用已关联到此团队
# 定时任务相关 (2950-2969)
schedule.job.category.not.found=定时任务分类不存在或已删除
schedule.job.category.code.exists=分类编码{0}已存在
schedule.job.category.has.jobs=该分类下存在定时任务,无法删除
schedule.job.not.found=定时任务不存在或已删除
schedule.job.name.exists=任务名称{0}已存在
schedule.job.log.not.found=任务执行日志不存在
schedule.job.executor.not.found=找不到任务执行器:{0}
schedule.job.cron.invalid=Cron表达式{0}无效
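These keys use {0}-style placeholders, so they are presumably resolved through Spring's MessageSource, in line with the BusinessException calls shown earlier. A minimal, illustrative sketch of resolving one of them follows; the ScheduleMessages helper is an assumption, not part of the commit.

package com.qqchen.deploy.backend.schedule.support; // hypothetical package

import jakarta.annotation.Resource;
import org.springframework.context.MessageSource;
import org.springframework.context.i18n.LocaleContextHolder;
import org.springframework.stereotype.Component;

@Component
public class ScheduleMessages {

    @Resource
    private MessageSource messageSource;

    /** Resolves e.g. schedule.job.cron.invalid with the offending expression as {0}. */
    public String cronInvalid(String cronExpression) {
        return messageSource.getMessage(
                "schedule.job.cron.invalid",
                new Object[]{cronExpression},
                LocaleContextHolder.getLocale());
    }
}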