Handle issue I5ZS0R, code optimization

inrgihc committed 2023-02-11 20:03:21 +08:00
parent 10a90611e0
commit 87a6ce601b
7 changed files with 80 additions and 5 deletions

View File

@@ -12,7 +12,9 @@ package com.gitee.dbswitch.admin;
import org.springframework.boot.Banner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableScheduling;
@EnableScheduling
@tk.mybatis.spring.annotation.MapperScan("com.gitee.dbswitch.admin.mapper")
@SpringBootApplication
public class AdminApplication {

View File

@@ -14,13 +14,12 @@ import com.gitee.dbswitch.admin.mapper.AssignmentJobMapper;
import com.gitee.dbswitch.admin.model.ops.OpsTaskJobTrend;
import com.gitee.dbswitch.admin.type.JobStatusEnum;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import javax.annotation.Resource;
import org.springframework.stereotype.Repository;
import tk.mybatis.mapper.entity.Example;
import tk.mybatis.mapper.util.Sqls;
@Repository
public class AssignmentJobDAO {
@@ -72,12 +71,23 @@ public class AssignmentJobDAO {
}
public int getTotalCount() {
-  return Optional.ofNullable(assignmentJobMapper.selectAll())
-      .orElseGet(ArrayList::new).size();
+  return assignmentJobMapper.selectCountByExample(null);
}
public List<OpsTaskJobTrend> queryTaskJobTrend(Integer days) {
return assignmentJobMapper.queryTaskJobTrend(days);
}
public void updateStatus(JobStatusEnum originalStatus, JobStatusEnum targetStatus, String errorLog) {
AssignmentJobEntity updateSet = new AssignmentJobEntity();
updateSet.setStatus(targetStatus.getValue());
if (JobStatusEnum.FAIL.equals(targetStatus)) {
updateSet.setErrorLog(errorLog);
}
Example condition = Example.builder(AssignmentJobEntity.class)
.where(Sqls.custom().andEqualTo("status", originalStatus.getValue()))
.build();
assignmentJobMapper.updateByExampleSelective(updateSet, condition);
}
}
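
A side note on the two tk.mybatis calls introduced above, since the generated SQL is not obvious from the Java. The statements below are approximations and the table/column names are assumptions for illustration only, not taken from this commit:

// Sketch, not project code: roughly what tk.mybatis generates for the calls above.

// getTotalCount(): the count now happens inside the database instead of
// loading every row with selectAll() and calling size() on the list:
//   SELECT COUNT(*) FROM dbswitch_assignment_job
int total = assignmentJobMapper.selectCountByExample(null);

// updateStatus(...): only the non-null fields of updateSet are written
// ("Selective"), filtered by the Example condition built with Sqls.custom():
//   UPDATE dbswitch_assignment_job SET status = ?, error_log = ? WHERE status = ?
assignmentJobMapper.updateByExampleSelective(updateSet, condition);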

View File

@@ -13,7 +13,7 @@ import com.gitee.dbswitch.admin.entity.JobLogbackEntity;
import com.gitee.dbswitch.admin.mapper.JobLogbackMapper;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import java.util.Objects;
import javax.annotation.Resource;
import org.springframework.stereotype.Repository;
import tk.mybatis.mapper.entity.Example;
@@ -48,4 +48,10 @@ public class JobLogbackDAO {
return jobLogbackMapper.selectByExample(example);
}
public void deleteOldest(Integer days) {
if (Objects.nonNull(days)) {
jobLogbackMapper.deleteByDays(days);
}
}
}

View File

@@ -10,8 +10,19 @@
package com.gitee.dbswitch.admin.mapper;
import com.gitee.dbswitch.admin.entity.JobLogbackEntity;
import org.apache.ibatis.annotations.Delete;
import org.apache.ibatis.annotations.Param;
import tk.mybatis.mapper.common.Mapper;
public interface JobLogbackMapper extends Mapper<JobLogbackEntity> {
@Delete("<script>"
+ "<if test='_databaseId == \"mysql\" '>"
+ "DELETE FROM DBSWITCH_JOB_LOGBACK WHERE date(create_time) &lt; DATE_SUB( CURDATE(), INTERVAL ${days} DAY )"
+ "</if>"
+ "<if test='_databaseId == \"postgresql\" '>"
+ "DELETE FROM DBSWITCH_JOB_LOGBACK WHERE create_time::date &lt; CURRENT_DATE - INTERVAL'${days} day'"
+ "</if>"
+ "</script>")
void deleteByDays(@Param("days") Integer days);
}
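
One thing the `<if test='_databaseId == ...'>` branches rely on: MyBatis only populates _databaseId when a DatabaseIdProvider is registered (a bean of that type is picked up by the MyBatis Spring Boot autoconfiguration). Also note that ${days} is plain text substitution rather than a bound parameter, which is acceptable here only because the value comes from the job.log.clean.days configuration property rather than user input. A minimal sketch of the provider wiring, assuming it is not already configured elsewhere in the project:

import java.util.Properties;
import org.apache.ibatis.mapping.DatabaseIdProvider;
import org.apache.ibatis.mapping.VendorDatabaseIdProvider;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

// Sketch, not project code: maps JDBC database product names to the ids tested above.
@Configuration
public class DatabaseIdConfiguration {

  @Bean
  public DatabaseIdProvider databaseIdProvider() {
    VendorDatabaseIdProvider provider = new VendorDatabaseIdProvider();
    Properties aliases = new Properties();
    aliases.setProperty("MySQL", "mysql");           // matches _databaseId == "mysql"
    aliases.setProperty("PostgreSQL", "postgresql"); // matches _databaseId == "postgresql"
    provider.setProperties(aliases);
    return provider;
  }
}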

View File

@@ -16,9 +16,15 @@ import java.util.Optional;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import javax.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.event.EventListener;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
@Slf4j
@Service
public class JobLogbackService {
@@ -26,6 +32,27 @@ public class JobLogbackService {
private AssignmentJobDAO assignmentJobDAO;
@Resource
private JobLogbackDAO jobLogbackDAO;
@Value("${job.log.clean.days:30}")
private Integer cleanJobLogDays;
@EventListener(ApplicationReadyEvent.class)
public void cleanOnceAfterRestart() {
doCleanHistoryLog();
}
@Scheduled(cron = "0 0 0 * * ? ")
public void cleanSchedule() {
doCleanHistoryLog();
}
private void doCleanHistoryLog() {
try {
jobLogbackDAO.deleteOldest(cleanJobLogDays);
log.error("Success to clean history job log for {} days", cleanJobLogDays);
} catch (Throwable t) {
log.error("Failed to clean history job log,", t);
}
}
public Result<TaskJobLogbackResponse> tailLog(Long jobId, Integer size) {
TaskJobLogbackResponse response = new TaskJobLogbackResponse();

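The history-log cleanup above is triggered twice: once right after startup (ApplicationReadyEvent) and then daily by the cron expression, which only fires because @EnableScheduling was added to AdminApplication earlier in this commit; the retention window comes from job.log.clean.days and defaults to 30 days. A small sketch to sanity-check what the cron means (CronExpression needs Spring 5.3+, an assumption about the framework version used here):

import java.time.LocalDateTime;
import org.springframework.scheduling.support.CronExpression;

// Sketch, not project code: "0 0 0 * * ?" = second 0, minute 0, hour 0, any day -> midnight daily.
class CronSanityCheck {
  public static void main(String[] args) {
    CronExpression cron = CronExpression.parse("0 0 0 * * ?");
    LocalDateTime next = cron.next(LocalDateTime.of(2023, 2, 11, 20, 3));
    System.out.println(next); // prints 2023-02-12T00:00
  }
}
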
View File

@@ -23,8 +23,12 @@ import java.util.List;
import java.util.Objects;
import java.util.function.Supplier;
import javax.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.event.EventListener;
import org.springframework.stereotype.Service;
@Slf4j
@Service
public class JobManagerService {
@@ -33,6 +37,17 @@ public class JobManagerService {
@Resource
private ScheduleService scheduleService;
@EventListener(ApplicationReadyEvent.class)
public void initAfterRestart() {
String errorLog = "Job was canceled by restart dbswitch program! ";
try {
assignmentJobDAO.updateStatus(JobStatusEnum.RUNNING, JobStatusEnum.FAIL, errorLog);
log.info("Success to revise job status");
} catch (Throwable t) {
log.error("Error when revise job status from running to failed:", t);
}
}
public PageResult<TaskJobDetailResponse> listJobs(Long assignmentId, Integer page, Integer size) {
Supplier<List<TaskJobDetailResponse>> method = () -> {
List<AssignmentJobEntity> jobs = assignmentJobDAO.getByAssignmentId(assignmentId);

View File

@@ -153,6 +153,10 @@ public class DatabaseDmImpl extends AbstractDatabase implements IDatabaseInterfa
case ColumnMetaData.TYPE_STRING:
if (null != pks && pks.contains(fieldname)) {
retval.append("VARCHAR(" + length + ")");
} else if (length > 0 && length < 1900) {
// The maximum storable length is determined by the database page size; strings are stored by bytes.
// Page size maps to the actual maximum length as: 4K->1900; 8K->3900; 16K->8000; 32K->8188,
// so 1900 is used as the conservative cutoff (safe even for the smallest, 4K, page size).
retval.append("VARCHAR(").append(length).append(')');
} else {
retval.append("TEXT");
}