version for 1.6.9

This commit is contained in:
inrgihc
2022-04-24 23:57:06 +08:00
parent d01c01a5e5
commit a803b39dd9
41 changed files with 204 additions and 73 deletions

View File

@@ -2,7 +2,7 @@
set -e set -e
DBSWITCH_VERSION=1.6.8 DBSWITCH_VERSION=1.6.9
BUILD_DOCKER_DIR="$( cd "$( dirname "$0" )" && pwd )" BUILD_DOCKER_DIR="$( cd "$( dirname "$0" )" && pwd )"
PROJECT_ROOT_DIR=$( dirname "$BUILD_DOCKER_DIR") PROJECT_ROOT_DIR=$( dirname "$BUILD_DOCKER_DIR")
DOCKER_DBSWITCH_DIR=$BUILD_DOCKER_DIR/dbswitch DOCKER_DBSWITCH_DIR=$BUILD_DOCKER_DIR/dbswitch

View File

@@ -13,7 +13,7 @@ services:
MYSQL_ROOT_HOST: '%' MYSQL_ROOT_HOST: '%'
dbswitch: dbswitch:
container_name: dbswitch_webui container_name: dbswitch_webui
image: inrgihc/dbswitch:1.6.8 image: inrgihc/dbswitch:1.6.9
environment: environment:
MYSQLDB_HOST: dbswitch_mysqldb MYSQLDB_HOST: dbswitch_mysqldb
MYSQLDB_PORT: 3306 MYSQLDB_PORT: 3306

View File

@@ -5,7 +5,7 @@
<parent> <parent>
<groupId>com.gitee.dbswitch</groupId> <groupId>com.gitee.dbswitch</groupId>
<artifactId>dbswitch-parent</artifactId> <artifactId>dbswitch-parent</artifactId>
<version>1.6.8</version> <version>1.6.9</version>
</parent> </parent>
<artifactId>dbswitch-admin</artifactId> <artifactId>dbswitch-admin</artifactId>

View File

@@ -13,9 +13,9 @@ import com.gitee.dbswitch.admin.common.excption.DbswitchException;
import com.gitee.dbswitch.admin.common.response.ResultCode; import com.gitee.dbswitch.admin.common.response.ResultCode;
import com.gitee.dbswitch.admin.entity.AssignmentConfigEntity; import com.gitee.dbswitch.admin.entity.AssignmentConfigEntity;
import com.gitee.dbswitch.admin.entity.AssignmentTaskEntity; import com.gitee.dbswitch.admin.entity.AssignmentTaskEntity;
import com.gitee.dbswitch.admin.service.ScheduleService;
import com.gitee.dbswitch.admin.type.IncludeExcludeEnum; import com.gitee.dbswitch.admin.type.IncludeExcludeEnum;
import com.gitee.dbswitch.admin.type.ScheduleModeEnum; import com.gitee.dbswitch.admin.type.ScheduleModeEnum;
import com.gitee.dbswitch.admin.util.CronExprUtils;
import com.gitee.dbswitch.common.entity.PatternMapper; import com.gitee.dbswitch.common.entity.PatternMapper;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
@@ -55,11 +55,7 @@ public class AssigmentCreateRequest {
assignment.setDescription(description); assignment.setDescription(description);
assignment.setScheduleMode(scheduleMode); assignment.setScheduleMode(scheduleMode);
if (ScheduleModeEnum.SYSTEM_SCHEDULED == this.getScheduleMode()) { if (ScheduleModeEnum.SYSTEM_SCHEDULED == this.getScheduleMode()) {
if (!ScheduleService.checkCronExpressionValid(this.getCronExpression())) { CronExprUtils.checkCronExpressionValid(this.getCronExpression(), 120);
throw new DbswitchException(ResultCode.ERROR_INVALID_ARGUMENT,
"CRON表达式[" + this.getCronExpression() + "]");
}
assignment.setCronExpression(this.getCronExpression()); assignment.setCronExpression(this.getCronExpression());
} }

View File

@@ -13,9 +13,9 @@ import com.gitee.dbswitch.admin.common.excption.DbswitchException;
import com.gitee.dbswitch.admin.common.response.ResultCode; import com.gitee.dbswitch.admin.common.response.ResultCode;
import com.gitee.dbswitch.admin.entity.AssignmentConfigEntity; import com.gitee.dbswitch.admin.entity.AssignmentConfigEntity;
import com.gitee.dbswitch.admin.entity.AssignmentTaskEntity; import com.gitee.dbswitch.admin.entity.AssignmentTaskEntity;
import com.gitee.dbswitch.admin.service.ScheduleService;
import com.gitee.dbswitch.admin.type.IncludeExcludeEnum; import com.gitee.dbswitch.admin.type.IncludeExcludeEnum;
import com.gitee.dbswitch.admin.type.ScheduleModeEnum; import com.gitee.dbswitch.admin.type.ScheduleModeEnum;
import com.gitee.dbswitch.admin.util.CronExprUtils;
import com.gitee.dbswitch.common.entity.PatternMapper; import com.gitee.dbswitch.common.entity.PatternMapper;
import java.util.List; import java.util.List;
import java.util.Objects; import java.util.Objects;
@@ -56,11 +56,7 @@ public class AssigmentUpdateRequest {
assignment.setDescription(description); assignment.setDescription(description);
assignment.setScheduleMode(scheduleMode); assignment.setScheduleMode(scheduleMode);
if (ScheduleModeEnum.SYSTEM_SCHEDULED == this.getScheduleMode()) { if (ScheduleModeEnum.SYSTEM_SCHEDULED == this.getScheduleMode()) {
if (!ScheduleService.checkCronExpressionValid(this.getCronExpression())) { CronExprUtils.checkCronExpressionValid(this.getCronExpression(), 120);
throw new DbswitchException(ResultCode.ERROR_INVALID_ARGUMENT,
"CRON表达式[" + this.getCronExpression() + "]");
}
assignment.setCronExpression(this.getCronExpression()); assignment.setCronExpression(this.getCronExpression());
} }

View File

@@ -41,8 +41,11 @@ import java.util.function.Supplier;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import javax.annotation.Resource; import javax.annotation.Resource;
import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.quartz.CronExpression;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
import org.quartz.CronExpression;
@Service @Service
public class AssignmentService { public class AssignmentService {

View File

@@ -118,7 +118,10 @@ public class JobExecutorService extends QuartzJobBean implements InterruptableJo
try { try {
ReentrantLock lock = mutexes.get(taskId.toString(), ReentrantLock::new); ReentrantLock lock = mutexes.get(taskId.toString(), ReentrantLock::new);
lock.lock(); while (!lock.tryLock(1, TimeUnit.SECONDS)) {
TimeUnit.SECONDS.sleep(1);
}
try { try {
log.info("Execute Quartz Job, and task id is : {} , job id is: {}", taskId, log.info("Execute Quartz Job, and task id is : {} , job id is: {}", taskId,
assignmentJobEntity.getId()); assignmentJobEntity.getId());
@@ -141,6 +144,12 @@ public class JobExecutorService extends QuartzJobBean implements InterruptableJo
} }
MigrationService mainService = new MigrationService(properties); MigrationService mainService = new MigrationService(properties);
if (interrupted) {
log.info("Quartz task id:{} interrupted", jobDataMap.getLong(TASK_ID));
return;
}
// 实际执行JOB
mainService.run(); mainService.run();
if (assignmentConfigEntity.getFirstFlag()) { if (assignmentConfigEntity.getFirstFlag()) {
@@ -166,7 +175,7 @@ public class JobExecutorService extends QuartzJobBean implements InterruptableJo
} finally { } finally {
lock.unlock(); lock.unlock();
} }
} catch (ExecutionException e) { } catch (ExecutionException | InterruptedException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }

View File

@@ -83,10 +83,18 @@ public class PatternMapperService {
dbConn.getUrl(), dbConn.getUsername(), dbConn.getPassword(), request.getSchemaName(), dbConn.getUrl(), dbConn.getUsername(), dbConn.getPassword(), request.getSchemaName(),
request.getTableName()); request.getTableName());
for (ColumnDescription cd : tables) { for (ColumnDescription cd : tables) {
result.add(PreviewNameMapperResponse.builder() String targetName = PatterNameUtils.getFinalName(cd.getFieldName(), request.getNameMapper());
.originalName(cd.getFieldName()) if (StringUtils.isNotBlank(targetName)) {
.targetName(PatterNameUtils.getFinalName(cd.getFieldName(), request.getNameMapper())) result.add(PreviewNameMapperResponse.builder()
.build()); .originalName(cd.getFieldName())
.targetName(targetName)
.build());
} else {
result.add(PreviewNameMapperResponse.builder()
.originalName(cd.getFieldName())
.targetName("<!字段被删除>")
.build());
}
} }
return Result.success(result); return Result.success(result);

View File

@@ -55,16 +55,6 @@ public class ScheduleService {
@Resource @Resource
private AssignmentJobDAO assignmentJobDAO; private AssignmentJobDAO assignmentJobDAO;
public static boolean checkCronExpressionValid(String cronExpression) {
try {
CronScheduleBuilder.cronSchedule(cronExpression);
return true;
} catch (Exception e) {
return false;
}
}
// public Trigger getQuartzJobDetail(String jobKey) { // public Trigger getQuartzJobDetail(String jobKey) {
// Scheduler scheduler = schedulerFactoryBean.getScheduler(); // Scheduler scheduler = schedulerFactoryBean.getScheduler();
// //

View File

@@ -0,0 +1,54 @@
// Copyright tang. All rights reserved.
// https://gitee.com/inrgihc/dbswitch
//
// Use of this source code is governed by a BSD-style license
//
// Author: tang (inrgihc@126.com)
// Date : 2020/1/2
// Location: beijing , china
/////////////////////////////////////////////////////////////
package com.gitee.dbswitch.admin.util;
import com.gitee.dbswitch.admin.common.excption.DbswitchException;
import com.gitee.dbswitch.admin.common.response.ResultCode;
import java.text.ParseException;
import java.util.Date;
import org.apache.commons.lang.StringUtils;
import org.quartz.CronExpression;
/**
* CRON表达式工具类
*/
public final class CronExprUtils {

  private CronExprUtils() {
    // Utility class: no instances.
  }

  /**
   * Validates a CRON expression, rejecting syntactically invalid expressions, expressions
   * that can never fire in the future, and expressions that fire more frequently than the
   * given minimum interval. A blank expression is accepted (treated as "no schedule").
   *
   * @param cronExpression     CRON expression to check; blank/null values are skipped
   * @param minIntervalSeconds minimum allowed interval between two consecutive firings (seconds)
   * @throws DbswitchException if the expression is invalid, never fires again, or fires
   *                           more often than {@code minIntervalSeconds}
   */
  public static void checkCronExpressionValid(String cronExpression, int minIntervalSeconds) {
    if (StringUtils.isBlank(cronExpression)) {
      // Nothing to validate: a blank expression means no schedule is configured.
      return;
    }

    CronExpression expression;
    try {
      expression = new CronExpression(cronExpression);
    } catch (ParseException e) {
      // BUG FIX: the original format string "正则表达式%s无效" had a %s placeholder but no
      // argument, so String.format threw MissingFormatArgumentException at runtime instead
      // of reporting the invalid expression. Also relabeled "正则表达式" (regex) as
      // "cron表达式" for consistency with the other messages in this method.
      throw new DbswitchException(ResultCode.ERROR_INVALID_ARGUMENT,
          String.format("cron表达式[%s]无效", cronExpression));
    }

    // The expression must have at least one firing time in the future.
    Date nextDate = expression.getNextValidTimeAfter(new Date(System.currentTimeMillis()));
    if (null == nextDate) {
      throw new DbswitchException(ResultCode.ERROR_INVALID_ARGUMENT,
          String.format("cron表达式[%s]不可以在历史时间运行", cronExpression));
    }

    // If a second firing exists, the gap between the first two firings must not be
    // shorter than the configured minimum interval.
    Date calculateDate = expression.getNextValidTimeAfter(new Date(nextDate.getTime() + 1));
    if (null != calculateDate) {
      long intervalSeconds = (calculateDate.getTime() - nextDate.getTime()) / 1000;
      if (intervalSeconds < minIntervalSeconds) {
        // %d instead of %s for the int threshold: identical output, type-consistent.
        throw new DbswitchException(ResultCode.ERROR_INVALID_ARGUMENT,
            String.format("cron表达式[%s]运行间隔时间为%d秒, 小于设定的阈值 [%d秒]",
                cronExpression, intervalSeconds, minIntervalSeconds));
      }
    }
  }
}

View File

@@ -17,6 +17,9 @@ import java.util.List;
import java.util.Objects; import java.util.Objects;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
/**
* JSON序列化与反序列化工具类
*/
@Slf4j @Slf4j
public final class JsonUtils { public final class JsonUtils {

View File

@@ -16,6 +16,9 @@ import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.function.Supplier; import java.util.function.Supplier;
/**
* 分页工具类
*/
public class PageUtils { public class PageUtils {
public static <E> PageResult<E> getPage(Supplier<List<E>> method, Integer pageNum, public static <E> PageResult<E> getPage(Supplier<List<E>> method, Integer pageNum,

View File

@@ -11,6 +11,9 @@ package com.gitee.dbswitch.admin.util;
import cn.hutool.crypto.digest.BCrypt; import cn.hutool.crypto.digest.BCrypt;
/**
* 密码工具类
*/
public final class PasswordUtils { public final class PasswordUtils {
public static String encryptPassword(String password, String credentialsSalt) { public static String encryptPassword(String password, String credentialsSalt) {

View File

@@ -15,6 +15,9 @@ import org.springframework.util.StringUtils;
import org.springframework.web.context.request.RequestContextHolder; import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes; import org.springframework.web.context.request.ServletRequestAttributes;
/**
* 获取Servlet服务器的HTTP参数相关工具类
*/
@Slf4j @Slf4j
public class ServletUtils { public class ServletUtils {
@@ -74,7 +77,7 @@ public class ServletUtils {
ip = request.getRemoteAddr(); ip = request.getRemoteAddr();
} }
} catch (Exception e) { } catch (Exception e) {
log.error("IPUtils ERROR ", e); log.error("get client IP address error: ", e);
} }
return ip; return ip;

View File

@@ -14,6 +14,9 @@ import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware; import org.springframework.context.ApplicationContextAware;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
/**
* Spring容器获取BEAN工具类
*/
@Component @Component
public class SpringUtils implements ApplicationContextAware { public class SpringUtils implements ApplicationContextAware {

View File

@@ -11,6 +11,9 @@ package com.gitee.dbswitch.admin.util;
import java.util.UUID; import java.util.UUID;
/**
* UUID工具类
*/
public class UuidUtils { public class UuidUtils {
public static String generateUuid() { public static String generateUuid() {

View File

@@ -5,7 +5,7 @@
<parent> <parent>
<groupId>com.gitee.dbswitch</groupId> <groupId>com.gitee.dbswitch</groupId>
<artifactId>dbswitch-parent</artifactId> <artifactId>dbswitch-parent</artifactId>
<version>1.6.8</version> <version>1.6.9</version>
</parent> </parent>
<artifactId>dbswitch-common</artifactId> <artifactId>dbswitch-common</artifactId>

View File

@@ -5,7 +5,7 @@
<parent> <parent>
<groupId>com.gitee.dbswitch</groupId> <groupId>com.gitee.dbswitch</groupId>
<artifactId>dbswitch-parent</artifactId> <artifactId>dbswitch-parent</artifactId>
<version>1.6.8</version> <version>1.6.9</version>
</parent> </parent>
<artifactId>dbswitch-core</artifactId> <artifactId>dbswitch-core</artifactId>

View File

@@ -141,7 +141,7 @@ public class DatabaseDB2Impl extends AbstractDatabase implements IDatabaseInterf
retval += "DATE"; retval += "DATE";
break; break;
case ColumnMetaData.TYPE_BOOLEAN: case ColumnMetaData.TYPE_BOOLEAN:
retval += "CHARACTER(32)"; retval += "BOOLEAN";
break; break;
case ColumnMetaData.TYPE_NUMBER: case ColumnMetaData.TYPE_NUMBER:
case ColumnMetaData.TYPE_BIGNUMBER: case ColumnMetaData.TYPE_BIGNUMBER:

View File

@@ -116,7 +116,7 @@ public class DatabaseDmImpl extends AbstractDatabase implements IDatabaseInterfa
retval.append("DATE"); retval.append("DATE");
break; break;
case ColumnMetaData.TYPE_BOOLEAN: case ColumnMetaData.TYPE_BOOLEAN:
retval.append("VARCHAR(32)"); retval.append("BIT");
break; break;
case ColumnMetaData.TYPE_NUMBER: case ColumnMetaData.TYPE_NUMBER:
case ColumnMetaData.TYPE_BIGNUMBER: case ColumnMetaData.TYPE_BIGNUMBER:

View File

@@ -139,7 +139,7 @@ public class DatabaseGreenplumImpl extends AbstractDatabase implements IDatabase
retval += "DATE"; retval += "DATE";
break; break;
case ColumnMetaData.TYPE_BOOLEAN: case ColumnMetaData.TYPE_BOOLEAN:
retval += "VARCHAR(32)"; retval += "BOOLEAN";
break; break;
case ColumnMetaData.TYPE_NUMBER: case ColumnMetaData.TYPE_NUMBER:
case ColumnMetaData.TYPE_INTEGER: case ColumnMetaData.TYPE_INTEGER:

View File

@@ -118,7 +118,7 @@ public class DatabaseKingbaseImpl extends AbstractDatabase implements IDatabaseI
retval += "DATE"; retval += "DATE";
break; break;
case ColumnMetaData.TYPE_BOOLEAN: case ColumnMetaData.TYPE_BOOLEAN:
retval += "VARCHAR(32)"; retval += "BOOLEAN";
break; break;
case ColumnMetaData.TYPE_NUMBER: case ColumnMetaData.TYPE_NUMBER:
case ColumnMetaData.TYPE_INTEGER: case ColumnMetaData.TYPE_INTEGER:

View File

@@ -180,7 +180,7 @@ public class DatabaseMysqlImpl extends AbstractDatabase implements IDatabaseInte
retval += "DATE"; retval += "DATE";
break; break;
case ColumnMetaData.TYPE_BOOLEAN: case ColumnMetaData.TYPE_BOOLEAN:
retval += "VARCHAR(32)"; retval += "TINYINT";
break; break;
case ColumnMetaData.TYPE_NUMBER: case ColumnMetaData.TYPE_NUMBER:
case ColumnMetaData.TYPE_INTEGER: case ColumnMetaData.TYPE_INTEGER:

View File

@@ -185,7 +185,7 @@ public class DatabaseOracleImpl extends AbstractDatabase implements IDatabaseInt
retval.append("DATE"); retval.append("DATE");
break; break;
case ColumnMetaData.TYPE_BOOLEAN: case ColumnMetaData.TYPE_BOOLEAN:
retval.append("VARCHAR(32)"); retval.append("NUMBER(1)");
break; break;
case ColumnMetaData.TYPE_NUMBER: case ColumnMetaData.TYPE_NUMBER:
case ColumnMetaData.TYPE_BIGNUMBER: case ColumnMetaData.TYPE_BIGNUMBER:

View File

@@ -154,7 +154,7 @@ public class DatabasePostgresImpl extends AbstractDatabase implements IDatabaseI
retval += "DATE"; retval += "DATE";
break; break;
case ColumnMetaData.TYPE_BOOLEAN: case ColumnMetaData.TYPE_BOOLEAN:
retval += "VARCHAR(32)"; retval += "BOOLEAN";
break; break;
case ColumnMetaData.TYPE_NUMBER: case ColumnMetaData.TYPE_NUMBER:
case ColumnMetaData.TYPE_INTEGER: case ColumnMetaData.TYPE_INTEGER:

View File

@@ -238,7 +238,7 @@ public class DatabaseSqlserverImpl extends AbstractDatabase implements IDatabase
retval += "DATE"; retval += "DATE";
break; break;
case ColumnMetaData.TYPE_BOOLEAN: case ColumnMetaData.TYPE_BOOLEAN:
retval += "VARCHAR(32)"; retval += "BIT";
break; break;
case ColumnMetaData.TYPE_NUMBER: case ColumnMetaData.TYPE_NUMBER:
case ColumnMetaData.TYPE_INTEGER: case ColumnMetaData.TYPE_INTEGER:

View File

@@ -5,7 +5,7 @@
<parent> <parent>
<groupId>com.gitee.dbswitch</groupId> <groupId>com.gitee.dbswitch</groupId>
<artifactId>dbswitch-parent</artifactId> <artifactId>dbswitch-parent</artifactId>
<version>1.6.8</version> <version>1.6.9</version>
</parent> </parent>
<artifactId>dbswitch-data</artifactId> <artifactId>dbswitch-data</artifactId>

View File

@@ -34,6 +34,7 @@ import com.gitee.dbswitch.dbwriter.IDatabaseWriter;
import com.zaxxer.hikari.HikariDataSource; import com.zaxxer.hikari.HikariDataSource;
import java.sql.ResultSet; import java.sql.ResultSet;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.LinkedList; import java.util.LinkedList;
@@ -46,6 +47,7 @@ import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j; import lombok.extern.slf4j.Slf4j;
import org.ehcache.sizeof.SizeOf; import org.ehcache.sizeof.SizeOf;
import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.util.StringUtils;
/** /**
* 在一个线程内的单表迁移处理逻辑 * 在一个线程内的单表迁移处理逻辑
@@ -153,7 +155,11 @@ public class MigrationHandler implements Supplier<Long> {
for (int i = 0; i < sourceColumnDescriptions.size(); ++i) { for (int i = 0; i < sourceColumnDescriptions.size(); ++i) {
String sourceColumnName = sourceColumnDescriptions.get(i).getFieldName(); String sourceColumnName = sourceColumnDescriptions.get(i).getFieldName();
String targetColumnName = targetColumnDescriptions.get(i).getFieldName(); String targetColumnName = targetColumnDescriptions.get(i).getFieldName();
columnMapperPairs.add(String.format("%s --> %s", sourceColumnName, targetColumnName)); if (StringUtils.hasLength(targetColumnName)) {
columnMapperPairs.add(String.format("%s --> %s", sourceColumnName, targetColumnName));
} else {
columnMapperPairs.add(String.format("%s --> %s", sourceColumnName, "<!Field is Deleted>"));
}
mapChecker.put(sourceColumnName, targetColumnName); mapChecker.put(sourceColumnName, targetColumnName);
} }
log.info("Mapping relation : \ntable mapper :\n\t{} \ncolumn mapper :\n\t{} ", log.info("Mapping relation : \ntable mapper :\n\t{} \ncolumn mapper :\n\t{} ",
@@ -183,8 +189,15 @@ public class MigrationHandler implements Supplier<Long> {
// 生成建表语句并创建 // 生成建表语句并创建
String sqlCreateTable = sourceMetaDataService.getDDLCreateTableSQL( String sqlCreateTable = sourceMetaDataService.getDDLCreateTableSQL(
targetProductType, targetColumnDescriptions, targetPrimaryKeys, targetProductType,
targetSchemaName, targetTableName, properties.getTarget().getCreateTableAutoIncrement()); targetColumnDescriptions.stream()
.filter(column -> StringUtils.hasLength(column.getFieldName()))
.collect(Collectors.toList()),
targetPrimaryKeys,
targetSchemaName,
targetTableName,
properties.getTarget().getCreateTableAutoIncrement()
);
JdbcTemplate targetJdbcTemplate = new JdbcTemplate(targetDataSource); JdbcTemplate targetJdbcTemplate = new JdbcTemplate(targetDataSource);
targetJdbcTemplate.execute(sqlCreateTable); targetJdbcTemplate.execute(sqlCreateTable);
@@ -227,8 +240,26 @@ public class MigrationHandler implements Supplier<Long> {
private Long doFullCoverSynchronize(IDatabaseWriter writer) { private Long doFullCoverSynchronize(IDatabaseWriter writer) {
final int BATCH_SIZE = fetchSize; final int BATCH_SIZE = fetchSize;
List<String> sourceFields = sourceColumnDescriptions.stream()
.map(ColumnDescription::getFieldName)
.collect(Collectors.toList());
List<String> targetFields = targetColumnDescriptions.stream()
.map(ColumnDescription::getFieldName)
.collect(Collectors.toList());
List<Integer> deletedFieldIndexes = new ArrayList<>();
for (int i = 0; i < targetFields.size(); ++i) {
if (StringUtils.isEmpty(targetFields.get(i))) {
deletedFieldIndexes.add(i);
}
}
Collections.reverse(deletedFieldIndexes);
deletedFieldIndexes.forEach(i -> {
sourceFields.remove(sourceFields.get(i));
targetFields.remove(targetFields.get(i));
});
// 准备目的端的数据写入操作 // 准备目的端的数据写入操作
writer.prepareWrite(targetSchemaName, targetTableName); writer.prepareWrite(targetSchemaName, targetTableName, targetFields);
// 清空目的端表的数据 // 清空目的端表的数据
IDatabaseOperator targetOperator = DatabaseOperatorFactory IDatabaseOperator targetOperator = DatabaseOperatorFactory
@@ -240,14 +271,9 @@ public class MigrationHandler implements Supplier<Long> {
.createDatabaseOperator(sourceDataSource); .createDatabaseOperator(sourceDataSource);
sourceOperator.setFetchSize(BATCH_SIZE); sourceOperator.setFetchSize(BATCH_SIZE);
List<String> sourceFields = sourceColumnDescriptions.stream()
.map(ColumnDescription::getFieldName)
.collect(Collectors.toList());
List<String> targetFields = targetColumnDescriptions.stream()
.map(ColumnDescription::getFieldName)
.collect(Collectors.toList());
StatementResultSet srs = sourceOperator.queryTableData( StatementResultSet srs = sourceOperator.queryTableData(
sourceSchemaName, sourceTableName, sourceFields); sourceSchemaName, sourceTableName, sourceFields
);
List<Object[]> cache = new LinkedList<>(); List<Object[]> cache = new LinkedList<>();
long cacheBytes = 0; long cacheBytes = 0;
@@ -312,7 +338,17 @@ public class MigrationHandler implements Supplier<Long> {
List<String> targetFields = targetColumnDescriptions.stream() List<String> targetFields = targetColumnDescriptions.stream()
.map(ColumnDescription::getFieldName) .map(ColumnDescription::getFieldName)
.collect(Collectors.toList()); .collect(Collectors.toList());
List<Integer> deletedFieldIndexes = new ArrayList<>();
for (int i = 0; i < targetFields.size(); ++i) {
if (StringUtils.isEmpty(targetFields.get(i))) {
deletedFieldIndexes.add(i);
}
}
Collections.reverse(deletedFieldIndexes);
deletedFieldIndexes.forEach(i -> {
sourceFields.remove(sourceFields.get(i));
targetFields.remove(targetFields.get(i));
});
Map<String, String> columnNameMaps = new HashMap<>(); Map<String, String> columnNameMaps = new HashMap<>();
for (int i = 0; i < sourceFields.size(); ++i) { for (int i = 0; i < sourceFields.size(); ++i) {
columnNameMaps.put(sourceFields.get(i), targetFields.get(i)); columnNameMaps.put(sourceFields.get(i), targetFields.get(i));

View File

@@ -5,7 +5,7 @@
<parent> <parent>
<groupId>com.gitee.dbswitch</groupId> <groupId>com.gitee.dbswitch</groupId>
<artifactId>dbswitch-parent</artifactId> <artifactId>dbswitch-parent</artifactId>
<version>1.6.8</version> <version>1.6.9</version>
</parent> </parent>
<artifactId>dbswitch-dbchange</artifactId> <artifactId>dbswitch-dbchange</artifactId>

View File

@@ -5,7 +5,7 @@
<parent> <parent>
<groupId>com.gitee.dbswitch</groupId> <groupId>com.gitee.dbswitch</groupId>
<artifactId>dbswitch-parent</artifactId> <artifactId>dbswitch-parent</artifactId>
<version>1.6.8</version> <version>1.6.9</version>
</parent> </parent>
<artifactId>dbswitch-dbcommon</artifactId> <artifactId>dbswitch-dbcommon</artifactId>

View File

@@ -5,7 +5,7 @@
<parent> <parent>
<groupId>com.gitee.dbswitch</groupId> <groupId>com.gitee.dbswitch</groupId>
<artifactId>dbswitch-parent</artifactId> <artifactId>dbswitch-parent</artifactId>
<version>1.6.8</version> <version>1.6.9</version>
</parent> </parent>
<artifactId>dbswitch-dbsynch</artifactId> <artifactId>dbswitch-dbsynch</artifactId>

View File

@@ -5,7 +5,7 @@
<parent> <parent>
<groupId>com.gitee.dbswitch</groupId> <groupId>com.gitee.dbswitch</groupId>
<artifactId>dbswitch-parent</artifactId> <artifactId>dbswitch-parent</artifactId>
<version>1.6.8</version> <version>1.6.9</version>
</parent> </parent>
<artifactId>dbswitch-dbwriter</artifactId> <artifactId>dbswitch-dbwriter</artifactId>

View File

@@ -28,6 +28,7 @@ import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionException; import org.springframework.transaction.TransactionException;
import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.DefaultTransactionDefinition; import org.springframework.transaction.support.DefaultTransactionDefinition;
import org.springframework.util.CollectionUtils;
/** /**
* 数据库写入抽象基类 * 数据库写入抽象基类
@@ -57,8 +58,8 @@ public abstract class AbstractDatabaseWriter implements IDatabaseWriter {
} }
@Override @Override
public void prepareWrite(String schemaName, String tableName) { public void prepareWrite(String schemaName, String tableName, List<String> fieldNames) {
String sql = this.selectTableMetaDataSqlString(schemaName, tableName); String sql = this.selectTableMetaDataSqlString(schemaName, tableName, fieldNames);
Map<String, Integer> columnMetaData = new HashMap<>(); Map<String, Integer> columnMetaData = new HashMap<>();
jdbcTemplate.execute((Connection conn) -> { jdbcTemplate.execute((Connection conn) -> {
try (Statement stmt = conn.createStatement(); try (Statement stmt = conn.createStatement();
@@ -85,8 +86,14 @@ public abstract class AbstractDatabaseWriter implements IDatabaseWriter {
} }
protected String selectTableMetaDataSqlString(String schemaName, String tableName) { protected String selectTableMetaDataSqlString(String schemaName, String tableName,
return String.format("SELECT * FROM \"%s\".\"%s\" WHERE 1=2", schemaName, tableName); List<String> fieldNames) {
if (CollectionUtils.isEmpty(fieldNames)) {
return String.format("SELECT * FROM \"%s\".\"%s\" WHERE 1=2", schemaName, tableName);
} else {
return String.format("SELECT \"%s\" FROM \"%s\".\"%s\" WHERE 1=2",
StringUtils.join(fieldNames, "\",\""), schemaName, tableName);
}
} }
protected abstract String getDatabaseProductName(); protected abstract String getDatabaseProductName();

View File

@@ -32,7 +32,7 @@ public interface IDatabaseWriter {
* @param schemaName schema名称 * @param schemaName schema名称
* @param tableName table名称 * @param tableName table名称
*/ */
void prepareWrite(String schemaName, String tableName); void prepareWrite(String schemaName, String tableName,List<String> fieldNames);
/** /**
* 批量数据写入 * 批量数据写入

View File

@@ -24,6 +24,7 @@ import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionException; import org.springframework.transaction.TransactionException;
import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.DefaultTransactionDefinition; import org.springframework.transaction.support.DefaultTransactionDefinition;
import org.springframework.util.CollectionUtils;
/** /**
* SQLServer批量写入实现类 * SQLServer批量写入实现类
@@ -43,8 +44,14 @@ public class SqlServerWriterImpl extends AbstractDatabaseWriter implements IData
} }
@Override @Override
protected String selectTableMetaDataSqlString(String schemaName, String tableName) { protected String selectTableMetaDataSqlString(String schemaName, String tableName,
return String.format("SELECT * FROM [%s].[%s] WHERE 1=2", schemaName, tableName); List<String> fieldNames) {
if (CollectionUtils.isEmpty(fieldNames)) {
return String.format("SELECT * FROM [%s].[%s] WHERE 1=2", schemaName, tableName);
} else {
return String.format("SELECT [%s] FROM [%s].[%s] WHERE 1=2",
StringUtils.join(fieldNames, "],["), schemaName, tableName);
}
} }
@Override @Override

View File

@@ -32,6 +32,7 @@ import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.DefaultTransactionDefinition; import org.springframework.transaction.support.DefaultTransactionDefinition;
import org.springframework.transaction.support.TransactionTemplate; import org.springframework.transaction.support.TransactionTemplate;
import org.springframework.util.CollectionUtils;
/** /**
* MySQL数据库写入实现类 * MySQL数据库写入实现类
@@ -58,8 +59,14 @@ public class MySqlWriterImpl extends AbstractDatabaseWriter implements IDatabase
} }
@Override @Override
protected String selectTableMetaDataSqlString(String schemaName, String tableName) { protected String selectTableMetaDataSqlString(String schemaName, String tableName,
return String.format("SELECT * FROM `%s`.`%s` WHERE 1=2", schemaName, tableName); List<String> fieldNames) {
if (CollectionUtils.isEmpty(fieldNames)) {
return String.format("SELECT * FROM `%s`.`%s` WHERE 1=2", schemaName, tableName);
} else {
return String.format("SELECT `%s` FROM `%s`.`%s` WHERE 1=2",
StringUtils.join(fieldNames, "`,`"), schemaName, tableName);
}
} }
@Override @Override

View File

@@ -5,7 +5,7 @@
<parent> <parent>
<groupId>com.gitee.dbswitch</groupId> <groupId>com.gitee.dbswitch</groupId>
<artifactId>dbswitch-parent</artifactId> <artifactId>dbswitch-parent</artifactId>
<version>1.6.8</version> <version>1.6.9</version>
</parent> </parent>
<artifactId>dbswitch-pgwriter</artifactId> <artifactId>dbswitch-pgwriter</artifactId>

View File

@@ -5,7 +5,7 @@
<parent> <parent>
<groupId>com.gitee.dbswitch</groupId> <groupId>com.gitee.dbswitch</groupId>
<artifactId>dbswitch-parent</artifactId> <artifactId>dbswitch-parent</artifactId>
<version>1.6.8</version> <version>1.6.9</version>
</parent> </parent>
<artifactId>dbswitch-sql</artifactId> <artifactId>dbswitch-sql</artifactId>

View File

@@ -5,7 +5,7 @@
<parent> <parent>
<groupId>com.gitee.dbswitch</groupId> <groupId>com.gitee.dbswitch</groupId>
<artifactId>dbswitch-parent</artifactId> <artifactId>dbswitch-parent</artifactId>
<version>1.6.8</version> <version>1.6.9</version>
</parent> </parent>
<artifactId>package-tool</artifactId> <artifactId>package-tool</artifactId>

View File

@@ -4,7 +4,7 @@
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<groupId>com.gitee.dbswitch</groupId> <groupId>com.gitee.dbswitch</groupId>
<artifactId>dbswitch-parent</artifactId> <artifactId>dbswitch-parent</artifactId>
<version>1.6.8</version> <version>1.6.9</version>
<packaging>pom</packaging> <packaging>pom</packaging>
<name>dbswitch</name> <name>dbswitch</name>
<description>database switch project</description> <description>database switch project</description>

View File

@@ -1,6 +1,6 @@
@echo off @echo off
set APP_VERSION=1.6.8 set APP_VERSION=1.6.9
echo "Clean Project ..." echo "Clean Project ..."
call mvn clean -f pom.xml call mvn clean -f pom.xml