v1.6.16: fix issues I5X9ED, I5XNB7, I5XNLZ, and others

Author: inrgihc
Date:   2022-10-25 21:27:58 +08:00
parent  f7bf8f134e
commit  c6ec7ef22e

5 changed files with 52 additions and 38 deletions


@@ -248,9 +248,10 @@ JDBC driver class: com.microsoft.sqlserver.jdbc.SQLServerDriver
 **Sybase**
 ```
-JDBC URL: jdbc:sybase:Tds:172.17.2.10:5000/test
+JDBC URL: jdbc:sybase:Tds:172.17.2.10:5000/test?charset=cp936
 JDBC driver class: com.sybase.jdbc4.jdbc.SybDriver
 ```
+> When Chinese text is used over a JDBC connection to Sybase, CP936 is the only charset that works.
 **PostgreSQL/Greenplum**
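
For reference, a minimal sketch of opening such a connection from Java — the host, port, database, and credentials below are placeholders; the driver class and the charset parameter come from the README lines above:

```java
import java.sql.Connection;
import java.sql.DriverManager;

public class SybaseCp936Connect {
  public static void main(String[] args) throws Exception {
    Class.forName("com.sybase.jdbc4.jdbc.SybDriver"); // jConnect 4 driver
    // Without charset=cp936, Chinese text is liable to be mangled in transit.
    String url = "jdbc:sybase:Tds:172.17.2.10:5000/test?charset=cp936";
    try (Connection conn = DriverManager.getConnection(url, "user", "password")) {
      System.out.println("connected, autocommit=" + conn.getAutoCommit());
    }
  }
}
```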


@@ -99,6 +99,13 @@ public class DatabaseDmImpl extends AbstractDatabase implements IDatabaseInterface {
     return String.format("explain %s", sql.replace(";", ""));
   }
 
+  /**
+   * https://eco.dameng.com/document/dm/zh-cn/sql-dev/dmpl-sql-datatype.html
+   * <p>
+   * https://eco.dameng.com/document/dm/zh-cn/pm/dm8_sql-data-types-operators.html
+   * <p>
+   * "Unique constraint on table [xxx] violated": https://www.cnblogs.com/theli/p/12858875.html
+   */
   @Override
   public String getFieldDefinition(ColumnMetaData v, List<String> pks, boolean useAutoInc,
       boolean addCr, boolean withRemarks) {
@@ -123,33 +130,31 @@ public class DatabaseDmImpl extends AbstractDatabase implements IDatabaseInterface {
         break;
       case ColumnMetaData.TYPE_NUMBER:
       case ColumnMetaData.TYPE_BIGNUMBER:
-        retval.append("NUMBER");
-        if (length > 0) {
-          if (length > 38) {
-            length = 38;
-          }
-          retval.append('(').append(length);
-          if (precision > 0) {
-            retval.append(", ").append(precision);
-          }
-          retval.append(')');
-        }
+        if (null != pks && !pks.isEmpty() && pks.contains(fieldname)) {
+          retval.append("BIGINT");
+        } else {
+          retval.append("NUMERIC");
+          if (length > 0) {
+            if (length > 38) {
+              length = 38;
+            }
+            retval.append('(').append(length);
+            if (precision > 0) {
+              retval.append(", ").append(precision);
+            }
+            retval.append(')');
+          }
+        }
         break;
       case ColumnMetaData.TYPE_INTEGER:
-        retval.append("INTEGER");
+        retval.append("BIGINT");
         break;
       case ColumnMetaData.TYPE_STRING:
-        if (2 * length >= AbstractDatabase.CLOB_LENGTH) {
-          retval.append("CLOB");
-        } else {
-          if (length == 1) {
-            retval.append("NVARCHAR2(2)");
-          } else if (length > 0 && length < 2048) {
-            retval.append("NVARCHAR2(").append(2 * length).append(')');
-          } else {
-            retval.append("CLOB");
-          }
-        }
+        if (null != pks && pks.contains(fieldname)) {
+          retval.append("VARCHAR(" + length + ")");
+        } else {
+          retval.append("TEXT");
+        }
         break;
       case ColumnMetaData.TYPE_BINARY:
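
To make the revised mapping concrete, here is a hypothetical, self-contained rendition — the TYPE_* constants and fieldType() are stand-ins, not the project's API. Numeric columns that sit in the primary key become BIGINT, TYPE_INTEGER now always maps to BIGINT, string keys keep a bounded VARCHAR, and other strings fall back to TEXT:

```java
import java.util.List;

public class DmTypeMappingSketch {

  // stand-ins for the ColumnMetaData.TYPE_* constants referenced in the diff
  static final int TYPE_NUMBER = 1;
  static final int TYPE_INTEGER = 2;
  static final int TYPE_STRING = 3;

  static String fieldType(int type, String fieldname, List<String> pks, int length, int precision) {
    boolean isPk = pks != null && pks.contains(fieldname);
    switch (type) {
      case TYPE_NUMBER:
        if (isPk) {
          return "BIGINT"; // key columns get a fixed, index-friendly integer type
        }
        if (length <= 0) {
          return "NUMERIC";
        }
        int len = Math.min(length, 38); // DM caps the NUMERIC precision at 38
        return precision > 0 ? "NUMERIC(" + len + ", " + precision + ")" : "NUMERIC(" + len + ")";
      case TYPE_INTEGER:
        return "BIGINT";
      case TYPE_STRING:
        return isPk ? "VARCHAR(" + length + ")" : "TEXT"; // TEXT sidesteps NVARCHAR2 length limits
      default:
        return "TEXT";
    }
  }

  public static void main(String[] args) {
    List<String> pks = List.of("id");
    System.out.println(fieldType(TYPE_NUMBER, "id", pks, 10, 0));    // BIGINT
    System.out.println(fieldType(TYPE_NUMBER, "price", pks, 10, 2)); // NUMERIC(10, 2)
    System.out.println(fieldType(TYPE_STRING, "name", pks, 64, 0));  // TEXT
  }
}
```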


@@ -181,7 +181,7 @@ public class DatabaseSybaseImpl extends AbstractDatabase implements IDatabaseInterface {
         }
         break;
       case ColumnMetaData.TYPE_BOOLEAN:
-        retval += "BOOLEAN";
+        retval += "TINYINT";
         if (null != pks && !pks.isEmpty() && pks.contains(fieldname)) {
           retval += " NOT NULL";
         }
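
Sybase ASE has no BOOLEAN column type, hence the switch to TINYINT; writers are then expected to store booleans as 0/1. A sketch under that assumption — the table t_demo and the credentials are hypothetical:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

public class SybaseBooleanWriteDemo {
  public static void main(String[] args) throws Exception {
    // URL and credentials are placeholders; charset per the README note above.
    String url = "jdbc:sybase:Tds:172.17.2.10:5000/test?charset=cp936";
    try (Connection conn = DriverManager.getConnection(url, "user", "password");
         PreparedStatement ps = conn.prepareStatement(
             "INSERT INTO t_demo(flag) VALUES (?)")) {
      boolean flag = true;
      ps.setInt(1, flag ? 1 : 0); // boolean mapped to 0/1 in the TINYINT column
      ps.executeUpdate();
    }
  }
}
```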


@@ -78,6 +78,7 @@ public class MigrationHandler implements Supplier<Long> {
   // target side
   private final HikariDataSource targetDataSource;
   private ProductTypeEnum targetProductType;
+  private Set<String> targetExistTables;
   private String targetSchemaName;
   private String targetTableName;
   private List<ColumnDescription> targetColumnDescriptions;
@@ -90,15 +91,17 @@ public class MigrationHandler implements Supplier<Long> {
       DbswichProperties properties,
       Integer sourcePropertiesIndex,
       HikariDataSource sds,
-      HikariDataSource tds) {
-    return new MigrationHandler(td, properties, sourcePropertiesIndex, sds, tds);
+      HikariDataSource tds,
+      Set<String> targetExistTables) {
+    return new MigrationHandler(td, properties, sourcePropertiesIndex, sds, tds, targetExistTables);
   }
 
   private MigrationHandler(TableDescription td,
       DbswichProperties properties,
       Integer sourcePropertiesIndex,
       HikariDataSource sds,
-      HikariDataSource tds) {
+      HikariDataSource tds,
+      Set<String> targetExistTables) {
     this.sourceSchemaName = td.getSchemaName();
     this.sourceTableName = td.getTableName();
     this.properties = properties;
@@ -110,6 +113,7 @@ public class MigrationHandler implements Supplier<Long> {
       fetchSize = sourceProperties.getFetchSize();
     }
 
+    this.targetExistTables = targetExistTables;
     // get the new table name after name mapping
     this.targetSchemaName = properties.getTarget().getTargetSchema();
     this.targetTableName = PatterNameUtils.getFinalName(td.getTableName(),
@@ -252,14 +256,7 @@ public class MigrationHandler implements Supplier<Long> {
       throw new RuntimeException("task is interrupted");
     }
 
-    IMetaDataByDatasourceService metaDataByDatasourceService =
-        new MetaDataByDataSourceServiceImpl(targetDataSource, targetProductType);
-    List<String> targetTableNames = metaDataByDatasourceService
-        .queryTableList(targetSchemaName)
-        .stream().map(TableDescription::getTableName)
-        .collect(Collectors.toList());
-    if (!targetTableNames.contains(targetSchemaName)) {
+    if (!targetExistTables.contains(targetTableName)) {
       // when the table does not exist on the target, generate and run the CREATE TABLE DDL
       List<String> sqlCreateTable = sourceMetaDataService.getDDLCreateTableSQL(
           targetProductType,
@@ -289,6 +286,8 @@ public class MigrationHandler implements Supplier<Long> {
     // preconditions for change-data sync: (1) identical structure and primary-key fields on both sides; (2) MySQL uses the InnoDB engine
     if (properties.getTarget().getChangeDataSync()) {
       // choose the sync mode by primary keys: incremental sync or overwrite sync
+      IMetaDataByDatasourceService metaDataByDatasourceService =
+          new MetaDataByDataSourceServiceImpl(targetDataSource, targetProductType);
       List<String> dbTargetPks = metaDataByDatasourceService.queryTablePrimaryKeys(
           targetSchemaName, targetTableName);
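
Besides dropping the per-table metadata round-trip, the first hunk above fixes the old existence check, which tested targetTableNames.contains(targetSchemaName) — the schema name rather than the table name — so the table could be treated as missing even when it was present. The replacement pattern as a self-contained sketch (queryTableListOnce() is a stand-in for the project's metadata service):

```java
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

public class ExistingTablesDemo {

  // stand-in for MetaDataByDataSourceServiceImpl.queryTableList(schema)
  static List<String> queryTableListOnce() {
    return List.of("t_user", "t_order"); // pretend these already exist on the target
  }

  public static void main(String[] args) {
    // one metadata query per run, materialized into a Set ...
    Set<String> exists = queryTableListOnce().stream().collect(Collectors.toSet());
    // ... then an O(1) membership test per migrated table
    for (String table : List.of("t_user", "t_new")) {
      if (!exists.contains(table)) {
        System.out.println("CREATE TABLE needed for: " + table);
      }
    }
  }
}
```

Since the set is fully built before any migration future is submitted and is only read afterwards, sharing it unsynchronized across the handler threads is safe; the trade-off is that tables created by other writers mid-run will not be seen, which is acceptable for a batch migration.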


@@ -24,12 +24,14 @@ import com.zaxxer.hikari.HikariDataSource;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Objects;
+import java.util.Set;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.function.Function;
 import java.util.function.Supplier;
 import java.util.regex.Pattern;
+import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.core.task.AsyncTaskExecutor;
 import org.springframework.stereotype.Service;
@@ -103,6 +105,9 @@ public class MigrationService {
     //log.info("Application properties configuration \n{}", properties);
     try (HikariDataSource targetDataSource = DataSourceUtils.createTargetDataSource(properties.getTarget())) {
+      IMetaDataByDatasourceService tdsService = new MetaDataByDataSourceServiceImpl(targetDataSource);
+      Set<String> tablesAlreadyExist = tdsService.queryTableList(properties.getTarget().getTargetSchema())
+          .stream().map(TableDescription::getTableName).collect(Collectors.toSet());
       int sourcePropertiesIndex = 0;
       int totalTableCount = 0;
       List<SourceDataSourceProperties> sourcesProperties = properties.getSource();
@@ -160,19 +165,19 @@ public class MigrationService {
           if (useExcludeTables) {
             if (!filters.contains(tableName)) {
               futures.add(
-                  makeFutureTask(td, indexInternal, sourceDataSource, targetDataSource,
+                  makeFutureTask(td, indexInternal, sourceDataSource, targetDataSource, tablesAlreadyExist,
                       numberOfFailures, totalBytesSize));
             }
           } else {
             if (includes.size() == 1 && (includes.get(0).contains("*") || includes.get(0).contains("?"))) {
               if (Pattern.matches(includes.get(0), tableName)) {
                 futures.add(
-                    makeFutureTask(td, indexInternal, sourceDataSource, targetDataSource,
+                    makeFutureTask(td, indexInternal, sourceDataSource, targetDataSource, tablesAlreadyExist,
                         numberOfFailures, totalBytesSize));
               }
             } else if (includes.contains(tableName)) {
               futures.add(
-                  makeFutureTask(td, indexInternal, sourceDataSource, targetDataSource,
+                  makeFutureTask(td, indexInternal, sourceDataSource, targetDataSource, tablesAlreadyExist,
                       numberOfFailures, totalBytesSize));
             }
           }
@@ -222,6 +227,7 @@ public class MigrationService {
    * @param indexInternal    index of the source entry
    * @param sds              source-side DataSource
    * @param tds              target-side DataSource
+   * @param exists           names of the tables that already exist on the target
    * @param numberOfFailures failure count
    * @param totalBytesSize   total bytes synchronized
    * @return CompletableFuture<Void>
@@ -231,10 +237,11 @@ public class MigrationService {
       Integer indexInternal,
       HikariDataSource sds,
       HikariDataSource tds,
+      Set<String> exists,
       AtomicInteger numberOfFailures,
       AtomicLong totalBytesSize) {
     return CompletableFuture
-        .supplyAsync(getMigrateHandler(td, indexInternal, sds, tds), this.taskExecutor)
+        .supplyAsync(getMigrateHandler(td, indexInternal, sds, tds, exists), this.taskExecutor)
         .exceptionally(getExceptHandler(td, numberOfFailures))
         .thenAccept(totalBytesSize::addAndGet);
   }
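
The surrounding pipeline (supplyAsync → exceptionally → thenAccept) is easy to exercise in isolation; a self-contained sketch with stand-in values, mirroring how each table's byte count feeds the shared accumulators:

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

public class FuturePipelineDemo {
  public static void main(String[] args) {
    AtomicInteger numberOfFailures = new AtomicInteger(0);
    AtomicLong totalBytesSize = new AtomicLong(0);

    CompletableFuture<Void> f = CompletableFuture
        .supplyAsync(() -> 1024L)               // stand-in for a MigrationHandler
        .exceptionally(ex -> {                  // stand-in for getExceptHandler
          numberOfFailures.incrementAndGet();
          return 0L;                            // a failed table contributes zero bytes
        })
        .thenAccept(totalBytesSize::addAndGet); // accumulate the synchronized bytes

    f.join();
    System.out.println("bytes=" + totalBytesSize.get() + ", failures=" + numberOfFailures.get());
  }
}
```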
@@ -246,14 +253,16 @@ public class MigrationService {
    * @param indexInternal index of the source entry
    * @param sds           source-side DataSource
    * @param tds           target-side DataSource
+   * @param exists        names of the tables that already exist on the target
    * @return Supplier<Long>
    */
   private Supplier<Long> getMigrateHandler(
       TableDescription td,
       Integer indexInternal,
       HikariDataSource sds,
-      HikariDataSource tds) {
-    MigrationHandler instance = MigrationHandler.createInstance(td, properties, indexInternal, sds, tds);
+      HikariDataSource tds,
+      Set<String> exists) {
+    MigrationHandler instance = MigrationHandler.createInstance(td, properties, indexInternal, sds, tds, exists);
     migrationHandlers.add(instance);
     return instance;
   }