diff --git a/README.md b/README.md
index e6bb77de..51660f14 100644
--- a/README.md
+++ b/README.md
@@ -248,9 +248,10 @@ jdbc驱动名称:com.microsoft.sqlserver.jdbc.SQLServerDriver
**Sybase数据库**
```
-jdbc连接地址:jdbc:sybase:Tds:172.17.2.10:5000/test
+jdbc连接地址:jdbc:sybase:Tds:172.17.2.10:5000/test?charset=cp936
jdbc驱动名称:com.sybase.jdbc4.jdbc.SybDriver
```
+> JDBC连接Sybase数据库使用中文时只能使用CP936这个字符集
**PostgreSQL/Greenplum数据库**
diff --git a/dbswitch-core/src/main/java/com/gitee/dbswitch/core/database/impl/DatabaseDmImpl.java b/dbswitch-core/src/main/java/com/gitee/dbswitch/core/database/impl/DatabaseDmImpl.java
index ed12bdf6..09503d00 100644
--- a/dbswitch-core/src/main/java/com/gitee/dbswitch/core/database/impl/DatabaseDmImpl.java
+++ b/dbswitch-core/src/main/java/com/gitee/dbswitch/core/database/impl/DatabaseDmImpl.java
@@ -99,6 +99,13 @@ public class DatabaseDmImpl extends AbstractDatabase implements IDatabaseInterfa
return String.format("explain %s", sql.replace(";", ""));
}
+ /**
+ * DM (Dameng) SQL data type references:
+ * https://eco.dameng.com/document/dm/zh-cn/sql-dev/dmpl-sql-datatype.html
+ * https://eco.dameng.com/document/dm/zh-cn/pm/dm8_sql-data-types-operators.html
+ *
+ * Unique-constraint violation ("违反表[xxx]唯一性约束"): https://www.cnblogs.com/theli/p/12858875.html
+ */
@Override
public String getFieldDefinition(ColumnMetaData v, List pks, boolean useAutoInc,
boolean addCr, boolean withRemarks) {
@@ -123,33 +130,31 @@ public class DatabaseDmImpl extends AbstractDatabase implements IDatabaseInterfa
break;
case ColumnMetaData.TYPE_NUMBER:
case ColumnMetaData.TYPE_BIGNUMBER:
- retval.append("NUMBER");
- if (length > 0) {
- if (length > 38) {
- length = 38;
- }
+ if (null != pks && !pks.isEmpty() && pks.contains(fieldname)) {
+ retval.append("BIGINT");
+ } else {
+ retval.append("NUMERIC");
+ if (length > 0) {
+ if (length > 38) {
+ length = 38;
+ }
- retval.append('(').append(length);
- if (precision > 0) {
- retval.append(", ").append(precision);
+ retval.append('(').append(length);
+ if (precision > 0) {
+ retval.append(", ").append(precision);
+ }
+ retval.append(')');
}
- retval.append(')');
}
break;
case ColumnMetaData.TYPE_INTEGER:
- retval.append("INTEGER");
+ retval.append("BIGINT");
break;
case ColumnMetaData.TYPE_STRING:
- if (2 * length >= AbstractDatabase.CLOB_LENGTH) {
- retval.append("CLOB");
+ if (null != pks && pks.contains(fieldname)) {
+ retval.append("VARCHAR(" + length + ")");
} else {
- if (length == 1) {
- retval.append("NVARCHAR2(2)");
- } else if (length > 0 && length < 2048) {
- retval.append("NVARCHAR2(").append(2 * length).append(')');
- } else {
- retval.append("CLOB");
- }
+ retval.append("TEXT");
}
break;
case ColumnMetaData.TYPE_BINARY:
diff --git a/dbswitch-core/src/main/java/com/gitee/dbswitch/core/database/impl/DatabaseSybaseImpl.java b/dbswitch-core/src/main/java/com/gitee/dbswitch/core/database/impl/DatabaseSybaseImpl.java
index 38fcf0fb..8462aa6b 100644
--- a/dbswitch-core/src/main/java/com/gitee/dbswitch/core/database/impl/DatabaseSybaseImpl.java
+++ b/dbswitch-core/src/main/java/com/gitee/dbswitch/core/database/impl/DatabaseSybaseImpl.java
@@ -181,7 +181,7 @@ public class DatabaseSybaseImpl extends AbstractDatabase implements IDatabaseInt
}
break;
case ColumnMetaData.TYPE_BOOLEAN:
- retval += "BOOLEAN";
+ retval += "TINYINT";
if (null != pks && !pks.isEmpty() && pks.contains(fieldname)) {
retval += " NOT NULL";
}
diff --git a/dbswitch-data/src/main/java/com/gitee/dbswitch/data/handler/MigrationHandler.java b/dbswitch-data/src/main/java/com/gitee/dbswitch/data/handler/MigrationHandler.java
index 5b2ea104..54a538a1 100644
--- a/dbswitch-data/src/main/java/com/gitee/dbswitch/data/handler/MigrationHandler.java
+++ b/dbswitch-data/src/main/java/com/gitee/dbswitch/data/handler/MigrationHandler.java
@@ -78,6 +78,7 @@ public class MigrationHandler implements Supplier {
// 目的端
private final HikariDataSource targetDataSource;
private ProductTypeEnum targetProductType;
+ private Set targetExistTables;
private String targetSchemaName;
private String targetTableName;
private List targetColumnDescriptions;
@@ -90,15 +91,17 @@ public class MigrationHandler implements Supplier {
DbswichProperties properties,
Integer sourcePropertiesIndex,
HikariDataSource sds,
- HikariDataSource tds) {
- return new MigrationHandler(td, properties, sourcePropertiesIndex, sds, tds);
+ HikariDataSource tds,
+ Set targetExistTables) {
+ return new MigrationHandler(td, properties, sourcePropertiesIndex, sds, tds, targetExistTables);
}
private MigrationHandler(TableDescription td,
DbswichProperties properties,
Integer sourcePropertiesIndex,
HikariDataSource sds,
- HikariDataSource tds) {
+ HikariDataSource tds,
+ Set targetExistTables) {
this.sourceSchemaName = td.getSchemaName();
this.sourceTableName = td.getTableName();
this.properties = properties;
@@ -110,6 +113,7 @@ public class MigrationHandler implements Supplier {
fetchSize = sourceProperties.getFetchSize();
}
+ this.targetExistTables = targetExistTables;
// 获取映射转换后新的表名
this.targetSchemaName = properties.getTarget().getTargetSchema();
this.targetTableName = PatterNameUtils.getFinalName(td.getTableName(),
@@ -252,14 +256,7 @@ public class MigrationHandler implements Supplier {
throw new RuntimeException("task is interrupted");
}
- IMetaDataByDatasourceService metaDataByDatasourceService =
- new MetaDataByDataSourceServiceImpl(targetDataSource, targetProductType);
- List targetTableNames = metaDataByDatasourceService
- .queryTableList(targetSchemaName)
- .stream().map(TableDescription::getTableName)
- .collect(Collectors.toList());
-
- if (!targetTableNames.contains(targetSchemaName)) {
+ if (!targetExistTables.contains(targetTableName)) {
// 当目标端不存在该表时,则生成建表语句并创建
List sqlCreateTable = sourceMetaDataService.getDDLCreateTableSQL(
targetProductType,
@@ -289,6 +286,8 @@ public class MigrationHandler implements Supplier {
// 判断是否具备变化量同步的条件:(1)两端表结构一致,且都有一样的主键字段;(2)MySQL使用Innodb引擎;
if (properties.getTarget().getChangeDataSync()) {
// 根据主键情况判断同步的方式:增量同步或覆盖同步
+ IMetaDataByDatasourceService metaDataByDatasourceService =
+ new MetaDataByDataSourceServiceImpl(targetDataSource, targetProductType);
List dbTargetPks = metaDataByDatasourceService.queryTablePrimaryKeys(
targetSchemaName, targetTableName);
diff --git a/dbswitch-data/src/main/java/com/gitee/dbswitch/data/service/MigrationService.java b/dbswitch-data/src/main/java/com/gitee/dbswitch/data/service/MigrationService.java
index 057de05c..1268ace8 100644
--- a/dbswitch-data/src/main/java/com/gitee/dbswitch/data/service/MigrationService.java
+++ b/dbswitch-data/src/main/java/com/gitee/dbswitch/data/service/MigrationService.java
@@ -24,12 +24,14 @@ import com.zaxxer.hikari.HikariDataSource;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
+import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.regex.Pattern;
+import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.springframework.core.task.AsyncTaskExecutor;
import org.springframework.stereotype.Service;
@@ -103,6 +105,9 @@ public class MigrationService {
//log.info("Application properties configuration \n{}", properties);
try (HikariDataSource targetDataSource = DataSourceUtils.createTargetDataSource(properties.getTarget())) {
+ IMetaDataByDatasourceService tdsService = new MetaDataByDataSourceServiceImpl(targetDataSource);
+ Set tablesAlreadyExist = tdsService.queryTableList(properties.getTarget().getTargetSchema())
+ .stream().map(TableDescription::getTableName).collect(Collectors.toSet());
int sourcePropertiesIndex = 0;
int totalTableCount = 0;
List sourcesProperties = properties.getSource();
@@ -160,19 +165,19 @@ public class MigrationService {
if (useExcludeTables) {
if (!filters.contains(tableName)) {
futures.add(
- makeFutureTask(td, indexInternal, sourceDataSource, targetDataSource,
+ makeFutureTask(td, indexInternal, sourceDataSource, targetDataSource, tablesAlreadyExist,
numberOfFailures, totalBytesSize));
}
} else {
if (includes.size() == 1 && (includes.get(0).contains("*") || includes.get(0).contains("?"))) {
if (Pattern.matches(includes.get(0), tableName)) {
futures.add(
- makeFutureTask(td, indexInternal, sourceDataSource, targetDataSource,
+ makeFutureTask(td, indexInternal, sourceDataSource, targetDataSource, tablesAlreadyExist,
numberOfFailures, totalBytesSize));
}
} else if (includes.contains(tableName)) {
futures.add(
- makeFutureTask(td, indexInternal, sourceDataSource, targetDataSource,
+ makeFutureTask(td, indexInternal, sourceDataSource, targetDataSource, tablesAlreadyExist,
numberOfFailures, totalBytesSize));
}
}
@@ -222,6 +227,7 @@ public class MigrationService {
* @param indexInternal 源端索引号
* @param sds 源端的DataSource数据源
* @param tds 目的端的DataSource数据源
+ * @param exists 目的端已经存在的表名列表
* @param numberOfFailures 失败的数量
* @param totalBytesSize 同步的字节大小
* @return CompletableFuture
@@ -231,10 +237,11 @@ public class MigrationService {
Integer indexInternal,
HikariDataSource sds,
HikariDataSource tds,
+ Set exists,
AtomicInteger numberOfFailures,
AtomicLong totalBytesSize) {
return CompletableFuture
- .supplyAsync(getMigrateHandler(td, indexInternal, sds, tds), this.taskExecutor)
+ .supplyAsync(getMigrateHandler(td, indexInternal, sds, tds, exists), this.taskExecutor)
.exceptionally(getExceptHandler(td, numberOfFailures))
.thenAccept(totalBytesSize::addAndGet);
}
@@ -246,14 +253,16 @@ public class MigrationService {
* @param indexInternal 源端索引号
* @param sds 源端的DataSource数据源
* @param tds 目的端的DataSource数据源
+ * @param exists 目的端已经存在的表名列表
* @return Supplier
*/
private Supplier getMigrateHandler(
TableDescription td,
Integer indexInternal,
HikariDataSource sds,
- HikariDataSource tds) {
- MigrationHandler instance = MigrationHandler.createInstance(td, properties, indexInternal, sds, tds);
+ HikariDataSource tds,
+ Set exists) {
+ MigrationHandler instance = MigrationHandler.createInstance(td, properties, indexInternal, sds, tds, exists);
migrationHandlers.add(instance);
return instance;
}