Code interface adjustments

inrgihc
2024-07-24 21:45:20 +08:00
parent 5ec8f5c48a
commit 815528b817
25 changed files with 179 additions and 160 deletions

View File

@@ -1,76 +0,0 @@
package com.gitee.dbswitch.common.util;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryPoolMXBean;
import java.lang.management.MemoryUsage;
import java.lang.management.OperatingSystemMXBean;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.text.NumberFormat;
import java.util.List;
import java.util.Locale;
import lombok.experimental.UtilityClass;
import lombok.extern.slf4j.Slf4j;
/**
* Machine statistics information
*/
@Slf4j
@UtilityClass
public class MachineInfoUtils {
private static NumberFormat fmtI = new DecimalFormat("###,###", new DecimalFormatSymbols(Locale.ENGLISH));
/**
* Get operating system information
*
* @return String
*/
public static String getOSInfo() {
OperatingSystemMXBean os = ManagementFactory.getOperatingSystemMXBean();
StringBuffer sb = new StringBuffer();
sb.append("Operation System Information :" + "\r\n");
sb.append("OS Name: " + os.getName() + "\r\n");
sb.append("OS Core Arch : " + os.getArch() + "\r\n");
sb.append("Available CPU Count: " + os.getAvailableProcessors() + "\r\n");
sb.append("System Avg Load: " + os.getSystemLoadAverage() + "\r\n");
sb.append("JAVA Version: " + System.getProperty("java.version") + "\r\n");
return sb.toString();
}
/**
* Print JVM memory information to the log
*
* @return String
*/
public static void printJVMInfo() {
List<MemoryPoolMXBean> pools = ManagementFactory.getMemoryPoolMXBeans();
for (MemoryPoolMXBean pool : pools) {
final String kind = pool.getType().name();
final MemoryUsage usage = pool.getUsage();
log.info("model:" + getKindName(kind)
+ ", name:" + pool.getName()
+ ", init:" + bytesToMB(usage.getInit())
+ ", used:" + bytesToMB(usage.getUsed())
+ ", available:" + bytesToMB(usage.getCommitted())
+ ", max:" + bytesToMB(usage.getMax()));
}
}
protected static String getKindName(String kind) {
if ("NON_HEAP".equals(kind)) {
return "NonHeap";
} else {
return "Heap";
}
}
protected static String bytesToMB(long bytes) {
return fmtI.format((bytes / 1024 / 1024)) + " MB";
}
public static void main(String[] args) {
log.info(getOSInfo());
printJVMInfo();
}
}

View File

@@ -26,7 +26,9 @@ public final class ProductTypeUtils {
if (resultSet.next()) {
String version = resultSet.getString(1);
if (Objects.nonNull(version) && version.contains("Greenplum")) {
log.info("#### Target database is Greenplum Cluster");
if (log.isDebugEnabled()) {
log.debug("#### Target database is Greenplum Cluster");
}
return true;
}
}

View File

@@ -46,8 +46,8 @@ public abstract class AbstractCommonProvider {
return dataSource;
}
-public ProductFeatures getProductFeatures() {
-return productFeatures;
+public <T extends ProductFeatures> T getProductFeatures() {
+return (T) productFeatures;
}
protected String getTableFieldsQuerySQL(String schemaName, String tableName) {
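
The generic accessor above removes the explicit casts callers previously needed when a product exposes a specialized ProductFeatures subtype; the trade-off is that a mismatched type argument now fails only at runtime with a ClassCastException. A minimal usage sketch, not part of the commit, assuming an already-constructed Hive provider instance (HiveFeatures.useCTAS() is introduced further down in this same commit):

// Illustrative sketch only (not in the commit): callers can now bind the
// concrete features subtype directly instead of casting by hand.
static boolean shouldUseCtas(AbstractCommonProvider hiveProvider) {
  HiveFeatures features = hiveProvider.getProductFeatures();
  return features.useCTAS();
}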

View File

@@ -19,6 +19,7 @@ import com.gitee.dbswitch.schema.ColumnMetaData;
import com.gitee.dbswitch.schema.IndexDescription;
import com.gitee.dbswitch.schema.IndexFieldMeta;
import com.gitee.dbswitch.schema.TableDescription;
import com.gitee.dbswitch.schema.SourceProperties;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
@@ -247,7 +248,7 @@ public abstract class AbstractMetadataProvider
@Override
public void postAppendCreateTableSql(StringBuilder builder, String tblComment, List<String> primaryKeys,
-Map<String, String> tblProperties) {
+SourceProperties tblProperties) {
// Nothing, please override by subclass!
}

View File

@@ -14,9 +14,9 @@ import com.gitee.dbswitch.schema.ColumnDescription;
import com.gitee.dbswitch.schema.ColumnMetaData;
import com.gitee.dbswitch.schema.IndexDescription;
import com.gitee.dbswitch.schema.TableDescription;
import com.gitee.dbswitch.schema.SourceProperties;
import java.sql.Connection;
import java.util.List;
import java.util.Map;
/**
* Metadata query
@@ -183,7 +183,7 @@ public interface MetadataProvider {
* @param tblProperties table properties
*/
void postAppendCreateTableSql(StringBuilder builder, String tblComment, List<String> primaryKeys,
-Map<String, String> tblProperties);
+SourceProperties tblProperties);
/**
* 主键列转换为逗号分隔的字符串
@@ -203,7 +203,7 @@ public interface MetadataProvider {
List<String> getTableColumnCommentDefinition(TableDescription td, List<ColumnDescription> cds);
/**
* Get the federated create-table and data-load SQL list customized for Hive
*
* @param fieldNames field structure information
* @param primaryKeys primary key field information
@@ -214,7 +214,7 @@ public interface MetadataProvider {
* @return list of create-table and data-load SQL statements
*/
default List<String> getCreateTableSqlList(List<ColumnDescription> fieldNames, List<String> primaryKeys,
-String schemaName, String tableName, String tableRemarks, boolean autoIncr, Map<String, String> tblProperties) {
+String schemaName, String tableName, String tableRemarks, boolean autoIncr, SourceProperties tblProperties) {
throw new UnsupportedOperationException("Unsupported function!");
}
}

View File

@@ -14,6 +14,7 @@ import com.gitee.dbswitch.common.consts.Constants;
import com.gitee.dbswitch.common.entity.ResultSetWrapper;
import com.gitee.dbswitch.common.type.ProductTypeEnum;
import com.gitee.dbswitch.common.util.ObjectCastUtils;
import com.gitee.dbswitch.features.ProductFeatures;
import com.gitee.dbswitch.provider.AbstractCommonProvider;
import com.gitee.dbswitch.provider.ProductFactoryProvider;
import com.gitee.dbswitch.schema.SchemaTableData;
@@ -68,7 +69,8 @@ public class DefaultTableDataQueryProvider
sb.append(" ORDER BY ");
sb.append(productType.quoteName(StringUtils.join(orders, productType.quoteName(","))));
}
-return this.selectTableData(sb.toString(), getProductFeatures().convertFetchSize(this.fetchSize));
+ProductFeatures features = getProductFeatures();
+return this.selectTableData(sb.toString(), features.convertFetchSize(this.fetchSize));
}
protected ResultSetWrapper selectTableData(String sql, int fetchSize) {

View File

@@ -38,7 +38,7 @@ public class DefaultTableDataSynchronizeProvider
private JdbcTemplate jdbcTemplate;
private PlatformTransactionManager tx;
-private Map<String, Integer> columnType;
+protected Map<String, Integer> columnType;
protected List<String> fieldOrders;
protected List<String> pksOrders;
protected String insertStatementSql;

View File

@@ -0,0 +1,82 @@
package com.gitee.dbswitch.schema;
import com.gitee.dbswitch.common.type.ProductTypeEnum;
import java.util.List;
public class SourceProperties {
private ProductTypeEnum productType;
private String driverClass;
private String jdbcUrl;
private String username;
private String password;
private String schemaName;
private String tableName;
private List<String> columnNames;
public ProductTypeEnum getProductType() {
return productType;
}
public void setProductType(ProductTypeEnum productType) {
this.productType = productType;
}
public String getDriverClass() {
return driverClass;
}
public void setDriverClass(String driverClass) {
this.driverClass = driverClass;
}
public String getJdbcUrl() {
return jdbcUrl;
}
public void setJdbcUrl(String jdbcUrl) {
this.jdbcUrl = jdbcUrl;
}
public String getUsername() {
return username;
}
public void setUsername(String username) {
this.username = username;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public String getSchemaName() {
return schemaName;
}
public void setSchemaName(String schemaName) {
this.schemaName = schemaName;
}
public String getTableName() {
return tableName;
}
public void setTableName(String tableName) {
this.tableName = tableName;
}
public List<String> getColumnNames() {
return columnNames;
}
public void setColumnNames(List<String> columnNames) {
this.columnNames = columnNames;
}
}
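
SourceProperties is a typed carrier for the source-side connection and table information that the metadata layer previously passed around as an untyped Map<String, String> tblProperties. Elsewhere in this commit, ReaderTaskThread.getTblProperties() populates it from the source data source, and HiveTblUtils turns it back into the TBLPROPERTIES key/value pairs needed for Hive federated tables. A hypothetical population sketch, not part of the commit (all values are placeholders, and the MYSQL constant of ProductTypeEnum is assumed):

// Illustrative only: mirrors what ReaderTaskThread.getTblProperties() does.
SourceProperties props = new SourceProperties();
props.setProductType(ProductTypeEnum.MYSQL);           // assumed enum constant
props.setDriverClass("com.mysql.cj.jdbc.Driver");       // placeholder driver
props.setJdbcUrl("jdbc:mysql://127.0.0.1:3306/demo");   // placeholder URL
props.setUsername("demo");
props.setPassword("demo");
props.setSchemaName("demo");
props.setTableName("t_order");
props.setColumnNames(java.util.Arrays.asList("id", "order_no"));

Since the project already uses Lombok, a @Data annotation could replace the hand-written getters and setters, though the commit keeps them explicit.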

View File

@@ -19,12 +19,12 @@ import com.gitee.dbswitch.schema.IndexDescription;
import com.gitee.dbswitch.schema.SchemaTableData;
import com.gitee.dbswitch.schema.SchemaTableMeta;
import com.gitee.dbswitch.schema.TableDescription;
import com.gitee.dbswitch.schema.SourceProperties;
import com.gitee.dbswitch.util.GenerateSqlUtils;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import javax.sql.DataSource;
/**
@@ -224,7 +224,7 @@ public class DefaultMetadataService implements MetadataService {
@Override
public List<String> getDDLCreateTableSQL(MetadataProvider provider,
List<ColumnDescription> fieldNames, List<String> primaryKeys, String schemaName,
-String tableName, String tableRemarks, boolean autoIncr, Map<String, String> tblProperties) {
+String tableName, String tableRemarks, boolean autoIncr, SourceProperties tblProperties) {
return GenerateSqlUtils.getDDLCreateTableSQL(
provider, fieldNames, primaryKeys, schemaName, tableName, tableRemarks, autoIncr, tblProperties);
}

View File

@@ -11,12 +11,12 @@ package com.gitee.dbswitch.service;
import com.gitee.dbswitch.provider.meta.MetadataProvider;
import com.gitee.dbswitch.schema.ColumnDescription;
import com.gitee.dbswitch.schema.SourceProperties;
import com.gitee.dbswitch.schema.IndexDescription;
import com.gitee.dbswitch.schema.SchemaTableData;
import com.gitee.dbswitch.schema.SchemaTableMeta;
import com.gitee.dbswitch.schema.TableDescription;
import java.util.List;
import java.util.Map;
import javax.sql.DataSource;
public interface MetadataService {
@@ -159,5 +159,5 @@ public interface MetadataService {
*/
List<String> getDDLCreateTableSQL(MetadataProvider provider, List<ColumnDescription> fieldNames,
List<String> primaryKeys, String schemaName, String tableName, String tableRemarks,
-boolean autoIncr, Map<String, String> tblProperties);
+boolean autoIncr, SourceProperties tblProperties);
}

View File

@@ -17,12 +17,11 @@ import com.gitee.dbswitch.provider.meta.MetadataProvider;
import com.gitee.dbswitch.schema.ColumnDescription;
import com.gitee.dbswitch.schema.ColumnMetaData;
import com.gitee.dbswitch.schema.TableDescription;
import com.gitee.dbswitch.schema.SourceProperties;
import com.google.common.collect.Lists;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import lombok.experimental.UtilityClass;
@@ -52,7 +51,7 @@ public final class GenerateSqlUtils {
false,
null,
autoIncr,
-Collections.emptyMap());
+null);
}
public static String getDDLCreateTableSQL(
@@ -64,7 +63,7 @@ public final class GenerateSqlUtils {
boolean withRemarks,
String tableRemarks,
boolean autoIncr,
-Map<String, String> tblProperties) {
+SourceProperties tblProperties) {
ProductTypeEnum type = provider.getProductType();
StringBuilder sb = new StringBuilder();
Set<String> fieldNameSets = fieldNames.stream()
@@ -120,7 +119,7 @@ public final class GenerateSqlUtils {
String tableName,
String tableRemarks,
boolean autoIncr,
-Map<String, String> tblProperties) {
+SourceProperties tblProperties) {
ProductTypeEnum productType = provider.getProductType();
if (productType.isLikeHive()) {
return provider.getCreateTableSqlList(

View File

@@ -31,7 +31,6 @@ import com.gitee.dbswitch.data.domain.ReaderTaskParam;
import com.gitee.dbswitch.data.domain.ReaderTaskResult;
import com.gitee.dbswitch.data.entity.SourceDataSourceProperties;
import com.gitee.dbswitch.data.entity.TargetDataSourceProperties;
import com.gitee.dbswitch.data.util.HiveTblUtils;
import com.gitee.dbswitch.provider.ProductFactoryProvider;
import com.gitee.dbswitch.provider.ProductProviderFactory;
import com.gitee.dbswitch.provider.manage.TableManageProvider;
@@ -42,6 +41,7 @@ import com.gitee.dbswitch.provider.transform.RecordTransformProvider;
import com.gitee.dbswitch.provider.write.TableDataWriteProvider;
import com.gitee.dbswitch.schema.ColumnDescription;
import com.gitee.dbswitch.schema.TableDescription;
import com.gitee.dbswitch.schema.SourceProperties;
import com.gitee.dbswitch.service.DefaultMetadataService;
import com.gitee.dbswitch.service.MetadataService;
import com.google.common.collect.Lists;
@@ -692,12 +692,20 @@ public class ReaderTaskThread extends TaskProcessor<ReaderTaskResult> {
.build();
}
-public Map<String, String> getTblProperties() {
-if (targetProductType.isLikeHive()) {
-return HiveTblUtils.getTblProperties(sourceProductType, sourceDataSource,
-sourceSchemaName, sourceTableName, sourceColumnDescriptions);
-}
-return new HashMap<>();
+public SourceProperties getTblProperties() {
+List<String> columnNames = sourceColumnDescriptions.stream()
+.map(ColumnDescription::getFieldName)
+.collect(Collectors.toList());
+SourceProperties param = new SourceProperties();
+param.setProductType(sourceProductType);
+param.setDriverClass(sourceDataSource.getDriverClass());
+param.setJdbcUrl(sourceDataSource.getJdbcUrl());
+param.setUsername(sourceDataSource.getUserName());
+param.setPassword(sourceDataSource.getPassword());
+param.setSchemaName(sourceSchemaName);
+param.setTableName(sourceTableName);
+param.setColumnNames(columnNames);
+return param;
}
@Override

View File

@@ -15,6 +15,7 @@ import com.gitee.dbswitch.provider.meta.AbstractMetadataProvider;
import com.gitee.dbswitch.schema.ColumnDescription;
import com.gitee.dbswitch.schema.ColumnMetaData;
import com.gitee.dbswitch.schema.TableDescription;
import com.gitee.dbswitch.schema.SourceProperties;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
@@ -23,7 +24,6 @@ import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
@@ -303,7 +303,7 @@ public class ClickhouseMetadataQueryProvider extends AbstractMetadataProvider {
@Override
public void postAppendCreateTableSql(StringBuilder builder, String tblComment, List<String> primaryKeys,
-Map<String, String> tblProperties) {
+SourceProperties tblProperties) {
builder.append("ENGINE=MergeTree");
if (CollectionUtils.isEmpty(primaryKeys)) {
builder.append(Constants.CR);

View File

@@ -12,6 +12,7 @@ package com.gitee.dbswitch.product.gbase;
import com.gitee.dbswitch.annotation.Product;
import com.gitee.dbswitch.common.type.ProductTypeEnum;
import com.gitee.dbswitch.features.ProductFeatures;
import com.gitee.dbswitch.product.mysql.MysqlFeatures;
import com.gitee.dbswitch.provider.AbstractFactoryProvider;
import com.gitee.dbswitch.provider.meta.MetadataProvider;
import com.gitee.dbswitch.provider.sync.AutoCastTableDataSynchronizeProvider;
@@ -28,7 +29,7 @@ public class GbaseFactoryProvider extends AbstractFactoryProvider {
}
public ProductFeatures getProductFeatures() {
-return new GbaseFeatures();
+return new MysqlFeatures();
}
@Override

View File

@@ -1,20 +0,0 @@
// Copyright tang. All rights reserved.
// https://gitee.com/inrgihc/dbswitch
//
// Use of this source code is governed by a BSD-style license
//
// Author: tang (inrgihc@126.com)
// Date : 2020/1/2
// Location: beijing , china
/////////////////////////////////////////////////////////////
package com.gitee.dbswitch.product.gbase;
import com.gitee.dbswitch.features.ProductFeatures;
public class GbaseFeatures implements ProductFeatures {
public int convertFetchSize(int fetchSize) {
return Integer.MIN_VALUE;
}
}

View File

@@ -2,8 +2,8 @@ package com.gitee.dbswitch.product.gbase;
import com.gitee.dbswitch.product.mysql.MysqlMetadataQueryProvider;
import com.gitee.dbswitch.provider.ProductFactoryProvider;
import com.gitee.dbswitch.schema.SourceProperties;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
public class GbaseMetadataQueryProvider extends MysqlMetadataQueryProvider {
@@ -11,10 +11,10 @@ public class GbaseMetadataQueryProvider extends MysqlMetadataQueryProvider {
public GbaseMetadataQueryProvider(ProductFactoryProvider factoryProvider) {
super(factoryProvider);
}
@Override
public void postAppendCreateTableSql(StringBuilder builder, String tblComment, List<String> primaryKeys,
-Map<String, String> tblProperties) {
+SourceProperties tblProperties) {
builder.append("ENGINE=EXPRESS DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin");
if (StringUtils.isNotBlank(tblComment)) {
builder.append(String.format(" COMMENT='%s' ", tblComment.replace("'", "\\'")));

View File

@@ -11,21 +11,25 @@ package com.gitee.dbswitch.product.greenplum;
import com.gitee.dbswitch.product.postgresql.PostgresMetadataQueryProvider;
import com.gitee.dbswitch.provider.ProductFactoryProvider;
import com.gitee.dbswitch.schema.SourceProperties;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import lombok.extern.slf4j.Slf4j;
@Slf4j
public class GreenplumMetadataQueryProvider extends PostgresMetadataQueryProvider {
static {
systemSchemas.add("gp_toolkit");
}
public GreenplumMetadataQueryProvider(ProductFactoryProvider factoryProvider) {
super(factoryProvider);
}
@Override
public void postAppendCreateTableSql(StringBuilder builder, String tblComment, List<String> primaryKeys,
-Map<String, String> tblProperties) {
+SourceProperties tblProperties) {
// If a primary key exists, prefer it as the distribution key.
if (Objects.nonNull(primaryKeys) && !primaryKeys.isEmpty()) {
String pk = getPrimaryKeyAsString(primaryKeys);

View File

@@ -13,8 +13,11 @@ import com.gitee.dbswitch.annotation.Product;
import com.gitee.dbswitch.common.type.ProductTypeEnum;
import com.gitee.dbswitch.features.ProductFeatures;
import com.gitee.dbswitch.provider.AbstractFactoryProvider;
import com.gitee.dbswitch.provider.manage.TableManageProvider;
import com.gitee.dbswitch.provider.meta.MetadataProvider;
import com.gitee.dbswitch.provider.query.TableDataQueryProvider;
import com.gitee.dbswitch.provider.sync.TableDataSynchronizeProvider;
import com.gitee.dbswitch.provider.write.TableDataWriteProvider;
import javax.sql.DataSource;
@Product(ProductTypeEnum.HIVE)
@@ -39,8 +42,8 @@ public class HiveFactoryProvider extends AbstractFactoryProvider {
}
// @Override
-// public TableOperateProvider createTableOperateProvider() {
-// throw new UnsupportedOperationException("Unsupported hive to operator tale!");
+// public TableManageProvider createTableManageProvider() {
+// throw new UnsupportedOperationException("Unsupported hive to manage tale!");
// }
//
// @Override
@@ -49,7 +52,7 @@ public class HiveFactoryProvider extends AbstractFactoryProvider {
// }
//
// @Override
-// public TableDataSynchronizer createTableDataSynchronizer() {
+// public TableDataSynchronizeProvider createTableDataSynchronizeProvider() {
// throw new UnsupportedOperationException("Unsupported hive to sync tale data!");
// }
}

View File

@@ -12,5 +12,13 @@ package com.gitee.dbswitch.product.hive;
import com.gitee.dbswitch.features.ProductFeatures;
public class HiveFeatures implements ProductFeatures {
/**
* Whether to create the table using CREATE TABLE AS (CTAS)
*
* @return boolean
*/
public boolean useCTAS() {
return false;
}
}

View File

@@ -18,6 +18,7 @@ import com.gitee.dbswitch.schema.ColumnDescription;
import com.gitee.dbswitch.schema.ColumnMetaData;
import com.gitee.dbswitch.schema.IndexDescription;
import com.gitee.dbswitch.schema.TableDescription;
import com.gitee.dbswitch.schema.SourceProperties;
import com.gitee.dbswitch.util.GenerateSqlUtils;
import java.sql.Connection;
import java.sql.ResultSet;
@@ -28,16 +29,15 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.lang3.StringUtils;
@Slf4j
public class HiveMetadataQueryProvider extends AbstractMetadataProvider {
private static final boolean HIVE_USE_CTAS = false;
private static final String SHOW_CREATE_TABLE_SQL = "SHOW CREATE TABLE `%s`.`%s` ";
public HiveMetadataQueryProvider(ProductFactoryProvider factoryProvider) {
@@ -183,14 +183,15 @@ public class HiveMetadataQueryProvider extends AbstractMetadataProvider {
@Override
public void postAppendCreateTableSql(StringBuilder builder, String tblComment, List<String> primaryKeys,
-Map<String, String> tblProperties) {
-if (MapUtils.isNotEmpty(tblProperties)) {
+SourceProperties tblProperties) {
+if (Objects.nonNull(tblProperties)) {
+Map<String, String> keyValues = HiveTblUtils.getTblProperties(tblProperties);
builder.append(Constants.CR);
builder.append("STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler'");
builder.append(Constants.CR);
builder.append("TBLPROPERTIES (");
builder.append(
-tblProperties.entrySet().stream()
+keyValues.entrySet().stream()
.map(entry -> String.format("\t\t'%s' = '%s'", entry.getKey(), entry.getValue()))
.collect(Collectors.joining(",\n")));
builder.append(")");
@@ -202,13 +203,15 @@ public class HiveMetadataQueryProvider extends AbstractMetadataProvider {
@Override
public List<String> getCreateTableSqlList(List<ColumnDescription> fieldNames, List<String> primaryKeys,
-String schemaName, String tableName, String tableRemarks, boolean autoIncr, Map<String, String> tblProperties) {
+String schemaName, String tableName, String tableRemarks, boolean autoIncr, SourceProperties tblProperties) {
List<String> sqlLists = new ArrayList<>();
String tmpTableName = "tmp_" + UuidUtils.generateUuid();
String createTableSql = GenerateSqlUtils.getDDLCreateTableSQL(this, fieldNames, primaryKeys, schemaName,
tmpTableName, true, tableRemarks, autoIncr, tblProperties);
sqlLists.add(createTableSql);
-if (HIVE_USE_CTAS) {
+HiveFeatures features = getProductFeatures();
+if (features.useCTAS()) {
String createAsTableSql = String.format("CREATE TABLE `%s`.`%s` STORED AS ORC AS (SELECT * FROM `%s`.`%s`)",
schemaName, tableName, schemaName, tmpTableName);
sqlLists.add(createAsTableSql);

View File

@@ -1,9 +1,8 @@
-package com.gitee.dbswitch.data.util;
+package com.gitee.dbswitch.product.hive;
import com.gitee.dbswitch.common.entity.CloseableDataSource;
import com.gitee.dbswitch.common.type.ProductTypeEnum;
import com.gitee.dbswitch.common.util.ExamineUtils;
import com.gitee.dbswitch.schema.ColumnDescription;
import com.gitee.dbswitch.schema.SourceProperties;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
@@ -24,17 +23,17 @@ public class HiveTblUtils {
*
* @return Map<String, String>
*/
-public static Map<String, String> getTblProperties(ProductTypeEnum sourceProductType,
-CloseableDataSource sourceDataSource, String sourceSchemaName,
-String sourceTableName, List<ColumnDescription> sourceColumnDescriptions) {
+public static Map<String, String> getTblProperties(SourceProperties tblProperties) {
+ProductTypeEnum sourceProductType = tblProperties.getProductType();
+String sourceSchemaName = tblProperties.getSchemaName();
+String sourceTableName = tblProperties.getTableName();
ExamineUtils.check(supportedProductTypes.contains(sourceProductType),
"Unsupported data from %s to Hive", sourceProductType.name());
Map<String, String> ret = new HashMap<>();
String querySql = String.format("SELECT %s FROM %s",
-sourceColumnDescriptions.stream()
-.map(ColumnDescription::getFieldName)
+tblProperties.getColumnNames().stream()
.map(s -> sourceProductType.quoteName(s))
.collect(Collectors.joining(",")),
sourceProductType.quoteSchemaTableName(sourceSchemaName, sourceTableName));
@@ -48,10 +47,10 @@ public class HiveTblUtils {
databaseType = sourceProductType.name().toUpperCase();
}
ret.put("hive.sql.database.type", databaseType);
ret.put("hive.sql.jdbc.driver", sourceDataSource.getDriverClass());
ret.put("hive.sql.jdbc.url", sourceDataSource.getJdbcUrl());
ret.put("hive.sql.dbcp.username", sourceDataSource.getUserName());
ret.put("hive.sql.dbcp.password", sourceDataSource.getPassword());
ret.put("hive.sql.jdbc.driver", tblProperties.getDriverClass());
ret.put("hive.sql.jdbc.url", tblProperties.getJdbcUrl());
ret.put("hive.sql.dbcp.username", tblProperties.getUsername());
ret.put("hive.sql.dbcp.password", tblProperties.getPassword());
ret.put("hive.sql.query", querySql);
ret.put("hive.sql.jdbc.read-write", "read");
ret.put("hive.sql.jdbc.fetch.size", "2000");

View File

@@ -16,6 +16,7 @@ import com.gitee.dbswitch.schema.ColumnDescription;
import com.gitee.dbswitch.schema.ColumnMetaData;
import com.gitee.dbswitch.schema.IndexDescription;
import com.gitee.dbswitch.schema.TableDescription;
import com.gitee.dbswitch.schema.SourceProperties;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
@@ -24,7 +25,6 @@ import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
@@ -361,7 +361,7 @@ public class MysqlMetadataQueryProvider extends AbstractMetadataProvider {
@Override
public void postAppendCreateTableSql(StringBuilder builder, String tblComment, List<String> primaryKeys,
-Map<String, String> tblProperties) {
+SourceProperties tblProperties) {
builder.append("ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin");
if (StringUtils.isNotBlank(tblComment)) {
builder.append(String.format(" COMMENT='%s' ", tblComment.replace("'", "\\'")));

View File

@@ -26,7 +26,8 @@ public class OracleTableDataSynchronizer extends DefaultTableDataSynchronizeProv
List<InputStream> iss = new ArrayList<>();
records.parallelStream().forEach((Object[] row) -> {
for (int i = 0; i < row.length; ++i) {
-row[i] = OracleCastUtils.castByJdbcType(insertArgsType[i], row[i], iss);
+int jdbcType = this.columnType.get(this.fieldOrders.get(i));
+row[i] = OracleCastUtils.castByJdbcType(jdbcType, row[i], iss);
}
});
@@ -47,7 +48,8 @@ public class OracleTableDataSynchronizer extends DefaultTableDataSynchronizeProv
List<InputStream> iss = new ArrayList<>();
records.parallelStream().forEach((Object[] row) -> {
for (int i = 0; i < row.length; ++i) {
-row[i] = OracleCastUtils.castByJdbcType(updateArgsType[i], row[i], iss);
+int jdbcType = this.columnType.get(this.fieldOrders.get(i));
+row[i] = OracleCastUtils.castByJdbcType(jdbcType, row[i], iss);
}
});

View File

@@ -32,7 +32,7 @@ import org.apache.commons.lang3.math.NumberUtils;
@Slf4j
public class PostgresMetadataQueryProvider extends AbstractMetadataProvider {
-private static Set<String> systemSchemas = new HashSet<>();
+protected static Set<String> systemSchemas = new HashSet<>();
private static final String SHOW_CREATE_VIEW_SQL_1 =
"SELECT pg_get_viewdef((select pg_class.relfilenode from pg_catalog.pg_class \n"
@@ -42,6 +42,7 @@ public class PostgresMetadataQueryProvider extends AbstractMetadataProvider {
"select pg_get_viewdef('\"%s\".\"%s\"', true)";
static {
systemSchemas.add("pg_temp");
systemSchemas.add("pg_aoseg");
systemSchemas.add("information_schema");
systemSchemas.add("pg_catalog");

View File

@@ -17,6 +17,7 @@ import com.gitee.dbswitch.schema.ColumnDescription;
import com.gitee.dbswitch.schema.ColumnMetaData;
import com.gitee.dbswitch.schema.IndexDescription;
import com.gitee.dbswitch.schema.TableDescription;
import com.gitee.dbswitch.schema.SourceProperties;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
@@ -25,7 +26,6 @@ import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
@@ -367,7 +367,7 @@ public class StarrocksMetadataQueryProvider extends AbstractMetadataProvider {
@Override
public void postAppendCreateTableSql(StringBuilder builder, String tblComment, List<String> primaryKeys,
-Map<String, String> tblProperties) {
+SourceProperties tblProperties) {
String pk = getPrimaryKeyAsString(primaryKeys);
builder.append("PRIMARY KEY (").append(pk).append(")");
builder.append("\n DISTRIBUTED BY HASH(").append(pk).append(")");