!207 Add support for a new data source - DORIS

Add Doris data source support
This commit is contained in:
janke
2024-10-08 04:31:33 +00:00
committed by inrgihc
parent 385bfa9a8e
commit 4dbcd2cd80
23 changed files with 527 additions and 81 deletions

Binary image file not shown (59 KiB).

View File

@@ -264,6 +264,7 @@ public class AssignmentService {
File driverVersionFile = driverLoadService.getVersionDriverFile(
sourceDatabaseConnectionEntity.getType(),
sourceDatabaseConnectionEntity.getVersion());
sourceDataSourceProperties.setType(sourceDatabaseConnectionEntity.getType());
sourceDataSourceProperties.setUrl(sourceDatabaseConnectionEntity.getUrl());
sourceDataSourceProperties.setDriverClassName(sourceDatabaseConnectionEntity.getDriver());
sourceDataSourceProperties.setDriverPath(driverVersionFile.getAbsolutePath());
@@ -306,6 +307,7 @@ public class AssignmentService {
File driverVersionFile = driverLoadService.getVersionDriverFile(
targetDatabaseConnectionEntity.getType(),
targetDatabaseConnectionEntity.getVersion());
targetDataSourceProperties.setType(targetDatabaseConnectionEntity.getType());
targetDataSourceProperties.setUrl(targetDatabaseConnectionEntity.getUrl());
targetDataSourceProperties.setDriverClassName(targetDatabaseConnectionEntity.getDriver());
targetDataSourceProperties.setDriverPath(driverVersionFile.getAbsolutePath());

View File

@@ -93,7 +93,7 @@ public class ConnectionService {
public MetadataService getMetaDataCoreService(DatabaseConnectionEntity dbConn) {
CloseableDataSource dataSource = getDataSource(dbConn);
MetadataService metaDataService = new DefaultMetadataService(dataSource);
MetadataService metaDataService = new DefaultMetadataService(dataSource, dbConn.getType());
return metaDataService;
}

View File

@@ -206,6 +206,18 @@ public enum ProductTypeEnum {
new String[]{"jdbc:postgresql://{host}[:{port}]/[{database}][\\?{params}]"},
"jdbc:postgresql://127.0.0.1:5432/test"),
/**
* DORIS database type
*/
DORIS(21, "`", "Doris", "com.mysql.jdbc.Driver", 9030,
"/* ping */ SELECT 1",
"jdbc:mysql://",
new String[]{"jdbc:mysql://{host}[:{port}]/[{database}][\\?{params}]"},
"jdbc:mysql://127.0.0.1:9030/test?useUnicode=true&characterEncoding=utf-8&useSSL=false&zeroDateTimeBehavior=convertToNull&serverTimezone=Asia/Shanghai&tinyInt1isBit=false&rewriteBatchedStatements=true&useCompression=true"),
;
private int id;
@@ -255,7 +267,7 @@ public enum ProductTypeEnum {
* @return boolean
*/
public boolean isPrimaryKeyShouldAtFirst() {
return this == STARROCKS;
return this == STARROCKS || this == DORIS;
}
/**
@@ -312,6 +324,15 @@ public enum ProductTypeEnum {
return this == ELASTICSEARCH;
}
/**
* Whether this is the Doris database type
*
* @return boolean
*/
public boolean isDoris() {
return this == DORIS;
}
/**
* Whether this database type supports SQL
*

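Doris speaks the MySQL wire protocol on its FE query port, which is why the new DORIS entry above reuses com.mysql.jdbc.Driver, defaults to port 9030, and probes connections with the same "/* ping */ SELECT 1" statement. A minimal connectivity sketch built from the sample URL in the entry (host, credentials and database name are placeholders, not part of the patch):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class DorisPingSketch {
  public static void main(String[] args) throws Exception {
    // The Doris FE query port (9030) accepts MySQL-protocol clients, so the stock MySQL JDBC driver works.
    String url = "jdbc:mysql://127.0.0.1:9030/test?useUnicode=true&characterEncoding=utf-8&useSSL=false";
    try (Connection conn = DriverManager.getConnection(url, "root", "");
         Statement st = conn.createStatement();
         // Same test statement the enum entry declares for connection checking.
         ResultSet rs = st.executeQuery("/* ping */ SELECT 1")) {
      while (rs.next()) {
        System.out.println("ping result: " + rs.getInt(1));
      }
    }
  }
}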
View File

@@ -10,14 +10,12 @@
package com.gitee.dbswitch.common.util;
import com.gitee.dbswitch.common.type.ProductTypeEnum;
import com.google.common.collect.Sets;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import javax.sql.DataSource;
import lombok.experimental.UtilityClass;
import lombok.extern.slf4j.Slf4j;
@@ -75,58 +73,6 @@ public final class DatabaseAwareUtils {
driverNameMap.put("esJestDriver", ProductTypeEnum.ELASTICSEARCH);
}
/**
* Get the database product enum
*
* @param dataSource the data source
* @return the database product enum
*/
public static ProductTypeEnum getProductTypeByDataSource(DataSource dataSource) {
try (Connection connection = dataSource.getConnection()) {
String productName = connection.getMetaData().getDatabaseProductName();
String driverName = connection.getMetaData().getDriverName();
if (driverNameMap.containsKey(driverName)) {
ProductTypeEnum productType = driverNameMap.get(driverName);
if (productType == ProductTypeEnum.POSTGRESQL) {
if (ProductTypeUtils.isGreenplum(connection)) {
return ProductTypeEnum.GREENPLUM;
}
String url = connection.getMetaData().getURL();
Set<ProductTypeEnum> excludes = Sets.immutableEnumSet(ProductTypeEnum.POSTGRESQL, ProductTypeEnum.GREENPLUM);
ProductTypeEnum pgLikeType = ProductTypeEnum.getProductType(url, excludes);
if (null != pgLikeType) {
return pgLikeType;
}
} else if (productType == ProductTypeEnum.MYSQL) {
if (ProductTypeUtils.isStarRocks(connection)) {
return ProductTypeEnum.STARROCKS;
}
}
return productType;
}
ProductTypeEnum type = productNameMap.get(productName);
if (null != type) {
return type;
}
String url = connection.getMetaData().getURL();
if (null != url && url.contains("mongodb://")) {
return ProductTypeEnum.MONGODB;
}
if (null != url && url.contains("jest://")) {
return ProductTypeEnum.ELASTICSEARCH;
}
type = ProductTypeEnum.getProductType(url);
if (null != type) {
return type;
}
throw new IllegalStateException("Unable to detect database type from data source instance");
} catch (SQLException se) {
throw new RuntimeException(se);
}
}
/**
* Check whether a MySQL table's storage engine is InnoDB
*

View File

@@ -133,8 +133,8 @@ public final class DefaultChangeCalculatorService implements RecordRowChangeCalc
boolean useOwnFieldsColumns = !CollectionUtils.isEmpty(task.getFieldColumns());
// Check the primary key fields and comparison fields of the old and new tables
MetadataService oldMd = new DefaultMetadataService(task.getOldDataSource());
MetadataService newMd = new DefaultMetadataService(task.getNewDataSource());
MetadataService oldMd = new DefaultMetadataService(task.getOldDataSource(), task.getOldProductType());
MetadataService newMd = new DefaultMetadataService(task.getNewDataSource(), task.getNewProductType());
List<String> fieldsPrimaryKeyOld = oldMd
.queryTablePrimaryKeys(task.getOldSchemaName(), task.getOldTableName());
List<String> fieldsAllColumnOld = oldMd
@@ -213,11 +213,11 @@ public final class DefaultChangeCalculatorService implements RecordRowChangeCalc
try {
// Fetch the result sets of the old and new tables (sorted by primary key)
TableDataQueryProvider oldQuery = ProductProviderFactory
.newProvider(task.getOldDataSource())
.newProvider(task.getOldProductType(), task.getOldDataSource())
.createTableDataQueryProvider();
oldQuery.setQueryFetchSize(this.queryFetchSize);
TableDataQueryProvider newQuery = ProductProviderFactory
.newProvider(task.getNewDataSource())
.newProvider(task.getNewProductType(), task.getNewDataSource())
.createTableDataQueryProvider();
newQuery.setQueryFetchSize(this.queryFetchSize);

View File

@@ -9,6 +9,7 @@
/////////////////////////////////////////////////////////////
package com.gitee.dbswitch.calculate;
import com.gitee.dbswitch.common.type.ProductTypeEnum;
import com.gitee.dbswitch.provider.transform.RecordTransformProvider;
import java.util.Collections;
import java.util.List;
@@ -47,6 +48,13 @@ public class TaskParamEntity {
@NonNull
private String oldTableName;
/**
* Database product type of the old table
*/
@NonNull
private ProductTypeEnum oldProductType;
/**
* Data source of the new table
*/
@@ -65,6 +73,12 @@ public class TaskParamEntity {
@NonNull
private String newTableName;
/**
* Database product type of the new table
*/
@NonNull
private ProductTypeEnum newProductType;
/**
* Field columns
*/

View File

@@ -10,7 +10,6 @@
package com.gitee.dbswitch.provider;
import com.gitee.dbswitch.common.type.ProductTypeEnum;
import com.gitee.dbswitch.common.util.DatabaseAwareUtils;
import com.gitee.dbswitch.common.util.ExamineUtils;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@@ -71,9 +70,4 @@ public class ProductProviderFactory {
return instance;
}
public static ProductFactoryProvider newProvider(DataSource dataSource) {
ProductTypeEnum type = DatabaseAwareUtils.getProductTypeByDataSource(dataSource);
return newProvider(type, dataSource);
}
}

View File

@@ -39,14 +39,6 @@ public class DefaultMetadataService implements MetadataService {
private MetadataProvider metaQueryProvider;
private TableDataQueryProvider dataQueryProvider;
public DefaultMetadataService(DataSource dataSource) {
this.dataSource = dataSource;
ProductFactoryProvider factoryProvider = ProductProviderFactory
.newProvider(dataSource);
this.metaQueryProvider = factoryProvider.createMetadataQueryProvider();
this.dataQueryProvider = factoryProvider.createTableDataQueryProvider();
}
public DefaultMetadataService(DataSource dataSource, ProductTypeEnum type) {
this.dataSource = dataSource;
this.factoryProvider = ProductProviderFactory

View File

@@ -104,8 +104,14 @@ public final class GenerateSqlUtils {
sb.append(provider.getFieldDefinition(v, pks, autoIncr, false, withRemarks));
}
provider.appendPrimaryKeyForCreateTableSql(sb, pks);
sb.append(")");
if (type.isDoris()) {
sb.append(")");
provider.appendPrimaryKeyForCreateTableSql(sb, pks);
} else {
provider.appendPrimaryKeyForCreateTableSql(sb, pks);
sb.append(")");
}
provider.postAppendCreateTableSql(sb, tableRemarks, pks, tblProperties);
return DDLFormatterUtils.format(sb.toString());
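The branch above exists because Doris places its key clause outside the column list: the UNIQUE KEY (...) appended by the Doris provider must follow the closing parenthesis and precede DISTRIBUTED BY, whereas the other databases keep PRIMARY KEY (...) inside the parentheses. A standalone sketch of the two orderings (table and column names are illustrative only, not from the patch):

public class CreateTableOrderingSketch {

  public static void main(String[] args) {
    System.out.println(buildCreateTable(true));   // Doris-style DDL
    System.out.println(buildCreateTable(false));  // conventional DDL
  }

  static String buildCreateTable(boolean isDoris) {
    StringBuilder sb = new StringBuilder(
        "CREATE TABLE `t_user` (`id` BIGINT NOT NULL, `name` VARCHAR(96)");
    if (isDoris) {
      // Doris: close the column list first, then emit the key and distribution clauses.
      sb.append(") UNIQUE KEY (`id`)");
      sb.append(" DISTRIBUTED BY HASH(`id`) BUCKETS AUTO");
    } else {
      // Other databases: the primary key definition stays inside the column list.
      sb.append(", PRIMARY KEY (`id`))");
    }
    return sb.toString();
  }
}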

View File

@@ -12,6 +12,7 @@ package com.gitee.dbswitch.data.entity;
import com.gitee.dbswitch.common.entity.PatternMapper;
import java.util.List;
import java.util.concurrent.TimeUnit;
import com.gitee.dbswitch.common.type.ProductTypeEnum;
import lombok.Data;
/**
@@ -22,6 +23,7 @@ import lombok.Data;
@Data
public class SourceDataSourceProperties {
private ProductTypeEnum type;
private String url;
private String driverClassName;
private String username;

View File

@@ -10,6 +10,7 @@
package com.gitee.dbswitch.data.entity;
import com.gitee.dbswitch.common.type.CaseConvertEnum;
import com.gitee.dbswitch.common.type.ProductTypeEnum;
import com.gitee.dbswitch.common.type.SyncOptionEnum;
import java.util.concurrent.TimeUnit;
import lombok.Data;
@@ -22,6 +23,7 @@ import lombok.Data;
@Data
public class TargetDataSourceProperties {
private ProductTypeEnum type;
private String url;
private String driverClassName;
private String username;

View File

@@ -114,6 +114,8 @@ public class ReaderTaskThread extends TaskProcessor<ReaderTaskResult> {
this.properties = taskParam.getConfiguration();
this.sourceProperties = this.properties.getSource();
this.targetProperties = this.properties.getTarget();
this.sourceProductType = this.sourceProperties.getType();
this.targetProductType = this.targetProperties.getType();
this.sourceSchemaName = this.sourceProperties.getSourceSchema();
this.sourceTableName = this.tableDescription.getTableName();
this.targetExistTables = taskParam.getTargetExistTables();
@@ -126,8 +128,6 @@ public class ReaderTaskThread extends TaskProcessor<ReaderTaskResult> {
fetchSize = sourceProperties.getFetchSize();
}
this.sourceProductType = DatabaseAwareUtils.getProductTypeByDataSource(sourceDataSource);
this.targetProductType = DatabaseAwareUtils.getProductTypeByDataSource(targetDataSource);
if (this.targetProductType.isLikeHive()) {
// !! hive does not support upper table name and upper column name
@@ -519,9 +519,11 @@ public class ReaderTaskThread extends TaskProcessor<ReaderTaskResult> {
taskBuilder.oldDataSource(targetDataSource);
taskBuilder.oldSchemaName(targetSchemaName);
taskBuilder.oldTableName(targetTableName);
taskBuilder.oldProductType(targetProductType);
taskBuilder.newDataSource(sourceDataSource);
taskBuilder.newSchemaName(sourceSchemaName);
taskBuilder.newTableName(sourceTableName);
taskBuilder.newProductType(sourceProductType);
taskBuilder.fieldColumns(sourceFields);
taskBuilder.columnsMap(columnNameMaps);
taskBuilder.transformer(transformer);

View File

@@ -77,7 +77,7 @@ public class DefaultReaderRobot extends RobotReader<ReaderTaskResult> {
public void init(AsyncTaskExecutor threadExecutor) {
this.threadExecutor = threadExecutor;
this.readTaskThreads = new ArrayList<>();
MetadataService tdsService = new DefaultMetadataService(targetDataSource);
MetadataService tdsService = new DefaultMetadataService(targetDataSource, configuration.getTarget().getType());
Set<String> targetExistTables = tdsService.queryTableList(configuration.getTarget().getTargetSchema())
.stream().map(TableDescription::getTableName).collect(Collectors.toSet());
List<TableDescription> tableDescriptions = splitReaderTask();
@@ -105,10 +105,10 @@ public class DefaultReaderRobot extends RobotReader<ReaderTaskResult> {
private List<TableDescription> splitReaderTask() {
List<TableDescription> tableDescriptions = new ArrayList<>();
MetadataService sourceMetaDataService = new DefaultMetadataService(sourceDataSource);
SourceDataSourceProperties sourceProperties = configuration.getSource();
MetadataService sourceMetaDataService = new DefaultMetadataService(sourceDataSource, sourceProperties.getType());
# Decide the handling strategy: exclude or include
SourceDataSourceProperties sourceProperties = configuration.getSource();
List<String> includes =
StreamUtil.of(StrUtil.split(sourceProperties.getSourceIncludes(), StrPool.COMMA))
.collect(Collectors.toList());

View File

@@ -3,6 +3,7 @@ dbswitch:
# source database connection information
## support MySQL/MariaDB/DB2/DM/Kingbase8/Oracle/SQLServer/PostgreSQL/Greenplum etc.
## support multiple source database connection
type: MYSQL
url: jdbc:mysql://172.17.2.10:3306/test?useUnicode=true&characterEncoding=utf-8&useSSL=false&zeroDateTimeBehavior=convertToNull&serverTimezone=Asia/Shanghai&nullCatalogMeansCurrent=true&tinyInt1isBit=false&rewriteBatchedStatements=true&useCompression=true
driver-class-name: 'com.mysql.jdbc.Driver'
driver-path: D:\Workspace\IdeaProjects\dbswitch\drivers\mysql\mysql-5
@@ -29,6 +30,7 @@ dbswitch:
target:
# target database connection information
## Best support for Oracle/PostgreSQL/Greenplum/DM etc.
type: POSTGRESQL
url: jdbc:postgresql://172.17.2.10:5432/test
driver-class-name: org.postgresql.Driver
driver-path: D:\Workspace\IdeaProjects\dbswitch\drivers\postgresql\postgresql-11.4

View File

@@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<artifactId>dbswitch-product</artifactId>
<groupId>com.gitee.dbswitch</groupId>
<version>1.9.9</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>dbswitch-product-doris</artifactId>
<dependencies>
<dependency>
<groupId>com.gitee.dbswitch</groupId>
<artifactId>dbswitch-common</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.gitee.dbswitch</groupId>
<artifactId>dbswitch-core</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
</project>

View File

@@ -0,0 +1,49 @@
// Copyright tang. All rights reserved.
// https://gitee.com/inrgihc/dbswitch
//
// Use of this source code is governed by a BSD-style license
//
// Author: wjk (wanglv110@163.com)
// Date : 2024/9/29
// Location: wuhan , china
/////////////////////////////////////////////////////////////
package com.gitee.dbswitch.product.doris;
import com.gitee.dbswitch.annotation.Product;
import com.gitee.dbswitch.common.type.ProductTypeEnum;
import com.gitee.dbswitch.features.ProductFeatures;
import com.gitee.dbswitch.provider.AbstractFactoryProvider;
import com.gitee.dbswitch.provider.meta.MetadataProvider;
import com.gitee.dbswitch.provider.sync.AutoCastTableDataSynchronizeProvider;
import com.gitee.dbswitch.provider.sync.TableDataSynchronizeProvider;
import com.gitee.dbswitch.provider.write.AutoCastTableDataWriteProvider;
import com.gitee.dbswitch.provider.write.TableDataWriteProvider;
import javax.sql.DataSource;
@Product(ProductTypeEnum.DORIS)
public class DorisFactoryProvider extends AbstractFactoryProvider {
public DorisFactoryProvider(DataSource dataSource) {
super(dataSource);
}
public ProductFeatures getProductFeatures() {
return new DorisFeatures();
}
@Override
public MetadataProvider createMetadataQueryProvider() {
return new DorisMetadataQueryProvider(this);
}
@Override
public TableDataWriteProvider createTableDataWriteProvider(boolean useInsert) {
return new AutoCastTableDataWriteProvider(this);
}
@Override
public TableDataSynchronizeProvider createTableDataSynchronizeProvider() {
return new AutoCastTableDataSynchronizeProvider(this);
}
}

View File

@@ -0,0 +1,20 @@
// Copyright tang. All rights reserved.
// https://gitee.com/inrgihc/dbswitch
//
// Use of this source code is governed by a BSD-style license
//
// Author: wjk (wanglv110@163.com)
// Date : 2024/9/29
// Location: wuhan , china
/////////////////////////////////////////////////////////////
package com.gitee.dbswitch.product.doris;
import com.gitee.dbswitch.features.ProductFeatures;
public class DorisFeatures implements ProductFeatures {
public int convertFetchSize(int fetchSize) {
return Integer.MIN_VALUE;
}
}
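Returning Integer.MIN_VALUE follows the MySQL Connector/J convention for streaming result sets: the driver only delivers rows one at a time when the fetch size is exactly Integer.MIN_VALUE on a forward-only, read-only statement; any other value makes it buffer the whole result in memory. A minimal sketch of how a reader would apply that value (URL, credentials and query are placeholders):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

public class StreamingFetchSketch {
  public static void main(String[] args) throws Exception {
    String url = "jdbc:mysql://127.0.0.1:9030/test?useSSL=false";
    try (Connection conn = DriverManager.getConnection(url, "root", "");
         PreparedStatement ps = conn.prepareStatement(
             "SELECT * FROM t_user",
             ResultSet.TYPE_FORWARD_ONLY,
             ResultSet.CONCUR_READ_ONLY)) {
      // Whatever fetch size was configured, DorisFeatures.convertFetchSize(...) hands the
      // driver Integer.MIN_VALUE so rows are streamed instead of fully buffered.
      ps.setFetchSize(Integer.MIN_VALUE);
      try (ResultSet rs = ps.executeQuery()) {
        while (rs.next()) {
          // process one row at a time
        }
      }
    }
  }
}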

View File

@@ -0,0 +1,360 @@
// Copyright tang. All rights reserved.
// https://gitee.com/inrgihc/dbswitch
//
// Use of this source code is governed by a BSD-style license
//
// Author: wjk (wanglv110@163.com)
// Date : 2024/9/29
// Location: wuhan , china
/////////////////////////////////////////////////////////////
package com.gitee.dbswitch.product.doris;
import com.gitee.dbswitch.common.consts.Constants;
import com.gitee.dbswitch.provider.ProductFactoryProvider;
import com.gitee.dbswitch.provider.meta.AbstractMetadataProvider;
import com.gitee.dbswitch.schema.*;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import java.sql.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
@Slf4j
public class DorisMetadataQueryProvider extends AbstractMetadataProvider {
private static final String SHOW_CREATE_TABLE_SQL = "SHOW CREATE TABLE `%s`.`%s` ";
private static final String SHOW_CREATE_VIEW_SQL = "SHOW CREATE VIEW `%s`.`%s` ";
private static final String QUERY_TABLE_LIST_SQL =
"SELECT `TABLE_SCHEMA`,`TABLE_NAME`,`TABLE_TYPE`,`TABLE_COMMENT` "
+ "FROM `information_schema`.`TABLES` WHERE `TABLE_SCHEMA`= ? ";
private static final String QUERY_TABLE_METADATA_SQL =
"SELECT `TABLE_COMMENT`,`TABLE_TYPE` FROM `information_schema`.`TABLES` "
+ "WHERE `TABLE_SCHEMA` = ? AND `TABLE_NAME` = ?";
public DorisMetadataQueryProvider(ProductFactoryProvider factoryProvider) {
super(factoryProvider);
}
@Override
public List<String> querySchemaList(Connection connection) {
List<String> result = new ArrayList<>();
try (ResultSet rs = connection.getMetaData().getCatalogs()) {
while (rs.next()) {
Optional.ofNullable(rs.getString(1)).ifPresent(result::add);
}
return result.stream().distinct().collect(Collectors.toList());
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
@Override
public List<TableDescription> queryTableList(Connection connection, String schemaName) {
List<TableDescription> result = new ArrayList<>();
try (PreparedStatement ps = connection.prepareStatement(QUERY_TABLE_LIST_SQL)) {
ps.setString(1, schemaName);
try (ResultSet rs = ps.executeQuery();) {
while (rs.next()) {
TableDescription td = new TableDescription();
td.setSchemaName(rs.getString("TABLE_SCHEMA"));
td.setTableName(rs.getString("TABLE_NAME"));
td.setRemarks(rs.getString("TABLE_COMMENT"));
String tableType = rs.getString("TABLE_TYPE");
if (tableType.equalsIgnoreCase("VIEW")) {
td.setTableType("VIEW");
} else {
td.setTableType("TABLE");
}
result.add(td);
}
return result;
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
@Override
public TableDescription queryTableMeta(Connection connection, String schemaName, String tableName) {
try (PreparedStatement ps = connection.prepareStatement(QUERY_TABLE_METADATA_SQL)) {
ps.setString(1, schemaName);
ps.setString(2, tableName);
try (ResultSet rs = ps.executeQuery();) {
while (rs.next()) {
TableDescription td = new TableDescription();
td.setSchemaName(schemaName);
td.setTableName(tableName);
td.setRemarks(rs.getString(1));
String tableType = rs.getString(2);
if (tableType.equalsIgnoreCase("VIEW")) {
td.setTableType("VIEW");
} else {
td.setTableType("TABLE");
}
return td;
}
return null;
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
@Override
public List<String> queryTableColumnName(Connection connection, String schemaName, String tableName) {
List<String> columns = new ArrayList<>();
try (ResultSet rs = connection.getMetaData()
.getColumns(schemaName, null, tableName, null)) {
while (rs.next()) {
columns.add(rs.getString("COLUMN_NAME"));
}
return columns.stream().distinct().collect(Collectors.toList());
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
@Override
public List<ColumnDescription> queryTableColumnMeta(Connection connection, String schemaName,
String tableName) {
String sql = this.getTableFieldsQuerySQL(schemaName, tableName);
List<ColumnDescription> ret = this.querySelectSqlColumnMeta(connection, sql);
// Fill in the column comment information
try (ResultSet columns = connection.getMetaData()
.getColumns(schemaName, null, tableName, null)) {
while (columns.next()) {
String columnName = columns.getString("COLUMN_NAME");
String remarks = columns.getString("REMARKS");
String columnDefault = columns.getString("COLUMN_DEF");
for (ColumnDescription cd : ret) {
if (columnName.equals(cd.getFieldName())) {
cd.setRemarks(remarks);
// Fill in the default value information
cd.setDefaultValue(columnDefault);
}
}
}
return ret;
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
@Override
public List<String> queryTablePrimaryKeys(Connection connection, String schemaName, String tableName) {
List<String> ret = new ArrayList<>();
try (ResultSet primaryKeys = connection.getMetaData()
.getPrimaryKeys(schemaName, null, tableName)) {
while (primaryKeys.next()) {
ret.add(primaryKeys.getString("COLUMN_NAME"));
}
return ret.stream().distinct().collect(Collectors.toList());
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
@Override
public synchronized List<IndexDescription> queryTableIndexes(Connection connection, String schemaName,
String tableName) {
setCatalogName(schemaName);
return super.queryTableIndexes(connection, schemaName, tableName);
}
@Override
public String getTableDDL(Connection connection, String schemaName, String tableName) {
List<String> result = new ArrayList<>();
try (Statement st = connection.createStatement()) {
if (st.execute(String.format(SHOW_CREATE_TABLE_SQL, schemaName, tableName))) {
try (ResultSet rs = st.getResultSet()) {
if (rs != null) {
while (rs.next()) {
String value = rs.getString(2);
Optional.ofNullable(value).ifPresent(result::add);
}
}
}
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
return result.stream().findAny().orElse(null);
}
@Override
public String getViewDDL(Connection connection, String schemaName, String tableName) {
List<String> result = new ArrayList<>();
try (Statement st = connection.createStatement()) {
if (st.execute(String.format(SHOW_CREATE_VIEW_SQL, schemaName, tableName))) {
try (ResultSet rs = st.getResultSet()) {
if (rs != null) {
while (rs.next()) {
String value = rs.getString(2);
Optional.ofNullable(value).ifPresent(result::add);
}
}
}
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
return result.stream().findAny().orElse(null);
}
@Override
public List<ColumnDescription> querySelectSqlColumnMeta(Connection connection, String sql) {
String querySQL = String.format(" %s LIMIT 0,1", sql.replace(";", ""));
return this.getSelectSqlColumnMeta(connection, querySQL);
}
@Override
public void testQuerySQL(Connection connection, String sql) {
String testQuerySql = String.format("explain %s", sql.replace(";", ""));
if (log.isDebugEnabled()) {
log.debug("Execute sql :{}", testQuerySql);
}
try (Statement st = connection.createStatement()) {
st.execute(testQuerySql);
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
@Override
public String getFieldDefinition(ColumnMetaData v, List<String> pks, boolean useAutoInc,
boolean addCr, boolean withRemarks) {
String fieldname = v.getName();
int length = v.getLength();
int precision = v.getPrecision();
int type = v.getType();
String retval = " `" + fieldname + "` ";
switch (type) {
case ColumnMetaData.TYPE_TIMESTAMP:
retval += "DATETIME";
break;
case ColumnMetaData.TYPE_TIME:
retval += "TIME";
break;
case ColumnMetaData.TYPE_DATE:
retval += "DATE";
break;
case ColumnMetaData.TYPE_BOOLEAN:
retval += "TINYINT";
break;
case ColumnMetaData.TYPE_NUMBER:
case ColumnMetaData.TYPE_INTEGER:
case ColumnMetaData.TYPE_BIGNUMBER:
if (null != pks && !pks.isEmpty() && pks.contains(fieldname)) {
if (useAutoInc) {
retval += "BIGINT AUTO_INCREMENT NOT NULL";
} else {
retval += "BIGINT NOT NULL";
}
} else {
// Integer values...
if (precision == 0) {
if (length > 9) {
if (length < 19) {
// can hold signed values between -9223372036854775808 and 9223372036854775807
// 18 significant digits
retval += "BIGINT";
} else {
retval += "DECIMAL(" + length + ")";
}
} else {
retval += "INT";
}
} else {
// Floating point values...
if (length > 15) {
retval += "DECIMAL(" + length;
if (precision > 0) {
retval += ", " + precision;
}
retval += ")";
} else {
// A double-precision floating-point number is accurate to approximately 15
// decimal places.
// http://mysql.mirrors-r-us.net/doc/refman/5.1/en/numeric-type-overview.html
retval += "DOUBLE";
}
}
}
break;
case ColumnMetaData.TYPE_STRING:
long newLength = length * 3;
if (newLength < 255) {
retval += "CHAR(" + newLength + ")";
} else if (newLength < 65533) {
retval += "VARCHAR(" + newLength + ")";
} else {
retval += "STRING";
}
break;
case ColumnMetaData.TYPE_BINARY:
retval += "STRING";
break;
default:
retval += "STRING";
break;
}
if (withRemarks && StringUtils.isNotBlank(v.getRemarks())) {
retval += String.format(" COMMENT '%s' ", v.getRemarks().replace("'", "\\'"));
}
if (addCr) {
retval += Constants.CR;
}
return retval;
}
@Override
public List<String> getTableColumnCommentDefinition(TableDescription td, List<ColumnDescription> cds) {
return Collections.emptyList();
}
@Override
public void preAppendCreateTableSql(StringBuilder builder) {
// builder.append( Const.IF_NOT_EXISTS );
}
@Override
public void postAppendCreateTableSql(StringBuilder builder, String tblComment, List<String> primaryKeys,
SourceProperties tblProperties) {
if (CollectionUtils.isNotEmpty(primaryKeys)) {
String primaryKeyAsString = getPrimaryKeyAsString(primaryKeys);
// builder.append("ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin");
builder.append(" DISTRIBUTED BY HASH(").append(primaryKeyAsString).append(") BUCKETS AUTO");
if (StringUtils.isNotBlank(tblComment)) {
builder.append(String.format(" COMMENT='%s' ", tblComment.replace("'", "\\'")));
}
}
}
@Override
public void appendPrimaryKeyForCreateTableSql(StringBuilder builder, List<String> primaryKeys) {
// Database types that do not support primary keys (e.g. Hive) must override this method
if (CollectionUtils.isNotEmpty(primaryKeys)) {
String primaryKeyAsString = getPrimaryKeyAsString(primaryKeys);
builder.append(" UNIQUE KEY (").append(primaryKeyAsString).append(")");
}
}
}

View File

@@ -0,0 +1 @@
com.gitee.dbswitch.product.doris.DorisFactoryProvider
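This one-line resource follows the standard META-INF/services registration format, so the new DorisFactoryProvider can be discovered by class name at runtime; together with the @Product(ProductTypeEnum.DORIS) annotation it maps the DORIS enum value to a concrete factory. How ProductProviderFactory consumes the registration is not shown in this patch; the snippet below is only a hypothetical illustration of reading such a service file and constructing the provider through its (DataSource) constructor:

import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import javax.sql.DataSource;

public class ProviderDiscoverySketch {

  // Hypothetical helper: read one registered class name and instantiate it reflectively.
  // Assumes the resource (e.g. a META-INF/services entry) is present on the classpath.
  static Object loadFirstProvider(String resourcePath, DataSource dataSource) throws Exception {
    ClassLoader cl = Thread.currentThread().getContextClassLoader();
    try (InputStream in = cl.getResourceAsStream(resourcePath);
         BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {
      // e.g. "com.gitee.dbswitch.product.doris.DorisFactoryProvider"
      String className = reader.readLine().trim();
      // Provider classes expose a (DataSource) constructor, as DorisFactoryProvider does above.
      return Class.forName(className).getConstructor(DataSource.class).newInstance(dataSource);
    }
  }
}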

View File

@@ -113,6 +113,11 @@
<artifactId>dbswitch-product-greenplum</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.gitee.dbswitch</groupId>
<artifactId>dbswitch-product-doris</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
</project>

View File

@@ -33,6 +33,7 @@
<module>dbswitch-product-highgo</module>
<module>dbswitch-product-starrocks</module>
<module>dbswitch-product-greenplum</module>
<module>dbswitch-product-doris</module>
</modules>
</project>

Binary file not shown.