mirror of
https://gitee.com/dromara/dbswitch.git
synced 2025-09-14 07:59:07 +00:00
代码整理 (Code cleanup)
This commit is contained in:
@@ -3,15 +3,6 @@ FROM openjdk:8-jre-alpine
ENV TZ=Asia/Shanghai
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone

ENV TZ=Asia/Shanghai
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone

ENV TZ=Asia/Shanghai
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone

ENV TZ=Asia/Shanghai
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone

ADD dbswitch-release.tar.gz /

EXPOSE 9088
0
build-docker/dbswitch/dbswitch-release/bin/startup.sh
Executable file → Normal file
@@ -41,8 +41,11 @@ import java.util.function.Supplier;
import java.util.stream.Collectors;
import javax.annotation.Resource;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.quartz.CronExpression;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.quartz.CronExpression;

@Service
public class AssignmentService {
@@ -22,6 +22,9 @@ import org.springframework.stereotype.Service;
@Service
public class PatternMapperService {

private final String STRING_EMPTY = "<!空>";
private final String STRING_DELETE = "<!删除>";

@Resource
private DbConnectionService connectionService;

@@ -34,18 +37,22 @@ public class PatternMapperService {
List<PreviewNameMapperResponse> result = new ArrayList<>();
if (CollectionUtils.isEmpty(request.getTableNames())) {
for (TableDescription td : getAllTableNames(request)) {
String targetName = PatterNameUtils.getFinalName(
td.getTableName(), request.getNameMapper());
result.add(PreviewNameMapperResponse.builder()
.originalName(td.getTableName())
.targetName(PatterNameUtils.getFinalName(td.getTableName(), request.getNameMapper()))
.targetName(StringUtils.isNotBlank(targetName) ? targetName : STRING_EMPTY)
.build());
}
} else {
if (include) {
for (String name : request.getTableNames()) {
if (StringUtils.isNotBlank(name)) {
String targetName = PatterNameUtils.getFinalName(
name, request.getNameMapper());
result.add(PreviewNameMapperResponse.builder()
.originalName(name)
.targetName(PatterNameUtils.getFinalName(name, request.getNameMapper()))
.targetName(StringUtils.isNotBlank(targetName) ? targetName : STRING_EMPTY)
.build());
}
}

@@ -92,7 +99,7 @@ public class PatternMapperService {
} else {
result.add(PreviewNameMapperResponse.builder()
.originalName(cd.getFieldName())
.targetName("<!字段被删除>")
.targetName(STRING_DELETE)
.build());
}
}

@@ -110,14 +117,10 @@ public class PatternMapperService {
if (null == dbConn) {
throw new DbswitchException(ResultCode.ERROR_RESOURCE_NOT_EXISTS, "id=" + request.getId());
}

IMetaDataByJdbcService service = connectionService.getMetaDataCoreService(dbConn);
return service.queryTableList(
dbConn.getUrl(),
dbConn.getUsername(),
dbConn.getPassword(),
request.getSchemaName()
).stream()
.filter(td -> !td.isViewTable())
return service.queryTableList(dbConn.getUrl(), dbConn.getUsername(), dbConn.getPassword(),
request.getSchemaName()).stream().filter(td -> !td.isViewTable())
.collect(Collectors.toList());
}
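
A minimal sketch of the placeholder fallback introduced in the hunks above, assuming PatterNameUtils.getFinalName may return a blank string when the name mapper deletes a name. STRING_EMPTY mirrors the constant added in this file; the class and helper names below are hypothetical, for illustration only.

// Sketch: blank mapping results are replaced with the "<!空>" ("empty") placeholder for preview display.
import org.apache.commons.lang.StringUtils;

public class NameMappingPreviewSketch {

  private static final String STRING_EMPTY = "<!空>";

  // Hypothetical helper: returns the mapped name, or the placeholder when the mapping yields a blank result.
  static String resolveTargetName(String mappedName) {
    return StringUtils.isNotBlank(mappedName) ? mappedName : STRING_EMPTY;
  }

  public static void main(String[] args) {
    System.out.println(resolveTargetName("ods_user")); // -> ods_user
    System.out.println(resolveTargetName(""));         // -> <!空>
  }
}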
@@ -9,312 +9,310 @@
/////////////////////////////////////////////////////////////
package com.gitee.dbswitch.core.database;

import java.util.List;
import java.util.Objects;
import java.util.Properties;
import java.util.Set;
import java.util.ArrayList;
import java.util.HashSet;
import com.gitee.dbswitch.common.type.DatabaseTypeEnum;
import com.gitee.dbswitch.common.util.DbswitchStrUtils;
import com.gitee.dbswitch.common.util.HivePrepareUtils;
import com.gitee.dbswitch.common.util.TypeConvertUtils;
import com.gitee.dbswitch.core.model.ColumnDescription;
import com.gitee.dbswitch.core.model.ColumnMetaData;
import com.gitee.dbswitch.core.model.SchemaTableData;
import com.gitee.dbswitch.core.model.TableDescription;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.Statement;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import com.gitee.dbswitch.common.constant.DatabaseTypeEnum;
import com.gitee.dbswitch.core.model.ColumnDescription;
import com.gitee.dbswitch.core.model.ColumnMetaData;
import com.gitee.dbswitch.core.model.TableDescription;
import com.gitee.dbswitch.core.util.JdbcOperatorUtils;

/**
* 数据库元信息抽象基类
*
* @author tang
*
* @author tang
*/
public abstract class AbstractDatabase implements IDatabaseInterface {

public static final int CLOB_LENGTH = 9999999;
public static final int CLOB_LENGTH = 9999999;

protected Connection connection = null;
protected DatabaseMetaData metaData = null;
protected String catalogName = null;
protected String driverClassName;
protected String catalogName = null;

public AbstractDatabase(String driverClassName) {
this.catalogName = null;
public AbstractDatabase(String driverClassName) {
try {
this.driverClassName = driverClassName;
Class.forName(driverClassName);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}

try {
Class.forName(driverClassName);
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
}
@Override
public String getDriverClassName() {
return this.driverClassName;
}

@Override
public void connect(String jdbcUrl, String username, String password) {
/*
* 超时时间设置问题: https://blog.csdn.net/lsunwing/article/details/79461217
* https://blog.csdn.net/weixin_34405332/article/details/91664781
*/
try {
/**
* Oracle在通过jdbc连接的时候需要添加一个参数来设置是否获取注释
*/
Properties props = new Properties();
props.put("user", username);
props.put("password", password);
props.put("remarksReporting", "true");
@Override
public List<String> querySchemaList(Connection connection) {
Set<String> ret = new HashSet<>();
try (ResultSet schemas = connection.getMetaData().getSchemas()) {
while (schemas.next()) {
ret.add(schemas.getString("TABLE_SCHEM"));
}
return new ArrayList<>(ret);
} catch (SQLException e) {
throw new RuntimeException(e);
}
}

// 设置最大时间
DriverManager.setLoginTimeout(15);

this.connection = DriverManager.getConnection(jdbcUrl, props);
if (Objects.isNull(this.connection)) {
throw new RuntimeException("数据库连接失败,连接参数为:" + jdbcUrl);
}

this.metaData = Objects.requireNonNull(this.connection.getMetaData());
} catch (SQLException e) {
throw new RuntimeException(e);
}
@Override
public List<TableDescription> queryTableList(Connection connection, String schemaName) {
List<TableDescription> ret = new ArrayList<>();
Set<String> uniqueSet = new HashSet<>();
String[] types = new String[]{"TABLE", "VIEW"};
try (ResultSet tables = connection.getMetaData()
.getTables(this.catalogName, schemaName, "%", types)) {
while (tables.next()) {
String tableName = tables.getString("TABLE_NAME");
if (uniqueSet.contains(tableName)) {
continue;
} else {
uniqueSet.add(tableName);
}

}
TableDescription td = new TableDescription();
td.setSchemaName(schemaName);
td.setTableName(tableName);
td.setRemarks(tables.getString("REMARKS"));
td.setTableType(tables.getString("TABLE_TYPE").toUpperCase());
ret.add(td);
}
return ret;
} catch (SQLException e) {
throw new RuntimeException(e);
}
}

@Override
public void close() {
if (null != connection) {
try {
connection.close();
} catch (SQLException e) {
}
connection = null;
}
}
@Override
public TableDescription queryTableMeta(Connection connection, String schemaName,
String tableName) {
return queryTableList(connection, schemaName).stream()
.filter(one -> tableName.equals(one.getTableName()))
.findAny().orElse(null);
}

@Override
public List<String> querySchemaList() {
Set<String> ret = new HashSet<>();
ResultSet schemas = null;
try {
schemas = this.metaData.getSchemas();
while (schemas.next()) {
ret.add(schemas.getString("TABLE_SCHEM"));
}
return new ArrayList<>(ret);
} catch (SQLException e) {
throw new RuntimeException(e);
} finally {
try {
if (null != schemas) {
schemas.close();
schemas = null;
}
} catch (SQLException e) {
}
}
@Override
public List<String> queryTableColumnName(Connection connection, String schemaName,
String tableName) {
Set<String> columns = new HashSet<>();
try (ResultSet rs = connection.getMetaData()
.getColumns(this.catalogName, schemaName, tableName, null)) {
while (rs.next()) {
columns.add(rs.getString("COLUMN_NAME"));
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
return new ArrayList<>(columns);
}

}
@Override
public List<ColumnDescription> queryTableColumnMeta(Connection connection, String schemaName,
String tableName) {
String sql = this.getTableFieldsQuerySQL(schemaName, tableName);
List<ColumnDescription> ret = this.querySelectSqlColumnMeta(connection, sql);

@Override
public List<TableDescription> queryTableList(String schemaName) {
List<TableDescription> ret = new ArrayList<>();
Set<String> uniqueSet = new HashSet<>();
ResultSet tables = null;
try {
tables = this.metaData.getTables(this.catalogName, schemaName, "%", new String[] { "TABLE", "VIEW" });
while (tables.next()) {
String tableName = tables.getString("TABLE_NAME");
if (uniqueSet.contains(tableName)) {
continue;
} else {
uniqueSet.add(tableName);
}
// 补充一下注释信息
try (ResultSet columns = connection.getMetaData()
.getColumns(this.catalogName, schemaName, tableName, null)) {
while (columns.next()) {
String columnName = columns.getString("COLUMN_NAME");
String remarks = columns.getString("REMARKS");
for (ColumnDescription cd : ret) {
if (columnName.equals(cd.getFieldName())) {
cd.setRemarks(remarks);
}
}
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
return ret;
}

TableDescription td = new TableDescription();
td.setSchemaName(schemaName);
td.setTableName(tableName);
td.setRemarks(tables.getString("REMARKS"));
td.setTableType(tables.getString("TABLE_TYPE").toUpperCase());
ret.add(td);
}
return ret;
} catch (SQLException e) {
throw new RuntimeException(e);
} finally {
try {
if (null != tables) {
tables.close();
tables = null;
}
} catch (SQLException e) {
}
}
}
@Override
public List<String> queryTablePrimaryKeys(Connection connection, String schemaName,
String tableName) {
Set<String> ret = new HashSet<>();
try (ResultSet primaryKeys = connection.getMetaData()
.getPrimaryKeys(this.catalogName, schemaName, tableName)) {
while (primaryKeys.next()) {
String name = primaryKeys.getString("COLUMN_NAME");
if (!ret.contains(name)) {
ret.add(name);
}
}
return new ArrayList<>(ret);
} catch (SQLException e) {
throw new RuntimeException(e);
}
}

@Override
public List<ColumnDescription> queryTableColumnMeta(String schemaName, String tableName) {
String sql = this.getTableFieldsQuerySQL(schemaName, tableName);
List<ColumnDescription> ret = this.querySelectSqlColumnMeta(sql);
ResultSet columns = null;
try {
columns = this.metaData.getColumns(this.catalogName, schemaName, tableName, null);
while (columns.next()) {
String columnName = columns.getString("COLUMN_NAME");
String remarks = columns.getString("REMARKS");
for (ColumnDescription cd : ret) {
if (columnName.equalsIgnoreCase(cd.getFieldName())) {
cd.setRemarks(remarks);
}
}
}
} catch (SQLException e) {
throw new RuntimeException(e);
} finally {
try {
if (null != columns) {
columns.close();
columns = null;
}
} catch (SQLException e) {
}
}
@Override
public SchemaTableData queryTableData(Connection connection, String schemaName, String tableName,
int rowCount) {
String fullTableName = getQuotedSchemaTableCombination(schemaName, tableName);
String querySQL = String.format("SELECT * FROM %s ", fullTableName);
SchemaTableData data = new SchemaTableData();
data.setSchemaName(schemaName);
data.setTableName(tableName);
data.setColumns(new ArrayList<>());
data.setRows(new ArrayList<>());
try (Statement st = connection.createStatement()) {
if (getDatabaseType() == DatabaseTypeEnum.HIVE) {
HivePrepareUtils.prepare(connection, schemaName, tableName);
}

return ret;
}
try (ResultSet rs = st.executeQuery(querySQL)) {
ResultSetMetaData m = rs.getMetaData();
int count = m.getColumnCount();
for (int i = 1; i <= count; i++) {
data.getColumns().add(m.getColumnLabel(i));
}

@Override
public List<String> queryTablePrimaryKeys(String schemaName, String tableName) {
Set<String> ret = new HashSet<>();
ResultSet primarykeys = null;
try {
primarykeys = this.metaData.getPrimaryKeys(this.catalogName, schemaName, tableName);
while (primarykeys.next()) {
String name = primarykeys.getString("COLUMN_NAME");
if (!ret.contains(name)) {
ret.add(name);
}
}
return new ArrayList<>(ret);
} catch (SQLException e) {
throw new RuntimeException(e);
} finally {
try {
if (null != primarykeys) {
primarykeys.close();
primarykeys = null;
}
} catch (SQLException e) {
}
}
}
int counter = 0;
while (rs.next() && counter++ < rowCount) {
List<Object> row = new ArrayList<>(count);
for (int i = 1; i <= count; i++) {
Object value = rs.getObject(i);
if (value != null && value instanceof byte[]) {
row.add(DbswitchStrUtils.toHexString((byte[]) value));
} else if (value != null && value instanceof java.sql.Clob) {
row.add(TypeConvertUtils.castToString(value));
} else if (value != null && value instanceof java.sql.Blob) {
byte[] bytes = TypeConvertUtils.castToByteArray(value);
row.add(DbswitchStrUtils.toHexString(bytes));
} else {
row.add(null == value ? null : value.toString());
}
}
data.getRows().add(row);
}

@Override
public abstract List<ColumnDescription> querySelectSqlColumnMeta(String sql);
return data;
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
}

@Override
public void testQuerySQL(String sql) {
String wrapperSql = this.getTestQuerySQL(sql);
try (Statement statement = this.connection.createStatement();) {
statement.execute(wrapperSql);
} catch (SQLException e) {
throw new RuntimeException(e);
}
}
@Override
public void testQuerySQL(Connection connection, String sql) {
String wrapperSql = this.getTestQuerySQL(sql);
try (Statement statement = connection.createStatement();) {
statement.execute(wrapperSql);
} catch (SQLException e) {
throw new RuntimeException(e);
}
}

@Override
public String getQuotedSchemaTableCombination(String schemaName, String tableName) {
return String.format(" \"%s\".\"%s\" ", schemaName, tableName);
}
@Override
public String getQuotedSchemaTableCombination(String schemaName, String tableName) {
return String.format(" \"%s\".\"%s\" ", schemaName, tableName);
}

@Override
public String getFieldDefinition(ColumnMetaData v, List<String> pks, boolean useAutoInc, boolean addCr) {
throw new RuntimeException("AbstractDatabase Unempliment!");
}
@Override
public String getFieldDefinition(ColumnMetaData v, List<String> pks, boolean useAutoInc,
boolean addCr, boolean withRemarks) {
throw new RuntimeException("AbstractDatabase Unimplemented!");
}

@Override
public String getPrimaryKeyAsString(List<String> pks) {
if (!pks.isEmpty()) {
StringBuilder sb = new StringBuilder();
sb.append("\"");
sb.append(StringUtils.join(pks, "\" , \""));
sb.append("\"");
return sb.toString();
}
@Override
public String getPrimaryKeyAsString(List<String> pks) {
if (!pks.isEmpty()) {
StringBuilder sb = new StringBuilder();
sb.append("\"");
sb.append(StringUtils.join(pks, "\" , \""));
sb.append("\"");
return sb.toString();
}

return "";
}
return "";
}

/**************************************
* internal function
**************************************/
@Override
public List<String> getTableColumnCommentDefinition(TableDescription td,
List<ColumnDescription> cds) {
throw new RuntimeException("AbstractDatabase Unimplemented!");
}

protected abstract String getTableFieldsQuerySQL(String schemaName, String tableName);
/**************************************
* internal function
**************************************/

protected abstract String getTestQuerySQL(String sql);
protected abstract String getTableFieldsQuerySQL(String schemaName, String tableName);

protected List<ColumnDescription> getSelectSqlColumnMeta(String querySQL, DatabaseTypeEnum dbtype) {
List<ColumnDescription> ret = new ArrayList<ColumnDescription>();
PreparedStatement pstmt = null;
ResultSet rs = null;
protected abstract String getTestQuerySQL(String sql);

try {
pstmt = this.connection.prepareStatement(querySQL);
rs = pstmt.executeQuery();
protected List<ColumnDescription> getSelectSqlColumnMeta(Connection connection, String querySQL) {
List<ColumnDescription> ret = new ArrayList<>();
try (Statement st = connection.createStatement()) {
if (getDatabaseType() == DatabaseTypeEnum.HIVE) {
HivePrepareUtils.setResultSetColumnNameNotUnique(connection);
}

ResultSetMetaData m = rs.getMetaData();
int columns = m.getColumnCount();
for (int i = 1; i <= columns; i++) {
String name = m.getColumnLabel(i);
if (null == name) {
name = m.getColumnName(i);
}
try (ResultSet rs = st.executeQuery(querySQL)) {
ResultSetMetaData m = rs.getMetaData();
int columns = m.getColumnCount();
for (int i = 1; i <= columns; i++) {
String name = m.getColumnLabel(i);
if (null == name) {
name = m.getColumnName(i);
}

ColumnDescription cd = new ColumnDescription();
cd.setFieldName(name);
cd.setLabelName(name);
cd.setFieldType(m.getColumnType(i));
if (0 != cd.getFieldType()) {
cd.setFieldTypeName(m.getColumnTypeName(i));
cd.setFiledTypeClassName(m.getColumnClassName(i));
cd.setDisplaySize(m.getColumnDisplaySize(i));
cd.setPrecisionSize(m.getPrecision(i));
cd.setScaleSize(m.getScale(i));
cd.setAutoIncrement(m.isAutoIncrement(i));
cd.setNullable(m.isNullable(i) != ResultSetMetaData.columnNoNulls);
} else {
// 处理视图中NULL as fieldName的情况
cd.setFieldTypeName("CHAR");
cd.setFiledTypeClassName(String.class.getName());
cd.setDisplaySize(1);
cd.setPrecisionSize(1);
cd.setScaleSize(0);
cd.setAutoIncrement(false);
cd.setNullable(true);
}
ColumnDescription cd = new ColumnDescription();
cd.setFieldName(name);
cd.setLabelName(name);
cd.setFieldType(m.getColumnType(i));
if (0 != cd.getFieldType()) {
cd.setFieldTypeName(m.getColumnTypeName(i));
cd.setFiledTypeClassName(m.getColumnClassName(i));
cd.setDisplaySize(m.getColumnDisplaySize(i));
cd.setPrecisionSize(m.getPrecision(i));
cd.setScaleSize(m.getScale(i));
cd.setAutoIncrement(m.isAutoIncrement(i));
cd.setNullable(m.isNullable(i) != ResultSetMetaData.columnNoNulls);
} else {
// 处理视图中NULL as fieldName的情况
cd.setFieldTypeName("CHAR");
cd.setFiledTypeClassName(String.class.getName());
cd.setDisplaySize(1);
cd.setPrecisionSize(1);
cd.setScaleSize(0);
cd.setAutoIncrement(false);
cd.setNullable(true);
}

boolean signed = false;
try {
signed = m.isSigned(i);
} catch (Exception ignored) {
// This JDBC Driver doesn't support the isSigned method
// nothing more we can do here by catch the exception.
}
cd.setSigned(signed);
cd.setDbType(dbtype);
boolean signed = false;
try {
signed = m.isSigned(i);
} catch (Exception ignored) {
// This JDBC Driver doesn't support the isSigned method
// nothing more we can do here by catch the exception.
}
cd.setSigned(signed);
cd.setDbType(getDatabaseType());

ret.add(cd);
}
ret.add(cd);
}

return ret;
} catch (SQLException e) {
throw new RuntimeException(e);
} finally {
JdbcOperatorUtils.closeResultSet(rs);
JdbcOperatorUtils.closeStatement(pstmt);
}
}
}
return ret;
}
} catch (SQLException e) {
throw new RuntimeException(e);
}
}

}
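
The hunk above moves AbstractDatabase from a held connection/metaData pair to methods that accept a java.sql.Connection and close their own ResultSet via try-with-resources. A minimal sketch of that calling pattern, reusing the querySchemaList body from the diff; the class name and the H2 URL in main are assumptions for illustration.

// Sketch: the caller owns the Connection's lifecycle, the method only borrows it.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class SchemaListSketch {

  // Mirrors querySchemaList(Connection): only the ResultSet is closed here, not the connection.
  static List<String> querySchemaList(Connection connection) {
    Set<String> ret = new HashSet<>();
    try (ResultSet schemas = connection.getMetaData().getSchemas()) {
      while (schemas.next()) {
        ret.add(schemas.getString("TABLE_SCHEM"));
      }
      return new ArrayList<>(ret);
    } catch (SQLException e) {
      throw new RuntimeException(e);
    }
  }

  public static void main(String[] args) throws SQLException {
    // Hypothetical JDBC URL; any driver on the classpath would do.
    try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo")) {
      querySchemaList(conn).forEach(System.out::println);
    }
  }
}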
@@ -34,7 +34,6 @@ import com.gitee.dbswitch.dbwriter.IDatabaseWriter;
import com.zaxxer.hikari.HikariDataSource;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;

@@ -114,6 +113,10 @@ public class MigrationHandler implements Supplier<Long> {
this.targetTableName = PatterNameUtils.getFinalName(td.getTableName(),
sourceProperties.getRegexTableMapper());

if (StringUtils.isEmpty(this.targetTableName)) {
throw new RuntimeException("表名的映射规则配置有误,不能将[" + this.sourceTableName + "]映射为空");
}

this.tableNameMapString = String.format("%s.%s --> %s.%s",
td.getSchemaName(), td.getTableName(),
targetSchemaName, targetTableName);
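
Unlike the preview service, which shows a placeholder, the handler above now fails fast when the table-name mapping produces an empty target. A small sketch of that guard in isolation; judging by the hasLength call in the next hunk, StringUtils here is likely Spring's org.springframework.util.StringUtils, and the class name plus the translated message are assumptions.

// Sketch: reject an empty mapped table name up front instead of carrying it forward.
import org.springframework.util.StringUtils;

public class TableNameGuardSketch {

  static void requireMappedName(String sourceTableName, String targetTableName) {
    if (StringUtils.isEmpty(targetTableName)) {
      // Translated from the diff: "表名的映射规则配置有误,不能将[...]映射为空"
      throw new RuntimeException(
          "Table name mapping is misconfigured: [" + sourceTableName + "] must not map to an empty name");
    }
  }

  public static void main(String[] args) {
    requireMappedName("t_user", "ods_t_user"); // passes
    // requireMappedName("t_user", "");        // would throw
  }
}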
@@ -160,16 +163,26 @@ public class MigrationHandler implements Supplier<Long> {
String targetColumnName = targetColumnDescriptions.get(i).getFieldName();
if (StringUtils.hasLength(targetColumnName)) {
columnMapperPairs.add(String.format("%s --> %s", sourceColumnName, targetColumnName));
mapChecker.put(sourceColumnName, targetColumnName);
} else {
columnMapperPairs.add(String.format("%s --> %s", sourceColumnName, "<!Field is Deleted>"));
columnMapperPairs.add(String.format(
"%s --> %s",
sourceColumnName,
String.format("<!Field(%s) is Deleted>", (i + 1))
));
}
mapChecker.put(sourceColumnName, targetColumnName);
}
log.info("Mapping relation : \ntable mapper :\n\t{} \ncolumn mapper :\n\t{} ",
tableNameMapString, columnMapperPairs.stream().collect(Collectors.joining("\n\t")));
tableNameMapString, String.join("\n\t", columnMapperPairs));
Set<String> valueSet = new HashSet<>(mapChecker.values());
if (valueSet.size() <= 0) {
throw new RuntimeException("字段映射配置有误,禁止通过映射将表所有的字段都删除!");
}
if (!valueSet.containsAll(this.targetPrimaryKeys)) {
throw new RuntimeException("字段映射配置有误,禁止通过映射将表的主键字段删除!");
}
if (mapChecker.keySet().size() != valueSet.size()) {
throw new RuntimeException("字段映射配置有误,多个字段映射到一个同名字段!");
throw new RuntimeException("字段映射配置有误,禁止将多个字段映射到一个同名字段!");
}

IDatabaseWriter writer = DatabaseWriterFactory.createDatabaseWriter(
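
The three rejection rules in the hunk above (an all-deleted mapping, a dropped primary-key column, and several source columns collapsing onto one target name) isolated into a standalone sketch; the class name, method shape, and translated messages are assumptions.

// Sketch: the same checks MigrationHandler performs on the source-to-target column map.
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class ColumnMappingChecks {

  static void validate(Map<String, String> mapChecker, List<String> targetPrimaryKeys) {
    Set<String> valueSet = new HashSet<>(mapChecker.values());
    if (valueSet.isEmpty()) {
      // the mapping must keep at least one column
      throw new RuntimeException("Column mapping error: all columns were mapped away");
    }
    if (!valueSet.containsAll(targetPrimaryKeys)) {
      // primary-key columns must survive the mapping
      throw new RuntimeException("Column mapping error: a primary-key column was dropped");
    }
    if (mapChecker.keySet().size() != valueSet.size()) {
      // two source columns collapsing onto one target name is rejected
      throw new RuntimeException("Column mapping error: several source columns map to one target name");
    }
  }

  public static void main(String[] args) {
    Map<String, String> mapping = new HashMap<>();
    mapping.put("id", "id");
    mapping.put("name", "user_name");
    validate(mapping, Arrays.asList("id")); // passes
  }
}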
@@ -258,24 +271,16 @@ public class MigrationHandler implements Supplier<Long> {
private Long doFullCoverSynchronize(IDatabaseWriter writer) {
final int BATCH_SIZE = fetchSize;

List<String> sourceFields = sourceColumnDescriptions.stream()
.map(ColumnDescription::getFieldName)
.collect(Collectors.toList());
List<String> targetFields = targetColumnDescriptions.stream()
.map(ColumnDescription::getFieldName)
.collect(Collectors.toList());
List<Integer> deletedFieldIndexes = new ArrayList<>();
for (int i = 0; i < targetFields.size(); ++i) {
if (StringUtils.isEmpty(targetFields.get(i))) {
deletedFieldIndexes.add(i);
List<String> sourceFields = new ArrayList<>();
List<String> targetFields = new ArrayList<>();
for (int i = 0; i < targetColumnDescriptions.size(); ++i) {
ColumnDescription scd = sourceColumnDescriptions.get(i);
ColumnDescription tcd = targetColumnDescriptions.get(i);
if (!StringUtils.isEmpty(tcd.getFieldName())) {
sourceFields.add(scd.getFieldName());
targetFields.add(tcd.getFieldName());
}
}
Collections.reverse(deletedFieldIndexes);
deletedFieldIndexes.forEach(i -> {
sourceFields.remove(sourceFields.get(i));
targetFields.remove(targetFields.get(i));
});

// 准备目的端的数据写入操作
writer.prepareWrite(targetSchemaName, targetTableName, targetFields);
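
The rewrite above replaces the old collect-then-remove-by-reversed-indexes approach with a single pass that keeps source/target field pairs only when the target name is non-empty. A self-contained sketch of that loop; ColumnDescription here is a stand-in holding just the field the loop reads.

// Sketch: build the two field lists in one pass, skipping columns the mapping deleted.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class FieldPairFilterSketch {

  static class ColumnDescription {
    final String fieldName;
    ColumnDescription(String fieldName) { this.fieldName = fieldName; }
    String getFieldName() { return fieldName; }
  }

  public static void main(String[] args) {
    List<ColumnDescription> source = Arrays.asList(
        new ColumnDescription("id"), new ColumnDescription("name"), new ColumnDescription("age"));
    // An empty target name means "column deleted by the mapping".
    List<ColumnDescription> target = Arrays.asList(
        new ColumnDescription("id"), new ColumnDescription(""), new ColumnDescription("user_age"));

    List<String> sourceFields = new ArrayList<>();
    List<String> targetFields = new ArrayList<>();
    for (int i = 0; i < target.size(); ++i) {
      if (!target.get(i).getFieldName().isEmpty()) {
        sourceFields.add(source.get(i).getFieldName());
        targetFields.add(target.get(i).getFieldName());
      }
    }
    System.out.println(sourceFields); // [id, age]
    System.out.println(targetFields); // [id, user_age]
  }
}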
@@ -351,26 +356,18 @@ public class MigrationHandler implements Supplier<Long> {
*/
private Long doIncreaseSynchronize(IDatabaseWriter writer) {
final int BATCH_SIZE = fetchSize;
List<String> sourceFields = sourceColumnDescriptions.stream()
.map(ColumnDescription::getFieldName)
.collect(Collectors.toList());
List<String> targetFields = targetColumnDescriptions.stream()
.map(ColumnDescription::getFieldName)
.collect(Collectors.toList());
List<Integer> deletedFieldIndexes = new ArrayList<>();
for (int i = 0; i < targetFields.size(); ++i) {
if (StringUtils.isEmpty(targetFields.get(i))) {
deletedFieldIndexes.add(i);
}
}
Collections.reverse(deletedFieldIndexes);
deletedFieldIndexes.forEach(i -> {
sourceFields.remove(sourceFields.get(i));
targetFields.remove(targetFields.get(i));
});

List<String> sourceFields = new ArrayList<>();
List<String> targetFields = new ArrayList<>();
Map<String, String> columnNameMaps = new HashMap<>();
for (int i = 0; i < sourceFields.size(); ++i) {
columnNameMaps.put(sourceFields.get(i), targetFields.get(i));
for (int i = 0; i < targetColumnDescriptions.size(); ++i) {
ColumnDescription scd = sourceColumnDescriptions.get(i);
ColumnDescription tcd = targetColumnDescriptions.get(i);
if (!StringUtils.isEmpty(tcd.getFieldName())) {
sourceFields.add(scd.getFieldName());
targetFields.add(tcd.getFieldName());
columnNameMaps.put(scd.getFieldName(), tcd.getFieldName());
}
}

TaskParamEntity.TaskParamEntityBuilder taskBuilder = TaskParamEntity.builder();