version for 1.6.3

This commit is contained in:
inrgihc
2022-01-23 00:53:48 +08:00
parent df66d0e1b4
commit 6fea38b0e4
281 changed files with 11250 additions and 10915 deletions

View File

@@ -5,7 +5,7 @@
<parent>
<groupId>com.gitee.dbswitch</groupId>
<artifactId>dbswitch-parent</artifactId>
<version>1.6.2</version>
<version>1.6.3</version>
</parent>
<artifactId>dbswitch-sql</artifactId>

View File

@@ -26,118 +26,123 @@ import org.apache.calcite.sql.type.ReturnTypes;
/**
* Overrides MssqlSqlDialect's unparseCall() method.
*
*
* @author tang
*/
public class TheMssqlSqlDialect extends MssqlSqlDialect {
public static final SqlDialect DEFAULT = new TheMssqlSqlDialect(EMPTY_CONTEXT
.withDatabaseProduct(DatabaseProduct.MSSQL).withIdentifierQuoteString("[").withCaseSensitive(false));
public static final SqlDialect DEFAULT = new TheMssqlSqlDialect(EMPTY_CONTEXT
.withDatabaseProduct(DatabaseProduct.MSSQL).withIdentifierQuoteString("[")
.withCaseSensitive(false));
private static final SqlFunction MSSQL_SUBSTRING = new SqlFunction("SUBSTRING", SqlKind.OTHER_FUNCTION,
ReturnTypes.ARG0_NULLABLE_VARYING, null, null, SqlFunctionCategory.STRING);
private static final SqlFunction MSSQL_SUBSTRING = new SqlFunction("SUBSTRING",
SqlKind.OTHER_FUNCTION,
ReturnTypes.ARG0_NULLABLE_VARYING, null, null, SqlFunctionCategory.STRING);
/** Creates a MssqlSqlDialect. */
public TheMssqlSqlDialect(Context context) {
super(context);
}
/**
* Creates a MssqlSqlDialect.
*/
public TheMssqlSqlDialect(Context context) {
super(context);
}
@Override
public void unparseCall(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
if (call.getOperator() == SqlStdOperatorTable.SUBSTRING) {
if (call.operandCount() != 3) {
throw new IllegalArgumentException("MSSQL SUBSTRING requires FROM and FOR arguments");
}
SqlUtil.unparseFunctionSyntax(MSSQL_SUBSTRING, writer, call);
} else {
switch (call.getKind()) {
case FLOOR:
if (call.operandCount() != 2) {
super.unparseCall(writer, call, leftPrec, rightPrec);
return;
}
unparseFloor(writer, call);
break;
@Override
public void unparseCall(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
if (call.getOperator() == SqlStdOperatorTable.SUBSTRING) {
if (call.operandCount() != 3) {
throw new IllegalArgumentException("MSSQL SUBSTRING requires FROM and FOR arguments");
}
SqlUtil.unparseFunctionSyntax(MSSQL_SUBSTRING, writer, call);
} else {
switch (call.getKind()) {
case FLOOR:
if (call.operandCount() != 2) {
super.unparseCall(writer, call, leftPrec, rightPrec);
return;
}
unparseFloor(writer, call);
break;
default:
SqlOperator operator = call.getOperator();
if (operator instanceof SqlRowOperator) {
SqlUtil.unparseFunctionSyntax(new TheSqlRowOperator(), writer, call);
} else {
super.unparseCall(writer, call, leftPrec, rightPrec);
}
break;
}
}
}
default:
SqlOperator operator = call.getOperator();
if (operator instanceof SqlRowOperator) {
SqlUtil.unparseFunctionSyntax(new TheSqlRowOperator(), writer, call);
} else {
super.unparseCall(writer, call, leftPrec, rightPrec);
}
break;
}
}
}
/**
* Unparses datetime floor for Microsoft SQL Server. There is no TRUNC function,
* so simulate this using calls to CONVERT.
*
* @param writer Writer
* @param call Call
*/
private void unparseFloor(SqlWriter writer, SqlCall call) {
SqlLiteral node = call.operand(1);
TimeUnitRange unit = (TimeUnitRange) node.getValue();
/**
* Unparses datetime floor for Microsoft SQL Server. There is no TRUNC function, so simulate this
* using calls to CONVERT.
*
* @param writer Writer
* @param call Call
*/
private void unparseFloor(SqlWriter writer, SqlCall call) {
SqlLiteral node = call.operand(1);
TimeUnitRange unit = (TimeUnitRange) node.getValue();
switch (unit) {
case YEAR:
unparseFloorWithUnit(writer, call, 4, "-01-01");
break;
case MONTH:
unparseFloorWithUnit(writer, call, 7, "-01");
break;
case WEEK:
writer.print("CONVERT(DATETIME, CONVERT(VARCHAR(10), " + "DATEADD(day, - (6 + DATEPART(weekday, ");
call.operand(0).unparse(writer, 0, 0);
writer.print(")) % 7, ");
call.operand(0).unparse(writer, 0, 0);
writer.print("), 126))");
break;
case DAY:
unparseFloorWithUnit(writer, call, 10, "");
break;
case HOUR:
unparseFloorWithUnit(writer, call, 13, ":00:00");
break;
case MINUTE:
unparseFloorWithUnit(writer, call, 16, ":00");
break;
case SECOND:
unparseFloorWithUnit(writer, call, 19, ":00");
break;
default:
throw new IllegalArgumentException("MSSQL does not support FLOOR for time unit: " + unit);
}
}
switch (unit) {
case YEAR:
unparseFloorWithUnit(writer, call, 4, "-01-01");
break;
case MONTH:
unparseFloorWithUnit(writer, call, 7, "-01");
break;
case WEEK:
writer.print(
"CONVERT(DATETIME, CONVERT(VARCHAR(10), " + "DATEADD(day, - (6 + DATEPART(weekday, ");
call.operand(0).unparse(writer, 0, 0);
writer.print(")) % 7, ");
call.operand(0).unparse(writer, 0, 0);
writer.print("), 126))");
break;
case DAY:
unparseFloorWithUnit(writer, call, 10, "");
break;
case HOUR:
unparseFloorWithUnit(writer, call, 13, ":00:00");
break;
case MINUTE:
unparseFloorWithUnit(writer, call, 16, ":00");
break;
case SECOND:
unparseFloorWithUnit(writer, call, 19, ":00");
break;
default:
throw new IllegalArgumentException("MSSQL does not support FLOOR for time unit: " + unit);
}
}
private void unparseFloorWithUnit(SqlWriter writer, SqlCall call, int charLen, String offset) {
writer.print("CONVERT");
SqlWriter.Frame frame = writer.startList("(", ")");
writer.print("DATETIME, CONVERT(VARCHAR(" + charLen + "), ");
call.operand(0).unparse(writer, 0, 0);
writer.print(", 126)");
private void unparseFloorWithUnit(SqlWriter writer, SqlCall call, int charLen, String offset) {
writer.print("CONVERT");
SqlWriter.Frame frame = writer.startList("(", ")");
writer.print("DATETIME, CONVERT(VARCHAR(" + charLen + "), ");
call.operand(0).unparse(writer, 0, 0);
writer.print(", 126)");
if (offset.length() > 0) {
writer.print("+'" + offset + "'");
}
writer.endList(frame);
}
if (offset.length() > 0) {
writer.print("+'" + offset + "'");
}
writer.endList(frame);
}
/**
* Appends a string literal to a buffer.
*
* @param buf Buffer
* @param charsetName Character set name, e.g. "utf16", or null
* @param val String value
*/
@Override
public void quoteStringLiteral(StringBuilder buf, String charsetName, String val) {
buf.append(literalQuoteString);
buf.append(val.replace(literalEndQuoteString, literalEscapedQuote));
buf.append(literalEndQuoteString);
}
/**
* Appends a string literal to a buffer.
*
* @param buf Buffer
* @param charsetName Character set name, e.g. "utf16", or null
* @param val String value
*/
@Override
public void quoteStringLiteral(StringBuilder buf, String charsetName, String val) {
buf.append(literalQuoteString);
buf.append(val.replace(literalEndQuoteString, literalEscapedQuote));
buf.append(literalEndQuoteString);
}
}
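
For context, a minimal usage sketch of the dialect above: parse a query containing FLOOR(ts TO DAY) with Calcite's parser and render it back through TheMssqlSqlDialect, which routes the call into unparseFloor(). The table and column names are invented for illustration, and the printed SQL is only the expected shape, not output captured from this commit.

import com.gitee.dbswitch.sql.calcite.TheMssqlSqlDialect;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParser;

public class MssqlFloorDemo {

  public static void main(String[] args) throws Exception {
    // FLOOR(ts TO DAY) exercises the CONVERT-based rewrite in unparseFloor().
    SqlNode node = SqlParser
        .create("SELECT FLOOR(create_time TO DAY) FROM orders")
        .parseQuery();
    // Expected shape: SELECT CONVERT(DATETIME, CONVERT(VARCHAR(10), [create_time], 126)) FROM [orders]
    System.out.println(node.toSqlString(TheMssqlSqlDialect.DEFAULT).getSql());
  }
}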

View File

@@ -24,118 +24,119 @@ import org.apache.calcite.sql.fun.SqlRowOperator;
/**
* Overrides MysqlSqlDialect's unparseCall() method.
*
*
* @author tang
*/
public class TheMysqlSqlDialect extends MysqlSqlDialect {
public static final SqlDialect DEFAULT = new TheMysqlSqlDialect(
EMPTY_CONTEXT.withDatabaseProduct(DatabaseProduct.MYSQL).withIdentifierQuoteString("`")
.withUnquotedCasing(Casing.UNCHANGED).withNullCollation(NullCollation.LOW));
public static final SqlDialect DEFAULT = new TheMysqlSqlDialect(
EMPTY_CONTEXT.withDatabaseProduct(DatabaseProduct.MYSQL).withIdentifierQuoteString("`")
.withUnquotedCasing(Casing.UNCHANGED).withNullCollation(NullCollation.LOW));
public TheMysqlSqlDialect(Context context) {
super(context);
}
public TheMysqlSqlDialect(Context context) {
super(context);
}
/**
* Unparses datetime floor for MySQL. There is no TRUNC function, so simulate
* this using calls to DATE_FORMAT.
*
* @param writer Writer
* @param call Call
*/
private void unparseFloor(SqlWriter writer, SqlCall call) {
SqlLiteral node = call.operand(1);
TimeUnitRange unit = (TimeUnitRange) node.getValue();
/**
* Unparses datetime floor for MySQL. There is no TRUNC function, so simulate this using calls to
* DATE_FORMAT.
*
* @param writer Writer
* @param call Call
*/
private void unparseFloor(SqlWriter writer, SqlCall call) {
SqlLiteral node = call.operand(1);
TimeUnitRange unit = (TimeUnitRange) node.getValue();
if (unit == TimeUnitRange.WEEK) {
writer.print("STR_TO_DATE");
SqlWriter.Frame frame = writer.startList("(", ")");
if (unit == TimeUnitRange.WEEK) {
writer.print("STR_TO_DATE");
SqlWriter.Frame frame = writer.startList("(", ")");
writer.print("DATE_FORMAT(");
call.operand(0).unparse(writer, 0, 0);
writer.print(", '%x%v-1'), '%x%v-%w'");
writer.endList(frame);
return;
}
writer.print("DATE_FORMAT(");
call.operand(0).unparse(writer, 0, 0);
writer.print(", '%x%v-1'), '%x%v-%w'");
writer.endList(frame);
return;
}
String format;
switch (unit) {
case YEAR:
format = "%Y-01-01";
break;
case MONTH:
format = "%Y-%m-01";
break;
case DAY:
format = "%Y-%m-%d";
break;
case HOUR:
format = "%Y-%m-%d %H:00:00";
break;
case MINUTE:
format = "%Y-%m-%d %H:%i:00";
break;
case SECOND:
format = "%Y-%m-%d %H:%i:%s";
break;
default:
throw new AssertionError("MYSQL does not support FLOOR for time unit: " + unit);
}
String format;
switch (unit) {
case YEAR:
format = "%Y-01-01";
break;
case MONTH:
format = "%Y-%m-01";
break;
case DAY:
format = "%Y-%m-%d";
break;
case HOUR:
format = "%Y-%m-%d %H:00:00";
break;
case MINUTE:
format = "%Y-%m-%d %H:%i:00";
break;
case SECOND:
format = "%Y-%m-%d %H:%i:%s";
break;
default:
throw new AssertionError("MYSQL does not support FLOOR for time unit: " + unit);
}
writer.print("DATE_FORMAT");
SqlWriter.Frame frame = writer.startList("(", ")");
call.operand(0).unparse(writer, 0, 0);
writer.sep(",", true);
writer.print("'" + format + "'");
writer.endList(frame);
}
writer.print("DATE_FORMAT");
SqlWriter.Frame frame = writer.startList("(", ")");
call.operand(0).unparse(writer, 0, 0);
writer.sep(",", true);
writer.print("'" + format + "'");
writer.endList(frame);
}
@Override
public void unparseCall(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
switch (call.getKind()) {
case FLOOR:
if (call.operandCount() != 2) {
super.unparseCall(writer, call, leftPrec, rightPrec);
return;
}
@Override
public void unparseCall(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
switch (call.getKind()) {
case FLOOR:
if (call.operandCount() != 2) {
super.unparseCall(writer, call, leftPrec, rightPrec);
return;
}
unparseFloor(writer, call);
break;
unparseFloor(writer, call);
break;
default:
SqlOperator operator = call.getOperator();
if (operator instanceof SqlRowOperator) {
// Handle the ROW keyword issue in INSERT statements here
SqlUtil.unparseFunctionSyntax(new TheSqlRowOperator(), writer, call);
} else if (call instanceof SqlOrderBy) {
// Handle the LIMIT/OFFSET pagination issue here
SqlOrderBy thecall = (SqlOrderBy) call;
TheSqlOrderBy newcall = new TheSqlOrderBy(call.getParserPosition(), thecall.query, thecall.orderList,
thecall.offset, thecall.fetch);
newcall.getOperator().unparse(writer, newcall, leftPrec, rightPrec);
// TheSqlOrderBy.OPERATOR.unparse(writer, thecall, leftPrec, rightPrec);
} else {
// All other cases fall through to here
operator.unparse(writer, call, leftPrec, rightPrec);
}
break;
default:
SqlOperator operator = call.getOperator();
if (operator instanceof SqlRowOperator) {
// Handle the ROW keyword issue in INSERT statements here
SqlUtil.unparseFunctionSyntax(new TheSqlRowOperator(), writer, call);
} else if (call instanceof SqlOrderBy) {
// Handle the LIMIT/OFFSET pagination issue here
SqlOrderBy thecall = (SqlOrderBy) call;
TheSqlOrderBy newcall = new TheSqlOrderBy(call.getParserPosition(), thecall.query,
thecall.orderList,
thecall.offset, thecall.fetch);
newcall.getOperator().unparse(writer, newcall, leftPrec, rightPrec);
// TheSqlOrderBy.OPERATOR.unparse(writer, thecall, leftPrec, rightPrec);
} else {
// All other cases fall through to here
operator.unparse(writer, call, leftPrec, rightPrec);
}
break;
}
}
}
}
/**
* Appends a string literal to a buffer.
*
* @param buf Buffer
* @param charsetName Character set name, e.g. "utf16", or null
* @param val String value
*/
@Override
public void quoteStringLiteral(StringBuilder buf, String charsetName, String val) {
buf.append(literalQuoteString);
buf.append(val.replace(literalEndQuoteString, literalEscapedQuote));
buf.append(literalEndQuoteString);
}
/**
* Appends a string literal to a buffer.
*
* @param buf Buffer
* @param charsetName Character set name, e.g. "utf16", or null
* @param val String value
*/
@Override
public void quoteStringLiteral(StringBuilder buf, String charsetName, String val) {
buf.append(literalQuoteString);
buf.append(val.replace(literalEndQuoteString, literalEscapedQuote));
buf.append(literalEndQuoteString);
}
}
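
A similar sketch for the MySQL dialect: FLOOR(... TO MONTH) should come out as DATE_FORMAT(..., '%Y-%m-01'), and a standard OFFSET/FETCH clause should be rewritten into LIMIT/OFFSET through TheSqlOrderBy. Identifiers and the exact rendered SQL are illustrative assumptions.

import com.gitee.dbswitch.sql.calcite.TheMysqlSqlDialect;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParser;

public class MysqlDialectDemo {

  public static void main(String[] args) throws Exception {
    // FLOOR to month granularity -> DATE_FORMAT(`create_time`, '%Y-%m-01')
    SqlNode floorQuery = SqlParser
        .create("SELECT FLOOR(create_time TO MONTH) FROM orders")
        .parseQuery();
    System.out.println(floorQuery.toSqlString(TheMysqlSqlDialect.DEFAULT).getSql());

    // Standard paging syntax -> expected to end with LIMIT 10 OFFSET 20
    SqlNode pageQuery = SqlParser
        .create("SELECT id FROM orders ORDER BY id OFFSET 20 ROWS FETCH NEXT 10 ROWS ONLY")
        .parseQuery();
    System.out.println(pageQuery.toSqlString(TheMysqlSqlDialect.DEFAULT).getSql());
  }
}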

View File

@@ -9,7 +9,6 @@
/////////////////////////////////////////////////////////////
package com.gitee.dbswitch.sql.calcite;
import org.apache.calcite.sql.dialect.OracleSqlDialect;
import org.apache.calcite.avatica.util.TimeUnitRange;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.rel.type.RelDataTypeSystemImpl;
@@ -19,6 +18,7 @@ import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.SqlUtil;
import org.apache.calcite.sql.SqlWriter;
import org.apache.calcite.sql.dialect.OracleSqlDialect;
import org.apache.calcite.sql.fun.SqlFloorFunction;
import org.apache.calcite.sql.fun.SqlLibraryOperators;
import org.apache.calcite.sql.fun.SqlRowOperator;
@@ -27,81 +27,85 @@ import org.apache.calcite.sql.type.SqlTypeName;
/**
* Overrides OracleSqlDialect's unparseCall() method.
*
*
* @author tang
*/
public class TheOracleSqlDialect extends OracleSqlDialect {
/** OracleDB type system. */
private static final RelDataTypeSystem ORACLE_TYPE_SYSTEM = new RelDataTypeSystemImpl() {
@Override
public int getMaxPrecision(SqlTypeName typeName) {
switch (typeName) {
case VARCHAR:
// Maximum size of 4000 bytes for varchar2.
return 4000;
default:
return super.getMaxPrecision(typeName);
}
}
};
/**
* OracleDB type system.
*/
private static final RelDataTypeSystem ORACLE_TYPE_SYSTEM = new RelDataTypeSystemImpl() {
@Override
public int getMaxPrecision(SqlTypeName typeName) {
switch (typeName) {
case VARCHAR:
// Maximum size of 4000 bytes for varchar2.
return 4000;
default:
return super.getMaxPrecision(typeName);
}
}
};
public static final SqlDialect DEFAULT = new TheOracleSqlDialect(
EMPTY_CONTEXT.withDatabaseProduct(DatabaseProduct.ORACLE).withIdentifierQuoteString("\"")
.withDataTypeSystem(ORACLE_TYPE_SYSTEM));
public static final SqlDialect DEFAULT = new TheOracleSqlDialect(
EMPTY_CONTEXT.withDatabaseProduct(DatabaseProduct.ORACLE).withIdentifierQuoteString("\"")
.withDataTypeSystem(ORACLE_TYPE_SYSTEM));
/** Creates an OracleSqlDialect. */
public TheOracleSqlDialect(Context context) {
super(context);
}
/**
* Creates an OracleSqlDialect.
*/
public TheOracleSqlDialect(Context context) {
super(context);
}
@Override
public void unparseCall(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
if (call.getOperator() == SqlStdOperatorTable.SUBSTRING) {
SqlUtil.unparseFunctionSyntax(SqlLibraryOperators.SUBSTR, writer, call);
} else {
switch (call.getKind()) {
case FLOOR:
if (call.operandCount() != 2) {
super.unparseCall(writer, call, leftPrec, rightPrec);
return;
}
@Override
public void unparseCall(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
if (call.getOperator() == SqlStdOperatorTable.SUBSTRING) {
SqlUtil.unparseFunctionSyntax(SqlLibraryOperators.SUBSTR, writer, call);
} else {
switch (call.getKind()) {
case FLOOR:
if (call.operandCount() != 2) {
super.unparseCall(writer, call, leftPrec, rightPrec);
return;
}
final SqlLiteral timeUnitNode = call.operand(1);
final TimeUnitRange timeUnit = timeUnitNode.getValueAs(TimeUnitRange.class);
final SqlLiteral timeUnitNode = call.operand(1);
final TimeUnitRange timeUnit = timeUnitNode.getValueAs(TimeUnitRange.class);
SqlCall call2 = SqlFloorFunction.replaceTimeUnitOperand(call, timeUnit.name(),
timeUnitNode.getParserPosition());
SqlFloorFunction.unparseDatetimeFunction(writer, call2, "TRUNC", true);
break;
SqlCall call2 = SqlFloorFunction.replaceTimeUnitOperand(call, timeUnit.name(),
timeUnitNode.getParserPosition());
SqlFloorFunction.unparseDatetimeFunction(writer, call2, "TRUNC", true);
break;
default:
SqlOperator operator = call.getOperator();
if (operator instanceof SqlRowOperator) {
SqlUtil.unparseFunctionSyntax(new TheSqlRowOperator(), writer, call);
} else {
super.unparseCall(writer, call, leftPrec, rightPrec);
}
break;
default:
SqlOperator operator = call.getOperator();
if (operator instanceof SqlRowOperator) {
SqlUtil.unparseFunctionSyntax(new TheSqlRowOperator(), writer, call);
} else {
super.unparseCall(writer, call, leftPrec, rightPrec);
}
break;
}
}
}
/**
* Appends a string literal to a buffer.
*
* @param buf Buffer
* @param charsetName Character set name, e.g. "utf16", or null
* @param val String value
*/
@Override
public void quoteStringLiteral(StringBuilder buf, String charsetName, String val) {
buf.append(literalQuoteString);
buf.append(val.replace(literalEndQuoteString, literalEscapedQuote));
buf.append(literalEndQuoteString);
}
}
}
}
/**
* Appends a string literal to a buffer.
*
* @param buf Buffer
* @param charsetName Character set name, e.g. "utf16", or null
* @param val String value
*/
@Override
public void quoteStringLiteral(StringBuilder buf, String charsetName, String val) {
buf.append(literalQuoteString);
buf.append(val.replace(literalEndQuoteString, literalEscapedQuote));
buf.append(literalEndQuoteString);
}
}
// End OracleSqlDialect.java

View File

@@ -26,80 +26,84 @@ import org.apache.calcite.sql.type.SqlTypeName;
/**
* Overrides PostgresqlSqlDialect's unparseCall() method.
*
*
* @author tang
*/
public class ThePostgresqlSqlDialect extends PostgresqlSqlDialect {
/** PostgreSQL type system. */
private static final RelDataTypeSystem POSTGRESQL_TYPE_SYSTEM = new RelDataTypeSystemImpl() {
@Override
public int getMaxPrecision(SqlTypeName typeName) {
switch (typeName) {
case VARCHAR:
// From htup_details.h in postgresql:
// MaxAttrSize is a somewhat arbitrary upper limit on the declared size of
// data fields of char(n) and similar types. It need not have anything
// directly to do with the *actual* upper limit of varlena values, which
// is currently 1Gb (see TOAST structures in postgres.h). I've set it
// at 10Mb which seems like a reasonable number --- tgl 8/6/00. */
return 10 * 1024 * 1024;
default:
return super.getMaxPrecision(typeName);
}
}
};
/**
* PostgreSQL type system.
*/
private static final RelDataTypeSystem POSTGRESQL_TYPE_SYSTEM = new RelDataTypeSystemImpl() {
@Override
public int getMaxPrecision(SqlTypeName typeName) {
switch (typeName) {
case VARCHAR:
// From htup_details.h in postgresql:
// MaxAttrSize is a somewhat arbitrary upper limit on the declared size of
// data fields of char(n) and similar types. It need not have anything
// directly to do with the *actual* upper limit of varlena values, which
// is currently 1Gb (see TOAST structures in postgres.h). I've set it
// at 10Mb which seems like a reasonable number --- tgl 8/6/00. */
return 10 * 1024 * 1024;
default:
return super.getMaxPrecision(typeName);
}
}
};
public static final SqlDialect DEFAULT = new ThePostgresqlSqlDialect(
EMPTY_CONTEXT.withDatabaseProduct(DatabaseProduct.POSTGRESQL).withIdentifierQuoteString("\"")
.withUnquotedCasing(Casing.TO_LOWER).withDataTypeSystem(POSTGRESQL_TYPE_SYSTEM));
public static final SqlDialect DEFAULT = new ThePostgresqlSqlDialect(
EMPTY_CONTEXT.withDatabaseProduct(DatabaseProduct.POSTGRESQL).withIdentifierQuoteString("\"")
.withUnquotedCasing(Casing.TO_LOWER).withDataTypeSystem(POSTGRESQL_TYPE_SYSTEM));
/** Creates a PostgresqlSqlDialect. */
public ThePostgresqlSqlDialect(Context context) {
super(context);
}
/**
* Creates a PostgresqlSqlDialect.
*/
public ThePostgresqlSqlDialect(Context context) {
super(context);
}
@Override
public void unparseCall(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
switch (call.getKind()) {
case FLOOR:
if (call.operandCount() != 2) {
super.unparseCall(writer, call, leftPrec, rightPrec);
return;
}
@Override
public void unparseCall(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
switch (call.getKind()) {
case FLOOR:
if (call.operandCount() != 2) {
super.unparseCall(writer, call, leftPrec, rightPrec);
return;
}
final SqlLiteral timeUnitNode = call.operand(1);
final TimeUnitRange timeUnit = timeUnitNode.getValueAs(TimeUnitRange.class);
final SqlLiteral timeUnitNode = call.operand(1);
final TimeUnitRange timeUnit = timeUnitNode.getValueAs(TimeUnitRange.class);
SqlCall call2 = SqlFloorFunction.replaceTimeUnitOperand(call, timeUnit.name(),
timeUnitNode.getParserPosition());
SqlFloorFunction.unparseDatetimeFunction(writer, call2, "DATE_TRUNC", false);
break;
SqlCall call2 = SqlFloorFunction.replaceTimeUnitOperand(call, timeUnit.name(),
timeUnitNode.getParserPosition());
SqlFloorFunction.unparseDatetimeFunction(writer, call2, "DATE_TRUNC", false);
break;
default:
SqlOperator operator = call.getOperator();
if (operator instanceof SqlRowOperator) {
SqlUtil.unparseFunctionSyntax(new TheSqlRowOperator(), writer, call);
} else {
super.unparseCall(writer, call, leftPrec, rightPrec);
}
break;
default:
SqlOperator operator = call.getOperator();
if (operator instanceof SqlRowOperator) {
SqlUtil.unparseFunctionSyntax(new TheSqlRowOperator(), writer, call);
} else {
super.unparseCall(writer, call, leftPrec, rightPrec);
}
break;
}
}
}
}
/**
* Appends a string literal to a buffer.
*
* @param buf Buffer
* @param charsetName Character set name, e.g. "utf16", or null
* @param val String value
*/
@Override
public void quoteStringLiteral(StringBuilder buf, String charsetName, String val) {
buf.append(literalQuoteString);
buf.append(val.replace(literalEndQuoteString, literalEscapedQuote));
buf.append(literalEndQuoteString);
}
/**
* Appends a string literal to a buffer.
*
* @param buf Buffer
* @param charsetName Character set name, e.g. "utf16", or null
* @param val String value
*/
@Override
public void quoteStringLiteral(StringBuilder buf, String charsetName, String val) {
buf.append(literalQuoteString);
buf.append(val.replace(literalEndQuoteString, literalEscapedQuote));
buf.append(literalEndQuoteString);
}
}

View File

@@ -25,91 +25,95 @@ import org.apache.calcite.util.ImmutableNullableList;
/**
* Reimplements Calcite's SqlOrderBy.
*
* @author tang
*
* @author tang
*/
public class TheSqlOrderBy extends SqlOrderBy {
public static final SqlSpecialOperator OPERATOR = new Operator() {
@Override
public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) {
return new TheSqlOrderBy(pos, operands[0], (SqlNodeList) operands[1], operands[2], operands[3]);
}
};
public static final SqlSpecialOperator OPERATOR = new Operator() {
@Override
public SqlCall createCall(SqlLiteral functionQualifier, SqlParserPos pos, SqlNode... operands) {
return new TheSqlOrderBy(pos, operands[0], (SqlNodeList) operands[1], operands[2],
operands[3]);
}
};
public final SqlNode query;
public final SqlNodeList orderList;
public final SqlNode offset;
public final SqlNode fetch;
public final SqlNode query;
public final SqlNodeList orderList;
public final SqlNode offset;
public final SqlNode fetch;
// ~ Constructors -----------------------------------------------------------
// ~ Constructors -----------------------------------------------------------
public TheSqlOrderBy(SqlParserPos pos, SqlNode query, SqlNodeList orderList, SqlNode offset, SqlNode fetch) {
super(pos, query, orderList, offset, fetch);
this.query = query;
this.orderList = orderList;
this.offset = offset;
this.fetch = fetch;
}
public TheSqlOrderBy(SqlParserPos pos, SqlNode query, SqlNodeList orderList, SqlNode offset,
SqlNode fetch) {
super(pos, query, orderList, offset, fetch);
this.query = query;
this.orderList = orderList;
this.offset = offset;
this.fetch = fetch;
}
// ~ Methods ----------------------------------------------------------------
// ~ Methods ----------------------------------------------------------------
@Override
public SqlKind getKind() {
return SqlKind.ORDER_BY;
}
@Override
public SqlKind getKind() {
return SqlKind.ORDER_BY;
}
@Override
public SqlOperator getOperator() {
return OPERATOR;
}
@Override
public SqlOperator getOperator() {
return OPERATOR;
}
@Override
public List<SqlNode> getOperandList() {
return ImmutableNullableList.of(query, orderList, offset, fetch);
}
@Override
public List<SqlNode> getOperandList() {
return ImmutableNullableList.of(query, orderList, offset, fetch);
}
/** Definition of {@code ORDER BY} operator. */
private static class Operator extends SqlSpecialOperator {
private Operator() {
// NOTE: make precedence lower than SELECT to avoid extra parens
super("ORDER BY", SqlKind.ORDER_BY, 0);
}
/**
* Definition of {@code ORDER BY} operator.
*/
private static class Operator extends SqlSpecialOperator {
@Override
public SqlSyntax getSyntax() {
return SqlSyntax.POSTFIX;
}
private Operator() {
// NOTE: make precedence lower then SELECT to avoid extra parens
super("ORDER BY", SqlKind.ORDER_BY, 0);
}
@Override
public void unparse(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
SqlOrderBy orderBy = (SqlOrderBy) call;
final SqlWriter.Frame frame = writer.startList(SqlWriter.FrameTypeEnum.ORDER_BY);
orderBy.query.unparse(writer, getLeftPrec(), getRightPrec());
if (orderBy.orderList != SqlNodeList.EMPTY) {
writer.sep(getName());
final SqlWriter.Frame listFrame = writer.startList(SqlWriter.FrameTypeEnum.ORDER_BY_LIST);
unparseListClause(writer, orderBy.orderList);
writer.endList(listFrame);
}
@Override
public SqlSyntax getSyntax() {
return SqlSyntax.POSTFIX;
}
if (orderBy.fetch != null) {
final SqlWriter.Frame frame3 = writer.startList(SqlWriter.FrameTypeEnum.FETCH);
writer.newlineAndIndent();
writer.keyword("LIMIT");
orderBy.fetch.unparse(writer, -1, -1);
writer.endList(frame3);
}
@Override
public void unparse(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
SqlOrderBy orderBy = (SqlOrderBy) call;
final SqlWriter.Frame frame = writer.startList(SqlWriter.FrameTypeEnum.ORDER_BY);
orderBy.query.unparse(writer, getLeftPrec(), getRightPrec());
if (orderBy.orderList != SqlNodeList.EMPTY) {
writer.sep(getName());
final SqlWriter.Frame listFrame = writer.startList(SqlWriter.FrameTypeEnum.ORDER_BY_LIST);
unparseListClause(writer, orderBy.orderList);
writer.endList(listFrame);
}
if (orderBy.offset != null) {
final SqlWriter.Frame frame2 = writer.startList(SqlWriter.FrameTypeEnum.OFFSET);
writer.keyword("OFFSET");
orderBy.offset.unparse(writer, -1, -1);
writer.endList(frame2);
}
if (orderBy.fetch != null) {
final SqlWriter.Frame frame3 = writer.startList(SqlWriter.FrameTypeEnum.FETCH);
writer.newlineAndIndent();
writer.keyword("LIMIT");
orderBy.fetch.unparse(writer, -1, -1);
writer.endList(frame3);
}
writer.endList(frame);
}
}
if (orderBy.offset != null) {
final SqlWriter.Frame frame2 = writer.startList(SqlWriter.FrameTypeEnum.OFFSET);
writer.keyword("OFFSET");
orderBy.offset.unparse(writer, -1, -1);
writer.endList(frame2);
}
writer.endList(frame);
}
}
}

View File

@@ -9,6 +9,8 @@
/////////////////////////////////////////////////////////////
package com.gitee.dbswitch.sql.calcite;
import java.util.AbstractList;
import java.util.Map;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlKind;
@@ -20,60 +22,59 @@ import org.apache.calcite.sql.SqlWriter;
import org.apache.calcite.sql.type.InferTypes;
import org.apache.calcite.sql.type.OperandTypes;
import org.apache.calcite.util.Pair;
import java.util.AbstractList;
import java.util.Map;
/**
* Code adapted from org.apache.calcite.sql.fun.SqlRowOperator; the unparse() method is
* overridden here to handle the ROW keyword issue in INSERT statements.
*
* Code adapted from org.apache.calcite.sql.fun.SqlRowOperator; the unparse() method is overridden here to handle the ROW keyword issue in INSERT statements.
*
* @author tang
*/
public class TheSqlRowOperator extends SqlSpecialOperator {
// ~ Constructors -----------------------------------------------------------
// ~ Constructors -----------------------------------------------------------
public TheSqlRowOperator() {
super("", SqlKind.ROW, MDX_PRECEDENCE, false, null, InferTypes.RETURN_TYPE, OperandTypes.VARIADIC);
}
public TheSqlRowOperator() {
super("", SqlKind.ROW, MDX_PRECEDENCE, false, null, InferTypes.RETURN_TYPE,
OperandTypes.VARIADIC);
}
// ~ Methods ----------------------------------------------------------------
// ~ Methods ----------------------------------------------------------------
// implement SqlOperator
@Override
public SqlSyntax getSyntax() {
// Function syntax would work too.
return SqlSyntax.SPECIAL;
}
// implement SqlOperator
@Override
public SqlSyntax getSyntax() {
// Function syntax would work too.
return SqlSyntax.SPECIAL;
}
@Override
public RelDataType inferReturnType(final SqlOperatorBinding opBinding) {
// The type of a ROW(e1,e2) expression is a record with the types
// {e1type,e2type}. According to the standard, field names are
// implementation-defined.
return opBinding.getTypeFactory().createStructType(new AbstractList<Map.Entry<String, RelDataType>>() {
@Override
public RelDataType inferReturnType(final SqlOperatorBinding opBinding) {
// The type of a ROW(e1,e2) expression is a record with the types
// {e1type,e2type}. According to the standard, field names are
// implementation-defined.
return opBinding.getTypeFactory()
.createStructType(new AbstractList<Map.Entry<String, RelDataType>>() {
@Override
public Map.Entry<String, RelDataType> get(int index) {
return Pair.of(SqlUtil.deriveAliasFromOrdinal(index), opBinding.getOperandType(index));
}
@Override
public Map.Entry<String, RelDataType> get(int index) {
return Pair.of(SqlUtil.deriveAliasFromOrdinal(index), opBinding.getOperandType(index));
}
@Override
public int size() {
return opBinding.getOperandCount();
}
});
}
@Override
public int size() {
return opBinding.getOperandCount();
}
});
}
@Override
public void unparse(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
SqlUtil.unparseFunctionSyntax(this, writer, call);
}
@Override
public void unparse(SqlWriter writer, SqlCall call, int leftPrec, int rightPrec) {
SqlUtil.unparseFunctionSyntax(this, writer, call);
}
// override SqlOperator
@Override
public boolean requiresDecimalExpansion() {
return false;
}
// override SqlOperator
@Override
public boolean requiresDecimalExpansion() {
return false;
}
}
// End TheSqlRowOperator.java
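
A short sketch of the problem this operator works around: without the override, multi-column VALUES rows can be rendered with a leading ROW keyword, which MySQL rejects; routing the row through TheSqlRowOperator emits plain parentheses. The statement below and the expected output are illustrative.

import com.gitee.dbswitch.sql.calcite.TheMysqlSqlDialect;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParser;

public class RowOperatorDemo {

  public static void main(String[] args) throws Exception {
    // parseStmt() is used because INSERT is a statement, not a query expression.
    SqlNode insert = SqlParser
        .create("INSERT INTO orders (id, name) VALUES (1, 'foo')")
        .parseStmt();
    // Expected to print ... VALUES (1, 'foo') rather than ... VALUES ROW(1, 'foo').
    System.out.println(insert.toSqlString(TheMysqlSqlDialect.DEFAULT).getSql());
  }
}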

View File

@@ -11,71 +11,70 @@ package com.gitee.dbswitch.sql.constant;
/**
* Constant definitions.
*
* @author tang
*
* @author tang
*/
public class Const {
/**
* What's the file systems file separator on this operating system?
*/
public static final String FILE_SEPARATOR = System.getProperty("file.separator");
/**
* What's the file systems file separator on this operating system?
*/
public static final String FILE_SEPARATOR = System.getProperty("file.separator");
/**
* What's the path separator on this operating system?
*/
public static final String PATH_SEPARATOR = System.getProperty("path.separator");
/**
* What's the path separator on this operating system?
*/
public static final String PATH_SEPARATOR = System.getProperty("path.separator");
/**
* CR: operating systems specific Carriage Return
*/
public static final String CR = System.getProperty("line.separator");
/**
* CR: operating systems specific Carriage Return
*/
public static final String CR = System.getProperty("line.separator");
/**
* DOSCR: MS-DOS specific Carriage Return
*/
public static final String DOSCR = "\n\r";
/**
* DOSCR: MS-DOS specific Carriage Return
*/
public static final String DOSCR = "\n\r";
/**
* An empty ("") String.
*/
public static final String EMPTY_STRING = "";
/**
* An empty ("") String.
*/
public static final String EMPTY_STRING = "";
/**
* The Java runtime version
*/
public static final String JAVA_VERSION = System.getProperty("java.vm.version");
/**
* The Java runtime version
*/
public static final String JAVA_VERSION = System.getProperty("java.vm.version");
/**
* Create Table Statement Prefix String
*/
public static final String CREATE_TABLE = " CREATE TABLE ";
/**
* Create Table Statement Prefix String
*/
public static final String CREATE_TABLE = " CREATE TABLE ";
/**
* Alter Table Statement Prefix String
*/
public static final String ALTER_TABLE = " ALTER TABLE ";
/**
* Drop Table Statement Prefix String
*/
public static final String DROP_TABLE = " DROP TABLE ";
/**
* Alter Table Statement Prefix String
*/
public static final String ALTER_TABLE = " ALTER TABLE ";
/**
* Constant Keyword String
*/
public static final String IF_NOT_EXISTS = " IF NOT EXISTS ";
/**
* Drop Table Statement Prefix String
*/
public static final String DROP_TABLE = " DROP TABLE ";
/**
* Constant Keyword String
*/
public static final String IF_EXISTS = " IF EXISTS ";
/**
* Constant Keyword String
*/
public static final String IF_NOT_EXISTS = " IF NOT EXISTS ";
/**
* Constructor function
*/
private Const() {
/**
* Constant Keyword String
*/
public static final String IF_EXISTS = " IF EXISTS ";
}
/**
* Constructor function
*/
private Const() {
}
}

View File

@@ -9,40 +9,39 @@
/////////////////////////////////////////////////////////////
package com.gitee.dbswitch.sql.ddl;
import com.gitee.dbswitch.sql.ddl.pojo.ColumnDefinition;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import com.gitee.dbswitch.sql.ddl.pojo.ColumnDefinition;
/**
* Abstract base class for database dialects.
*
* @author tang
*
* @author tang
*/
public abstract class AbstractDatabaseDialect {
public String getSchemaTableName(String schemaName, String tableName) {
return String.format("\"%s\".\"%s\"", schemaName.trim(),tableName.trim());
}
public String getQuoteFieldName(String fieldName) {
return String.format("\"%s\"", fieldName.trim());
}
public abstract String getFieldTypeName(ColumnDefinition column);
public abstract String getFieldDefination(ColumnDefinition column);
public String getPrimaryKeyAsString(List<String> pks) {
if (!pks.isEmpty()) {
StringBuilder sb = new StringBuilder();
sb.append("\"");
sb.append(StringUtils.join(pks, "\" , \""));
sb.append("\"");
return sb.toString();
}
public String getSchemaTableName(String schemaName, String tableName) {
return String.format("\"%s\".\"%s\"", schemaName.trim(), tableName.trim());
}
return "";
}
public String getQuoteFieldName(String fieldName) {
return String.format("\"%s\"", fieldName.trim());
}
public abstract String getFieldTypeName(ColumnDefinition column);
public abstract String getFieldDefination(ColumnDefinition column);
public String getPrimaryKeyAsString(List<String> pks) {
if (!pks.isEmpty()) {
StringBuilder sb = new StringBuilder();
sb.append("\"");
sb.append(StringUtils.join(pks, "\" , \""));
sb.append("\"");
return sb.toString();
}
return "";
}
}
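
The quoting helpers above default to ANSI double quotes. A brief sketch using one of the concrete implementations referenced later in this commit; the no-argument PostgresDialectImpl constructor and the printed values are assumptions.

import com.gitee.dbswitch.sql.ddl.AbstractDatabaseDialect;
import com.gitee.dbswitch.sql.ddl.sql.impl.PostgresDialectImpl;
import java.util.Arrays;

public class DialectQuotingDemo {

  public static void main(String[] args) {
    AbstractDatabaseDialect dialect = new PostgresDialectImpl();
    // Expected: "public"."t_user"
    System.out.println(dialect.getSchemaTableName("public", "t_user"));
    // Expected: "user_name"
    System.out.println(dialect.getQuoteFieldName("user_name"));
    // Expected: "id" , "code"
    System.out.println(dialect.getPrimaryKeyAsString(Arrays.asList("id", "code")));
  }
}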

View File

@@ -13,26 +13,25 @@ import java.util.Objects;
/**
* Abstract base class for DDL operations.
*
* @author tang
*
* @author tang
*/
public abstract class AbstractSqlDdlOperator {
private String name;
private String name;
public AbstractSqlDdlOperator(String name) {
this.name = Objects.requireNonNull(name);
}
public AbstractSqlDdlOperator(String name) {
this.name = Objects.requireNonNull(name);
}
public String getName() {
return this.name;
}
public String getName() {
return this.name;
}
@Override
public String toString() {
return this.name;
}
@Override
public String toString() {
return this.name;
}
public abstract String toSqlString(AbstractDatabaseDialect dialect);
public abstract String toSqlString(AbstractDatabaseDialect dialect);
}

View File

@@ -13,98 +13,100 @@ import java.util.Objects;
/**
* Column definition entity class.
*
* @author tang
*
* @author tang
*/
public class ColumnDefinition {
private String columnName;
private String columnType;
private String columnComment;
private Integer lengthOrPrecision;
private Integer scale;
private boolean primaryKey;
private boolean autoIncrement;
private boolean nullable;
private String defaultValue;
public String getColumnName() {
return columnName;
}
private String columnName;
private String columnType;
private String columnComment;
private Integer lengthOrPrecision;
private Integer scale;
private boolean primaryKey;
private boolean autoIncrement;
private boolean nullable;
private String defaultValue;
public void setColumnName(String columnName) {
this.columnName = Objects.requireNonNull(columnName);
}
public String getColumnName() {
return columnName;
}
public String getColumnType() {
return columnType;
}
public void setColumnName(String columnName) {
this.columnName = Objects.requireNonNull(columnName);
}
public void setColumnType(String columnType) {
this.columnType = Objects.requireNonNull(columnType);
}
public String getColumnType() {
return columnType;
}
public String getColumnComment() {
return columnComment;
}
public void setColumnType(String columnType) {
this.columnType = Objects.requireNonNull(columnType);
}
public void setColumnComment(String columnComment) {
this.columnComment = columnComment;
}
public String getColumnComment() {
return columnComment;
}
public Integer getLengthOrPrecision() {
return lengthOrPrecision;
}
public void setColumnComment(String columnComment) {
this.columnComment = columnComment;
}
public void setLengthOrPrecision(Integer lenOrPre) {
this.lengthOrPrecision = Objects.requireNonNull(lenOrPre);
}
public Integer getLengthOrPrecision() {
return lengthOrPrecision;
}
public Integer getScale() {
return scale;
}
public void setLengthOrPrecision(Integer lenOrPre) {
this.lengthOrPrecision = Objects.requireNonNull(lenOrPre);
}
public void setScale(Integer scale) {
this.scale = scale;
}
public Integer getScale() {
return scale;
}
public boolean isPrimaryKey() {
return primaryKey;
}
public void setScale(Integer scale) {
this.scale = scale;
}
public void setPrimaryKey(boolean primaryKey) {
this.primaryKey = primaryKey;
}
public boolean isPrimaryKey() {
return primaryKey;
}
public boolean isAutoIncrement() {
return this.autoIncrement;
}
public void setPrimaryKey(boolean primaryKey) {
this.primaryKey = primaryKey;
}
public void setAutoIncrement(boolean autoIncrement) {
this.autoIncrement = autoIncrement;
}
public boolean isAutoIncrement() {
return this.autoIncrement;
}
public boolean isNullable() {
return nullable;
}
public void setAutoIncrement(boolean autoIncrement) {
this.autoIncrement = autoIncrement;
}
public void setNullable(boolean nullable) {
this.nullable = nullable;
}
public boolean isNullable() {
return nullable;
}
public String getDefaultValue() {
return defaultValue;
}
public void setNullable(boolean nullable) {
this.nullable = nullable;
}
public void setDefaultValue(String defaultValue) {
this.defaultValue = defaultValue;
}
public String getDefaultValue() {
return defaultValue;
}
@Override
public String toString() {
return "ColumnDefinition [columnName=" + columnName + ", columnType=" + columnType + ", columnComment="
+ columnComment + ", lengthOrPrecision=" + lengthOrPrecision + ", scale=" + scale + ", primaryKey="
+ primaryKey + ", nullable=" + nullable + ", defaultValue=" + defaultValue + "]";
}
public void setDefaultValue(String defaultValue) {
this.defaultValue = defaultValue;
}
@Override
public String toString() {
return "ColumnDefinition [columnName=" + columnName + ", columnType=" + columnType
+ ", columnComment="
+ columnComment + ", lengthOrPrecision=" + lengthOrPrecision + ", scale=" + scale
+ ", primaryKey="
+ primaryKey + ", nullable=" + nullable + ", defaultValue=" + defaultValue + "]";
}
}

View File

@@ -15,52 +15,52 @@ import java.util.Objects;
/**
* Table definition entity class.
*
* @author tang
*
* @author tang
*/
public class TableDefinition {
private String schemaName;
private String tableName;
private String tableComment;
private List<ColumnDefinition> columns = new ArrayList<>();
public String getSchemaName() {
return schemaName;
}
private String schemaName;
private String tableName;
private String tableComment;
private List<ColumnDefinition> columns = new ArrayList<>();
public void setSchemaName(String schemaName) {
this.schemaName = Objects.requireNonNull(schemaName);
}
public String getSchemaName() {
return schemaName;
}
public String getTableName() {
return tableName;
}
public void setSchemaName(String schemaName) {
this.schemaName = Objects.requireNonNull(schemaName);
}
public void setTableName(String tableName) {
this.tableName = Objects.requireNonNull(tableName);
}
public String getTableName() {
return tableName;
}
public String getTableComment() {
return tableComment;
}
public void setTableName(String tableName) {
this.tableName = Objects.requireNonNull(tableName);
}
public void setTableComment(String tableComment) {
this.tableComment = tableComment;
}
public String getTableComment() {
return tableComment;
}
public List<ColumnDefinition> getColumns() {
return columns;
}
public void setTableComment(String tableComment) {
this.tableComment = tableComment;
}
public void addColumns(ColumnDefinition column) {
columns.add(column);
}
public List<ColumnDefinition> getColumns() {
return columns;
}
@Override
public String toString() {
return "TableDefinition [schemaName=" + schemaName + ", tableName=" + tableName + ", tableComment="
+ tableComment + ", columns=" + columns + "]";
}
public void addColumns(ColumnDefinition column) {
columns.add(column);
}
@Override
public String toString() {
return "TableDefinition [schemaName=" + schemaName + ", tableName=" + tableName
+ ", tableComment=" + tableComment + ", columns=" + columns + "]";
}
}

View File

@@ -9,7 +9,6 @@
/////////////////////////////////////////////////////////////
package com.gitee.dbswitch.sql.ddl.sql;
import java.util.Objects;
import com.gitee.dbswitch.sql.constant.Const;
import com.gitee.dbswitch.sql.ddl.AbstractDatabaseDialect;
import com.gitee.dbswitch.sql.ddl.AbstractSqlDdlOperator;
@@ -17,124 +16,131 @@ import com.gitee.dbswitch.sql.ddl.pojo.ColumnDefinition;
import com.gitee.dbswitch.sql.ddl.pojo.TableDefinition;
import com.gitee.dbswitch.sql.ddl.sql.impl.GreenplumDialectImpl;
import com.gitee.dbswitch.sql.ddl.sql.impl.PostgresDialectImpl;
import java.util.Objects;
/**
* ALTER TABLE statement operation class.
*
* @author tang
*
* @author tang
*/
public class DdlSqlAlterTable extends AbstractSqlDdlOperator {
protected enum AlterTypeEnum {
/**
* Add column operation
*/
ADD(1),
/**
* Drop column operation
*/
DROP(2),
/**
* Modify column operation
*/
MODIFY(3),
/**
* Rename operation
*/
RENAME(4);
protected enum AlterTypeEnum {
/**
* Add column operation
*/
ADD(1),
private int index;
/**
* Drop column operation
*/
DROP(2),
AlterTypeEnum(int idx) {
this.index = idx;
}
/**
* Modify column operation
*/
MODIFY(3),
public int getIndex() {
return index;
}
}
/**
* Rename operation
*/
RENAME(4);
private TableDefinition table;
private AlterTypeEnum alterType;
private int index;
public DdlSqlAlterTable(TableDefinition t, String handle) {
super(Const.ALTER_TABLE);
this.table = t;
alterType = AlterTypeEnum.valueOf(handle.toUpperCase());
}
AlterTypeEnum(int idx) {
this.index = idx;
}
@Override
public String toSqlString(AbstractDatabaseDialect dialect) {
String fullTableName = dialect.getSchemaTableName(table.getSchemaName(), table.getTableName());
public int getIndex() {
return index;
}
}
StringBuilder sb = new StringBuilder();
sb.append(this.getName());
sb.append(fullTableName);
private TableDefinition table;
private AlterTypeEnum alterType;
if (table.getColumns().size() < 1) {
throw new RuntimeException("Alter table need one column at least!");
}
public DdlSqlAlterTable(TableDefinition t, String handle) {
super(Const.ALTER_TABLE);
this.table = t;
alterType = AlterTypeEnum.valueOf(handle.toUpperCase());
}
if (AlterTypeEnum.ADD == alterType) {
if (dialect instanceof PostgresDialectImpl || dialect instanceof GreenplumDialectImpl) {
// PostgreSQL/Greenplum ADD accepts only one column at a time, not multiple
if (table.getColumns().size() != 1) {
throw new RuntimeException("Alter table for PostgreSQL/Greenplum only can add one column!");
}
sb.append(" ADD ");
ColumnDefinition cd = table.getColumns().get(0);
sb.append(dialect.getFieldDefination(cd));
} else {
sb.append(" ADD (");
for (int i = 0; i < table.getColumns().size(); ++i) {
ColumnDefinition cd = table.getColumns().get(i);
sb.append((i > 0) ? "," : " ");
sb.append(dialect.getFieldDefination(cd));
}
sb.append(")");
}
} else if (AlterTypeEnum.DROP == alterType) {
if (table.getColumns().size() != 1) {
throw new RuntimeException("Alter table only can drop one column!");
}
@Override
public String toSqlString(AbstractDatabaseDialect dialect) {
String fullTableName = dialect.getSchemaTableName(table.getSchemaName(), table.getTableName());
ColumnDefinition cd = table.getColumns().get(0);
sb.append(" DROP ");
sb.append(dialect.getQuoteFieldName(cd.getColumnName()));
} else if (AlterTypeEnum.MODIFY == alterType) {
if (table.getColumns().size() != 1) {
throw new RuntimeException("Alter table only can modify one column!");
}
StringBuilder sb = new StringBuilder();
sb.append(this.getName());
sb.append(fullTableName);
ColumnDefinition cd = table.getColumns().get(0);
if(dialect instanceof PostgresDialectImpl || dialect instanceof GreenplumDialectImpl ) {
// For PostgreSQL/Greenplum, MODIFY has to be split into separate ALTER COLUMN clauses
String typename = dialect.getFieldTypeName(cd);
boolean nullable = cd.isNullable();
String defaultValue = cd.getDefaultValue();
sb.append(" ALTER COLUMN " + dialect.getQuoteFieldName(cd.getColumnName()) + " TYPE " + typename);
if (nullable) {
sb.append(",ALTER COLUMN " + dialect.getQuoteFieldName(cd.getColumnName()) + " SET DEFAULT NULL");
} else if (Objects.nonNull(defaultValue) && !defaultValue.isEmpty() && !"NULL".equalsIgnoreCase(defaultValue)) {
sb.append(",ALTER COLUMN " + dialect.getQuoteFieldName(cd.getColumnName()) + " SET DEFAULT '" + defaultValue + "'");
} else {
sb.append(",ALTER COLUMN " + dialect.getQuoteFieldName(cd.getColumnName()) + " SET NOT NULL");
}
} else {
sb.append(" MODIFY ");
sb.append(dialect.getFieldDefination(cd));
}
} else {
// RENAME and other operations are not supported yet
throw new RuntimeException("Alter table unsupported operation : " + alterType.name());
}
if (table.getColumns().size() < 1) {
throw new RuntimeException("Alter table need one column at least!");
}
return sb.toString();
}
if (AlterTypeEnum.ADD == alterType) {
if (dialect instanceof PostgresDialectImpl || dialect instanceof GreenplumDialectImpl) {
// PostgreSQL/Greenplum ADD accepts only one column at a time, not multiple
if (table.getColumns().size() != 1) {
throw new RuntimeException(
"Alter table for PostgreSQL/Greenplum only can add one column!");
}
sb.append(" ADD ");
ColumnDefinition cd = table.getColumns().get(0);
sb.append(dialect.getFieldDefination(cd));
} else {
sb.append(" ADD (");
for (int i = 0; i < table.getColumns().size(); ++i) {
ColumnDefinition cd = table.getColumns().get(i);
sb.append((i > 0) ? "," : " ");
sb.append(dialect.getFieldDefination(cd));
}
sb.append(")");
}
} else if (AlterTypeEnum.DROP == alterType) {
if (table.getColumns().size() != 1) {
throw new RuntimeException("Alter table only can drop one column!");
}
ColumnDefinition cd = table.getColumns().get(0);
sb.append(" DROP ");
sb.append(dialect.getQuoteFieldName(cd.getColumnName()));
} else if (AlterTypeEnum.MODIFY == alterType) {
if (table.getColumns().size() != 1) {
throw new RuntimeException("Alter table only can modify one column!");
}
ColumnDefinition cd = table.getColumns().get(0);
if (dialect instanceof PostgresDialectImpl || dialect instanceof GreenplumDialectImpl) {
// For PostgreSQL/Greenplum, MODIFY has to be split into separate ALTER COLUMN clauses
String typename = dialect.getFieldTypeName(cd);
boolean nullable = cd.isNullable();
String defaultValue = cd.getDefaultValue();
sb.append(
" ALTER COLUMN " + dialect.getQuoteFieldName(cd.getColumnName()) + " TYPE " + typename);
if (nullable) {
sb.append(",ALTER COLUMN " + dialect.getQuoteFieldName(cd.getColumnName())
+ " SET DEFAULT NULL");
} else if (Objects.nonNull(defaultValue) && !defaultValue.isEmpty() && !"NULL"
.equalsIgnoreCase(defaultValue)) {
sb.append(
",ALTER COLUMN " + dialect.getQuoteFieldName(cd.getColumnName()) + " SET DEFAULT '"
+ defaultValue + "'");
} else {
sb.append(
",ALTER COLUMN " + dialect.getQuoteFieldName(cd.getColumnName()) + " SET NOT NULL");
}
} else {
sb.append(" MODIFY ");
sb.append(dialect.getFieldDefination(cd));
}
} else {
// RENAME and other operations are not supported yet
throw new RuntimeException("Alter table unsupported operation : " + alterType.name());
}
return sb.toString();
}
}
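
A usage sketch for the ALTER builder above, adding one nullable VARCHAR column on PostgreSQL. The no-argument PostgresDialectImpl constructor, the column layout, and the rendered SQL are assumptions for illustration.

import com.gitee.dbswitch.sql.ddl.pojo.ColumnDefinition;
import com.gitee.dbswitch.sql.ddl.pojo.TableDefinition;
import com.gitee.dbswitch.sql.ddl.sql.DdlSqlAlterTable;
import com.gitee.dbswitch.sql.ddl.sql.impl.PostgresDialectImpl;

public class AlterTableDemo {

  public static void main(String[] args) {
    ColumnDefinition col = new ColumnDefinition();
    col.setColumnName("remark");
    col.setColumnType("VARCHAR");
    col.setLengthOrPrecision(200);
    col.setScale(0); // length and scale are unboxed by the dialects, so keep them non-null
    col.setNullable(true);

    TableDefinition table = new TableDefinition();
    table.setSchemaName("public");
    table.setTableName("t_user");
    table.addColumns(col);

    // "ADD" selects AlterTypeEnum.ADD; PostgreSQL/Greenplum allow only one added column here.
    String sql = new DdlSqlAlterTable(table, "ADD").toSqlString(new PostgresDialectImpl());
    System.out.println(sql); // e.g. ALTER TABLE "public"."t_user" ADD "remark" VARCHAR (200)
  }
}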

View File

@@ -9,70 +9,69 @@
/////////////////////////////////////////////////////////////
package com.gitee.dbswitch.sql.ddl.sql;
import java.util.List;
import java.util.ArrayList;
import com.gitee.dbswitch.sql.constant.Const;
import com.gitee.dbswitch.sql.ddl.AbstractDatabaseDialect;
import com.gitee.dbswitch.sql.ddl.AbstractSqlDdlOperator;
import com.gitee.dbswitch.sql.ddl.pojo.ColumnDefinition;
import com.gitee.dbswitch.sql.ddl.pojo.TableDefinition;
import com.gitee.dbswitch.sql.ddl.sql.impl.MySqlDialectImpl;
import java.util.ArrayList;
import java.util.List;
/**
* CREATE TABLE statement operation class.
*
* @author tang
*
* @author tang
*/
public class DdlSqlCreateTable extends AbstractSqlDdlOperator {
private TableDefinition table;
private TableDefinition table;
public DdlSqlCreateTable(TableDefinition t) {
super(Const.CREATE_TABLE);
this.table = t;
}
public DdlSqlCreateTable(TableDefinition t) {
super(Const.CREATE_TABLE);
this.table = t;
}
@Override
public String toSqlString(AbstractDatabaseDialect dialect) {
StringBuilder sb=new StringBuilder();
sb.append(this.getName());
String fullTableName=dialect.getSchemaTableName(table.getSchemaName(), table.getTableName());
sb.append(fullTableName);
sb.append(" (");
sb.append(Const.CR);
List<ColumnDefinition> columns=table.getColumns();
List<String> pks=new ArrayList<>();
for(int i=0;i<columns.size();++i) {
ColumnDefinition c=columns.get(i);
if(c.isPrimaryKey()) {
pks.add(c.getColumnName());
}
if (i > 0) {
sb.append(",");
} else {
sb.append(" ");
}
String definition=dialect.getFieldDefination(c);
sb.append(definition);
sb.append(Const.CR);
}
@Override
public String toSqlString(AbstractDatabaseDialect dialect) {
StringBuilder sb = new StringBuilder();
sb.append(this.getName());
String fullTableName = dialect.getSchemaTableName(table.getSchemaName(), table.getTableName());
sb.append(fullTableName);
sb.append(" (");
sb.append(Const.CR);
if (!pks.isEmpty()) {
String pk = dialect.getPrimaryKeyAsString(pks);
sb.append(", PRIMARY KEY (").append(pk).append(")").append(Const.CR);
}
List<ColumnDefinition> columns = table.getColumns();
List<String> pks = new ArrayList<>();
for (int i = 0; i < columns.size(); ++i) {
ColumnDefinition c = columns.get(i);
if (c.isPrimaryKey()) {
pks.add(c.getColumnName());
}
sb.append(" )");
if (dialect instanceof MySqlDialectImpl) {
sb.append(" ENGINE=InnoDB DEFAULT CHARSET=utf8 ");
}
if (i > 0) {
sb.append(",");
} else {
sb.append(" ");
}
String definition = dialect.getFieldDefination(c);
sb.append(definition);
sb.append(Const.CR);
}
if (!pks.isEmpty()) {
String pk = dialect.getPrimaryKeyAsString(pks);
sb.append(", PRIMARY KEY (").append(pk).append(")").append(Const.CR);
}
sb.append(" )");
if (dialect instanceof MySqlDialectImpl) {
sb.append(" ENGINE=InnoDB DEFAULT CHARSET=utf8 ");
}
sb.append(Const.CR);
return sb.toString();
}
sb.append(Const.CR);
return sb.toString();
}
}
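
A matching sketch for the CREATE builder, generating a two-column MySQL table with a primary key. The no-argument MySqlDialectImpl constructor and the table layout are invented for illustration.

import com.gitee.dbswitch.sql.ddl.pojo.ColumnDefinition;
import com.gitee.dbswitch.sql.ddl.pojo.TableDefinition;
import com.gitee.dbswitch.sql.ddl.sql.DdlSqlCreateTable;
import com.gitee.dbswitch.sql.ddl.sql.impl.MySqlDialectImpl;

public class CreateTableDemo {

  public static void main(String[] args) {
    ColumnDefinition id = new ColumnDefinition();
    id.setColumnName("id");
    id.setColumnType("BIGINT");
    id.setLengthOrPrecision(20);
    id.setScale(0);
    id.setPrimaryKey(true);
    id.setAutoIncrement(true); // BIGINT is an allowed auto-increment type in MySqlDialectImpl
    id.setNullable(false);

    ColumnDefinition name = new ColumnDefinition();
    name.setColumnName("name");
    name.setColumnType("VARCHAR");
    name.setLengthOrPrecision(100);
    name.setScale(0);
    name.setNullable(true);

    TableDefinition table = new TableDefinition();
    table.setSchemaName("demo");
    table.setTableName("t_user");
    table.addColumns(id);
    table.addColumns(name);

    // Expected shape: CREATE TABLE `demo`.`t_user` ( ... , PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8
    System.out.println(new DdlSqlCreateTable(table).toSqlString(new MySqlDialectImpl()));
  }
}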

View File

@@ -16,26 +16,25 @@ import com.gitee.dbswitch.sql.ddl.pojo.TableDefinition;
/**
* DROP TABLE statement operation class.
*
* @author tang
*
* @author tang
*/
public class DdlSqlDropTable extends AbstractSqlDdlOperator {
private TableDefinition table;
private TableDefinition table;
public DdlSqlDropTable(TableDefinition t) {
super(Const.DROP_TABLE);
this.table = t;
}
@Override
public String toSqlString(AbstractDatabaseDialect dialect) {
StringBuilder sb = new StringBuilder();
sb.append(this.getName());
String fullTableName = dialect.getSchemaTableName(table.getSchemaName(), table.getTableName());
sb.append(fullTableName);
return sb.toString();
}
public DdlSqlDropTable(TableDefinition t) {
super(Const.DROP_TABLE);
this.table = t;
}
@Override
public String toSqlString(AbstractDatabaseDialect dialect) {
StringBuilder sb=new StringBuilder();
sb.append(this.getName());
String fullTableName=dialect.getSchemaTableName(table.getSchemaName(), table.getTableName());
sb.append(fullTableName);
return sb.toString();
}
}

View File

@@ -15,26 +15,25 @@ import com.gitee.dbswitch.sql.ddl.pojo.TableDefinition;
/**
* TRUNCATE TABLE statement operation class.
*
* @author tang
*
* @author tang
*/
public class DdlSqlTruncateTable extends AbstractSqlDdlOperator {
private TableDefinition table;
public DdlSqlTruncateTable(TableDefinition t) {
super("TRUNCATE TABLE ");
this.table=t;
}
private TableDefinition table;
@Override
public String toSqlString(AbstractDatabaseDialect dialect) {
StringBuilder sb=new StringBuilder();
sb.append(this.getName());
String fullTableName=dialect.getSchemaTableName(table.getSchemaName(), table.getTableName());
sb.append(fullTableName);
return sb.toString();
}
public DdlSqlTruncateTable(TableDefinition t) {
super("TRUNCATE TABLE ");
this.table = t;
}
@Override
public String toSqlString(AbstractDatabaseDialect dialect) {
StringBuilder sb = new StringBuilder();
sb.append(this.getName());
String fullTableName = dialect.getSchemaTableName(table.getSchemaName(), table.getTableName());
sb.append(fullTableName);
return sb.toString();
}
}

View File

@@ -9,93 +9,98 @@
/////////////////////////////////////////////////////////////
package com.gitee.dbswitch.sql.ddl.sql.impl;
import com.gitee.dbswitch.sql.ddl.pojo.ColumnDefinition;
import com.gitee.dbswitch.sql.ddl.type.GreenplumDataTypeEnum;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import com.gitee.dbswitch.sql.ddl.pojo.ColumnDefinition;
import com.gitee.dbswitch.sql.ddl.type.GreenplumDataTypeEnum;
/**
* Greenplum dialect implementation class.
*
* @author tang
*
* @author tang
*/
public class GreenplumDialectImpl extends PostgresDialectImpl {
  protected static List<GreenplumDataTypeEnum> integerTypes;

  static {
    integerTypes = new ArrayList<>();
    integerTypes.add(GreenplumDataTypeEnum.SERIAL2);
    integerTypes.add(GreenplumDataTypeEnum.SERIAL4);
    integerTypes.add(GreenplumDataTypeEnum.SERIAL8);
    integerTypes.add(GreenplumDataTypeEnum.SMALLSERIAL);
    integerTypes.add(GreenplumDataTypeEnum.SERIAL);
    integerTypes.add(GreenplumDataTypeEnum.BIGSERIAL);
  }

  @Override
  public String getFieldTypeName(ColumnDefinition column) {
    int length = column.getLengthOrPrecision();
    int scale = column.getScale();

    StringBuilder sb = new StringBuilder();
    GreenplumDataTypeEnum type = null;
    try {
      type = GreenplumDataTypeEnum.valueOf(column.getColumnType().toUpperCase());
    } catch (IllegalArgumentException e) {
      throw new RuntimeException(
          String.format("Invalid Greenplum data type: %s", column.getColumnType()));
    }

    if (column.isAutoIncrement()) {
      if (!GreenplumDialectImpl.integerTypes.contains(type)) {
        throw new RuntimeException(String
            .format("Invalid Greenplum auto increment data type: %s", column.getColumnType()));
      }
    }

    sb.append(type.name());
    switch (type) {
      case NUMERIC:
      case DECIMAL:
        if (Objects.isNull(length) || length < 0) {
          throw new RuntimeException(
              String.format("Invalid Greenplum data type length: %s(%d)", column.getColumnType(),
                  length));
        }
        if (Objects.isNull(scale) || scale < 0) {
          throw new RuntimeException(
              String.format("Invalid Greenplum data type scale: %s(%d,%d)", column.getColumnType(),
                  length, scale));
        }
        sb.append(String.format("(%d,%d)", length, scale));
        break;
      case CHAR:
      case VARCHAR:
        if (Objects.isNull(length) || length < 0) {
          throw new RuntimeException(
              String.format("Invalid Greenplum data type length: %s(%d)", column.getColumnType(),
                  length));
        }
        sb.append(String.format(" (%d) ", length));
        break;
      case TIMESTAMP:
        if (Objects.isNull(length) || length < 0) {
          sb.append(" (0) ");
        } else if (0 == length || 6 == length) {
          sb.append(String.format(" (%d) ", length));
        } else {
          throw new RuntimeException(
              String.format("Invalid Greenplum data type length: %s(%d)", column.getColumnType(),
                  length));
        }
        break;
      case DOUBLE:
        sb.append(" PRECISION ");
        break;
      default:
        break;
    }

    return sb.toString();
  }
}
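For orientation, a minimal usage sketch of the type mapping above. It assumes ColumnDefinition is a plain JavaBean with a no-arg constructor and setters matching its getters (setColumnType, setLengthOrPrecision, setScale); those setter names are assumptions and are not part of this commit.

// Hypothetical sketch only; setter names below are assumed, not shown in this diff.
ColumnDefinition col = new ColumnDefinition();
col.setColumnType("numeric");        // resolved via GreenplumDataTypeEnum.valueOf("NUMERIC")
col.setLengthOrPrecision(10);
col.setScale(2);

GreenplumDialectImpl dialect = new GreenplumDialectImpl();
String decimalType = dialect.getFieldTypeName(col);   // expected: "NUMERIC(10,2)"

col.setColumnType("double");
String doubleType = dialect.getFieldTypeName(col);    // expected: "DOUBLE PRECISION " (the DOUBLE case appends " PRECISION ")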

View File

@@ -9,155 +9,163 @@
/////////////////////////////////////////////////////////////
package com.gitee.dbswitch.sql.ddl.sql.impl;
import com.gitee.dbswitch.sql.ddl.AbstractDatabaseDialect;
import com.gitee.dbswitch.sql.ddl.pojo.ColumnDefinition;
import com.gitee.dbswitch.sql.ddl.type.MySqlDataTypeEnum;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
/**
 * Notes on auto-increment columns in MySQL:
 * <p>
 * 1. A table can have only one auto-increment column.
 * <p>
 * 2. The column's data type must be numeric.
 * <p>
 * 3. A default value cannot be set on it.
 * <p>
 * 4. NOT NULL is applied to it automatically.
 *
 * @author tang
 */
public class MySqlDialectImpl extends AbstractDatabaseDialect {
  private static List<MySqlDataTypeEnum> integerTypes;

  static {
    integerTypes = new ArrayList<>();
    integerTypes.add(MySqlDataTypeEnum.TINYINT);
    integerTypes.add(MySqlDataTypeEnum.SMALLINT);
    integerTypes.add(MySqlDataTypeEnum.MEDIUMINT);
    integerTypes.add(MySqlDataTypeEnum.INTEGER);
    integerTypes.add(MySqlDataTypeEnum.INT);
    integerTypes.add(MySqlDataTypeEnum.BIGINT);
  }

  @Override
  public String getSchemaTableName(String schemaName, String tableName) {
    if (Objects.isNull(schemaName) || schemaName.trim().isEmpty()) {
      return String.format("`%s`", tableName);
    }
    return String.format("`%s`.`%s`", schemaName, tableName);
  }

  @Override
  public String getQuoteFieldName(String fieldName) {
    return String.format("`%s`", fieldName.trim());
  }

  @Override
  public String getPrimaryKeyAsString(List<String> pks) {
    if (!pks.isEmpty()) {
      StringBuilder sb = new StringBuilder();
      sb.append("`");
      sb.append(StringUtils.join(pks, "` , `"));
      sb.append("`");
      return sb.toString();
    }
    return "";
  }

  @Override
  public String getFieldTypeName(ColumnDefinition column) {
    int length = column.getLengthOrPrecision();
    int scale = column.getScale();

    StringBuilder sb = new StringBuilder();
    MySqlDataTypeEnum type = null;
    try {
      type = MySqlDataTypeEnum.valueOf(column.getColumnType().toUpperCase());
    } catch (IllegalArgumentException e) {
      throw new RuntimeException(
          String.format("Invalid MySQL data type: %s", column.getColumnType()));
    }

    if (column.isAutoIncrement()) {
      if (!MySqlDialectImpl.integerTypes.contains(type)) {
        throw new RuntimeException(
            String.format("Invalid MySQL auto increment data type: %s", column.getColumnType()));
      }
    }

    sb.append(type.name());
    switch (type) {
      case FLOAT:
      case DOUBLE:
      case DECIMAL:
        if (Objects.isNull(length) || length < 0) {
          throw new RuntimeException(
              String.format("Invalid MySQL data type length: %s(%d)", column.getColumnType(),
                  length));
        }
        if (Objects.isNull(scale) || scale < 0) {
          throw new RuntimeException(
              String.format("Invalid MySQL data type scale: %s(%d,%d)", column.getColumnType(),
                  length, scale));
        }
        sb.append(String.format("(%d,%d)", length, scale));
        break;
      case TINYINT:
      case SMALLINT:
      case MEDIUMINT:
      case INTEGER:
      case INT:
      case BIGINT:
      case CHAR:
      case VARCHAR:
        if (Objects.isNull(length) || length < 0) {
          throw new RuntimeException(
              String.format("Invalid MySQL data type length: %s(%d)", column.getColumnType(),
                  length));
        }
        sb.append(String.format(" (%d) ", length));
      default:
        break;
    }

    return sb.toString();
  }

  @Override
  public String getFieldDefination(ColumnDefinition column) {
    String fieldname = column.getColumnName();
    boolean nullable = column.isNullable();
    String defaultValue = column.getDefaultValue();
    String comment = column.getColumnComment();

    StringBuilder sb = new StringBuilder();
    sb.append(String.format("`%s` ", fieldname.trim()));
    sb.append(this.getFieldTypeName(column));

    if (column.isAutoIncrement() && column.isPrimaryKey()) {
      // In MySQL only the primary key column can be auto-increment
      sb.append(" NOT NULL AUTO_INCREMENT ");
    } else {
      if (nullable) {
        sb.append(" DEFAULT NULL");
      } else if (Objects.nonNull(defaultValue) && !defaultValue.isEmpty()) {
        if ("NULL".equalsIgnoreCase(defaultValue)) {
          sb.append(" DEFAULT NULL");
        } else if (defaultValue.toUpperCase().trim().startsWith("CURRENT_TIMESTAMP")) {
          // Handle default current-timestamp values for date/time columns
          sb.append(String.format(" DEFAULT %s", defaultValue));
        } else {
          sb.append(String.format(" DEFAULT '%s'", defaultValue));
        }
      } else {
        sb.append(" NOT NULL");
      }
    }

    if (Objects.nonNull(comment) && !comment.isEmpty()) {
      sb.append(String.format(" COMMENT '%s'", comment));
    }
    return sb.toString();
  }
}
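A rough illustration of what getFieldDefination() above emits for an auto-increment primary key. This is a sketch only: the ColumnDefinition setters are assumed JavaBean-style counterparts of the getters used in the dialect and are not part of this diff.

// Hypothetical sketch; setter names are assumptions.
ColumnDefinition id = new ColumnDefinition();
id.setColumnName("id");
id.setColumnType("bigint");
id.setLengthOrPrecision(20);
id.setAutoIncrement(true);
id.setPrimaryKey(true);

MySqlDialectImpl mysql = new MySqlDialectImpl();
String columnDdl = mysql.getFieldDefination(id);
// roughly: "`id` BIGINT (20)  NOT NULL AUTO_INCREMENT "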

View File

@@ -9,117 +9,126 @@
/////////////////////////////////////////////////////////////
package com.gitee.dbswitch.sql.ddl.sql.impl;
import com.gitee.dbswitch.sql.ddl.AbstractDatabaseDialect;
import com.gitee.dbswitch.sql.ddl.pojo.ColumnDefinition;
import com.gitee.dbswitch.sql.ddl.type.OracleDataTypeEnum;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/**
 * Notes on identity (auto-increment) columns in Oracle 12c:
 * <p>
 * 1. A table can have only one identity column.
 * <p>
 * 2. The column's data type must be numeric.
 * <p>
 * 3. A default value cannot be set on it.
 * <p>
 * 4. NOT NULL and NOT DEFERRABLE are applied to it automatically.
 * <p>
 * 5. A table created with CTAS (CREATE TABLE AS SELECT) does not inherit the identity property.
 * <p>
 * 6. On rollback the transaction is undone, but values already drawn from the backing sequence are not reclaimed.
 *
 * @author tang
 */
public class OracleDialectImpl extends AbstractDatabaseDialect {

  private static List<OracleDataTypeEnum> integerTypes;

  static {
    integerTypes = new ArrayList<>();
    integerTypes.add(OracleDataTypeEnum.NUMBER);
  }

  @Override
  public String getFieldTypeName(ColumnDefinition column) {
    int length = column.getLengthOrPrecision();
    int scale = column.getScale();

    StringBuilder sb = new StringBuilder();
    OracleDataTypeEnum type = null;
    try {
      type = OracleDataTypeEnum.valueOf(column.getColumnType().toUpperCase());
    } catch (IllegalArgumentException e) {
      throw new RuntimeException(
          String.format("Invalid Oracle data type: %s", column.getColumnType()));
    }

    if (column.isAutoIncrement()) {
      if (!OracleDialectImpl.integerTypes.contains(type)) {
        throw new RuntimeException(
            String.format("Invalid Oracle auto increment data type: %s", column.getColumnType()));
      }
    }

    sb.append(type.name());
    switch (type) {
      case NUMBER:
        if (Objects.isNull(length) || length < 0) {
          throw new RuntimeException(
              String.format("Invalid Oracle data type length: %s(%d)", column.getColumnType(),
                  length));
        }
        if (length > 0) {
          sb.append(String.format("(%d)", length));
        } else {
          if (Objects.isNull(scale) || scale < 0) {
            throw new RuntimeException(String.format("Invalid Oracle data type scale: %s(%d,%d)",
                column.getColumnType(), length, scale));
          }
          sb.append(String.format("(%d,%d)", length, scale));
        }
        break;
      case CHAR:
      case NCHAR:
      case VARCHAR:
      case VARCHAR2:
        if (Objects.isNull(length) || length < 0) {
          throw new RuntimeException(String
              .format("Invalid Oracle data type length: %s(%d)", column.getColumnType(), length));
        }
        sb.append(String.format(" (%d) ", length));
      default:
        break;
    }

    return sb.toString();
  }

  @Override
  public String getFieldDefination(ColumnDefinition column) {
    String fieldname = column.getColumnName();
    boolean nullable = column.isNullable();
    String defaultValue = column.getDefaultValue();
    //String comment = column.getColumnComment();

    StringBuilder sb = new StringBuilder();
    sb.append(String.format("\"%s\" ", fieldname.trim()));
    sb.append(this.getFieldTypeName(column));

    if (column.isAutoIncrement() && column.isPrimaryKey()) {
      // In Oracle 12c only the primary key column is identity (auto-increment)
      sb.append(" GENERATED BY DEFAULT ON NULL AS IDENTITY ");
    } else {
      if (nullable) {
        sb.append(" DEFAULT NULL");
      } else if (Objects.nonNull(defaultValue) && !defaultValue.isEmpty()) {
        if (defaultValue.equalsIgnoreCase("NULL")) {
          sb.append(" DEFAULT NULL");
        } else if (defaultValue.equalsIgnoreCase("SYSDATE")) {
          sb.append(" DEFAULT SYSDATE");
        } else {
          sb.append(String.format(" DEFAULT '%s'", defaultValue));
        }
      } else {
        sb.append(" NOT NULL");
      }
    }
    return sb.toString();
  }
}
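A comparable sketch for the Oracle dialect, showing the identity clause emitted for an auto-increment primary key; as before, the ColumnDefinition setter names are assumptions, not part of this commit.

// Hypothetical sketch; setter names are assumptions.
ColumnDefinition pk = new ColumnDefinition();
pk.setColumnName("ID");
pk.setColumnType("number");
pk.setLengthOrPrecision(10);
pk.setAutoIncrement(true);
pk.setPrimaryKey(true);

OracleDialectImpl oracle = new OracleDialectImpl();
String columnDdl = oracle.getFieldDefination(pk);
// roughly: "\"ID\" NUMBER(10) GENERATED BY DEFAULT ON NULL AS IDENTITY "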

View File

@@ -9,127 +9,132 @@
/////////////////////////////////////////////////////////////
package com.gitee.dbswitch.sql.ddl.sql.impl;
import com.gitee.dbswitch.sql.ddl.AbstractDatabaseDialect;
import com.gitee.dbswitch.sql.ddl.pojo.ColumnDefinition;
import com.gitee.dbswitch.sql.ddl.type.PostgresDataTypeEnum;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
/**
 * PostgreSQL dialect implementation
 *
 * @author tang
 */
public class PostgresDialectImpl extends AbstractDatabaseDialect {
  private static List<PostgresDataTypeEnum> integerTypes;

  static {
    integerTypes = new ArrayList<>();
    integerTypes.add(PostgresDataTypeEnum.SERIAL2);
    integerTypes.add(PostgresDataTypeEnum.SERIAL4);
    integerTypes.add(PostgresDataTypeEnum.SERIAL8);
    integerTypes.add(PostgresDataTypeEnum.SMALLSERIAL);
    integerTypes.add(PostgresDataTypeEnum.SERIAL);
    integerTypes.add(PostgresDataTypeEnum.BIGSERIAL);
  }

  @Override
  public String getFieldTypeName(ColumnDefinition column) {
    int length = column.getLengthOrPrecision();
    int scale = column.getScale();

    StringBuilder sb = new StringBuilder();
    PostgresDataTypeEnum type = null;
    try {
      type = PostgresDataTypeEnum.valueOf(column.getColumnType().toUpperCase());
    } catch (IllegalArgumentException e) {
      throw new RuntimeException(
          String.format("Invalid PostgreSQL data type: %s", column.getColumnType()));
    }

    if (column.isAutoIncrement()) {
      if (!PostgresDialectImpl.integerTypes.contains(type)) {
        throw new RuntimeException(String
            .format("Invalid PostgreSQL auto increment data type: %s", column.getColumnType()));
      }
    }

    sb.append(type.name());
    switch (type) {
      case NUMERIC:
      case DECIMAL:
        if (Objects.isNull(length) || length < 0) {
          throw new RuntimeException(
              String.format("Invalid PostgreSQL data type length: %s(%d)", column.getColumnType(),
                  length));
        }
        if (Objects.isNull(scale) || scale < 0) {
          throw new RuntimeException(
              String.format("Invalid PostgreSQL data type scale: %s(%d,%d)", column.getColumnType(),
                  length, scale));
        }
        sb.append(String.format("(%d,%d)", length, scale));
        break;
      case CHAR:
      case VARCHAR:
        if (Objects.isNull(length) || length < 0) {
          throw new RuntimeException(
              String.format("Invalid PostgreSQL data type length: %s(%d)", column.getColumnType(),
                  length));
        }
        sb.append(String.format(" (%d) ", length));
        break;
      case TIMESTAMP:
        if (Objects.isNull(length) || length < 0) {
          sb.append(" (0) ");
        } else if (0 == length || 6 == length) {
          sb.append(String.format(" (%d) ", length));
        } else {
          throw new RuntimeException(
              String.format("Invalid PostgreSQL data type length: %s(%d)", column.getColumnType(),
                  length));
        }
        break;
      case DOUBLE:
        sb.append(" PRECISION ");
        break;
      default:
        break;
    }

    return sb.toString();
  }

  @Override
  public String getFieldDefination(ColumnDefinition column) {
    String fieldname = column.getColumnName();
    boolean nullable = column.isNullable();
    String defaultValue = column.getDefaultValue();
    //String comment = column.getColumnComment();

    StringBuilder sb = new StringBuilder();
    sb.append(String.format("\"%s\" ", fieldname.trim()));
    sb.append(this.getFieldTypeName(column));

    if (column.isAutoIncrement()) {
      // PostgreSQL/Greenplum allow more than one auto-increment (serial) column per table
      sb.append(" ");
    } else {
      if (nullable) {
        sb.append(" DEFAULT NULL");
      } else if (Objects.nonNull(defaultValue) && !defaultValue.isEmpty()) {
        if (defaultValue.equalsIgnoreCase("NULL")) {
          sb.append(" DEFAULT NULL");
        } else if ("now()".equalsIgnoreCase(defaultValue)) {
          sb.append(" DEFAULT now() ");
        } else {
          sb.append(String.format(" DEFAULT '%s'", defaultValue));
        }
      } else {
        sb.append(" NOT NULL");
      }
    }
    return sb.toString();
  }
}

View File

@@ -13,62 +13,61 @@ import java.sql.Types;
/**
 * Greenplum data types (the same set as PostgreSQL)
 *
 * <p>
 * Reference: https://www.yiibai.com/postgresql/postgresql-datatypes.html
 *
 * @author tang
 */
public enum GreenplumDataTypeEnum {
  //~~~~~ integer types ~~~~~~~~
  SMALLINT(0, Types.SMALLINT),
  INT2(1, Types.SMALLINT),
  INTEGER(2, Types.INTEGER),
  INT4(3, Types.INTEGER),
  BIGINT(4, Types.BIGINT),
  INT8(5, Types.BIGINT),
  DECIMAL(6, Types.DECIMAL),
  NUMERIC(7, Types.NUMERIC),
  REAL(8, Types.REAL),//equal float4
  FLOAT4(9, Types.FLOAT),
  DOUBLE(10, Types.DOUBLE),
  FLOAT8(11, Types.DOUBLE),
  SMALLSERIAL(12, Types.SMALLINT),
  SERIAL2(13, Types.SMALLINT),
  SERIAL(14, Types.INTEGER),
  SERIAL4(15, Types.INTEGER),
  BIGSERIAL(16, Types.BIGINT),
  SERIAL8(17, Types.BIGINT),

  //~~~~~ date and time types ~~~~~~~~
  DATE(18, Types.DATE),
  TIME(19, Types.TIME),
  TIMESTAMP(20, Types.TIMESTAMP),

  //~~~~~ string types ~~~~~~~~
  CHAR(21, Types.CHAR),
  VARCHAR(22, Types.VARCHAR),
  TEXT(23, Types.CLOB),
  BYTEA(24, Types.BLOB),

  //~~~~~~~ other types ~~~~~~~~
  BOOL(25, Types.BOOLEAN);

  private int index;
  private int jdbctype;

  GreenplumDataTypeEnum(int idx, int jdbcType) {
    this.index = idx;
    this.jdbctype = jdbcType;
  }

  public int getIndex() {
    return index;
  }

  public int getJdbcType() {
    return this.jdbctype;
  }
}

View File

@@ -13,58 +13,57 @@ import java.sql.Types;
/**
 * MySQL data types
 *
 * <p>
 * Reference: https://www.yiibai.com/mysql/data-types.html
 *
 * @author tang
 */
public enum MySqlDataTypeEnum {
  //~~~~~ integer types ~~~~~~~~
  TINYINT(0, Types.TINYINT),
  SMALLINT(1, Types.SMALLINT),
  MEDIUMINT(2, Types.INTEGER),
  INTEGER(3, Types.INTEGER),
  INT(4, Types.INTEGER),
  BIGINT(5, Types.BIGINT),
  FLOAT(6, Types.FLOAT),
  DOUBLE(7, Types.DOUBLE),
  DECIMAL(8, Types.DECIMAL),

  //~~~~~ date and time types ~~~~~~~~
  DATE(9, Types.DATE),
  TIME(10, Types.TIME),
  YEAR(11, Types.DATE),
  DATETIME(12, Types.TIMESTAMP),
  TIMESTAMP(13, Types.TIMESTAMP),

  //~~~~~ string types ~~~~~~~~
  CHAR(14, Types.CHAR),
  VARCHAR(15, Types.VARCHAR),
  TINYBLOB(16, Types.VARBINARY),
  TINYTEXT(17, Types.CLOB),
  BLOB(18, Types.VARBINARY),
  TEXT(19, Types.CLOB),
  MEDIUMBLOB(20, Types.LONGVARBINARY),
  MEDIUMTEXT(21, Types.LONGVARCHAR),
  LONGBLOB(22, Types.LONGVARBINARY),
  LONGTEXT(23, Types.LONGVARCHAR);

  private int index;
  private int jdbctype;

  MySqlDataTypeEnum(int idx, int jdbcType) {
    this.index = idx;
    this.jdbctype = jdbcType;
  }

  public int getIndex() {
    return index;
  }

  public int getJdbcType() {
    return this.jdbctype;
  }
}

View File

@@ -13,44 +13,43 @@ import java.sql.Types;
/**
 * Oracle data types
 *
 * <p>
 * Reference: http://blog.itpub.net/26736162/viewspace-2149685
 *
 * @author tang
 */
public enum OracleDataTypeEnum {
  //~~~~~ numeric types ~~~~~~~~
  NUMBER(1, Types.NUMERIC),

  //~~~~~ date and time types ~~~~~~~~
  DATE(2, Types.DATE),
  TIMESTAMP(3, Types.TIMESTAMP),

  //~~~~~ string types ~~~~~~~~
  CHAR(4, Types.CHAR),
  NCHAR(5, Types.CHAR),
  VARCHAR(6, Types.VARCHAR),
  VARCHAR2(7, Types.VARCHAR),
  LONG(8, Types.LONGVARBINARY),
  CLOB(9, Types.CLOB),
  BLOB(10, Types.BLOB);

  private int index;
  private int jdbctype;

  OracleDataTypeEnum(int idx, int jdbcType) {
    this.index = idx;
    this.jdbctype = jdbcType;
  }

  public int getIndex() {
    return index;
  }

  public int getJdbcType() {
    return this.jdbctype;
  }
}

View File

@@ -13,61 +13,60 @@ import java.sql.Types;
/**
 * PostgreSQL data types
 *
 * <p>
 * Reference: https://www.yiibai.com/postgresql/postgresql-datatypes.html
 *
 * @author tang
 */
public enum PostgresDataTypeEnum {
  //~~~~~ integer types ~~~~~~~~
  SMALLINT(0, Types.SMALLINT),
  INT2(1, Types.SMALLINT),
  INTEGER(2, Types.INTEGER),
  INT4(3, Types.INTEGER),
  BIGINT(4, Types.BIGINT),
  INT8(5, Types.BIGINT),
  DECIMAL(6, Types.DECIMAL),
  NUMERIC(7, Types.NUMERIC),
  REAL(8, Types.REAL),//equal float4
  FLOAT4(9, Types.FLOAT),
  DOUBLE(10, Types.DOUBLE),
  FLOAT8(11, Types.DOUBLE),
  SMALLSERIAL(12, Types.SMALLINT),
  SERIAL2(13, Types.SMALLINT),
  SERIAL(14, Types.INTEGER),
  SERIAL4(15, Types.INTEGER),
  BIGSERIAL(16, Types.BIGINT),
  SERIAL8(17, Types.BIGINT),

  //~~~~~ date and time types ~~~~~~~~
  DATE(18, Types.DATE),
  TIME(19, Types.TIME),
  TIMESTAMP(20, Types.TIMESTAMP),

  //~~~~~ string types ~~~~~~~~
  CHAR(21, Types.CHAR),
  VARCHAR(22, Types.VARCHAR),
  TEXT(23, Types.CLOB),
  BYTEA(24, Types.BLOB),

  //~~~~~~~ other types ~~~~~~~~
  BOOL(25, Types.BOOLEAN);

  private int index;
  private int jdbctype;

  PostgresDataTypeEnum(int idx, int jdbcType) {
    this.index = idx;
    this.jdbctype = jdbcType;
  }

  public int getIndex() {
    return index;
  }

  public int getJdbcType() {
    return this.jdbctype;
  }
}

View File

@@ -10,7 +10,7 @@
package com.gitee.dbswitch.sql.service;
import java.util.Map;
import com.gitee.dbswitch.common.constant.DatabaseTypeEnum;
import com.gitee.dbswitch.common.type.DatabaseTypeEnum;
/**
 * SQL statements fall into four major categories: Data Query Language (DQL), Data Manipulation Language (DML), Data Definition Language (DDL), and Data Control Language (DCL).

View File

@@ -23,7 +23,7 @@ import com.gitee.dbswitch.sql.calcite.TheMssqlSqlDialect;
import com.gitee.dbswitch.sql.calcite.TheMysqlSqlDialect;
import com.gitee.dbswitch.sql.calcite.TheOracleSqlDialect;
import com.gitee.dbswitch.sql.calcite.ThePostgresqlSqlDialect;
import com.gitee.dbswitch.common.constant.DatabaseTypeEnum;
import com.gitee.dbswitch.common.type.DatabaseTypeEnum;
import com.gitee.dbswitch.sql.service.ISqlConvertService;
/**

View File

@@ -11,7 +11,7 @@ package com.gitee.dbswitch.sql.service.impl;
import java.util.HashMap;
import java.util.Map;
import com.gitee.dbswitch.common.constant.DatabaseTypeEnum;
import com.gitee.dbswitch.common.type.DatabaseTypeEnum;
import com.gitee.dbswitch.sql.service.ISqlGeneratorService;
import com.gitee.dbswitch.sql.ddl.AbstractDatabaseDialect;
import com.gitee.dbswitch.sql.ddl.AbstractSqlDdlOperator;