Move all type data to datatypes.js

Author: 1ilit
Date: 2024-06-10 02:17:43 +03:00
Parent: 0908d040e0
Commit: 9c31e2be52
8 changed files with 514 additions and 426 deletions

View File

@@ -1,86 +1,15 @@
+import { dbToTypes } from "../data/datatypes";
import i18n from "../i18n/i18n";
-import { isFunction, strHasQuotes } from "./utils";
+import { isFunction } from "./utils";
-function validateDateStr(str) {
-return /^(?!0000)(?!00)(?:(?!0000)[0-9]{4}-(?:(?:0[1-9]|1[0-2])-(?:0[1-9]|1[0-9]|2[0-9]|3[01])|(?:0[1-9]|1[0-2])-(?:0[1-9]|1[0-9]|2[0-8])|(?:0[13-9]|1[0-2])-(?:29|30)|(?:0[13578]|1[02])-31))$/.test(
-str,
-);
-}
-function checkDefault(field) {
+function checkDefault(field, database) {
if (field.default === "") return true;
if (isFunction(field.default)) return true;
if (!field.notNull && field.default.toLowerCase() === "null") return true;
-switch (field.type) {
-case "INT":
-case "BIGINT":
-case "SMALLINT":
-return /^-?\d*$/.test(field.default);
-case "SET": {
-const defaultValues = field.default.split(",");
-for (let i = 0; i < defaultValues.length; i++) {
-if (!field.values.includes(defaultValues[i].trim())) return false;
-}
-return true;
-}
-case "ENUM":
-return field.values.includes(field.default);
-case "CHAR":
-case "VARCHAR":
-if (strHasQuotes(field.default)) {
-return field.default.length - 2 <= field.size;
-}
-return field.default.length <= field.size;
-case "BINARY":
-case "VARBINARY":
-return (
-field.default.length <= field.size && /^[01]+$/.test(field.default)
-);
-case "BOOLEAN":
-return (
-field.default.trim().toLowerCase() === "false" ||
-field.default.trim().toLowerCase() === "true"
-);
-case "FLOAT":
-case "DECIMAL":
-case "DOUBLE":
-case "NUMERIC":
-case "REAL":
-return /^-?\d*.?\d+$/.test(field.default);
-case "DATE":
-return validateDateStr(field.default);
-case "TIME":
-return /^(?:[01]?\d|2[0-3]):[0-5]?\d:[0-5]?\d$/.test(field.default);
-case "TIMESTAMP": {
-if (field.default.toUpperCase() === "CURRENT_TIMESTAMP") {
-return true;
-}
-if (!/^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$/.test(field.default)) {
-return false;
-}
-const content = field.default.split(" ");
-const date = content[0].split("-");
-return parseInt(date[0]) >= 1970 && parseInt(date[0]) <= 2038;
-}
-case "DATETIME": {
-if (field.default.toUpperCase() === "CURRENT_TIMESTAMP") {
-return true;
-}
-if (!/^\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}$/.test(field.default)) {
-return false;
-}
-const c = field.default.split(" ");
-const d = c[0].split("-");
-return parseInt(d[0]) >= 1000 && parseInt(d[0]) <= 9999;
-}
-default:
-return true;
-}
+return dbToTypes[database][field.type].checkDefault(field);
}
export function getIssues(diagram) {
@@ -123,7 +52,7 @@ export function getIssues(diagram) {
}
}
-if (!checkDefault(field)) {
+if (!checkDefault(field, diagram.database)) {
issues.push(
i18n.t("default_doesnt_match_type", {
tableName: table.name,

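The delegation above implies that every entry in dbToTypes[database] now carries its own checkDefault callback. A minimal sketch of that shape, reusing the INT and ENUM rules from the deleted switch; the surrounding object is illustrative, not the actual contents of datatypes.js:

// Hypothetical excerpt — the real per-database definitions live in datatypes.js.
const exampleTypes = {
  INT: {
    type: "INT",
    // Same rule the removed switch applied to INT/BIGINT/SMALLINT defaults.
    checkDefault: (field) => /^-?\d*$/.test(field.default),
  },
  ENUM: {
    type: "ENUM",
    // An ENUM default must be one of the declared values.
    checkDefault: (field) => field.values.includes(field.default),
  },
};

// checkDefault(field, database) then collapses to a single lookup:
// dbToTypes[database][field.type].checkDefault(field)
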
View File

@@ -1,8 +1,8 @@
-import { defaultTypes } from "../data/datatypes";
+import { dbToTypes, defaultTypes } from "../data/datatypes";
import { isFunction, isKeyword, strHasQuotes } from "./utils";
export function getJsonType(f) {
-if (!defaultTypes.includes(f.type)) {
+if (!Object.keys(defaultTypes).includes(f.type)) {
return '{ "type" : "object", additionalProperties : true }';
}
switch (f.type) {
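
The switch from defaultTypes.includes(f.type) to Object.keys(defaultTypes).includes(f.type) above suggests defaultTypes is no longer a plain array of type names but an object keyed by type name, with the per-type metadata hanging off each key. Illustrative shape only, inferred from the call sites in this diff:

// Assumed structure; only the keys and flags used elsewhere in this diff are grounded.
const defaultTypes = {
  INT: { type: "INT", hasCheck: true },
  VARCHAR: { type: "VARCHAR", isSized: true, hasCheck: true },
};

// Membership test now goes through the keys:
Object.keys(defaultTypes).includes("VARCHAR"); // true
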
@@ -39,18 +39,26 @@ export function generateSchema(type) {
)}\n\t\t\t},\n\t\t\t"additionalProperties": false\n\t\t}`;
}
-export function getTypeString(field, dbms = "mysql", baseType = false) {
+export function getTypeString(
+field,
+currentDb,
+dbms = "mysql",
+baseType = false,
+) {
if (dbms === "mysql") {
if (field.type === "UUID") {
return `VARCHAR(36)`;
}
-if (hasPrecision(field.type) || isSized(field.type)) {
+if (
+dbToTypes[currentDb][field.type].isSized ||
+dbToTypes[currentDb][field.type].hasPrecision
+) {
return `${field.type}${field.size ? `(${field.size})` : ""}`;
}
if (field.type === "SET" || field.type === "ENUM") {
return `${field.type}(${field.values.map((v) => `"${v}"`).join(", ")})`;
}
-if (!defaultTypes.includes(field.type)) {
+if (!Object.keys(defaultTypes).includes(field.type)) {
return "JSON";
}
return field.type;
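
getTypeString now threads the active database through as currentDb and reads sizing metadata off the per-type descriptor instead of calling the removed isSized/hasPrecision helpers. A minimal sketch of how the MySQL branch would resolve a sized column under that scheme; the "mysql" key and the VARCHAR descriptor in the comment are assumptions, only the flag names come from this diff:

import { dbToTypes } from "../data/datatypes";

// Assumption: the database key is "mysql" and its VARCHAR entry looks roughly like
// { type: "VARCHAR", isSized: true, hasPrecision: false }.
const field = { name: "title", type: "VARCHAR", size: 255 };
const descriptor = dbToTypes["mysql"][field.type];

if (descriptor.isSized || descriptor.hasPrecision) {
  // Yields "VARCHAR(255)", matching what the old isSized()/hasPrecision() path produced.
  console.log(`${field.type}${field.size ? `(${field.size})` : ""}`);
}
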
@@ -76,7 +84,7 @@ export function getTypeString(field, dbms = "mysql", baseType = false) {
if (field.type === "DATETIME") {
return `timestamp`;
}
-if (isSized(field.type)) {
+if (dbToTypes[currentDb][field.type].isSized) {
const type =
field.type === "BINARY"
? "bit"
@@ -85,7 +93,7 @@ export function getTypeString(field, dbms = "mysql", baseType = false) {
: field.type.toLowerCase();
return `${type}(${field.size})`;
}
-if (hasPrecision(field.type) && field.size !== "") {
+if (dbToTypes[currentDb][field.type].hasPrecision && field.size !== "") {
return `${field.type}${field.size}`;
}
return field.type.toLowerCase();
@@ -121,7 +129,7 @@ export function getTypeString(field, dbms = "mysql", baseType = false) {
type = field.type;
break;
}
-if (isSized(field.type)) {
+if (dbToTypes[currentDb][field.type].isSized) {
return `${type}(${field.size})`;
}
@@ -167,13 +175,14 @@ export function jsonToMySQL(obj) {
(field) =>
`${field.comment === "" ? "" : `\t-- ${field.comment}\n`}\t\`${
field.name
-}\` ${getTypeString(field)}${field.notNull ? " NOT NULL" : ""}${
+}\` ${getTypeString(field, obj.database)}${field.notNull ? " NOT NULL" : ""}${
field.increment ? " AUTO_INCREMENT" : ""
}${field.unique ? " UNIQUE" : ""}${
field.default !== "" ? ` DEFAULT ${parseDefault(field)}` : ""
}${
field.check === "" || !hasCheck(field.type)
? !defaultTypes.includes(field.type)
field.check === "" ||
!dbToTypes[obj.database][field.type].hasCheck
? !Object.keys(defaultTypes).includes(field.type)
? ` CHECK(\n\t\tJSON_SCHEMA_VALID("${generateSchema(
obj.types.find(
(t) => t.name === field.type.toLowerCase(),
@@ -181,7 +190,7 @@ export function jsonToMySQL(obj) {
)}", \`${field.name}\`))`
: ""
: ` CHECK(${field.check})`
-}${field.comment ? ` COMMENT '${field.comment}'` : ''}`,
+}${field.comment ? ` COMMENT '${field.comment}'` : ""}`,
)
.join(",\n")}${
table.fields.filter((f) => f.primary).length > 0
@@ -190,7 +199,7 @@ export function jsonToMySQL(obj) {
.map((f) => `\`${f.name}\``)
.join(", ")})`
: ""
-}\n)${table.comment ? ` COMMENT='${table.comment}'` : ''};\n${
+}\n)${table.comment ? ` COMMENT='${table.comment}'` : ""};\n${
table.indices.length > 0
? `\n${table.indices.map(
(i) =>
@@ -233,14 +242,16 @@ export function jsonToPostgreSQL(obj) {
`${
type.comment === "" ? "" : `/**\n${type.comment}\n*/\n`
}CREATE TYPE ${type.name} AS (\n${type.fields
-.map((f) => `\t${f.name} ${getTypeString(f, "postgres")}`)
+.map(
+(f) => `\t${f.name} ${getTypeString(f, obj.database, "postgres")}`,
+)
.join("\n")}\n);`
);
} else {
return `${
type.comment === "" ? "" : `/**\n${type.comment}\n*/\n`
}CREATE TYPE ${type.name} AS (\n${type.fields
-.map((f) => `\t${f.name} ${getTypeString(f, "postgres")}`)
+.map((f) => `\t${f.name} ${getTypeString(f, obj.database, "postgres")}`)
.join("\n")}\n);`;
}
})}\n${obj.tables
@@ -263,12 +274,13 @@ export function jsonToPostgreSQL(obj) {
(field) =>
`${field.comment === "" ? "" : `\t-- ${field.comment}\n`}\t"${
field.name
}" ${getTypeString(field, "postgres")}${
}" ${getTypeString(field, obj.database, "postgres")}${
field.notNull ? " NOT NULL" : ""
}${
field.default !== "" ? ` DEFAULT ${parseDefault(field)}` : ""
}${
field.check === "" || !hasCheck(field.type)
field.check === "" ||
!dbToTypes[obj.database][field.type].hasCheck
? ""
: ` CHECK(${field.check})`
}`,
@@ -367,7 +379,8 @@ export function jsonToSQLite(obj) {
}" ${getSQLiteType(field)}${field.notNull ? " NOT NULL" : ""}${
field.unique ? " UNIQUE" : ""
}${field.default !== "" ? ` DEFAULT ${parseDefault(field)}` : ""}${
field.check === "" || !hasCheck(field.type)
field.check === "" ||
!dbToTypes[obj.database][field.type].hasCheck
? ""
: ` CHECK(${field.check})`
}`,
@@ -408,13 +421,14 @@ export function jsonToMariaDB(obj) {
(field) =>
`${field.comment === "" ? "" : `\t-- ${field.comment}\n`}\t\`${
field.name
-}\` ${getTypeString(field)}${field.notNull ? " NOT NULL" : ""}${
+}\` ${getTypeString(field, obj.database)}${field.notNull ? " NOT NULL" : ""}${
field.increment ? " AUTO_INCREMENT" : ""
}${field.unique ? " UNIQUE" : ""}${
field.default !== "" ? ` DEFAULT ${parseDefault(field)}` : ""
}${
field.check === "" || !hasCheck(field.type)
? !defaultTypes.includes(field.type)
field.check === "" ||
!dbToTypes[obj.database][field.type].hasCheck
? !Object.keys(defaultTypes).includes(field.type)
? ` CHECK(\n\t\tJSON_SCHEMA_VALID('${generateSchema(
obj.types.find(
(t) => t.name === field.type.toLowerCase(),
@@ -466,7 +480,7 @@ export function jsonToSQLServer(obj) {
}CREATE TYPE [${type.name}] FROM ${
type.fields.length < 0
? ""
-: `${getTypeString(type.fields[0], "mssql", true)}`
+: `${getTypeString(type.fields[0], obj.database, "mssql", true)}`
};\nGO\n`;
})
.join("\n")}\n${obj.tables
@@ -479,14 +493,15 @@ export function jsonToSQLServer(obj) {
(field) =>
`${field.comment === "" ? "" : `\t-- ${field.comment}\n`}\t[${
field.name
}] ${getTypeString(field, "mssql")}${
}] ${getTypeString(field, obj.database, "mssql")}${
field.notNull ? " NOT NULL" : ""
}${field.increment ? " IDENTITY" : ""}${
field.unique ? " UNIQUE" : ""
}${
field.default !== "" ? ` DEFAULT ${parseDefault(field)}` : ""
}${
field.check === "" || !hasCheck(field.type)
field.check === "" ||
!dbToTypes[obj.database][field.type].hasCheck
? ""
: ` CHECK(${field.check})`
}`,
@@ -522,41 +537,3 @@ export function jsonToSQLServer(obj) {
)
.join("\n")}`;
}
-export function isSized(type) {
-return ["CHAR", "VARCHAR", "BINARY", "VARBINARY", "TEXT"].includes(type);
-}
-export function hasPrecision(type) {
-return ["DOUBLE", "NUMERIC", "DECIMAL", "FLOAT"].includes(type);
-}
-export function hasCheck(type) {
-return [
-"INT",
-"SMALLINT",
-"BIGINT",
-"CHAR",
-"VARCHAR",
-"FLOAT",
-"DECIMAL",
-"DOUBLE",
-"NUMERIC",
-"REAL",
-].includes(type);
-}
-export function getSize(type) {
-switch (type) {
-case "CHAR":
-case "BINARY":
-return 1;
-case "VARCHAR":
-case "VARBINARY":
-return 255;
-case "TEXT":
-return 65535;
-default:
-return "";
-}
-}
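
The four helpers deleted above (isSized, hasPrecision, hasCheck, getSize) each encoded per-type knowledge as hardcoded name lists, which is exactly the data this commit moves into datatypes.js. The replacement shape is only partly visible in this diff (the flags and checkDefault appear at the call sites; a per-type default size does not), but a consolidated descriptor along these lines would cover all four:

// Hypothetical VARCHAR descriptor. The property names isSized/hasPrecision/hasCheck/checkDefault
// come from this diff; defaultSize is an assumption standing in for the removed getSize().
const VARCHAR = {
  type: "VARCHAR",
  isSized: true, // was: isSized("VARCHAR")
  hasPrecision: false, // was: hasPrecision("VARCHAR")
  hasCheck: true, // was: hasCheck("VARCHAR")
  defaultSize: 255, // was: getSize("VARCHAR")
  // Simplified form of the CHAR/VARCHAR rule from the removed switch in the first file.
  checkDefault: (field) => field.default.length <= field.size,
};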