// plugins/db/MakeDao.go

package db
import (
_ "embed"
"github.com/ssgo/db"
"github.com/ssgo/log"
"github.com/ssgo/u"
"os"
"path"
"regexp"
"strings"
"text/template"
)
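
// dao.js and dao_ext.js are the embedded code templates: dao.js is regenerated on
// every run, while dao_ext.js is only written when it does not exist yet, so manual
// extensions survive regeneration.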
//go:embed dao.js
var daoTpl string
//go:embed dao_ext.js
var daoExtTpl string
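
// DaoData is the root template context: one database together with all of its tables.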
type DaoData struct {
DBName string
FixedDBName string
RandomTag string
VersionField string
//TableNames []string
Tables []TableData
FixedTables []string
}
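
// TableData carries everything the templates need for one table: key metadata,
// generated field lists, and the version / valid-flag conventions detected for it.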
type TableData struct {
DBName string
TableName string
FixedTableName string
IsAutoId bool
AutoIdField string
AutoIdFieldType string
PrimaryKey *IndexField
UniqueKeys map[string]*IndexField
IndexKeys map[string]*IndexField
Fields []FieldData
PointFields []FieldData
//FieldsWithoutAutoId []FieldData
SelectFields string
ValidFieldConfig ValidFieldConfig
ValidField string
ValidWhere string
ValidSet string
InvalidSet string
VersionField string
HasVersion bool
AutoGenerated []string
AutoGeneratedOnUpdate []string
}
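
// TableDesc holds one column description in the shape of MySQL's `DESC` output;
// for SQLite it is filled by parsing the table's CREATE statement.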
type TableDesc struct {
Field string
Type string
Null string
Key string
Default string
Extra string
After string
}
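
// TableIndex mirrors one row of `SHOW INDEX FROM ...`; the underscored field names
// match the result columns so query results map onto it directly.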
type TableIndex struct {
Non_unique int
Key_name string
Seq_in_index int
Column_name string
}
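
// FieldData describes one generated struct field: its name, Go type, default literal,
// and any constants derived from an enum column.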
type FieldData struct {
Name string
Type string
Default string
Options map[string]string
}
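
// IndexField holds the pre-rendered SQL fragments and Go parameter lists for one
// (possibly composite) key; e.g. a key on hypothetical columns (userId, roleId)
// yields Where "(`userId`=? AND `roleId`=?)" and Args "userId, roleId".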
type IndexField struct {
Name string
Where string
Args string
Params string
ItemArgs string
StringArgs string
}
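
// ValidFieldConfig describes a valid-flag (soft delete) column: how to test that a
// row is valid and how to mark it valid or invalid in the generated SQL.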
type ValidFieldConfig struct {
Field string
Type string
ValidOperator string
ValidValue string
ValidSetOperator string
ValidSetValue string
InvalidSetOperator string
InvalidSetValue string
}
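
// DaoConfig controls generation: the version column name and the candidate
// valid-flag columns. A hypothetical custom configuration (the column names
// "rowVersion" and "enabled" are only examples):
//
//	conf := &DaoConfig{
//		VersionField: "rowVersion",
//		ValidFields: []ValidFieldConfig{{
//			Field: "enabled", Type: "tinyint",
//			ValidOperator: "!=", ValidValue: "0",
//			ValidSetOperator: "=", ValidSetValue: "1",
//			InvalidSetOperator: "=", InvalidSetValue: "0",
//		}},
//	}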
type DaoConfig struct {
VersionField string
ValidFields []ValidFieldConfig
}
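
// MakeDao inspects every table in conn's database and renders DAO code from the
// embedded dao.js template into outputPath; a companion dao_ext.js is created once
// for manual extensions. A nil conf or logger falls back to built-in defaults.
//
// A minimal usage sketch (the connection name "test" and the output path are
// illustrative assumptions, as is ssgo/db's GetDB accessor):
//
//	conn := db.GetDB("test", nil)
//	if err := MakeDao("dao/test", conn, nil, nil); err != nil {
//		log.DefaultLogger.Error(err.Error())
//	}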
func MakeDao(outputPath string, conn *db.DB, conf *DaoConfig, logger *log.Logger) error {
if conf == nil {
conf = &DaoConfig{}
}
if logger == nil {
logger = log.DefaultLogger
}
if conf.VersionField == "" {
conf.VersionField = "version"
}
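// Default valid-flag (soft delete) column candidates, used when none are configured.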
if conf.ValidFields == nil {
conf.ValidFields = []ValidFieldConfig{
{
Field: "isValid",
Type: "tinyint",
ValidOperator: "!=",
ValidValue: "0",
ValidSetOperator: "=",
ValidSetValue: "1",
InvalidSetOperator: "=",
InvalidSetValue: "0",
},
{
Field: "isActive",
Type: "tinyint",
ValidOperator: "!=",
ValidValue: "0",
ValidSetOperator: "=",
ValidSetValue: "1",
InvalidSetOperator: "=",
InvalidSetValue: "0",
},
{
Field: "deleted",
Type: "tinyint",
ValidOperator: "=",
ValidValue: "0",
ValidSetOperator: "=",
ValidSetValue: "0",
InvalidSetOperator: "=",
InvalidSetValue: "1",
},
{
Field: "status",
Type: "tinyint",
ValidOperator: "!=",
ValidValue: "0",
ValidSetOperator: "=",
ValidSetValue: "1",
InvalidSetOperator: "=",
InvalidSetValue: "0",
},
}
}
numberTester := regexp.MustCompile("^[0-9]+$")
for k, validFieldInfo := range conf.ValidFields {
// Non-numeric flag values are quoted so they can be embedded directly in the generated SQL.
if !numberTester.MatchString(validFieldInfo.ValidValue) {
conf.ValidFields[k].ValidValue = "'" + validFieldInfo.ValidValue + "'"
}
if !numberTester.MatchString(validFieldInfo.ValidSetValue) {
conf.ValidFields[k].ValidSetValue = "'" + validFieldInfo.ValidSetValue + "'"
}
if !numberTester.MatchString(validFieldInfo.InvalidSetValue) {
conf.ValidFields[k].InvalidSetValue = "'" + validFieldInfo.InvalidSetValue + "'"
}
}
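// Enumerate the database's tables; names starting with "_" or "." are skipped below.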
var tableListResult *db.QueryResult
if conn.Config.Type == "sqlite3" {
tableListResult = conn.Query("SELECT name FROM sqlite_master WHERE type='table'")
} else { // mysql and other databases that support SHOW TABLES
tableListResult = conn.Query("SHOW TABLES")
}
if tableListResult.Error != nil {
logger.Error("failed to connect to db: " + tableListResult.Error.Error())
return tableListResult.Error
}
tableNames := make([]string, 0)
fixedTables := make([]string, 0)
for _, table := range tableListResult.StringsOnC1() {
if strings.HasPrefix(table, "_") || strings.HasPrefix(table, ".") {
continue
}
tableNames = append(tableNames, table)
fixedTables = append(fixedTables, strings.ToUpper(table[0:1])+table[1:])
}
dbName := conn.Config.DB
dbFile := path.Join(outputPath, "dao.js")
u.CheckPath(dbFile)
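// Remove any previously generated a_*.go files from the output directory.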
if files, err := os.ReadDir(outputPath); err == nil {
for _, file := range files {
if strings.HasPrefix(file.Name(), "a_") && strings.HasSuffix(file.Name(), ".go") {
_ = os.Remove(path.Join(outputPath, file.Name()))
}
}
}
tableDataList := make([]TableData, 0)
enumTypeExists := map[string]bool{}
for i, table := range tableNames {
fixedTableName := fixedTables[i]
//tableFile := path.Join(outputPath, "a_"+table+".go")
descs := make([]TableDesc, 0)
indexs := make([]TableIndex, 0)
var err error
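// SQLite has no DESC / SHOW INDEX, so column and index metadata are parsed out of the
// CREATE TABLE / CREATE INDEX DDL stored in sqlite_master.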
if conn.Config.Type == "sqlite3" {
if table == "sqlite_sequence" {
continue
}
tableSql := conn.Query("SELECT `sql` FROM `sqlite_master` WHERE `type`='table' AND `name`='" + table + "'").StringOnR1C1()
tableM := ddlTableMatcher.FindStringSubmatch(u.String(tableSql))
//fmt.Println(u.JsonP(tableM), 111)
if tableM != nil {
fieldsM := ddlFieldMatcher.FindAllStringSubmatch(tableM[2], 2000)
//fmt.Println(tableM[2], u.JsonP(fieldsM), 111)
if fieldsM != nil {
for _, m := range fieldsM {
extra := ""
//if m[1] == "PRIMARY" && m[2] == "KEY" {
if strings.Contains(m[2], " AUTOINCREMENT") {
m[2] = strings.Replace(m[2], " AUTOINCREMENT", "", 1)
extra = "auto_increment"
}
if strings.Contains(m[2], " PRIMARY KEY") {
m[2] = strings.Replace(m[2], " PRIMARY KEY", "", 1)
indexs = append(indexs, TableIndex{
Non_unique: 0,
Key_name: "PRIMARY",
Column_name: m[1],
})
}
//if m[1] == "PRIMARY" && m[2] == "KEY" {
// keysM := ddlKeyMatcher.FindAllStringSubmatch(m[3], 20)
// if keysM != nil {
// for _, km := range keysM {
// indexs = append(indexs, TableIndex{
// Non_unique: 0,
// Key_name: "PRIMARY",
// Column_name: km[1],
// })
// //oldIndexInfos = append(oldIndexInfos, &TableKeyDesc{
// // Key_name: "PRIMARY",
// // Column_name: km[1],
// //})
// }
// }
//}
nullSet := "NULL"
//fmt.Println(" =====", m[0], m[1], m[2])
if ddlNotNullMatcher.MatchString(m[2]) {
m[2] = ddlNotNullMatcher.ReplaceAllString(m[2], "")
nullSet = "NOT NULL"
} else if ddlNullMatcher.MatchString(m[2]) {
m[2] = ddlNullMatcher.ReplaceAllString(m[2], "")
nullSet = "NULL"
}
//fmt.Println(" =====", m[2], "|", nullSet)
//if m[]
descs = append(descs, TableDesc{
Field: m[1],
Type: m[2],
Null: u.StringIf(nullSet == "NOT NULL", "NO", "YES"),
Key: "",
Default: "",
Extra: extra,
After: "",
})
//oldFieldList = append(oldFieldList, &TableFieldDesc{
// Field: m[1],
// Type: m[2],
// //Null: u.StringIf(strings.Contains(m[3], "NOT NULL"), "NO", "YES"),
// Null: u.StringIf(nullSet == "NOT NULL", "NO", "YES"),
// Key: "",
// Default: "",
// Extra: "",
// After: "",
//})
}
}
//fmt.Println(u.JsonP(fieldsM), 222)
}
// Read index information from the CREATE INDEX DDL stored in sqlite_master
for _, indexInfo := range conn.Query("SELECT `name`, `sql` FROM `sqlite_master` WHERE `type`='index' AND `tbl_name`='" + table + "'").StringMapResults() {
//fmt.Println(u.JsonP(indexInfo), 777)
indexM := ddlIndexMatcher.FindStringSubmatch(indexInfo["sql"])
if indexM != nil {
//fmt.Println(u.JsonP(indexM), 666)
isNotUnique := 1
if strings.Contains(indexM[1], "UNIQUE") {
isNotUnique = 0
}
indexFieldM := ddlIndexFieldMatcher.FindAllStringSubmatch(indexM[4], 20)
//fmt.Println(u.JsonP(indexFieldM), 555)
if indexFieldM != nil {
for _, km := range indexFieldM {
indexs = append(indexs, TableIndex{
Non_unique: isNotUnique,
Key_name: indexInfo["name"],
Column_name: km[1],
})
//oldIndexInfos = append(oldIndexInfos, &TableKeyDesc{
// Key_name: indexInfo["name"],
// Column_name: km[1],
//})
}
}
}
}
} else {
err = conn.Query("DESC `" + table + "`").To(&descs)
if err == nil {
err = conn.Query("SHOW INDEX FROM `" + table + "`").To(&indexs)
}
}
if err != nil {
logger.Error("failed to get table info: "+err.Error(), "db", dbName, "table", table)
continue
}
//fmt.Println(u.JsonP(indexs), 123)
tableData := TableData{
DBName: dbName,
TableName: table,
FixedTableName: fixedTableName,
IsAutoId: false,
AutoIdField: "",
AutoIdFieldType: "",
PrimaryKey: nil,
UniqueKeys: make(map[string]*IndexField),
IndexKeys: make(map[string]*IndexField),
Fields: make([]FieldData, 0),
PointFields: make([]FieldData, 0),
SelectFields: "",
ValidField: "",
ValidWhere: "",
ValidFieldConfig: ValidFieldConfig{},
ValidSet: "",
InvalidSet: "",
VersionField: conf.VersionField,
HasVersion: false,
AutoGenerated: make([]string, 0),
AutoGeneratedOnUpdate: make([]string, 0),
}
fields := make([]string, 0)
fieldTypesForId := map[string]string{}
idFields := make([]string, 0)
idFieldsUpper := make([]string, 0)
idFieldParams := make([]string, 0)
idFieldItemArgs := make([]string, 0)
uniqueFields := map[string][]string{}
uniqueFieldsUpper := map[string][]string{}
uniqueFieldParams := map[string][]string{}
uniqueFieldItemArgs := map[string][]string{}
indexFields := map[string][]string{}
indexFieldsUpper := map[string][]string{}
indexFieldParams := map[string][]string{}
indexFieldItemArgs := map[string][]string{}
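// Walk the column descriptions: detect auto-increment and generated columns, the
// version and valid-flag columns, and map SQL types to Go types.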
for _, desc := range descs {
if strings.Contains(desc.Extra, "auto_increment") {
tableData.IsAutoId = true
//tableData.AutoIdField = u.GetUpperName(desc.Field)
tableData.AutoIdField = desc.Field
tableData.AutoGenerated = append(tableData.AutoGenerated, desc.Field)
}
// MySQL reports generated defaults with Extra values such as "DEFAULT_GENERATED on update CURRENT_TIMESTAMP"
if strings.Contains(desc.Extra, "DEFAULT_GENERATED") {
if strings.Contains(desc.Extra, "on update") {
tableData.AutoGeneratedOnUpdate = append(tableData.AutoGeneratedOnUpdate, desc.Field)
} else {
tableData.AutoGenerated = append(tableData.AutoGenerated, desc.Field)
}
}
if desc.Field == conf.VersionField && (conn.Config.Type == "sqlite3" || (strings.Contains(desc.Type, "bigint") && strings.Contains(desc.Type, "unsigned"))) {
tableData.HasVersion = true
}
for _, validFieldInfo := range conf.ValidFields {
if desc.Field == validFieldInfo.Field && (conn.Config.Type == "sqlite3" || strings.Contains(desc.Type, validFieldInfo.Type)) {
tableData.ValidField = validFieldInfo.Field
tableData.ValidFieldConfig = validFieldInfo
tableData.ValidWhere = " AND `" + validFieldInfo.Field + "`" + validFieldInfo.ValidOperator + validFieldInfo.ValidValue
tableData.ValidSet = "`" + validFieldInfo.Field + "`" + validFieldInfo.ValidSetOperator + validFieldInfo.ValidSetValue
tableData.InvalidSet = "`" + validFieldInfo.Field + "`" + validFieldInfo.InvalidSetOperator + validFieldInfo.InvalidSetValue
}
}
fields = append(fields, desc.Field)
typ := ""
defaultValue := "0"
options := map[string]string{}
if strings.Contains(desc.Type, "bigint") {
typ = "int64"
} else if strings.Contains(desc.Type, "int") {
typ = "int"
} else if strings.Contains(desc.Type, "float") {
typ = "float32"
} else if strings.Contains(desc.Type, "double") {
typ = "float64"
} else if desc.Type == "datetime" {
typ = "Datetime"
defaultValue = "\"0000-00-00 00:00:00\""
} else if desc.Type == "date" {
typ = "Date"
defaultValue = "\"0000-00-00\""
} else if desc.Type == "time" {
typ = "Time"
defaultValue = "\"00:00:00\""
} else if strings.HasPrefix(desc.Type, "enum(") {
typ = u.GetUpperName(desc.Field)
if !enumTypeExists[typ] {
enumTypeExists[typ] = true
a := u.SplitWithoutNone(desc.Type[5:len(desc.Type)-1], ",")
for _, v := range a {
if strings.HasPrefix(v, "'") && strings.HasSuffix(v, "'") {
v = v[1 : len(v)-1]
}
options[typ+u.GetUpperName(v)] = v
}
}
defaultValue = "\"\""
} else {
typ = "string"
defaultValue = "\"\""
}
if strings.Contains(desc.Type, " unsigned") && strings.HasPrefix(typ, "int") {
typ = "u" + typ
}
fieldTypesForId[desc.Field] = typ // key/ID lookup parameters always use the non-pointer type
if strings.Contains(desc.Extra, "auto_increment") && tableData.IsAutoId {
tableData.AutoIdFieldType = typ
}
//if desc.Null == "YES" || desc.Default != nil || desc.Extra == "auto_increment" {
if desc.Null == "YES" || strings.Contains(desc.Extra, "auto_increment") {
tableData.PointFields = append(tableData.PointFields, FieldData{
//Name: u.GetUpperName(desc.Field),
Name: desc.Field,
Type: typ,
Default: defaultValue,
Options: options,
})
typ = "*" + typ
}
tableData.Fields = append(tableData.Fields, FieldData{
//Name: u.GetUpperName(desc.Field),
Name: desc.Field,
Type: typ,
Default: defaultValue,
Options: options,
})
//if desc.Key != "PRI" {
// tableData.FieldsWithoutAutoId = append(tableData.FieldsWithoutAutoId, FieldData{
// Name: u.GetUpperName(desc.Field),
// Type: typ,
// })
//}
}
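// Group index columns by key name: the primary key, unique keys, and ordinary indexes are collected separately.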
for _, index := range indexs {
if index.Key_name == "PRIMARY" {
idFields = append(idFields, index.Column_name)
idFieldsUpper = append(idFieldsUpper, u.GetUpperName(index.Column_name))
idFieldParams = append(idFieldParams, fixParamName(index.Column_name)+" "+fieldTypesForId[index.Column_name])
idFieldItemArgs = append(idFieldItemArgs, "this."+index.Column_name)
} else if index.Non_unique == 0 {
if uniqueFields[index.Key_name] == nil {
uniqueFields[index.Key_name] = make([]string, 0)
uniqueFieldsUpper[index.Key_name] = make([]string, 0)
uniqueFieldParams[index.Key_name] = make([]string, 0)
uniqueFieldItemArgs[index.Key_name] = make([]string, 0)
}
uniqueFields[index.Key_name] = append(uniqueFields[index.Key_name], index.Column_name)
uniqueFieldsUpper[index.Key_name] = append(uniqueFieldsUpper[index.Key_name], u.GetUpperName(index.Column_name))
uniqueFieldParams[index.Key_name] = append(uniqueFieldParams[index.Key_name], fixParamName(index.Column_name)+" "+fieldTypesForId[index.Column_name])
uniqueFieldItemArgs[index.Key_name] = append(uniqueFieldItemArgs[index.Key_name], u.StringIf(tableData.IsAutoId, "*", "")+"item."+u.GetUpperName(index.Column_name))
} else {
if indexFields[index.Key_name] == nil {
indexFields[index.Key_name] = make([]string, 0)
indexFieldsUpper[index.Key_name] = make([]string, 0)
indexFieldParams[index.Key_name] = make([]string, 0)
indexFieldItemArgs[index.Key_name] = make([]string, 0)
}
indexFields[index.Key_name] = append(indexFields[index.Key_name], index.Column_name)
indexFieldsUpper[index.Key_name] = append(indexFieldsUpper[index.Key_name], u.GetUpperName(index.Column_name))
indexFieldParams[index.Key_name] = append(indexFieldParams[index.Key_name], fixParamName(index.Column_name)+" "+fieldTypesForId[index.Column_name])
indexFieldItemArgs[index.Key_name] = append(indexFieldItemArgs[index.Key_name], u.StringIf(tableData.IsAutoId, "*", "")+"item."+u.GetUpperName(index.Column_name))
}
}
//fmt.Println("keys: ", u.JsonP(idFields), u.JsonP(uniqueFields), u.JsonP(indexFields))
if len(idFields) > 0 {
tableData.PrimaryKey = &IndexField{
Name: strings.Join(idFieldsUpper, ""),
Where: "(`" + strings.Join(idFields, "`=? AND `") + "`=?)",
Args: fixJoinParams(idFields, ", "),
Params: fixJoinParams(idFieldParams, ", "),
ItemArgs: strings.Join(idFieldItemArgs, ", "),
StringArgs: "\"" + fixJoinParams(idFields, "\", \"") + "\"",
}
// Register prefixes of the composite primary key as indexes for NewQuery().ByXXX
for i := len(idFields) - 1; i >= 0; i-- {
name2 := strings.Join(idFieldsUpper[0:i+1], "")
k2 := "Index_" + name2
// Create it only when neither a unique key nor an index key with this name exists
if tableData.UniqueKeys[k2] == nil && tableData.IndexKeys[k2] == nil {
tableData.IndexKeys[k2] = &IndexField{
Name: name2,
Where: "(`" + strings.Join(idFields[0:i+1], "`=? AND `") + "`=?)",
Args: fixJoinParams(idFields[0:i+1], ", "),
Params: fixJoinParams(idFieldParams[0:i+1], ", "),
ItemArgs: strings.Join(idFieldItemArgs[0:i+1], ", "),
StringArgs: "\"" + fixJoinParams(idFields[0:i+1], "\", \"") + "\"",
}
}
}
}
for k, fieldNames := range uniqueFields {
name1 := strings.Join(uniqueFieldsUpper[k], "")
k1 := "Unique_" + name1
if tableData.UniqueKeys[k1] == nil {
tableData.UniqueKeys[k1] = &IndexField{
Name: name1,
Where: "(`" + strings.Join(fieldNames, "`=? AND `") + "`=?)",
Args: fixJoinParams(fieldNames, ", "),
Params: fixJoinParams(uniqueFieldParams[k], ", "),
ItemArgs: strings.Join(uniqueFieldItemArgs[k], ", "),
StringArgs: "\"" + fixJoinParams(fieldNames, "\", \"") + "\"",
}
}
// Register prefixes of composite unique keys as indexes for NewQuery().ByXXX
for i := len(fieldNames) - 1; i >= 0; i-- {
name2 := strings.Join(uniqueFieldsUpper[k][0:i+1], "")
k2 := "Index_" + name2
// Create it only when neither a unique key nor an index key with this name exists
if tableData.UniqueKeys[k2] == nil && tableData.IndexKeys[k2] == nil {
tableData.IndexKeys[k2] = &IndexField{
Name: name2,
Where: "(`" + strings.Join(fieldNames[0:i+1], "`=? AND `") + "`=?)",
Args: fixJoinParams(fieldNames[0:i+1], ", "),
Params: fixJoinParams(uniqueFieldParams[k][0:i+1], ", "),
ItemArgs: strings.Join(uniqueFieldItemArgs[k][0:i+1], ", "),
StringArgs: "\"" + fixJoinParams(fieldNames[0:i+1], "\", \"") + "\"",
}
}
}
}
// Register the remaining (non-unique) indexes for NewQuery().ByXXX
for k, fieldNames := range indexFields {
for i := range fieldNames {
name := strings.Join(indexFieldsUpper[k][0:i+1], "")
k2 := "Index_" + name
// Create it only when neither a unique key nor an index key with this name exists
if tableData.UniqueKeys[k2] == nil && tableData.IndexKeys[k2] == nil {
tableData.IndexKeys[k2] = &IndexField{
Name: name,
Where: "(`" + strings.Join(fieldNames[0:i+1], "`=? AND `") + "`=?)",
Args: fixJoinParams(fieldNames[0:i+1], ", "),
Params: fixJoinParams(indexFieldParams[k][0:i+1], ", "),
ItemArgs: strings.Join(indexFieldItemArgs[k][0:i+1], ", "),
StringArgs: "\"" + fixJoinParams(fieldNames[0:i+1], "\", \"") + "\"",
}
}
}
}
tableData.SelectFields = "`" + strings.Join(fields, "`, `") + "`"
tableDataList = append(tableDataList, tableData)
}
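// Render the collected metadata through the embedded templates.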
daoData := DaoData{
DBName: dbName,
FixedDBName: u.GetUpperName(dbName),
VersionField: conf.VersionField,
//TableNames: tableNames,
Tables: tableDataList,
FixedTables: fixedTables,
}
daoFile := path.Join(outputPath, "dao.js")
err := writeWithTpl(daoFile, daoTpl, daoData)
//if err == nil {
// queryFile := path.Join(outputPath, "query.go")
// err = writeWithTpl(queryFile, queryTpl, daoData)
//}
if err != nil {
logger.Error("make dao failed: "+err.Error(), "db", dbName)
return err
}
logger.Info("make dao success", "db", dbName)
daoExtFile := path.Join(outputPath, "dao_ext.js")
if !u.FileExists(daoExtFile) {
_ = writeWithTpl(daoExtFile, daoExtTpl, daoData)
_ = os.Chmod(daoExtFile, 0664)
}
//err := writeWithTpl(tableFile, tableTpl, tableData)
//if err != nil {
// fmt.Println(" -", table, u.Red(err.Error()))
//} else {
// fmt.Println(" -", table, u.Green("OK"))
//}
return nil
}
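
// fixParamName renames generated parameter names that would collide with Go keywords (currently only "type").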
func fixParamName(in string) string {
switch in {
case "type":
return "typ"
}
return in
}
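
// fixJoinParams joins the field names with sep, applying fixParamName to each element.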
func fixJoinParams(elems []string, sep string) string {
a := make([]string, len(elems))
for i := len(elems) - 1; i >= 0; i-- {
a[i] = fixParamName(elems[i])
}
return strings.Join(a, sep)
}
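
// writeWithTpl renders tplContent with data into filename; existing files are made
// writable for the write and the result is left read-only (0444).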
func writeWithTpl(filename, tplContent string, data interface{}) error {
tpl, err := template.New(filename).Parse(tplContent)
if err == nil {
exists := u.FileExists(filename)
if exists {
_ = os.Chmod(filename, 0644)
}
var fp *os.File
fp, err = os.OpenFile(filename, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0444)
if err == nil {
err = tpl.Execute(fp, data)
_ = fp.Close()
}
if exists {
_ = os.Chmod(filename, 0444)
}
}
return err
}