add testcase

add dao support for db
Star 2024-03-09 13:43:14 +08:00
parent 658832bf18
commit b68fc4d2c1
13 changed files with 2558 additions and 83 deletions

663 db/MakeDao.go Normal file

@@ -0,0 +1,663 @@
package db
import (
_ "embed"
"github.com/ssgo/db"
"github.com/ssgo/log"
"github.com/ssgo/u"
"io/ioutil"
"os"
"path"
"regexp"
"strings"
"text/template"
)
//go:embed dao.js
var daoTpl string
//go:embed dao_ext.js
var daoExtTpl string
type DaoData struct {
DBName string
FixedDBName string
RandomTag string
VersionField string
//TableNames []string
Tables []TableData
FixedTables []string
}
type TableData struct {
DBName string
TableName string
FixedTableName string
IsAutoId bool
AutoIdField string
AutoIdFieldType string
PrimaryKey *IndexField
UniqueKeys map[string]*IndexField
IndexKeys map[string]*IndexField
Fields []FieldData
PointFields []FieldData
//FieldsWithoutAutoId []FieldData
SelectFields string
ValidFieldConfig ValidFieldConfig
ValidField string
ValidWhere string
ValidSet string
InvalidSet string
VersionField string
HasVersion bool
AutoGenerated []string
AutoGeneratedOnUpdate []string
}
type TableDesc struct {
Field string
Type string
Null string
Key string
Default string
Extra string
After string
}
type TableIndex struct {
Non_unique int
Key_name string
Seq_in_index int
Column_name string
}
type FieldData struct {
Name string
Type string
Default string
Options map[string]string
}
type IndexField struct {
Name string
Where string
Args string
Params string
ItemArgs string
StringArgs string
}
type ValidFieldConfig struct {
Field string
Type string
ValidOperator string
ValidValue string
ValidSetOperator string
ValidSetValue string
InvalidSetOperator string
InvalidSetValue string
}
type DaoConfig struct {
VersionField string
ValidFields []ValidFieldConfig
}
func MakeDao(outputPath string, conn *db.DB, conf *DaoConfig, logger *log.Logger) error {
if conf == nil {
conf = &DaoConfig{}
}
if logger == nil {
logger = log.DefaultLogger
}
if conf.VersionField == "" {
conf.VersionField = "version"
}
if conf.ValidFields == nil {
conf.ValidFields = []ValidFieldConfig{
{
Field: "isValid",
Type: "tinyint",
ValidOperator: "!=",
ValidValue: "0",
ValidSetOperator: "=",
ValidSetValue: "1",
InvalidSetOperator: "=",
InvalidSetValue: "0",
},
{
Field: "isActive",
Type: "tinyint",
ValidOperator: "!=",
ValidValue: "0",
ValidSetOperator: "=",
ValidSetValue: "1",
InvalidSetOperator: "=",
InvalidSetValue: "0",
},
{
Field: "deleted",
Type: "tinyint",
ValidOperator: "=",
ValidValue: "0",
ValidSetOperator: "=",
ValidSetValue: "0",
InvalidSetOperator: "=",
InvalidSetValue: "1",
},
{
Field: "status",
Type: "tinyint",
ValidOperator: "!=",
ValidValue: "0",
ValidSetOperator: "=",
ValidSetValue: "1",
InvalidSetOperator: "=",
InvalidSetValue: "0",
},
}
}
numberTester := regexp.MustCompile("^[0-9]+$")
for k, validFieldInfo := range conf.ValidFields {
if !numberTester.MatchString(validFieldInfo.ValidValue) {
conf.ValidFields[k].ValidValue = "'" + validFieldInfo.ValidValue + "'"
}
if !numberTester.MatchString(validFieldInfo.ValidSetValue) {
conf.ValidFields[k].ValidSetValue = "'" + validFieldInfo.ValidSetValue + "'"
}
if !numberTester.MatchString(validFieldInfo.InvalidSetValue) {
conf.ValidFields[k].InvalidSetValue = "'" + validFieldInfo.InvalidSetValue + "'"
}
}
var tableListResult *db.QueryResult
if conn.Config.Type == "sqlite3" {
tableListResult = conn.Query("SELECT name FROM sqlite_master WHERE type='table'")
} else { //if conn.Config.Type == "mysql" {
tableListResult = conn.Query("SHOW TABLES")
}
if tableListResult.Error != nil {
logger.Error("failed to connect to db: " + tableListResult.Error.Error())
return tableListResult.Error
}
tableNames := make([]string, 0)
fixedTables := make([]string, 0)
for _, table := range tableListResult.StringsOnC1() {
if strings.HasPrefix(table, "_") || strings.HasPrefix(table, ".") {
continue
}
tableNames = append(tableNames, table)
fixedTables = append(fixedTables, strings.ToUpper(table[0:1])+table[1:])
}
dbName := conn.Config.DB
dbFile := path.Join(outputPath, "dao.js")
u.CheckPath(dbFile)
if files, err := ioutil.ReadDir(outputPath); err == nil {
for _, file := range files {
if strings.HasPrefix(file.Name(), "a_") && strings.HasSuffix(file.Name(), ".go") {
_ = os.Remove(path.Join(outputPath, file.Name()))
}
}
}
tableDataList := make([]TableData, 0)
enumTypeExists := map[string]bool{}
for i, table := range tableNames {
fixedTableName := fixedTables[i]
//tableFile := path.Join(outputPath, "a_"+table+".go")
descs := make([]TableDesc, 0)
indexs := make([]TableIndex, 0)
var err error
if conn.Config.Type == "sqlite3" {
if table == "sqlite_sequence" {
continue
}
tableSql := conn.Query("SELECT `sql` FROM `sqlite_master` WHERE `type`='table' AND `name`='" + table + "'").StringOnR1C1()
tableM := ddlTableMatcher.FindStringSubmatch(u.String(tableSql))
//fmt.Println(u.JsonP(tableM), 111)
if tableM != nil {
fieldsM := ddlFieldMatcher.FindAllStringSubmatch(tableM[2], 2000)
//fmt.Println(tableM[2], u.JsonP(fieldsM), 111)
if fieldsM != nil {
for _, m := range fieldsM {
extra := ""
//if m[1] == "PRIMARY" && m[2] == "KEY" {
if strings.Contains(m[2], " AUTOINCREMENT") {
m[2] = strings.Replace(m[2], " AUTOINCREMENT", "", 1)
extra = "auto_increment"
}
if strings.Contains(m[2], " PRIMARY KEY") {
m[2] = strings.Replace(m[2], " PRIMARY KEY", "", 1)
indexs = append(indexs, TableIndex{
Non_unique: 0,
Key_name: "PRIMARY",
Column_name: m[1],
})
}
//if m[1] == "PRIMARY" && m[2] == "KEY" {
// keysM := ddlKeyMatcher.FindAllStringSubmatch(m[3], 20)
// if keysM != nil {
// for _, km := range keysM {
// indexs = append(indexs, TableIndex{
// Non_unique: 0,
// Key_name: "PRIMARY",
// Column_name: km[1],
// })
// //oldIndexInfos = append(oldIndexInfos, &TableKeyDesc{
// // Key_name: "PRIMARY",
// // Column_name: km[1],
// //})
// }
// }
//}
nullSet := "NULL"
//fmt.Println(" =====", m[0], m[1], m[2])
if ddlNotNullMatcher.MatchString(m[2]) {
m[2] = ddlNotNullMatcher.ReplaceAllString(m[2], "")
nullSet = "NOT NULL"
} else if ddlNullMatcher.MatchString(m[2]) {
m[2] = ddlNullMatcher.ReplaceAllString(m[2], "")
nullSet = "NULL"
}
//fmt.Println(" =====", m[2], "|", nullSet)
//if m[]
descs = append(descs, TableDesc{
Field: m[1],
Type: m[2],
Null: u.StringIf(nullSet == "NOT NULL", "NO", "YES"),
Key: "",
Default: "",
Extra: extra,
After: "",
})
//oldFieldList = append(oldFieldList, &TableFieldDesc{
// Field: m[1],
// Type: m[2],
// //Null: u.StringIf(strings.Contains(m[3], "NOT NULL"), "NO", "YES"),
// Null: u.StringIf(nullSet == "NOT NULL", "NO", "YES"),
// Key: "",
// Default: "",
// Extra: "",
// After: "",
//})
}
}
//fmt.Println(u.JsonP(fieldsM), 222)
}
// read index information
for _, indexInfo := range conn.Query("SELECT `name`, `sql` FROM `sqlite_master` WHERE `type`='index' AND `tbl_name`='" + table + "'").StringMapResults() {
//fmt.Println(u.JsonP(indexInfo), 777)
indexM := ddlIndexMatcher.FindStringSubmatch(indexInfo["sql"])
if indexM != nil {
//fmt.Println(u.JsonP(indexM), 666)
isNotUnique := 1
if strings.Contains(indexM[1], "UNIQUE") {
isNotUnique = 0
}
indexFieldM := ddlIndexFieldMatcher.FindAllStringSubmatch(indexM[4], 20)
//fmt.Println(u.JsonP(indexFieldM), 555)
if indexFieldM != nil {
for _, km := range indexFieldM {
indexs = append(indexs, TableIndex{
Non_unique: isNotUnique,
Key_name: indexInfo["name"],
Column_name: km[1],
})
//oldIndexInfos = append(oldIndexInfos, &TableKeyDesc{
// Key_name: indexInfo["name"],
// Column_name: km[1],
//})
}
}
}
}
} else {
err = conn.Query("DESC `" + table + "`").To(&descs)
if err == nil {
err = conn.Query("SHOW INDEX FROM `" + table + "`").To(&indexs)
}
}
if err != nil {
logger.Error("failed to get table info: "+err.Error(), "db", dbName, "table", table)
continue
}
//fmt.Println(u.JsonP(indexs), 123)
tableData := TableData{
DBName: dbName,
TableName: table,
FixedTableName: fixedTableName,
IsAutoId: false,
AutoIdField: "",
AutoIdFieldType: "",
PrimaryKey: nil,
UniqueKeys: make(map[string]*IndexField),
IndexKeys: make(map[string]*IndexField),
Fields: make([]FieldData, 0),
PointFields: make([]FieldData, 0),
SelectFields: "",
ValidField: "",
ValidWhere: "",
ValidFieldConfig: ValidFieldConfig{},
ValidSet: "",
InvalidSet: "",
VersionField: conf.VersionField,
HasVersion: false,
AutoGenerated: make([]string, 0),
AutoGeneratedOnUpdate: make([]string, 0),
}
fields := make([]string, 0)
fieldTypesForId := map[string]string{}
idFields := make([]string, 0)
idFieldsUpper := make([]string, 0)
idFieldParams := make([]string, 0)
idFieldItemArgs := make([]string, 0)
uniqueFields := map[string][]string{}
uniqueFieldsUpper := map[string][]string{}
uniqueFieldParams := map[string][]string{}
uniqueFieldItemArgs := map[string][]string{}
indexFields := map[string][]string{}
indexFieldsUpper := map[string][]string{}
indexFieldParams := map[string][]string{}
indexFieldItemArgs := map[string][]string{}
for _, desc := range descs {
if strings.Contains(desc.Extra, "auto_increment") {
tableData.IsAutoId = true
//tableData.AutoIdField = u.GetUpperName(desc.Field)
tableData.AutoIdField = desc.Field
tableData.AutoGenerated = append(tableData.AutoGenerated, desc.Field)
}
// DEFAULT_GENERATED on update CURRENT_TIMESTAMP
if strings.Contains(desc.Extra, "DEFAULT_GENERATED") {
if strings.Contains(desc.Extra, "on update") {
tableData.AutoGeneratedOnUpdate = append(tableData.AutoGeneratedOnUpdate, desc.Field)
} else {
tableData.AutoGenerated = append(tableData.AutoGenerated, desc.Field)
}
}
if desc.Field == conf.VersionField && (conn.Config.Type == "sqlite3" || (strings.Contains(desc.Type, "bigint") && strings.Contains(desc.Type, "unsigned"))) {
tableData.HasVersion = true
}
for _, validFieldInfo := range conf.ValidFields {
if desc.Field == validFieldInfo.Field && (conn.Config.Type == "sqlite3" || strings.Contains(desc.Type, validFieldInfo.Type)) {
tableData.ValidField = validFieldInfo.Field
tableData.ValidFieldConfig = validFieldInfo
tableData.ValidWhere = " AND `" + validFieldInfo.Field + "`" + validFieldInfo.ValidOperator + validFieldInfo.ValidValue
tableData.ValidSet = "`" + validFieldInfo.Field + "`" + validFieldInfo.ValidSetOperator + validFieldInfo.ValidSetValue
tableData.InvalidSet = "`" + validFieldInfo.Field + "`" + validFieldInfo.InvalidSetOperator + validFieldInfo.InvalidSetValue
}
}
fields = append(fields, desc.Field)
typ := ""
defaultValue := "0"
options := map[string]string{}
if strings.Contains(desc.Type, "bigint") {
typ = "int64"
} else if strings.Contains(desc.Type, "int") {
typ = "int"
} else if strings.Contains(desc.Type, "float") {
typ = "float32"
} else if strings.Contains(desc.Type, "double") {
typ = "float64"
} else if desc.Type == "datetime" {
typ = "Datetime"
defaultValue = "\"0000-00-00 00:00:00\""
} else if desc.Type == "date" {
typ = "Date"
defaultValue = "\"0000-00-00\""
} else if desc.Type == "time" {
typ = "Time"
defaultValue = "\"00:00:00\""
} else if strings.HasPrefix(desc.Type, "enum(") {
typ = u.GetUpperName(desc.Field)
if !enumTypeExists[typ] {
enumTypeExists[typ] = true
a := u.SplitWithoutNone(desc.Type[5:len(desc.Type)-1], ",")
for _, v := range a {
if strings.HasPrefix(v, "'") && strings.HasSuffix(v, "'") {
v = v[1 : len(v)-1]
}
options[typ+u.GetUpperName(v)] = v
}
}
defaultValue = "\"\""
} else {
typ = "string"
defaultValue = "\"\""
}
if strings.Contains(desc.Type, " unsigned") && strings.HasPrefix(typ, "int") {
typ = "u" + typ
}
fieldTypesForId[desc.Field] = typ // types used for ID parameters are recorded without the pointer prefix
if strings.Contains(desc.Extra, "auto_increment") && tableData.IsAutoId {
tableData.AutoIdFieldType = typ
}
//if desc.Null == "YES" || desc.Default != nil || desc.Extra == "auto_increment" {
if desc.Null == "YES" || strings.Contains(desc.Extra, "auto_increment") {
tableData.PointFields = append(tableData.PointFields, FieldData{
//Name: u.GetUpperName(desc.Field),
Name: desc.Field,
Type: typ,
Default: defaultValue,
Options: options,
})
typ = "*" + typ
}
tableData.Fields = append(tableData.Fields, FieldData{
//Name: u.GetUpperName(desc.Field),
Name: desc.Field,
Type: typ,
Default: defaultValue,
Options: options,
})
//if desc.Key != "PRI" {
// tableData.FieldsWithoutAutoId = append(tableData.FieldsWithoutAutoId, FieldData{
// Name: u.GetUpperName(desc.Field),
// Type: typ,
// })
//}
}
for _, index := range indexs {
if index.Key_name == "PRIMARY" {
idFields = append(idFields, index.Column_name)
idFieldsUpper = append(idFieldsUpper, u.GetUpperName(index.Column_name))
idFieldParams = append(idFieldParams, fixParamName(index.Column_name)+" "+fieldTypesForId[index.Column_name])
idFieldItemArgs = append(idFieldItemArgs, "this."+index.Column_name)
} else if index.Non_unique == 0 {
if uniqueFields[index.Key_name] == nil {
uniqueFields[index.Key_name] = make([]string, 0)
uniqueFieldsUpper[index.Key_name] = make([]string, 0)
uniqueFieldParams[index.Key_name] = make([]string, 0)
uniqueFieldItemArgs[index.Key_name] = make([]string, 0)
}
uniqueFields[index.Key_name] = append(uniqueFields[index.Key_name], index.Column_name)
uniqueFieldsUpper[index.Key_name] = append(uniqueFieldsUpper[index.Key_name], u.GetUpperName(index.Column_name))
uniqueFieldParams[index.Key_name] = append(uniqueFieldParams[index.Key_name], fixParamName(index.Column_name)+" "+fieldTypesForId[index.Column_name])
uniqueFieldItemArgs[index.Key_name] = append(uniqueFieldItemArgs[index.Key_name], u.StringIf(tableData.IsAutoId, "*", "")+"item."+u.GetUpperName(index.Column_name))
} else {
if indexFields[index.Key_name] == nil {
indexFields[index.Key_name] = make([]string, 0)
indexFieldsUpper[index.Key_name] = make([]string, 0)
indexFieldParams[index.Key_name] = make([]string, 0)
indexFieldItemArgs[index.Key_name] = make([]string, 0)
}
indexFields[index.Key_name] = append(indexFields[index.Key_name], index.Column_name)
indexFieldsUpper[index.Key_name] = append(indexFieldsUpper[index.Key_name], u.GetUpperName(index.Column_name))
indexFieldParams[index.Key_name] = append(indexFieldParams[index.Key_name], fixParamName(index.Column_name)+" "+fieldTypesForId[index.Column_name])
indexFieldItemArgs[index.Key_name] = append(indexFieldItemArgs[index.Key_name], u.StringIf(tableData.IsAutoId, "*", "")+"item."+u.GetUpperName(index.Column_name))
}
}
//fmt.Println("keys: ", u.JsonP(idFields), u.JsonP(uniqueFields), u.JsonP(indexFields))
if len(idFields) > 0 {
tableData.PrimaryKey = &IndexField{
Name: strings.Join(idFieldsUpper, ""),
Where: "(`" + strings.Join(idFields, "`=? AND `") + "`=?)",
Args: fixJoinParams(idFields, ", "),
Params: fixJoinParams(idFieldParams, ", "),
ItemArgs: strings.Join(idFieldItemArgs, ", "),
StringArgs: "\"" + fixJoinParams(idFields, "\", \"") + "\"",
}
// add index prefixes of the composite primary key to NewQuery().ByXXX
for i := len(idFields) - 1; i >= 0; i-- {
name2 := strings.Join(idFieldsUpper[0:i+1], "")
k2 := "Index_" + name2
// create only if it exists in neither the unique keys nor the index keys
if tableData.UniqueKeys[k2] == nil && tableData.IndexKeys[k2] == nil {
tableData.IndexKeys[k2] = &IndexField{
Name: name2,
Where: "(`" + strings.Join(idFields[0:i+1], "`=? AND `") + "`=?)",
Args: fixJoinParams(idFields[0:i+1], ", "),
Params: fixJoinParams(idFieldParams[0:i+1], ", "),
ItemArgs: strings.Join(idFieldItemArgs[0:i+1], ", "),
StringArgs: "\"" + fixJoinParams(idFields[0:i+1], "\", \"") + "\"",
}
}
}
}
for k, fieldNames := range uniqueFields {
name1 := strings.Join(uniqueFieldsUpper[k], "")
k1 := "Unique_" + name1
if tableData.UniqueKeys[k1] == nil {
tableData.UniqueKeys[k1] = &IndexField{
Name: name1,
Where: "(`" + strings.Join(fieldNames, "`=? AND `") + "`=?)",
Args: fixJoinParams(fieldNames, ", "),
Params: fixJoinParams(uniqueFieldParams[k], ", "),
ItemArgs: strings.Join(uniqueFieldItemArgs[k], ", "),
StringArgs: "\"" + fixJoinParams(fieldNames, "\", \"") + "\"",
}
}
// add index prefixes of the composite unique keys to NewQuery().ByXXX
for i := len(fieldNames) - 1; i >= 0; i-- {
name2 := strings.Join(uniqueFieldsUpper[k][0:i+1], "")
k2 := "Index_" + name2
// create only if it exists in neither the unique keys nor the index keys
if tableData.UniqueKeys[k2] == nil && tableData.IndexKeys[k2] == nil {
tableData.IndexKeys[k2] = &IndexField{
Name: name2,
Where: "(`" + strings.Join(fieldNames[0:i+1], "`=? AND `") + "`=?)",
Args: fixJoinParams(fieldNames[0:i+1], ", "),
Params: fixJoinParams(uniqueFieldParams[k][0:i+1], ", "),
ItemArgs: strings.Join(uniqueFieldItemArgs[k][0:i+1], ", "),
StringArgs: "\"" + fixJoinParams(fieldNames[0:i+1], "\", \"") + "\"",
}
}
}
}
// add the remaining indexes to NewQuery().ByXXX
for k, fieldNames := range indexFields {
for i := range fieldNames {
name := strings.Join(indexFieldsUpper[k][0:i+1], "")
k2 := "Index_" + name
// create only if it exists in neither the unique keys nor the index keys
if tableData.UniqueKeys[k2] == nil && tableData.IndexKeys[k2] == nil {
tableData.IndexKeys[k2] = &IndexField{
Name: name,
Where: "(`" + strings.Join(fieldNames[0:i+1], "`=? AND `") + "`=?)",
Args: fixJoinParams(fieldNames[0:i+1], ", "),
Params: fixJoinParams(indexFieldParams[k][0:i+1], ", "),
ItemArgs: strings.Join(indexFieldItemArgs[k][0:i+1], ", "),
StringArgs: "\"" + fixJoinParams(fieldNames[0:i+1], "\", \"") + "\"",
}
}
}
}
tableData.SelectFields = "`" + strings.Join(fields, "`, `") + "`"
tableDataList = append(tableDataList, tableData)
}
daoData := DaoData{
DBName: dbName,
FixedDBName: u.GetUpperName(dbName),
VersionField: conf.VersionField,
//TableNames: tableNames,
Tables: tableDataList,
FixedTables: fixedTables,
}
daoFile := path.Join(outputPath, "dao.js")
err := writeWithTpl(daoFile, daoTpl, daoData)
//if err == nil {
// queryFile := path.Join(outputPath, "query.go")
// err = writeWithTpl(queryFile, queryTpl, daoData)
//}
if err != nil {
logger.Error("make dao failed: "+err.Error(), "db", dbName)
return err
}
logger.Info("make dao success", "db", dbName)
daoExtFile := path.Join(outputPath, "dao_ext.js")
if !u.FileExists(daoExtFile) {
_ = writeWithTpl(daoExtFile, daoExtTpl, daoData)
_ = os.Chmod(daoExtFile, 0664)
}
//err := writeWithTpl(tableFile, tableTpl, tableData)
//if err != nil {
// fmt.Println(" -", table, u.Red(err.Error()))
//} else {
// fmt.Println(" -", table, u.Green("OK"))
//}
return nil
}
func fixParamName(in string) string {
switch in {
case "type":
return "typ"
}
return in
}
func fixJoinParams(elems []string, sep string) string {
a := make([]string, len(elems))
for i := len(elems) - 1; i >= 0; i-- {
a[i] = fixParamName(elems[i])
}
return strings.Join(a, sep)
}
func writeWithTpl(filename, tplContent string, data interface{}) error {
tpl, err := template.New(filename).Parse(tplContent)
if err == nil {
exists := u.FileExists(filename)
if exists {
_ = os.Chmod(filename, 0644)
}
var fp *os.File
fp, err = os.OpenFile(filename, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0444)
if err == nil {
err = tpl.Execute(fp, data)
_ = fp.Close()
}
if exists {
_ = os.Chmod(filename, 0444)
}
}
return err
}
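// Usage sketch (illustrative only, not exercised by this commit): generate
// dao.js/dao_ext.js for every table of a configured database. The connection
// string and the output directory below are placeholders.
func exampleMakeDaoUsage() {
    conn := db.GetDB("mysql://user:password@127.0.0.1:3306/demo", nil)
    // a nil *DaoConfig falls back to the defaults above: version field
    // "version" plus the isValid/isActive/deleted/status candidates
    if err := MakeDao("./dao", conn, nil, log.DefaultLogger); err != nil {
        log.DefaultLogger.Error("make dao failed: " + err.Error())
    }
}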

622 db/MakeTable.go Normal file

@@ -0,0 +1,622 @@
package db
import (
_ "embed"
"fmt"
"github.com/ssgo/db"
"github.com/ssgo/log"
"github.com/ssgo/u"
"os"
"regexp"
"strings"
"text/template"
)
type TableFieldDesc struct {
Field string
Type string
Null string
Key string
Default string
Extra string
After string
}
type TableKeyDesc struct {
Key_name string
Column_name string
}
type TableField struct {
Name string
Type string
Index string
IndexGroup string
Default string
Comment string
IsNull bool
Extra string
desc string
Null string
}
type ERGroup struct {
Group string
Comment string
Tables []TableStruct
}
type TableStruct struct {
Name string
Comment string
Fields []TableField
}
//go:embed er.html
var erTpl string
func (field *TableField) Parse(tableType string) {
//if field.Index == "autoId" {
// field.Type += " unsigned"
//}
if field.IsNull {
field.Null = "NULL"
} else {
field.Null = "NOT NULL"
}
if tableType == "sqlite3" {
// sqlite3 cannot alter columns, so NULL is used uniformly
field.Null = "NULL"
if field.Extra == "AUTO_INCREMENT" {
field.Extra = "PRIMARY KEY AUTOINCREMENT"
field.Type = "integer"
field.Null = "NOT NULL"
}
}
a := make([]string, 0)
a = append(a, fmt.Sprintf("`%s` %s", field.Name, field.Type))
if tableType == "mysql" {
lowerType := strings.ToLower(field.Type)
if strings.Contains(lowerType, "varchar") || strings.Contains(lowerType, "text") {
a = append(a, " COLLATE utf8mb4_general_ci")
}
}
//if field.Index == "autoId" {
// a = append(a, " AUTO_INCREMENT")
// field.Index = "pk"
// //a = append(a, " NOT NULL")
//}
//if field.Index == "uniqueId" {
// field.Index = "pk"
// //a = append(a, " NOT NULL")
//}
if field.Extra != "" {
a = append(a, " "+field.Extra)
}
a = append(a, " "+field.Null)
if field.Default != "" {
if strings.Contains(field.Default, "CURRENT_TIMESTAMP") {
a = append(a, " DEFAULT "+field.Default)
} else {
a = append(a, " DEFAULT '"+field.Default+"'")
}
}
if tableType == "sqlite3" {
field.Comment = ""
field.Type = "numeric"
} else if tableType == "mysql" {
if field.Comment != "" {
a = append(a, " COMMENT '"+field.Comment+"'")
}
}
field.desc = strings.Join(a, "")
}
var ddlTableMatcher = regexp.MustCompile("(?is)^\\s*CREATE\\s+TABLE\\s+`?([\\w]+)`?\\s*\\(\\s*(.*?)\\s*\\);?\\s*$")
var ddlFieldMatcher = regexp.MustCompile("(?s)\\s*[`\\[]?([\\w]+)[`\\]]?\\s+\\[?([\\w() ]+)\\]?\\s*(.*?)(,|$)")
var ddlKeyMatcher = regexp.MustCompile("[`\\[]?([\\w]+)[`\\]]?\\s*(,|\\))")
var ddlNotNullMatcher = regexp.MustCompile("(?i)\\s+NOT NULL")
var ddlNullMatcher = regexp.MustCompile("(?i)\\s+NULL")
//var ddlDefaultMatcher = regexp.MustCompile("(?i)\\s+DEFAULT\\s+(.*?)$")
var ddlIndexMatcher = regexp.MustCompile("(?is)^\\s*CREATE\\s+([A-Za-z ]+)\\s+`?([\\w]+)`?\\s+ON\\s+`?([\\w]+)`?\\s*\\(\\s*(.*?)\\s*\\);?\\s*$")
var ddlIndexFieldMatcher = regexp.MustCompile("[`\\[]?([\\w]+)[`\\]]?\\s*(,|$)")
// MakeER generates the ER diagram file
func MakeER(groups []ERGroup, outputFile *string, tplFile *string, logger *log.Logger) error {
tplStr := ""
if tplFile == nil || *tplFile == "" {
tplStr = erTpl
} else {
tplStr = *tplFile
}
erOutputFile := "er.html"
if outputFile != nil && *outputFile != "" {
erOutputFile = *outputFile
}
tpl := template.New(erOutputFile).Funcs(template.FuncMap{
"short": func(in string) string {
switch in {
case "NULL":
return "n"
case "NOT NULL":
return "nn"
case "AUTO_INCREMENT":
return "ai"
case "CURRENT_TIMESTAMP":
return "ct"
case "CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP":
return "ctu"
}
return in
},
})
var err error
tpl, err = tpl.Parse(tplStr)
if err == nil {
var fp *os.File
fp, err = os.OpenFile(erOutputFile, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0666)
if err == nil {
err = tpl.Execute(fp, map[string]interface{}{
"title": "ER",
"groups": groups,
})
_ = fp.Close()
}
}
if err != nil {
logger.Error(err.Error())
}
return err
}
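// Usage sketch (illustrative only): render an ER diagram for a set of table
// groups; the group name, comment and output file name are placeholders.
func exampleMakeERUsage(tables []TableStruct) {
    groups := []ERGroup{
        {Group: "account", Comment: "account related tables", Tables: tables},
    }
    out := "er.html"
    // a nil template argument falls back to the embedded er.html template
    _ = MakeER(groups, &out, nil, log.DefaultLogger)
}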
func MakeTable(conn *db.DB, table *TableStruct, logger *log.Logger) ([]string, error) {
//fmt.Println(u.JsonP(ddlKeyMatcher.FindAllStringSubmatch("(`key`,id, `name` )", 100)), "====================")
fieldSets := make([]string, 0)
//fieldSetBy := make(map[string]string)
pks := make([]string, 0)
keySets := make([]string, 0)
keySetBy := make(map[string]string)
keySetFields := make(map[string]string)
for i, field := range table.Fields {
field.Parse(conn.Config.Type)
table.Fields[i] = field
if conn.Config.Type == "sqlite3" {
if field.Index == "pk" && field.Extra != "PRIMARY KEY AUTOINCREMENT" {
// sqlite3 uses a unique index instead of the primary key here
field.Index = "unique"
field.IndexGroup = "0"
field.Null = "NULL"
}
}
switch field.Index {
case "pk":
if conn.Config.Type == "sqlite3" {
if field.Extra != "PRIMARY KEY AUTOINCREMENT" {
pks = append(pks, field.Name)
}
} else {
pks = append(pks, field.Name)
}
case "unique":
keyName := fmt.Sprint("uk_", table.Name, "_", field.Name)
if field.IndexGroup != "" {
keyName = fmt.Sprint("uk_", table.Name, "_", field.IndexGroup)
}
if keySetBy[keyName] != "" {
keySetFields[keyName] += " " + field.Name
// composite index
if conn.Config.Type == "sqlite3" {
keySetBy[keyName] = strings.Replace(keySetBy[keyName], ")", ", `"+field.Name+"`)", 1)
} else if conn.Config.Type == "mysql" {
keySetBy[keyName] = strings.Replace(keySetBy[keyName], ") COMMENT", ", `"+field.Name+"`) COMMENT", 1)
}
} else {
keySetFields[keyName] = field.Name
keySet := ""
if conn.Config.Type == "sqlite3" {
keySet = fmt.Sprintf("CREATE UNIQUE INDEX `%s` ON `%s` (`%s`)", keyName, table.Name, field.Name)
} else if conn.Config.Type == "mysql" {
keySet = fmt.Sprintf("UNIQUE KEY `%s` (`%s`) COMMENT '%s'", keyName, field.Name, field.Comment)
}
keySets = append(keySets, keySet)
keySetBy[keyName] = keySet
}
case "fulltext":
if conn.Config.Type == "mysql" {
keyName := fmt.Sprint("tk_", table.Name, "_", field.Name)
keySet := fmt.Sprintf("FULLTEXT KEY `%s` (`%s`) COMMENT '%s'", keyName, field.Name, field.Comment)
keySets = append(keySets, keySet)
keySetBy[keyName] = keySet
}
case "index":
keyName := fmt.Sprint("ik_", table.Name, "_", field.Name)
if field.IndexGroup != "" {
keyName = fmt.Sprint("ik_", table.Name, "_", field.IndexGroup)
}
if keySetBy[keyName] != "" {
keySetFields[keyName] += " " + field.Name
// composite index
if conn.Config.Type == "sqlite3" {
keySetBy[keyName] = strings.Replace(keySetBy[keyName], ")", ", `"+field.Name+"`)", 1)
} else if conn.Config.Type == "mysql" {
keySetBy[keyName] = strings.Replace(keySetBy[keyName], ") COMMENT", ", `"+field.Name+"`) COMMENT", 1)
}
} else {
keySetFields[keyName] = field.Name
keySet := ""
if conn.Config.Type == "sqlite3" {
keySet = fmt.Sprintf("CREATE INDEX `%s` ON `%s` (`%s`)", keyName, table.Name, field.Name)
} else if conn.Config.Type == "mysql" {
keySet = fmt.Sprintf("KEY `%s` (`%s`) COMMENT '%s'", keyName, field.Name, field.Comment)
}
keySets = append(keySets, keySet)
keySetBy[keyName] = keySet
}
}
fieldSets = append(fieldSets, field.desc)
//fieldSetBy[field.Name] = field.desc
}
//fmt.Println(u.JsonP(table.Fields))
//fmt.Println(u.JsonP(keySetBy), 3)
//fmt.Println(u.JsonP(keySets), 4)
outSql := make([]string, 0)
var result *db.ExecResult
var tableInfo map[string]interface{}
if conn.Config.Type == "sqlite3" {
tableInfo = conn.Query("SELECT `name`, `sql` FROM `sqlite_master` WHERE `type`='table' AND `name`='" + table.Name + "'").MapOnR1()
tableInfo["comment"] = ""
} else if conn.Config.Type == "mysql" {
tableInfo = conn.Query("SELECT TABLE_NAME name, TABLE_COMMENT comment FROM information_schema.TABLES WHERE TABLE_SCHEMA='" + conn.Config.DB + "' AND TABLE_NAME='" + table.Name + "'").MapOnR1()
}
oldTableComment := u.String(tableInfo["comment"])
if tableInfo["name"] != nil && tableInfo["name"] != "" {
// merge fields with the existing table definition
oldFieldList := make([]*TableFieldDesc, 0)
oldFields := make(map[string]*TableFieldDesc)
oldIndexes := make(map[string]string)
oldIndexInfos := make([]*TableKeyDesc, 0)
oldComments := map[string]string{}
if conn.Config.Type == "sqlite3" {
tableM := ddlTableMatcher.FindStringSubmatch(u.String(tableInfo["sql"]))
if tableM != nil {
fieldsM := ddlFieldMatcher.FindAllStringSubmatch(tableM[2], 2000)
if fieldsM != nil {
for _, m := range fieldsM {
if m[1] == "PRIMARY" && m[2] == "KEY" {
keysM := ddlKeyMatcher.FindAllStringSubmatch(m[3], 20)
if keysM != nil {
for _, km := range keysM {
oldIndexInfos = append(oldIndexInfos, &TableKeyDesc{
Key_name: "PRIMARY",
Column_name: km[1],
})
}
}
} else {
Null := "NULL"
//fmt.Println(" =====", m[0], m[1], m[2])
if ddlNotNullMatcher.MatchString(m[2]) {
m[2] = ddlNotNullMatcher.ReplaceAllString(m[2], "")
Null = "NOT NULL"
} else if ddlNullMatcher.MatchString(m[2]) {
m[2] = ddlNullMatcher.ReplaceAllString(m[2], "")
Null = "NULL"
}
//fmt.Println(" =====", m[2], "|", Null)
oldFieldList = append(oldFieldList, &TableFieldDesc{
Field: m[1],
Type: m[2],
//Null: u.StringIf(strings.Contains(m[3], "NOT NULL"), "NO", "YES"),
Null: u.StringIf(Null == "NOT NULL", "NO", "YES"),
Key: "",
Default: "",
Extra: "",
After: "",
})
}
}
}
//fmt.Println(u.JsonP(fieldsM), 222)
}
// read index information
for _, indexInfo := range conn.Query("SELECT `name`, `sql` FROM `sqlite_master` WHERE `type`='index' AND `tbl_name`='" + table.Name + "'").StringMapResults() {
//fmt.Println(u.JsonP(indexInfo), 777)
indexM := ddlIndexMatcher.FindStringSubmatch(indexInfo["sql"])
if indexM != nil {
//fmt.Println(u.JsonP(indexM), 666)
indexFieldM := ddlIndexFieldMatcher.FindAllStringSubmatch(indexM[4], 20)
//fmt.Println(u.JsonP(indexFieldM), 555)
if indexFieldM != nil {
for _, km := range indexFieldM {
oldIndexInfos = append(oldIndexInfos, &TableKeyDesc{
Key_name: indexInfo["name"],
Column_name: km[1],
})
}
}
}
}
//fmt.Println(u.JsonP(oldFieldList), 1)
//fmt.Println(u.JsonP(oldIndexInfos), 2)
} else if conn.Config.Type == "mysql" {
conn.Query("SELECT column_name, column_comment FROM information_schema.columns WHERE TABLE_SCHEMA='" + conn.Config.DB + "' AND TABLE_NAME='" + table.Name + "'").ToKV(&oldComments)
_ = conn.Query("DESC `" + table.Name + "`").To(&oldFieldList)
_ = conn.Query("SHOW INDEX FROM `" + table.Name + "`").To(&oldIndexInfos)
}
//fmt.Println(u.JsonP(oldComments), 111)
for _, indexInfo := range oldIndexInfos {
if oldIndexes[indexInfo.Key_name] == "" {
oldIndexes[indexInfo.Key_name] = indexInfo.Column_name
} else {
oldIndexes[indexInfo.Key_name] += " " + indexInfo.Column_name
}
}
//fmt.Println(u.JsonP(oldIndexes), 111)
//fmt.Println(u.JsonP(keySetFields), 222)
//fmt.Println(u.JsonP(keySetBy), 333)
// track field ordering
prevFieldId := ""
for _, field := range oldFieldList {
if conn.Config.Type == "sqlite3" {
field.Type = "numeric"
} else if conn.Config.Type == "mysql" {
field.After = prevFieldId
}
prevFieldId = field.Field
oldFields[field.Field] = field
}
//fmt.Println(111, u.JsonP(oldFields), 111)
actions := make([]string, 0)
for keyId := range oldIndexes {
if keyId != "PRIMARY" && strings.ToLower(keySetFields[keyId]) != strings.ToLower(oldIndexes[keyId]) {
if conn.Config.Type == "sqlite3" {
actions = append(actions, "DROP INDEX `"+keyId+"`")
} else if conn.Config.Type == "mysql" {
actions = append(actions, "DROP KEY `"+keyId+"`")
}
}
}
//fmt.Println(" =>>>>>>>>", oldIndexes, pks)
if oldIndexes["PRIMARY"] != "" && strings.ToLower(oldIndexes["PRIMARY"]) != strings.ToLower(strings.Join(pks, " ")) {
if conn.Config.Type == "sqlite3" {
} else if conn.Config.Type == "mysql" {
actions = append(actions, "DROP PRIMARY KEY")
}
}
//for fieldId, fieldSet := range fieldSetBy {
newFieldExists := map[string]bool{}
prevFieldId = ""
for _, field := range table.Fields {
newFieldExists[field.Name] = true
oldField := oldFields[field.Name]
// work around quirks of some databases
if oldField == nil {
if conn.Config.Type == "sqlite3" {
actions = append(actions, "ALTER TABLE `"+table.Name+"` ADD COLUMN "+field.desc)
} else if conn.Config.Type == "mysql" {
actions = append(actions, "ADD COLUMN "+field.desc)
}
} else {
oldField.Type = strings.TrimSpace(strings.ReplaceAll(oldField.Type, " (", "("))
fixedOldDefault := u.String(oldField.Default)
if fixedOldDefault == "CURRENT_TIMESTAMP" && strings.Contains(oldField.Extra, "on update CURRENT_TIMESTAMP") {
fixedOldDefault = "CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP"
}
fixedOldNull := "NOT NULL"
if oldField.Null == "YES" {
fixedOldNull = "NULL"
}
//fmt.Println(" ==", field.Type, "!=", oldField.Type, "||", field.Default, "!=", fixedOldDefault, "||", field.Null, "!=", fixedOldNull, "||", oldField.After, "!=", prevFieldId, "||", oldComments[field.Name], "!=", field.Comment)
//fmt.Println(" ==", strings.ToLower(field.Type) != strings.ToLower(oldField.Type), strings.ToLower(field.Default) != strings.ToLower(fixedOldDefault), strings.ToLower(field.Null) != strings.ToLower(fixedOldNull), strings.ToLower(oldField.After) != strings.ToLower(prevFieldId), strings.ToLower(oldComments[field.Name]) != strings.ToLower(field.Comment))
if strings.ToLower(field.Type) != strings.ToLower(oldField.Type) || strings.ToLower(field.Default) != strings.ToLower(fixedOldDefault) || strings.ToLower(field.Null) != strings.ToLower(fixedOldNull) || strings.ToLower(oldField.After) != strings.ToLower(prevFieldId) || strings.ToLower(oldComments[field.Name]) != strings.ToLower(field.Comment) {
//fmt.Println(" > > > > ", u.JsonP(oldField), 1111)
// `t4f34` varchar(100) COLLATE utf8mb4_general_ci COMMENT ''
// f34, varchar(100), YES, , ,
//fmt.Println(111111, u.JsonP(field), 1111)
// why is Desc empty?
after := ""
if conn.Config.Type == "mysql" {
if oldField.After != prevFieldId {
if prevFieldId == "" {
after = " FIRST"
} else {
after = " AFTER `" + prevFieldId + "`"
}
}
}
//DROP INDEX `uk_config_key`;
//ALTER TABLE `config` RENAME COLUMN `key` TO `keyOld`;
//ALTER TABLE `config` ADD COLUMN `key` varchar(30) NULL;
//UPDATE `config` SET `key`=`keyOld`;
//ALTER TABLE `config` DROP COLUMN `keyOld`;
//CREATE INDEX `uk_config_key` ON `config` (`key`);
if conn.Config.Type == "sqlite3" {
// altering columns is not supported, so the change would require creating a new column, copying the data, then dropping the old one
// plan 1 (abandoned): recreate the table to apply the change
//actions = append(actions, fmt.Sprintf("CREATE TABLE `%s_temp` (\n%s\n)", table.Name, strings.Join(fieldSets, ",\n")))
//actions = append(actions, fmt.Sprintf("INSERT INTO `%s_temp` SELECT * FROM `%s`", table.Name, table.Name))
//actions = append(actions, fmt.Sprintf("DROP TABLE `%s`", table.Name))
//actions = append(actions, fmt.Sprintf("ALTER TABLE `%s_temp` RENAME TO `%s`", table.Name, table.Name))
//INSERT INTO t1_new SELECT foo, bar, baz FROM t1;
//DROP TABLE t1;
//ALTER TABLE t1_new RENAME TO t1;
// plan 2 (abandoned): add a new column, copy the data, then drop the old one (some SQLite builds do not support DROP COLUMN)
//redoIndexes := make([]string, 0)
//for oldIndexName, oldIndex := range oldIndexes {
// if u.StringIn(strings.Split(oldIndex, " "), field.Name) {
// indexSql := conn.Query("SELECT `sql` FROM `sqlite_master` WHERE `type`='index' AND `name`='" + oldIndexName + "'").StringOnR1C1()
// redoIndexes = append(redoIndexes, indexSql)
// actions = append(actions, "DROP INDEX `"+oldIndexName+"`")
// }
//}
//oldPostfix := u.UniqueId()
//actions = append(actions, "ALTER TABLE `"+table.Name+"` RENAME COLUMN `"+field.Name+"` TO `d_"+field.Name+"_"+oldPostfix+"`")
//actions = append(actions, "ALTER TABLE `"+table.Name+"` ADD COLUMN "+field.Desc+after)
//actions = append(actions, "UPDATE `"+table.Name+"` SET `"+field.Name+"`=`d_"+field.Name+"_"+oldPostfix+"`")
////actions = append(actions, "ALTER TABLE `"+table.Name+"` DROP COLUMN `"+field.Name+"Old`")
//for _, redoIndex := range redoIndexes {
// actions = append(actions, redoIndex)
//}
// plan 3: leave the column type unchanged; SQLite tolerates this
//actions = append(actions, "ALTER TABLE `"+table.Name+"` ADD COLUMN "+field.desc)
} else if conn.Config.Type == "mysql" {
actions = append(actions, "CHANGE `"+field.Name+"` "+field.desc+after)
}
}
}
if conn.Config.Type == "mysql" {
prevFieldId = field.Name
}
}
for oldFieldName := range oldFields {
if newFieldExists[oldFieldName] != true {
if conn.Config.Type == "sqlite3" {
//actions = append(actions, "ALTER TABLE `"+table.Name+"` DROP COLUMN `"+oldFieldName+"`")
} else if conn.Config.Type == "mysql" {
actions = append(actions, "DROP COLUMN `"+oldFieldName+"`")
}
}
}
// sqlite3 does not support adding a primary key
if conn.Config.Type == "mysql" {
if len(pks) > 0 && strings.ToLower(oldIndexes["PRIMARY"]) != strings.ToLower(strings.Join(pks, " ")) {
actions = append(actions, "ADD PRIMARY KEY(`"+strings.Join(pks, "`,`")+"`)")
}
}
//fmt.Println(111, u.JsonP(oldIndexes), 222 )
//fmt.Println(222, u.JsonP(keySetBy), 222 )
for keyId, keySet := range keySetBy {
if oldIndexes[keyId] == "" || strings.ToLower(oldIndexes[keyId]) != strings.ToLower(keySetFields[keyId]) {
if conn.Config.Type == "sqlite3" {
actions = append(actions, keySet)
} else if conn.Config.Type == "mysql" {
actions = append(actions, "ADD "+keySet)
}
}
}
//fmt.Println(" =>", table.Comment, "|", oldTableComment )
if conn.Config.Type == "mysql" {
if table.Comment != oldTableComment {
actions = append(actions, "COMMENT '"+table.Comment+"'")
}
}
if len(actions) == 0 {
// nothing to update
return outSql, nil
}
tx := conn.Begin()
defer tx.CheckFinished()
if conn.Config.Type == "sqlite3" {
for _, action := range actions {
//fmt.Println(u.Dim("\t" + strings.ReplaceAll(action, "\n", "\n\t")))
outSql = append(outSql, action)
result = tx.Exec(action)
if result.Error != nil {
break
}
}
} else if conn.Config.Type == "mysql" {
sql := "ALTER TABLE `" + table.Name + "` " + strings.Join(actions, "\n,") + ";"
//fmt.Println(u.Dim("\t" + strings.ReplaceAll(sql, "\n", "\n\t")))
outSql = append(outSql, sql)
result = tx.Exec(sql)
}
if result.Error != nil {
_ = tx.Rollback()
} else {
_ = tx.Commit()
}
} else {
// create a new table
if len(pks) > 0 {
fieldSets = append(fieldSets, "PRIMARY KEY (`"+strings.Join(pks, "`,`")+"`)")
}
indexSets := make([]string, 0) // extra index-creation SQL for sqlite3
if conn.Config.Type == "sqlite3" {
for _, indexSql := range keySetBy {
indexSets = append(indexSets, indexSql)
}
} else if conn.Config.Type == "mysql" {
for _, key := range keySets {
fieldSets = append(fieldSets, key)
}
}
sql := ""
if conn.Config.Type == "sqlite3" {
sql = fmt.Sprintf("CREATE TABLE `%s` (\n %s\n);", table.Name, strings.Join(fieldSets, ",\n "))
} else if conn.Config.Type == "mysql" {
sql = fmt.Sprintf("CREATE TABLE `%s` (\n %s\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_general_ci COMMENT='%s';", table.Name, strings.Join(fieldSets, ",\n "), table.Comment)
}
tx := conn.Begin()
defer tx.CheckFinished()
//fmt.Println(u.Dim("\t" + strings.ReplaceAll(sql, "\n", "\n\t")))
outSql = append(outSql, sql)
result = tx.Exec(sql)
if result.Error == nil {
if conn.Config.Type == "sqlite3" {
for _, indexSet := range indexSets {
//fmt.Println(indexSet)
//fmt.Println(u.Dim("\t" + strings.ReplaceAll(indexSet, "\n", "\n\t")))
outSql = append(outSql, indexSet)
r := tx.Exec(indexSet)
if r.Error != nil {
result = r
}
}
}
}
if result.Error != nil {
_ = tx.Rollback()
} else {
_ = tx.Commit()
}
}
if result == nil {
return outSql, nil
}
if result.Error != nil {
logger.Error(result.Error.Error())
}
return outSql, result.Error
}
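// Usage sketch (illustrative only): declare a table and let MakeTable create
// it or migrate an existing one. The table and field definitions are
// placeholders; Index/Extra values follow the conventions handled above
// ("pk", "unique", "index", "fulltext", "AUTO_INCREMENT").
func exampleMakeTableUsage(conn *db.DB) {
    table := &TableStruct{
        Name:    "user",
        Comment: "user accounts",
        Fields: []TableField{
            {Name: "id", Type: "int unsigned", Index: "pk", Extra: "AUTO_INCREMENT", Comment: "auto id"},
            {Name: "phone", Type: "varchar(20)", Index: "unique", Comment: "login phone"},
            {Name: "name", Type: "varchar(100)", IsNull: true, Comment: "display name"},
            {Name: "version", Type: "bigint unsigned", Index: "index", Default: "0", Comment: "data version"},
        },
    }
    if sqls, err := MakeTable(conn, table, log.DefaultLogger); err == nil {
        // the returned slice holds the executed DDL, useful for auditing migrations
        for _, s := range sqls {
            fmt.Println(s)
        }
    }
}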

714 db/dao.js Normal file

@@ -0,0 +1,714 @@
{{$dao := . -}}
let _{{$dao.DBName}}Dao = {_daoInstances:{}, _daoActions:{}}
function _make{{$dao.DBName}}Dao(conn, tx, rd) {
let dao = {_tableInstances:{}}
let daoActions = _{{$dao.DBName}}Dao._daoActions
// let tableActions = _{{$dao.DBName}}Dao._tableActions
for(let k in _{{$dao.DBName}}Dao) if(typeof _{{$dao.DBName}}Dao[k]==='function') dao[k] = _{{$dao.DBName}}Dao[k]
for(let k in daoActions) if(typeof daoActions[k]==='function') dao[k] = daoActions[k]
{{- range $t := .Tables }}
Object.defineProperty(dao, "{{$t.TableName}}", {get: function(){return dao._getTable("{{$t.TableName}}")}})
{{- end }}
//for(let tableName in _{{$dao.DBName}}Dao._table) Object.defineProperty(dao, tableName, {get: function(){return dao._getTable(tableName)}})
dao._conn = conn
dao._tx = tx
dao._rd = rd
return dao
}
function get{{$dao.FixedDBName}}Dao(connectionName, redisName) {
let cachedDaoName = (connectionName||'')+'_'+(redisName||'')
if(_{{$dao.DBName}}Dao._daoInstances[cachedDaoName]) return _{{$dao.DBName}}Dao._daoInstances[cachedDaoName]
let conn = typeof db==='object' ? (connectionName ? db.fetch(connectionName) : db.fetch()) : null
let rd = typeof redis==='object' ? (redisName ? redis.fetch(redisName) : redis.fetch()) : null
let dao = _make{{$dao.DBName}}Dao(conn, null, rd)
_{{$dao.DBName}}Dao._daoInstances[cachedDaoName] = dao
return dao
}
_{{$dao.DBName}}Dao._getTable = function(tableName){
if(this._tableInstances[tableName]) return this._tableInstances[tableName]
let tb = _{{$dao.DBName}}Dao[tableName]
let t = {}
let daoActions = _{{$dao.DBName}}Dao._daoActions
for(let k in tb) if(typeof tb[k]==='function') t[k] = tb[k]
for(let k in daoActions) if(typeof daoActions[k]==='function') t[k] = daoActions[k]
// for(let k in tableActions) if(typeof tableActions[k]==='function') t[k] = tableActions[k]
t._tableName = tableName
t._conn = this._conn
t._tx = this._tx
t._rd = this._rd
this._tableInstances[tableName] = t
return t
}
_{{$dao.DBName}}Dao.begin = function() {
return _make{{$dao.DBName}}Dao(this._conn, this._conn.begin(), this._rd)
}
_{{$dao.DBName}}Dao._daoActions.query = function(sql, ...args) {
if(this._tx) return this._tx.query(sql, ...args)
if(this._conn) return this._conn.query(sql, ...args)
return null
}
_{{$dao.DBName}}Dao._daoActions.query1 = function(sql, ...args) {
if(this._tx) return this._tx.query1(sql, ...args)
if(this._conn) return this._conn.query1(sql, ...args)
return null
}
_{{$dao.DBName}}Dao._daoActions.query11 = function(sql, ...args) {
if(this._tx) return this._tx.query11(sql, ...args)
if(this._conn) return this._conn.query11(sql, ...args)
return null
}
_{{$dao.DBName}}Dao._daoActions.query1a = function(sql, ...args) {
if(this._tx) return this._tx.query1a(sql, ...args)
if(this._conn) return this._conn.query1a(sql, ...args)
return null
}
_{{$dao.DBName}}Dao._daoActions.exec = function(sql, ...args) {
if(this._tx) return this._tx.exec(sql, ...args)
if(this._conn) return this._conn.exec(sql, ...args)
return null
}
_{{$dao.DBName}}Dao._daoActions.insertTo = function(table, data) {
if(this._tx) return this._tx.insert(table, data)
if(this._conn) return this._conn.insert(table, data)
return null
}
_{{$dao.DBName}}Dao._daoActions.replaceTo = function(table, data) {
if(this._tx) return this._tx.replace(table, data)
if(this._conn) return this._conn.replace(table, data)
return null
}
_{{$dao.DBName}}Dao._daoActions.updateTo = function(table, data, sql, ...args) {
if(this._tx) return this._tx.update(table, data, sql, ...args)
if(this._conn) return this._conn.update(table, data, sql, ...args)
return null
}
_{{$dao.DBName}}Dao._daoActions.deleteFrom = function(table, where, ...args) {
if(this._tx) return this._tx.delete(table, where, ...args)
if(this._conn) return this._conn.delete(table, where, ...args)
return null
}
{{- range $t := .Tables }}
_{{$dao.DBName}}Dao.{{$t.TableName}} = {}
_{{$dao.DBName}}Dao.{{$t.TableName}}Item = {}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query = {}
_{{$dao.DBName}}Dao.{{$t.TableName}}QueryList = {}
_{{$dao.DBName}}Dao.{{$t.TableName}}.attach = function(item){
if(!item || typeof item !== 'object') return null
let o = {}
for(let k in item) o[k] = item[k]
let itemSet = _{{$dao.DBName}}Dao.{{$t.TableName}}Item
for(let k in itemSet) if(typeof itemSet[k]==='function') o[k] = itemSet[k]
// item._conn = this._conn
// item._tx = this._tx
// item._rd = this._rd
o._table = this
return o
}
_{{$dao.DBName}}Dao.{{$t.TableName}}.attachList = function(list){
if(!list || typeof list !== 'object' || !(list instanceof Array)) return []
let a = []
for(let i=0; i<list.length; i++) a.push(this.attach(list[i]))
let queryListSet = _{{$dao.DBName}}Dao.{{$t.TableName}}QueryList
for(let k in queryListSet) if(typeof queryListSet[k]==='function') a[k] = queryListSet[k]
return a
}
_{{$dao.DBName}}Dao.{{$t.TableName}}.new = function(data){
let item = {}
{{- range $f := .Fields }}
item.{{$f.Name}} = data && data.{{$f.Name}} !== undefined ? data.{{$f.Name}} : {{$f.Default}}
{{- end }}
{{- if .IsAutoId }}
delete item.{{.AutoIdField}}
{{- end }}
{{- if $t.InvalidSet}}
item.{{$t.ValidField}} = {{$t.ValidFieldConfig.ValidSetValue}}
{{- end }}
return this.attach(item)
}
{{range $k := .UniqueKeys -}}
_{{$dao.DBName}}Dao.{{$t.TableName}}.getBy{{$k.Name}} = function({{$k.Args}}) {
let rr = this.query1("SELECT {{$t.SelectFields}} FROM `{{$t.TableName}}` WHERE {{$k.Where}}{{$t.ValidWhere}}", {{$k.Args}})
if(this._conn) return this.attach(rr)
return null
}
{{- end }}
{{ if $t.PrimaryKey -}}
_{{$dao.DBName}}Dao.{{$t.TableName}}.get = function({{$t.PrimaryKey.Args}}) {
return this.attach( this.query1("SELECT {{$t.SelectFields}} FROM `{{$t.TableName}}` WHERE {{$t.PrimaryKey.Where}}{{$t.ValidWhere}}", {{$t.PrimaryKey.Args}}))
}
_{{$dao.DBName}}Dao.{{$t.TableName}}.getWithFields = function({{$t.PrimaryKey.Args}}, fields) {
return this.attach(this.query1("SELECT "+fields+" FROM `{{$t.TableName}}` WHERE {{$t.PrimaryKey.Where}}{{$t.ValidWhere}}", {{$t.PrimaryKey.Args}}))
}
{{ if .ValidSet -}}
_{{$dao.DBName}}Dao.{{$t.TableName}}.getWithInvalid = function({{$t.PrimaryKey.Args}}) {
return this.attach(this.query1("SELECT {{$t.SelectFields}} FROM `{{$t.TableName}}` WHERE {{$t.PrimaryKey.Where}}", {{$t.PrimaryKey.Args}}))
}
{{- end }}
{{- end }}
_{{$dao.DBName}}Dao.{{$t.TableName}}.insert = function(data) {
{{- range $index, $field := .AutoGenerated }}
if( data["{{$field}}"] == null ) delete data["{{$field}}"]
{{- end }}
{{- range $index, $field := .AutoGeneratedOnUpdate }}
delete data["{{$field}}"]
{{- end }}
{{- if $t.InvalidSet}}
if(data.{{$t.ValidField}}===undefined) data.{{$t.ValidField}} = {{$t.ValidFieldConfig.ValidSetValue}}
{{- end }}
{{- if .HasVersion }}
let version = this._getVersion()
data["{{$t.VersionField}}"] = version
{{- end }}
let r = this.insertTo("{{$t.TableName}}", data)
{{- if .HasVersion }}
this._commitVersion(version)
r.version = version
{{- end }}
return r
}
_{{$dao.DBName}}Dao.{{$t.TableName}}.replace = function(data) {
{{- range $index, $field := .AutoGenerated }}
if( data["{{$field}}"] == null ) delete data["{{$field}}"]
{{- end }}
{{- range $index, $field := .AutoGeneratedOnUpdate }}
delete data["{{$field}}"]
{{- end }}
{{- if $t.InvalidSet}}
if(data.{{$t.ValidField}}===undefined) data.{{$t.ValidField}} = {{$t.ValidFieldConfig.ValidSetValue}}
{{- end }}
{{- if .HasVersion }}
let version = this._getVersion()
data["{{$t.VersionField}}"] = version
{{- end }}
let r = this.replaceTo("{{$t.TableName}}", data)
{{- if .HasVersion }}
this._commitVersion(version)
r.version = version
{{- end }}
return r
}
{{if .PrimaryKey -}}
_{{$dao.DBName}}Dao.{{$t.TableName}}.update = function(data, {{$t.PrimaryKey.Args}}) {
{{- range $index, $field := .AutoGenerated }}
if( data["{{$field}}"] == null ) delete data["{{$field}}"]
{{- end }}
{{- range $index, $field := .AutoGeneratedOnUpdate }}
delete data["{{$field}}"]
{{- end }}
{{- if .HasVersion }}
let version = this._getVersion()
data["{{$t.VersionField}}"] = version
{{- end }}
let r = this.updateTo("{{$t.TableName}}", data, "{{$t.PrimaryKey.Where}}", {{$t.PrimaryKey.Args}})
{{- if .HasVersion }}
this._commitVersion(version)
r.version = version
{{- end }}
return r
}
{{- if .InvalidSet }}
_{{$dao.DBName}}Dao.{{$t.TableName}}.enable = function({{$t.PrimaryKey.Args}}) {
{{- if .HasVersion }}
let version = this._getVersion()
let r = this.exec("UPDATE `{{$t.TableName}}` set {{$t.ValidSet}}, `{{$t.VersionField}}`=? WHERE {{$t.PrimaryKey.Where}}", version, {{$t.PrimaryKey.Args}})
{{- else }}
let r = this.exec("UPDATE `{{$t.TableName}}` set {{$t.ValidSet}} WHERE {{$t.PrimaryKey.Where}}", {{$t.PrimaryKey.Args}})
{{- end }}
{{- if .HasVersion }}
this._commitVersion(version)
r.version = version
{{- end }}
return r
}
_{{$dao.DBName}}Dao.{{$t.TableName}}.disable = function({{$t.PrimaryKey.Args}}){
{{- if .HasVersion }}
let version = this._getVersion()
let r = this.exec("UPDATE `{{$t.TableName}}` set {{$t.InvalidSet}}, `{{$t.VersionField}}`=? WHERE {{$t.PrimaryKey.Where}}", version, {{$t.PrimaryKey.Args}})
{{- else }}
let r = this.exec("UPDATE `{{$t.TableName}}` set {{$t.InvalidSet}} WHERE {{$t.PrimaryKey.Where}}", {{$t.PrimaryKey.Args}})
{{- end }}
{{- if .HasVersion }}
this._commitVersion(version)
r.version = version
{{- end }}
return r
}
{{- else }}
_{{$dao.DBName}}Dao.{{$t.TableName}}.delete = function({{$t.PrimaryKey.Args}}) {
return this.exec("DELETE FROM `{{$t.TableName}}` WHERE {{$t.PrimaryKey.Where}}", {{$t.PrimaryKey.Args}})
}
_{{$dao.DBName}}Dao.{{$t.TableName}}.deleteBy = function(where, ...args) {
return this.deleteFrom("{{.TableName}}", where, ...args)
}
{{- end }}
{{- end }}
_{{$dao.DBName}}Dao.{{$t.TableName}}.updateBy = function(data, where, ...args) {
{{- if .HasVersion }}
let version = this._getVersion()
data["{{.VersionField}}"] = version
{{- end }}
let r = this.updateTo("{{.TableName}}", data, where, ...args)
{{- if .HasVersion }}
this._commitVersion(version)
r.version = version
{{- end }}
return r
}
{{ if .HasVersion -}}
_{{$dao.DBName}}Dao.{{$t.TableName}}._getVersion = function() {
let version = 0
if(this._rd){
version = this._rd.incr("_DATA_VERSION_{{.TableName}}")
if(version > 1){
this._rd.setEX("_DATA_VERSION_DOING_{{.TableName}}_"+version, 10, true)
return version
}
this._rd.del("_DATA_VERSION_{{.TableName}}")
version = 0
} else {
logger.warn("use version but not configured redis", {db:"{{.DBName}}", table:"{{.TableName}}"})
}
let maxVersion = parseInt(this.query11("SELECT MAX(`{{.VersionField}}`) FROM `{{.TableName}}`") || '0')
version = maxVersion+1
if(this._rd){
this._rd.mSet("_DATA_VERSION_{{.TableName}}", version, "_DATA_MAX_VERSION_{{.TableName}}", version)
this._rd.setEX("_DATA_VERSION_DOING_{{.TableName}}_"+version, 10, true)
}
return version
}
_{{$dao.DBName}}Dao.{{$t.TableName}}._commitVersion = function(version) {
if(this._rd){
this._rd.del("_DATA_VERSION_DOING_{{.TableName}}_"+version)
let seqVersion = parseInt(this._rd.get("_DATA_VERSION_{{.TableName}}") || '0')
let currentMaxVersion = parseInt(this._rd.get("_DATA_MAX_VERSION_{{.TableName}}") || '0')
for(let i=currentMaxVersion; i<=seqVersion; i++){
if(this._rd.exists("_DATA_VERSION_DOING_{{.TableName}}_"+i)){
break
} else {
this._rd.set("_DATA_MAX_VERSION_{{.TableName}}", i)
}
}
}
}
{{- end }}
_{{$dao.DBName}}Dao.{{$t.TableName}}.newQuery = function(){
let query = {
_validWhere: "{{.ValidWhere}}",
_sql: "",
_fields: "{{.SelectFields}}",
_where: "",
_extraSql: "",
_extraArgs: [],
_args: [],
_leftJoins: [],
_leftJoinArgs: [],
_result: null,
_lastSql: "",
_lastSqlArgs: [],
}
let querySet = _{{$dao.DBName}}Dao.{{$t.TableName}}Query
for(let k in querySet) if(typeof querySet[k]==='function') query[k] = querySet[k]
query._table = this
// query._conn = this._conn
// query._tx = this._tx
// query._rd = this._rd
return query
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query._parseFields = function(fields, table) {
let fieldArr = fields
if(typeof fields !== 'object' || !(fields instanceof Array)) {
if(typeof fields !== 'string') fields = fields.toString()
if (fields==="" || fields.indexOf('(') !== -1 || fields.indexOf('`') !== -1) return fields
fieldArr = fields.split(",")
}
for(let i=0; i<fieldArr.length; i++){
let field = fieldArr[i]
if(typeof field !== 'string') field = field.toString()
field = field.trim()
let as = ""
if(field.indexOf(' ')!==-1){
let a = field.split(" ")
field = a[0]
if(a[a.length-2].toLowerCase()==="as" && !a[a.length-1].startsWith("`")){
a[a.length-1] = "`" + a[a.length-1] + "`"
}
as = " " + a.slice(1),join(" ")
}
if(table){
fieldArr[i] = "`"+table+"`.`"+field+"`"+as
} else {
fieldArr[i] = "`"+field+"`"+as
}
}
return fieldArr.join(",")
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query._parse = function(tag){
if(this._sql !== ""){
return {sql:this._sql, args:this._args}
}
let fields = this._fields
let validWhere = this._validWhere
if(tag==="COUNT"){
fields = "COUNT(*)"
}else if(tag==="COUNT_ALL"){
fields = "COUNT(*)"
validWhere = ""
}else if(tag==="ALL"){
validWhere = ""
}else if(tag==="ALL_VERSION"){
validWhere = ""
//}else if(tag==="VERSION"){
}
let leftJoinsStr = ""
if(this._leftJoins.length > 0){
leftJoinsStr = " " + this._leftJoins.join(" ")
this._args.unshift(...this._leftJoinArgs)
validWhere = validWhere.replace(/ AND /g, " AND `{{.TableName}}`.")
}
if(this._where==="" && validWhere.startsWith(" AND ")) validWhere = validWhere.substring(5)
if(this._extraArgs) this._args.push(...this._extraArgs)
let whereStr = this._where + validWhere
return {sql:"SELECT " + fields + " FROM `{{.TableName}}`" + leftJoinsStr + (whereStr ? " WHERE " + whereStr : "") + this._extraSql, args:this._args}
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.sql = function(sql, ...args){
this._sql = sql
this._args = args
return this
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.fields = function(fields){
this._fields = this._parseFields(fields, "")
return this
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.appendFields = function(fields){
if(this._fields) this._fields += ", "
this._fields += this._parseFields(fields, "")
return this
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.where = function(where, ...args){
this._where = where
this._args = args
return this
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.in = function(field, ...values){
if(field.indexOf("`")===-1) field = "`"+field+"`"
this._where = field+" IN "+this._conn.inKeys(values.length)
this._args = values
return this
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.and = function(where, ...args){
if(this._where) this._where += " AND "
this._where += where
this._args.push(...args)
return this
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.or = function(where, ...args){
if(this._where) this._where += " OR "
this._where += where
this._args.push(...args)
return this
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.andIn = function(field, ...values){
if(field.indexOf("`")===-1) field = "`"+field+"`"
if(this._where) this._where += " AND "
this._where += field + " IN "+this._conn.inKeys(values.length)
this._args.push(...values)
return this
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.orIn = function(field, ...values){
if(field.indexOf("`")===-1) field = "`"+field+"`"
if(this._where) this._where += " OR "
this._where += field + " IN "+this._conn.inKeys(values.length)
this._args.push(...values)
return this
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.orderBy = function(orderBy){
this._extraSql += " ORDER BY " + orderBy
return this
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.groupBy = function(groupBy){
this._extraSql += " GROUP BY " + groupBy
return this
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.limit = function(start, num){
if(num===undefined){
num = start
start = 0
}
this._extraSql += " LIMIT ?,?"
this._extraArgs.push(start, num)
return this
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.having = function(where, ...args){
this._extraSql += " HAVING "+where
this._extraArgs.push(...args)
return this
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.extra = function(sql, ...args){
this._extraSql += sql
this._extraArgs.push(...args)
return this
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.leftJoin = function(joinTable, fields, on, ...args){
if(this._fields.indexOf("`{{.TableName}}`.")===-1){
this._fields = "`{{.TableName}}`."+this._fields.replace(/`, `/g, "`, `{{.TableName}}`.`")
}
if(fields) this._fields += ", "+this._parseFields(fields, joinTable)
this._leftJoins.push("LEFT JOIN `" + joinTable + "` ON " + on)
this._leftJoinArgs.push(...args)
return this
}
{{range $idx := .IndexKeys}}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.by{{$idx.Name}} = function({{$idx.Args}}){
return this.where("{{$idx.Where}}", {{$idx.Args}})
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.and{{$idx.Name}} = function({{$idx.Args}}){
if(this._where) this._where += " AND "
this._where += "{{$idx.Where}}"
this._args.push({{$idx.Args}})
return this
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.or{{$idx.Name}} = function({{$idx.Args}}){
if(this._where){
this._where += " OR "
}
this._where += "{{$idx.Where}}"
this._args.push({{$idx.Args}})
return this
}
{{- end }}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.query = function(){
let {sql,args} = this._parse("")
this._lastSql = sql
this._lastSqlArgs = args
this._result = this._table.query(sql, ...args).result
return this
}
{{- if .ValidSet }}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.queryWithValid = function(){
let {sql,args} = this._parse("ALL")
this._lastSql = sql
this._lastSqlArgs = args
this._result = this._table.query(sql, ...args).result
return this
}
{{- end }}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.count = function(){
let {sql,args} = this._parse("COUNT")
this._lastSql = sql
this._lastSqlArgs = args
return parseInt(this._table.query11(sql, ...args) || '0')
}
{{- if .ValidSet }}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.countAll = function(){
let {sql,args} = this._parse("COUNT_ALL")
this._lastSql = sql
this._lastSqlArgs = args
return parseInt(this._table.query11(sql, ...args) || '0')
}
{{- end }}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.queryByPage = function(start, num){
this.limit(start, num)
return this.query()
}
{{- if .ValidSet }}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.queryWithValidByPage = function(start, num){
this.limit(start, num)
return this.queryWithValid()
}
{{- end }}
{{- if .HasVersion }}
{{- if .ValidSet }}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.queryByVersion = function(minVersion, maxVersion, limit, withInvalid){
if(minVersion > 0) withInvalid = true
let parseTag = withInvalid ? "ALL_VERSION" : "VERSION"
{{- else }}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.queryByVersion = function(minVersion, maxVersion, limit){
let parseTag = "VERSION"
{{- end }}
if(maxVersion===0){
if(this._table._rd){
maxVersion = parseInt(this._table._rd.get("_DATA_MAX_VERSION_{{.TableName}}") || '0')
} else {
logger.warn("use version but not configured redis", {db:"{{.DBName}}", table:"{{.TableName}}"})
}
if(maxVersion===0){
maxVersion = parseInt(this._table.query11("SELECT MAX(`{{.VersionField}}`) FROM `{{.TableName}}`") || '0')
this._lastSql = "SELECT MAX(`{{.VersionField}}`) FROM `{{.TableName}}`"
this._lastSqlArgs = []
}
}
this.and("`version` BETWEEN ? AND ?", minVersion+1, maxVersion )
if(limit > 0){
this.orderBy("`{{.VersionField}}`")
this.limit(0, limit)
}
let {sql,args} = this._parse(parseTag)
this._lastSql = sql
this._lastSqlArgs = args
this._result = this._table.query(sql, ...args).result
this.maxVersion = maxVersion
return this
}
{{- end }}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.first = function(){
let list = this.list()
if(list.length > 0) return this._table.attach(list[0])
return null
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.list = function(){
if(this._result===null) this.query()
return this._table.attachList(this._result)
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.listBy = function(...fields){
if(this._result===null) this.query()
let list = []
for(let item of this._result){
let o = {}
for(let k in item) if(fields.indexOf(k)!==-1) o[k] = item[k]
list.push(o)
}
return list
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.getSql = function(){
return this._lastSql
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Query.getSqlArgs = function(){
return this._lastSqlArgs
}
_{{$dao.DBName}}Dao.{{$t.TableName}}QueryList.getData = function(){
let list = []
for(let i=0; i<this.length; i++) list.push(list[i].getData())
return list
}
_{{$dao.DBName}}Dao.{{$t.TableName}}QueryList.getJson = function(){
return JSON.stringify(this.getData())
}
{{- if .PrimaryKey }}
_{{$dao.DBName}}Dao.{{$t.TableName}}Item.save = function(){
return this._table.replace(this.getData())
}
{{- if .InvalidSet }}
_{{$dao.DBName}}Dao.{{$t.TableName}}Item.enable = function(){
return this._table.enable({{.PrimaryKey.ItemArgs}})
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Item.disable = function(){
return this._table.disable({{.PrimaryKey.ItemArgs}})
}
{{- else }}
_{{$dao.DBName}}Dao.{{$t.TableName}}Item.delete = function(){
return this._table.delete({{.PrimaryKey.ItemArgs}})
}
{{- end }}
{{- end }}
_{{$dao.DBName}}Dao.{{$t.TableName}}Item.getData = function(){
let data = {}
{{- range $f := .Fields }}
if(this.{{$f.Name}} !== undefined) data.{{$f.Name}} = this.{{$f.Name}}
{{- end }}
return data
}
_{{$dao.DBName}}Dao.{{$t.TableName}}Item.getJson = function(){
return JSON.stringify(this.getData())
}
{{- end }}
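The template above is rendered once per table by MakeDao into a dao.js file, and the tests later in this diff drive that generated code through the gojs runtime. A minimal Go sketch of that flow, assuming db.make/db.makeDao have already produced Test/dao.js for the TestDB.yml schema and that the plugin config matches the tests below (the file names, the getTestDao entry point, and the user table come from those tests):

package main

import (
	"fmt"
	"path"

	"apigo.cloud/git/apigo/gojs"
	"apigo.cloud/git/apigo/plugin"
	_ "apigo.cloud/git/apigo/plugins/db"
	_ "github.com/mattn/go-sqlite3"
	"github.com/ssgo/u"
)

func main() {
	// Point the db plugin at the same sqlite database the DAO was generated from.
	gojs.SetPluginsConfig(map[string]plugin.Config{
		"db": {"default": "sqlite3://Test.db"},
	})

	rt := gojs.New(nil, nil)
	defer rt.Close()

	// Load the generated DAO code, then run a chained query against the user table.
	daoCode, _ := u.ReadFile(path.Join("Test", "dao.js"))
	r, err, _ := rt.Run(daoCode + `
		let dao = getTestDao()
		return dao.user.newQuery().where('id>?', 0).list()
	`)
	fmt.Println(r, err)
}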
0
db/dao_ext.js Normal file
View File
243
db/db.go
View File
@ -6,18 +6,39 @@ import (
"github.com/ssgo/db"
"github.com/ssgo/log"
"github.com/ssgo/u"
"strings"
"sync"
)
type DB struct {
pool *db.DB
name string
pool *db.DB
logger *log.Logger
}
type Tx struct {
conn *db.Tx
}
type ExecResult struct {
Id int64
Changes int64
Version uint64
Sql string
SqlArgs []interface{}
}
type QueryResult struct {
Result []map[string]interface{}
Sql string
SqlArgs []interface{}
}
var dbPool = map[string]*db.DB{}
var defaultDBURL = ""
var dbURLs = map[string]string{}
var defaultDB *db.DB
var dbPoolLock = sync.RWMutex{}
func init() {
plugin.Register(plugin.Plugin{
@ -32,13 +53,21 @@ configs:
Init: func(conf map[string]interface{}) {
if conf["default"] != nil {
defaultDB = db.GetDB(u.String(conf["default"]), nil)
defaultDBURL = u.String(conf["default"])
tmpDB := db.GetDB(defaultDBURL, nil)
dbPoolLock.Lock()
defaultDB = tmpDB
dbPoolLock.Unlock()
}
if conf["configs"] != nil {
confs := map[string]string{}
u.Convert(conf["configs"], &confs)
for name, url := range confs {
dbPool[name] = db.GetDB(url, nil)
tmpDB := db.GetDB(url, nil)
dbPoolLock.Lock()
dbURLs[name] = url
dbPool[name] = tmpDB
dbPoolLock.Unlock()
}
}
},
@ -46,7 +75,7 @@ configs:
"fetch": GetDB,
},
// allow scripts to use db.xxx directly against the default database
JsCode: `_db = db
JsCode: `let _db = db
db = _db.fetch()
db.fetch = _db.fetch
`,
@ -57,22 +86,92 @@ db.fetch = _db.fetch
// GetDB name the connection config name; the default connection is used when no name is given
// GetDB return a database connection with a built-in pool; it does not need to be closed manually when done
func GetDB(name *string, logger *log.Logger) *DB {
if logger == nil {
logger = log.DefaultLogger
}
if name == nil || *name == "" {
if defaultDB != nil {
return &DB{
pool: defaultDB.CopyByLogger(logger),
}
if defaultDB == nil {
tmpDB := db.GetDB(defaultDBURL, nil)
dbPoolLock.Lock()
defaultDB = tmpDB
dbPoolLock.Unlock()
}
return &DB{
name: "",
pool: defaultDB.CopyByLogger(logger),
logger: logger,
}
} else {
if dbPool[*name] != nil {
return &DB{
pool: dbPool[*name].CopyByLogger(logger),
dbPoolLock.RLock()
tmpDB := dbPool[*name]
dbPoolLock.RUnlock()
if tmpDB == nil {
tmpDB = db.GetDB(dbURLs[*name], nil)
dbPoolLock.Lock()
dbPool[*name] = tmpDB
dbPoolLock.Unlock()
}
return &DB{
name: *name,
pool: tmpDB.CopyByLogger(logger),
logger: logger,
}
}
}
// Destroy closes the connection pool
func (db *DB) Destroy() error {
err := db.pool.Destroy()
dbPoolLock.Lock()
if db.name == "" {
defaultDB = nil
} else {
dbPool[db.name] = nil
}
dbPoolLock.Unlock()
return err
}
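For reference, a minimal in-package sketch (not part of this change) of fetching a named connection and releasing it; the name "main" is a hypothetical entry from the plugin's configs map:

func exampleGetDB() {
	name := "main"            // hypothetical connection name registered via the "configs" section
	conn := GetDB(&name, nil) // nil logger falls back to log.DefaultLogger
	defer conn.Destroy()      // drops the pooled *db.DB; the next GetDB re-creates it
	_, _ = conn.Query("SELECT 1")
}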
// InKeys builds the "(?,?,...)" placeholder part of a SQL IN clause from the number of arguments
// InKeys return the SQL fragment that follows IN
func InKeys(numArgs int) string {
a := make([]string, numArgs)
for i := 0; i < numArgs; i++ {
a[i] = "?"
}
return fmt.Sprintf("(%s)", strings.Join(a, ","))
}
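A short sketch of how InKeys is typically combined with a variadic query; the user table and id column are taken from the test schema below, and converting the ids to []interface{} is the caller's job:

func exampleInKeys(conn *DB) (QueryResult, error) {
	ids := []interface{}{1, 2, 3}
	// InKeys(3) produces "(?,?,?)"
	sql := "SELECT `id`, `name` FROM `user` WHERE `id` IN " + InKeys(len(ids))
	return conn.Query(sql, ids...)
}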
// Make creates the tables, updating the table structure when a table already exists
// Make return the list of executed SQL statements
func (db *DB) Make(groups []ERGroup) ([]string, error) {
outSql := make([]string, 0)
for _, group := range groups {
for _, table := range group.Tables {
fmt.Println("=====TB", table.Name)
sql, err := MakeTable(db.pool, &table, db.logger)
outSql = append(outSql, strings.Join(sql, "\n"))
if err != nil {
return outSql, err
}
}
}
return &DB{
pool: db.GetDB("", logger),
return outSql, nil
}
// MakeER generates the ER diagram
func (db *DB) MakeER(groups []ERGroup, outputFile *string, tplFile *string) error {
return MakeER(groups, outputFile, tplFile, db.logger)
}
// MakeDao generates the entity (DAO) objects
// MakeDao outputPath the output directory for the entity files; when empty, a directory named after the database is created under the current directory
func (db *DB) MakeDao(outputPath *string, conf *DaoConfig) error {
if outputPath == nil || *outputPath == "" {
defaultOutput := db.pool.Config.DB
outputPath = &defaultOutput
}
return MakeDao(*outputPath, db.pool, conf, db.logger)
}
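A minimal sketch of generating the DAO files from Go, assuming the default connection is configured; the "generated" output directory is illustrative only:

func exampleMakeDao() error {
	conn := GetDB(nil, nil)        // default connection
	out := "generated"             // hypothetical output directory; nil falls back to the database name
	return conn.MakeDao(&out, nil) // nil conf keeps the default version/valid-field settings
}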
// Begin starts a transaction
@ -85,20 +184,28 @@ func (db *DB) Begin() *Tx {
// * requestSql the SQL statement
// * args values for the ? placeholders in the SQL, in order
// Exec return the auto-increment ID when INSERTing into a table with an auto-increment field, otherwise the number of affected rows
func (db *DB) Exec(requestSql string, args ...interface{}) (int64, error) {
func (db *DB) Exec(requestSql string, args ...interface{}) (ExecResult, error) {
r := db.pool.Exec(requestSql, args...)
out := r.Id()
if out == 0 {
out = r.Changes()
out := ExecResult{}
lSql := strings.ToLower(requestSql)
if strings.Contains(lSql, "insert into") || strings.Contains(lSql, "replace into") {
out.Id = r.Id()
}
out.Changes = r.Changes()
out.Sql = *r.Sql
out.SqlArgs = r.Args
return out, r.Error
}
// Query runs a query
// Query return the queried data as an array of objects
func (db *DB) Query(requestSql string, args ...interface{}) ([]map[string]interface{}, error) {
func (db *DB) Query(requestSql string, args ...interface{}) (QueryResult, error) {
r := db.pool.Query(requestSql, args...)
return r.MapResults(), r.Error
return QueryResult{
Result: r.MapResults(),
Sql: *r.Sql,
SqlArgs: r.Args,
}, r.Error
}
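With these signature changes, callers receive the executed SQL and its arguments alongside the id/changes or rows. A sketch of the updated call sites, using the user table and columns from the test schema below:

func exampleExecQuery(conn *DB) error {
	er, err := conn.Exec("UPDATE `user` SET `name`=? WHERE `id`=?", "Tom", 1)
	if err != nil {
		return err
	}
	fmt.Println(er.Changes, er.Sql, er.SqlArgs) // er.Id stays 0 unless the SQL is INSERT INTO / REPLACE INTO

	qr, err := conn.Query("SELECT `id`, `name` FROM `user` WHERE `id` > ?", 0)
	if err != nil {
		return err
	}
	fmt.Println(len(qr.Result), qr.Sql, qr.SqlArgs)
	return nil
}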
// Query1 runs a query
@ -109,7 +216,7 @@ func (db *DB) Query1(requestSql string, args ...interface{}) (map[string]interfa
if len(results) > 0 {
return results[0], r.Error
} else {
return map[string]interface{}{}, r.Error
return nil, r.Error
}
}
@ -147,39 +254,53 @@ func (db *DB) Query1a(requestSql string, args ...interface{}) ([]interface{}, er
// * table the table name
// * data the data object in key-value form
// Insert return the auto-increment ID when INSERTing into a table with an auto-increment field, otherwise the number of affected rows
func (db *DB) Insert(table string, data map[string]interface{}) (int64, error) {
func (db *DB) Insert(table string, data map[string]interface{}) (ExecResult, error) {
r := db.pool.Insert(table, data)
out := r.Id()
if out == 0 {
out = r.Changes()
}
return out, r.Error
return ExecResult{
Id: r.Id(),
Changes: r.Changes(),
Sql: *r.Sql,
SqlArgs: r.Args,
}, r.Error
}
// Replace replaces data
// Replace return the auto-increment ID when REPLACEing into a table with an auto-increment field, otherwise the number of affected rows
func (db *DB) Replace(table string, data map[string]interface{}) (int64, error) {
func (db *DB) Replace(table string, data map[string]interface{}) (ExecResult, error) {
r := db.pool.Replace(table, data)
out := r.Id()
if out == 0 {
out = r.Changes()
}
return out, r.Error
return ExecResult{
Id: r.Id(),
Changes: r.Changes(),
Sql: *r.Sql,
SqlArgs: r.Args,
}, r.Error
}
// Update updates data
// * wheres the condition part of the SQL, i.e. everything after WHERE
// Update return the number of affected rows
func (db *DB) Update(table string, data map[string]interface{}, wheres string, args ...interface{}) (int64, error) {
func (db *DB) Update(table string, data map[string]interface{}, wheres string, args ...interface{}) (ExecResult, error) {
r := db.pool.Update(table, data, wheres, args...)
return r.Changes(), r.Error
return ExecResult{
Changes: r.Changes(),
Sql: *r.Sql,
SqlArgs: r.Args,
}, r.Error
}
// Delete deletes data
// Delete return the number of affected rows
func (db *DB) Delete(table string, wheres string, args ...interface{}) (int64, error) {
func (db *DB) Delete(table string, wheres string, args ...interface{}) (ExecResult, error) {
r := db.pool.Delete(table, wheres, args...)
return r.Changes(), r.Error
return ExecResult{
Changes: r.Changes(),
Sql: *r.Sql,
SqlArgs: r.Args,
}, r.Error
}
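And a compact sketch of the write helpers with the new ExecResult, again against the test schema's user table:

func exampleWrites(conn *DB) error {
	ins, err := conn.Insert("user", map[string]interface{}{"name": "Tom"})
	if err != nil {
		return err
	}
	upd, err := conn.Update("user", map[string]interface{}{"name": "Tom Lee"}, "`id`=?", ins.Id)
	if err != nil {
		return err
	}
	_, err = conn.Delete("user", "`id`=?", ins.Id)
	fmt.Println(ins.Id, upd.Changes)
	return err
}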
// MakeId generates an ID that does not already exist for the specified field
@ -189,7 +310,7 @@ func (db *DB) Delete(table string, wheres string, args ...interface{}) (int64, e
func (db *DB) MakeId(table string, idField string, idSize uint) (string, error) {
var id string
var err error
for i:=0; i<100; i++ {
for i := 0; i < 100; i++ {
if idSize > 20 {
id = u.UniqueId()
} else if idSize > 14 {
@ -235,14 +356,26 @@ func (tx *Tx) CheckFinished() error {
return tx.conn.CheckFinished()
}
func (tx *Tx) Exec(requestSql string, args ...interface{}) (int64, error) {
func (tx *Tx) Exec(requestSql string, args ...interface{}) (ExecResult, error) {
r := tx.conn.Exec(requestSql, args...)
return r.Changes(), r.Error
out := ExecResult{}
lSql := strings.ToLower(requestSql)
if strings.Contains(lSql, "insert into") || strings.Contains(lSql, "replace into") {
out.Id = r.Id()
}
out.Changes = r.Changes()
out.Sql = *r.Sql
out.SqlArgs = r.Args
return out, r.Error
}
func (tx *Tx) Query(requestSql string, args ...interface{}) ([]map[string]interface{}, error) {
func (tx *Tx) Query(requestSql string, args ...interface{}) (QueryResult, error) {
r := tx.conn.Query(requestSql, args...)
return r.MapResults(), r.Error
return QueryResult{
Result: r.MapResults(),
Sql: *r.Sql,
SqlArgs: r.Args,
}, r.Error
}
func (tx *Tx) Query1(requestSql string, args ...interface{}) (map[string]interface{}, error) {
@ -251,7 +384,7 @@ func (tx *Tx) Query1(requestSql string, args ...interface{}) (map[string]interfa
if len(results) > 0 {
return results[0], r.Error
} else {
return map[string]interface{}{}, r.Error
return nil, r.Error
}
}
@ -281,22 +414,40 @@ func (tx *Tx) Query1a(requestSql string, args ...interface{}) ([]interface{}, er
return a, r.Error
}
func (tx *Tx) Insert(table string, data map[string]interface{}) (int64, error) {
func (tx *Tx) Insert(table string, data map[string]interface{}) (ExecResult, error) {
r := tx.conn.Insert(table, data)
return r.Id(), r.Error
return ExecResult{
Id: r.Id(),
Changes: r.Changes(),
Sql: *r.Sql,
SqlArgs: r.Args,
}, r.Error
}
func (tx *Tx) Replace(table string, data map[string]interface{}) (int64, error) {
func (tx *Tx) Replace(table string, data map[string]interface{}) (ExecResult, error) {
r := tx.conn.Replace(table, data)
return r.Id(), r.Error
return ExecResult{
Id: r.Id(),
Changes: r.Changes(),
Sql: *r.Sql,
SqlArgs: r.Args,
}, r.Error
}
func (tx *Tx) Update(table string, data map[string]interface{}, wheres string, args ...interface{}) (int64, error) {
func (tx *Tx) Update(table string, data map[string]interface{}, wheres string, args ...interface{}) (ExecResult, error) {
r := tx.conn.Update(table, data, wheres, args...)
return r.Changes(), r.Error
return ExecResult{
Changes: r.Changes(),
Sql: *r.Sql,
SqlArgs: r.Args,
}, r.Error
}
func (tx *Tx) Delete(table string, wheres string, args ...interface{}) (int64, error) {
func (tx *Tx) Delete(table string, wheres string, args ...interface{}) (ExecResult, error) {
r := tx.conn.Delete(table, wheres, args...)
return r.Changes(), r.Error
return ExecResult{
Changes: r.Changes(),
Sql: *r.Sql,
SqlArgs: r.Args,
}, r.Error
}
134
db/er.html Normal file
View File
@ -0,0 +1,134 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<meta name="viewport" content="width=device-width,initial-scale=1.0,user-scalable=no"/>
<meta name="google" content="notranslate"/>
<title>{{.title}}</title>
<style>
body {
background: #111;
font-size: 12px;
display: flex;
flex-flow: wrap;
align-items: flex-start;
}
.keyHint {
position: absolute;
right: 0;
bottom: 0;
padding: 4px;
color: #999;
}
.keyHint b {
color: #aaa;
}
.group {
background: #222;
border-radius: 10px;
margin: 40px 10px 10px 10px;
position: relative;
display: flex;
flex-flow: wrap;
align-items: flex-start;
}
.group > .title {
color: #999;
position: absolute;
height: 30px;
line-height: 30px;
padding: 0 10px;
top: -30px;
}
.table {
background: #000;
color: #ccc;
margin: 10px;
border-radius: 10px;
}
.table > .title {
text-align: center;
border-bottom: 1px solid #999;
}
.table > .fields {
border-top: 1px solid #999;
}
.table > div {
padding: 3px 5px;
}
em {
font-style: normal;
color: #999;
}
</style>
<script>
let modeIndex = parseInt(localStorage.erMode || '1')
let modeNames = ['简约', '物理视图', '逻辑视图']
function switchMode(index){
modeIndex = index
localStorage.erMode = modeIndex
for (let node of document.querySelectorAll('[text' + modeIndex + ']')) {
node.innerHTML = node.getAttribute('text' + modeIndex)
}
document.querySelector('.keyHint').innerHTML = '当前模式:<b>'+modeNames[modeIndex-1]+'</b>按下Tab键切换'
}
window.addEventListener('load', function (){
switchMode(modeIndex)
})
window.addEventListener('keydown', function switchText(event) {
if (event.code === 'Tab') {
event.preventDefault()
modeIndex++
if (modeIndex > 3) modeIndex = 1
switchMode(modeIndex)
return false
}
})
</script>
</head>
<body>
<div class="keyHint"></div>
{{range .groups}}
<div class="group">
<div class="title" text1="{{.Group}}" text2="{{.Group}}" text3="{{.Comment}}">{{.Group}}</div>
{{range .Tables}}
<div class="table">
<div class="title" text1="{{.Name}}" text2="{{.Name}}" text3="{{.Comment}}">{{.Name}}</div>
<div class="keys">
{{range .Fields}}
{{if eq .Index "pk"}}
<div comment="{{.Comment}}">{{.Name}} <em
text1=""
text2="{{.Type}} {{.Index}}{{.IndexGroup}} {{short .Null}} {{short .Extra}}"
text3="{{.Comment}}"></em></div>
{{end}}
{{end}}
</div>
<div class="fields">
{{range .Fields}}
{{if ne .Index "pk"}}
<div comment="{{.Comment}}">{{.Name}} <em
text1=""
text2="{{.Type}} {{.Index}}{{.IndexGroup}} {{short .Null}} {{short .Extra}}"
text3="{{.Comment}}"></em></div>
{{end}}
{{end}}
</div>
</div>
{{end}}
</div>
{{end}}
</body>
</html>
16
go.mod
View File
@ -8,12 +8,12 @@ require (
github.com/emmansun/gmsm v0.21.1
github.com/gorilla/websocket v1.5.1
github.com/obscuren/ecies v0.0.0-20150213224233-7c0f4a9b18d9
github.com/ssgo/db v0.6.11
github.com/ssgo/discover v0.6.11
github.com/ssgo/httpclient v0.6.11
github.com/ssgo/log v0.6.11
github.com/ssgo/redis v0.6.11
github.com/ssgo/u v0.6.11
github.com/ssgo/db v0.6.12
github.com/ssgo/discover v0.6.12
github.com/ssgo/httpclient v0.6.12
github.com/ssgo/log v0.6.12
github.com/ssgo/redis v0.6.12
github.com/ssgo/u v0.6.12
gopkg.in/yaml.v3 v3.0.1
)
@ -21,8 +21,8 @@ require (
github.com/go-sql-driver/mysql v1.5.0 // indirect
github.com/gomodule/redigo v1.8.8 // indirect
github.com/mitchellh/mapstructure v1.4.1 // indirect
github.com/ssgo/config v0.6.11 // indirect
github.com/ssgo/standard v0.6.11 // indirect
github.com/ssgo/config v0.6.12 // indirect
github.com/ssgo/standard v0.6.12 // indirect
github.com/stretchr/testify v1.8.1 // indirect
golang.org/x/crypto v0.14.0 // indirect
golang.org/x/net v0.17.0 // indirect
View File
@ -33,7 +33,7 @@ func init() {
"newWithoutRedirect": NewHTTPWithoutRedirect,
"newH2CWithoutRedirect": NewH2CHTTPWithoutRedirect,
"setBaseURL": defaultClient.SetBaseURL,
"SetGlobalHeaders": defaultClient.SetGlobalHeaders,
"setGlobalHeaders": defaultClient.SetGlobalHeaders,
"get": defaultClient.Get,
"post": defaultClient.Post,
"put": defaultClient.Put,
7
tests.sh Executable file
View File
@ -0,0 +1,7 @@
cd tests
go test -v test.go db_test.go | logv
go test -v test.go redis_test.go | logv
go test -v test.go crypto_test.go | logv
go test -v test.go file_test.go | logv
go test -v test.go http_test.go | logv
cd ..
29
tests/TestDB.yml Normal file
View File
@ -0,0 +1,29 @@
- group: account
comment: 账号相关
tables:
- name: user
comment: 用户表
fields:
- name: id
comment: 用户编号
type: INTEGER
index: pk
extra: AUTO_INCREMENT
- name: name
comment: 姓名
type: VARCHAR(30)
- name: phone
comment: 手机号
type: VARCHAR(20)
index: unique
- name: addDate
comment: 创建时间
type: DATETIME
index: index
- name: version
comment: 数据版本号
type: INTEGER
index: index
- name: isValid
comment: 是否有效1-启用0-禁用)
type: INTEGER
View File
@ -4,47 +4,164 @@ import (
"apigo.cloud/git/apigo/gojs"
"apigo.cloud/git/apigo/plugin"
_ "apigo.cloud/git/apigo/plugins/db"
_ "apigo.cloud/git/apigo/plugins/file"
_ "apigo.cloud/git/apigo/plugins/redis"
_ "github.com/mattn/go-sqlite3"
"github.com/ssgo/u"
"os"
"path"
"testing"
)
func TestSqlite(t *testing.T) {
func TestSql(t *testing.T) {
rt := gojs.New(nil, nil)
defer rt.Close()
gojs.SetPluginsConfig(map[string]plugin.Config{
"db": {
"default": "sqlite3://test.db",
"default": "sqlite3://Test.db",
},
})
defer os.Remove("test.db")
r, err, _ := rt.Run(`
db.exec('CREATE TABLE test (id INTEGER PRIMARY KEY AUTOINCREMENT, name VARCHAR(30))')
id1 = db.insert('test', {name:'Tom'})
id2 = db.replace('test', {name:'Kitty'})
id3 = db.insert('test', {name:'Lucy'})
return id1 + id2 + id3
defer func() {
_ = os.Remove("Test.db")
}()
var r interface{} = false
err, _ := rt.Exec(`
TestTable = file.loadYaml('./TestDB.yml')
out = db.make(TestTable)
//console.info(...out)
`)
Test(t, "create and insert", u.Int(r) == 6, r, err)
if err == nil {
r, err, _ = rt.Run(`
let r1 = db.insert('user', {name:'Tom'})
let r2 = db.replace('user', {name:'Kitty'})
let r3 = db.insert('user', {name:'Lucy'})
return r1.id + ',' + r2.id + ',' + r3.id + ' | ' + r1.changes + ',' + r2.changes + ',' + r3.changes
`)
}
Test(t, "create and insert", r == "1,2,3 | 1,1,1", r, err)
r, err, _ = rt.Run(`
changes = db.fetch().update('test', {name:'Lucy Wang'}, 'id=?', 3)
return changes
let r = db.fetch().update('user', {name:'Lucy Wang'}, 'id=?', 3)
return r.changes
`)
Test(t, "update", u.Int(r) == 1, r, err)
r, err, _ = rt.Run(`
changes = db.fetch().delete('test', 'id=?', 2)
return changes
let r = db.fetch().delete('user', 'id=?', 2)
return r.changes
`)
Test(t, "update", u.Int(r) == 1, r, err)
Test(t, "delete", u.Int(r) == 1, r, err)
r, err, _ = rt.Run(`
list = db.fetch().query('select id,name from test where id > ?', 1)
return list
let r = db.fetch().query('select id,name from user where id > ?', 1)
return r.result
`)
list := make([]struct {
Id int
Name string
}, 0)
u.Convert(r, &list)
Test(t, "list", len(list) == 1 && list[0].Name == "Lucy Wang", r, err)
rt.Exec("db.destroy()")
}
func TestDao(t *testing.T) {
rdServer := StartRedis("16379")
defer rdServer.Close()
rt := gojs.New(nil, nil)
defer rt.Close()
gojs.SetPluginsConfig(map[string]plugin.Config{
"db": {
"default": "sqlite3://Test.db",
},
"redis": {
"default": "redis://localhost:16379",
},
})
defer func() {
_ = os.Remove("Test.db")
_ = os.Remove("er.html")
_ = os.RemoveAll("Test")
}()
//_ = os.Remove("Test.db")
var r interface{} = false
err, _ := rt.Exec(`
let TestDB = file.loadYaml('./TestDB.yml')
out = db.make(TestDB)
db.makeDao()
// console.info(...out)
`)
daoCode := ""
daoFile := path.Join("Test", "dao.js")
daoExtFile := path.Join("Test", "dao_ext.js")
_ = u.WriteFile(daoExtFile, `
_TestDao.user.getFirstUserName = function(){
let q = this.newQuery()
let u = q.limit(1).fields('id,name').first()
//logger.info(q.getSql())
return u.name
}
_TestDao.userItem.getFullName = function(){
return this.name+'('+this.id+')'
}
`)
if u.FileExists(daoFile) {
code, _ := u.ReadFile(daoFile)
daoCode += code + "\n"
}
if u.FileExists(daoExtFile) {
code, _ := u.ReadFile(daoExtFile)
daoCode += code + "\n"
}
if err == nil {
r, err, _ = rt.Run(daoCode + `
let dao = getTestDao()
let r1 = dao.user.insert({name:'Tom'})
let r2 = dao.user.replace({name:'Kitty'})
//r3 = dao.user.insert({name:'Lucy'})
let u3 = dao.user.new({phone:'18612341234'})
u3.name = 'Lucy'
let r3 = u3.save()
return r1.id + ',' + r2.id + ',' + r3.id + ' | ' + r1.version + ',' + r2.version + ',' + r3.version
`)
}
Test(t, "create and insert", r == "1,2,3 | 1,2,3", r, err)
r, err, _ = rt.Run(daoCode + `
let item = getTestDao().user.get(1)
return item.name
`)
Test(t, "get", r == "Tom", r, err)
r, err, _ = rt.Run(daoCode + `
let u = getTestDao().user.getByPhone('1861234123')
if(u!==null) return 'not null'
u = getTestDao().user.getByPhone('18612341234')
u.name = 'Lucy Wang'
let r = u.save()
return r.changes +','+ r.version
`)
Test(t, "update", r == "1,4", r, err)
r, err, _ = rt.Run(daoCode + `
getTestDao()
let r = getTestDao().user.disable(2)
return r.changes
`)
Test(t, "disable", u.Int(r) == 1, r, err)
r, err, _ = rt.Run(daoCode+`
let q = getTestDao().user.newQuery()
let list = q.where('id>?',1).list()
return list
`)
list := make([]struct{
Id int
@ -53,4 +170,21 @@ func TestSqlite(t *testing.T) {
u.Convert(r, &list)
Test(t, "list", len(list) == 1 && list[0].Name == "Lucy Wang", r, err)
r, err, _ = rt.Run(daoCode+`
return getTestDao().user.getFirstUserName()
`)
Test(t, "table ext", r == "Tom", r, err)
r, err, _ = rt.Run(daoCode+`
return getTestDao().user.newQuery().list()[0].getFullName()
`)
Test(t, "item ext", r == "Tom(1)", r, err)
r, err, _ = rt.Run(daoCode+`
db.makeER(TestDB)
let erStr = file.read('er.html')
return erStr.match(/<div class="title" text1=".+>(.*?)</)[1]
`)
Test(t, "er", r == "account", r, err)
}
View File
@ -3,14 +3,14 @@ module tests
go 1.17
require (
apigo.cloud/git/apigo/gojs v0.0.2
apigo.cloud/git/apigo/gojs v0.0.3
apigo.cloud/git/apigo/plugin v1.0.1
apigo.cloud/git/apigo/plugins v0.0.0
github.com/mattn/go-sqlite3 v1.14.18
github.com/ssgo/discover v0.6.11
github.com/ssgo/redis v0.6.11
github.com/ssgo/s v1.6.11
github.com/ssgo/u v0.6.11
github.com/ssgo/discover v0.6.12
github.com/ssgo/redis v0.6.12
github.com/ssgo/s v1.6.12
github.com/ssgo/u v0.6.12
github.com/tidwall/redcon v1.6.2
)
@ -27,11 +27,11 @@ require (
github.com/obscuren/ecies v0.0.0-20150213224233-7c0f4a9b18d9 // indirect
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect
github.com/shirou/gopsutil/v3 v3.22.10 // indirect
github.com/ssgo/config v0.6.11 // indirect
github.com/ssgo/db v0.6.11 // indirect
github.com/ssgo/httpclient v0.6.11 // indirect
github.com/ssgo/log v0.6.11 // indirect
github.com/ssgo/standard v0.6.11 // indirect
github.com/ssgo/config v0.6.12 // indirect
github.com/ssgo/db v0.6.12 // indirect
github.com/ssgo/httpclient v0.6.12 // indirect
github.com/ssgo/log v0.6.12 // indirect
github.com/ssgo/standard v0.6.12 // indirect
github.com/tidwall/btree v1.1.0 // indirect
github.com/tidwall/match v1.1.1 // indirect
github.com/tklauser/go-sysconf v0.3.10 // indirect
View File
@ -20,6 +20,7 @@ func Test(t *testing.T, title string, ok bool, info ...interface{}) {
}
var _rdData = map[string][]byte{}
var _rdIncr = map[string]int{}
var _rdMData = map[string]map[string][]byte{}
func StartRedis(port string) *redcon.Server {
@ -29,8 +30,28 @@ func StartRedis(port string) *redcon.Server {
case "SET":
_rdData[string(cmd.Args[1])] = cmd.Args[2]
conn.WriteString("OK")
case "SETEX":
_rdData[string(cmd.Args[1])] = cmd.Args[3]
conn.WriteString("OK")
case "MSET":
for i:=1; i< len(cmd.Args); i+=2 {
_rdData[string(cmd.Args[i])] = cmd.Args[i+1]
}
conn.WriteString("OK")
case "GET":
conn.WriteBulk(_rdData[string(cmd.Args[1])])
case "DEL":
delete(_rdData, string(cmd.Args[1]))
conn.WriteInt(1)
case "EXISTS":
if _rdData[string(cmd.Args[1])] == nil {
conn.WriteInt(0)
}else{
conn.WriteInt(1)
}
case "INCR":
_rdIncr[string(cmd.Args[1])] ++
conn.WriteInt(_rdIncr[string(cmd.Args[1])])
case "HSET":
data := _rdMData[string(cmd.Args[1])]
if data == nil {
@ -51,7 +72,7 @@ func StartRedis(port string) *redcon.Server {
conn.WriteArray(0)
}
default:
//fmt.Println(u.BYellow("unknown redis command " + string(cmd.Raw)))
fmt.Println(u.BYellow("unknown redis command " + string(cmd.Raw)))
conn.WriteString("OK")
}
}, func(conn redcon.Conn) bool {