From 4e03ae54acea69e7ef00c2061d7a9f0e852c4c69 Mon Sep 17 00:00:00 2001 From: Roman Potekhin Date: Wed, 25 Apr 2018 08:09:24 +0300 Subject: [PATCH 001/169] Delete unused functions from parsers/common.go --- packages/parser/common.go | 134 ------------------------ packages/parser/common_general_check.go | 17 --- 2 files changed, 151 deletions(-) delete mode 100644 packages/parser/common_general_check.go diff --git a/packages/parser/common.go b/packages/parser/common.go index 2147118f9..9e698506d 100644 --- a/packages/parser/common.go +++ b/packages/parser/common.go @@ -20,10 +20,7 @@ import ( "bytes" "encoding/json" "fmt" - "os" "reflect" - "strconv" - "strings" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/converter" @@ -84,28 +81,6 @@ func InsertInLogTx(transaction *model.DbTransaction, binaryTx []byte, time int64 return nil } -// IsState returns if country is state -func IsState(transaction *model.DbTransaction, country string) (int64, error) { - ids, err := model.GetAllSystemStatesIDs() - if err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.DBError}).Error("get all system states ids") - return 0, err - } - for _, id := range ids { - sp := &model.StateParameter{} - sp.SetTablePrefix(converter.Int64ToStr(id)) - _, err = sp.Get(transaction, "state_name") - if err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.DBError}).Error("state get by name transaction") - return 0, err - } - if strings.ToLower(sp.Name) == strings.ToLower(country) { - return id, nil - } - } - return 0, nil -} - // ParserInterface is parsing transactions type ParserInterface interface { Init() error @@ -115,20 +90,6 @@ type ParserInterface interface { Header() *tx.Header } -// GetTablePrefix returns table prefix -func GetTablePrefix(global string, stateId int64) (string, error) { - globalInt, err := strconv.Atoi(global) - if err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.ConversionError}).Error("converting global to int") - return "", err - } - stateIdStr := converter.Int64ToStr(stateId) - if globalInt == 1 { - return "global", nil - } - return stateIdStr, nil -} - // GetParser returns ParserInterface func GetParser(p *Parser, txType string) (ParserInterface, error) { switch txType { @@ -196,13 +157,6 @@ func (p Parser) GetLogger() *log.Entry { return logger } -// ClearTmp deletes temporary files -func ClearTmp(blocks map[int64]string) { - for _, tmpFileName := range blocks { - os.Remove(tmpFileName) - } -} - // CheckLogTx checks if this transaction exists // And it would have successfully passed a frontal test func CheckLogTx(txBinary []byte, transactions, txQueue bool) error { @@ -356,20 +310,6 @@ func (p *Parser) ErrInfo(verr interface{}) error { return fmt.Errorf("[ERROR] %s (%s)\n%s\n%s", err, utils.Caller(1), p.FormatBlockData(), p.FormatTxMap()) } -// BlockError writes the error of the transaction in the transactions_status table -func (p *Parser) BlockError(err error) { - if len(p.TxHash) == 0 { - return - } - errText := err.Error() - if len(errText) > 255 { - errText = errText[:255] - } - p.DeleteQueueTx(p.TxHash) - ts := &model.TransactionStatus{} - ts.SetError(p.DbTransaction, errText, p.TxHash) -} - // AccessRights checks the access right by executing the condition value func (p *Parser) AccessRights(condition string, iscondition bool) error { logger := p.GetLogger() @@ -401,80 +341,6 @@ func (p *Parser) AccessRights(condition string, iscondition bool) error { return nil } -// AccessChange is 
changing access -func (p *Parser) AccessChange(table, name, global string, stateId int64) error { - logger := p.GetLogger() - prefix, err := GetTablePrefix(global, stateId) - if err != nil { - return err - } - var conditions string - switch table { - case "pages": - page := &model.Page{} - page.SetTablePrefix(prefix) - if _, err := page.Get(name); err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting page") - return err - } - conditions = page.Conditions - case "menus": - menu := &model.Menu{} - menu.SetTablePrefix(prefix) - if _, err := menu.Get(name); err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting menu") - return err - } - conditions = menu.Conditions - } - - if len(conditions) > 0 { - ret, err := p.SmartContract.EvalIf(conditions) - if err != nil { - log.WithFields(log.Fields{"type": consts.EvalError, "error": err}).Error("evaluating conditions") - return err - } - if !ret { - log.WithFields(log.Fields{"type": consts.AccessDenied}).Error("Access denied") - return fmt.Errorf(`Access denied`) - } - } else { - log.WithFields(log.Fields{"type": consts.EmptyObject, "table": prefix + "_" + table}).Error("There is not conditions in") - return fmt.Errorf(`There is not conditions in %s`, prefix+`_`+table) - } - return nil -} - -func (p *Parser) getEGSPrice(name string) (decimal.Decimal, error) { - logger := p.GetLogger() - syspar := &model.SystemParameter{} - fPrice, err := syspar.GetValueParameterByName("op_price", name) - if err != nil { - logger.WithFields(log.Fields{"error": err, "type": consts.DBError}).Error("getting value parameter by name") - return decimal.New(0, 0), p.ErrInfo(err) - } - if fPrice == nil { - return decimal.New(0, 0), nil - } - p.TxCost = 0 - p.TxUsedCost, _ = decimal.NewFromString(*fPrice) - systemParam := &model.SystemParameter{} - _, err = systemParam.Get("fuel_rate") - if err != nil { - logger.WithFields(log.Fields{"error": err, "type": consts.DBError}).Fatal("getting system parameter") - } - fuelRate, err := decimal.NewFromString(systemParam.Value) - if err != nil { - logger.WithFields(log.Fields{"error": err, "type": consts.ConversionError, "value": systemParam.Value}).Error("converting fuel rate system parameter from string to decimal") - return decimal.New(0, 0), p.ErrInfo(err) - } - if fuelRate.Cmp(decimal.New(0, 0)) <= 0 { - logger.Error("fuel rate is less than zero") - return decimal.New(0, 0), fmt.Errorf(`fuel rate must be greater than 0`) - } - return p.TxUsedCost.Mul(fuelRate), nil -} - // CallContract calls the contract functions according to the specified flags func (p *Parser) CallContract(flags int) (resultContract string, err error) { sc := smart.SmartContract{ diff --git a/packages/parser/common_general_check.go b/packages/parser/common_general_check.go deleted file mode 100644 index 94d682a4f..000000000 --- a/packages/parser/common_general_check.go +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2016 The go-daylight Authors -// This file is part of the go-daylight library. -// -// The go-daylight library is free software: you can redistribute it and/or modify -// it under the terms of the GNU Lesser General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// The go-daylight library is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the -// GNU Lesser General Public License for more details. -// -// You should have received a copy of the GNU Lesser General Public License -// along with the go-daylight library. If not, see . - -package parser From 7c9bf1a952aaa1730c4203f2a1d33aa49b198208 Mon Sep 17 00:00:00 2001 From: Roman Potekhin Date: Wed, 25 Apr 2018 10:08:36 +0300 Subject: [PATCH 002/169] Delete unused fields from parser --- packages/parser/common.go | 59 +++++++++++++-------------------------- 1 file changed, 20 insertions(+), 39 deletions(-) diff --git a/packages/parser/common.go b/packages/parser/common.go index 9e698506d..847658ebb 100644 --- a/packages/parser/common.go +++ b/packages/parser/common.go @@ -109,32 +109,27 @@ type Parser struct { dataType int blockData []byte CurrentVersion string - MrklRoot []byte PublicKeys [][]byte - TxBinaryData []byte // transaction binary data - TxFullData []byte // full transaction, with type and data - TxHash []byte - TxSlice [][]byte - TxMap map[string][]byte - TxIds int // count of transactions - TxKeyID int64 - TxEcosystemIDStr string - TxEcosystemID int64 - TxNodePosition uint32 - TxTime int64 - TxType int64 - TxCost int64 // Maximum cost of executing contract - TxFuel int64 // The fuel cost of executed contract - TxUsedCost decimal.Decimal // Used cost of CPU resources - TxPtr interface{} // Pointer to the corresponding struct in consts/struct.go - TxData map[string]interface{} - TxSmart *tx.SmartContract - TxContract *smart.Contract - TxHeader *tx.Header - txParser ParserInterface - DbTransaction *model.DbTransaction - SysUpdate bool + TxBinaryData []byte // transaction binary data + TxFullData []byte // full transaction, with type and data + TxHash []byte + TxKeyID int64 + TxEcosystemID int64 + TxNodePosition uint32 + TxTime int64 + TxType int64 + TxCost int64 // Maximum cost of executing contract + TxFuel int64 // The fuel cost of executed contract + TxUsedCost decimal.Decimal // Used cost of CPU resources + TxPtr interface{} // Pointer to the corresponding struct in consts/struct.go + TxData map[string]interface{} + TxSmart *tx.SmartContract + TxContract *smart.Contract + TxHeader *tx.Header + txParser ParserInterface + DbTransaction *model.DbTransaction + SysUpdate bool SmartContract smart.SmartContract } @@ -284,20 +279,6 @@ func (p *Parser) FormatBlockData() string { return result } -// FormatTxMap returns the formated TxMap -func (p *Parser) FormatTxMap() string { - result := "" - for k, v := range p.TxMap { - switch k { - case "sign": - result += "[" + k + "] = " + fmt.Sprintf("%x\n", v) - default: - result += "[" + k + "] = " + fmt.Sprintf("%s\n", v) - } - } - return result -} - // ErrInfo returns the more detailed error func (p *Parser) ErrInfo(verr interface{}) error { var err error @@ -307,7 +288,7 @@ func (p *Parser) ErrInfo(verr interface{}) error { case string: err = fmt.Errorf(verr.(string)) } - return fmt.Errorf("[ERROR] %s (%s)\n%s\n%s", err, utils.Caller(1), p.FormatBlockData(), p.FormatTxMap()) + return fmt.Errorf("[ERROR] %s (%s)\n%s", err, utils.Caller(1), p.FormatBlockData()) } // AccessRights checks the access right by executing the condition value From 43cd8e5d49e582c5624bd678d092820b2d46fd7f Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 12:40:33 +0300 Subject: [PATCH 003/169] move changes --- cmd/config.go | 4 +- packages/conf/conf.go | 26 +++ packages/conf/runmode.go | 40 ++++ packages/migration/{ => vde}/vde.go | 0 packages/smart/funcs.go | 41 ++++ packages/vdemanager/config.go | 65 +++++++ 
packages/vdemanager/manager.go | 286 ++++++++++++++++++++++++++++ 7 files changed, 460 insertions(+), 2 deletions(-) create mode 100644 packages/conf/runmode.go rename packages/migration/{ => vde}/vde.go (100%) create mode 100644 packages/vdemanager/config.go create mode 100644 packages/vdemanager/manager.go diff --git a/cmd/config.go b/cmd/config.go index c6779160a..90ace6d01 100644 --- a/cmd/config.go +++ b/cmd/config.go @@ -136,7 +136,7 @@ func init() { configCmd.Flags().StringVar(&conf.Config.TLSKey, "tls-key", "", "Filepath to the private key") configCmd.Flags().Int64Var(&conf.Config.MaxPageGenerationTime, "mpgt", 1000, "Max page generation time in ms") configCmd.Flags().StringSliceVar(&conf.Config.NodesAddr, "nodesAddr", []string{}, "List of addresses for downloading blockchain") - configCmd.Flags().BoolVar(&conf.Config.PrivateBlockchain, "privateBlockchain", false, "Is blockchain private") + configCmd.Flags().StringVar(&conf.Config.RunningMode, "runMode", "CommonBlockchain", "Node running mode") viper.BindPFlag("PidFilePath", configCmd.Flags().Lookup("pid")) viper.BindPFlag("LockFilePath", configCmd.Flags().Lookup("lock")) @@ -147,7 +147,7 @@ func init() { viper.BindPFlag("TLSCert", configCmd.Flags().Lookup("tls-cert")) viper.BindPFlag("TLSKey", configCmd.Flags().Lookup("tls-key")) viper.BindPFlag("MaxPageGenerationTime", configCmd.Flags().Lookup("mpgt")) - viper.BindPFlag("PrivateBlockchain", configCmd.Flags().Lookup("privateBlockchain")) viper.BindPFlag("TempDir", configCmd.Flags().Lookup("tempDir")) viper.BindPFlag("NodesAddr", configCmd.Flags().Lookup("nodesAddr")) + viper.BindPFlag("RunningMode", configCmd.Flags().Lookup("runMode")) } diff --git a/packages/conf/conf.go b/packages/conf/conf.go index 7ac76b6e8..2e175b01c 100644 --- a/packages/conf/conf.go +++ b/packages/conf/conf.go @@ -89,6 +89,7 @@ type GlobalConfig struct { TLS bool // TLS is on/off. It is required for https TLSCert string // TLSCert is a filepath of the fullchain of certificate. TLSKey string // TLSKey is a filepath of the private key. 
+ RunningMode string MaxPageGenerationTime int64 // in milliseconds @@ -216,3 +217,28 @@ func FillRuntimeKey() error { func GetNodesAddr() []string { return Config.NodesAddr[:] } + +// IsPrivateBlockchain check running mode +func (c *GlobalConfig) IsPrivateBlockchain() bool { + return RunMode(c.RunningMode).IsPrivateBlockchain() +} + +// IsPublicBlockchain check running mode +func (c *GlobalConfig) IsPublicBlockchain() bool { + return RunMode(c.RunningMode).IsPublicBlockchain() +} + +// IsVDE check running mode +func (c *GlobalConfig) IsVDE() bool { + return RunMode(c.RunningMode).IsVDE() +} + +// IsVDEMaster check running mode +func (c *GlobalConfig) IsVDEMaster() bool { + return RunMode(c.RunningMode).IsVDEMaster() +} + +// IsSupportingVDE check running mode +func (c *GlobalConfig) IsSupportingVDE() bool { + return RunMode(c.RunningMode).IsSupportingVDE() +} diff --git a/packages/conf/runmode.go b/packages/conf/runmode.go new file mode 100644 index 000000000..a03f2aeb0 --- /dev/null +++ b/packages/conf/runmode.go @@ -0,0 +1,40 @@ +package conf + +// PrivateBlockchain const label for running mode +const privateBlockchain RunMode = "PrivateBlockchain" + +// PublicBlockchain const label for running mode +const publicBlockchain RunMode = "PublicBlockchain" + +// VDEManager const label for running mode +const vdeMaster RunMode = "VDEMaster" + +// VDE const label for running mode +const vde RunMode = "VDE" + +type RunMode string + +// IsPublicBlockchain returns true if mode equal PublicBlockchain +func (rm RunMode) IsPublicBlockchain() bool { + return rm == publicBlockchain +} + +// IsPrivateBlockchain returns true if mode equal PrivateBlockchain +func (rm RunMode) IsPrivateBlockchain() bool { + return rm == privateBlockchain +} + +// IsVDEMaster returns true if mode equal vdeMaster +func (rm RunMode) IsVDEMaster() bool { + return rm == vdeMaster +} + +// IsVDE returns true if mode equal vde +func (rm RunMode) IsVDE() bool { + return rm == vde +} + +// IsSupportingVDE returns true if mode support vde +func (rm RunMode) IsSupportingVDE() bool { + return rm.IsVDE() || rm.IsVDEMaster() +} diff --git a/packages/migration/vde.go b/packages/migration/vde/vde.go similarity index 100% rename from packages/migration/vde.go rename to packages/migration/vde/vde.go diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index ef9aba4e8..ef7e6b8d1 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -46,6 +46,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/script" "github.com/GenesisKernel/go-genesis/packages/utils" "github.com/GenesisKernel/go-genesis/packages/utils/tx" + "github.com/GenesisKernel/go-genesis/packages/vdemanager" "github.com/satori/go.uuid" "github.com/shopspring/decimal" @@ -240,6 +241,21 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { f["UpdateCron"] = UpdateCron vmExtendCost(vm, getCost) vmFuncCallsDB(vm, funcCallsDB) + case script.VMTypeVDEMaster: + f["HTTPRequest"] = HTTPRequest + f["GetMapKeys"] = GetMapKeys + f["SortedKeys"] = SortedKeys + f["Date"] = Date + f["HTTPPostJSON"] = HTTPPostJSON + f["ValidateCron"] = ValidateCron + f["UpdateCron"] = UpdateCron + f["CreateVDE"] = CreateVDE + f["DeleteVDE"] = DeleteVDE + f["StartVDE"] = StartVDE + f["StopVDE"] = StopVDE + f["GetVDEList"] = GetVDEList + vmExtendCost(vm, getCost) + vmFuncCallsDB(vm, funcCallsDB) case script.VMTypeSmart: f["GetBlock"] = GetBlock f["DBSelectMetrics"] = DBSelectMetrics @@ -1415,3 +1431,28 @@ func StringToBytes(src string) []byte { func BytesToString(src []byte) string { 
return string(src) } + +// CreateVDE allow create new VDE throw vdemanager +func CreateVDE(sc *SmartContract, name, dbUser, dbPassword string, port int64) error { + return vdemanager.Manager.CreateVDE(name, dbUser, dbPassword, int(port)) +} + +// DeleteVDE delete vde +func DeleteVDE(sc *SmartContract, name string) error { + return vdemanager.Manager.DeleteVDE(name) +} + +// StartVDE run VDE process +func StartVDE(sc *SmartContract, name string) error { + return vdemanager.Manager.StartVDE(name) +} + +// StopVDE stops VDE process +func StopVDE(sc *SmartContract, name string) error { + return vdemanager.Manager.StopVDE(name) +} + +// GetVDEList returns list VDE process with statuses +func GetVDEList(sc *SmartContract, name string) (map[string]string, error) { + return vdemanager.Manager.ListProcess() +} diff --git a/packages/vdemanager/config.go b/packages/vdemanager/config.go new file mode 100644 index 000000000..450ff5aac --- /dev/null +++ b/packages/vdemanager/config.go @@ -0,0 +1,65 @@ +package vdemanager + +import ( + "fmt" + "os/exec" + "path/filepath" +) + +const ( + inidDBCommand = "initDatabase" + genKeysCommand = "generateKeys" + startCommand = "start" +) +// ChildVDEConfig struct to manage child entry +type ChildVDEConfig struct { + Executable string + Name string + Directory string + DBUser string + DBPassword string + ConfigFileName string + HTTPPort int +} + +func (c ChildVDEConfig) configCommand() *exec.Cmd { + + args := []string{ + "config", + fmt.Sprintf("--path=%s", c.configPath()), + fmt.Sprintf("--dbUser=%s", c.DBUser), + fmt.Sprintf("--dbPassword=%s", c.DBPassword), + fmt.Sprintf("--dbName=%s", c.Name), + fmt.Sprintf("--httpPort=%d", c.HTTPPort) + fmt.Sprintf("--dataDir=%s", c.Directory), + fmt.Sprintf("--keysDir=%s", c.Directory), + fmt.Sprintf("--runMode=VDE") + } + + return exec.Command(c.Executable, args...) +} + +func (c ChildVDEConfig) initDBCommand() exec.Cmd { + return getCommand(inidDBCommand) +} + +func (c ChildVDEConfig) generateKeysCommand() exec.Cmd { + return getCommand(genKeysCommand) +} + +func (c ChildVDEConfig) startCommand() exec.Cmd { + retturn getCommand(startCommand) +} + +func (c ChildVDEConfig) configPath() string { + return filepath.Join(c.Directory, ConfigFileName) +} + +func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { + return args := []string{ + commandName, + fmt.Sprintf("--config=%s", c.configPath()), + } + + return exec.Command(c.Executable, args...) 
+} \ No newline at end of file diff --git a/packages/vdemanager/manager.go b/packages/vdemanager/manager.go new file mode 100644 index 000000000..d35362ce2 --- /dev/null +++ b/packages/vdemanager/manager.go @@ -0,0 +1,286 @@ +package vdemanager + +import ( + "errors" + "fmt" + "io/ioutil" + "os" + "path" + "path/filepath" + + "github.com/GenesisKernel/go-genesis/packages/conf" + + "github.com/GenesisKernel/go-genesis/packages/consts" + "github.com/GenesisKernel/go-genesis/packages/model" + pConf "github.com/rpoletaev/supervisord/config" + "github.com/rpoletaev/supervisord/process" + log "github.com/sirupsen/logrus" +) + +const ( + childFolder = "configs" + createRoleTemplate = `CREATE ROLE %s WITH ENCRYPTED PASSWORD '%s' NOSUPERUSER NOCREATEDB NOCREATEROLE INHERIT LOGIN` + createDBTemplate = `CREATE DATABASE %s OWNER %s` + + dropDBTemplate = `DROP OWNED BY %s CASCADE` + dropDBRoleTemplate = `DROP ROLE IF EXISTS %s` + commandTemplate = `%s -VDEMode=true -configPath=%s -workDir=%s` +) + +var ( + errWrongMode = errors.New("node must be running as VDEMaster") +) + +// VDEManager struct +type VDEManager struct { + processes *process.ProcessManager +} + +var ( + Manager *VDEManager + childConfigsPath string +) + +// InitVDEManager create init instance of VDEManager +func InitVDEManager() error { + if err := prepareWorkDir(); err != nil { + return err + } + + return initProcessManager() +} + +func prepareWorkDir() error { + childConfigsPath = path.Join(conf.Config.DataDir, childFolder) + + if _, err := os.Stat(childConfigsPath); os.IsNotExist(err) { + if err := os.Mkdir(childConfigsPath, 0700); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("creating configs directory") + return err + } + } + + return nil +} + +// CreateVDE creates one instance of VDE +func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) error { + + config := ChildVDEConfig{ + Executable: path.Join(conf.Config.DataDir, consts.NodeExecutableFileName), + Name: name, + Directory: path.Join(childConfigsPath, name) + DBUser: dbUser, + DBPassword: dbPassword, + ConfigFileName: consts.DefaultConfigFile, + HTTPPort: port, + } + + if mgr.processes == nil { + log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("creating new VDE") + return errWrongMode + } + + if err := mgr.createVDEDB(name, dbUser, dbPassword); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on creating VDE DB") + return err + } + + if err := mgr.initVDEDir(name); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "DirName": name, "error": err}).Error("on init VDE dir") + return err + } + + cmd := config.configCommand() + if err := cmd.Run(); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "args": cmd.Args}).Error("on run config command") + return err + } + + if err := config.generateKeysCommand().Run(); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "args": cmd.Args}).Error("on run generateKeys command") + return err + } + + if err := config.initDBCommand().Run(); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "args": cmd.Args}).Error("on run initDB command") + return err + } + + procConfEntry := pConf.NewConfigEntry(config.Directory) + procConfEntry.Name = "program:" + name + command := fmt.Sprintf("%s --configPath=%s", config.Executable, config.Directory) + procConfEntry.AddKeyValue("command", command) + proc := process.NewProcess("vdeMaster", confEntry) + + 
mgr.processes.Add(name, proc) + mgr.processes.Find(name).Start(true) + return nil +} + +// ListProcess returns list of process names with state of process +func (mgr *VDEManager) ListProcess() (map[string]string, error) { + if mgr.processes == nil { + log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("get VDE list") + return nil, errWrongMode + } + + list := make(map[string]string) + + mgr.processes.ForEachProcess(func(p *process.Process) { + list[p.GetName()] = p.GetState().String() + }) + + return list, nil +} + +// DeleteVDE stop VDE process and remove VDE folder +func (mgr *VDEManager) DeleteVDE(name string) error { + + if mgr.processes == nil { + log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("deleting VDE") + return errWrongMode + } + + p := mgr.processes.Find(name) + if p != nil { + p.Stop(true) + } + + vdeDir := path.Join(childConfigsPath, name) + vdeConfigPath := filepath.Join(vdeDir, consts.DefaultConfigFile) + vdeConfig, err := conf.GetConfigFromPath(vdeConfigPath) + if err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Errorf("Getting config from path %s", vdeConfigPath) + return err + } + + dropDBquery := fmt.Sprintf(dropDBTemplate, vdeConfig.DB.User) + if err := model.DBConn.Exec(dropDBquery).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("Deleting vde db") + return err + } + + dropVDERoleQuery := fmt.Sprintf(dropDBRoleTemplate, vdeConfig.DB.User) + if err := model.DBConn.Exec(dropVDERoleQuery).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("Deleting vde db user") + return err + } + + return os.RemoveAll(vdeDir) +} + +// StartVDE find process and then start him +func (mgr *VDEManager) StartVDE(name string) error { + + if mgr.processes == nil { + log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("starting VDE") + return errWrongMode + } + + proc := mgr.processes.Find(name) + if proc == nil { + err := fmt.Errorf(`VDE '%s' is not exists`, name) + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on find VDE process") + return err + } + + state := proc.GetState() + if state == process.STOPPED || + state == process.EXITED || + state == process.FATAL { + proc.Start(true) + log.WithFields(log.Fields{"vde_name": name}).Info("VDE started") + return nil + } + + err := fmt.Errorf("VDE '%s' is %s", name, state) + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on starting VDE") + return err +} + +// StopVDE find process with definded name and then stop him +func (mgr *VDEManager) StopVDE(name string) error { + + if mgr.processes == nil { + log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("on stopping VDE process") + return errWrongMode + } + + proc := mgr.processes.Find(name) + if proc == nil { + err := fmt.Errorf(`VDE '%s' is not exists`, name) + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on find VDE process") + return err + } + + state := proc.GetState() + if state == process.RUNNING || + state == process.STARTING { + proc.Stop(true) + log.WithFields(log.Fields{"vde_name": name}).Info("VDE is stoped") + return nil + } + + err := fmt.Errorf("VDE '%s' is %s", name, state) + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on stoping VDE") + return err +} + +func (mgr *VDEManager) 
createVDEDB(vdeName, login, pass string) error { + + if err := model.DBConn.Exec(fmt.Sprintf(createRoleTemplate, login, pass)).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating VDE DB User") + return err + } + + if err := model.DBConn.Exec(fmt.Sprintf(createDBTemplate, vdeName, login)).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating VDE DB") + return err + } + + return nil +} + +func (mgr *VDEManager) initVDEDir(vdeName string) error { + + vdeDirName := path.Join(childConfigsPath, vdeName) + if _, err := os.Stat(vdeDirName); os.IsNotExist(err) { + if err := os.Mkdir(vdeDirName, 0700); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("creating VDE directory") + return err + } + } + + return nil +} + +func initProcessManager() error { + Manager = &VDEManager{ + processes: process.NewProcessManager(), + } + + list, err := ioutil.ReadDir(childConfigsPath) + if err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err, "path": childConfigsPath}).Error("Initialising VDE list") + return err + } + + for _, item := range list { + if item.IsDir() { + procDir := path.Join(childConfigsPath, item.Name()) + commandStr := fmt.Sprintf(commandTemplate, bin(), filepath.Join(procDir, consts.DefaultConfigFile), procDir) + confEntry := pConf.NewConfigEntry(procDir) + confEntry.Name = "program:" + item.Name() + confEntry.AddKeyValue("command", commandStr) + confEntry.AddKeyValue("redirect_stderr", "true") + confEntry.AddKeyValue("autostart", "true") + confEntry.AddKeyValue("autorestart", "true") + + proc := process.NewProcess("vdeMaster", confEntry) + Manager.processes.Add(item.Name(), proc) + } + } + + return nil +} From ddae5806e4093cb4266c222969a7290255f6d2fb Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:37:01 +0300 Subject: [PATCH 004/169] setup vde mode for vm in default handler --- packages/api/api.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/api/api.go b/packages/api/api.go index 5412ce6ab..d891d82fe 100644 --- a/packages/api/api.go +++ b/packages/api/api.go @@ -30,6 +30,7 @@ import ( hr "github.com/julienschmidt/httprouter" log "github.com/sirupsen/logrus" + "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/model" @@ -241,10 +242,8 @@ func fillToken(w http.ResponseWriter, r *http.Request, data *apiData, logger *lo func fillParams(params map[string]int) apiHandle { return func(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.Entry) error { - // Getting and validating request parameters - vde := r.FormValue(`vde`) - if vde == `1` || vde == `true` { - data.vm = smart.GetVM(true, data.ecosystemId) + if conf.Config.IsSupportingVDE() { + data.vm = smart.GetVM(true, consts.DefaultVDE) if data.vm == nil { return errorAPI(w, `E_VDE`, http.StatusBadRequest, data.ecosystemId) } @@ -252,6 +251,7 @@ func fillParams(params map[string]int) apiHandle { } else { data.vm = smart.GetVM(false, 0) } + for key, par := range params { val := r.FormValue(key) if par&pOptional == 0 && len(val) == 0 { From 7218f8477cec5865c91a193d453b4a6c5178b968 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:37:49 +0300 Subject: [PATCH 005/169] separate routes by vde --- packages/api/route.go | 26 
+++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/packages/api/route.go b/packages/api/route.go index 0157212f6..fd03b9c57 100644 --- a/packages/api/route.go +++ b/packages/api/route.go @@ -19,6 +19,7 @@ package api import ( "strings" + "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/utils/tx" @@ -49,14 +50,8 @@ func Route(route *hr.Router) { route.Handle(`OPTIONS`, consts.ApiPath+`*name`, optionsHandler()) route.Handle(`GET`, consts.ApiPath+`data/:table/:id/:column/:hash`, dataHandler()) - get(`appparam/:appid/:name`, `?ecosystem:int64`, authWallet, appParam) - get(`appparams/:appid`, `?ecosystem:int64,?names:string`, authWallet, appParams) - get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) get(`contract/:name`, ``, authWallet, getContract) get(`contracts`, `?limit ?offset:int64`, authWallet, getContracts) - get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) - get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) - get(`ecosystems`, ``, authWallet, ecosystems) get(`getuid`, ``, getUID) get(`list/:name`, `?limit ?offset:int64,?columns:string`, authWallet, list) get(`row/:name/:id`, `?columns:string`, authWallet, row) @@ -66,11 +61,7 @@ func Route(route *hr.Router) { get(`systemparams`, `?names:string`, authWallet, systemParams) get(`table/:name`, ``, authWallet, table) get(`tables`, `?limit ?offset:int64`, authWallet, tables) - get(`txstatus/:hash`, ``, authWallet, txstatus) get(`test/:name`, ``, getTest) - get(`history/:table/:id`, ``, authWallet, getHistory) - get(`block/:id`, ``, getBlockInfo) - get(`maxblockid`, ``, getMaxBlockID) get(`version`, ``, getVersion) get(`avatar/:ecosystem/:member`, ``, getAvatar) get(`config/:option`, ``, getConfigOption) @@ -78,7 +69,6 @@ func Route(route *hr.Router) { post(`content/page/:name`, `?lang:string`, authWallet, getPage) post(`content/menu/:name`, `?lang:string`, authWallet, getMenu) post(`content/hash/:name`, ``, getPageHash) - post(`vde/create`, ``, authWallet, vdeCreate) post(`login`, `?pubkey signature:hex,?key_id ?mobile:string,?ecosystem ?expire ?role_id:int64`, login) post(`prepare/:name`, `?token_ecosystem:int64,?max_sum ?payover:string`, authWallet, contractHandlers.prepareContract) post(`contract/:request_id`, `?pubkey signature:hex, time:string, ?token_ecosystem:int64,?max_sum ?payover:string`, authWallet, blockchainUpdatingState, contractHandlers.contract) @@ -89,6 +79,20 @@ func Route(route *hr.Router) { post(`updnotificator`, `ids:string`, updateNotificator) methodRoute(route, `POST`, `node/:name`, `?token_ecosystem:int64,?max_sum ?payover:string`, contractHandlers.nodeContract) + + if !conf.Config.IsSupportingVDE() { + get(`appparam/:appid/:name`, `?ecosystem:int64`, authWallet, appParam) + get(`appparams/:appid`, `?ecosystem:int64,?names:string`, authWallet, appParams) + get(`txstatus/:hash`, ``, authWallet, txstatus) + get(`history/:table/:id`, ``, authWallet, getHistory) + get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) + get(`block/:id`, ``, getBlockInfo) + get(`maxblockid`, ``, getMaxBlockID) + get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) + get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) + get(`systemparams`, `?names:string`, authWallet, systemParams) + get(`ecosystems`, ``, authWallet, ecosystems) + } } func processParams(input 
string) (params map[string]int) { From 62a792632648216aaf7e797c8a57a849a6e5a6ee Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:38:36 +0300 Subject: [PATCH 006/169] separate vde migration to own package --- packages/migration/vde/vde.go | 2 +- packages/model/db.go | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde.go index b63cf858d..640338e93 100644 --- a/packages/migration/vde/vde.go +++ b/packages/migration/vde/vde.go @@ -1,4 +1,4 @@ -package migration +package vde var SchemaVDE = ` DROP TABLE IF EXISTS "%[1]d_vde_members"; diff --git a/packages/model/db.go b/packages/model/db.go index f784fc70c..7c7c8efdb 100644 --- a/packages/model/db.go +++ b/packages/model/db.go @@ -10,6 +10,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/crypto" "github.com/GenesisKernel/go-genesis/packages/migration" + "github.com/GenesisKernel/go-genesis/packages/migration/vde" "github.com/jinzhu/gorm" log "github.com/sirupsen/logrus" @@ -155,7 +156,7 @@ func ExecSchemaEcosystem(db *DbTransaction, id int, wallet int64, name string, f // ExecSchemaLocalData is executing schema with local data func ExecSchemaLocalData(id int, wallet int64) error { - return DBConn.Exec(fmt.Sprintf(migration.SchemaVDE, id, wallet)).Error + return DBConn.Exec(fmt.Sprintf(vde.SchemaVDE, id, wallet)).Error } // ExecSchema is executing schema From eab470a84f902e4bd2c0016ee64b3bd48e003433 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 15:59:38 +0300 Subject: [PATCH 007/169] temp commit --- packages/consts/log_types.go | 2 ++ packages/smart/smart.go | 10 +++++++++- packages/vdemanager/config.go | 29 +++++++++++++++-------------- packages/vdemanager/manager.go | 10 ++++++++-- 4 files changed, 34 insertions(+), 17 deletions(-) diff --git a/packages/consts/log_types.go b/packages/consts/log_types.go index a6bf14b16..7812ab193 100644 --- a/packages/consts/log_types.go +++ b/packages/consts/log_types.go @@ -53,4 +53,6 @@ const ( BCRelevanceError = "BCRelevanceError" BCActualizationError = "BCActualizationError" SchedulerError = "SchedulerError" + WrongModeError = "WrongModeError" + VDEManagerError = "VDEManagerError" ) diff --git a/packages/smart/smart.go b/packages/smart/smart.go index 27223e5a1..6f6242fb7 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -486,7 +486,15 @@ func LoadVDEContracts(transaction *model.DbTransaction, prefix string) (err erro } state := converter.StrToInt64(prefix) vm := newVM() - EmbedFuncs(vm, script.VMTypeVDE) + + var vmt script.VMType + if conf.Config.IsVDE() { + vmt = script.VMTypeVDE + } else if conf.Config.IsVDEMaster() { + vmt = script.VMTypeVDEMaster + } + + EmbedFuncs(vm, vmt) smartVDE[state] = vm LoadSysFuncs(vm, int(state)) for _, item := range contracts { diff --git a/packages/vdemanager/config.go b/packages/vdemanager/config.go index 450ff5aac..bcafa10ff 100644 --- a/packages/vdemanager/config.go +++ b/packages/vdemanager/config.go @@ -7,10 +7,11 @@ import ( ) const ( - inidDBCommand = "initDatabase" + inidDBCommand = "initDatabase" genKeysCommand = "generateKeys" - startCommand = "start" + startCommand = "start" ) + // ChildVDEConfig struct to manage child entry type ChildVDEConfig struct { Executable string @@ -30,36 +31,36 @@ func (c ChildVDEConfig) configCommand() *exec.Cmd { fmt.Sprintf("--dbUser=%s", c.DBUser), fmt.Sprintf("--dbPassword=%s", c.DBPassword), fmt.Sprintf("--dbName=%s", c.Name), - 
fmt.Sprintf("--httpPort=%d", c.HTTPPort) + fmt.Sprintf("--httpPort=%d", c.HTTPPort), fmt.Sprintf("--dataDir=%s", c.Directory), fmt.Sprintf("--keysDir=%s", c.Directory), - fmt.Sprintf("--runMode=VDE") + "--runMode=VDE", } return exec.Command(c.Executable, args...) } -func (c ChildVDEConfig) initDBCommand() exec.Cmd { - return getCommand(inidDBCommand) +func (c ChildVDEConfig) initDBCommand() *exec.Cmd { + return c.getCommand(inidDBCommand) } -func (c ChildVDEConfig) generateKeysCommand() exec.Cmd { - return getCommand(genKeysCommand) +func (c ChildVDEConfig) generateKeysCommand() *exec.Cmd { + return c.getCommand(genKeysCommand) } -func (c ChildVDEConfig) startCommand() exec.Cmd { - retturn getCommand(startCommand) +func (c ChildVDEConfig) startCommand() *exec.Cmd { + return c.getCommand(startCommand) } func (c ChildVDEConfig) configPath() string { - return filepath.Join(c.Directory, ConfigFileName) + return filepath.Join(c.Directory, c.ConfigFileName) } -func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { - return args := []string{ +func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { + args := []string{ commandName, fmt.Sprintf("--config=%s", c.configPath()), } return exec.Command(c.Executable, args...) -} \ No newline at end of file +} diff --git a/packages/vdemanager/manager.go b/packages/vdemanager/manager.go index d35362ce2..d1a37d413 100644 --- a/packages/vdemanager/manager.go +++ b/packages/vdemanager/manager.go @@ -66,10 +66,16 @@ func prepareWorkDir() error { // CreateVDE creates one instance of VDE func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) error { + execPath, err := os.Executable() + if err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("on getting executable path") + return err + } + config := ChildVDEConfig{ - Executable: path.Join(conf.Config.DataDir, consts.NodeExecutableFileName), + Executable: execPath, Name: name, - Directory: path.Join(childConfigsPath, name) + Directory: path.Join(childConfigsPath, name), DBUser: dbUser, DBPassword: dbPassword, ConfigFileName: consts.DefaultConfigFile, From f8ee848268e85f39cfe461992dbe73ada4178ce0 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Tue, 8 May 2018 09:59:10 +0300 Subject: [PATCH 008/169] temporary commit --- cmd/config.go | 2 +- packages/conf/conf.go | 35 +++++++++-- packages/consts/consts.go | 3 + packages/daemons/common.go | 13 ++++- packages/daylight/daemonsctl/daemonsctl.go | 27 ++++----- packages/daylight/start.go | 38 ++++++------ packages/script/vminit.go | 2 + packages/smart/smart.go | 6 +- packages/tcpserver/tcpserver.go | 6 ++ packages/vdemanager/manager.go | 68 ++++++++++------------ 10 files changed, 120 insertions(+), 80 deletions(-) diff --git a/cmd/config.go b/cmd/config.go index 90ace6d01..127f7c827 100644 --- a/cmd/config.go +++ b/cmd/config.go @@ -136,7 +136,7 @@ func init() { configCmd.Flags().StringVar(&conf.Config.TLSKey, "tls-key", "", "Filepath to the private key") configCmd.Flags().Int64Var(&conf.Config.MaxPageGenerationTime, "mpgt", 1000, "Max page generation time in ms") configCmd.Flags().StringSliceVar(&conf.Config.NodesAddr, "nodesAddr", []string{}, "List of addresses for downloading blockchain") - configCmd.Flags().StringVar(&conf.Config.RunningMode, "runMode", "CommonBlockchain", "Node running mode") + configCmd.Flags().StringVar(&conf.Config.RunningMode, "runMode", "PublicBlockchain", "Node running mode") viper.BindPFlag("PidFilePath", configCmd.Flags().Lookup("pid")) 
viper.BindPFlag("LockFilePath", configCmd.Flags().Lookup("lock")) diff --git a/packages/conf/conf.go b/packages/conf/conf.go index 2e175b01c..b91be9b38 100644 --- a/packages/conf/conf.go +++ b/packages/conf/conf.go @@ -133,10 +133,33 @@ func LoadConfig(path string) error { if err != nil { return errors.Wrapf(err, "marshalling config to global struct variable") } - return nil } +// GetConfigFromPath read config from path and returns GlobalConfig struct +func GetConfigFromPath(path string) (*GlobalConfig, error) { + log.WithFields(log.Fields{"path": path}).Info("Loading config") + + _, err := os.Stat(path) + if os.IsNotExist(err) { + return nil, errors.Errorf("Unable to load config file %s", path) + } + + viper.SetConfigFile(path) + err = viper.ReadInConfig() + if err != nil { + return nil, errors.Wrapf(err, "reading config") + } + + c := &GlobalConfig{} + err = viper.Unmarshal(c) + if err != nil { + return c, errors.Wrapf(err, "marshalling config to global struct variable") + } + + return c, nil +} + // SaveConfig save global parameters to configFile func SaveConfig(path string) error { dir := filepath.Dir(path) @@ -219,26 +242,26 @@ func GetNodesAddr() []string { } // IsPrivateBlockchain check running mode -func (c *GlobalConfig) IsPrivateBlockchain() bool { +func (c GlobalConfig) IsPrivateBlockchain() bool { return RunMode(c.RunningMode).IsPrivateBlockchain() } // IsPublicBlockchain check running mode -func (c *GlobalConfig) IsPublicBlockchain() bool { +func (c GlobalConfig) IsPublicBlockchain() bool { return RunMode(c.RunningMode).IsPublicBlockchain() } // IsVDE check running mode -func (c *GlobalConfig) IsVDE() bool { +func (c GlobalConfig) IsVDE() bool { return RunMode(c.RunningMode).IsVDE() } // IsVDEMaster check running mode -func (c *GlobalConfig) IsVDEMaster() bool { +func (c GlobalConfig) IsVDEMaster() bool { return RunMode(c.RunningMode).IsVDEMaster() } // IsSupportingVDE check running mode -func (c *GlobalConfig) IsSupportingVDE() bool { +func (c GlobalConfig) IsSupportingVDE() bool { return RunMode(c.RunningMode).IsSupportingVDE() } diff --git a/packages/consts/consts.go b/packages/consts/consts.go index 936a26602..7470b5de4 100644 --- a/packages/consts/consts.go +++ b/packages/consts/consts.go @@ -157,3 +157,6 @@ const TxRequestExpire = 1 * time.Minute // DefaultTempDirName is default name of temporary directory const DefaultTempDirName = "genesis-temp" + +// DefaultVDE allways is 1 +const DefaultVDE = 1 diff --git a/packages/daemons/common.go b/packages/daemons/common.go index 861c03983..8f1bb4d6b 100644 --- a/packages/daemons/common.go +++ b/packages/daemons/common.go @@ -130,7 +130,7 @@ func StartDaemons() { utils.CancelFunc = cancel utils.ReturnCh = make(chan string) - daemonsToStart := serverList + daemonsToStart := getDaemonsToStart() if conf.Config.TestRollBack { daemonsToStart = rollbackList } @@ -156,3 +156,14 @@ func getHostPort(h string) string { } return fmt.Sprintf("%s:%d", h, consts.DEFAULT_TCP_PORT) } + +func getDaemonsToStart() []string { + if conf.Config.IsSupportingVDE() { + return []string{ + "Notificator", + "Scheduler", + } + } + + return serverList +} diff --git a/packages/daylight/daemonsctl/daemonsctl.go b/packages/daylight/daemonsctl/daemonsctl.go index cdddac4d8..84cac3036 100644 --- a/packages/daylight/daemonsctl/daemonsctl.go +++ b/packages/daylight/daemonsctl/daemonsctl.go @@ -14,17 +14,19 @@ import ( // RunAllDaemons start daemons, load contracts and tcpserver func RunAllDaemons() error { - logEntry := log.WithFields(log.Fields{"daemon_name": 
"block_collection"}) - - daemons.InitialLoad(logEntry) - err := syspar.SysUpdate(nil) - if err != nil { - log.Errorf("can't read system parameters: %s", utils.ErrInfo(err)) - return err - } - - if data, ok := parser.GetDataFromFirstBlock(); ok { - syspar.SetFirstBlockData(data) + if !conf.Config.IsSupportingVDE() { + logEntry := log.WithFields(log.Fields{"daemon_name": "block_collection"}) + + daemons.InitialLoad(logEntry) + err := syspar.SysUpdate(nil) + if err != nil { + log.Errorf("can't read system parameters: %s", utils.ErrInfo(err)) + return err + } + + if data, ok := parser.GetDataFromFirstBlock(); ok { + syspar.SetFirstBlockData(data) + } } log.Info("load contracts") @@ -36,8 +38,7 @@ func RunAllDaemons() error { log.Info("start daemons") daemons.StartDaemons() - err = tcpserver.TcpListener(conf.Config.TCPServer.Str()) - if err != nil { + if err := tcpserver.TcpListener(conf.Config.TCPServer.Str()); err != nil { log.Errorf("can't start tcp servers, stop") return err } diff --git a/packages/daylight/start.go b/packages/daylight/start.go index 8925da530..e7ff8b253 100644 --- a/packages/daylight/start.go +++ b/packages/daylight/start.go @@ -37,6 +37,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/publisher" "github.com/GenesisKernel/go-genesis/packages/statsd" "github.com/GenesisKernel/go-genesis/packages/utils" + "github.com/GenesisKernel/go-genesis/packages/vdemanager" "github.com/GenesisKernel/go-genesis/packages/conf/syspar" "github.com/GenesisKernel/go-genesis/packages/service" @@ -181,15 +182,6 @@ func initRoutes(listenHost string) { httpListener(listenHost, route) } -func logBlockchainMode() { - mode := "private" - if !conf.Config.PrivateBlockchain { - mode = "non private" - } - - log.WithFields(log.Fields{"mode": mode}).Error("Node running mode") -} - // Start starts the main code of the program func Start() { var err error @@ -218,7 +210,7 @@ func Start() { } } - logBlockchainMode() + log.WithFields(log.Fields{"mode": conf.Config.RunningMode}).Info("Node running mode") f := utils.LockOrDie(conf.Config.LockFilePath) defer f.Unlock() @@ -259,18 +251,24 @@ func Start() { os.Exit(1) } - var availableBCGap int64 = consts.AvailableBCGap - if syspar.GetRbBlocks1() > consts.AvailableBCGap { - availableBCGap = syspar.GetRbBlocks1() - consts.AvailableBCGap - } + if !conf.Config.IsSupportingVDE() { + var availableBCGap int64 = consts.AvailableBCGap + if syspar.GetRbBlocks1() > consts.AvailableBCGap { + availableBCGap = syspar.GetRbBlocks1() - consts.AvailableBCGap + } + + blockGenerationDuration := time.Millisecond * time.Duration(syspar.GetMaxBlockGenerationTime()) + blocksGapDuration := time.Second * time.Duration(syspar.GetGapsBetweenBlocks()) + blockGenerationTime := blockGenerationDuration + blocksGapDuration - blockGenerationDuration := time.Millisecond * time.Duration(syspar.GetMaxBlockGenerationTime()) - blocksGapDuration := time.Second * time.Duration(syspar.GetGapsBetweenBlocks()) - blockGenerationTime := blockGenerationDuration + blocksGapDuration + checkingInterval := blockGenerationTime * time.Duration(syspar.GetRbBlocks1()-consts.DefaultNodesConnectDelay) + na := service.NewNodeRelevanceService(availableBCGap, checkingInterval) + na.Run() + } - checkingInterval := blockGenerationTime * time.Duration(syspar.GetRbBlocks1()-consts.DefaultNodesConnectDelay) - na := service.NewNodeRelevanceService(availableBCGap, checkingInterval) - na.Run() + if conf.Config.IsVDEMaster() { + vdemanager.InitVDEManager() + } } daemons.WaitForSignals() diff --git 
a/packages/script/vminit.go b/packages/script/vminit.go index d479e04ce..f286b7fbe 100644 --- a/packages/script/vminit.go +++ b/packages/script/vminit.go @@ -69,6 +69,8 @@ const ( VMTypeSmart VMType = 1 // VMTypeVDE is vde vm type VMTypeVDE VMType = 2 + // VMTypeVDEMaster is VDEMaster type + VMTypeVDEMaster VMType = 3 TagFile = "file" TagAddress = "address" diff --git a/packages/smart/smart.go b/packages/smart/smart.go index 6f6242fb7..1eb641958 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -902,7 +902,7 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { logger.WithFields(log.Fields{"type": consts.InvalidObject}).Error("incorrect sign") return retError(ErrIncorrectSign) } - if sc.TxSmart.EcosystemID > 0 && !sc.VDE && !conf.Config.PrivateBlockchain { + if sc.TxSmart.EcosystemID > 0 && !sc.VDE && !conf.Config.IsPrivateBlockchain() { if sc.TxSmart.TokenEcosystem == 0 { sc.TxSmart.TokenEcosystem = 1 } @@ -1024,8 +1024,8 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { result = result[:255] } } - if (flags&CallRollback) == 0 && (flags&CallAction) != 0 && sc.TxSmart.EcosystemID > 0 && - !sc.VDE && !conf.Config.PrivateBlockchain && sc.TxContract.Name != `@1NewUser` { + + if (flags&CallRollback) == 0 && (flags&CallAction) != 0 && sc.TxSmart.EcosystemID > 0 && !sc.VDE && !conf.Config.IsPrivateBlockchain() { apl := sc.TxUsedCost.Mul(fuelRate) wltAmount, ierr := decimal.NewFromString(payWallet.Amount) diff --git a/packages/tcpserver/tcpserver.go b/packages/tcpserver/tcpserver.go index f13a3de4f..d361e7d7b 100644 --- a/packages/tcpserver/tcpserver.go +++ b/packages/tcpserver/tcpserver.go @@ -22,6 +22,8 @@ import ( "sync/atomic" "time" + "github.com/GenesisKernel/go-genesis/packages/conf" + "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/service" @@ -107,6 +109,10 @@ func HandleTCPRequest(rw net.Conn) { // TcpListener is listening tcp address func TcpListener(laddr string) error { + if conf.Config.IsSupportingVDE() { + return nil + } + if strings.HasPrefix(laddr, "127.") { log.Warn("Listening at local address: ", laddr) } diff --git a/packages/vdemanager/manager.go b/packages/vdemanager/manager.go index d1a37d413..4cca4ac8e 100644 --- a/packages/vdemanager/manager.go +++ b/packages/vdemanager/manager.go @@ -24,7 +24,7 @@ const ( dropDBTemplate = `DROP OWNED BY %s CASCADE` dropDBRoleTemplate = `DROP ROLE IF EXISTS %s` - commandTemplate = `%s -VDEMode=true -configPath=%s -workDir=%s` + commandTemplate = `%s start --config=%s` ) var ( @@ -33,49 +33,35 @@ var ( // VDEManager struct type VDEManager struct { - processes *process.ProcessManager + processes *process.ProcessManager + execPath string + childConfigsPath string } var ( - Manager *VDEManager - childConfigsPath string + Manager *VDEManager ) -// InitVDEManager create init instance of VDEManager -func InitVDEManager() error { - if err := prepareWorkDir(); err != nil { - return err - } - - return initProcessManager() -} - -func prepareWorkDir() error { - childConfigsPath = path.Join(conf.Config.DataDir, childFolder) +func prepareWorkDir() (string, error) { + childConfigsPath := path.Join(conf.Config.DataDir, childFolder) if _, err := os.Stat(childConfigsPath); os.IsNotExist(err) { if err := os.Mkdir(childConfigsPath, 0700); err != nil { log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("creating configs directory") - return err + return "", err } } - return nil + return childConfigsPath, nil } // CreateVDE creates 
one instance of VDE func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) error { - execPath, err := os.Executable() - if err != nil { - log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("on getting executable path") - return err - } - config := ChildVDEConfig{ - Executable: execPath, + Executable: mgr.execPath, Name: name, - Directory: path.Join(childConfigsPath, name), + Directory: path.Join(mgr.childConfigsPath, name), DBUser: dbUser, DBPassword: dbPassword, ConfigFileName: consts.DefaultConfigFile, @@ -117,7 +103,7 @@ func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) erro procConfEntry.Name = "program:" + name command := fmt.Sprintf("%s --configPath=%s", config.Executable, config.Directory) procConfEntry.AddKeyValue("command", command) - proc := process.NewProcess("vdeMaster", confEntry) + proc := process.NewProcess("vdeMaster", procConfEntry) mgr.processes.Add(name, proc) mgr.processes.Find(name).Start(true) @@ -153,7 +139,7 @@ func (mgr *VDEManager) DeleteVDE(name string) error { p.Stop(true) } - vdeDir := path.Join(childConfigsPath, name) + vdeDir := path.Join(mgr.childConfigsPath, name) vdeConfigPath := filepath.Join(vdeDir, consts.DefaultConfigFile) vdeConfig, err := conf.GetConfigFromPath(vdeConfigPath) if err != nil { @@ -250,7 +236,7 @@ func (mgr *VDEManager) createVDEDB(vdeName, login, pass string) error { func (mgr *VDEManager) initVDEDir(vdeName string) error { - vdeDirName := path.Join(childConfigsPath, vdeName) + vdeDirName := path.Join(mgr.childConfigsPath, vdeName) if _, err := os.Stat(vdeDirName); os.IsNotExist(err) { if err := os.Mkdir(vdeDirName, 0700); err != nil { log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("creating VDE directory") @@ -261,21 +247,33 @@ func (mgr *VDEManager) initVDEDir(vdeName string) error { return nil } -func initProcessManager() error { +func InitVDEManager() { + + execPath, err := os.Executable() + if err != nil { + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Fatal("on determine executable path") + } + + childConfigsPath, err := prepareWorkDir() + if err != nil { + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Fatal("on prepare child configs folder") + } + Manager = &VDEManager{ - processes: process.NewProcessManager(), + processes: process.NewProcessManager(), + execPath: execPath, + childConfigsPath: childConfigsPath, } list, err := ioutil.ReadDir(childConfigsPath) if err != nil { - log.WithFields(log.Fields{"type": consts.IOError, "error": err, "path": childConfigsPath}).Error("Initialising VDE list") - return err + log.WithFields(log.Fields{"type": consts.IOError, "error": err, "path": childConfigsPath}).Fatal("on read child VDE directory") } for _, item := range list { if item.IsDir() { - procDir := path.Join(childConfigsPath, item.Name()) - commandStr := fmt.Sprintf(commandTemplate, bin(), filepath.Join(procDir, consts.DefaultConfigFile), procDir) + procDir := path.Join(Manager.childConfigsPath, item.Name()) + commandStr := fmt.Sprintf(commandTemplate, Manager.execPath, filepath.Join(procDir, consts.DefaultConfigFile)) confEntry := pConf.NewConfigEntry(procDir) confEntry.Name = "program:" + item.Name() confEntry.AddKeyValue("command", commandStr) @@ -287,6 +285,4 @@ func initProcessManager() error { Manager.processes.Add(item.Name(), proc) } } - - return nil } From 095169edc0210e28de32c230d02ae47c6d46e616 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 10 May 2018 17:15:56 +0300 
Subject: [PATCH 009/169] temporary commit --- packages/api/api.go | 8 +- packages/api/login.go | 5 +- packages/api/vde.go | 4 +- packages/daemons/block_generator_tx.go | 2 +- packages/daylight/start.go | 8 + .../vde/{vde.go => vde_data_contracts.go} | 267 ++---------------- packages/migration/vde/vde_data_keys.go | 6 + packages/migration/vde/vde_data_members.go | 7 + packages/migration/vde/vde_data_menu.go | 45 +++ packages/migration/vde/vde_data_pages.go | 5 + packages/migration/vde/vde_data_parameters.go | 18 ++ packages/migration/vde/vde_data_tables.go | 68 +++++ packages/migration/vde/vde_schema.go | 143 ++++++++++ packages/model/db.go | 9 +- packages/parser/common.go | 2 +- packages/smart/smart.go | 13 +- packages/template/template.go | 2 +- 17 files changed, 342 insertions(+), 270 deletions(-) rename packages/migration/vde/{vde.go => vde_data_contracts.go} (60%) create mode 100644 packages/migration/vde/vde_data_keys.go create mode 100644 packages/migration/vde/vde_data_members.go create mode 100644 packages/migration/vde/vde_data_menu.go create mode 100644 packages/migration/vde/vde_data_pages.go create mode 100644 packages/migration/vde/vde_data_parameters.go create mode 100644 packages/migration/vde/vde_data_tables.go create mode 100644 packages/migration/vde/vde_schema.go diff --git a/packages/api/api.go b/packages/api/api.go index d891d82fe..f8a503b7f 100644 --- a/packages/api/api.go +++ b/packages/api/api.go @@ -243,15 +243,11 @@ func fillToken(w http.ResponseWriter, r *http.Request, data *apiData, logger *lo func fillParams(params map[string]int) apiHandle { return func(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.Entry) error { if conf.Config.IsSupportingVDE() { - data.vm = smart.GetVM(true, consts.DefaultVDE) - if data.vm == nil { - return errorAPI(w, `E_VDE`, http.StatusBadRequest, data.ecosystemId) - } data.vde = true - } else { - data.vm = smart.GetVM(false, 0) } + data.vm = smart.GetVM() + for key, par := range params { val := r.FormValue(key) if par&pOptional == 0 && len(val) == 0 { diff --git a/packages/api/login.go b/packages/api/login.go index b55fe85c3..ef8114139 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -128,7 +128,8 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En params := make([]byte, 0) params = append(append(params, converter.EncodeLength(int64(len(hexPubKey)))...), hexPubKey...) 
- vm := smart.GetVM(false, 0) + vm := smart.GetVM() + contract := smart.VMGetContract(vm, "NewUser", 1) info := contract.Block.Info.(*script.ContractInfo) @@ -207,7 +208,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En Address: address, IsOwner: founder == wallet, IsNode: conf.Config.KeyID == wallet, - IsVDE: model.IsTable(fmt.Sprintf(`%d_vde_tables`, ecosystemID)), + IsVDE: model.IsTable(fmt.Sprintf(`%d_vde_tables`, consts.DefaultVDE)), } data.result = &result diff --git a/packages/api/vde.go b/packages/api/vde.go index d494dba3e..cf83ec6b0 100644 --- a/packages/api/vde.go +++ b/packages/api/vde.go @@ -67,8 +67,8 @@ func InitSmartContract(sc *smart.SmartContract, data []byte) error { if err := msgpack.Unmarshal(data, &sc.TxSmart); err != nil { return err } - sc.TxContract = smart.VMGetContractByID(smart.GetVM(sc.VDE, sc.TxSmart.EcosystemID), - int32(sc.TxSmart.Type)) + + sc.TxContract = smart.VMGetContractByID(smart.GetVM(), int32(sc.TxSmart.Type)) if sc.TxContract == nil { return fmt.Errorf(`unknown contract %d`, sc.TxSmart.Type) } diff --git a/packages/daemons/block_generator_tx.go b/packages/daemons/block_generator_tx.go index 9b5ddb977..d96e58f8c 100644 --- a/packages/daemons/block_generator_tx.go +++ b/packages/daemons/block_generator_tx.go @@ -45,7 +45,7 @@ func (dtx *DelayedTx) RunForBlockID(blockID int64) { } func (dtx *DelayedTx) createTx(delayedContactID, keyID int64) error { - vm := smart.GetVM(false, 0) + vm := smart.GetVM() contract := smart.VMGetContract(vm, callDelayedContract, uint32(firstEcosystemID)) info := contract.Block.Info.(*script.ContractInfo) diff --git a/packages/daylight/start.go b/packages/daylight/start.go index e7ff8b253..74861a936 100644 --- a/packages/daylight/start.go +++ b/packages/daylight/start.go @@ -35,6 +35,7 @@ import ( logtools "github.com/GenesisKernel/go-genesis/packages/log" "github.com/GenesisKernel/go-genesis/packages/model" "github.com/GenesisKernel/go-genesis/packages/publisher" + "github.com/GenesisKernel/go-genesis/packages/smart" "github.com/GenesisKernel/go-genesis/packages/statsd" "github.com/GenesisKernel/go-genesis/packages/utils" "github.com/GenesisKernel/go-genesis/packages/vdemanager" @@ -266,6 +267,13 @@ func Start() { na.Run() } + if conf.Config.IsSupportingVDE() { + if err := smart.LoadVDEContracts(nil, converter.Int64ToStr(consts.DefaultVDE)); err != nil { + log.WithFields(log.Fields{"type": consts.VMError, "error": err}).Fatal("on loading vde virtual mashine") + Exit(1) + } + } + if conf.Config.IsVDEMaster() { vdemanager.InitVDEManager() } diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde_data_contracts.go similarity index 60% rename from packages/migration/vde/vde.go rename to packages/migration/vde/vde_data_contracts.go index 640338e93..4e5ca29ab 100644 --- a/packages/migration/vde/vde.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -1,247 +1,6 @@ package vde -var SchemaVDE = ` - DROP TABLE IF EXISTS "%[1]d_vde_members"; - CREATE TABLE "%[1]d_vde_members" ( - "id" bigint NOT NULL DEFAULT '0', - "member_name" varchar(255) NOT NULL DEFAULT '', - "image_id" bigint, - "member_info" jsonb - ); - ALTER TABLE ONLY "%[1]d_vde_members" ADD CONSTRAINT "%[1]d_vde_members_pkey" PRIMARY KEY ("id"); - - INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('%[2]d', 'founder'); - INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('4544233900443112470', 'guest'); - - DROP TABLE IF EXISTS "%[1]d_vde_languages"; CREATE TABLE "%[1]d_vde_languages" ( - "id" 
bigint NOT NULL DEFAULT '0', - "name" character varying(100) NOT NULL DEFAULT '', - "res" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_languages" ADD CONSTRAINT "%[1]d_vde_languages_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_languages_index_name" ON "%[1]d_vde_languages" (name); - - DROP TABLE IF EXISTS "%[1]d_vde_menu"; CREATE TABLE "%[1]d_vde_menu" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "title" character varying(255) NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_menu" ADD CONSTRAINT "%[1]d_vde_menu_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_menu_index_name" ON "%[1]d_vde_menu" (name); - - - INSERT INTO "%[1]d_vde_menu" ("id","name","title","value","conditions") VALUES('2','admin_menu','Admin menu','MenuItem( - Icon: "icon-screen-desktop", - Page: "interface", - Vde: "true", - Title: "Interface" -) -MenuItem( - Icon: "icon-docs", - Page: "tables", - Vde: "true", - Title: "Tables" -) -MenuItem( - Icon: "icon-briefcase", - Page: "contracts", - Vde: "true", - Title: "Smart Contracts" -) -MenuItem( - Icon: "icon-settings", - Page: "parameters", - Vde: "true", - Title: "Ecosystem parameters" -) -MenuItem( - Icon: "icon-globe", - Page: "languages", - Vde: "true", - Title: "Language resources" -) -MenuItem( - Icon: "icon-cloud-upload", - Page: "import", - Vde: "true", - Title: "Import" -) -MenuItem( - Icon: "icon-cloud-download", - Page: "export", - Vde: "true", - Title: "Export" -)','true'); - - DROP TABLE IF EXISTS "%[1]d_vde_pages"; CREATE TABLE "%[1]d_vde_pages" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "menu" character varying(255) NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '', - "validate_count" bigint NOT NULL DEFAULT '1', - "app_id" bigint NOT NULL DEFAULT '0', - "validate_mode" character(1) NOT NULL DEFAULT '0' - ); - ALTER TABLE ONLY "%[1]d_vde_pages" ADD CONSTRAINT "%[1]d_vde_pages_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_pages_index_name" ON "%[1]d_vde_pages" (name); - - INSERT INTO "%[1]d_vde_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); - - DROP TABLE IF EXISTS "%[1]d_vde_blocks"; CREATE TABLE "%[1]d_vde_blocks" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_blocks" ADD CONSTRAINT "%[1]d_vde_blocks_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_blocks_index_name" ON "%[1]d_vde_blocks" (name); - - DROP TABLE IF EXISTS "%[1]d_vde_signatures"; CREATE TABLE "%[1]d_vde_signatures" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(100) NOT NULL DEFAULT '', - "value" jsonb, - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_signatures" ADD CONSTRAINT "%[1]d_vde_signatures_pkey" PRIMARY KEY (name); - - CREATE TABLE "%[1]d_vde_contracts" ( - "id" bigint NOT NULL DEFAULT '0', - "name" text NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_contracts" ADD CONSTRAINT "%[1]d_vde_contracts_pkey" PRIMARY KEY (id); - - DROP TABLE IF EXISTS "%[1]d_vde_parameters"; - CREATE TABLE "%[1]d_vde_parameters" ( - "id" bigint NOT NULL DEFAULT '0', - "name" varchar(255) UNIQUE NOT NULL 
DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_parameters" ADD CONSTRAINT "%[1]d_vde_parameters_pkey" PRIMARY KEY ("id"); - CREATE INDEX "%[1]d_vde_parameters_index_name" ON "%[1]d_vde_parameters" (name); - - INSERT INTO "%[1]d_vde_parameters" ("id","name", "value", "conditions") VALUES - ('1','founder_account', '%[2]d', 'ContractConditions("MainCondition")'), - ('2','new_table', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('3','new_column', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('4','changing_tables', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('5','changing_language', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('6','changing_signature', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('7','changing_page', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('8','changing_menu', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('9','changing_contracts', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('10','stylesheet', 'body { - /* You can define your custom styles here or create custom CSS rules */ - }', 'ContractConditions("MainCondition")'), - ('11','changing_blocks', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'); - - DROP TABLE IF EXISTS "%[1]d_vde_cron"; - CREATE TABLE "%[1]d_vde_cron" ( - "id" bigint NOT NULL DEFAULT '0', - "owner" bigint NOT NULL DEFAULT '0', - "cron" varchar(255) NOT NULL DEFAULT '', - "contract" varchar(255) NOT NULL DEFAULT '', - "counter" bigint NOT NULL DEFAULT '0', - "till" timestamp NOT NULL DEFAULT timestamp '1970-01-01 00:00:00', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_cron" ADD CONSTRAINT "%[1]d_vde_cron_pkey" PRIMARY KEY ("id"); - - DROP TABLE IF EXISTS "%[1]d_vde_binaries"; - CREATE TABLE "%[1]d_vde_binaries" ( - "id" bigint NOT NULL DEFAULT '0', - "app_id" bigint NOT NULL DEFAULT '1', - "member_id" bigint NOT NULL DEFAULT '0', - "name" varchar(255) NOT NULL DEFAULT '', - "data" bytea NOT NULL DEFAULT '', - "hash" varchar(32) NOT NULL DEFAULT '', - "mime_type" varchar(255) NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_binaries" ADD CONSTRAINT "%[1]d_vde_binaries_pkey" PRIMARY KEY (id); - CREATE UNIQUE INDEX "%[1]d_vde_binaries_index_app_id_member_id_name" ON "%[1]d_vde_binaries" (app_id, member_id, name); - - CREATE TABLE "%[1]d_vde_tables" ( - "id" bigint NOT NULL DEFAULT '0', - "name" varchar(100) UNIQUE NOT NULL DEFAULT '', - "permissions" jsonb, - "columns" jsonb, - "conditions" text NOT NULL DEFAULT '', - "app_id" bigint NOT NULL DEFAULT '1' - ); - ALTER TABLE ONLY "%[1]d_vde_tables" ADD CONSTRAINT "%[1]d_vde_tables_pkey" PRIMARY KEY ("id"); - CREATE INDEX "%[1]d_vde_tables_index_name" ON "%[1]d_vde_tables" (name); - - INSERT INTO "%[1]d_vde_tables" ("id", "name", "permissions","columns", "conditions") VALUES ('1', 'contracts', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "false", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), - ('2', 'languages', - '{"insert": 
"ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{ "name": "ContractConditions(\"MainCondition\")", - "res": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), - ('3', 'menu', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('4', 'pages', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "menu": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")", - "validate_count": "ContractConditions(\"MainCondition\")", - "validate_mode": "ContractConditions(\"MainCondition\")", - "app_id": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('5', 'blocks', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('6', 'signatures', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('7', 'cron', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"owner": "ContractConditions(\"MainCondition\")", - "cron": "ContractConditions(\"MainCondition\")", - "contract": "ContractConditions(\"MainCondition\")", - "counter": "ContractConditions(\"MainCondition\")", - "till": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractConditions("MainCondition")'), - ('8', 'binaries', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"app_id": "ContractConditions(\"MainCondition\")", - "member_id": "ContractConditions(\"MainCondition\")", - "name": "ContractConditions(\"MainCondition\")", - "data": "ContractConditions(\"MainCondition\")", - "hash": "ContractConditions(\"MainCondition\")", - "mime_type": "ContractConditions(\"MainCondition\")"}', - 'ContractConditions("MainCondition")'); - - INSERT INTO "%[1]d_vde_contracts" ("id", "name", "value", "conditions") VALUES +var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "conditions") VALUES ('1','MainCondition','contract MainCondition { conditions { if EcosysParam("founder_account")!=$key_id @@ -927,7 +686,7 @@ MenuItem( UpdateCron($Id) } }', 
'ContractConditions("MainCondition")'), - ('23', 'UploadBinary', contract UploadBinary { + ('23', 'UploadBinary', 'contract UploadBinary { data { Name string Data bytes "file" @@ -954,5 +713,23 @@ MenuItem( $result = $Id } - }', 'ContractConditions("MainCondition")'); - ` + }', 'ContractConditions("MainCondition")'), + ('24', 'NewUser','contract NewUser { + data { + NewPubkey string + } + conditions { + $newId = PubToID($NewPubkey) + if $newId == 0 { + error "Wrong pubkey" + } + if DBFind("keys").Columns("id").WhereId($newId).One("id") != nil { + error "User already exists" + } + + $amount = Money(1000) * Money(1000000000000000000) + } + action { + DBInsert("keys", "id, pub", $newId, $NewPubKey) + } + }', 'ContractConditions("MainCondition")');` diff --git a/packages/migration/vde/vde_data_keys.go b/packages/migration/vde/vde_data_keys.go new file mode 100644 index 000000000..42e26c843 --- /dev/null +++ b/packages/migration/vde/vde_data_keys.go @@ -0,0 +1,6 @@ +package vde + +var keysDataSQL = ` +INSERT INTO "%[1]d_keys" (id, pub) +VALUES (4544233900443112470, '489347a1205c818d9a02f285faaedd0122a56138e3d985f5e1b4f6a9470f90f692a00a3453771dd7feea388ceb7aefeaf183e299c70ad1aecb7f870bfada3b86'); +` diff --git a/packages/migration/vde/vde_data_members.go b/packages/migration/vde/vde_data_members.go new file mode 100644 index 000000000..069f1ea2b --- /dev/null +++ b/packages/migration/vde/vde_data_members.go @@ -0,0 +1,7 @@ +package vde + +var membersDataSQL = ` +INSERT INTO "%[1]d_members" ("id", "member_name") +VALUES('%[2]d', 'founder'), +('4544233900443112470', 'guest'); +` diff --git a/packages/migration/vde/vde_data_menu.go b/packages/migration/vde/vde_data_menu.go new file mode 100644 index 000000000..b52a1699f --- /dev/null +++ b/packages/migration/vde/vde_data_menu.go @@ -0,0 +1,45 @@ +package vde + +var menuDataSQL = ` +INSERT INTO "%[1]d_menu" ("id","name","title","value","conditions") VALUES('2','admin_menu','Admin menu','MenuItem( + Icon: "icon-screen-desktop", + Page: "interface", + Vde: "true", + Title: "Interface" +) +MenuItem( + Icon: "icon-docs", + Page: "tables", + Vde: "true", + Title: "Tables" +) +MenuItem( + Icon: "icon-briefcase", + Page: "contracts", + Vde: "true", + Title: "Smart Contracts" +) +MenuItem( + Icon: "icon-settings", + Page: "parameters", + Vde: "true", + Title: "Ecosystem parameters" +) +MenuItem( + Icon: "icon-globe", + Page: "languages", + Vde: "true", + Title: "Language resources" +) +MenuItem( + Icon: "icon-cloud-upload", + Page: "import", + Vde: "true", + Title: "Import" +) +MenuItem( + Icon: "icon-cloud-download", + Page: "export", + Vde: "true", + Title: "Export" +)','true');` diff --git a/packages/migration/vde/vde_data_pages.go b/packages/migration/vde/vde_data_pages.go new file mode 100644 index 000000000..90ef6eab4 --- /dev/null +++ b/packages/migration/vde/vde_data_pages.go @@ -0,0 +1,5 @@ +package vde + +var pagesDataSQL = ` +INSERT INTO "%[1]d_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); +` diff --git a/packages/migration/vde/vde_data_parameters.go b/packages/migration/vde/vde_data_parameters.go new file mode 100644 index 000000000..3ba29e2f9 --- /dev/null +++ b/packages/migration/vde/vde_data_parameters.go @@ -0,0 +1,18 @@ +package vde + +var parametersDataSQL = ` +INSERT INTO "%[1]d_parameters" ("id","name", "value", "conditions") VALUES + ('1','founder_account', '%[2]d', 'ContractConditions("MainCondition")'), + ('2','new_table', 'ContractConditions("MainCondition")', 
'ContractConditions("MainCondition")'), + ('3','new_column', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('4','changing_tables', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('5','changing_language', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('6','changing_signature', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('7','changing_page', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('8','changing_menu', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('9','changing_contracts', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('10','stylesheet', 'body { + /* You can define your custom styles here or create custom CSS rules */ + }', 'ContractConditions("MainCondition")'), + ('11','changing_blocks', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'); +` diff --git a/packages/migration/vde/vde_data_tables.go b/packages/migration/vde/vde_data_tables.go new file mode 100644 index 000000000..4223e825a --- /dev/null +++ b/packages/migration/vde/vde_data_tables.go @@ -0,0 +1,68 @@ +package vde + +var tablesDataSQL = ` +INSERT INTO "%[1]d_tables" ("id", "name", "permissions","columns", "conditions") VALUES ('1', 'contracts', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "false", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), + ('2', 'languages', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{ "name": "ContractConditions(\"MainCondition\")", + "res": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), + ('3', 'menu', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('4', 'pages', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"menu": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")", +"validate_count": "ContractConditions(\"MainCondition\")", +"validate_mode": "ContractConditions(\"MainCondition\")", +"app_id": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('5', 'blocks', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('6', 'signatures', + '{"insert": "ContractConditions(\"MainCondition\")", 
"update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('7', 'cron', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"owner": "ContractConditions(\"MainCondition\")", + "cron": "ContractConditions(\"MainCondition\")", + "contract": "ContractConditions(\"MainCondition\")", + "counter": "ContractConditions(\"MainCondition\")", + "till": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractConditions("MainCondition")'), + ('8', 'binaries', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"app_id": "ContractConditions(\"MainCondition\")", + "member_id": "ContractConditions(\"MainCondition\")", + "name": "ContractConditions(\"MainCondition\")", + "data": "ContractConditions(\"MainCondition\")", + "hash": "ContractConditions(\"MainCondition\")", + "mime_type": "ContractConditions(\"MainCondition\")"}', + 'ContractConditions("MainCondition")'); +` diff --git a/packages/migration/vde/vde_schema.go b/packages/migration/vde/vde_schema.go new file mode 100644 index 000000000..c3fda993f --- /dev/null +++ b/packages/migration/vde/vde_schema.go @@ -0,0 +1,143 @@ +package vde + +import ( + "strings" +) + +// GetVDEScript returns script for VDE schema +func GetVDEScript() string { + scripts := []string{ + schemaVDE, + membersDataSQL, + menuDataSQL, + pagesDataSQL, + parametersDataSQL, + tablesDataSQL, + contractsDataSQL, + keysDataSQL, + } + + return strings.Join(scripts, "\r\n") +} + +var schemaVDE = ` + DROP TABLE IF EXISTS "%[1]d_keys"; CREATE TABLE "%[1]d_keys" ( + "id" bigint NOT NULL DEFAULT '0', + "pub" bytea NOT NULL DEFAULT '', + "multi" bigint NOT NULL DEFAULT '0', + "deleted" bigint NOT NULL DEFAULT '0', + "blocked" bigint NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_keys" ADD CONSTRAINT "%[1]d_keys_pkey" PRIMARY KEY (id); + + DROP TABLE IF EXISTS "%[1]d_members"; + CREATE TABLE "%[1]d_members" ( + "id" bigint NOT NULL DEFAULT '0', + "member_name" varchar(255) NOT NULL DEFAULT '', + "image_id" bigint, + "member_info" jsonb + ); + ALTER TABLE ONLY "%[1]d_members" ADD CONSTRAINT "%[1]d_members_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_languages"; CREATE TABLE "%[1]d_languages" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(100) NOT NULL DEFAULT '', + "res" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_languages" ADD CONSTRAINT "%[1]d_languages_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_languages_index_name" ON "%[1]d_languages" (name); + + DROP TABLE IF EXISTS "%[1]d_menu"; CREATE TABLE "%[1]d_menu" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "title" character varying(255) NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_menu" ADD CONSTRAINT "%[1]d_menu_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_menu_index_name" ON "%[1]d_menu" (name); + + DROP TABLE IF EXISTS "%[1]d_pages"; CREATE TABLE "%[1]d_pages" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character 
varying(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "menu" character varying(255) NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '', + "validate_count" bigint NOT NULL DEFAULT '1', + "app_id" bigint NOT NULL DEFAULT '0', + "validate_mode" character(1) NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_pages" ADD CONSTRAINT "%[1]d_pages_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_pages_index_name" ON "%[1]d_pages" (name); + + DROP TABLE IF EXISTS "%[1]d_blocks"; CREATE TABLE "%[1]d_blocks" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_blocks" ADD CONSTRAINT "%[1]d_blocks_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_blocks_index_name" ON "%[1]d_blocks" (name); + + DROP TABLE IF EXISTS "%[1]d_signatures"; CREATE TABLE "%[1]d_signatures" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(100) NOT NULL DEFAULT '', + "value" jsonb, + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_signatures" ADD CONSTRAINT "%[1]d_signatures_pkey" PRIMARY KEY (name); + + CREATE TABLE "%[1]d_contracts" ( + "id" bigint NOT NULL DEFAULT '0', + "name" text NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_contracts" ADD CONSTRAINT "%[1]d_contracts_pkey" PRIMARY KEY (id); + + DROP TABLE IF EXISTS "%[1]d_parameters"; + CREATE TABLE "%[1]d_parameters" ( + "id" bigint NOT NULL DEFAULT '0', + "name" varchar(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_parameters" ADD CONSTRAINT "%[1]d_parameters_pkey" PRIMARY KEY ("id"); + CREATE INDEX "%[1]d_parameters_index_name" ON "%[1]d_parameters" (name); + + DROP TABLE IF EXISTS "%[1]d_cron"; + CREATE TABLE "%[1]d_cron" ( + "id" bigint NOT NULL DEFAULT '0', + "owner" bigint NOT NULL DEFAULT '0', + "cron" varchar(255) NOT NULL DEFAULT '', + "contract" varchar(255) NOT NULL DEFAULT '', + "counter" bigint NOT NULL DEFAULT '0', + "till" timestamp NOT NULL DEFAULT timestamp '1970-01-01 00:00:00', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_cron" ADD CONSTRAINT "%[1]d_cron_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_binaries"; + CREATE TABLE "%[1]d_binaries" ( + "id" bigint NOT NULL DEFAULT '0', + "app_id" bigint NOT NULL DEFAULT '1', + "member_id" bigint NOT NULL DEFAULT '0', + "name" varchar(255) NOT NULL DEFAULT '', + "data" bytea NOT NULL DEFAULT '', + "hash" varchar(32) NOT NULL DEFAULT '', + "mime_type" varchar(255) NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_binaries" ADD CONSTRAINT "%[1]d_binaries_pkey" PRIMARY KEY (id); + CREATE UNIQUE INDEX "%[1]d_binaries_index_app_id_member_id_name" ON "%[1]d_binaries" (app_id, member_id, name); + + CREATE TABLE "%[1]d_tables" ( + "id" bigint NOT NULL DEFAULT '0', + "name" varchar(100) UNIQUE NOT NULL DEFAULT '', + "permissions" jsonb, + "columns" jsonb, + "conditions" text NOT NULL DEFAULT '', + "app_id" bigint NOT NULL DEFAULT '1' + ); + ALTER TABLE ONLY "%[1]d_tables" ADD CONSTRAINT "%[1]d_tables_pkey" PRIMARY KEY ("id"); + CREATE INDEX "%[1]d_tables_index_name" ON "%[1]d_tables" (name); + ` diff --git a/packages/model/db.go b/packages/model/db.go index 7c7c8efdb..b6416c45f 100644 --- a/packages/model/db.go +++ b/packages/model/db.go @@ -156,7 +156,7 @@ func ExecSchemaEcosystem(db 
*DbTransaction, id int, wallet int64, name string, f // ExecSchemaLocalData is executing schema with local data func ExecSchemaLocalData(id int, wallet int64) error { - return DBConn.Exec(fmt.Sprintf(vde.SchemaVDE, id, wallet)).Error + return DBConn.Exec(fmt.Sprintf(vde.GetVDEScript(), id, wallet)).Error } // ExecSchema is executing schema @@ -385,5 +385,12 @@ func InitDB(cfg conf.DBConfig) error { return err } + if conf.Config.IsSupportingVDE() { + if err := ExecSchemaLocalData(consts.DefaultVDE, conf.Config.KeyID); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating VDE schema") + return err + } + } + return nil } diff --git a/packages/parser/common.go b/packages/parser/common.go index 2147118f9..f8bf4ff36 100644 --- a/packages/parser/common.go +++ b/packages/parser/common.go @@ -481,7 +481,7 @@ func (p *Parser) CallContract(flags int) (resultContract string, err error) { VDE: false, Rollback: true, SysUpdate: false, - VM: smart.GetVM(false, 0), + VM: smart.GetVM(), TxSmart: *p.TxSmart, TxData: p.TxData, TxContract: p.TxContract, diff --git a/packages/smart/smart.go b/packages/smart/smart.go index 1eb641958..9f3434632 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -66,7 +66,6 @@ const ( var ( smartVM *script.VM - smartVDE map[int64]*script.VM smartTest = make(map[string]string) ErrCurrentBalance = errors.New(`current balance is not enough`) @@ -118,17 +117,10 @@ func newVM() *script.VM { func init() { smartVM = newVM() - smartVDE = make(map[int64]*script.VM) } // GetVM is returning smart vm -func GetVM(vde bool, ecosystemID int64) *script.VM { - if vde { - if v, ok := smartVDE[ecosystemID]; ok { - return v - } - return nil - } +func GetVM() *script.VM { return smartVM } @@ -495,7 +487,6 @@ func LoadVDEContracts(transaction *model.DbTransaction, prefix string) (err erro } EmbedFuncs(vm, vmt) - smartVDE[state] = vm LoadSysFuncs(vm, int(state)) for _, item := range contracts { list, err := script.ContractsList(item[`value`]) @@ -853,7 +844,7 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { methods := []string{`init`, `conditions`, `action`, `rollback`} sc.TxContract.StackCont = []string{sc.TxContract.Name} (*sc.TxContract.Extend)[`stack_cont`] = StackCont - sc.VM = GetVM(sc.VDE, sc.TxSmart.EcosystemID) + sc.VM = GetVM() if (flags&CallRollback) == 0 && (flags&CallAction) != 0 { if !sc.VDE { toID = sc.BlockData.KeyID diff --git a/packages/template/template.go b/packages/template/template.go index 5c0dc1842..8beb4882b 100644 --- a/packages/template/template.go +++ b/packages/template/template.go @@ -692,7 +692,7 @@ func Template2JSON(input string, timeout *bool, vars *map[string]string) []byte isvde := (*vars)[`vde`] == `true` || (*vars)[`vde`] == `1` sc := smart.SmartContract{ VDE: isvde, - VM: smart.GetVM(isvde, converter.StrToInt64((*vars)[`ecosystem_id`])), + VM: smart.GetVM(), TxSmart: tx.SmartContract{ Header: tx.Header{ EcosystemID: converter.StrToInt64((*vars)[`ecosystem_id`]), From c3d77cce75de717c80ae92f43f945f71c9fdaa1a Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 10 May 2018 22:37:36 +0300 Subject: [PATCH 010/169] fix login --- packages/api/api.go | 37 ++++--- packages/api/login.go | 4 +- packages/migration/vde/vde_data_contracts.go | 111 +++++++++++++++---- packages/migration/vde/vde_data_pages.go | 2 +- packages/migration/vde/vde_schema.go | 29 +++++ packages/smart/smart.go | 8 +- 6 files changed, 148 insertions(+), 43 deletions(-) diff --git a/packages/api/api.go 
b/packages/api/api.go index f8a503b7f..d44c48242 100644 --- a/packages/api/api.go +++ b/packages/api/api.go @@ -133,9 +133,6 @@ func errorAPI(w http.ResponseWriter, err interface{}, code int, params ...interf func getPrefix(data *apiData) (prefix string) { prefix = converter.Int64ToStr(data.ecosystemId) - if data.vde { - prefix += `_vde` - } return } @@ -274,6 +271,10 @@ func fillParams(params map[string]int) apiHandle { } func checkEcosystem(w http.ResponseWriter, data *apiData, logger *log.Entry) (int64, string, error) { + if conf.Config.IsSupportingVDE() { + return consts.DefaultVDE, "1", nil + } + ecosystemID := data.ecosystemId if data.params[`ecosystem`].(int64) > 0 { ecosystemID = data.params[`ecosystem`].(int64) @@ -288,9 +289,9 @@ func checkEcosystem(w http.ResponseWriter, data *apiData, logger *log.Entry) (in } } prefix := converter.Int64ToStr(ecosystemID) - if data.vde { - prefix += `_vde` - } + // if data.vde { + // prefix += `_vde` + // } return ecosystemID, prefix, nil } @@ -299,18 +300,20 @@ func fillTokenData(data *apiData, claims *JWTClaims, logger *log.Entry) error { data.keyId = converter.StrToInt64(claims.KeyID) data.isMobile = claims.IsMobile data.roleId = converter.StrToInt64(claims.RoleID) - ecosystem := &model.Ecosystem{} - found, err := ecosystem.Get(data.ecosystemId) - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on getting ecosystem from db") - return err - } + if !conf.Config.IsSupportingVDE() { + ecosystem := &model.Ecosystem{} + found, err := ecosystem.Get(data.ecosystemId) + if err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on getting ecosystem from db") + return err + } - if !found { - err := fmt.Errorf("ecosystem not found") - logger.WithFields(log.Fields{"type": consts.NotFound, "id": data.ecosystemId, "error": err}).Error("ecosystem not found") - } + if !found { + err := fmt.Errorf("ecosystem not found") + logger.WithFields(log.Fields{"type": consts.NotFound, "id": data.ecosystemId, "error": err}).Error("ecosystem not found") + } - data.ecosystemName = ecosystem.Name + data.ecosystemName = ecosystem.Name + } return nil } diff --git a/packages/api/login.go b/packages/api/login.go index ef8114139..d9c7f8de6 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -128,9 +128,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En params := make([]byte, 0) params = append(append(params, converter.EncodeLength(int64(len(hexPubKey)))...), hexPubKey...) 
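Before the login flow resumes, a note on how the schema pieces from patch 009 fit together at runtime: GetVDEScript() joins schemaVDE with the *DataSQL fragments, ExecSchemaLocalData feeds the result through fmt.Sprintf so that %[1]d becomes the ecosystem id and %[2]d the founder key, and InitDB now runs it for consts.DefaultVDE whenever the node supports a VDE. Because the new tables are plain "%d_contracts", "%d_pages" and so on (the former "_vde" infix is gone), LoadVDEContracts and the IsVDE check in login look them up under the new names. The whole path, inlined as a sketch from functions shown in these hunks (applyVDESchema is an illustrative wrapper):

    // Equivalent to the ExecSchemaLocalData(consts.DefaultVDE, conf.Config.KeyID)
    // call that InitDB makes when IsSupportingVDE() is true.
    func applyVDESchema() error {
        schema := vde.GetVDEScript() // schemaVDE plus the data fragments, joined with "\r\n"
        sql := fmt.Sprintf(schema, consts.DefaultVDE, conf.Config.KeyID)
        return model.DBConn.Exec(sql).Error
    }
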
- vm := smart.GetVM() - - contract := smart.VMGetContract(vm, "NewUser", 1) + contract := smart.GetContract("NewUser", 1) info := contract.Block.Info.(*script.ContractInfo) err = tx.BuildTransaction(tx.SmartContract{ diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index 4e5ca29ab..ea83e591c 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -483,38 +483,113 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c PermColumn($TableName, $Name, $Permissions) } }', 'ContractConditions("MainCondition")'), - ('18','NewLang','contract NewLang { + ('18','NewLang', 'contract NewLang { data { - Name string - Trans string - AppID int + ApplicationId int "optional" + Name string + Trans string "optional" + Value array "optional" + IdLanguage array "optional" } + conditions { - EvalCondition("parameters", "changing_language", "value") - var row array - row = DBFind("languages").Columns("name").Where("name=? AND app_id=?", $Name, $AppID).Limit(1) - if Len(row) > 0 { - error Sprintf("The language resource %%s already exists", $Name) + if $ApplicationId == 0 { + warning "Application id cannot equal 0" + } + + if DBFind("languages").Columns("id").Where("name = ?", $Name).One("id") { + warning Sprintf( "Language resource %%s already exists", $Name) } + + var j int + while j < Len($IdLanguage) { + if $IdLanguage[j] == "" { + info("Locale empty") + } + if $Value[j] == "" { + info("Value empty") + } + j = j + 1 + } + EvalCondition("parameters", "changing_language", "value") } + action { - DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) + var i,len,lenshar int + var res,langarr string + len = Len($IdLanguage) + lenshar = Len($Value) + while i < len { + if i + 1 == len { + res = res + Sprintf("%%q: %%q",$IdLanguage[i],$Value[i]) + } else { + res = res + Sprintf("%%q: %%q,",$IdLanguage[i],$Value[i]) + } + i = i + 1 + } + if len > 0 { + langarr = Sprintf("{"+"%%v"+"}", res) + $Trans = langarr + } + $result = CreateLanguage($Name, $Trans, $ApplicationId) } }', 'ContractConditions("MainCondition")'), ('19','EditLang','contract EditLang { data { - Id int - Name string - Trans string - AppID int + Id int + Name string "optional" + ApplicationId int "optional" + Trans string "optional" + Value array "optional" + IdLanguage array "optional" } + conditions { + var j int + while j < Len($IdLanguage) { + if ($IdLanguage[j] == ""){ + info("Locale empty") + } + if ($Value[j] == ""){ + info("Value empty") + } + j = j + 1 + } EvalCondition("parameters", "changing_language", "value") } + action { - DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) + var i,len int + var res,langarr string + len = Len($IdLanguage) + while i < len { + if (i + 1 == len){ + res = res + Sprintf("%%q: %%q", $IdLanguage[i],$Value[i]) + } + else { + res = res + Sprintf("%%q: %%q, ", $IdLanguage[i],$Value[i]) + } + i = i + 1 + } + + $row = DBFind("languages").Columns("name,app_id").WhereId($Id).Row() + if !$row{ + warning "Language not found" + } + + if $ApplicationId == 0 { + $ApplicationId = Int($row["app_id"]) + } + if $Name == "" { + $Name = $row["name"] + } + + if (len > 0){ + langarr = Sprintf("{"+"%%v"+"}", res) + $Trans = langarr + + } + EditLanguage($Id, $Name, $Trans, $ApplicationId) } }', 'ContractConditions("MainCondition")'), ('20','Import','contract Import { @@ -726,8 +801,6 @@ var 
contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c if DBFind("keys").Columns("id").WhereId($newId).One("id") != nil { error "User already exists" } - - $amount = Money(1000) * Money(1000000000000000000) } action { DBInsert("keys", "id, pub", $newId, $NewPubKey) diff --git a/packages/migration/vde/vde_data_pages.go b/packages/migration/vde/vde_data_pages.go index 90ef6eab4..b013166b1 100644 --- a/packages/migration/vde/vde_data_pages.go +++ b/packages/migration/vde/vde_data_pages.go @@ -1,5 +1,5 @@ package vde var pagesDataSQL = ` -INSERT INTO "%[1]d_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); +INSERT INTO "%[1]d_pages" ("id","name","value","menu","conditions") VALUES('1', 'default_page', '', 'admin_menu', 'true'),('2','admin_index','','admin_menu','true'); ` diff --git a/packages/migration/vde/vde_schema.go b/packages/migration/vde/vde_schema.go index c3fda993f..7edf5da94 100644 --- a/packages/migration/vde/vde_schema.go +++ b/packages/migration/vde/vde_schema.go @@ -140,4 +140,33 @@ var schemaVDE = ` ); ALTER TABLE ONLY "%[1]d_tables" ADD CONSTRAINT "%[1]d_tables_pkey" PRIMARY KEY ("id"); CREATE INDEX "%[1]d_tables_index_name" ON "%[1]d_tables" (name); + + DROP TABLE IF EXISTS "%[1]d_notifications"; + CREATE TABLE "%[1]d_notifications" ( + "id" bigint NOT NULL DEFAULT '0', + "recipient" jsonb, + "sender" jsonb, + "notification" jsonb, + "page_params" jsonb, + "processing_info" jsonb, + "page_name" varchar(255) NOT NULL DEFAULT '', + "date_created" timestamp, + "date_start_processing" timestamp, + "date_closed" timestamp, + "closed" bigint NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_notifications" ADD CONSTRAINT "%[1]d_notifications_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_roles_participants"; + CREATE TABLE "%[1]d_roles_participants" ( + "id" bigint NOT NULL DEFAULT '0', + "role" jsonb, + "member" jsonb, + "appointed" jsonb, + "date_created" timestamp, + "date_deleted" timestamp, + "deleted" bigint NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_roles_participants" ADD CONSTRAINT "%[1]d_roles_participants_pkey" PRIMARY KEY ("id"); + ` diff --git a/packages/smart/smart.go b/packages/smart/smart.go index 9f3434632..41c61e610 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -174,6 +174,7 @@ func VMRun(vm *script.VM, block *script.Block, params []interface{}, extend *map func VMGetContract(vm *script.VM, name string, state uint32) *Contract { name = script.StateName(state, name) obj, ok := vm.Objects[name] + if ok && obj.Type == script.ObjContract { return &Contract{Name: name, Block: obj.Value.(*script.Block)} } @@ -469,15 +470,15 @@ func LoadContract(transaction *model.DbTransaction, prefix string) (err error) { func LoadVDEContracts(transaction *model.DbTransaction, prefix string) (err error) { var contracts []map[string]string - if !model.IsTable(prefix + `_vde_contracts`) { + if !model.IsTable(prefix + `_contracts`) { return } - contracts, err = model.GetAllTransaction(transaction, `select * from "`+prefix+`_vde_contracts" order by id`, -1) + contracts, err = model.GetAllTransaction(transaction, `select * from "`+prefix+`_contracts" order by id`, -1) if err != nil { return err } state := converter.StrToInt64(prefix) - vm := newVM() + vm := GetVM() var vmt script.VMType if conf.Config.IsVDE() { @@ -502,6 +503,7 @@ func LoadVDEContracts(transaction *model.DbTransaction, prefix string) (err erro WalletID: 0, TokenID: 0, } + if err = vmCompile(vm, item[`value`], 
&owner); err != nil { log.WithFields(log.Fields{"names": names, "error": err}).Error("Load VDE Contract") } else { From 16b63631f3266285ecab019abc57b55dfc683507 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 14 May 2018 09:18:14 +0300 Subject: [PATCH 011/169] temporary commit --- packages/api/login.go | 50 +++++++- packages/api/route.go | 6 +- packages/api/vde.go | 7 +- packages/api/vde_test.go | 120 ++----------------- packages/migration/vde/vde_data_contracts.go | 41 +++++++ packages/smart/smart.go | 1 + 6 files changed, 105 insertions(+), 120 deletions(-) diff --git a/packages/api/login.go b/packages/api/login.go index d9c7f8de6..9e0f9a07e 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -19,12 +19,14 @@ package api import ( "fmt" "net/http" + "strings" "time" "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/notificator" "github.com/GenesisKernel/go-genesis/packages/publisher" + msgpack "gopkg.in/vmihailenco/msgpack.v2" "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/crypto" @@ -131,20 +133,60 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En contract := smart.GetContract("NewUser", 1) info := contract.Block.Info.(*script.ContractInfo) - err = tx.BuildTransaction(tx.SmartContract{ + // scHeader, err := getHeader("NewUser", data) + if err != nil { + return errorAPI(w, "E_EMPTYOBJECT", http.StatusBadRequest) + } + + sc := tx.SmartContract{ Header: tx.Header{ Type: int(info.ID), Time: time.Now().Unix(), EcosystemID: 1, KeyID: conf.Config.KeyID, NetworkID: consts.NETWORK_ID, + PublicKey: pubkey, }, SignedBy: smart.PubToID(NodePublicKey), Data: params, - }, NodePrivateKey, NodePublicKey, string(hexPubKey)) - if err != nil { - log.WithFields(log.Fields{"type": consts.ContractError}).Error("Executing contract") } + + if conf.Config.IsSupportingVDE() { + + signPrms := []string{sc.ForSign()} + signPrms = append(signPrms, string(hexPubKey)) + signature, err := crypto.Sign( + NodePrivateKey, + strings.Join(signPrms, ","), + ) + if err != nil { + log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("signing by node private key") + return err + } + sc.BinSignatures = converter.EncodeLengthPlusData(signature) + serializedContract, err := msgpack.Marshal(sc) + if err != nil { + logger.WithFields(log.Fields{"type": consts.MarshallingError, "error": err}).Error("marshalling smart contract to msgpack") + return errorAPI(w, err, http.StatusInternalServerError) + } + // signature := data.params[`signature`].([]byte) + // if len(signature) == 0 { + // log.WithFields(log.Fields{"type": consts.EmptyObject, "params": data.params}).Error("signature is empty") + // } + + fmt.Println(len(signature)) + ret, err := VDEContract(serializedContract, data) + if err != nil { + return errorAPI(w, err, http.StatusInternalServerError) + } + data.result = ret + } else { + err = tx.BuildTransaction(sc, NodePrivateKey, NodePublicKey, string(hexPubKey)) + if err != nil { + log.WithFields(log.Fields{"type": consts.ContractError}).Error("Executing contract") + } + } + } if ecosystemID > 1 && len(pubkey) == 0 { diff --git a/packages/api/route.go b/packages/api/route.go index fd03b9c57..282a47a31 100644 --- a/packages/api/route.go +++ b/packages/api/route.go @@ -58,7 +58,7 @@ func Route(route *hr.Router) { get(`interface/page/:name`, ``, authWallet, getPageRow) get(`interface/menu/:name`, ``, 
authWallet, getMenuRow) get(`interface/block/:name`, ``, authWallet, getBlockInterfaceRow) - get(`systemparams`, `?names:string`, authWallet, systemParams) + // get(`systemparams`, `?names:string`, authWallet, systemParams) get(`table/:name`, ``, authWallet, table) get(`tables`, `?limit ?offset:int64`, authWallet, tables) get(`test/:name`, ``, getTest) @@ -77,7 +77,7 @@ func Route(route *hr.Router) { post(`test/:name`, ``, getTest) post(`content`, `template ?source:string`, jsonContent) post(`updnotificator`, `ids:string`, updateNotificator) - + get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) methodRoute(route, `POST`, `node/:name`, `?token_ecosystem:int64,?max_sum ?payover:string`, contractHandlers.nodeContract) if !conf.Config.IsSupportingVDE() { @@ -88,7 +88,7 @@ func Route(route *hr.Router) { get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) get(`block/:id`, ``, getBlockInfo) get(`maxblockid`, ``, getMaxBlockID) - get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) + get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) get(`systemparams`, `?names:string`, authWallet, systemParams) get(`ecosystems`, ``, authWallet, ecosystems) diff --git a/packages/api/vde.go b/packages/api/vde.go index cf83ec6b0..9891ffddb 100644 --- a/packages/api/vde.go +++ b/packages/api/vde.go @@ -173,17 +173,22 @@ func VDEContract(contractData []byte, data *apiData) (result *contractResult, er result.Message = &txstatusError{Type: "panic", Error: err.Error()} return } + if data.token != nil && data.token.Valid { if auth, err := data.token.SignedString([]byte(jwtSecret)); err == nil { sc.TxData[`auth_token`] = auth } } + if ret, err = sc.CallContract(smart.CallInit | smart.CallCondition | smart.CallAction); err == nil { result.Result = ret } else { if errResult := json.Unmarshal([]byte(err.Error()), &result.Message); errResult != nil { - log.WithFields(log.Fields{"type": consts.JSONUnmarshallError, "text": err.Error(), + log.WithFields(log.Fields{ + "type": consts.JSONUnmarshallError, + "text": err.Error(), "error": errResult}).Error("unmarshalling contract error") + result.Message = &txstatusError{Type: "panic", Error: errResult.Error()} } } diff --git a/packages/api/vde_test.go b/packages/api/vde_test.go index b2b071315..57cb90949 100644 --- a/packages/api/vde_test.go +++ b/packages/api/vde_test.go @@ -24,6 +24,7 @@ import ( "time" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" @@ -33,121 +34,16 @@ import ( ) func TestVDECreate(t *testing.T) { - var ( - err error - retid int64 - ret vdeCreateResult - ) - - assert.NoError(t, keyLogin(1)) - - if err = sendPost(`vde/create`, nil, &ret); err != nil && - err.Error() != `400 {"error": "E_VDECREATED", "msg": "Virtual Dedicated Ecosystem is already created" }` { - t.Error(err) - return - } - - rnd := `rnd` + crypto.RandSeq(6) - form := url.Values{`Value`: {`contract ` + rnd + ` { - data { - Par string - } - action { Test("active", $Par)}}`}, `Conditions`: {`ContractConditions("MainCondition")`}, `vde`: {`true`}} - - retid, _, err = postTxResult(`NewContract`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`contract ` + rnd + ` { - data { - Par string - } - action { Test("active 5", $Par)}}`}, `Conditions`: {`ContractConditions("MainCondition")`}, `vde`: {`true`}} - assert.NoError(t, 
postTx(`EditContract`, &form)) - - form = url.Values{`Name`: {rnd}, `Value`: {`Test value`}, `Conditions`: {`ContractConditions("MainCondition")`}, - `vde`: {`1`}} - - retid, _, err = postTxResult(`NewParameter`, &form) - assert.NoError(t, err) + require.NoError(t, keyLogin(1)) - form = url.Values{`Name`: {`new_table`}, `Value`: {`Test value`}, `Conditions`: {`ContractConditions("MainCondition")`}, - `vde`: {`1`}} - if err = postTx(`NewParameter`, &form); err != nil && err.Error() != - `500 {"error": "E_SERVER", "msg": "{\"type\":\"warning\",\"error\":\"Parameter new_table already exists\"}" }` { - t.Error(err) - return + form := url.Values{ + "VDEName": {"testvde"}, + "DBUser": {"vdeuser"}, + "DBPassword": {"vdepassword"}, + "VDEAPIPort": {"8000"}, } - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit value`}, `Conditions`: {`true`}, - `vde`: {`1`}} - - assert.NoError(t, postTx(`EditParameter`, &form)) - - form = url.Values{"Name": {`menu` + rnd}, "Value": {`first - second - third`}, "Title": {`My Menu`}, - "Conditions": {`true`}, `vde`: {`1`}} - retid, _, err = postTxResult(`NewMenu`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit value`}, - `Conditions`: {`true`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`EditMenu`, &form)) - - form = url.Values{"Id": {converter.Int64ToStr(retid)}, "Value": {`Span(Append)`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`AppendMenu`, &form)) - - form = url.Values{"Name": {`page` + rnd}, "Value": {`Page`}, "Menu": {`government`}, - "Conditions": {`true`}, `vde`: {`1`}} - retid, _, err = postTxResult(`NewPage`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit page value`}, - `Conditions`: {`true`}, "Menu": {`government`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`EditPage`, &form)) - - form = url.Values{"Id": {converter.Int64ToStr(retid)}, "Value": {`Span(Test Page)`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`AppendPage`, &form)) - - form = url.Values{"Name": {`block` + rnd}, "Value": {`Page block`}, "Conditions": {`true`}, `vde`: {`1`}} - retid, _, err = postTxResult(`NewBlock`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit block value`}, - `Conditions`: {`true`}, `vde`: {`1`}} - assert.NoError(t, postTx(`EditBlock`, &form)) - - name := randName(`tbl`) - form = url.Values{"Name": {name}, `vde`: {`true`}, "Columns": {`[{"name":"MyName","type":"varchar", "index": "1", - "conditions":"true"}, - {"name":"Amount", "type":"number","index": "0", "conditions":"true"}, - {"name":"Active", "type":"character","index": "0", "conditions":"true"}]`}, - "Permissions": {`{"insert": "true", "update" : "true", "new_column": "true"}`}} - assert.NoError(t, postTx(`NewTable`, &form)) - - form = url.Values{"Name": {name}, `vde`: {`true`}, - "Permissions": {`{"insert": "ContractConditions(\"MainCondition\")", - "update" : "true", "new_column": "ContractConditions(\"MainCondition\")"}`}} - assert.NoError(t, postTx(`EditTable`, &form)) - - form = url.Values{"TableName": {name}, "Name": {`newCol`}, `vde`: {`1`}, - "Type": {"varchar"}, "Index": {"0"}, "Permissions": {"true"}} - assert.NoError(t, postTx(`NewColumn`, &form)) - - form = url.Values{"TableName": {name}, "Name": {`newColRead`}, `vde`: {`1`}, - "Type": {"varchar"}, "Index": {"0"}, "Permissions": {`{"update":"true", "read":"false"}`}} - assert.NoError(t, postTx(`NewColumn`, &form)) - - form = 
url.Values{"TableName": {name}, "Name": {`newCol`}, `vde`: {`1`}, - "Permissions": {"ContractConditions(\"MainCondition\")"}} - assert.NoError(t, postTx(`EditColumn`, &form)) + require.NoError(t, postTx("NewVDE", &form)) - form = url.Values{"TableName": {name}, "Name": {`newCol`}, `vde`: {`1`}, - "Permissions": {`{"update":"true", "read":"false"}`}} - assert.NoError(t, postTx(`EditColumn`, &form)) } func TestVDEParams(t *testing.T) { diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index ea83e591c..755e626c7 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -794,6 +794,7 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c NewPubkey string } conditions { + Println($NewPubkey) $newId = PubToID($NewPubkey) if $newId == 0 { error "Wrong pubkey" @@ -805,4 +806,44 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c action { DBInsert("keys", "id, pub", $newId, $NewPubKey) } + }', 'ContractConditions("MainCondition")'), + ('25', 'NewVDE', 'contract NewVDE { + data { + VDEName string + DBUser string + DBPassword string + VDEAPIPort int + } + + conditions { + } + + action { + CreateVDE($VDEName, $DBUser, $DBPassword, $VDEAPIPort) + } + }', 'ContractConditions("MainCondition")'), + ('26', 'ListVDE', 'contract ListVDE { + data { + VDEName string + } + + conditions { + + } + + action { + GetVDEList($VDEName) + } + }', 'ContractConditions("MainCondition")'), + ('27', 'RunVDE', 'contract RunVDE { + data { + VDEName string + } + + conditions { + } + + action { + StartVDE($VDEName) + } }', 'ContractConditions("MainCondition")');` diff --git a/packages/smart/smart.go b/packages/smart/smart.go index 41c61e610..332ec592b 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -885,6 +885,7 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { return retError(ErrEmptyPublicKey) } sc.PublicKeys = append(sc.PublicKeys, public) + var CheckSignResult bool CheckSignResult, err = utils.CheckSign(sc.PublicKeys, sc.TxData[`forsign`].(string), sc.TxSmart.BinSignatures, false) if err != nil { From 9ff3b45409929e221585c3365e62a1f4f996329b Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 12:40:33 +0300 Subject: [PATCH 012/169] move changes --- cmd/config.go | 4 +- packages/conf/conf.go | 26 +++ packages/conf/runmode.go | 40 ++++ packages/migration/{ => vde}/vde.go | 0 packages/smart/funcs.go | 41 ++++ packages/vdemanager/config.go | 65 +++++++ packages/vdemanager/manager.go | 286 ++++++++++++++++++++++++++++ 7 files changed, 460 insertions(+), 2 deletions(-) create mode 100644 packages/conf/runmode.go rename packages/migration/{ => vde}/vde.go (100%) create mode 100644 packages/vdemanager/config.go create mode 100644 packages/vdemanager/manager.go diff --git a/cmd/config.go b/cmd/config.go index c6779160a..90ace6d01 100644 --- a/cmd/config.go +++ b/cmd/config.go @@ -136,7 +136,7 @@ func init() { configCmd.Flags().StringVar(&conf.Config.TLSKey, "tls-key", "", "Filepath to the private key") configCmd.Flags().Int64Var(&conf.Config.MaxPageGenerationTime, "mpgt", 1000, "Max page generation time in ms") configCmd.Flags().StringSliceVar(&conf.Config.NodesAddr, "nodesAddr", []string{}, "List of addresses for downloading blockchain") - configCmd.Flags().BoolVar(&conf.Config.PrivateBlockchain, "privateBlockchain", false, "Is blockchain private") + configCmd.Flags().StringVar(&conf.Config.RunningMode, 
"runMode", "CommonBlockchain", "Node running mode") viper.BindPFlag("PidFilePath", configCmd.Flags().Lookup("pid")) viper.BindPFlag("LockFilePath", configCmd.Flags().Lookup("lock")) @@ -147,7 +147,7 @@ func init() { viper.BindPFlag("TLSCert", configCmd.Flags().Lookup("tls-cert")) viper.BindPFlag("TLSKey", configCmd.Flags().Lookup("tls-key")) viper.BindPFlag("MaxPageGenerationTime", configCmd.Flags().Lookup("mpgt")) - viper.BindPFlag("PrivateBlockchain", configCmd.Flags().Lookup("privateBlockchain")) viper.BindPFlag("TempDir", configCmd.Flags().Lookup("tempDir")) viper.BindPFlag("NodesAddr", configCmd.Flags().Lookup("nodesAddr")) + viper.BindPFlag("RunningMode", configCmd.Flags().Lookup("runMode")) } diff --git a/packages/conf/conf.go b/packages/conf/conf.go index 7ac76b6e8..2e175b01c 100644 --- a/packages/conf/conf.go +++ b/packages/conf/conf.go @@ -89,6 +89,7 @@ type GlobalConfig struct { TLS bool // TLS is on/off. It is required for https TLSCert string // TLSCert is a filepath of the fullchain of certificate. TLSKey string // TLSKey is a filepath of the private key. + RunningMode string MaxPageGenerationTime int64 // in milliseconds @@ -216,3 +217,28 @@ func FillRuntimeKey() error { func GetNodesAddr() []string { return Config.NodesAddr[:] } + +// IsPrivateBlockchain check running mode +func (c *GlobalConfig) IsPrivateBlockchain() bool { + return RunMode(c.RunningMode).IsPrivateBlockchain() +} + +// IsPublicBlockchain check running mode +func (c *GlobalConfig) IsPublicBlockchain() bool { + return RunMode(c.RunningMode).IsPublicBlockchain() +} + +// IsVDE check running mode +func (c *GlobalConfig) IsVDE() bool { + return RunMode(c.RunningMode).IsVDE() +} + +// IsVDEMaster check running mode +func (c *GlobalConfig) IsVDEMaster() bool { + return RunMode(c.RunningMode).IsVDEMaster() +} + +// IsSupportingVDE check running mode +func (c *GlobalConfig) IsSupportingVDE() bool { + return RunMode(c.RunningMode).IsSupportingVDE() +} diff --git a/packages/conf/runmode.go b/packages/conf/runmode.go new file mode 100644 index 000000000..a03f2aeb0 --- /dev/null +++ b/packages/conf/runmode.go @@ -0,0 +1,40 @@ +package conf + +// PrivateBlockchain const label for running mode +const privateBlockchain RunMode = "PrivateBlockchain" + +// PublicBlockchain const label for running mode +const publicBlockchain RunMode = "PublicBlockchain" + +// VDEManager const label for running mode +const vdeMaster RunMode = "VDEMaster" + +// VDE const label for running mode +const vde RunMode = "VDE" + +type RunMode string + +// IsPublicBlockchain returns true if mode equal PublicBlockchain +func (rm RunMode) IsPublicBlockchain() bool { + return rm == publicBlockchain +} + +// IsPrivateBlockchain returns true if mode equal PrivateBlockchain +func (rm RunMode) IsPrivateBlockchain() bool { + return rm == privateBlockchain +} + +// IsVDEMaster returns true if mode equal vdeMaster +func (rm RunMode) IsVDEMaster() bool { + return rm == vdeMaster +} + +// IsVDE returns true if mode equal vde +func (rm RunMode) IsVDE() bool { + return rm == vde +} + +// IsSupportingVDE returns true if mode support vde +func (rm RunMode) IsSupportingVDE() bool { + return rm.IsVDE() || rm.IsVDEMaster() +} diff --git a/packages/migration/vde.go b/packages/migration/vde/vde.go similarity index 100% rename from packages/migration/vde.go rename to packages/migration/vde/vde.go diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 951bfedd2..f2fe15345 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -46,6 +46,7 @@ 
import ( "github.com/GenesisKernel/go-genesis/packages/script" "github.com/GenesisKernel/go-genesis/packages/utils" "github.com/GenesisKernel/go-genesis/packages/utils/tx" + "github.com/GenesisKernel/go-genesis/packages/vdemanager" "github.com/satori/go.uuid" "github.com/shopspring/decimal" @@ -240,6 +241,21 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { f["UpdateCron"] = UpdateCron vmExtendCost(vm, getCost) vmFuncCallsDB(vm, funcCallsDB) + case script.VMTypeVDEMaster: + f["HTTPRequest"] = HTTPRequest + f["GetMapKeys"] = GetMapKeys + f["SortedKeys"] = SortedKeys + f["Date"] = Date + f["HTTPPostJSON"] = HTTPPostJSON + f["ValidateCron"] = ValidateCron + f["UpdateCron"] = UpdateCron + f["CreateVDE"] = CreateVDE + f["DeleteVDE"] = DeleteVDE + f["StartVDE"] = StartVDE + f["StopVDE"] = StopVDE + f["GetVDEList"] = GetVDEList + vmExtendCost(vm, getCost) + vmFuncCallsDB(vm, funcCallsDB) case script.VMTypeSmart: f["GetBlock"] = GetBlock f["DBSelectMetrics"] = DBSelectMetrics @@ -1439,3 +1455,28 @@ func StringToBytes(src string) []byte { func BytesToString(src []byte) string { return string(src) } + +// CreateVDE allow create new VDE throw vdemanager +func CreateVDE(sc *SmartContract, name, dbUser, dbPassword string, port int64) error { + return vdemanager.Manager.CreateVDE(name, dbUser, dbPassword, int(port)) +} + +// DeleteVDE delete vde +func DeleteVDE(sc *SmartContract, name string) error { + return vdemanager.Manager.DeleteVDE(name) +} + +// StartVDE run VDE process +func StartVDE(sc *SmartContract, name string) error { + return vdemanager.Manager.StartVDE(name) +} + +// StopVDE stops VDE process +func StopVDE(sc *SmartContract, name string) error { + return vdemanager.Manager.StopVDE(name) +} + +// GetVDEList returns list VDE process with statuses +func GetVDEList(sc *SmartContract, name string) (map[string]string, error) { + return vdemanager.Manager.ListProcess() +} diff --git a/packages/vdemanager/config.go b/packages/vdemanager/config.go new file mode 100644 index 000000000..450ff5aac --- /dev/null +++ b/packages/vdemanager/config.go @@ -0,0 +1,65 @@ +package vdemanager + +import ( + "fmt" + "os/exec" + "path/filepath" +) + +const ( + inidDBCommand = "initDatabase" + genKeysCommand = "generateKeys" + startCommand = "start" +) +// ChildVDEConfig struct to manage child entry +type ChildVDEConfig struct { + Executable string + Name string + Directory string + DBUser string + DBPassword string + ConfigFileName string + HTTPPort int +} + +func (c ChildVDEConfig) configCommand() *exec.Cmd { + + args := []string{ + "config", + fmt.Sprintf("--path=%s", c.configPath()), + fmt.Sprintf("--dbUser=%s", c.DBUser), + fmt.Sprintf("--dbPassword=%s", c.DBPassword), + fmt.Sprintf("--dbName=%s", c.Name), + fmt.Sprintf("--httpPort=%d", c.HTTPPort) + fmt.Sprintf("--dataDir=%s", c.Directory), + fmt.Sprintf("--keysDir=%s", c.Directory), + fmt.Sprintf("--runMode=VDE") + } + + return exec.Command(c.Executable, args...) +} + +func (c ChildVDEConfig) initDBCommand() exec.Cmd { + return getCommand(inidDBCommand) +} + +func (c ChildVDEConfig) generateKeysCommand() exec.Cmd { + return getCommand(genKeysCommand) +} + +func (c ChildVDEConfig) startCommand() exec.Cmd { + retturn getCommand(startCommand) +} + +func (c ChildVDEConfig) configPath() string { + return filepath.Join(c.Directory, ConfigFileName) +} + +func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { + return args := []string{ + commandName, + fmt.Sprintf("--config=%s", c.configPath()), + } + + return exec.Command(c.Executable, args...) 
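Only the closing brace of getCommand remains below. As printed, this new file has several slips that would stop it compiling: two missing commas in configCommand's argument slice (after the --httpPort and --runMode elements), the "return args :=" line and the "retturn" typo in the command helpers, the bare ConfigFileName in configPath, and the exec.Cmd versus *exec.Cmd mismatch on the three wrapper methods. A compilable sketch of what the helpers appear to intend, behaviour otherwise unchanged:

    func (c ChildVDEConfig) configCommand() *exec.Cmd {
        args := []string{
            "config",
            fmt.Sprintf("--path=%s", c.configPath()),
            fmt.Sprintf("--dbUser=%s", c.DBUser),
            fmt.Sprintf("--dbPassword=%s", c.DBPassword),
            fmt.Sprintf("--dbName=%s", c.Name),
            fmt.Sprintf("--httpPort=%d", c.HTTPPort),
            fmt.Sprintf("--dataDir=%s", c.Directory),
            fmt.Sprintf("--keysDir=%s", c.Directory),
            "--runMode=VDE",
        }
        return exec.Command(c.Executable, args...)
    }

    func (c ChildVDEConfig) initDBCommand() *exec.Cmd       { return c.getCommand(inidDBCommand) }
    func (c ChildVDEConfig) generateKeysCommand() *exec.Cmd { return c.getCommand(genKeysCommand) }
    func (c ChildVDEConfig) startCommand() *exec.Cmd        { return c.getCommand(startCommand) }

    func (c ChildVDEConfig) configPath() string {
        return filepath.Join(c.Directory, c.ConfigFileName)
    }

    // getCommand builds "<executable> <subcommand> --config=<path>" for the child node.
    func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd {
        args := []string{
            commandName,
            fmt.Sprintf("--config=%s", c.configPath()),
        }
        return exec.Command(c.Executable, args...)
    }
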
+} \ No newline at end of file diff --git a/packages/vdemanager/manager.go b/packages/vdemanager/manager.go new file mode 100644 index 000000000..d35362ce2 --- /dev/null +++ b/packages/vdemanager/manager.go @@ -0,0 +1,286 @@ +package vdemanager + +import ( + "errors" + "fmt" + "io/ioutil" + "os" + "path" + "path/filepath" + + "github.com/GenesisKernel/go-genesis/packages/conf" + + "github.com/GenesisKernel/go-genesis/packages/consts" + "github.com/GenesisKernel/go-genesis/packages/model" + pConf "github.com/rpoletaev/supervisord/config" + "github.com/rpoletaev/supervisord/process" + log "github.com/sirupsen/logrus" +) + +const ( + childFolder = "configs" + createRoleTemplate = `CREATE ROLE %s WITH ENCRYPTED PASSWORD '%s' NOSUPERUSER NOCREATEDB NOCREATEROLE INHERIT LOGIN` + createDBTemplate = `CREATE DATABASE %s OWNER %s` + + dropDBTemplate = `DROP OWNED BY %s CASCADE` + dropDBRoleTemplate = `DROP ROLE IF EXISTS %s` + commandTemplate = `%s -VDEMode=true -configPath=%s -workDir=%s` +) + +var ( + errWrongMode = errors.New("node must be running as VDEMaster") +) + +// VDEManager struct +type VDEManager struct { + processes *process.ProcessManager +} + +var ( + Manager *VDEManager + childConfigsPath string +) + +// InitVDEManager create init instance of VDEManager +func InitVDEManager() error { + if err := prepareWorkDir(); err != nil { + return err + } + + return initProcessManager() +} + +func prepareWorkDir() error { + childConfigsPath = path.Join(conf.Config.DataDir, childFolder) + + if _, err := os.Stat(childConfigsPath); os.IsNotExist(err) { + if err := os.Mkdir(childConfigsPath, 0700); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("creating configs directory") + return err + } + } + + return nil +} + +// CreateVDE creates one instance of VDE +func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) error { + + config := ChildVDEConfig{ + Executable: path.Join(conf.Config.DataDir, consts.NodeExecutableFileName), + Name: name, + Directory: path.Join(childConfigsPath, name) + DBUser: dbUser, + DBPassword: dbPassword, + ConfigFileName: consts.DefaultConfigFile, + HTTPPort: port, + } + + if mgr.processes == nil { + log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("creating new VDE") + return errWrongMode + } + + if err := mgr.createVDEDB(name, dbUser, dbPassword); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on creating VDE DB") + return err + } + + if err := mgr.initVDEDir(name); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "DirName": name, "error": err}).Error("on init VDE dir") + return err + } + + cmd := config.configCommand() + if err := cmd.Run(); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "args": cmd.Args}).Error("on run config command") + return err + } + + if err := config.generateKeysCommand().Run(); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "args": cmd.Args}).Error("on run generateKeys command") + return err + } + + if err := config.initDBCommand().Run(); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "args": cmd.Args}).Error("on run initDB command") + return err + } + + procConfEntry := pConf.NewConfigEntry(config.Directory) + procConfEntry.Name = "program:" + name + command := fmt.Sprintf("%s --configPath=%s", config.Executable, config.Directory) + procConfEntry.AddKeyValue("command", command) + proc := process.NewProcess("vdeMaster", confEntry) + + 
mgr.processes.Add(name, proc)
+	mgr.processes.Find(name).Start(true)
+	return nil
+}
+
+// ListProcess returns a list of process names with the state of each process
+func (mgr *VDEManager) ListProcess() (map[string]string, error) {
+	if mgr.processes == nil {
+		log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("get VDE list")
+		return nil, errWrongMode
+	}
+
+	list := make(map[string]string)
+
+	mgr.processes.ForEachProcess(func(p *process.Process) {
+		list[p.GetName()] = p.GetState().String()
+	})
+
+	return list, nil
+}
+
+// DeleteVDE stops the VDE process and removes the VDE folder
+func (mgr *VDEManager) DeleteVDE(name string) error {
+
+	if mgr.processes == nil {
+		log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("deleting VDE")
+		return errWrongMode
+	}
+
+	p := mgr.processes.Find(name)
+	if p != nil {
+		p.Stop(true)
+	}
+
+	vdeDir := path.Join(childConfigsPath, name)
+	vdeConfigPath := filepath.Join(vdeDir, consts.DefaultConfigFile)
+	vdeConfig, err := conf.GetConfigFromPath(vdeConfigPath)
+	if err != nil {
+		log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Errorf("Getting config from path %s", vdeConfigPath)
+		return err
+	}
+
+	dropDBquery := fmt.Sprintf(dropDBTemplate, vdeConfig.DB.User)
+	if err := model.DBConn.Exec(dropDBquery).Error; err != nil {
+		log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("Deleting vde db")
+		return err
+	}
+
+	dropVDERoleQuery := fmt.Sprintf(dropDBRoleTemplate, vdeConfig.DB.User)
+	if err := model.DBConn.Exec(dropVDERoleQuery).Error; err != nil {
+		log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("Deleting vde db user")
+		return err
+	}
+
+	return os.RemoveAll(vdeDir)
+}
+
+// StartVDE finds the process by name and starts it
+func (mgr *VDEManager) StartVDE(name string) error {
+
+	if mgr.processes == nil {
+		log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("starting VDE")
+		return errWrongMode
+	}
+
+	proc := mgr.processes.Find(name)
+	if proc == nil {
+		err := fmt.Errorf(`VDE '%s' does not exist`, name)
+		log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on find VDE process")
+		return err
+	}
+
+	state := proc.GetState()
+	if state == process.STOPPED ||
+		state == process.EXITED ||
+		state == process.FATAL {
+		proc.Start(true)
+		log.WithFields(log.Fields{"vde_name": name}).Info("VDE started")
+		return nil
+	}
+
+	err := fmt.Errorf("VDE '%s' is %s", name, state)
+	log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on starting VDE")
+	return err
+}
+
+// StopVDE finds the process with the given name and stops it
+func (mgr *VDEManager) StopVDE(name string) error {
+
+	if mgr.processes == nil {
+		log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("on stopping VDE process")
+		return errWrongMode
+	}
+
+	proc := mgr.processes.Find(name)
+	if proc == nil {
+		err := fmt.Errorf(`VDE '%s' does not exist`, name)
+		log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on find VDE process")
+		return err
+	}
+
+	state := proc.GetState()
+	if state == process.RUNNING ||
+		state == process.STARTING {
+		proc.Stop(true)
+		log.WithFields(log.Fields{"vde_name": name}).Info("VDE is stopped")
+		return nil
+	}
+
+	err := fmt.Errorf("VDE '%s' is %s", name, state)
+	log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on stopping VDE")
+	return err
+}
+
+func (mgr *VDEManager)
createVDEDB(vdeName, login, pass string) error { + + if err := model.DBConn.Exec(fmt.Sprintf(createRoleTemplate, login, pass)).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating VDE DB User") + return err + } + + if err := model.DBConn.Exec(fmt.Sprintf(createDBTemplate, vdeName, login)).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating VDE DB") + return err + } + + return nil +} + +func (mgr *VDEManager) initVDEDir(vdeName string) error { + + vdeDirName := path.Join(childConfigsPath, vdeName) + if _, err := os.Stat(vdeDirName); os.IsNotExist(err) { + if err := os.Mkdir(vdeDirName, 0700); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("creating VDE directory") + return err + } + } + + return nil +} + +func initProcessManager() error { + Manager = &VDEManager{ + processes: process.NewProcessManager(), + } + + list, err := ioutil.ReadDir(childConfigsPath) + if err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err, "path": childConfigsPath}).Error("Initialising VDE list") + return err + } + + for _, item := range list { + if item.IsDir() { + procDir := path.Join(childConfigsPath, item.Name()) + commandStr := fmt.Sprintf(commandTemplate, bin(), filepath.Join(procDir, consts.DefaultConfigFile), procDir) + confEntry := pConf.NewConfigEntry(procDir) + confEntry.Name = "program:" + item.Name() + confEntry.AddKeyValue("command", commandStr) + confEntry.AddKeyValue("redirect_stderr", "true") + confEntry.AddKeyValue("autostart", "true") + confEntry.AddKeyValue("autorestart", "true") + + proc := process.NewProcess("vdeMaster", confEntry) + Manager.processes.Add(item.Name(), proc) + } + } + + return nil +} From b37f590d9a581b714a966cb72c19c7c254352d92 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:37:01 +0300 Subject: [PATCH 013/169] setup vde mode for vm in default handler --- packages/api/api.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/api/api.go b/packages/api/api.go index 5412ce6ab..d891d82fe 100644 --- a/packages/api/api.go +++ b/packages/api/api.go @@ -30,6 +30,7 @@ import ( hr "github.com/julienschmidt/httprouter" log "github.com/sirupsen/logrus" + "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/model" @@ -241,10 +242,8 @@ func fillToken(w http.ResponseWriter, r *http.Request, data *apiData, logger *lo func fillParams(params map[string]int) apiHandle { return func(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.Entry) error { - // Getting and validating request parameters - vde := r.FormValue(`vde`) - if vde == `1` || vde == `true` { - data.vm = smart.GetVM(true, data.ecosystemId) + if conf.Config.IsSupportingVDE() { + data.vm = smart.GetVM(true, consts.DefaultVDE) if data.vm == nil { return errorAPI(w, `E_VDE`, http.StatusBadRequest, data.ecosystemId) } @@ -252,6 +251,7 @@ func fillParams(params map[string]int) apiHandle { } else { data.vm = smart.GetVM(false, 0) } + for key, par := range params { val := r.FormValue(key) if par&pOptional == 0 && len(val) == 0 { From 7c36754f8f8d411959e673ff1d386dac08f259bf Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:37:49 +0300 Subject: [PATCH 014/169] separate routes by vde --- packages/api/route.go | 28 
++++++++++++++++------------ 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/packages/api/route.go b/packages/api/route.go index 7f99e2cfe..a4234d34b 100644 --- a/packages/api/route.go +++ b/packages/api/route.go @@ -19,6 +19,7 @@ package api import ( "strings" + "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/utils/tx" @@ -50,14 +51,8 @@ func Route(route *hr.Router) { route.Handle(`OPTIONS`, consts.ApiPath+`*name`, optionsHandler()) route.Handle(`GET`, consts.ApiPath+`data/:table/:id/:column/:hash`, dataHandler()) - get(`appparam/:appid/:name`, `?ecosystem:int64`, authWallet, appParam) - get(`appparams/:appid`, `?ecosystem:int64,?names:string`, authWallet, appParams) - get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) get(`contract/:name`, ``, authWallet, getContract) get(`contracts`, `?limit ?offset:int64`, authWallet, getContracts) - get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) - get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) - get(`ecosystems`, ``, authWallet, ecosystems) get(`getuid`, ``, getUID) get(`list/:name`, `?limit ?offset:int64,?columns:string`, authWallet, list) get(`row/:name/:id`, `?columns:string`, authWallet, row) @@ -67,12 +62,7 @@ func Route(route *hr.Router) { get(`systemparams`, `?names:string`, authWallet, systemParams) get(`table/:name`, ``, authWallet, table) get(`tables`, `?limit ?offset:int64`, authWallet, tables) - get(`txstatus/:hash`, ``, authWallet, txstatus) - get(`txstatusMultiple`, `data:string`, authWallet, txstatusMulti) get(`test/:name`, ``, getTest) - get(`history/:table/:id`, ``, authWallet, getHistory) - get(`block/:id`, ``, getBlockInfo) - get(`maxblockid`, ``, getMaxBlockID) get(`version`, ``, getVersion) get(`avatar/:ecosystem/:member`, ``, getAvatar) get(`config/:option`, ``, getConfigOption) @@ -80,7 +70,6 @@ func Route(route *hr.Router) { post(`content/page/:name`, `?lang:string`, authWallet, getPage) post(`content/menu/:name`, `?lang:string`, authWallet, getMenu) post(`content/hash/:name`, ``, getPageHash) - post(`vde/create`, ``, authWallet, vdeCreate) post(`login`, `?pubkey signature:hex,?key_id ?mobile:string,?ecosystem ?expire ?role_id:int64`, login) post(`prepare/:name`, `?token_ecosystem:int64,?max_sum ?payover:string`, authWallet, contractHandlers.prepareContract) post(`prepareMultiple/:name`, `data:string`, authWallet, contractHandlers.prepareMultipleContract) @@ -93,6 +82,21 @@ func Route(route *hr.Router) { post(`updnotificator`, `ids:string`, updateNotificator) methodRoute(route, `POST`, `node/:name`, `?token_ecosystem:int64,?max_sum ?payover:string`, contractHandlers.nodeContract) + + if !conf.Config.IsSupportingVDE() { + get(`txstatus/:hash`, ``, authWallet, txstatus) + get(`txstatusMultiple`, `data:string`, authWallet, txstatusMulti) + get(`appparam/:appid/:name`, `?ecosystem:int64`, authWallet, appParam) + get(`appparams/:appid`, `?ecosystem:int64,?names:string`, authWallet, appParams) + get(`history/:table/:id`, ``, authWallet, getHistory) + get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) + get(`block/:id`, ``, getBlockInfo) + get(`maxblockid`, ``, getMaxBlockID) + get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) + get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) + get(`systemparams`, `?names:string`, authWallet, systemParams) + get(`ecosystems`, ``, 
authWallet, ecosystems) + } } func processParams(input string) (params map[string]int) { From cc7663c669fa50ee925ab6c8b32ce012ed19fd4f Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:38:36 +0300 Subject: [PATCH 015/169] separate vde migration to own package --- packages/migration/vde/vde.go | 2 +- packages/model/db.go | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde.go index b63cf858d..640338e93 100644 --- a/packages/migration/vde/vde.go +++ b/packages/migration/vde/vde.go @@ -1,4 +1,4 @@ -package migration +package vde var SchemaVDE = ` DROP TABLE IF EXISTS "%[1]d_vde_members"; diff --git a/packages/model/db.go b/packages/model/db.go index f784fc70c..7c7c8efdb 100644 --- a/packages/model/db.go +++ b/packages/model/db.go @@ -10,6 +10,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/crypto" "github.com/GenesisKernel/go-genesis/packages/migration" + "github.com/GenesisKernel/go-genesis/packages/migration/vde" "github.com/jinzhu/gorm" log "github.com/sirupsen/logrus" @@ -155,7 +156,7 @@ func ExecSchemaEcosystem(db *DbTransaction, id int, wallet int64, name string, f // ExecSchemaLocalData is executing schema with local data func ExecSchemaLocalData(id int, wallet int64) error { - return DBConn.Exec(fmt.Sprintf(migration.SchemaVDE, id, wallet)).Error + return DBConn.Exec(fmt.Sprintf(vde.SchemaVDE, id, wallet)).Error } // ExecSchema is executing schema From 6b3f8d746743cde4944718eebdc662a7c08690cd Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 15:59:38 +0300 Subject: [PATCH 016/169] temp commit --- packages/consts/log_types.go | 2 ++ packages/smart/smart.go | 10 +++++++++- packages/vdemanager/config.go | 29 +++++++++++++++-------------- packages/vdemanager/manager.go | 10 ++++++++-- 4 files changed, 34 insertions(+), 17 deletions(-) diff --git a/packages/consts/log_types.go b/packages/consts/log_types.go index a6bf14b16..7812ab193 100644 --- a/packages/consts/log_types.go +++ b/packages/consts/log_types.go @@ -53,4 +53,6 @@ const ( BCRelevanceError = "BCRelevanceError" BCActualizationError = "BCActualizationError" SchedulerError = "SchedulerError" + WrongModeError = "WrongModeError" + VDEManagerError = "VDEManagerError" ) diff --git a/packages/smart/smart.go b/packages/smart/smart.go index 27223e5a1..6f6242fb7 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -486,7 +486,15 @@ func LoadVDEContracts(transaction *model.DbTransaction, prefix string) (err erro } state := converter.StrToInt64(prefix) vm := newVM() - EmbedFuncs(vm, script.VMTypeVDE) + + var vmt script.VMType + if conf.Config.IsVDE() { + vmt = script.VMTypeVDE + } else if conf.Config.IsVDEMaster() { + vmt = script.VMTypeVDEMaster + } + + EmbedFuncs(vm, vmt) smartVDE[state] = vm LoadSysFuncs(vm, int(state)) for _, item := range contracts { diff --git a/packages/vdemanager/config.go b/packages/vdemanager/config.go index 450ff5aac..bcafa10ff 100644 --- a/packages/vdemanager/config.go +++ b/packages/vdemanager/config.go @@ -7,10 +7,11 @@ import ( ) const ( - inidDBCommand = "initDatabase" + inidDBCommand = "initDatabase" genKeysCommand = "generateKeys" - startCommand = "start" + startCommand = "start" ) + // ChildVDEConfig struct to manage child entry type ChildVDEConfig struct { Executable string @@ -30,36 +31,36 @@ func (c ChildVDEConfig) configCommand() *exec.Cmd { fmt.Sprintf("--dbUser=%s", c.DBUser), 
fmt.Sprintf("--dbPassword=%s", c.DBPassword), fmt.Sprintf("--dbName=%s", c.Name), - fmt.Sprintf("--httpPort=%d", c.HTTPPort) + fmt.Sprintf("--httpPort=%d", c.HTTPPort), fmt.Sprintf("--dataDir=%s", c.Directory), fmt.Sprintf("--keysDir=%s", c.Directory), - fmt.Sprintf("--runMode=VDE") + "--runMode=VDE", } return exec.Command(c.Executable, args...) } -func (c ChildVDEConfig) initDBCommand() exec.Cmd { - return getCommand(inidDBCommand) +func (c ChildVDEConfig) initDBCommand() *exec.Cmd { + return c.getCommand(inidDBCommand) } -func (c ChildVDEConfig) generateKeysCommand() exec.Cmd { - return getCommand(genKeysCommand) +func (c ChildVDEConfig) generateKeysCommand() *exec.Cmd { + return c.getCommand(genKeysCommand) } -func (c ChildVDEConfig) startCommand() exec.Cmd { - retturn getCommand(startCommand) +func (c ChildVDEConfig) startCommand() *exec.Cmd { + return c.getCommand(startCommand) } func (c ChildVDEConfig) configPath() string { - return filepath.Join(c.Directory, ConfigFileName) + return filepath.Join(c.Directory, c.ConfigFileName) } -func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { - return args := []string{ +func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { + args := []string{ commandName, fmt.Sprintf("--config=%s", c.configPath()), } return exec.Command(c.Executable, args...) -} \ No newline at end of file +} diff --git a/packages/vdemanager/manager.go b/packages/vdemanager/manager.go index d35362ce2..d1a37d413 100644 --- a/packages/vdemanager/manager.go +++ b/packages/vdemanager/manager.go @@ -66,10 +66,16 @@ func prepareWorkDir() error { // CreateVDE creates one instance of VDE func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) error { + execPath, err := os.Executable() + if err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("on getting executable path") + return err + } + config := ChildVDEConfig{ - Executable: path.Join(conf.Config.DataDir, consts.NodeExecutableFileName), + Executable: execPath, Name: name, - Directory: path.Join(childConfigsPath, name) + Directory: path.Join(childConfigsPath, name), DBUser: dbUser, DBPassword: dbPassword, ConfigFileName: consts.DefaultConfigFile, From 67e1e665daf43cb351ff56669a84cacfb30be085 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Tue, 8 May 2018 09:59:10 +0300 Subject: [PATCH 017/169] temporary commit --- cmd/config.go | 2 +- packages/conf/conf.go | 35 +++++++++-- packages/consts/consts.go | 3 + packages/daemons/common.go | 13 ++++- packages/daylight/daemonsctl/daemonsctl.go | 27 ++++----- packages/daylight/start.go | 38 ++++++------ packages/script/vminit.go | 2 + packages/smart/smart.go | 6 +- packages/tcpserver/tcpserver.go | 6 ++ packages/vdemanager/manager.go | 68 ++++++++++------------ 10 files changed, 120 insertions(+), 80 deletions(-) diff --git a/cmd/config.go b/cmd/config.go index 90ace6d01..127f7c827 100644 --- a/cmd/config.go +++ b/cmd/config.go @@ -136,7 +136,7 @@ func init() { configCmd.Flags().StringVar(&conf.Config.TLSKey, "tls-key", "", "Filepath to the private key") configCmd.Flags().Int64Var(&conf.Config.MaxPageGenerationTime, "mpgt", 1000, "Max page generation time in ms") configCmd.Flags().StringSliceVar(&conf.Config.NodesAddr, "nodesAddr", []string{}, "List of addresses for downloading blockchain") - configCmd.Flags().StringVar(&conf.Config.RunningMode, "runMode", "CommonBlockchain", "Node running mode") + configCmd.Flags().StringVar(&conf.Config.RunningMode, "runMode", "PublicBlockchain", "Node running mode") 
viper.BindPFlag("PidFilePath", configCmd.Flags().Lookup("pid")) viper.BindPFlag("LockFilePath", configCmd.Flags().Lookup("lock")) diff --git a/packages/conf/conf.go b/packages/conf/conf.go index 2e175b01c..b91be9b38 100644 --- a/packages/conf/conf.go +++ b/packages/conf/conf.go @@ -133,10 +133,33 @@ func LoadConfig(path string) error { if err != nil { return errors.Wrapf(err, "marshalling config to global struct variable") } - return nil } +// GetConfigFromPath read config from path and returns GlobalConfig struct +func GetConfigFromPath(path string) (*GlobalConfig, error) { + log.WithFields(log.Fields{"path": path}).Info("Loading config") + + _, err := os.Stat(path) + if os.IsNotExist(err) { + return nil, errors.Errorf("Unable to load config file %s", path) + } + + viper.SetConfigFile(path) + err = viper.ReadInConfig() + if err != nil { + return nil, errors.Wrapf(err, "reading config") + } + + c := &GlobalConfig{} + err = viper.Unmarshal(c) + if err != nil { + return c, errors.Wrapf(err, "marshalling config to global struct variable") + } + + return c, nil +} + // SaveConfig save global parameters to configFile func SaveConfig(path string) error { dir := filepath.Dir(path) @@ -219,26 +242,26 @@ func GetNodesAddr() []string { } // IsPrivateBlockchain check running mode -func (c *GlobalConfig) IsPrivateBlockchain() bool { +func (c GlobalConfig) IsPrivateBlockchain() bool { return RunMode(c.RunningMode).IsPrivateBlockchain() } // IsPublicBlockchain check running mode -func (c *GlobalConfig) IsPublicBlockchain() bool { +func (c GlobalConfig) IsPublicBlockchain() bool { return RunMode(c.RunningMode).IsPublicBlockchain() } // IsVDE check running mode -func (c *GlobalConfig) IsVDE() bool { +func (c GlobalConfig) IsVDE() bool { return RunMode(c.RunningMode).IsVDE() } // IsVDEMaster check running mode -func (c *GlobalConfig) IsVDEMaster() bool { +func (c GlobalConfig) IsVDEMaster() bool { return RunMode(c.RunningMode).IsVDEMaster() } // IsSupportingVDE check running mode -func (c *GlobalConfig) IsSupportingVDE() bool { +func (c GlobalConfig) IsSupportingVDE() bool { return RunMode(c.RunningMode).IsSupportingVDE() } diff --git a/packages/consts/consts.go b/packages/consts/consts.go index 9684221d7..45b07c9b6 100644 --- a/packages/consts/consts.go +++ b/packages/consts/consts.go @@ -157,3 +157,6 @@ const TxRequestExpire = 1 * time.Minute // DefaultTempDirName is default name of temporary directory const DefaultTempDirName = "genesis-temp" + +// DefaultVDE allways is 1 +const DefaultVDE = 1 diff --git a/packages/daemons/common.go b/packages/daemons/common.go index 861c03983..8f1bb4d6b 100644 --- a/packages/daemons/common.go +++ b/packages/daemons/common.go @@ -130,7 +130,7 @@ func StartDaemons() { utils.CancelFunc = cancel utils.ReturnCh = make(chan string) - daemonsToStart := serverList + daemonsToStart := getDaemonsToStart() if conf.Config.TestRollBack { daemonsToStart = rollbackList } @@ -156,3 +156,14 @@ func getHostPort(h string) string { } return fmt.Sprintf("%s:%d", h, consts.DEFAULT_TCP_PORT) } + +func getDaemonsToStart() []string { + if conf.Config.IsSupportingVDE() { + return []string{ + "Notificator", + "Scheduler", + } + } + + return serverList +} diff --git a/packages/daylight/daemonsctl/daemonsctl.go b/packages/daylight/daemonsctl/daemonsctl.go index cdddac4d8..84cac3036 100644 --- a/packages/daylight/daemonsctl/daemonsctl.go +++ b/packages/daylight/daemonsctl/daemonsctl.go @@ -14,17 +14,19 @@ import ( // RunAllDaemons start daemons, load contracts and tcpserver func RunAllDaemons() 
error { - logEntry := log.WithFields(log.Fields{"daemon_name": "block_collection"}) - - daemons.InitialLoad(logEntry) - err := syspar.SysUpdate(nil) - if err != nil { - log.Errorf("can't read system parameters: %s", utils.ErrInfo(err)) - return err - } - - if data, ok := parser.GetDataFromFirstBlock(); ok { - syspar.SetFirstBlockData(data) + if !conf.Config.IsSupportingVDE() { + logEntry := log.WithFields(log.Fields{"daemon_name": "block_collection"}) + + daemons.InitialLoad(logEntry) + err := syspar.SysUpdate(nil) + if err != nil { + log.Errorf("can't read system parameters: %s", utils.ErrInfo(err)) + return err + } + + if data, ok := parser.GetDataFromFirstBlock(); ok { + syspar.SetFirstBlockData(data) + } } log.Info("load contracts") @@ -36,8 +38,7 @@ func RunAllDaemons() error { log.Info("start daemons") daemons.StartDaemons() - err = tcpserver.TcpListener(conf.Config.TCPServer.Str()) - if err != nil { + if err := tcpserver.TcpListener(conf.Config.TCPServer.Str()); err != nil { log.Errorf("can't start tcp servers, stop") return err } diff --git a/packages/daylight/start.go b/packages/daylight/start.go index 8925da530..e7ff8b253 100644 --- a/packages/daylight/start.go +++ b/packages/daylight/start.go @@ -37,6 +37,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/publisher" "github.com/GenesisKernel/go-genesis/packages/statsd" "github.com/GenesisKernel/go-genesis/packages/utils" + "github.com/GenesisKernel/go-genesis/packages/vdemanager" "github.com/GenesisKernel/go-genesis/packages/conf/syspar" "github.com/GenesisKernel/go-genesis/packages/service" @@ -181,15 +182,6 @@ func initRoutes(listenHost string) { httpListener(listenHost, route) } -func logBlockchainMode() { - mode := "private" - if !conf.Config.PrivateBlockchain { - mode = "non private" - } - - log.WithFields(log.Fields{"mode": mode}).Error("Node running mode") -} - // Start starts the main code of the program func Start() { var err error @@ -218,7 +210,7 @@ func Start() { } } - logBlockchainMode() + log.WithFields(log.Fields{"mode": conf.Config.RunningMode}).Info("Node running mode") f := utils.LockOrDie(conf.Config.LockFilePath) defer f.Unlock() @@ -259,18 +251,24 @@ func Start() { os.Exit(1) } - var availableBCGap int64 = consts.AvailableBCGap - if syspar.GetRbBlocks1() > consts.AvailableBCGap { - availableBCGap = syspar.GetRbBlocks1() - consts.AvailableBCGap - } + if !conf.Config.IsSupportingVDE() { + var availableBCGap int64 = consts.AvailableBCGap + if syspar.GetRbBlocks1() > consts.AvailableBCGap { + availableBCGap = syspar.GetRbBlocks1() - consts.AvailableBCGap + } + + blockGenerationDuration := time.Millisecond * time.Duration(syspar.GetMaxBlockGenerationTime()) + blocksGapDuration := time.Second * time.Duration(syspar.GetGapsBetweenBlocks()) + blockGenerationTime := blockGenerationDuration + blocksGapDuration - blockGenerationDuration := time.Millisecond * time.Duration(syspar.GetMaxBlockGenerationTime()) - blocksGapDuration := time.Second * time.Duration(syspar.GetGapsBetweenBlocks()) - blockGenerationTime := blockGenerationDuration + blocksGapDuration + checkingInterval := blockGenerationTime * time.Duration(syspar.GetRbBlocks1()-consts.DefaultNodesConnectDelay) + na := service.NewNodeRelevanceService(availableBCGap, checkingInterval) + na.Run() + } - checkingInterval := blockGenerationTime * time.Duration(syspar.GetRbBlocks1()-consts.DefaultNodesConnectDelay) - na := service.NewNodeRelevanceService(availableBCGap, checkingInterval) - na.Run() + if conf.Config.IsVDEMaster() { + vdemanager.InitVDEManager() 
+ } } daemons.WaitForSignals() diff --git a/packages/script/vminit.go b/packages/script/vminit.go index d479e04ce..f286b7fbe 100644 --- a/packages/script/vminit.go +++ b/packages/script/vminit.go @@ -69,6 +69,8 @@ const ( VMTypeSmart VMType = 1 // VMTypeVDE is vde vm type VMTypeVDE VMType = 2 + // VMTypeVDEMaster is VDEMaster type + VMTypeVDEMaster VMType = 3 TagFile = "file" TagAddress = "address" diff --git a/packages/smart/smart.go b/packages/smart/smart.go index 6f6242fb7..1eb641958 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -902,7 +902,7 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { logger.WithFields(log.Fields{"type": consts.InvalidObject}).Error("incorrect sign") return retError(ErrIncorrectSign) } - if sc.TxSmart.EcosystemID > 0 && !sc.VDE && !conf.Config.PrivateBlockchain { + if sc.TxSmart.EcosystemID > 0 && !sc.VDE && !conf.Config.IsPrivateBlockchain() { if sc.TxSmart.TokenEcosystem == 0 { sc.TxSmart.TokenEcosystem = 1 } @@ -1024,8 +1024,8 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { result = result[:255] } } - if (flags&CallRollback) == 0 && (flags&CallAction) != 0 && sc.TxSmart.EcosystemID > 0 && - !sc.VDE && !conf.Config.PrivateBlockchain && sc.TxContract.Name != `@1NewUser` { + + if (flags&CallRollback) == 0 && (flags&CallAction) != 0 && sc.TxSmart.EcosystemID > 0 && !sc.VDE && !conf.Config.IsPrivateBlockchain() { apl := sc.TxUsedCost.Mul(fuelRate) wltAmount, ierr := decimal.NewFromString(payWallet.Amount) diff --git a/packages/tcpserver/tcpserver.go b/packages/tcpserver/tcpserver.go index f13a3de4f..d361e7d7b 100644 --- a/packages/tcpserver/tcpserver.go +++ b/packages/tcpserver/tcpserver.go @@ -22,6 +22,8 @@ import ( "sync/atomic" "time" + "github.com/GenesisKernel/go-genesis/packages/conf" + "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/service" @@ -107,6 +109,10 @@ func HandleTCPRequest(rw net.Conn) { // TcpListener is listening tcp address func TcpListener(laddr string) error { + if conf.Config.IsSupportingVDE() { + return nil + } + if strings.HasPrefix(laddr, "127.") { log.Warn("Listening at local address: ", laddr) } diff --git a/packages/vdemanager/manager.go b/packages/vdemanager/manager.go index d1a37d413..4cca4ac8e 100644 --- a/packages/vdemanager/manager.go +++ b/packages/vdemanager/manager.go @@ -24,7 +24,7 @@ const ( dropDBTemplate = `DROP OWNED BY %s CASCADE` dropDBRoleTemplate = `DROP ROLE IF EXISTS %s` - commandTemplate = `%s -VDEMode=true -configPath=%s -workDir=%s` + commandTemplate = `%s start --config=%s` ) var ( @@ -33,49 +33,35 @@ var ( // VDEManager struct type VDEManager struct { - processes *process.ProcessManager + processes *process.ProcessManager + execPath string + childConfigsPath string } var ( - Manager *VDEManager - childConfigsPath string + Manager *VDEManager ) -// InitVDEManager create init instance of VDEManager -func InitVDEManager() error { - if err := prepareWorkDir(); err != nil { - return err - } - - return initProcessManager() -} - -func prepareWorkDir() error { - childConfigsPath = path.Join(conf.Config.DataDir, childFolder) +func prepareWorkDir() (string, error) { + childConfigsPath := path.Join(conf.Config.DataDir, childFolder) if _, err := os.Stat(childConfigsPath); os.IsNotExist(err) { if err := os.Mkdir(childConfigsPath, 0700); err != nil { log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("creating configs directory") - return err + return "", err } } - return nil + return 
childConfigsPath, nil } // CreateVDE creates one instance of VDE func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) error { - execPath, err := os.Executable() - if err != nil { - log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("on getting executable path") - return err - } - config := ChildVDEConfig{ - Executable: execPath, + Executable: mgr.execPath, Name: name, - Directory: path.Join(childConfigsPath, name), + Directory: path.Join(mgr.childConfigsPath, name), DBUser: dbUser, DBPassword: dbPassword, ConfigFileName: consts.DefaultConfigFile, @@ -117,7 +103,7 @@ func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) erro procConfEntry.Name = "program:" + name command := fmt.Sprintf("%s --configPath=%s", config.Executable, config.Directory) procConfEntry.AddKeyValue("command", command) - proc := process.NewProcess("vdeMaster", confEntry) + proc := process.NewProcess("vdeMaster", procConfEntry) mgr.processes.Add(name, proc) mgr.processes.Find(name).Start(true) @@ -153,7 +139,7 @@ func (mgr *VDEManager) DeleteVDE(name string) error { p.Stop(true) } - vdeDir := path.Join(childConfigsPath, name) + vdeDir := path.Join(mgr.childConfigsPath, name) vdeConfigPath := filepath.Join(vdeDir, consts.DefaultConfigFile) vdeConfig, err := conf.GetConfigFromPath(vdeConfigPath) if err != nil { @@ -250,7 +236,7 @@ func (mgr *VDEManager) createVDEDB(vdeName, login, pass string) error { func (mgr *VDEManager) initVDEDir(vdeName string) error { - vdeDirName := path.Join(childConfigsPath, vdeName) + vdeDirName := path.Join(mgr.childConfigsPath, vdeName) if _, err := os.Stat(vdeDirName); os.IsNotExist(err) { if err := os.Mkdir(vdeDirName, 0700); err != nil { log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("creating VDE directory") @@ -261,21 +247,33 @@ func (mgr *VDEManager) initVDEDir(vdeName string) error { return nil } -func initProcessManager() error { +func InitVDEManager() { + + execPath, err := os.Executable() + if err != nil { + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Fatal("on determine executable path") + } + + childConfigsPath, err := prepareWorkDir() + if err != nil { + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Fatal("on prepare child configs folder") + } + Manager = &VDEManager{ - processes: process.NewProcessManager(), + processes: process.NewProcessManager(), + execPath: execPath, + childConfigsPath: childConfigsPath, } list, err := ioutil.ReadDir(childConfigsPath) if err != nil { - log.WithFields(log.Fields{"type": consts.IOError, "error": err, "path": childConfigsPath}).Error("Initialising VDE list") - return err + log.WithFields(log.Fields{"type": consts.IOError, "error": err, "path": childConfigsPath}).Fatal("on read child VDE directory") } for _, item := range list { if item.IsDir() { - procDir := path.Join(childConfigsPath, item.Name()) - commandStr := fmt.Sprintf(commandTemplate, bin(), filepath.Join(procDir, consts.DefaultConfigFile), procDir) + procDir := path.Join(Manager.childConfigsPath, item.Name()) + commandStr := fmt.Sprintf(commandTemplate, Manager.execPath, filepath.Join(procDir, consts.DefaultConfigFile)) confEntry := pConf.NewConfigEntry(procDir) confEntry.Name = "program:" + item.Name() confEntry.AddKeyValue("command", commandStr) @@ -287,6 +285,4 @@ func initProcessManager() error { Manager.processes.Add(item.Name(), proc) } } - - return nil } From e1c55b40c3a2660cb01f491bc3d8429876e8d336 Mon Sep 17 00:00:00 2001 From: Roman 
Poletaev Date: Thu, 10 May 2018 17:15:56 +0300 Subject: [PATCH 018/169] temporary commit --- packages/api/api.go | 8 +- packages/api/login.go | 5 +- packages/api/vde.go | 4 +- packages/daemons/block_generator_tx.go | 2 +- packages/daylight/start.go | 8 + .../vde/{vde.go => vde_data_contracts.go} | 267 ++---------------- packages/migration/vde/vde_data_keys.go | 6 + packages/migration/vde/vde_data_members.go | 7 + packages/migration/vde/vde_data_menu.go | 45 +++ packages/migration/vde/vde_data_pages.go | 5 + packages/migration/vde/vde_data_parameters.go | 18 ++ packages/migration/vde/vde_data_tables.go | 68 +++++ packages/migration/vde/vde_schema.go | 143 ++++++++++ packages/model/db.go | 9 +- packages/parser/common.go | 2 +- packages/smart/smart.go | 13 +- packages/template/template.go | 2 +- 17 files changed, 342 insertions(+), 270 deletions(-) rename packages/migration/vde/{vde.go => vde_data_contracts.go} (60%) create mode 100644 packages/migration/vde/vde_data_keys.go create mode 100644 packages/migration/vde/vde_data_members.go create mode 100644 packages/migration/vde/vde_data_menu.go create mode 100644 packages/migration/vde/vde_data_pages.go create mode 100644 packages/migration/vde/vde_data_parameters.go create mode 100644 packages/migration/vde/vde_data_tables.go create mode 100644 packages/migration/vde/vde_schema.go diff --git a/packages/api/api.go b/packages/api/api.go index d891d82fe..f8a503b7f 100644 --- a/packages/api/api.go +++ b/packages/api/api.go @@ -243,15 +243,11 @@ func fillToken(w http.ResponseWriter, r *http.Request, data *apiData, logger *lo func fillParams(params map[string]int) apiHandle { return func(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.Entry) error { if conf.Config.IsSupportingVDE() { - data.vm = smart.GetVM(true, consts.DefaultVDE) - if data.vm == nil { - return errorAPI(w, `E_VDE`, http.StatusBadRequest, data.ecosystemId) - } data.vde = true - } else { - data.vm = smart.GetVM(false, 0) } + data.vm = smart.GetVM() + for key, par := range params { val := r.FormValue(key) if par&pOptional == 0 && len(val) == 0 { diff --git a/packages/api/login.go b/packages/api/login.go index b55fe85c3..ef8114139 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -128,7 +128,8 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En params := make([]byte, 0) params = append(append(params, converter.EncodeLength(int64(len(hexPubKey)))...), hexPubKey...) 
- vm := smart.GetVM(false, 0) + vm := smart.GetVM() + contract := smart.VMGetContract(vm, "NewUser", 1) info := contract.Block.Info.(*script.ContractInfo) @@ -207,7 +208,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En Address: address, IsOwner: founder == wallet, IsNode: conf.Config.KeyID == wallet, - IsVDE: model.IsTable(fmt.Sprintf(`%d_vde_tables`, ecosystemID)), + IsVDE: model.IsTable(fmt.Sprintf(`%d_vde_tables`, consts.DefaultVDE)), } data.result = &result diff --git a/packages/api/vde.go b/packages/api/vde.go index d494dba3e..cf83ec6b0 100644 --- a/packages/api/vde.go +++ b/packages/api/vde.go @@ -67,8 +67,8 @@ func InitSmartContract(sc *smart.SmartContract, data []byte) error { if err := msgpack.Unmarshal(data, &sc.TxSmart); err != nil { return err } - sc.TxContract = smart.VMGetContractByID(smart.GetVM(sc.VDE, sc.TxSmart.EcosystemID), - int32(sc.TxSmart.Type)) + + sc.TxContract = smart.VMGetContractByID(smart.GetVM(), int32(sc.TxSmart.Type)) if sc.TxContract == nil { return fmt.Errorf(`unknown contract %d`, sc.TxSmart.Type) } diff --git a/packages/daemons/block_generator_tx.go b/packages/daemons/block_generator_tx.go index 9b5ddb977..d96e58f8c 100644 --- a/packages/daemons/block_generator_tx.go +++ b/packages/daemons/block_generator_tx.go @@ -45,7 +45,7 @@ func (dtx *DelayedTx) RunForBlockID(blockID int64) { } func (dtx *DelayedTx) createTx(delayedContactID, keyID int64) error { - vm := smart.GetVM(false, 0) + vm := smart.GetVM() contract := smart.VMGetContract(vm, callDelayedContract, uint32(firstEcosystemID)) info := contract.Block.Info.(*script.ContractInfo) diff --git a/packages/daylight/start.go b/packages/daylight/start.go index e7ff8b253..74861a936 100644 --- a/packages/daylight/start.go +++ b/packages/daylight/start.go @@ -35,6 +35,7 @@ import ( logtools "github.com/GenesisKernel/go-genesis/packages/log" "github.com/GenesisKernel/go-genesis/packages/model" "github.com/GenesisKernel/go-genesis/packages/publisher" + "github.com/GenesisKernel/go-genesis/packages/smart" "github.com/GenesisKernel/go-genesis/packages/statsd" "github.com/GenesisKernel/go-genesis/packages/utils" "github.com/GenesisKernel/go-genesis/packages/vdemanager" @@ -266,6 +267,13 @@ func Start() { na.Run() } + if conf.Config.IsSupportingVDE() { + if err := smart.LoadVDEContracts(nil, converter.Int64ToStr(consts.DefaultVDE)); err != nil { + log.WithFields(log.Fields{"type": consts.VMError, "error": err}).Fatal("on loading vde virtual mashine") + Exit(1) + } + } + if conf.Config.IsVDEMaster() { vdemanager.InitVDEManager() } diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde_data_contracts.go similarity index 60% rename from packages/migration/vde/vde.go rename to packages/migration/vde/vde_data_contracts.go index 640338e93..4e5ca29ab 100644 --- a/packages/migration/vde/vde.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -1,247 +1,6 @@ package vde -var SchemaVDE = ` - DROP TABLE IF EXISTS "%[1]d_vde_members"; - CREATE TABLE "%[1]d_vde_members" ( - "id" bigint NOT NULL DEFAULT '0', - "member_name" varchar(255) NOT NULL DEFAULT '', - "image_id" bigint, - "member_info" jsonb - ); - ALTER TABLE ONLY "%[1]d_vde_members" ADD CONSTRAINT "%[1]d_vde_members_pkey" PRIMARY KEY ("id"); - - INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('%[2]d', 'founder'); - INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('4544233900443112470', 'guest'); - - DROP TABLE IF EXISTS "%[1]d_vde_languages"; CREATE TABLE "%[1]d_vde_languages" ( - "id" 
bigint NOT NULL DEFAULT '0', - "name" character varying(100) NOT NULL DEFAULT '', - "res" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_languages" ADD CONSTRAINT "%[1]d_vde_languages_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_languages_index_name" ON "%[1]d_vde_languages" (name); - - DROP TABLE IF EXISTS "%[1]d_vde_menu"; CREATE TABLE "%[1]d_vde_menu" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "title" character varying(255) NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_menu" ADD CONSTRAINT "%[1]d_vde_menu_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_menu_index_name" ON "%[1]d_vde_menu" (name); - - - INSERT INTO "%[1]d_vde_menu" ("id","name","title","value","conditions") VALUES('2','admin_menu','Admin menu','MenuItem( - Icon: "icon-screen-desktop", - Page: "interface", - Vde: "true", - Title: "Interface" -) -MenuItem( - Icon: "icon-docs", - Page: "tables", - Vde: "true", - Title: "Tables" -) -MenuItem( - Icon: "icon-briefcase", - Page: "contracts", - Vde: "true", - Title: "Smart Contracts" -) -MenuItem( - Icon: "icon-settings", - Page: "parameters", - Vde: "true", - Title: "Ecosystem parameters" -) -MenuItem( - Icon: "icon-globe", - Page: "languages", - Vde: "true", - Title: "Language resources" -) -MenuItem( - Icon: "icon-cloud-upload", - Page: "import", - Vde: "true", - Title: "Import" -) -MenuItem( - Icon: "icon-cloud-download", - Page: "export", - Vde: "true", - Title: "Export" -)','true'); - - DROP TABLE IF EXISTS "%[1]d_vde_pages"; CREATE TABLE "%[1]d_vde_pages" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "menu" character varying(255) NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '', - "validate_count" bigint NOT NULL DEFAULT '1', - "app_id" bigint NOT NULL DEFAULT '0', - "validate_mode" character(1) NOT NULL DEFAULT '0' - ); - ALTER TABLE ONLY "%[1]d_vde_pages" ADD CONSTRAINT "%[1]d_vde_pages_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_pages_index_name" ON "%[1]d_vde_pages" (name); - - INSERT INTO "%[1]d_vde_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); - - DROP TABLE IF EXISTS "%[1]d_vde_blocks"; CREATE TABLE "%[1]d_vde_blocks" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_blocks" ADD CONSTRAINT "%[1]d_vde_blocks_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_blocks_index_name" ON "%[1]d_vde_blocks" (name); - - DROP TABLE IF EXISTS "%[1]d_vde_signatures"; CREATE TABLE "%[1]d_vde_signatures" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(100) NOT NULL DEFAULT '', - "value" jsonb, - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_signatures" ADD CONSTRAINT "%[1]d_vde_signatures_pkey" PRIMARY KEY (name); - - CREATE TABLE "%[1]d_vde_contracts" ( - "id" bigint NOT NULL DEFAULT '0', - "name" text NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_contracts" ADD CONSTRAINT "%[1]d_vde_contracts_pkey" PRIMARY KEY (id); - - DROP TABLE IF EXISTS "%[1]d_vde_parameters"; - CREATE TABLE "%[1]d_vde_parameters" ( - "id" bigint NOT NULL DEFAULT '0', - "name" varchar(255) UNIQUE NOT NULL 
DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_parameters" ADD CONSTRAINT "%[1]d_vde_parameters_pkey" PRIMARY KEY ("id"); - CREATE INDEX "%[1]d_vde_parameters_index_name" ON "%[1]d_vde_parameters" (name); - - INSERT INTO "%[1]d_vde_parameters" ("id","name", "value", "conditions") VALUES - ('1','founder_account', '%[2]d', 'ContractConditions("MainCondition")'), - ('2','new_table', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('3','new_column', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('4','changing_tables', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('5','changing_language', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('6','changing_signature', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('7','changing_page', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('8','changing_menu', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('9','changing_contracts', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('10','stylesheet', 'body { - /* You can define your custom styles here or create custom CSS rules */ - }', 'ContractConditions("MainCondition")'), - ('11','changing_blocks', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'); - - DROP TABLE IF EXISTS "%[1]d_vde_cron"; - CREATE TABLE "%[1]d_vde_cron" ( - "id" bigint NOT NULL DEFAULT '0', - "owner" bigint NOT NULL DEFAULT '0', - "cron" varchar(255) NOT NULL DEFAULT '', - "contract" varchar(255) NOT NULL DEFAULT '', - "counter" bigint NOT NULL DEFAULT '0', - "till" timestamp NOT NULL DEFAULT timestamp '1970-01-01 00:00:00', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_cron" ADD CONSTRAINT "%[1]d_vde_cron_pkey" PRIMARY KEY ("id"); - - DROP TABLE IF EXISTS "%[1]d_vde_binaries"; - CREATE TABLE "%[1]d_vde_binaries" ( - "id" bigint NOT NULL DEFAULT '0', - "app_id" bigint NOT NULL DEFAULT '1', - "member_id" bigint NOT NULL DEFAULT '0', - "name" varchar(255) NOT NULL DEFAULT '', - "data" bytea NOT NULL DEFAULT '', - "hash" varchar(32) NOT NULL DEFAULT '', - "mime_type" varchar(255) NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_binaries" ADD CONSTRAINT "%[1]d_vde_binaries_pkey" PRIMARY KEY (id); - CREATE UNIQUE INDEX "%[1]d_vde_binaries_index_app_id_member_id_name" ON "%[1]d_vde_binaries" (app_id, member_id, name); - - CREATE TABLE "%[1]d_vde_tables" ( - "id" bigint NOT NULL DEFAULT '0', - "name" varchar(100) UNIQUE NOT NULL DEFAULT '', - "permissions" jsonb, - "columns" jsonb, - "conditions" text NOT NULL DEFAULT '', - "app_id" bigint NOT NULL DEFAULT '1' - ); - ALTER TABLE ONLY "%[1]d_vde_tables" ADD CONSTRAINT "%[1]d_vde_tables_pkey" PRIMARY KEY ("id"); - CREATE INDEX "%[1]d_vde_tables_index_name" ON "%[1]d_vde_tables" (name); - - INSERT INTO "%[1]d_vde_tables" ("id", "name", "permissions","columns", "conditions") VALUES ('1', 'contracts', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "false", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), - ('2', 'languages', - '{"insert": 
"ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{ "name": "ContractConditions(\"MainCondition\")", - "res": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), - ('3', 'menu', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('4', 'pages', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "menu": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")", - "validate_count": "ContractConditions(\"MainCondition\")", - "validate_mode": "ContractConditions(\"MainCondition\")", - "app_id": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('5', 'blocks', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('6', 'signatures', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('7', 'cron', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"owner": "ContractConditions(\"MainCondition\")", - "cron": "ContractConditions(\"MainCondition\")", - "contract": "ContractConditions(\"MainCondition\")", - "counter": "ContractConditions(\"MainCondition\")", - "till": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractConditions("MainCondition")'), - ('8', 'binaries', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"app_id": "ContractConditions(\"MainCondition\")", - "member_id": "ContractConditions(\"MainCondition\")", - "name": "ContractConditions(\"MainCondition\")", - "data": "ContractConditions(\"MainCondition\")", - "hash": "ContractConditions(\"MainCondition\")", - "mime_type": "ContractConditions(\"MainCondition\")"}', - 'ContractConditions("MainCondition")'); - - INSERT INTO "%[1]d_vde_contracts" ("id", "name", "value", "conditions") VALUES +var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "conditions") VALUES ('1','MainCondition','contract MainCondition { conditions { if EcosysParam("founder_account")!=$key_id @@ -927,7 +686,7 @@ MenuItem( UpdateCron($Id) } }', 
'ContractConditions("MainCondition")'), - ('23', 'UploadBinary', contract UploadBinary { + ('23', 'UploadBinary', 'contract UploadBinary { data { Name string Data bytes "file" @@ -954,5 +713,23 @@ MenuItem( $result = $Id } - }', 'ContractConditions("MainCondition")'); - ` + }', 'ContractConditions("MainCondition")'), + ('24', 'NewUser','contract NewUser { + data { + NewPubkey string + } + conditions { + $newId = PubToID($NewPubkey) + if $newId == 0 { + error "Wrong pubkey" + } + if DBFind("keys").Columns("id").WhereId($newId).One("id") != nil { + error "User already exists" + } + + $amount = Money(1000) * Money(1000000000000000000) + } + action { + DBInsert("keys", "id, pub", $newId, $NewPubKey) + } + }', 'ContractConditions("MainCondition")');` diff --git a/packages/migration/vde/vde_data_keys.go b/packages/migration/vde/vde_data_keys.go new file mode 100644 index 000000000..42e26c843 --- /dev/null +++ b/packages/migration/vde/vde_data_keys.go @@ -0,0 +1,6 @@ +package vde + +var keysDataSQL = ` +INSERT INTO "%[1]d_keys" (id, pub) +VALUES (4544233900443112470, '489347a1205c818d9a02f285faaedd0122a56138e3d985f5e1b4f6a9470f90f692a00a3453771dd7feea388ceb7aefeaf183e299c70ad1aecb7f870bfada3b86'); +` diff --git a/packages/migration/vde/vde_data_members.go b/packages/migration/vde/vde_data_members.go new file mode 100644 index 000000000..069f1ea2b --- /dev/null +++ b/packages/migration/vde/vde_data_members.go @@ -0,0 +1,7 @@ +package vde + +var membersDataSQL = ` +INSERT INTO "%[1]d_members" ("id", "member_name") +VALUES('%[2]d', 'founder'), +('4544233900443112470', 'guest'); +` diff --git a/packages/migration/vde/vde_data_menu.go b/packages/migration/vde/vde_data_menu.go new file mode 100644 index 000000000..b52a1699f --- /dev/null +++ b/packages/migration/vde/vde_data_menu.go @@ -0,0 +1,45 @@ +package vde + +var menuDataSQL = ` +INSERT INTO "%[1]d_menu" ("id","name","title","value","conditions") VALUES('2','admin_menu','Admin menu','MenuItem( + Icon: "icon-screen-desktop", + Page: "interface", + Vde: "true", + Title: "Interface" +) +MenuItem( + Icon: "icon-docs", + Page: "tables", + Vde: "true", + Title: "Tables" +) +MenuItem( + Icon: "icon-briefcase", + Page: "contracts", + Vde: "true", + Title: "Smart Contracts" +) +MenuItem( + Icon: "icon-settings", + Page: "parameters", + Vde: "true", + Title: "Ecosystem parameters" +) +MenuItem( + Icon: "icon-globe", + Page: "languages", + Vde: "true", + Title: "Language resources" +) +MenuItem( + Icon: "icon-cloud-upload", + Page: "import", + Vde: "true", + Title: "Import" +) +MenuItem( + Icon: "icon-cloud-download", + Page: "export", + Vde: "true", + Title: "Export" +)','true');` diff --git a/packages/migration/vde/vde_data_pages.go b/packages/migration/vde/vde_data_pages.go new file mode 100644 index 000000000..90ef6eab4 --- /dev/null +++ b/packages/migration/vde/vde_data_pages.go @@ -0,0 +1,5 @@ +package vde + +var pagesDataSQL = ` +INSERT INTO "%[1]d_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); +` diff --git a/packages/migration/vde/vde_data_parameters.go b/packages/migration/vde/vde_data_parameters.go new file mode 100644 index 000000000..3ba29e2f9 --- /dev/null +++ b/packages/migration/vde/vde_data_parameters.go @@ -0,0 +1,18 @@ +package vde + +var parametersDataSQL = ` +INSERT INTO "%[1]d_parameters" ("id","name", "value", "conditions") VALUES + ('1','founder_account', '%[2]d', 'ContractConditions("MainCondition")'), + ('2','new_table', 'ContractConditions("MainCondition")', 
'ContractConditions("MainCondition")'), + ('3','new_column', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('4','changing_tables', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('5','changing_language', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('6','changing_signature', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('7','changing_page', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('8','changing_menu', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('9','changing_contracts', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('10','stylesheet', 'body { + /* You can define your custom styles here or create custom CSS rules */ + }', 'ContractConditions("MainCondition")'), + ('11','changing_blocks', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'); +` diff --git a/packages/migration/vde/vde_data_tables.go b/packages/migration/vde/vde_data_tables.go new file mode 100644 index 000000000..4223e825a --- /dev/null +++ b/packages/migration/vde/vde_data_tables.go @@ -0,0 +1,68 @@ +package vde + +var tablesDataSQL = ` +INSERT INTO "%[1]d_tables" ("id", "name", "permissions","columns", "conditions") VALUES ('1', 'contracts', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "false", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), + ('2', 'languages', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{ "name": "ContractConditions(\"MainCondition\")", + "res": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), + ('3', 'menu', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('4', 'pages', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"menu": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")", +"validate_count": "ContractConditions(\"MainCondition\")", +"validate_mode": "ContractConditions(\"MainCondition\")", +"app_id": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('5', 'blocks', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('6', 'signatures', + '{"insert": "ContractConditions(\"MainCondition\")", 
"update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('7', 'cron', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"owner": "ContractConditions(\"MainCondition\")", + "cron": "ContractConditions(\"MainCondition\")", + "contract": "ContractConditions(\"MainCondition\")", + "counter": "ContractConditions(\"MainCondition\")", + "till": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractConditions("MainCondition")'), + ('8', 'binaries', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"app_id": "ContractConditions(\"MainCondition\")", + "member_id": "ContractConditions(\"MainCondition\")", + "name": "ContractConditions(\"MainCondition\")", + "data": "ContractConditions(\"MainCondition\")", + "hash": "ContractConditions(\"MainCondition\")", + "mime_type": "ContractConditions(\"MainCondition\")"}', + 'ContractConditions("MainCondition")'); +` diff --git a/packages/migration/vde/vde_schema.go b/packages/migration/vde/vde_schema.go new file mode 100644 index 000000000..c3fda993f --- /dev/null +++ b/packages/migration/vde/vde_schema.go @@ -0,0 +1,143 @@ +package vde + +import ( + "strings" +) + +// GetVDEScript returns script for VDE schema +func GetVDEScript() string { + scripts := []string{ + schemaVDE, + membersDataSQL, + menuDataSQL, + pagesDataSQL, + parametersDataSQL, + tablesDataSQL, + contractsDataSQL, + keysDataSQL, + } + + return strings.Join(scripts, "\r\n") +} + +var schemaVDE = ` + DROP TABLE IF EXISTS "%[1]d_keys"; CREATE TABLE "%[1]d_keys" ( + "id" bigint NOT NULL DEFAULT '0', + "pub" bytea NOT NULL DEFAULT '', + "multi" bigint NOT NULL DEFAULT '0', + "deleted" bigint NOT NULL DEFAULT '0', + "blocked" bigint NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_keys" ADD CONSTRAINT "%[1]d_keys_pkey" PRIMARY KEY (id); + + DROP TABLE IF EXISTS "%[1]d_members"; + CREATE TABLE "%[1]d_members" ( + "id" bigint NOT NULL DEFAULT '0', + "member_name" varchar(255) NOT NULL DEFAULT '', + "image_id" bigint, + "member_info" jsonb + ); + ALTER TABLE ONLY "%[1]d_members" ADD CONSTRAINT "%[1]d_members_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_languages"; CREATE TABLE "%[1]d_languages" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(100) NOT NULL DEFAULT '', + "res" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_languages" ADD CONSTRAINT "%[1]d_languages_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_languages_index_name" ON "%[1]d_languages" (name); + + DROP TABLE IF EXISTS "%[1]d_menu"; CREATE TABLE "%[1]d_menu" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "title" character varying(255) NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_menu" ADD CONSTRAINT "%[1]d_menu_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_menu_index_name" ON "%[1]d_menu" (name); + + DROP TABLE IF EXISTS "%[1]d_pages"; CREATE TABLE "%[1]d_pages" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character 
varying(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "menu" character varying(255) NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '', + "validate_count" bigint NOT NULL DEFAULT '1', + "app_id" bigint NOT NULL DEFAULT '0', + "validate_mode" character(1) NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_pages" ADD CONSTRAINT "%[1]d_pages_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_pages_index_name" ON "%[1]d_pages" (name); + + DROP TABLE IF EXISTS "%[1]d_blocks"; CREATE TABLE "%[1]d_blocks" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_blocks" ADD CONSTRAINT "%[1]d_blocks_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_blocks_index_name" ON "%[1]d_blocks" (name); + + DROP TABLE IF EXISTS "%[1]d_signatures"; CREATE TABLE "%[1]d_signatures" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(100) NOT NULL DEFAULT '', + "value" jsonb, + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_signatures" ADD CONSTRAINT "%[1]d_signatures_pkey" PRIMARY KEY (name); + + CREATE TABLE "%[1]d_contracts" ( + "id" bigint NOT NULL DEFAULT '0', + "name" text NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_contracts" ADD CONSTRAINT "%[1]d_contracts_pkey" PRIMARY KEY (id); + + DROP TABLE IF EXISTS "%[1]d_parameters"; + CREATE TABLE "%[1]d_parameters" ( + "id" bigint NOT NULL DEFAULT '0', + "name" varchar(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_parameters" ADD CONSTRAINT "%[1]d_parameters_pkey" PRIMARY KEY ("id"); + CREATE INDEX "%[1]d_parameters_index_name" ON "%[1]d_parameters" (name); + + DROP TABLE IF EXISTS "%[1]d_cron"; + CREATE TABLE "%[1]d_cron" ( + "id" bigint NOT NULL DEFAULT '0', + "owner" bigint NOT NULL DEFAULT '0', + "cron" varchar(255) NOT NULL DEFAULT '', + "contract" varchar(255) NOT NULL DEFAULT '', + "counter" bigint NOT NULL DEFAULT '0', + "till" timestamp NOT NULL DEFAULT timestamp '1970-01-01 00:00:00', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_cron" ADD CONSTRAINT "%[1]d_cron_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_binaries"; + CREATE TABLE "%[1]d_binaries" ( + "id" bigint NOT NULL DEFAULT '0', + "app_id" bigint NOT NULL DEFAULT '1', + "member_id" bigint NOT NULL DEFAULT '0', + "name" varchar(255) NOT NULL DEFAULT '', + "data" bytea NOT NULL DEFAULT '', + "hash" varchar(32) NOT NULL DEFAULT '', + "mime_type" varchar(255) NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_binaries" ADD CONSTRAINT "%[1]d_binaries_pkey" PRIMARY KEY (id); + CREATE UNIQUE INDEX "%[1]d_binaries_index_app_id_member_id_name" ON "%[1]d_binaries" (app_id, member_id, name); + + CREATE TABLE "%[1]d_tables" ( + "id" bigint NOT NULL DEFAULT '0', + "name" varchar(100) UNIQUE NOT NULL DEFAULT '', + "permissions" jsonb, + "columns" jsonb, + "conditions" text NOT NULL DEFAULT '', + "app_id" bigint NOT NULL DEFAULT '1' + ); + ALTER TABLE ONLY "%[1]d_tables" ADD CONSTRAINT "%[1]d_tables_pkey" PRIMARY KEY ("id"); + CREATE INDEX "%[1]d_tables_index_name" ON "%[1]d_tables" (name); + ` diff --git a/packages/model/db.go b/packages/model/db.go index 7c7c8efdb..b6416c45f 100644 --- a/packages/model/db.go +++ b/packages/model/db.go @@ -156,7 +156,7 @@ func ExecSchemaEcosystem(db 
*DbTransaction, id int, wallet int64, name string, f // ExecSchemaLocalData is executing schema with local data func ExecSchemaLocalData(id int, wallet int64) error { - return DBConn.Exec(fmt.Sprintf(vde.SchemaVDE, id, wallet)).Error + return DBConn.Exec(fmt.Sprintf(vde.GetVDEScript(), id, wallet)).Error } // ExecSchema is executing schema @@ -385,5 +385,12 @@ func InitDB(cfg conf.DBConfig) error { return err } + if conf.Config.IsSupportingVDE() { + if err := ExecSchemaLocalData(consts.DefaultVDE, conf.Config.KeyID); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating VDE schema") + return err + } + } + return nil } diff --git a/packages/parser/common.go b/packages/parser/common.go index 2147118f9..f8bf4ff36 100644 --- a/packages/parser/common.go +++ b/packages/parser/common.go @@ -481,7 +481,7 @@ func (p *Parser) CallContract(flags int) (resultContract string, err error) { VDE: false, Rollback: true, SysUpdate: false, - VM: smart.GetVM(false, 0), + VM: smart.GetVM(), TxSmart: *p.TxSmart, TxData: p.TxData, TxContract: p.TxContract, diff --git a/packages/smart/smart.go b/packages/smart/smart.go index 1eb641958..9f3434632 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -66,7 +66,6 @@ const ( var ( smartVM *script.VM - smartVDE map[int64]*script.VM smartTest = make(map[string]string) ErrCurrentBalance = errors.New(`current balance is not enough`) @@ -118,17 +117,10 @@ func newVM() *script.VM { func init() { smartVM = newVM() - smartVDE = make(map[int64]*script.VM) } // GetVM is returning smart vm -func GetVM(vde bool, ecosystemID int64) *script.VM { - if vde { - if v, ok := smartVDE[ecosystemID]; ok { - return v - } - return nil - } +func GetVM() *script.VM { return smartVM } @@ -495,7 +487,6 @@ func LoadVDEContracts(transaction *model.DbTransaction, prefix string) (err erro } EmbedFuncs(vm, vmt) - smartVDE[state] = vm LoadSysFuncs(vm, int(state)) for _, item := range contracts { list, err := script.ContractsList(item[`value`]) @@ -853,7 +844,7 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { methods := []string{`init`, `conditions`, `action`, `rollback`} sc.TxContract.StackCont = []string{sc.TxContract.Name} (*sc.TxContract.Extend)[`stack_cont`] = StackCont - sc.VM = GetVM(sc.VDE, sc.TxSmart.EcosystemID) + sc.VM = GetVM() if (flags&CallRollback) == 0 && (flags&CallAction) != 0 { if !sc.VDE { toID = sc.BlockData.KeyID diff --git a/packages/template/template.go b/packages/template/template.go index 5c0dc1842..8beb4882b 100644 --- a/packages/template/template.go +++ b/packages/template/template.go @@ -692,7 +692,7 @@ func Template2JSON(input string, timeout *bool, vars *map[string]string) []byte isvde := (*vars)[`vde`] == `true` || (*vars)[`vde`] == `1` sc := smart.SmartContract{ VDE: isvde, - VM: smart.GetVM(isvde, converter.StrToInt64((*vars)[`ecosystem_id`])), + VM: smart.GetVM(), TxSmart: tx.SmartContract{ Header: tx.Header{ EcosystemID: converter.StrToInt64((*vars)[`ecosystem_id`]), From 48174f61fceca872d7303e9041589c9af185b069 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 10 May 2018 22:37:36 +0300 Subject: [PATCH 019/169] fix login --- packages/api/api.go | 37 ++++--- packages/api/login.go | 4 +- packages/migration/vde/vde_data_contracts.go | 111 +++++++++++++++---- packages/migration/vde/vde_data_pages.go | 2 +- packages/migration/vde/vde_schema.go | 29 +++++ packages/smart/smart.go | 8 +- 6 files changed, 148 insertions(+), 43 deletions(-) diff --git a/packages/api/api.go 
b/packages/api/api.go index f8a503b7f..d44c48242 100644 --- a/packages/api/api.go +++ b/packages/api/api.go @@ -133,9 +133,6 @@ func errorAPI(w http.ResponseWriter, err interface{}, code int, params ...interf func getPrefix(data *apiData) (prefix string) { prefix = converter.Int64ToStr(data.ecosystemId) - if data.vde { - prefix += `_vde` - } return } @@ -274,6 +271,10 @@ func fillParams(params map[string]int) apiHandle { } func checkEcosystem(w http.ResponseWriter, data *apiData, logger *log.Entry) (int64, string, error) { + if conf.Config.IsSupportingVDE() { + return consts.DefaultVDE, "1", nil + } + ecosystemID := data.ecosystemId if data.params[`ecosystem`].(int64) > 0 { ecosystemID = data.params[`ecosystem`].(int64) @@ -288,9 +289,9 @@ func checkEcosystem(w http.ResponseWriter, data *apiData, logger *log.Entry) (in } } prefix := converter.Int64ToStr(ecosystemID) - if data.vde { - prefix += `_vde` - } + // if data.vde { + // prefix += `_vde` + // } return ecosystemID, prefix, nil } @@ -299,18 +300,20 @@ func fillTokenData(data *apiData, claims *JWTClaims, logger *log.Entry) error { data.keyId = converter.StrToInt64(claims.KeyID) data.isMobile = claims.IsMobile data.roleId = converter.StrToInt64(claims.RoleID) - ecosystem := &model.Ecosystem{} - found, err := ecosystem.Get(data.ecosystemId) - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on getting ecosystem from db") - return err - } + if !conf.Config.IsSupportingVDE() { + ecosystem := &model.Ecosystem{} + found, err := ecosystem.Get(data.ecosystemId) + if err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on getting ecosystem from db") + return err + } - if !found { - err := fmt.Errorf("ecosystem not found") - logger.WithFields(log.Fields{"type": consts.NotFound, "id": data.ecosystemId, "error": err}).Error("ecosystem not found") - } + if !found { + err := fmt.Errorf("ecosystem not found") + logger.WithFields(log.Fields{"type": consts.NotFound, "id": data.ecosystemId, "error": err}).Error("ecosystem not found") + } - data.ecosystemName = ecosystem.Name + data.ecosystemName = ecosystem.Name + } return nil } diff --git a/packages/api/login.go b/packages/api/login.go index ef8114139..d9c7f8de6 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -128,9 +128,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En params := make([]byte, 0) params = append(append(params, converter.EncodeLength(int64(len(hexPubKey)))...), hexPubKey...) 
- vm := smart.GetVM() - - contract := smart.VMGetContract(vm, "NewUser", 1) + contract := smart.GetContract("NewUser", 1) info := contract.Block.Info.(*script.ContractInfo) err = tx.BuildTransaction(tx.SmartContract{ diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index 4e5ca29ab..ea83e591c 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -483,38 +483,113 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c PermColumn($TableName, $Name, $Permissions) } }', 'ContractConditions("MainCondition")'), - ('18','NewLang','contract NewLang { + ('18','NewLang', 'contract NewLang { data { - Name string - Trans string - AppID int + ApplicationId int "optional" + Name string + Trans string "optional" + Value array "optional" + IdLanguage array "optional" } + conditions { - EvalCondition("parameters", "changing_language", "value") - var row array - row = DBFind("languages").Columns("name").Where("name=? AND app_id=?", $Name, $AppID).Limit(1) - if Len(row) > 0 { - error Sprintf("The language resource %%s already exists", $Name) + if $ApplicationId == 0 { + warning "Application id cannot equal 0" + } + + if DBFind("languages").Columns("id").Where("name = ?", $Name).One("id") { + warning Sprintf( "Language resource %%s already exists", $Name) } + + var j int + while j < Len($IdLanguage) { + if $IdLanguage[j] == "" { + info("Locale empty") + } + if $Value[j] == "" { + info("Value empty") + } + j = j + 1 + } + EvalCondition("parameters", "changing_language", "value") } + action { - DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) + var i,len,lenshar int + var res,langarr string + len = Len($IdLanguage) + lenshar = Len($Value) + while i < len { + if i + 1 == len { + res = res + Sprintf("%%q: %%q",$IdLanguage[i],$Value[i]) + } else { + res = res + Sprintf("%%q: %%q,",$IdLanguage[i],$Value[i]) + } + i = i + 1 + } + if len > 0 { + langarr = Sprintf("{"+"%%v"+"}", res) + $Trans = langarr + } + $result = CreateLanguage($Name, $Trans, $ApplicationId) } }', 'ContractConditions("MainCondition")'), ('19','EditLang','contract EditLang { data { - Id int - Name string - Trans string - AppID int + Id int + Name string "optional" + ApplicationId int "optional" + Trans string "optional" + Value array "optional" + IdLanguage array "optional" } + conditions { + var j int + while j < Len($IdLanguage) { + if ($IdLanguage[j] == ""){ + info("Locale empty") + } + if ($Value[j] == ""){ + info("Value empty") + } + j = j + 1 + } EvalCondition("parameters", "changing_language", "value") } + action { - DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) + var i,len int + var res,langarr string + len = Len($IdLanguage) + while i < len { + if (i + 1 == len){ + res = res + Sprintf("%%q: %%q", $IdLanguage[i],$Value[i]) + } + else { + res = res + Sprintf("%%q: %%q, ", $IdLanguage[i],$Value[i]) + } + i = i + 1 + } + + $row = DBFind("languages").Columns("name,app_id").WhereId($Id).Row() + if !$row{ + warning "Language not found" + } + + if $ApplicationId == 0 { + $ApplicationId = Int($row["app_id"]) + } + if $Name == "" { + $Name = $row["name"] + } + + if (len > 0){ + langarr = Sprintf("{"+"%%v"+"}", res) + $Trans = langarr + + } + EditLanguage($Id, $Name, $Trans, $ApplicationId) } }', 'ContractConditions("MainCondition")'), ('20','Import','contract Import { @@ -726,8 +801,6 @@ var 
contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c if DBFind("keys").Columns("id").WhereId($newId).One("id") != nil { error "User already exists" } - - $amount = Money(1000) * Money(1000000000000000000) } action { DBInsert("keys", "id, pub", $newId, $NewPubKey) diff --git a/packages/migration/vde/vde_data_pages.go b/packages/migration/vde/vde_data_pages.go index 90ef6eab4..b013166b1 100644 --- a/packages/migration/vde/vde_data_pages.go +++ b/packages/migration/vde/vde_data_pages.go @@ -1,5 +1,5 @@ package vde var pagesDataSQL = ` -INSERT INTO "%[1]d_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); +INSERT INTO "%[1]d_pages" ("id","name","value","menu","conditions") VALUES('1', 'default_page', '', 'admin_menu', 'true'),('2','admin_index','','admin_menu','true'); ` diff --git a/packages/migration/vde/vde_schema.go b/packages/migration/vde/vde_schema.go index c3fda993f..7edf5da94 100644 --- a/packages/migration/vde/vde_schema.go +++ b/packages/migration/vde/vde_schema.go @@ -140,4 +140,33 @@ var schemaVDE = ` ); ALTER TABLE ONLY "%[1]d_tables" ADD CONSTRAINT "%[1]d_tables_pkey" PRIMARY KEY ("id"); CREATE INDEX "%[1]d_tables_index_name" ON "%[1]d_tables" (name); + + DROP TABLE IF EXISTS "%[1]d_notifications"; + CREATE TABLE "%[1]d_notifications" ( + "id" bigint NOT NULL DEFAULT '0', + "recipient" jsonb, + "sender" jsonb, + "notification" jsonb, + "page_params" jsonb, + "processing_info" jsonb, + "page_name" varchar(255) NOT NULL DEFAULT '', + "date_created" timestamp, + "date_start_processing" timestamp, + "date_closed" timestamp, + "closed" bigint NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_notifications" ADD CONSTRAINT "%[1]d_notifications_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_roles_participants"; + CREATE TABLE "%[1]d_roles_participants" ( + "id" bigint NOT NULL DEFAULT '0', + "role" jsonb, + "member" jsonb, + "appointed" jsonb, + "date_created" timestamp, + "date_deleted" timestamp, + "deleted" bigint NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_roles_participants" ADD CONSTRAINT "%[1]d_roles_participants_pkey" PRIMARY KEY ("id"); + ` diff --git a/packages/smart/smart.go b/packages/smart/smart.go index 9f3434632..41c61e610 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -174,6 +174,7 @@ func VMRun(vm *script.VM, block *script.Block, params []interface{}, extend *map func VMGetContract(vm *script.VM, name string, state uint32) *Contract { name = script.StateName(state, name) obj, ok := vm.Objects[name] + if ok && obj.Type == script.ObjContract { return &Contract{Name: name, Block: obj.Value.(*script.Block)} } @@ -469,15 +470,15 @@ func LoadContract(transaction *model.DbTransaction, prefix string) (err error) { func LoadVDEContracts(transaction *model.DbTransaction, prefix string) (err error) { var contracts []map[string]string - if !model.IsTable(prefix + `_vde_contracts`) { + if !model.IsTable(prefix + `_contracts`) { return } - contracts, err = model.GetAllTransaction(transaction, `select * from "`+prefix+`_vde_contracts" order by id`, -1) + contracts, err = model.GetAllTransaction(transaction, `select * from "`+prefix+`_contracts" order by id`, -1) if err != nil { return err } state := converter.StrToInt64(prefix) - vm := newVM() + vm := GetVM() var vmt script.VMType if conf.Config.IsVDE() { @@ -502,6 +503,7 @@ func LoadVDEContracts(transaction *model.DbTransaction, prefix string) (err erro WalletID: 0, TokenID: 0, } + if err = vmCompile(vm, item[`value`], 
&owner); err != nil { log.WithFields(log.Fields{"names": names, "error": err}).Error("Load VDE Contract") } else { From cb3291114479eeccaaf1fa58dd9b0cdb69a1403c Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 14 May 2018 09:18:14 +0300 Subject: [PATCH 020/169] temporary commit --- packages/api/login.go | 50 +++++++- packages/api/route.go | 6 +- packages/api/vde.go | 7 +- packages/api/vde_test.go | 120 ++----------------- packages/migration/vde/vde_data_contracts.go | 41 +++++++ packages/smart/smart.go | 1 + 6 files changed, 105 insertions(+), 120 deletions(-) diff --git a/packages/api/login.go b/packages/api/login.go index d9c7f8de6..9e0f9a07e 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -19,12 +19,14 @@ package api import ( "fmt" "net/http" + "strings" "time" "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/notificator" "github.com/GenesisKernel/go-genesis/packages/publisher" + msgpack "gopkg.in/vmihailenco/msgpack.v2" "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/crypto" @@ -131,20 +133,60 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En contract := smart.GetContract("NewUser", 1) info := contract.Block.Info.(*script.ContractInfo) - err = tx.BuildTransaction(tx.SmartContract{ + // scHeader, err := getHeader("NewUser", data) + if err != nil { + return errorAPI(w, "E_EMPTYOBJECT", http.StatusBadRequest) + } + + sc := tx.SmartContract{ Header: tx.Header{ Type: int(info.ID), Time: time.Now().Unix(), EcosystemID: 1, KeyID: conf.Config.KeyID, NetworkID: consts.NETWORK_ID, + PublicKey: pubkey, }, SignedBy: smart.PubToID(NodePublicKey), Data: params, - }, NodePrivateKey, NodePublicKey, string(hexPubKey)) - if err != nil { - log.WithFields(log.Fields{"type": consts.ContractError}).Error("Executing contract") } + + if conf.Config.IsSupportingVDE() { + + signPrms := []string{sc.ForSign()} + signPrms = append(signPrms, string(hexPubKey)) + signature, err := crypto.Sign( + NodePrivateKey, + strings.Join(signPrms, ","), + ) + if err != nil { + log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("signing by node private key") + return err + } + sc.BinSignatures = converter.EncodeLengthPlusData(signature) + serializedContract, err := msgpack.Marshal(sc) + if err != nil { + logger.WithFields(log.Fields{"type": consts.MarshallingError, "error": err}).Error("marshalling smart contract to msgpack") + return errorAPI(w, err, http.StatusInternalServerError) + } + // signature := data.params[`signature`].([]byte) + // if len(signature) == 0 { + // log.WithFields(log.Fields{"type": consts.EmptyObject, "params": data.params}).Error("signature is empty") + // } + + fmt.Println(len(signature)) + ret, err := VDEContract(serializedContract, data) + if err != nil { + return errorAPI(w, err, http.StatusInternalServerError) + } + data.result = ret + } else { + err = tx.BuildTransaction(sc, NodePrivateKey, NodePublicKey, string(hexPubKey)) + if err != nil { + log.WithFields(log.Fields{"type": consts.ContractError}).Error("Executing contract") + } + } + } if ecosystemID > 1 && len(pubkey) == 0 { diff --git a/packages/api/route.go b/packages/api/route.go index a4234d34b..65097c44b 100644 --- a/packages/api/route.go +++ b/packages/api/route.go @@ -59,7 +59,7 @@ func Route(route *hr.Router) { get(`interface/page/:name`, ``, authWallet, getPageRow) get(`interface/menu/:name`, ``, 
authWallet, getMenuRow) get(`interface/block/:name`, ``, authWallet, getBlockInterfaceRow) - get(`systemparams`, `?names:string`, authWallet, systemParams) + // get(`systemparams`, `?names:string`, authWallet, systemParams) get(`table/:name`, ``, authWallet, table) get(`tables`, `?limit ?offset:int64`, authWallet, tables) get(`test/:name`, ``, getTest) @@ -80,7 +80,7 @@ func Route(route *hr.Router) { post(`test/:name`, ``, getTest) post(`content`, `template ?source:string`, jsonContent) post(`updnotificator`, `ids:string`, updateNotificator) - + get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) methodRoute(route, `POST`, `node/:name`, `?token_ecosystem:int64,?max_sum ?payover:string`, contractHandlers.nodeContract) if !conf.Config.IsSupportingVDE() { @@ -92,7 +92,7 @@ func Route(route *hr.Router) { get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) get(`block/:id`, ``, getBlockInfo) get(`maxblockid`, ``, getMaxBlockID) - get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) + get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) get(`systemparams`, `?names:string`, authWallet, systemParams) get(`ecosystems`, ``, authWallet, ecosystems) diff --git a/packages/api/vde.go b/packages/api/vde.go index cf83ec6b0..9891ffddb 100644 --- a/packages/api/vde.go +++ b/packages/api/vde.go @@ -173,17 +173,22 @@ func VDEContract(contractData []byte, data *apiData) (result *contractResult, er result.Message = &txstatusError{Type: "panic", Error: err.Error()} return } + if data.token != nil && data.token.Valid { if auth, err := data.token.SignedString([]byte(jwtSecret)); err == nil { sc.TxData[`auth_token`] = auth } } + if ret, err = sc.CallContract(smart.CallInit | smart.CallCondition | smart.CallAction); err == nil { result.Result = ret } else { if errResult := json.Unmarshal([]byte(err.Error()), &result.Message); errResult != nil { - log.WithFields(log.Fields{"type": consts.JSONUnmarshallError, "text": err.Error(), + log.WithFields(log.Fields{ + "type": consts.JSONUnmarshallError, + "text": err.Error(), "error": errResult}).Error("unmarshalling contract error") + result.Message = &txstatusError{Type: "panic", Error: errResult.Error()} } } diff --git a/packages/api/vde_test.go b/packages/api/vde_test.go index b2b071315..57cb90949 100644 --- a/packages/api/vde_test.go +++ b/packages/api/vde_test.go @@ -24,6 +24,7 @@ import ( "time" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" @@ -33,121 +34,16 @@ import ( ) func TestVDECreate(t *testing.T) { - var ( - err error - retid int64 - ret vdeCreateResult - ) - - assert.NoError(t, keyLogin(1)) - - if err = sendPost(`vde/create`, nil, &ret); err != nil && - err.Error() != `400 {"error": "E_VDECREATED", "msg": "Virtual Dedicated Ecosystem is already created" }` { - t.Error(err) - return - } - - rnd := `rnd` + crypto.RandSeq(6) - form := url.Values{`Value`: {`contract ` + rnd + ` { - data { - Par string - } - action { Test("active", $Par)}}`}, `Conditions`: {`ContractConditions("MainCondition")`}, `vde`: {`true`}} - - retid, _, err = postTxResult(`NewContract`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`contract ` + rnd + ` { - data { - Par string - } - action { Test("active 5", $Par)}}`}, `Conditions`: {`ContractConditions("MainCondition")`}, `vde`: {`true`}} - assert.NoError(t, 
postTx(`EditContract`, &form)) - - form = url.Values{`Name`: {rnd}, `Value`: {`Test value`}, `Conditions`: {`ContractConditions("MainCondition")`}, - `vde`: {`1`}} - - retid, _, err = postTxResult(`NewParameter`, &form) - assert.NoError(t, err) + require.NoError(t, keyLogin(1)) - form = url.Values{`Name`: {`new_table`}, `Value`: {`Test value`}, `Conditions`: {`ContractConditions("MainCondition")`}, - `vde`: {`1`}} - if err = postTx(`NewParameter`, &form); err != nil && err.Error() != - `500 {"error": "E_SERVER", "msg": "{\"type\":\"warning\",\"error\":\"Parameter new_table already exists\"}" }` { - t.Error(err) - return + form := url.Values{ + "VDEName": {"testvde"}, + "DBUser": {"vdeuser"}, + "DBPassword": {"vdepassword"}, + "VDEAPIPort": {"8000"}, } - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit value`}, `Conditions`: {`true`}, - `vde`: {`1`}} - - assert.NoError(t, postTx(`EditParameter`, &form)) - - form = url.Values{"Name": {`menu` + rnd}, "Value": {`first - second - third`}, "Title": {`My Menu`}, - "Conditions": {`true`}, `vde`: {`1`}} - retid, _, err = postTxResult(`NewMenu`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit value`}, - `Conditions`: {`true`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`EditMenu`, &form)) - - form = url.Values{"Id": {converter.Int64ToStr(retid)}, "Value": {`Span(Append)`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`AppendMenu`, &form)) - - form = url.Values{"Name": {`page` + rnd}, "Value": {`Page`}, "Menu": {`government`}, - "Conditions": {`true`}, `vde`: {`1`}} - retid, _, err = postTxResult(`NewPage`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit page value`}, - `Conditions`: {`true`}, "Menu": {`government`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`EditPage`, &form)) - - form = url.Values{"Id": {converter.Int64ToStr(retid)}, "Value": {`Span(Test Page)`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`AppendPage`, &form)) - - form = url.Values{"Name": {`block` + rnd}, "Value": {`Page block`}, "Conditions": {`true`}, `vde`: {`1`}} - retid, _, err = postTxResult(`NewBlock`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit block value`}, - `Conditions`: {`true`}, `vde`: {`1`}} - assert.NoError(t, postTx(`EditBlock`, &form)) - - name := randName(`tbl`) - form = url.Values{"Name": {name}, `vde`: {`true`}, "Columns": {`[{"name":"MyName","type":"varchar", "index": "1", - "conditions":"true"}, - {"name":"Amount", "type":"number","index": "0", "conditions":"true"}, - {"name":"Active", "type":"character","index": "0", "conditions":"true"}]`}, - "Permissions": {`{"insert": "true", "update" : "true", "new_column": "true"}`}} - assert.NoError(t, postTx(`NewTable`, &form)) - - form = url.Values{"Name": {name}, `vde`: {`true`}, - "Permissions": {`{"insert": "ContractConditions(\"MainCondition\")", - "update" : "true", "new_column": "ContractConditions(\"MainCondition\")"}`}} - assert.NoError(t, postTx(`EditTable`, &form)) - - form = url.Values{"TableName": {name}, "Name": {`newCol`}, `vde`: {`1`}, - "Type": {"varchar"}, "Index": {"0"}, "Permissions": {"true"}} - assert.NoError(t, postTx(`NewColumn`, &form)) - - form = url.Values{"TableName": {name}, "Name": {`newColRead`}, `vde`: {`1`}, - "Type": {"varchar"}, "Index": {"0"}, "Permissions": {`{"update":"true", "read":"false"}`}} - assert.NoError(t, postTx(`NewColumn`, &form)) - - form = 
url.Values{"TableName": {name}, "Name": {`newCol`}, `vde`: {`1`}, - "Permissions": {"ContractConditions(\"MainCondition\")"}} - assert.NoError(t, postTx(`EditColumn`, &form)) + require.NoError(t, postTx("NewVDE", &form)) - form = url.Values{"TableName": {name}, "Name": {`newCol`}, `vde`: {`1`}, - "Permissions": {`{"update":"true", "read":"false"}`}} - assert.NoError(t, postTx(`EditColumn`, &form)) } func TestVDEParams(t *testing.T) { diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index ea83e591c..755e626c7 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -794,6 +794,7 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c NewPubkey string } conditions { + Println($NewPubkey) $newId = PubToID($NewPubkey) if $newId == 0 { error "Wrong pubkey" @@ -805,4 +806,44 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c action { DBInsert("keys", "id, pub", $newId, $NewPubKey) } + }', 'ContractConditions("MainCondition")'), + ('25', 'NewVDE', 'contract NewVDE { + data { + VDEName string + DBUser string + DBPassword string + VDEAPIPort int + } + + conditions { + } + + action { + CreateVDE($VDEName, $DBUser, $DBPassword, $VDEAPIPort) + } + }', 'ContractConditions("MainCondition")'), + ('26', 'ListVDE', 'contract ListVDE { + data { + VDEName string + } + + conditions { + + } + + action { + GetVDEList($VDEName) + } + }', 'ContractConditions("MainCondition")'), + ('27', 'RunVDE', 'contract RunVDE { + data { + VDEName string + } + + conditions { + } + + action { + StartVDE($VDEName) + } }', 'ContractConditions("MainCondition")');` diff --git a/packages/smart/smart.go b/packages/smart/smart.go index 41c61e610..332ec592b 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -885,6 +885,7 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { return retError(ErrEmptyPublicKey) } sc.PublicKeys = append(sc.PublicKeys, public) + var CheckSignResult bool CheckSignResult, err = utils.CheckSign(sc.PublicKeys, sc.TxData[`forsign`].(string), sc.TxSmart.BinSignatures, false) if err != nil { From 13d18ffb53d70c8a70be7b9e81e38429817005d0 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Tue, 15 May 2018 12:05:42 +0300 Subject: [PATCH 021/169] temp commit --- packages/api/login.go | 37 +++++++++----------- packages/migration/vde/vde_data_contracts.go | 3 +- packages/migration/vde/vde_data_tables.go | 10 +++++- packages/smart/funcs.go | 3 -- 4 files changed, 28 insertions(+), 25 deletions(-) diff --git a/packages/api/login.go b/packages/api/login.go index 9e0f9a07e..7882de84e 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -114,6 +114,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En } } else { pubkey = data.params[`pubkey`].([]byte) + fmt.Println(string(pubkey)) if len(pubkey) == 0 { logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("public key is empty") return errorAPI(w, `E_EMPTYPUBLIC`, http.StatusBadRequest) @@ -126,21 +127,16 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En return err } + pubkey = data.params[`pubkey`].([]byte) hexPubKey := hex.EncodeToString(pubkey) - params := make([]byte, 0) - params = append(append(params, converter.EncodeLength(int64(len(hexPubKey)))...), hexPubKey...) + params := converter.EncodeLength(int64(len(hexPubKey))) + params = append(params, hexPubKey...) 
contract := smart.GetContract("NewUser", 1) - info := contract.Block.Info.(*script.ContractInfo) - - // scHeader, err := getHeader("NewUser", data) - if err != nil { - return errorAPI(w, "E_EMPTYOBJECT", http.StatusBadRequest) - } sc := tx.SmartContract{ Header: tx.Header{ - Type: int(info.ID), + Type: int(contract.Block.Info.(*script.ContractInfo).ID), Time: time.Now().Unix(), EcosystemID: 1, KeyID: conf.Config.KeyID, @@ -154,34 +150,34 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En if conf.Config.IsSupportingVDE() { signPrms := []string{sc.ForSign()} - signPrms = append(signPrms, string(hexPubKey)) - signature, err := crypto.Sign( - NodePrivateKey, - strings.Join(signPrms, ","), - ) + signPrms = append(signPrms, hexPubKey) + signData := strings.Join(signPrms, ",") + signature, err := crypto.Sign(NodePrivateKey, signData) if err != nil { log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("signing by node private key") return err } + sc.BinSignatures = converter.EncodeLengthPlusData(signature) + + if sc.PublicKey, err = hex.DecodeString(NodePublicKey); err != nil { + log.WithFields(log.Fields{"type": consts.ConversionError, "error": err}).Error("decoding public key from hex") + return err + } + serializedContract, err := msgpack.Marshal(sc) if err != nil { logger.WithFields(log.Fields{"type": consts.MarshallingError, "error": err}).Error("marshalling smart contract to msgpack") return errorAPI(w, err, http.StatusInternalServerError) } - // signature := data.params[`signature`].([]byte) - // if len(signature) == 0 { - // log.WithFields(log.Fields{"type": consts.EmptyObject, "params": data.params}).Error("signature is empty") - // } - fmt.Println(len(signature)) ret, err := VDEContract(serializedContract, data) if err != nil { return errorAPI(w, err, http.StatusInternalServerError) } data.result = ret } else { - err = tx.BuildTransaction(sc, NodePrivateKey, NodePublicKey, string(hexPubKey)) + err = tx.BuildTransaction(sc, NodePrivateKey, NodePublicKey, hexPubKey) if err != nil { log.WithFields(log.Fields{"type": consts.ContractError}).Error("Executing contract") } @@ -216,6 +212,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En } } + fmt.Println(string(pubkey)) verify, err := crypto.CheckSign(pubkey, nonceSalt+msg, data.params[`signature`].([]byte)) if err != nil { logger.WithFields(log.Fields{"type": consts.CryptoError, "pubkey": pubkey, "msg": msg, "signature": string(data.params["signature"].([]byte))}).Error("checking signature") diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index 755e626c7..c26fa16d6 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -804,7 +804,8 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c } } action { - DBInsert("keys", "id, pub", $newId, $NewPubKey) + DBInsert("keys", "id", $newId) + SetPubKey($newId, StringToBytes($NewPubkey)) } }', 'ContractConditions("MainCondition")'), ('25', 'NewVDE', 'contract NewVDE { diff --git a/packages/migration/vde/vde_data_tables.go b/packages/migration/vde/vde_data_tables.go index 4223e825a..955514d55 100644 --- a/packages/migration/vde/vde_data_tables.go +++ b/packages/migration/vde/vde_data_tables.go @@ -64,5 +64,13 @@ INSERT INTO "%[1]d_tables" ("id", "name", "permissions","columns", "conditions") "data": "ContractConditions(\"MainCondition\")", "hash": 
"ContractConditions(\"MainCondition\")", "mime_type": "ContractConditions(\"MainCondition\")"}', - 'ContractConditions("MainCondition")'); + 'ContractConditions("MainCondition")'), + ('9', 'keys', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"pub": "ContractConditions(\"MainCondition\")", + "multi": "ContractConditions(\"MainCondition\")", + "deleted": "ContractConditions(\"MainCondition\")", + "blocked": "ContractConditions(\"MainCondition\")"}', + 'ContractConditions("MainCondition")'); ` diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index f2fe15345..7936f7c40 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -274,9 +274,6 @@ func GetTableName(sc *SmartContract, tblname string, ecosystem int64) string { return strings.ToLower(tblname[1:]) } prefix := converter.Int64ToStr(ecosystem) - if sc.VDE { - prefix += `_vde` - } return strings.ToLower(fmt.Sprintf(`%s_%s`, prefix, tblname)) } From 8ab7fcf49ce0bf8e07ddb9af4a2a693d2e836df4 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Tue, 15 May 2018 21:28:09 +0300 Subject: [PATCH 022/169] remove fmt from login api handlers --- packages/api/login.go | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/api/login.go b/packages/api/login.go index 7882de84e..a6548fcfb 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -17,7 +17,6 @@ package api import ( - "fmt" "net/http" "strings" "time" @@ -114,7 +113,6 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En } } else { pubkey = data.params[`pubkey`].([]byte) - fmt.Println(string(pubkey)) if len(pubkey) == 0 { logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("public key is empty") return errorAPI(w, `E_EMPTYPUBLIC`, http.StatusBadRequest) @@ -212,7 +210,6 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En } } - fmt.Println(string(pubkey)) verify, err := crypto.CheckSign(pubkey, nonceSalt+msg, data.params[`signature`].([]byte)) if err != nil { logger.WithFields(log.Fields{"type": consts.CryptoError, "pubkey": pubkey, "msg": msg, "signature": string(data.params["signature"].([]byte))}).Error("checking signature") @@ -245,7 +242,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En Address: address, IsOwner: founder == wallet, IsNode: conf.Config.KeyID == wallet, - IsVDE: model.IsTable(fmt.Sprintf(`%d_vde_tables`, consts.DefaultVDE)), + IsVDE: conf.Config.IsSupportingVDE(), } data.result = &result From cdd9ea42914ba5cbd06fe1174c8495ac57c5ec35 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Wed, 16 May 2018 20:53:47 +0300 Subject: [PATCH 023/169] add drop db function --- packages/model/db.go | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/packages/model/db.go b/packages/model/db.go index b6416c45f..757c87f30 100644 --- a/packages/model/db.go +++ b/packages/model/db.go @@ -394,3 +394,25 @@ func InitDB(cfg conf.DBConfig) error { return nil } + +// DropDatabase kill all process and drop database +func DropDatabase(name string) error { + query := `SELECT + pg_terminate_backend (pg_stat_activity.pid) + FROM + pg_stat_activity + WHERE + pg_stat_activity.datname = ?` + + if err := DBConn.Exec(query, name).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err, "dbname": name}).Error("on kill db process") + return err + } + + if err := 
DBConn.Exec(fmt.Sprintf("DROP DATABASE IF EXISTS %s", name)).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err, "dbname": name}).Error("on drop db") + return err + } + + return nil +} From d624f865748dff49adf97c4a08710ffa6a716a49 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Wed, 16 May 2018 20:54:14 +0300 Subject: [PATCH 024/169] fix manager --- packages/api/vde_test.go | 36 +++++++++++++++++--- packages/migration/vde/vde_data_contracts.go | 25 +++++++++++--- packages/smart/funcs.go | 8 ++--- packages/vdemanager/manager.go | 18 +++++----- 4 files changed, 67 insertions(+), 20 deletions(-) diff --git a/packages/api/vde_test.go b/packages/api/vde_test.go index 57cb90949..026e5a777 100644 --- a/packages/api/vde_test.go +++ b/packages/api/vde_test.go @@ -37,15 +37,43 @@ func TestVDECreate(t *testing.T) { require.NoError(t, keyLogin(1)) form := url.Values{ - "VDEName": {"testvde"}, - "DBUser": {"vdeuser"}, + "VDEName": {"myvde3"}, + "DBUser": {"myvdeuser3"}, "DBPassword": {"vdepassword"}, - "VDEAPIPort": {"8000"}, + "VDEAPIPort": {"8004"}, } - require.NoError(t, postTx("NewVDE", &form)) + assert.NoError(t, postTx("NewVDE", &form)) +} + +func TestVDEList(t *testing.T) { + require.NoError(t, keyLogin(1)) + fmt.Println(postTx("ListVDE", nil)) } +func TestStopVDE(t *testing.T) { + require.NoError(t, keyLogin(1)) + form := url.Values{ + "VDEName": {"myvde3"}, + } + require.NoError(t, postTx("StopVDE", &form)) +} + +func TestRunVDE(t *testing.T) { + require.NoError(t, keyLogin(1)) + form := url.Values{ + "VDEName": {"myvde3"}, + } + require.NoError(t, postTx("RunVDE", &form)) +} + +func TestRemoveVDE(t *testing.T) { + require.NoError(t, keyLogin(1)) + form := url.Values{ + "VDEName": {"myvde3"}, + } + require.NoError(t, postTx("RemoveVDE", &form)) +} func TestVDEParams(t *testing.T) { assert.NoError(t, keyLogin(1)) diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index c26fa16d6..4297f287a 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -824,19 +824,27 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c } }', 'ContractConditions("MainCondition")'), ('26', 'ListVDE', 'contract ListVDE { + data {} + + conditions {} + + action { + GetVDEList() + } + }', 'ContractConditions("MainCondition")'), + ('27', 'RunVDE', 'contract RunVDE { data { VDEName string } conditions { - } action { - GetVDEList($VDEName) + StartVDE($VDEName) } }', 'ContractConditions("MainCondition")'), - ('27', 'RunVDE', 'contract RunVDE { + ('28', 'StopVDE', 'contract StopVDE { data { VDEName string } @@ -845,6 +853,15 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c } action { - StartVDE($VDEName) + StopVDEProcess($VDEName) + } + }', 'ContractConditions("MainCondition")'), + ('29', 'RemoveVDE', 'contract RemoveVDE { + data { + VDEName string + } + conditions {} + action{ + DeleteVDE($VDEName) } }', 'ContractConditions("MainCondition")');` diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 7936f7c40..cadc0bd90 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -252,7 +252,7 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { f["CreateVDE"] = CreateVDE f["DeleteVDE"] = DeleteVDE f["StartVDE"] = StartVDE - f["StopVDE"] = StopVDE + f["StopVDEProcess"] = StopVDEProcess f["GetVDEList"] = GetVDEList vmExtendCost(vm, getCost) vmFuncCallsDB(vm, funcCallsDB) @@ -1468,12 +1468,12 @@ 
func StartVDE(sc *SmartContract, name string) error { return vdemanager.Manager.StartVDE(name) } -// StopVDE stops VDE process -func StopVDE(sc *SmartContract, name string) error { +// StopVDEProcess stops VDE process +func StopVDEProcess(sc *SmartContract, name string) error { return vdemanager.Manager.StopVDE(name) } // GetVDEList returns list VDE process with statuses -func GetVDEList(sc *SmartContract, name string) (map[string]string, error) { +func GetVDEList(sc *SmartContract) (map[string]string, error) { return vdemanager.Manager.ListProcess() } diff --git a/packages/vdemanager/manager.go b/packages/vdemanager/manager.go index 4cca4ac8e..0e628edca 100644 --- a/packages/vdemanager/manager.go +++ b/packages/vdemanager/manager.go @@ -7,6 +7,7 @@ import ( "os" "path" "path/filepath" + "time" "github.com/GenesisKernel/go-genesis/packages/conf" @@ -22,7 +23,8 @@ const ( createRoleTemplate = `CREATE ROLE %s WITH ENCRYPTED PASSWORD '%s' NOSUPERUSER NOCREATEDB NOCREATEROLE INHERIT LOGIN` createDBTemplate = `CREATE DATABASE %s OWNER %s` - dropDBTemplate = `DROP OWNED BY %s CASCADE` + dropDBTemplate = `DROP DATABASE IF EXISTS %s` + dropOwnedTemplate = `DROP OWNED BY %s CASCADE` dropDBRoleTemplate = `DROP ROLE IF EXISTS %s` commandTemplate = `%s start --config=%s` ) @@ -101,7 +103,8 @@ func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) erro procConfEntry := pConf.NewConfigEntry(config.Directory) procConfEntry.Name = "program:" + name - command := fmt.Sprintf("%s --configPath=%s", config.Executable, config.Directory) + command := fmt.Sprintf("%s start --config=%s", config.Executable, filepath.Join(config.Directory, consts.DefaultConfigFile)) + log.Infoln(command) procConfEntry.AddKeyValue("command", command) proc := process.NewProcess("vdeMaster", procConfEntry) @@ -134,10 +137,7 @@ func (mgr *VDEManager) DeleteVDE(name string) error { return errWrongMode } - p := mgr.processes.Find(name) - if p != nil { - p.Stop(true) - } + mgr.StopVDE(name) vdeDir := path.Join(mgr.childConfigsPath, name) vdeConfigPath := filepath.Join(vdeDir, consts.DefaultConfigFile) @@ -147,8 +147,8 @@ func (mgr *VDEManager) DeleteVDE(name string) error { return err } - dropDBquery := fmt.Sprintf(dropDBTemplate, vdeConfig.DB.User) - if err := model.DBConn.Exec(dropDBquery).Error; err != nil { + time.Sleep(1 * time.Second) + if err := model.DropDatabase(vdeConfig.DB.Name); err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("Deleting vde db") return err } @@ -274,6 +274,7 @@ func InitVDEManager() { if item.IsDir() { procDir := path.Join(Manager.childConfigsPath, item.Name()) commandStr := fmt.Sprintf(commandTemplate, Manager.execPath, filepath.Join(procDir, consts.DefaultConfigFile)) + log.Info(commandStr) confEntry := pConf.NewConfigEntry(procDir) confEntry.Name = "program:" + item.Name() confEntry.AddKeyValue("command", commandStr) @@ -283,6 +284,7 @@ func InitVDEManager() { proc := process.NewProcess("vdeMaster", confEntry) Manager.processes.Add(item.Name(), proc) + proc.Start(true) } } } From 298f5e9605f6e834a70fe0cd5aeacad3141329a4 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Fri, 1 Jun 2018 17:23:02 +0300 Subject: [PATCH 025/169] add batch insert --- packages/model/batch.go | 85 ++++++++++++++++++++++++++++++++++++ packages/model/batch_test.go | 44 +++++++++++++++++++ packages/model/queue_tx.go | 18 ++++++++ packages/tcpserver/type1.go | 14 +++--- 4 files changed, 156 insertions(+), 5 deletions(-) create mode 100644 packages/model/batch.go create mode 
100644 packages/model/batch_test.go diff --git a/packages/model/batch.go b/packages/model/batch.go new file mode 100644 index 000000000..3d2719f82 --- /dev/null +++ b/packages/model/batch.go @@ -0,0 +1,85 @@ +package model + +import ( + "fmt" + "strings" +) + +const maxBatchRows = 1000 + +// BatchModel allows bulk insert on BatchModel slice +type BatchModel interface { + TableName() string + FieldValue(fieldName string) (interface{}, error) +} + +// BatchInsert create and execute batch queries from rows splitted by maxBatchRows and fields +func BatchInsert(rows []BatchModel, fields []string) error { + queries, values, err := batchQueue(rows, fields) + if err != nil { + return err + } + + for i := 0; i < len(queries); i++ { + if err := DBConn.Exec(queries[i], values[i]...).Error; err != nil { + return err + } + } + + return nil +} + +func batchQueue(rows []BatchModel, fields []string) (queries []string, values [][]interface{}, err error) { + for len(rows) > 0 { + if len(rows) > maxBatchRows { + q, vals, err := prepareQuery(rows[:maxBatchRows], fields) + if err != nil { + return queries, values, err + } + + queries = append(queries, q) + values = append(values, vals) + rows = rows[maxBatchRows:] + continue + } + + q, vals, err := prepareQuery(rows, fields) + if err != nil { + return queries, values, err + } + + queries = append(queries, q) + values = append(values, vals) + rows = nil + } + + return +} + +func prepareQuery(rows []BatchModel, fields []string) (query string, values []interface{}, err error) { + valueTemplates := make([]string, 0, len(rows)) + valueArgs := make([]interface{}, 0, len(rows)*len(fields)) + query = fmt.Sprintf(`INSERT INTO "%s" (%s) VALUES `, rows[0].TableName(), strings.Join(fields, ",")) + + rowQSlice := make([]string, 0, len(fields)) + for range fields { + rowQSlice = append(rowQSlice, "?") + } + + valueTemplate := fmt.Sprintf("(%s)", strings.Join(rowQSlice, ",")) + + for _, row := range rows { + valueTemplates = append(valueTemplates, valueTemplate) + for _, field := range fields { + val, err := row.FieldValue(field) + if err != nil { + return query, values, err + } + + valueArgs = append(valueArgs, val) + } + } + + query += strings.Join(valueTemplates, ",") + return +} diff --git a/packages/model/batch_test.go b/packages/model/batch_test.go new file mode 100644 index 000000000..ff35bc946 --- /dev/null +++ b/packages/model/batch_test.go @@ -0,0 +1,44 @@ +package model + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" +) + +type TestBatchModel struct { + ID int64 + Name string +} + +func (m TestBatchModel) TableName() string { + return "test_batch" +} + +func (m TestBatchModel) FieldValue(fieldName string) (interface{}, error) { + switch fieldName { + case "id": + return m.ID, nil + case "name": + return m.Name, nil + default: + return nil, fmt.Errorf("Unknown field %s of TestBatchModel", fieldName) + } +} + +func TestPrepareQuery(t *testing.T) { + slice := []BatchModel{ + TestBatchModel{ID: 1, Name: "first"}, + TestBatchModel{ID: 2, Name: "second"}, + } + + query, args, err := prepareQuery(slice, []string{"id", "name"}) + require.NoError(t, err) + + checkQuery := `INSERT INTO "test_batch" (id,name) VALUES (?,?),(?,?)` + checkArgs := []interface{}{1, "first", 2, "second"} + + require.Equal(t, checkQuery, query) + require.Equal(t, checkArgs, args) +} diff --git a/packages/model/queue_tx.go b/packages/model/queue_tx.go index b19c24224..18d57a0a5 100644 --- a/packages/model/queue_tx.go +++ b/packages/model/queue_tx.go @@ -1,5 +1,9 @@ package 
model +import ( + "fmt" +) + // QueueTx is model type QueueTx struct { Hash []byte `gorm:"primary_key;not null"` @@ -76,3 +80,17 @@ func GetAllUnverifiedAndUnusedTransactions() ([]*QueueTx, error) { } return result, nil } + +// FieldValue implementing BatchModel interface +func (qt QueueTx) FieldValue(fieldName string) (interface{}, error) { + switch fieldName { + case "hash": + return qt.Hash, nil + case "data": + return qt.Data, nil + case "from_gate": + return qt.FromGate, nil + default: + return nil, fmt.Errorf("Unknown field '%s' for QueueTx", fieldName) + } +} diff --git a/packages/tcpserver/type1.go b/packages/tcpserver/type1.go index 1265f58f2..615cfcedb 100644 --- a/packages/tcpserver/type1.go +++ b/packages/tcpserver/type1.go @@ -194,7 +194,9 @@ func getUnknownTransactions(buf *bytes.Buffer) ([]byte, error) { func saveNewTransactions(r *DisRequest) error { binaryTxs := r.Data + queue := []model.QueueTx{} log.WithFields(log.Fields{"binaryTxs": binaryTxs}).Debug("trying to save binary txs") + for len(binaryTxs) > 0 { txSize, err := converter.DecodeLength(&binaryTxs) if err != nil { @@ -222,12 +224,14 @@ func saveNewTransactions(r *DisRequest) error { log.WithFields(log.Fields{"type": consts.CryptoError, "error": err, "value": txBinData}).Fatal("cannot hash bindata") } - queueTx := &model.QueueTx{Hash: hash, Data: txBinData, FromGate: 1} + queue = append(queue, &model.QueueTx{Hash: hash, Data: txBinData, FromGate: 1}) err = queueTx.Create() - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("error creating QueueTx") - return err - } } + + if err := model.BatchInsert(queue, []string{"hash", "data", "from_gate"}); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("error creating QueueTx") + return err + } + return nil } From b14aac72a8b8edf97535ee99692579d7f01f33e9 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 4 Jun 2018 12:34:36 +0300 Subject: [PATCH 026/169] test and small fixes --- packages/api/smart_test.go | 49 +++++++++++++++++++++++++++++++++--- packages/model/batch.go | 4 +-- packages/model/batch_test.go | 2 +- packages/tcpserver/type1.go | 4 +-- 4 files changed, 51 insertions(+), 8 deletions(-) diff --git a/packages/api/smart_test.go b/packages/api/smart_test.go index 045c6fc79..3bedfb760 100644 --- a/packages/api/smart_test.go +++ b/packages/api/smart_test.go @@ -419,14 +419,57 @@ func TestUpdateSysParam(t *testing.T) { func TestUpdateFullNodesWithEmptyArray(t *testing.T) { require.NoErrorf(t, keyLogin(1), "on login") - byteNodes := `[]` - // byteNodes += `{"tcp_address":"127.0.0.1:7080", "api_address":"https://127.0.0.1:7081", "key_id":"5462687003324713865", "public_key":"4ea2433951ca21e6817426675874b2a6d98e5051c1100eddefa1847b0388e4834facf9abf427c46e2bc6cd5e3277fba533d03db553e499eb368194b3f1e514d4"}]` + byteNodes := `[` + byteNodes += `{"tcp_address":"127.0.0.1:7078", "api_address":"https://127.0.0.1:7079", "key_id":"-4466900793776865315", "public_key":"ca901a97e84d76f8d46e2053028f709074b3e60d3e2e33495840586567a0c961820d789592666b67b05c6ae120d5bd83d4388b2f1218638d8226d40ced0bb208"},` + byteNodes += `{"tcp_address":"127.0.0.1:7080", "api_address":"https://127.0.0.1:7081", "key_id":"542353610328569127", "public_key":"a8ada71764fd2f0c9fa1d2986455288f11f0f3931492d27dc62862fdff9c97c38923ef46679488ad1cd525342d4d974621db58f809be6f8d1c19fdab50abc06b"},` + byteNodes += `{"tcp_address":"127.0.0.1:7082", "api_address":"https://127.0.0.1:7083", "key_id":"5972241339967729614", 
"public_key":"de1b74d36ae39422f2478cba591f4d14eb017306f6ffdc3b577cc52ee50edb8fe7c7b2eb191a24c8ddfc567cef32152bab17de698ed7b3f2ab75f3bcc8b9b372"}` + byteNodes += `]` form := &url.Values{ "Name": {"full_nodes"}, "Value": {string(byteNodes)}, } - require.EqualError(t, postTx(`UpdateSysParam`, form), `{"type":"panic","error":"Invalid value"}`) + require.NoError(t, postTx(`UpdateSysParam`, form)) +} + +func TestHelper_InsertNodeKey(t *testing.T) { + + if err := keyLogin(1); err != nil { + t.Error(err) + return + } + + form := url.Values{ + `Value`: {`contract InsertNodeKey { + data { + KeyID string + PubKey string + } + conditions {} + action { + DBInsert("keys", "id,pub,amount", $KeyID, $PubKey, "100000000000000000000") + } + }`}, + `ApplicationId`: {`1`}, + `Conditions`: {`true`}, + } + + require.NoError(t, postTx(`NewContract`, &form)) + + forms := []url.Values{ + url.Values{ + `KeyID`: {"542353610328569127"}, + `PubKey`: {"be78f54bcf6bb7b49b7ea00790b18b40dd3f5e231ffc764f1c32d3f5a82ab322aee157931bbfca733bac83255002f5ded418f911b959b77a937f0d5d07de74f8"}, + }, + url.Values{ + `KeyID`: {"5972241339967729614"}, + `PubKey`: {"7b11a9ee4f509903118d5b965a819b778c83a21a52a033e5768d697a70a61a1bad270465f25d7f70683e977be93a9252e762488fc53808a90220d363d0a38eb6"}, + }, + } + + for _, frm := range forms { + require.NoError(t, postTx(`InsertNodeKey`, &frm)) + } } func TestValidateConditions(t *testing.T) { diff --git a/packages/model/batch.go b/packages/model/batch.go index 3d2719f82..f36c4d68d 100644 --- a/packages/model/batch.go +++ b/packages/model/batch.go @@ -58,7 +58,7 @@ func batchQueue(rows []BatchModel, fields []string) (queries []string, values [] func prepareQuery(rows []BatchModel, fields []string) (query string, values []interface{}, err error) { valueTemplates := make([]string, 0, len(rows)) - valueArgs := make([]interface{}, 0, len(rows)*len(fields)) + values = make([]interface{}, 0, len(rows)*len(fields)) query = fmt.Sprintf(`INSERT INTO "%s" (%s) VALUES `, rows[0].TableName(), strings.Join(fields, ",")) rowQSlice := make([]string, 0, len(fields)) @@ -76,7 +76,7 @@ func prepareQuery(rows []BatchModel, fields []string) (query string, values []in return query, values, err } - valueArgs = append(valueArgs, val) + values = append(values, val) } } diff --git a/packages/model/batch_test.go b/packages/model/batch_test.go index ff35bc946..2b260c2f5 100644 --- a/packages/model/batch_test.go +++ b/packages/model/batch_test.go @@ -37,7 +37,7 @@ func TestPrepareQuery(t *testing.T) { require.NoError(t, err) checkQuery := `INSERT INTO "test_batch" (id,name) VALUES (?,?),(?,?)` - checkArgs := []interface{}{1, "first", 2, "second"} + checkArgs := []interface{}{int64(1), "first", int64(2), "second"} require.Equal(t, checkQuery, query) require.Equal(t, checkArgs, args) diff --git a/packages/tcpserver/type1.go b/packages/tcpserver/type1.go index 615cfcedb..5cbb48c17 100644 --- a/packages/tcpserver/type1.go +++ b/packages/tcpserver/type1.go @@ -194,7 +194,7 @@ func getUnknownTransactions(buf *bytes.Buffer) ([]byte, error) { func saveNewTransactions(r *DisRequest) error { binaryTxs := r.Data - queue := []model.QueueTx{} + queue := []model.BatchModel{} log.WithFields(log.Fields{"binaryTxs": binaryTxs}).Debug("trying to save binary txs") for len(binaryTxs) > 0 { @@ -225,7 +225,7 @@ func saveNewTransactions(r *DisRequest) error { } queue = append(queue, &model.QueueTx{Hash: hash, Data: txBinData, FromGate: 1}) - err = queueTx.Create() + // err = queueTx.Create() } if err := model.BatchInsert(queue, []string{"hash", 
"data", "from_gate"}); err != nil { From fa390771053c8cd3cd22f65850b7ff249f51b318 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Fri, 1 Jun 2018 17:23:02 +0300 Subject: [PATCH 027/169] add batch insert --- packages/model/batch.go | 85 ++++++++++++++++++++++++++++++++++++ packages/model/batch_test.go | 44 +++++++++++++++++++ packages/model/queue_tx.go | 18 ++++++++ packages/tcpserver/type1.go | 14 +++--- 4 files changed, 156 insertions(+), 5 deletions(-) create mode 100644 packages/model/batch.go create mode 100644 packages/model/batch_test.go diff --git a/packages/model/batch.go b/packages/model/batch.go new file mode 100644 index 000000000..3d2719f82 --- /dev/null +++ b/packages/model/batch.go @@ -0,0 +1,85 @@ +package model + +import ( + "fmt" + "strings" +) + +const maxBatchRows = 1000 + +// BatchModel allows bulk insert on BatchModel slice +type BatchModel interface { + TableName() string + FieldValue(fieldName string) (interface{}, error) +} + +// BatchInsert create and execute batch queries from rows splitted by maxBatchRows and fields +func BatchInsert(rows []BatchModel, fields []string) error { + queries, values, err := batchQueue(rows, fields) + if err != nil { + return err + } + + for i := 0; i < len(queries); i++ { + if err := DBConn.Exec(queries[i], values[i]...).Error; err != nil { + return err + } + } + + return nil +} + +func batchQueue(rows []BatchModel, fields []string) (queries []string, values [][]interface{}, err error) { + for len(rows) > 0 { + if len(rows) > maxBatchRows { + q, vals, err := prepareQuery(rows[:maxBatchRows], fields) + if err != nil { + return queries, values, err + } + + queries = append(queries, q) + values = append(values, vals) + rows = rows[maxBatchRows:] + continue + } + + q, vals, err := prepareQuery(rows, fields) + if err != nil { + return queries, values, err + } + + queries = append(queries, q) + values = append(values, vals) + rows = nil + } + + return +} + +func prepareQuery(rows []BatchModel, fields []string) (query string, values []interface{}, err error) { + valueTemplates := make([]string, 0, len(rows)) + valueArgs := make([]interface{}, 0, len(rows)*len(fields)) + query = fmt.Sprintf(`INSERT INTO "%s" (%s) VALUES `, rows[0].TableName(), strings.Join(fields, ",")) + + rowQSlice := make([]string, 0, len(fields)) + for range fields { + rowQSlice = append(rowQSlice, "?") + } + + valueTemplate := fmt.Sprintf("(%s)", strings.Join(rowQSlice, ",")) + + for _, row := range rows { + valueTemplates = append(valueTemplates, valueTemplate) + for _, field := range fields { + val, err := row.FieldValue(field) + if err != nil { + return query, values, err + } + + valueArgs = append(valueArgs, val) + } + } + + query += strings.Join(valueTemplates, ",") + return +} diff --git a/packages/model/batch_test.go b/packages/model/batch_test.go new file mode 100644 index 000000000..ff35bc946 --- /dev/null +++ b/packages/model/batch_test.go @@ -0,0 +1,44 @@ +package model + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" +) + +type TestBatchModel struct { + ID int64 + Name string +} + +func (m TestBatchModel) TableName() string { + return "test_batch" +} + +func (m TestBatchModel) FieldValue(fieldName string) (interface{}, error) { + switch fieldName { + case "id": + return m.ID, nil + case "name": + return m.Name, nil + default: + return nil, fmt.Errorf("Unknown field %s of TestBatchModel", fieldName) + } +} + +func TestPrepareQuery(t *testing.T) { + slice := []BatchModel{ + TestBatchModel{ID: 1, Name: "first"}, + TestBatchModel{ID: 
2, Name: "second"}, + } + + query, args, err := prepareQuery(slice, []string{"id", "name"}) + require.NoError(t, err) + + checkQuery := `INSERT INTO "test_batch" (id,name) VALUES (?,?),(?,?)` + checkArgs := []interface{}{1, "first", 2, "second"} + + require.Equal(t, checkQuery, query) + require.Equal(t, checkArgs, args) +} diff --git a/packages/model/queue_tx.go b/packages/model/queue_tx.go index b19c24224..18d57a0a5 100644 --- a/packages/model/queue_tx.go +++ b/packages/model/queue_tx.go @@ -1,5 +1,9 @@ package model +import ( + "fmt" +) + // QueueTx is model type QueueTx struct { Hash []byte `gorm:"primary_key;not null"` @@ -76,3 +80,17 @@ func GetAllUnverifiedAndUnusedTransactions() ([]*QueueTx, error) { } return result, nil } + +// FieldValue implementing BatchModel interface +func (qt QueueTx) FieldValue(fieldName string) (interface{}, error) { + switch fieldName { + case "hash": + return qt.Hash, nil + case "data": + return qt.Data, nil + case "from_gate": + return qt.FromGate, nil + default: + return nil, fmt.Errorf("Unknown field '%s' for QueueTx", fieldName) + } +} diff --git a/packages/tcpserver/type1.go b/packages/tcpserver/type1.go index 1265f58f2..615cfcedb 100644 --- a/packages/tcpserver/type1.go +++ b/packages/tcpserver/type1.go @@ -194,7 +194,9 @@ func getUnknownTransactions(buf *bytes.Buffer) ([]byte, error) { func saveNewTransactions(r *DisRequest) error { binaryTxs := r.Data + queue := []model.QueueTx{} log.WithFields(log.Fields{"binaryTxs": binaryTxs}).Debug("trying to save binary txs") + for len(binaryTxs) > 0 { txSize, err := converter.DecodeLength(&binaryTxs) if err != nil { @@ -222,12 +224,14 @@ func saveNewTransactions(r *DisRequest) error { log.WithFields(log.Fields{"type": consts.CryptoError, "error": err, "value": txBinData}).Fatal("cannot hash bindata") } - queueTx := &model.QueueTx{Hash: hash, Data: txBinData, FromGate: 1} + queue = append(queue, &model.QueueTx{Hash: hash, Data: txBinData, FromGate: 1}) err = queueTx.Create() - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("error creating QueueTx") - return err - } } + + if err := model.BatchInsert(queue, []string{"hash", "data", "from_gate"}); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("error creating QueueTx") + return err + } + return nil } From ae2d0ea4475dd48b386eceec119abc632295f901 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 4 Jun 2018 12:34:36 +0300 Subject: [PATCH 028/169] test and small fixes --- packages/api/smart_test.go | 49 +++++++++++++++++++++++++++++++++--- packages/model/batch.go | 4 +-- packages/model/batch_test.go | 2 +- packages/tcpserver/type1.go | 4 +-- 4 files changed, 51 insertions(+), 8 deletions(-) diff --git a/packages/api/smart_test.go b/packages/api/smart_test.go index 045c6fc79..3bedfb760 100644 --- a/packages/api/smart_test.go +++ b/packages/api/smart_test.go @@ -419,14 +419,57 @@ func TestUpdateSysParam(t *testing.T) { func TestUpdateFullNodesWithEmptyArray(t *testing.T) { require.NoErrorf(t, keyLogin(1), "on login") - byteNodes := `[]` - // byteNodes += `{"tcp_address":"127.0.0.1:7080", "api_address":"https://127.0.0.1:7081", "key_id":"5462687003324713865", "public_key":"4ea2433951ca21e6817426675874b2a6d98e5051c1100eddefa1847b0388e4834facf9abf427c46e2bc6cd5e3277fba533d03db553e499eb368194b3f1e514d4"}]` + byteNodes := `[` + byteNodes += `{"tcp_address":"127.0.0.1:7078", "api_address":"https://127.0.0.1:7079", "key_id":"-4466900793776865315", 
"public_key":"ca901a97e84d76f8d46e2053028f709074b3e60d3e2e33495840586567a0c961820d789592666b67b05c6ae120d5bd83d4388b2f1218638d8226d40ced0bb208"},` + byteNodes += `{"tcp_address":"127.0.0.1:7080", "api_address":"https://127.0.0.1:7081", "key_id":"542353610328569127", "public_key":"a8ada71764fd2f0c9fa1d2986455288f11f0f3931492d27dc62862fdff9c97c38923ef46679488ad1cd525342d4d974621db58f809be6f8d1c19fdab50abc06b"},` + byteNodes += `{"tcp_address":"127.0.0.1:7082", "api_address":"https://127.0.0.1:7083", "key_id":"5972241339967729614", "public_key":"de1b74d36ae39422f2478cba591f4d14eb017306f6ffdc3b577cc52ee50edb8fe7c7b2eb191a24c8ddfc567cef32152bab17de698ed7b3f2ab75f3bcc8b9b372"}` + byteNodes += `]` form := &url.Values{ "Name": {"full_nodes"}, "Value": {string(byteNodes)}, } - require.EqualError(t, postTx(`UpdateSysParam`, form), `{"type":"panic","error":"Invalid value"}`) + require.NoError(t, postTx(`UpdateSysParam`, form)) +} + +func TestHelper_InsertNodeKey(t *testing.T) { + + if err := keyLogin(1); err != nil { + t.Error(err) + return + } + + form := url.Values{ + `Value`: {`contract InsertNodeKey { + data { + KeyID string + PubKey string + } + conditions {} + action { + DBInsert("keys", "id,pub,amount", $KeyID, $PubKey, "100000000000000000000") + } + }`}, + `ApplicationId`: {`1`}, + `Conditions`: {`true`}, + } + + require.NoError(t, postTx(`NewContract`, &form)) + + forms := []url.Values{ + url.Values{ + `KeyID`: {"542353610328569127"}, + `PubKey`: {"be78f54bcf6bb7b49b7ea00790b18b40dd3f5e231ffc764f1c32d3f5a82ab322aee157931bbfca733bac83255002f5ded418f911b959b77a937f0d5d07de74f8"}, + }, + url.Values{ + `KeyID`: {"5972241339967729614"}, + `PubKey`: {"7b11a9ee4f509903118d5b965a819b778c83a21a52a033e5768d697a70a61a1bad270465f25d7f70683e977be93a9252e762488fc53808a90220d363d0a38eb6"}, + }, + } + + for _, frm := range forms { + require.NoError(t, postTx(`InsertNodeKey`, &frm)) + } } func TestValidateConditions(t *testing.T) { diff --git a/packages/model/batch.go b/packages/model/batch.go index 3d2719f82..f36c4d68d 100644 --- a/packages/model/batch.go +++ b/packages/model/batch.go @@ -58,7 +58,7 @@ func batchQueue(rows []BatchModel, fields []string) (queries []string, values [] func prepareQuery(rows []BatchModel, fields []string) (query string, values []interface{}, err error) { valueTemplates := make([]string, 0, len(rows)) - valueArgs := make([]interface{}, 0, len(rows)*len(fields)) + values = make([]interface{}, 0, len(rows)*len(fields)) query = fmt.Sprintf(`INSERT INTO "%s" (%s) VALUES `, rows[0].TableName(), strings.Join(fields, ",")) rowQSlice := make([]string, 0, len(fields)) @@ -76,7 +76,7 @@ func prepareQuery(rows []BatchModel, fields []string) (query string, values []in return query, values, err } - valueArgs = append(valueArgs, val) + values = append(values, val) } } diff --git a/packages/model/batch_test.go b/packages/model/batch_test.go index ff35bc946..2b260c2f5 100644 --- a/packages/model/batch_test.go +++ b/packages/model/batch_test.go @@ -37,7 +37,7 @@ func TestPrepareQuery(t *testing.T) { require.NoError(t, err) checkQuery := `INSERT INTO "test_batch" (id,name) VALUES (?,?),(?,?)` - checkArgs := []interface{}{1, "first", 2, "second"} + checkArgs := []interface{}{int64(1), "first", int64(2), "second"} require.Equal(t, checkQuery, query) require.Equal(t, checkArgs, args) diff --git a/packages/tcpserver/type1.go b/packages/tcpserver/type1.go index 615cfcedb..5cbb48c17 100644 --- a/packages/tcpserver/type1.go +++ b/packages/tcpserver/type1.go @@ -194,7 +194,7 @@ func 
getUnknownTransactions(buf *bytes.Buffer) ([]byte, error) { func saveNewTransactions(r *DisRequest) error { binaryTxs := r.Data - queue := []model.QueueTx{} + queue := []model.BatchModel{} log.WithFields(log.Fields{"binaryTxs": binaryTxs}).Debug("trying to save binary txs") for len(binaryTxs) > 0 { @@ -225,7 +225,7 @@ func saveNewTransactions(r *DisRequest) error { } queue = append(queue, &model.QueueTx{Hash: hash, Data: txBinData, FromGate: 1}) - err = queueTx.Create() + // err = queueTx.Create() } if err := model.BatchInsert(queue, []string{"hash", "data", "from_gate"}); err != nil { From b6c72079c84111953bb39e153224f58ef1028c5f Mon Sep 17 00:00:00 2001 From: Roman Potekhin Date: Tue, 5 Jun 2018 09:22:31 +0300 Subject: [PATCH 029/169] Move rollback to separate package --- cmd/rollback.go | 5 +- packages/daemons/blocks_collection.go | 190 ++++++++++++++- packages/parser/common.go | 2 +- packages/parser/common_get_blocks.go | 217 ------------------ packages/parser/common_parse_data_full.go | 63 +---- packages/parser/first_block.go | 2 +- .../block.go} | 74 ++---- .../rollback.go} | 29 ++- .../transaction.go} | 34 ++- packages/utils/utils.go | 50 ++++ 10 files changed, 302 insertions(+), 364 deletions(-) delete mode 100644 packages/parser/common_get_blocks.go rename packages/{parser/common_parse_data_rollback.go => rollback/block.go} (67%) rename packages/{parser/common_rollback_to_block_id.go => rollback/rollback.go} (82%) rename packages/{parser/common_auto_rollback.go => rollback/transaction.go} (73%) diff --git a/cmd/rollback.go b/cmd/rollback.go index 364c4c261..7be9a3225 100644 --- a/cmd/rollback.go +++ b/cmd/rollback.go @@ -4,7 +4,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/conf/syspar" "github.com/GenesisKernel/go-genesis/packages/model" - "github.com/GenesisKernel/go-genesis/packages/parser" + "github.com/GenesisKernel/go-genesis/packages/rollback" "github.com/GenesisKernel/go-genesis/packages/smart" "github.com/GenesisKernel/go-genesis/packages/utils" log "github.com/sirupsen/logrus" @@ -39,8 +39,7 @@ var rollbackCmd = &cobra.Command{ log.WithError(err).Fatal("loading contracts") return } - parser := new(parser.Parser) - err := parser.RollbackToBlockID(blockID) + err := rollback.ToBlockID(blockID, nil, log.WithFields(log.Fields{})) if err != nil { log.WithError(err).Fatal("rollback to block id") return diff --git a/packages/daemons/blocks_collection.go b/packages/daemons/blocks_collection.go index 17df76388..2f27862fb 100644 --- a/packages/daemons/blocks_collection.go +++ b/packages/daemons/blocks_collection.go @@ -26,8 +26,11 @@ import ( "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/conf/syspar" "github.com/GenesisKernel/go-genesis/packages/consts" + "github.com/GenesisKernel/go-genesis/packages/converter" + "github.com/GenesisKernel/go-genesis/packages/crypto" "github.com/GenesisKernel/go-genesis/packages/model" "github.com/GenesisKernel/go-genesis/packages/parser" + "github.com/GenesisKernel/go-genesis/packages/rollback" "github.com/GenesisKernel/go-genesis/packages/service" "github.com/GenesisKernel/go-genesis/packages/tcpserver" "github.com/GenesisKernel/go-genesis/packages/utils" @@ -162,7 +165,7 @@ func UpdateChain(ctx context.Context, d *daemon, host string, maxBlockID int64) if !hashMatched { //it should be fork, replace our previous blocks to ones from the host - err := parser.GetBlocks(block.Header.BlockID-1, host) + err := 
GetBlocks(block.Header.BlockID-1, host) if err != nil { d.logger.WithFields(log.Fields{"error": err, "type": consts.ParserError}).Error("processing block") banNode(host, block, err) @@ -299,3 +302,188 @@ func filterBannedHosts(hosts []string) ([]string, error) { } return goodHosts, nil } + +// GetBlocks is returning blocks +func GetBlocks(blockID int64, host string) error { + blocks, err := getBlocks(blockID, host) + if err != nil { + return err + } + + // mark all transaction as unverified + _, err = model.MarkVerifiedAndNotUsedTransactionsUnverified() + if err != nil { + log.WithFields(log.Fields{ + "error": err, + "type": consts.DBError, + }).Error("marking verified and not used transactions unverified") + return utils.ErrInfo(err) + } + + // get starting blockID from slice of blocks + if len(blocks) > 0 { + blockID = blocks[len(blocks)-1].Header.BlockID + } + + // we have the slice of blocks for applying + // first of all we should rollback old blocks + block := &model.Block{} + myRollbackBlocks, err := block.GetBlocksFrom(blockID-1, "desc", 0) + if err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.DBError}).Error("getting rollback blocks from blockID") + return utils.ErrInfo(err) + } + for _, block := range myRollbackBlocks { + err := rollback.RollbackBlock(block.Data, false) + if err != nil { + return utils.ErrInfo(err) + } + } + + return processBlocks(blocks) +} + +func getBlocks(blockID int64, host string) ([]*parser.Block, error) { + rollback := syspar.GetRbBlocks1() + + badBlocks := make(map[int64]string) + + blocks := make([]*parser.Block, 0) + var count int64 + + // load the block bodies from the host + blocksCh, err := utils.GetBlocksBody(host, blockID, tcpserver.BlocksPerRequest, consts.DATA_TYPE_BLOCK_BODY, true) + if err != nil { + return nil, utils.ErrInfo(err) + } + + for binaryBlock := range blocksCh { + if blockID < 2 { + break + } + + // if the limit of blocks received from the node was exaggerated + if count > int64(rollback) { + break + } + + block, err := parser.ProcessBlockWherePrevFromBlockchainTable(binaryBlock, true) + if err != nil { + return nil, utils.ErrInfo(err) + } + + if badBlocks[block.Header.BlockID] == string(converter.BinToHex(block.Header.Sign)) { + log.WithFields(log.Fields{"block_id": block.Header.BlockID, "type": consts.InvalidObject}).Error("block is bad") + return nil, utils.ErrInfo(errors.New("bad block")) + } + if block.Header.BlockID != blockID { + log.WithFields(log.Fields{"header_block_id": block.Header.BlockID, "block_id": blockID, "type": consts.InvalidObject}).Error("block ids does not match") + return nil, utils.ErrInfo(errors.New("bad block_data['block_id']")) + } + + // TODO: add checking for MAX_BLOCK_SIZE + + // the public key of the one who has generated this block + nodePublicKey, err := syspar.GetNodePublicKeyByPosition(block.Header.NodePosition) + if err != nil { + log.WithFields(log.Fields{"header_block_id": block.Header.BlockID, "block_id": blockID, "type": consts.InvalidObject}).Error("block ids does not match") + return nil, utils.ErrInfo(err) + } + + // SIGN from 128 bytes to 512 bytes. 
Signature of TYPE, BLOCK_ID, PREV_BLOCK_HASH, TIME, WALLET_ID, state_id, MRKL_ROOT + forSign := fmt.Sprintf("0,%v,%x,%v,%v,%v,%v,%s", + block.Header.BlockID, block.PrevHeader.Hash, block.Header.Time, + block.Header.EcosystemID, block.Header.KeyID, block.Header.NodePosition, + block.MrklRoot, + ) + + // save the block + blocks = append(blocks, block) + blockID-- + count++ + + // check the signature + _, okSignErr := utils.CheckSign([][]byte{nodePublicKey}, forSign, block.Header.Sign, true) + if okSignErr == nil { + break + } + } + + return blocks, nil +} + +func processBlocks(blocks []*parser.Block) error { + dbTransaction, err := model.StartTransaction() + if err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.DBError}).Error("starting transaction") + return utils.ErrInfo(err) + } + + // go through new blocks from the smallest block_id to the largest block_id + prevBlocks := make(map[int64]*parser.Block, 0) + + for i := len(blocks) - 1; i >= 0; i-- { + block := blocks[i] + + if prevBlocks[block.Header.BlockID-1] != nil { + block.PrevHeader.Hash = prevBlocks[block.Header.BlockID-1].Header.Hash + block.PrevHeader.Time = prevBlocks[block.Header.BlockID-1].Header.Time + block.PrevHeader.BlockID = prevBlocks[block.Header.BlockID-1].Header.BlockID + block.PrevHeader.EcosystemID = prevBlocks[block.Header.BlockID-1].Header.EcosystemID + block.PrevHeader.KeyID = prevBlocks[block.Header.BlockID-1].Header.KeyID + block.PrevHeader.NodePosition = prevBlocks[block.Header.BlockID-1].Header.NodePosition + } + + forSha := fmt.Sprintf("%d,%x,%s,%d,%d,%d,%d", block.Header.BlockID, block.PrevHeader.Hash, block.MrklRoot, block.Header.Time, block.Header.EcosystemID, block.Header.KeyID, block.Header.NodePosition) + hash, err := crypto.DoubleHash([]byte(forSha)) + if err != nil { + log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Fatal("double hashing block") + } + block.Header.Hash = hash + + if err := block.CheckBlock(); err != nil { + dbTransaction.Rollback() + return utils.ErrInfo(err) + } + + if err := block.PlayBlock(dbTransaction); err != nil { + dbTransaction.Rollback() + return utils.ErrInfo(err) + } + prevBlocks[block.Header.BlockID] = block + + // for last block we should update block info + if i == 0 { + err := parser.UpdBlockInfo(dbTransaction, block) + if err != nil { + dbTransaction.Rollback() + return utils.ErrInfo(err) + } + } + if block.SysUpdate { + if err := syspar.SysUpdate(dbTransaction); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") + return utils.ErrInfo(err) + } + } + } + + // If all right we can delete old blockchain and write new + for i := len(blocks) - 1; i >= 0; i-- { + block := blocks[i] + // Delete old blocks from blockchain + b := &model.Block{} + err = b.DeleteById(dbTransaction, block.Header.BlockID) + if err != nil { + dbTransaction.Rollback() + return err + } + // insert new blocks into blockchain + if err := parser.InsertIntoBlockchain(dbTransaction, block); err != nil { + dbTransaction.Rollback() + return err + } + } + + return dbTransaction.Commit() +} diff --git a/packages/parser/common.go b/packages/parser/common.go index b5b57bc90..8dc3be71a 100644 --- a/packages/parser/common.go +++ b/packages/parser/common.go @@ -57,7 +57,7 @@ func GetBlockDataFromBlockChain(blockID int64) (*utils.BlockData, error) { return BlockData, utils.ErrInfo(err) } - header, err := ParseBlockHeader(bytes.NewBuffer(block.Data), false) + header, err := 
utils.ParseBlockHeader(bytes.NewBuffer(block.Data), false) if err != nil { return nil, utils.ErrInfo(err) } diff --git a/packages/parser/common_get_blocks.go b/packages/parser/common_get_blocks.go deleted file mode 100644 index 40233afc3..000000000 --- a/packages/parser/common_get_blocks.go +++ /dev/null @@ -1,217 +0,0 @@ -// Copyright 2016 The go-daylight Authors -// This file is part of the go-daylight library. -// -// The go-daylight library is free software: you can redistribute it and/or modify -// it under the terms of the GNU Lesser General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// The go-daylight library is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Lesser General Public License for more details. -// -// You should have received a copy of the GNU Lesser General Public License -// along with the go-daylight library. If not, see . - -package parser - -import ( - "errors" - "fmt" - - log "github.com/sirupsen/logrus" - - "github.com/GenesisKernel/go-genesis/packages/conf/syspar" - "github.com/GenesisKernel/go-genesis/packages/consts" - "github.com/GenesisKernel/go-genesis/packages/converter" - "github.com/GenesisKernel/go-genesis/packages/crypto" - "github.com/GenesisKernel/go-genesis/packages/model" - "github.com/GenesisKernel/go-genesis/packages/tcpserver" - "github.com/GenesisKernel/go-genesis/packages/utils" -) - -// GetBlocks is returning blocks -func GetBlocks(blockID int64, host string) error { - blocks, err := getBlocks(blockID, host) - if err != nil { - return err - } - - // mark all transaction as unverified - _, err = model.MarkVerifiedAndNotUsedTransactionsUnverified() - if err != nil { - log.WithFields(log.Fields{ - "error": err, - "type": consts.DBError, - }).Error("marking verified and not used transactions unverified") - return utils.ErrInfo(err) - } - - // get starting blockID from slice of blocks - if len(blocks) > 0 { - blockID = blocks[len(blocks)-1].Header.BlockID - } - - // we have the slice of blocks for applying - // first of all we should rollback old blocks - block := &model.Block{} - myRollbackBlocks, err := block.GetBlocksFrom(blockID-1, "desc", 0) - if err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.DBError}).Error("getting rollback blocks from blockID") - return utils.ErrInfo(err) - } - for _, block := range myRollbackBlocks { - err := RollbackTxFromBlock(block.Data) - if err != nil { - return utils.ErrInfo(err) - } - } - - return processBlocks(blocks) -} - -func getBlocks(blockID int64, host string) ([]*Block, error) { - rollback := syspar.GetRbBlocks1() - - badBlocks := make(map[int64]string) - - blocks := make([]*Block, 0) - var count int64 - - // load the block bodies from the host - blocksCh, err := utils.GetBlocksBody(host, blockID, tcpserver.BlocksPerRequest, consts.DATA_TYPE_BLOCK_BODY, true) - if err != nil { - return nil, utils.ErrInfo(err) - } - - for binaryBlock := range blocksCh { - if blockID < 2 { - break - } - - // if the limit of blocks received from the node was exaggerated - if count > int64(rollback) { - break - } - - block, err := ProcessBlockWherePrevFromBlockchainTable(binaryBlock, true) - if err != nil { - return nil, utils.ErrInfo(err) - } - - if badBlocks[block.Header.BlockID] == string(converter.BinToHex(block.Header.Sign)) { - 
log.WithFields(log.Fields{"block_id": block.Header.BlockID, "type": consts.InvalidObject}).Error("block is bad") - return nil, utils.ErrInfo(errors.New("bad block")) - } - if block.Header.BlockID != blockID { - log.WithFields(log.Fields{"header_block_id": block.Header.BlockID, "block_id": blockID, "type": consts.InvalidObject}).Error("block ids does not match") - return nil, utils.ErrInfo(errors.New("bad block_data['block_id']")) - } - - // TODO: add checking for MAX_BLOCK_SIZE - - // the public key of the one who has generated this block - nodePublicKey, err := syspar.GetNodePublicKeyByPosition(block.Header.NodePosition) - if err != nil { - log.WithFields(log.Fields{"header_block_id": block.Header.BlockID, "block_id": blockID, "type": consts.InvalidObject}).Error("block ids does not match") - return nil, utils.ErrInfo(err) - } - - // SIGN from 128 bytes to 512 bytes. Signature of TYPE, BLOCK_ID, PREV_BLOCK_HASH, TIME, WALLET_ID, state_id, MRKL_ROOT - forSign := fmt.Sprintf("0,%v,%x,%v,%v,%v,%v,%s", - block.Header.BlockID, block.PrevHeader.Hash, block.Header.Time, - block.Header.EcosystemID, block.Header.KeyID, block.Header.NodePosition, - block.MrklRoot, - ) - - // save the block - blocks = append(blocks, block) - blockID-- - count++ - - // check the signature - _, okSignErr := utils.CheckSign([][]byte{nodePublicKey}, forSign, block.Header.Sign, true) - if okSignErr == nil { - break - } - } - - return blocks, nil -} - -func processBlocks(blocks []*Block) error { - dbTransaction, err := model.StartTransaction() - if err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.DBError}).Error("starting transaction") - return utils.ErrInfo(err) - } - - // go through new blocks from the smallest block_id to the largest block_id - prevBlocks := make(map[int64]*Block, 0) - - for i := len(blocks) - 1; i >= 0; i-- { - block := blocks[i] - - if prevBlocks[block.Header.BlockID-1] != nil { - block.PrevHeader.Hash = prevBlocks[block.Header.BlockID-1].Header.Hash - block.PrevHeader.Time = prevBlocks[block.Header.BlockID-1].Header.Time - block.PrevHeader.BlockID = prevBlocks[block.Header.BlockID-1].Header.BlockID - block.PrevHeader.EcosystemID = prevBlocks[block.Header.BlockID-1].Header.EcosystemID - block.PrevHeader.KeyID = prevBlocks[block.Header.BlockID-1].Header.KeyID - block.PrevHeader.NodePosition = prevBlocks[block.Header.BlockID-1].Header.NodePosition - } - - forSha := fmt.Sprintf("%d,%x,%s,%d,%d,%d,%d", block.Header.BlockID, block.PrevHeader.Hash, block.MrklRoot, block.Header.Time, block.Header.EcosystemID, block.Header.KeyID, block.Header.NodePosition) - hash, err := crypto.DoubleHash([]byte(forSha)) - if err != nil { - log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Fatal("double hashing block") - } - block.Header.Hash = hash - - if err := block.CheckBlock(); err != nil { - dbTransaction.Rollback() - return utils.ErrInfo(err) - } - - if err := block.playBlock(dbTransaction); err != nil { - dbTransaction.Rollback() - return utils.ErrInfo(err) - } - prevBlocks[block.Header.BlockID] = block - - // for last block we should update block info - if i == 0 { - err := UpdBlockInfo(dbTransaction, block) - if err != nil { - dbTransaction.Rollback() - return utils.ErrInfo(err) - } - } - if block.SysUpdate { - if err := syspar.SysUpdate(dbTransaction); err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") - return utils.ErrInfo(err) - } - } - } - - // If all right we can delete old blockchain and write new - for i := 
len(blocks) - 1; i >= 0; i-- { - block := blocks[i] - // Delete old blocks from blockchain - b := &model.Block{} - err = b.DeleteById(dbTransaction, block.Header.BlockID) - if err != nil { - dbTransaction.Rollback() - return err - } - // insert new blocks into blockchain - if err := InsertIntoBlockchain(dbTransaction, block); err != nil { - dbTransaction.Rollback() - return err - } - } - - return dbTransaction.Commit() -} diff --git a/packages/parser/common_parse_data_full.go b/packages/parser/common_parse_data_full.go index 46b67e161..d94108c9e 100644 --- a/packages/parser/common_parse_data_full.go +++ b/packages/parser/common_parse_data_full.go @@ -92,7 +92,7 @@ func (b *Block) PlayBlockSafe() error { return err } - err = b.playBlock(dbTransaction) + err = b.PlayBlock(dbTransaction) if b.GenBlock && b.StopCount > 0 { doneTx := b.Parsers[:b.StopCount] trData := make([][]byte, 0, b.StopCount) @@ -112,7 +112,7 @@ func (b *Block) PlayBlockSafe() error { } isFirstBlock := b.Header.BlockID == 1 - nb, err := parseBlock(bytes.NewBuffer(newBlockData), isFirstBlock) + nb, err := ParseBlock(bytes.NewBuffer(newBlockData), isFirstBlock) if err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("parsing new block") return err @@ -161,7 +161,7 @@ func ProcessBlockWherePrevFromMemory(data []byte) (*Block, error) { return nil, fmt.Errorf("empty buffer") } - block, err := parseBlock(buf, false) + block, err := ParseBlock(buf, false) if err != nil { return nil, err } @@ -186,7 +186,7 @@ func ProcessBlockWherePrevFromBlockchainTable(data []byte, checkSize bool) (*Blo return nil, fmt.Errorf("empty buffer") } - block, err := parseBlock(buf, !checkSize) + block, err := ParseBlock(buf, !checkSize) if err != nil { return nil, err } @@ -199,8 +199,8 @@ func ProcessBlockWherePrevFromBlockchainTable(data []byte, checkSize bool) (*Blo return block, nil } -func parseBlock(blockBuffer *bytes.Buffer, firstBlock bool) (*Block, error) { - header, err := ParseBlockHeader(blockBuffer, !firstBlock) +func ParseBlock(blockBuffer *bytes.Buffer, firstBlock bool) (*Block, error) { + header, err := utils.ParseBlockHeader(blockBuffer, !firstBlock) if err != nil { return nil, err } @@ -263,55 +263,6 @@ func parseBlock(blockBuffer *bytes.Buffer, firstBlock bool) (*Block, error) { }, nil } -// ParseBlockHeader is parses block header -func ParseBlockHeader(binaryBlock *bytes.Buffer, checkMaxSize bool) (utils.BlockData, error) { - var block utils.BlockData - var err error - - if binaryBlock.Len() < 9 { - log.WithFields(log.Fields{"size": binaryBlock.Len(), "type": consts.SizeDoesNotMatch}).Error("binary block size is too small") - return utils.BlockData{}, fmt.Errorf("bad binary block length") - } - - blockVersion := int(converter.BinToDec(binaryBlock.Next(2))) - - if checkMaxSize && int64(binaryBlock.Len()) > syspar.GetMaxBlockSize() { - log.WithFields(log.Fields{"size": binaryBlock.Len(), "max_size": syspar.GetMaxBlockSize(), "type": consts.ParameterExceeded}).Error("binary block size exceeds max block size") - err = fmt.Errorf(`len(binaryBlock) > variables.Int64["max_block_size"] %v > %v`, - binaryBlock.Len(), syspar.GetMaxBlockSize()) - - return utils.BlockData{}, err - } - - block.BlockID = converter.BinToDec(binaryBlock.Next(4)) - block.Time = converter.BinToDec(binaryBlock.Next(4)) - block.Version = blockVersion - block.EcosystemID = converter.BinToDec(binaryBlock.Next(4)) - block.KeyID, err = converter.DecodeLenInt64Buf(binaryBlock) - if err != nil { - log.WithFields(log.Fields{"type": 
consts.UnmarshallingError, "block_id": block.BlockID, "block_time": block.Time, "block_version": block.Version, "error": err}).Error("decoding binary block walletID") - return utils.BlockData{}, err - } - block.NodePosition = converter.BinToDec(binaryBlock.Next(1)) - - if block.BlockID > 1 { - signSize, err := converter.DecodeLengthBuf(binaryBlock) - if err != nil { - log.WithFields(log.Fields{"type": consts.UnmarshallingError, "block_id": block.BlockID, "time": block.Time, "version": block.Version, "error": err}).Error("decoding binary sign size") - return utils.BlockData{}, err - } - if binaryBlock.Len() < signSize { - log.WithFields(log.Fields{"type": consts.UnmarshallingError, "block_id": block.BlockID, "time": block.Time, "version": block.Version, "error": err}).Error("decoding binary sign") - return utils.BlockData{}, fmt.Errorf("bad block format (no sign)") - } - block.Sign = binaryBlock.Next(int(signSize)) - } else { - binaryBlock.Next(1) - } - - return block, nil -} - // ParseTransaction is parsing transaction func ParseTransaction(buffer *bytes.Buffer) (*Parser, error) { if buffer.Len() == 0 { @@ -657,7 +608,7 @@ func playTransaction(p *Parser) (string, error) { return "", nil } -func (b *Block) playBlock(dbTransaction *model.DbTransaction) error { +func (b *Block) PlayBlock(dbTransaction *model.DbTransaction) error { logger := b.GetLogger() if _, err := model.DeleteUsedTransactions(dbTransaction); err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("delete used transactions") diff --git a/packages/parser/first_block.go b/packages/parser/first_block.go index 099d40113..d92be788b 100644 --- a/packages/parser/first_block.go +++ b/packages/parser/first_block.go @@ -156,7 +156,7 @@ func GetDataFromFirstBlock() (data *consts.FirstBlock, ok bool) { return } - pb, err := parseBlock(bytes.NewBuffer(block.Data), true) + pb, err := ParseBlock(bytes.NewBuffer(block.Data), true) if err != nil { log.WithFields(log.Fields{"type": consts.ParserError, "error": err}).Error("parsing data of first block") return diff --git a/packages/parser/common_parse_data_rollback.go b/packages/rollback/block.go similarity index 67% rename from packages/parser/common_parse_data_rollback.go rename to packages/rollback/block.go index f5d40df4f..d574542a4 100644 --- a/packages/parser/common_parse_data_rollback.go +++ b/packages/rollback/block.go @@ -14,7 +14,7 @@ // You should have received a copy of the GNU Lesser General Public License // along with the go-daylight library. If not, see . 
-package parser +package rollback import ( "bytes" @@ -22,21 +22,21 @@ import ( "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/model" + "github.com/GenesisKernel/go-genesis/packages/parser" "github.com/GenesisKernel/go-genesis/packages/smart" - "github.com/GenesisKernel/go-genesis/packages/utils" log "github.com/sirupsen/logrus" ) // BlockRollback is blocking rollback -func BlockRollback(data []byte) error { +func RollbackBlock(data []byte, deleteBlock bool) error { buf := bytes.NewBuffer(data) if buf.Len() == 0 { log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("empty buffer") return fmt.Errorf("empty buffer") } - block, err := parseBlock(buf, false) + block, err := parser.ParseBlock(buf, false) if err != nil { return err } @@ -47,56 +47,28 @@ func BlockRollback(data []byte) error { return err } - err = doBlockRollback(dbTransaction, block) + err = rollbackBlock(dbTransaction, block) if err != nil { dbTransaction.Rollback() return err } - b := &model.Block{} - err = b.DeleteById(dbTransaction, block.Header.BlockID) - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting block by id") - dbTransaction.Rollback() - return err - } - - err = dbTransaction.Commit() - return err -} - -// RollbackTxFromBlock is rollback tx from block -func RollbackTxFromBlock(data []byte) error { - buf := bytes.NewBuffer(data) - if buf.Len() == 0 { - log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("empty buffer") - return fmt.Errorf("empty buffer") - } - - block, err := parseBlock(buf, false) - if err != nil { - return err - } - - dbTransaction, err := model.StartTransaction() - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("starting db transaction") - return err - } - - err = doBlockRollback(dbTransaction, block) - - if err != nil { - dbTransaction.Rollback() - return err + if deleteBlock { + b := &model.Block{} + err = b.DeleteById(dbTransaction, block.Header.BlockID) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting block by id") + dbTransaction.Rollback() + return err + } } err = dbTransaction.Commit() return err } -func doBlockRollback(transaction *model.DbTransaction, block *Block) error { +func rollbackBlock(transaction *model.DbTransaction, block *parser.Block) error { // rollback transactions in reverse order logger := block.GetLogger() for i := len(block.Parsers) - 1; i >= 0; i-- { @@ -106,45 +78,45 @@ func doBlockRollback(transaction *model.DbTransaction, block *Block) error { _, err := model.MarkTransactionUnusedAndUnverified(transaction, p.TxHash) if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("starting transaction") - return utils.ErrInfo(err) + return err } _, err = model.DeleteLogTransactionsByHash(transaction, p.TxHash) if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting log transactions by hash") - return utils.ErrInfo(err) + return err } ts := &model.TransactionStatus{} err = ts.UpdateBlockID(transaction, 0, p.TxHash) if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating block id in transaction status") - return utils.ErrInfo(err) + return err } _, err = model.DeleteQueueTxByHash(transaction, p.TxHash) if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting transacion from queue by hash") - return 
utils.ErrInfo(err) + return err } if p.TxContract != nil { if _, err := p.CallContract(smart.CallInit | smart.CallRollback); err != nil { return err } - if err = p.autoRollback(); err != nil { - return p.ErrInfo(err) + if err = rollbackTransaction(p.TxHash, p.DbTransaction, logger); err != nil { + return err } } else { MethodName := consts.TxTypes[int(p.TxType)] - parser, err := GetParser(p, MethodName) + txParser, err := parser.GetParser(p, MethodName) if err != nil { return p.ErrInfo(err) } - result := parser.Init() + result := txParser.Init() if _, ok := result.(error); ok { return p.ErrInfo(result.(error)) } - result = parser.Rollback() + result = txParser.Rollback() if _, ok := result.(error); ok { return p.ErrInfo(result.(error)) } diff --git a/packages/parser/common_rollback_to_block_id.go b/packages/rollback/rollback.go similarity index 82% rename from packages/parser/common_rollback_to_block_id.go rename to packages/rollback/rollback.go index ac68e1acb..28dc1cbd0 100644 --- a/packages/parser/common_rollback_to_block_id.go +++ b/packages/rollback/rollback.go @@ -14,27 +14,26 @@ // You should have received a copy of the GNU Lesser General Public License // along with the go-daylight library. If not, see . -package parser +package rollback import ( "bytes" - "database/sql" "strconv" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/model" + "github.com/GenesisKernel/go-genesis/packages/utils" log "github.com/sirupsen/logrus" ) -// RollbackToBlockID rollbacks blocks till blockID -func (p *Parser) RollbackToBlockID(blockID int64) error { - logger := p.GetLogger() +// ToBlockID rollbacks blocks till blockID +func ToBlockID(blockID int64, dbTransaction *model.DbTransaction, logger *log.Entry) error { _, err := model.MarkVerifiedAndNotUsedTransactionsUnverified() if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("marking verified and not used transactions unverified") - return p.ErrInfo(err) + return err } limit := 1000 @@ -44,31 +43,31 @@ func (p *Parser) RollbackToBlockID(blockID int64) error { blocks, err := block.GetBlocks(blockID, int32(limit)) if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting blocks") - return p.ErrInfo(err) + return err } if len(blocks) == 0 { break } for _, block := range blocks { // roll back our blocks to the block blockID - err = BlockRollback(block.Data) + err = RollbackBlock(block.Data, true) if err != nil { - return p.ErrInfo(err) + return err } } blocks = blocks[:0] } block := &model.Block{} _, err = block.Get(blockID) - if err != nil && err != sql.ErrNoRows { + if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting block") - return p.ErrInfo(err) + return err } isFirstBlock := blockID == 1 - header, err := ParseBlockHeader(bytes.NewBuffer(block.Data), !isFirstBlock) + header, err := utils.ParseBlockHeader(bytes.NewBuffer(block.Data), !isFirstBlock) if err != nil { - return p.ErrInfo(err) + return err } ib := &model.InfoBlock{ @@ -81,10 +80,10 @@ func (p *Parser) RollbackToBlockID(blockID int64) error { CurrentVersion: strconv.Itoa(header.Version), } - err = ib.Update(p.DbTransaction) + err = ib.Update(dbTransaction) if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating info block") - return p.ErrInfo(err) + return err } return nil diff --git 
a/packages/parser/common_auto_rollback.go b/packages/rollback/transaction.go similarity index 73% rename from packages/parser/common_auto_rollback.go rename to packages/rollback/transaction.go index 5748cfdd0..14babcfaa 100644 --- a/packages/parser/common_auto_rollback.go +++ b/packages/rollback/transaction.go @@ -14,7 +14,7 @@ // You should have received a copy of the GNU Lesser General Public License // along with the go-daylight library. If not, see . -package parser +package rollback import ( "encoding/json" @@ -23,17 +23,15 @@ import ( "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/model" - "github.com/GenesisKernel/go-genesis/packages/utils" log "github.com/sirupsen/logrus" ) -func (p *Parser) restoreUpdatedDBRowToPreviousData(tx map[string]string, where string) error { - logger := p.GetLogger() +func rollbackUpdatedRow(tx map[string]string, where string, dbTransaction *model.DbTransaction, logger *log.Entry) error { var rollbackInfo map[string]string if err := json.Unmarshal([]byte(tx["data"]), &rollbackInfo); err != nil { logger.WithFields(log.Fields{"type": consts.JSONUnmarshallError, "error": err}).Error("unmarshalling rollback.Data from json") - return p.ErrInfo(err) + return err } addSQLUpdate := "" for k, v := range rollbackInfo { @@ -46,47 +44,45 @@ func (p *Parser) restoreUpdatedDBRowToPreviousData(tx map[string]string, where s } } addSQLUpdate = addSQLUpdate[0 : len(addSQLUpdate)-1] - if err := model.Update(p.DbTransaction, tx["table_name"], addSQLUpdate, where); err != nil { + if err := model.Update(dbTransaction, tx["table_name"], addSQLUpdate, where); err != nil { logger.WithFields(log.Fields{"type": consts.JSONUnmarshallError, "error": err, "query": addSQLUpdate}).Error("updating table") - return p.ErrInfo(err) + return err } return nil } -func (p *Parser) deleteInsertedDBRow(tx map[string]string, where string) error { - logger := p.GetLogger() +func rollbackInsertedRow(tx map[string]string, where string, logger *log.Entry) error { if err := model.Delete(tx["table_name"], where); err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting from table") - return p.ErrInfo(err) + return err } return nil } -func (p *Parser) autoRollback() error { - logger := p.GetLogger() +func rollbackTransaction(txHash []byte, dbTransaction *model.DbTransaction, logger *log.Entry) error { rollbackTx := &model.RollbackTx{} - txs, err := rollbackTx.GetRollbackTransactions(p.DbTransaction, p.TxHash) + txs, err := rollbackTx.GetRollbackTransactions(dbTransaction, txHash) if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting rollback transactions") - return utils.ErrInfo(err) + return err } for _, tx := range txs { where := " WHERE id='" + tx["table_id"] + `'` if len(tx["data"]) > 0 { - if err := p.restoreUpdatedDBRowToPreviousData(tx, where); err != nil { + if err := rollbackUpdatedRow(tx, where, dbTransaction, logger); err != nil { return err } } else { - if err := p.deleteInsertedDBRow(tx, where); err != nil { + if err := rollbackInsertedRow(tx, where, logger); err != nil { return err } } } - txForDelete := &model.RollbackTx{TxHash: p.TxHash} - err = txForDelete.DeleteByHash(p.DbTransaction) + txForDelete := &model.RollbackTx{TxHash: txHash} + err = txForDelete.DeleteByHash(dbTransaction) if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting rollback 
transaction by hash") - return p.ErrInfo(err) + return err } return nil } diff --git a/packages/utils/utils.go b/packages/utils/utils.go index ed70d83fe..d7a23611c 100644 --- a/packages/utils/utils.go +++ b/packages/utils/utils.go @@ -17,6 +17,7 @@ package utils import ( + "bytes" "context" "encoding/hex" "fmt" @@ -63,6 +64,55 @@ func (b BlockData) String() string { return fmt.Sprintf("BlockID:%d, Time:%d, NodePosition %d", b.BlockID, b.Time, b.NodePosition) } +// ParseBlockHeader is parses block header +func ParseBlockHeader(binaryBlock *bytes.Buffer, checkMaxSize bool) (BlockData, error) { + var block BlockData + var err error + + if binaryBlock.Len() < 9 { + log.WithFields(log.Fields{"size": binaryBlock.Len(), "type": consts.SizeDoesNotMatch}).Error("binary block size is too small") + return BlockData{}, fmt.Errorf("bad binary block length") + } + + blockVersion := int(converter.BinToDec(binaryBlock.Next(2))) + + if checkMaxSize && int64(binaryBlock.Len()) > syspar.GetMaxBlockSize() { + log.WithFields(log.Fields{"size": binaryBlock.Len(), "max_size": syspar.GetMaxBlockSize(), "type": consts.ParameterExceeded}).Error("binary block size exceeds max block size") + err = fmt.Errorf(`len(binaryBlock) > variables.Int64["max_block_size"] %v > %v`, + binaryBlock.Len(), syspar.GetMaxBlockSize()) + + return BlockData{}, err + } + + block.BlockID = converter.BinToDec(binaryBlock.Next(4)) + block.Time = converter.BinToDec(binaryBlock.Next(4)) + block.Version = blockVersion + block.EcosystemID = converter.BinToDec(binaryBlock.Next(4)) + block.KeyID, err = converter.DecodeLenInt64Buf(binaryBlock) + if err != nil { + log.WithFields(log.Fields{"type": consts.UnmarshallingError, "block_id": block.BlockID, "block_time": block.Time, "block_version": block.Version, "error": err}).Error("decoding binary block walletID") + return BlockData{}, err + } + block.NodePosition = converter.BinToDec(binaryBlock.Next(1)) + + if block.BlockID > 1 { + signSize, err := converter.DecodeLengthBuf(binaryBlock) + if err != nil { + log.WithFields(log.Fields{"type": consts.UnmarshallingError, "block_id": block.BlockID, "time": block.Time, "version": block.Version, "error": err}).Error("decoding binary sign size") + return BlockData{}, err + } + if binaryBlock.Len() < signSize { + log.WithFields(log.Fields{"type": consts.UnmarshallingError, "block_id": block.BlockID, "time": block.Time, "version": block.Version, "error": err}).Error("decoding binary sign") + return BlockData{}, fmt.Errorf("bad block format (no sign)") + } + block.Sign = binaryBlock.Next(int(signSize)) + } else { + binaryBlock.Next(1) + } + + return block, nil +} + var ( // ReturnCh is chan for returns ReturnCh chan string From 85a2f4d6d968ee94728673758519f490a03b40be Mon Sep 17 00:00:00 2001 From: Roman Potekhin Date: Tue, 5 Jun 2018 23:30:06 +0300 Subject: [PATCH 030/169] Move entities to separate files, phase 1 --- packages/parser/block.go | 313 +++++++++++++++++++++ packages/parser/common.go | 161 ----------- packages/parser/common_parse_data_full.go | 327 ---------------------- packages/parser/common_upd_block_info.go | 75 ----- packages/parser/db.go | 219 +++++++++++++++ packages/parser/parser_cache.go | 30 ++ 6 files changed, 562 insertions(+), 563 deletions(-) create mode 100644 packages/parser/block.go delete mode 100644 packages/parser/common_upd_block_info.go create mode 100644 packages/parser/db.go create mode 100644 packages/parser/parser_cache.go diff --git a/packages/parser/block.go b/packages/parser/block.go new file mode 100644 index 
000000000..0dd7d097b --- /dev/null +++ b/packages/parser/block.go @@ -0,0 +1,313 @@ +package parser + +import ( + "bytes" + "fmt" + "time" + + "github.com/GenesisKernel/go-genesis/packages/conf/syspar" + "github.com/GenesisKernel/go-genesis/packages/consts" + "github.com/GenesisKernel/go-genesis/packages/converter" + "github.com/GenesisKernel/go-genesis/packages/model" + "github.com/GenesisKernel/go-genesis/packages/utils" + + log "github.com/sirupsen/logrus" +) + +// Block is storing block data +type Block struct { + Header utils.BlockData + PrevHeader *utils.BlockData + MrklRoot []byte + BinData []byte + Parsers []*Parser + SysUpdate bool + GenBlock bool // it equals true when we are generating a new block + StopCount int // The count of good tx in the block +} + +func (b Block) String() string { + return fmt.Sprintf("header: %s, prevHeader: %s", b.Header, b.PrevHeader) +} + +// GetLogger is returns logger +func (b Block) GetLogger() *log.Entry { + return log.WithFields(log.Fields{"block_id": b.Header.BlockID, "block_time": b.Header.Time, "block_wallet_id": b.Header.KeyID, + "block_state_id": b.Header.EcosystemID, "block_hash": b.Header.Hash, "block_version": b.Header.Version}) +} + +// PlayBlockSafe is inserting block safely +func (b *Block) PlayBlockSafe() error { + logger := b.GetLogger() + dbTransaction, err := model.StartTransaction() + if err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("starting db transaction") + return err + } + + err = b.PlayBlock(dbTransaction) + if b.GenBlock && b.StopCount > 0 { + doneTx := b.Parsers[:b.StopCount] + trData := make([][]byte, 0, b.StopCount) + for _, tr := range doneTx { + trData = append(trData, tr.TxFullData) + } + NodePrivateKey, _, err := utils.GetNodeKeys() + if err != nil || len(NodePrivateKey) < 1 { + log.WithFields(log.Fields{"type": consts.NodePrivateKeyFilename, "error": err}).Error("reading node private key") + return err + } + + newBlockData, err := MarshallBlock(&b.Header, trData, b.PrevHeader.Hash, NodePrivateKey) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("marshalling new block") + return err + } + + isFirstBlock := b.Header.BlockID == 1 + nb, err := ParseBlock(bytes.NewBuffer(newBlockData), isFirstBlock) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("parsing new block") + return err + } + b.BinData = newBlockData + b.Parsers = nb.Parsers + b.MrklRoot = nb.MrklRoot + b.SysUpdate = nb.SysUpdate + err = nil + } else if err != nil { + dbTransaction.Rollback() + return err + } + + if err := UpdBlockInfo(dbTransaction, b); err != nil { + dbTransaction.Rollback() + return err + } + + if err := InsertIntoBlockchain(dbTransaction, b); err != nil { + dbTransaction.Rollback() + return err + } + + dbTransaction.Commit() + if b.SysUpdate { + b.SysUpdate = false + if err = syspar.SysUpdate(nil); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") + return err + } + } + return nil +} + +func (b *Block) readPreviousBlockFromMemory() error { + return nil +} + +func (b *Block) readPreviousBlockFromBlockchainTable() error { + if b.Header.BlockID == 1 { + b.PrevHeader = &utils.BlockData{} + return nil + } + + var err error + b.PrevHeader, err = GetBlockDataFromBlockChain(b.Header.BlockID - 1) + if err != nil { + return utils.ErrInfo(fmt.Errorf("can't get block %d", b.Header.BlockID-1)) + } + return nil +} + +func (b *Block) PlayBlock(dbTransaction 
*model.DbTransaction) error { + logger := b.GetLogger() + if _, err := model.DeleteUsedTransactions(dbTransaction); err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("delete used transactions") + return err + } + limits := NewLimits(b) + for curTx, p := range b.Parsers { + var ( + msg string + err error + ) + p.DbTransaction = dbTransaction + + err = dbTransaction.Savepoint(curTx) + if err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("using savepoint") + return err + } + msg, err = playTransaction(p) + if err == nil && p.TxSmart != nil { + err = limits.CheckLimit(p) + } + if err != nil { + if err == errNetworkStopping { + return err + } + + if b.GenBlock && err == ErrLimitStop { + b.StopCount = curTx + model.IncrementTxAttemptCount(p.DbTransaction, p.TxHash) + } + errRoll := dbTransaction.RollbackSavepoint(curTx) + if errRoll != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("rolling back to previous savepoint") + return errRoll + } + if b.GenBlock && err == ErrLimitStop { + break + } + // skip this transaction + model.MarkTransactionUsed(p.DbTransaction, p.TxHash) + p.processBadTransaction(p.TxHash, err.Error()) + if p.SysUpdate { + if err = syspar.SysUpdate(p.DbTransaction); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") + } + p.SysUpdate = false + } + continue + } + err = dbTransaction.ReleaseSavepoint(curTx) + if err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("releasing savepoint") + } + if p.SysUpdate { + b.SysUpdate = true + p.SysUpdate = false + } + + if _, err := model.MarkTransactionUsed(p.DbTransaction, p.TxHash); err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("marking transaction used") + return err + } + + // update status + ts := &model.TransactionStatus{} + if err := ts.UpdateBlockMsg(p.DbTransaction, b.Header.BlockID, msg, p.TxHash); err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("updating transaction status block id") + return err + } + if err := InsertInLogTx(p.DbTransaction, p.TxFullData, p.TxTime); err != nil { + return utils.ErrInfo(err) + } + } + return nil +} + +// CheckBlock is checking block +func (b *Block) CheckBlock() error { + + logger := b.GetLogger() + // exclude blocks from future + if b.Header.Time > time.Now().Unix() { + logger.WithFields(log.Fields{"type": consts.ParameterExceeded}).Error("block time is larger than now") + return utils.ErrInfo(fmt.Errorf("incorrect block time - block.Header.Time > time.Now().Unix()")) + } + if b.PrevHeader == nil || b.PrevHeader.BlockID != b.Header.BlockID-1 { + if err := b.readPreviousBlockFromBlockchainTable(); err != nil { + logger.WithFields(log.Fields{"type": consts.InvalidObject}).Error("block id is larger then previous more than on 1") + return utils.ErrInfo(err) + } + } + + if b.Header.BlockID == 1 { + return nil + } + + // is this block too early? 
Allowable error = error_time + if b.PrevHeader != nil { + if b.Header.BlockID != b.PrevHeader.BlockID+1 { + logger.WithFields(log.Fields{"type": consts.InvalidObject}).Error("block id is larger then previous more than on 1") + return utils.ErrInfo(fmt.Errorf("incorrect block_id %d != %d +1", b.Header.BlockID, b.PrevHeader.BlockID)) + } + + // skip time validation for first block + if b.Header.BlockID > 1 { + blockTimeCalculator, err := utils.BuildBlockTimeCalculator() + if err != nil { + logger.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("building block time calculator") + return err + } + + validBlockTime, err := blockTimeCalculator.ValidateBlock(b.Header.NodePosition, time.Unix(b.Header.Time, 0)) + if err != nil { + logger.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("calculating block time") + return err + } + + if !validBlockTime { + logger.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("incorrect block time") + return utils.ErrInfo(fmt.Errorf("incorrect block time %d", b.PrevHeader.Time)) + } + } + } + + // check each transaction + txCounter := make(map[int64]int) + txHashes := make(map[string]struct{}) + for _, p := range b.Parsers { + hexHash := string(converter.BinToHex(p.TxHash)) + // check for duplicate transactions + if _, ok := txHashes[hexHash]; ok { + logger.WithFields(log.Fields{"tx_hash": hexHash, "type": consts.DuplicateObject}).Error("duplicate transaction") + return utils.ErrInfo(fmt.Errorf("duplicate transaction %s", hexHash)) + } + txHashes[hexHash] = struct{}{} + + // check for max transaction per user in one block + txCounter[p.TxKeyID]++ + if txCounter[p.TxKeyID] > syspar.GetMaxBlockUserTx() { + return utils.ErrInfo(fmt.Errorf("max_block_user_transactions")) + } + + if err := checkTransaction(p, b.Header.Time, false); err != nil { + return utils.ErrInfo(err) + } + + } + + result, err := b.CheckHash() + if err != nil { + return utils.ErrInfo(err) + } + if !result { + logger.WithFields(log.Fields{"type": consts.InvalidObject}).Error("incorrect signature") + return fmt.Errorf("incorrect signature / p.PrevBlock.BlockId: %d", b.PrevHeader.BlockID) + } + return nil +} + +// CheckHash is checking hash +func (b *Block) CheckHash() (bool, error) { + logger := b.GetLogger() + if b.Header.BlockID == 1 { + return true, nil + } + // check block signature + if b.PrevHeader != nil { + nodePublicKey, err := syspar.GetNodePublicKeyByPosition(b.Header.NodePosition) + if err != nil { + return false, utils.ErrInfo(err) + } + if len(nodePublicKey) == 0 { + logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("node public key is empty") + return false, utils.ErrInfo(fmt.Errorf("empty nodePublicKey")) + } + // check the signature + forSign := fmt.Sprintf("0,%d,%x,%d,%d,%d,%d,%s", b.Header.BlockID, b.PrevHeader.Hash, + b.Header.Time, b.Header.EcosystemID, b.Header.KeyID, b.Header.NodePosition, b.MrklRoot) + + resultCheckSign, err := utils.CheckSign([][]byte{nodePublicKey}, forSign, b.Header.Sign, true) + if err != nil { + logger.WithFields(log.Fields{"error": err, "type": consts.CryptoError}).Error("checking block header sign") + return false, utils.ErrInfo(fmt.Errorf("err: %v / block.PrevHeader.BlockID: %d / block.PrevHeader.Hash: %x / ", err, b.PrevHeader.BlockID, b.PrevHeader.Hash)) + } + + return resultCheckSign, nil + } + + return true, nil +} diff --git a/packages/parser/common.go b/packages/parser/common.go index 8dc3be71a..b2061e13f 100644 --- a/packages/parser/common.go +++ 
b/packages/parser/common.go @@ -17,15 +17,11 @@ package parser import ( - "bytes" - "encoding/json" "fmt" "reflect" - "time" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/converter" - "github.com/GenesisKernel/go-genesis/packages/crypto" "github.com/GenesisKernel/go-genesis/packages/model" "github.com/GenesisKernel/go-genesis/packages/smart" "github.com/GenesisKernel/go-genesis/packages/utils" @@ -47,41 +43,6 @@ func GetTxTypeAndUserID(binaryBlock []byte) (txType int64, keyID int64) { return } -// GetBlockDataFromBlockChain is retrieving block data from blockchain -func GetBlockDataFromBlockChain(blockID int64) (*utils.BlockData, error) { - BlockData := new(utils.BlockData) - block := &model.Block{} - _, err := block.Get(blockID) - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("Getting block by ID") - return BlockData, utils.ErrInfo(err) - } - - header, err := utils.ParseBlockHeader(bytes.NewBuffer(block.Data), false) - if err != nil { - return nil, utils.ErrInfo(err) - } - - BlockData = &header - BlockData.Hash = block.Hash - return BlockData, nil -} - -// InsertInLogTx is inserting tx in log -func InsertInLogTx(transaction *model.DbTransaction, binaryTx []byte, time int64) error { - txHash, err := crypto.Hash(binaryTx) - if err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.CryptoError}).Fatal("hashing binary tx") - } - ltx := &model.LogTransaction{Hash: txHash, Time: time} - err = ltx.Create(transaction) - if err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.DBError}).Error("insert logged transaction") - return utils.ErrInfo(err) - } - return nil -} - // ParserInterface is parsing transactions type ParserInterface interface { Init() error @@ -153,128 +114,6 @@ func (p Parser) GetLogger() *log.Entry { return logger } -// CheckLogTx checks if this transaction exists -// And it would have successfully passed a frontal test -func CheckLogTx(txBinary []byte, transactions, txQueue bool) error { - searchedHash, err := crypto.Hash(txBinary) - if err != nil { - log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Fatal(err) - } - logTx := &model.LogTransaction{} - found, err := logTx.GetByHash(searchedHash) - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting log transaction by hash") - return utils.ErrInfo(err) - } - if found { - log.WithFields(log.Fields{"tx_hash": searchedHash, "type": consts.DuplicateObject}).Error("double tx in log transactions") - return utils.ErrInfo(fmt.Errorf("double tx in log_transactions %x", searchedHash)) - } - - if transactions { - // check for duplicate transaction - tx := &model.Transaction{} - _, err := tx.GetVerified(searchedHash) - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting verified transaction") - return utils.ErrInfo(err) - } - if len(tx.Hash) > 0 { - log.WithFields(log.Fields{"tx_hash": tx.Hash, "type": consts.DuplicateObject}).Error("double tx in transactions") - return utils.ErrInfo(fmt.Errorf("double tx in transactions %x", searchedHash)) - } - } - - if txQueue { - // check for duplicate transaction from queue - qtx := &model.QueueTx{} - found, err := qtx.GetByHash(nil, searchedHash) - if found { - log.WithFields(log.Fields{"tx_hash": searchedHash, "type": consts.DuplicateObject}).Error("double tx in queue") - return utils.ErrInfo(fmt.Errorf("double tx in queue_tx %x", searchedHash)) - } - if err != nil { 
- log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting transaction from queue") - return utils.ErrInfo(err) - } - } - - return nil -} - -// InsertIntoBlockchain inserts a block into the blockchain -func InsertIntoBlockchain(transaction *model.DbTransaction, block *Block) error { - - // for local tests - blockID := block.Header.BlockID - - // record into the block chain - bl := &model.Block{} - err := bl.DeleteById(transaction, blockID) - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting block by id") - return err - } - rollbackTx := &model.RollbackTx{} - blockRollbackTxs, err := rollbackTx.GetBlockRollbackTransactions(transaction, blockID) - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting block rollback txs") - return err - } - buffer := bytes.Buffer{} - for _, rollbackTx := range blockRollbackTxs { - rollbackTxBytes, err := json.Marshal(rollbackTx) - if err != nil { - log.WithFields(log.Fields{"type": consts.JSONMarshallError, "error": err}).Error("marshalling rollback_tx to json") - return err - } - - buffer.Write(rollbackTxBytes) - } - rollbackTxsHash, err := crypto.Hash(buffer.Bytes()) - if err != nil { - log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("hashing block rollback_txs") - return err - } - b := &model.Block{ - ID: blockID, - Hash: block.Header.Hash, - Data: block.BinData, - EcosystemID: block.Header.EcosystemID, - KeyID: block.Header.KeyID, - NodePosition: block.Header.NodePosition, - Time: block.Header.Time, - RollbacksHash: rollbackTxsHash, - Tx: int32(len(block.Parsers)), - } - blockTimeCalculator, err := utils.BuildBlockTimeCalculator() - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating block") - return err - } - validBlockTime := true - if blockID > 1 { - validBlockTime, err = blockTimeCalculator.ValidateBlock(b.NodePosition, time.Unix(b.Time, 0)) - if err != nil { - log.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("block validation") - return err - } - } - if validBlockTime { - err = b.Create(transaction) - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating block") - return err - } - } else { - err := fmt.Errorf("Invalid block time: %d", block.Header.Time) - log.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("invalid block time") - return err - } - - return nil -} - // FormatBlockData returns formated block data func (p *Parser) FormatBlockData() string { result := "" diff --git a/packages/parser/common_parse_data_full.go b/packages/parser/common_parse_data_full.go index d94108c9e..77cad5395 100644 --- a/packages/parser/common_parse_data_full.go +++ b/packages/parser/common_parse_data_full.go @@ -21,14 +21,12 @@ import ( "encoding/hex" "fmt" "strings" - "sync" "time" "github.com/GenesisKernel/go-genesis/packages/conf/syspar" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/crypto" - "github.com/GenesisKernel/go-genesis/packages/model" "github.com/GenesisKernel/go-genesis/packages/script" "github.com/GenesisKernel/go-genesis/packages/smart" "github.com/GenesisKernel/go-genesis/packages/utils" @@ -41,28 +39,6 @@ import ( var txParserCache = &parserCache{cache: make(map[string]*Parser)} -// Block is storing block data -type Block struct { - Header utils.BlockData - 
PrevHeader *utils.BlockData - MrklRoot []byte - BinData []byte - Parsers []*Parser - SysUpdate bool - GenBlock bool // it equals true when we are generating a new block - StopCount int // The count of good tx in the block -} - -func (b Block) String() string { - return fmt.Sprintf("header: %s, prevHeader: %s", b.Header, b.PrevHeader) -} - -// GetLogger is returns logger -func (b Block) GetLogger() *log.Entry { - return log.WithFields(log.Fields{"block_id": b.Header.BlockID, "block_time": b.Header.Time, "block_wallet_id": b.Header.KeyID, - "block_state_id": b.Header.EcosystemID, "block_hash": b.Header.Hash, "block_version": b.Header.Version}) -} - // InsertBlockWOForks is inserting blocks func InsertBlockWOForks(data []byte, genBlock, firstBlock bool) error { block, err := ProcessBlockWherePrevFromBlockchainTable(data, !firstBlock) @@ -83,71 +59,6 @@ func InsertBlockWOForks(data []byte, genBlock, firstBlock bool) error { return nil } -// PlayBlockSafe is inserting block safely -func (b *Block) PlayBlockSafe() error { - logger := b.GetLogger() - dbTransaction, err := model.StartTransaction() - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("starting db transaction") - return err - } - - err = b.PlayBlock(dbTransaction) - if b.GenBlock && b.StopCount > 0 { - doneTx := b.Parsers[:b.StopCount] - trData := make([][]byte, 0, b.StopCount) - for _, tr := range doneTx { - trData = append(trData, tr.TxFullData) - } - NodePrivateKey, _, err := utils.GetNodeKeys() - if err != nil || len(NodePrivateKey) < 1 { - log.WithFields(log.Fields{"type": consts.NodePrivateKeyFilename, "error": err}).Error("reading node private key") - return err - } - - newBlockData, err := MarshallBlock(&b.Header, trData, b.PrevHeader.Hash, NodePrivateKey) - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("marshalling new block") - return err - } - - isFirstBlock := b.Header.BlockID == 1 - nb, err := ParseBlock(bytes.NewBuffer(newBlockData), isFirstBlock) - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("parsing new block") - return err - } - b.BinData = newBlockData - b.Parsers = nb.Parsers - b.MrklRoot = nb.MrklRoot - b.SysUpdate = nb.SysUpdate - err = nil - } else if err != nil { - dbTransaction.Rollback() - return err - } - - if err := UpdBlockInfo(dbTransaction, b); err != nil { - dbTransaction.Rollback() - return err - } - - if err := InsertIntoBlockchain(dbTransaction, b); err != nil { - dbTransaction.Rollback() - return err - } - - dbTransaction.Commit() - if b.SysUpdate { - b.SysUpdate = false - if err = syspar.SysUpdate(nil); err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") - return err - } - } - return nil -} - // ProcessBlockWherePrevFromMemory is processing block with in memory previous block func ProcessBlockWherePrevFromMemory(data []byte) (*Block, error) { if int64(len(data)) > syspar.GetMaxBlockSize() { @@ -571,24 +482,6 @@ func CheckTransaction(data []byte) (*tx.Header, error) { return p.TxHeader, nil } -func (b *Block) readPreviousBlockFromMemory() error { - return nil -} - -func (b *Block) readPreviousBlockFromBlockchainTable() error { - if b.Header.BlockID == 1 { - b.PrevHeader = &utils.BlockData{} - return nil - } - - var err error - b.PrevHeader, err = GetBlockDataFromBlockChain(b.Header.BlockID - 1) - if err != nil { - return utils.ErrInfo(fmt.Errorf("can't get block %d", b.Header.BlockID-1)) - } - return nil -} - func 
playTransaction(p *Parser) (string, error) { // smart-contract if p.TxContract != nil { @@ -608,199 +501,6 @@ func playTransaction(p *Parser) (string, error) { return "", nil } -func (b *Block) PlayBlock(dbTransaction *model.DbTransaction) error { - logger := b.GetLogger() - if _, err := model.DeleteUsedTransactions(dbTransaction); err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("delete used transactions") - return err - } - limits := NewLimits(b) - for curTx, p := range b.Parsers { - var ( - msg string - err error - ) - p.DbTransaction = dbTransaction - - err = dbTransaction.Savepoint(curTx) - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("using savepoint") - return err - } - msg, err = playTransaction(p) - if err == nil && p.TxSmart != nil { - err = limits.CheckLimit(p) - } - if err != nil { - if err == errNetworkStopping { - return err - } - - if b.GenBlock && err == ErrLimitStop { - b.StopCount = curTx - model.IncrementTxAttemptCount(p.DbTransaction, p.TxHash) - } - errRoll := dbTransaction.RollbackSavepoint(curTx) - if errRoll != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("rolling back to previous savepoint") - return errRoll - } - if b.GenBlock && err == ErrLimitStop { - break - } - // skip this transaction - model.MarkTransactionUsed(p.DbTransaction, p.TxHash) - p.processBadTransaction(p.TxHash, err.Error()) - if p.SysUpdate { - if err = syspar.SysUpdate(p.DbTransaction); err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") - } - p.SysUpdate = false - } - continue - } - err = dbTransaction.ReleaseSavepoint(curTx) - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("releasing savepoint") - } - if p.SysUpdate { - b.SysUpdate = true - p.SysUpdate = false - } - - if _, err := model.MarkTransactionUsed(p.DbTransaction, p.TxHash); err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("marking transaction used") - return err - } - - // update status - ts := &model.TransactionStatus{} - if err := ts.UpdateBlockMsg(p.DbTransaction, b.Header.BlockID, msg, p.TxHash); err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("updating transaction status block id") - return err - } - if err := InsertInLogTx(p.DbTransaction, p.TxFullData, p.TxTime); err != nil { - return utils.ErrInfo(err) - } - } - return nil -} - -// CheckBlock is checking block -func (b *Block) CheckBlock() error { - - logger := b.GetLogger() - // exclude blocks from future - if b.Header.Time > time.Now().Unix() { - logger.WithFields(log.Fields{"type": consts.ParameterExceeded}).Error("block time is larger than now") - return utils.ErrInfo(fmt.Errorf("incorrect block time - block.Header.Time > time.Now().Unix()")) - } - if b.PrevHeader == nil || b.PrevHeader.BlockID != b.Header.BlockID-1 { - if err := b.readPreviousBlockFromBlockchainTable(); err != nil { - logger.WithFields(log.Fields{"type": consts.InvalidObject}).Error("block id is larger then previous more than on 1") - return utils.ErrInfo(err) - } - } - - if b.Header.BlockID == 1 { - return nil - } - - // is this block too early? 
Allowable error = error_time - if b.PrevHeader != nil { - if b.Header.BlockID != b.PrevHeader.BlockID+1 { - logger.WithFields(log.Fields{"type": consts.InvalidObject}).Error("block id is larger then previous more than on 1") - return utils.ErrInfo(fmt.Errorf("incorrect block_id %d != %d +1", b.Header.BlockID, b.PrevHeader.BlockID)) - } - - // skip time validation for first block - if b.Header.BlockID > 1 { - blockTimeCalculator, err := utils.BuildBlockTimeCalculator() - if err != nil { - logger.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("building block time calculator") - return err - } - - validBlockTime, err := blockTimeCalculator.ValidateBlock(b.Header.NodePosition, time.Unix(b.Header.Time, 0)) - if err != nil { - logger.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("calculating block time") - return err - } - - if !validBlockTime { - logger.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("incorrect block time") - return utils.ErrInfo(fmt.Errorf("incorrect block time %d", b.PrevHeader.Time)) - } - } - } - - // check each transaction - txCounter := make(map[int64]int) - txHashes := make(map[string]struct{}) - for _, p := range b.Parsers { - hexHash := string(converter.BinToHex(p.TxHash)) - // check for duplicate transactions - if _, ok := txHashes[hexHash]; ok { - logger.WithFields(log.Fields{"tx_hash": hexHash, "type": consts.DuplicateObject}).Error("duplicate transaction") - return utils.ErrInfo(fmt.Errorf("duplicate transaction %s", hexHash)) - } - txHashes[hexHash] = struct{}{} - - // check for max transaction per user in one block - txCounter[p.TxKeyID]++ - if txCounter[p.TxKeyID] > syspar.GetMaxBlockUserTx() { - return utils.ErrInfo(fmt.Errorf("max_block_user_transactions")) - } - - if err := checkTransaction(p, b.Header.Time, false); err != nil { - return utils.ErrInfo(err) - } - - } - - result, err := b.CheckHash() - if err != nil { - return utils.ErrInfo(err) - } - if !result { - logger.WithFields(log.Fields{"type": consts.InvalidObject}).Error("incorrect signature") - return fmt.Errorf("incorrect signature / p.PrevBlock.BlockId: %d", b.PrevHeader.BlockID) - } - return nil -} - -// CheckHash is checking hash -func (b *Block) CheckHash() (bool, error) { - logger := b.GetLogger() - if b.Header.BlockID == 1 { - return true, nil - } - // check block signature - if b.PrevHeader != nil { - nodePublicKey, err := syspar.GetNodePublicKeyByPosition(b.Header.NodePosition) - if err != nil { - return false, utils.ErrInfo(err) - } - if len(nodePublicKey) == 0 { - logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("node public key is empty") - return false, utils.ErrInfo(fmt.Errorf("empty nodePublicKey")) - } - // check the signature - forSign := fmt.Sprintf("0,%d,%x,%d,%d,%d,%d,%s", b.Header.BlockID, b.PrevHeader.Hash, - b.Header.Time, b.Header.EcosystemID, b.Header.KeyID, b.Header.NodePosition, b.MrklRoot) - - resultCheckSign, err := utils.CheckSign([][]byte{nodePublicKey}, forSign, b.Header.Sign, true) - if err != nil { - logger.WithFields(log.Fields{"error": err, "type": consts.CryptoError}).Error("checking block header sign") - return false, utils.ErrInfo(fmt.Errorf("err: %v / block.PrevHeader.BlockID: %d / block.PrevHeader.Hash: %x / ", err, b.PrevHeader.BlockID, b.PrevHeader.Hash)) - } - - return resultCheckSign, nil - } - - return true, nil -} - // MarshallBlock is marshalling block func MarshallBlock(header *utils.BlockData, trData [][]byte, prevHash []byte, key string) ([]byte, error) { var 
mrklArray [][]byte @@ -850,33 +550,6 @@ func MarshallBlock(header *utils.BlockData, trData [][]byte, prevHash []byte, ke return buf.Bytes(), nil } -type parserCache struct { - mutex sync.RWMutex - cache map[string]*Parser -} - -func (pc *parserCache) Get(hash string) (p *Parser, ok bool) { - pc.mutex.RLock() - defer pc.mutex.RUnlock() - - p, ok = pc.cache[hash] - return -} - -func (pc *parserCache) Set(p *Parser) { - pc.mutex.Lock() - defer pc.mutex.Unlock() - - pc.cache[string(p.TxHash)] = p -} - -func (pc *parserCache) Clean() { - pc.mutex.Lock() - defer pc.mutex.Unlock() - - pc.cache = make(map[string]*Parser) -} - // CleanCache cleans cache of transaction parsers func CleanCache() { txParserCache.Clean() diff --git a/packages/parser/common_upd_block_info.go b/packages/parser/common_upd_block_info.go deleted file mode 100644 index e0d300d7e..000000000 --- a/packages/parser/common_upd_block_info.go +++ /dev/null @@ -1,75 +0,0 @@ -// Copyright 2016 The go-daylight Authors -// This file is part of the go-daylight library. -// -// The go-daylight library is free software: you can redistribute it and/or modify -// it under the terms of the GNU Lesser General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// The go-daylight library is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Lesser General Public License for more details. -// -// You should have received a copy of the GNU Lesser General Public License -// along with the go-daylight library. If not, see . - -package parser - -import ( - "fmt" - - "github.com/GenesisKernel/go-genesis/packages/consts" - "github.com/GenesisKernel/go-genesis/packages/converter" - "github.com/GenesisKernel/go-genesis/packages/crypto" - "github.com/GenesisKernel/go-genesis/packages/model" - - log "github.com/sirupsen/logrus" -) - -// UpdBlockInfo updates info_block table -func UpdBlockInfo(dbTransaction *model.DbTransaction, block *Block) error { - blockID := block.Header.BlockID - // for the local tests - forSha := fmt.Sprintf("%d,%x,%s,%d,%d,%d,%d", blockID, block.PrevHeader.Hash, block.MrklRoot, - block.Header.Time, block.Header.EcosystemID, block.Header.KeyID, block.Header.NodePosition) - - hash, err := crypto.DoubleHash([]byte(forSha)) - if err != nil { - log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Fatal("double hashing block") - } - - block.Header.Hash = hash - if block.Header.BlockID == 1 { - ib := &model.InfoBlock{ - Hash: hash, - BlockID: blockID, - Time: block.Header.Time, - EcosystemID: block.Header.EcosystemID, - KeyID: block.Header.KeyID, - NodePosition: converter.Int64ToStr(block.Header.NodePosition), - CurrentVersion: fmt.Sprintf("%d", block.Header.Version), - } - err := ib.Create(dbTransaction) - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating info block") - return fmt.Errorf("error insert into info_block %s", err) - } - } else { - ibUpdate := &model.InfoBlock{ - Hash: hash, - BlockID: blockID, - Time: block.Header.Time, - EcosystemID: block.Header.EcosystemID, - KeyID: block.Header.KeyID, - NodePosition: converter.Int64ToStr(block.Header.NodePosition), - Sent: 0, - } - if err := ibUpdate.Update(dbTransaction); err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating info block") - return 
fmt.Errorf("error while updating info_block: %s", err) - } - } - - return nil -} diff --git a/packages/parser/db.go b/packages/parser/db.go new file mode 100644 index 000000000..aacec8f4f --- /dev/null +++ b/packages/parser/db.go @@ -0,0 +1,219 @@ +package parser + +import ( + "bytes" + "encoding/json" + "fmt" + "time" + + "github.com/GenesisKernel/go-genesis/packages/consts" + "github.com/GenesisKernel/go-genesis/packages/converter" + "github.com/GenesisKernel/go-genesis/packages/crypto" + "github.com/GenesisKernel/go-genesis/packages/model" + "github.com/GenesisKernel/go-genesis/packages/utils" + + log "github.com/sirupsen/logrus" +) + +// UpdBlockInfo updates info_block table +func UpdBlockInfo(dbTransaction *model.DbTransaction, block *Block) error { + blockID := block.Header.BlockID + // for the local tests + forSha := fmt.Sprintf("%d,%x,%s,%d,%d,%d,%d", blockID, block.PrevHeader.Hash, block.MrklRoot, + block.Header.Time, block.Header.EcosystemID, block.Header.KeyID, block.Header.NodePosition) + + hash, err := crypto.DoubleHash([]byte(forSha)) + if err != nil { + log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Fatal("double hashing block") + } + + block.Header.Hash = hash + if block.Header.BlockID == 1 { + ib := &model.InfoBlock{ + Hash: hash, + BlockID: blockID, + Time: block.Header.Time, + EcosystemID: block.Header.EcosystemID, + KeyID: block.Header.KeyID, + NodePosition: converter.Int64ToStr(block.Header.NodePosition), + CurrentVersion: fmt.Sprintf("%d", block.Header.Version), + } + err := ib.Create(dbTransaction) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating info block") + return fmt.Errorf("error insert into info_block %s", err) + } + } else { + ibUpdate := &model.InfoBlock{ + Hash: hash, + BlockID: blockID, + Time: block.Header.Time, + EcosystemID: block.Header.EcosystemID, + KeyID: block.Header.KeyID, + NodePosition: converter.Int64ToStr(block.Header.NodePosition), + Sent: 0, + } + if err := ibUpdate.Update(dbTransaction); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating info block") + return fmt.Errorf("error while updating info_block: %s", err) + } + } + + return nil +} + +// InsertIntoBlockchain inserts a block into the blockchain +func InsertIntoBlockchain(transaction *model.DbTransaction, block *Block) error { + // for local tests + blockID := block.Header.BlockID + + // record into the block chain + bl := &model.Block{} + err := bl.DeleteById(transaction, blockID) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting block by id") + return err + } + rollbackTx := &model.RollbackTx{} + blockRollbackTxs, err := rollbackTx.GetBlockRollbackTransactions(transaction, blockID) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting block rollback txs") + return err + } + buffer := bytes.Buffer{} + for _, rollbackTx := range blockRollbackTxs { + rollbackTxBytes, err := json.Marshal(rollbackTx) + if err != nil { + log.WithFields(log.Fields{"type": consts.JSONMarshallError, "error": err}).Error("marshalling rollback_tx to json") + return err + } + + buffer.Write(rollbackTxBytes) + } + rollbackTxsHash, err := crypto.Hash(buffer.Bytes()) + if err != nil { + log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("hashing block rollback_txs") + return err + } + b := &model.Block{ + ID: blockID, + Hash: block.Header.Hash, + Data: block.BinData, + 
EcosystemID: block.Header.EcosystemID, + KeyID: block.Header.KeyID, + NodePosition: block.Header.NodePosition, + Time: block.Header.Time, + RollbacksHash: rollbackTxsHash, + Tx: int32(len(block.Parsers)), + } + blockTimeCalculator, err := utils.BuildBlockTimeCalculator() + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating block") + return err + } + validBlockTime := true + if blockID > 1 { + validBlockTime, err = blockTimeCalculator.ValidateBlock(b.NodePosition, time.Unix(b.Time, 0)) + if err != nil { + log.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("block validation") + return err + } + } + if validBlockTime { + err = b.Create(transaction) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating block") + return err + } + } else { + err := fmt.Errorf("Invalid block time: %d", block.Header.Time) + log.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("invalid block time") + return err + } + + return nil +} + +// InsertInLogTx is inserting tx in log +func InsertInLogTx(transaction *model.DbTransaction, binaryTx []byte, time int64) error { + txHash, err := crypto.Hash(binaryTx) + if err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.CryptoError}).Fatal("hashing binary tx") + } + ltx := &model.LogTransaction{Hash: txHash, Time: time} + err = ltx.Create(transaction) + if err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.DBError}).Error("insert logged transaction") + return utils.ErrInfo(err) + } + return nil +} + +// CheckLogTx checks if this transaction exists +// And it would have successfully passed a frontal test +func CheckLogTx(txBinary []byte, transactions, txQueue bool) error { + searchedHash, err := crypto.Hash(txBinary) + if err != nil { + log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Fatal(err) + } + logTx := &model.LogTransaction{} + found, err := logTx.GetByHash(searchedHash) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting log transaction by hash") + return utils.ErrInfo(err) + } + if found { + log.WithFields(log.Fields{"tx_hash": searchedHash, "type": consts.DuplicateObject}).Error("double tx in log transactions") + return utils.ErrInfo(fmt.Errorf("double tx in log_transactions %x", searchedHash)) + } + + if transactions { + // check for duplicate transaction + tx := &model.Transaction{} + _, err := tx.GetVerified(searchedHash) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting verified transaction") + return utils.ErrInfo(err) + } + if len(tx.Hash) > 0 { + log.WithFields(log.Fields{"tx_hash": tx.Hash, "type": consts.DuplicateObject}).Error("double tx in transactions") + return utils.ErrInfo(fmt.Errorf("double tx in transactions %x", searchedHash)) + } + } + + if txQueue { + // check for duplicate transaction from queue + qtx := &model.QueueTx{} + found, err := qtx.GetByHash(nil, searchedHash) + if found { + log.WithFields(log.Fields{"tx_hash": searchedHash, "type": consts.DuplicateObject}).Error("double tx in queue") + return utils.ErrInfo(fmt.Errorf("double tx in queue_tx %x", searchedHash)) + } + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting transaction from queue") + return utils.ErrInfo(err) + } + } + + return nil +} + +// GetBlockDataFromBlockChain is retrieving block data from blockchain +func 
GetBlockDataFromBlockChain(blockID int64) (*utils.BlockData, error) { + BlockData := new(utils.BlockData) + block := &model.Block{} + _, err := block.Get(blockID) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("Getting block by ID") + return BlockData, utils.ErrInfo(err) + } + + header, err := utils.ParseBlockHeader(bytes.NewBuffer(block.Data), false) + if err != nil { + return nil, utils.ErrInfo(err) + } + + BlockData = &header + BlockData.Hash = block.Hash + return BlockData, nil +} diff --git a/packages/parser/parser_cache.go b/packages/parser/parser_cache.go new file mode 100644 index 000000000..adbd54009 --- /dev/null +++ b/packages/parser/parser_cache.go @@ -0,0 +1,30 @@ +package parser + +import "sync" + +type parserCache struct { + mutex sync.RWMutex + cache map[string]*Parser +} + +func (pc *parserCache) Get(hash string) (p *Parser, ok bool) { + pc.mutex.RLock() + defer pc.mutex.RUnlock() + + p, ok = pc.cache[hash] + return +} + +func (pc *parserCache) Set(p *Parser) { + pc.mutex.Lock() + defer pc.mutex.Unlock() + + pc.cache[string(p.TxHash)] = p +} + +func (pc *parserCache) Clean() { + pc.mutex.Lock() + defer pc.mutex.Unlock() + + pc.cache = make(map[string]*Parser) +} From 19bb3d6cb6118f4dca2cf43fb6f67d01197bdf9d Mon Sep 17 00:00:00 2001 From: Roman Potekhin Date: Wed, 6 Jun 2018 10:55:28 +0300 Subject: [PATCH 031/169] Eliminate parser/common_tx_parser, move all to parser/db.go --- packages/daemons/block_generator.go | 6 +- packages/daemons/queue_parser_tx.go | 2 +- packages/parser/block.go | 2 +- packages/parser/common_parse_data_full.go | 2 +- packages/parser/common_tx_parser.go | 166 ---------------------- packages/parser/db.go | 132 +++++++++++++++++ 6 files changed, 138 insertions(+), 172 deletions(-) delete mode 100644 packages/parser/common_tx_parser.go diff --git a/packages/daemons/block_generator.go b/packages/daemons/block_generator.go index daa70067c..def213110 100644 --- a/packages/daemons/block_generator.go +++ b/packages/daemons/block_generator.go @@ -157,7 +157,7 @@ func processTransactions(logger *log.Entry) ([]*model.Transaction, error) { p := new(parser.Parser) // verify transactions - err := p.AllTxParser() + err := parser.ProcessTransactionsQueue(p.DbTransaction) if err != nil { return nil, err } @@ -176,7 +176,7 @@ func processTransactions(logger *log.Entry) ([]*model.Transaction, error) { p, err := parser.ParseTransaction(bufTransaction) if err != nil { if p != nil { - p.ProcessBadTransaction(err) + parser.MarkTransactionBad(p.DbTransaction, p.TxHash, err.Error()) } continue } @@ -189,7 +189,7 @@ func processTransactions(logger *log.Entry) ([]*model.Transaction, error) { if err == parser.ErrLimitSkip { model.IncrementTxAttemptCount(nil, p.TxHash) } else { - p.ProcessBadTransaction(err) + parser.MarkTransactionBad(p.DbTransaction, p.TxHash, err.Error()) } continue } diff --git a/packages/daemons/queue_parser_tx.go b/packages/daemons/queue_parser_tx.go index f50da9494..4585dc1df 100644 --- a/packages/daemons/queue_parser_tx.go +++ b/packages/daemons/queue_parser_tx.go @@ -50,7 +50,7 @@ func QueueParserTx(ctx context.Context, d *daemon) error { } p := new(parser.Parser) - err = p.AllTxParser() + err = parser.ProcessTransactionsQueue(p.DbTransaction) if err != nil { d.logger.WithFields(log.Fields{"error": err}).Error("parsing transactions") return err diff --git a/packages/parser/block.go b/packages/parser/block.go index 0dd7d097b..df6a83ea5 100644 --- a/packages/parser/block.go +++ b/packages/parser/block.go @@ 
-161,7 +161,7 @@ func (b *Block) PlayBlock(dbTransaction *model.DbTransaction) error { } // skip this transaction model.MarkTransactionUsed(p.DbTransaction, p.TxHash) - p.processBadTransaction(p.TxHash, err.Error()) + MarkTransactionBad(p.DbTransaction, p.TxHash, err.Error()) if p.SysUpdate { if err = syspar.SysUpdate(p.DbTransaction); err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") diff --git a/packages/parser/common_parse_data_full.go b/packages/parser/common_parse_data_full.go index 77cad5395..9076ed3cb 100644 --- a/packages/parser/common_parse_data_full.go +++ b/packages/parser/common_parse_data_full.go @@ -143,7 +143,7 @@ func ParseBlock(blockBuffer *bytes.Buffer, firstBlock bool) (*Block, error) { p, err := ParseTransaction(bufTransaction) if err != nil { if p != nil && p.TxHash != nil { - p.processBadTransaction(p.TxHash, err.Error()) + MarkTransactionBad(p.DbTransaction, p.TxHash, err.Error()) } return nil, fmt.Errorf("parse transaction error(%s)", err) } diff --git a/packages/parser/common_tx_parser.go b/packages/parser/common_tx_parser.go deleted file mode 100644 index 1d211e307..000000000 --- a/packages/parser/common_tx_parser.go +++ /dev/null @@ -1,166 +0,0 @@ -// Copyright 2016 The go-daylight Authors -// This file is part of the go-daylight library. -// -// The go-daylight library is free software: you can redistribute it and/or modify -// it under the terms of the GNU Lesser General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// The go-daylight library is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Lesser General Public License for more details. -// -// You should have received a copy of the GNU Lesser General Public License -// along with the go-daylight library. If not, see . 
- -package parser - -import ( - "errors" - - "github.com/GenesisKernel/go-genesis/packages/consts" - "github.com/GenesisKernel/go-genesis/packages/model" - "github.com/GenesisKernel/go-genesis/packages/utils" - - log "github.com/sirupsen/logrus" -) - -// TxParser writes transactions into the queue -func (p *Parser) TxParser(hash, binaryTx []byte, myTx bool) error { - // get parameters for "struct" transactions - logger := p.GetLogger() - txType, keyID := GetTxTypeAndUserID(binaryTx) - - header, err := CheckTransaction(binaryTx) - if err != nil { - p.processBadTransaction(hash, err.Error()) - return err - } - - if !( /*txType > 127 ||*/ consts.IsStruct(int(txType))) { - if header == nil { - logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("tx header is nil") - return utils.ErrInfo(errors.New("header is nil")) - } - keyID = header.KeyID - } - - if keyID == 0 { - errStr := "undefined keyID" - p.processBadTransaction(hash, errStr) - return errors.New(errStr) - } - - tx := &model.Transaction{} - _, err = tx.Get(hash) - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting transaction by hash") - return utils.ErrInfo(err) - } - counter := tx.Counter - counter++ - _, err = model.DeleteTransactionByHash(hash) - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting transaction by hash") - return utils.ErrInfo(err) - } - - // put with verified=1 - newTx := &model.Transaction{ - Hash: hash, - Data: binaryTx, - Type: int8(txType), - KeyID: keyID, - Counter: counter, - Verified: 1, - HighRate: tx.HighRate, - } - err = newTx.Create() - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating new transaction") - return utils.ErrInfo(err) - } - - // remove transaction from the queue (with verified=0) - err = p.DeleteQueueTx(hash) - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting transaction from queue") - return utils.ErrInfo(err) - } - - return nil -} - -func (p *Parser) processBadTransaction(hash []byte, errText string) error { - logger := p.GetLogger() - if len(errText) > 255 { - errText = errText[:255] - } - // looks like there is not hash in queue_tx in this moment - qtx := &model.QueueTx{} - _, err := qtx.GetByHash(p.DbTransaction, hash) - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting tx by hash from queue") - } - - if qtx.FromGate == 0 { - m := &model.TransactionStatus{} - err = m.SetError(p.DbTransaction, errText, hash) - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("setting transaction status error") - return utils.ErrInfo(err) - } - } - p.DeleteQueueTx(hash) - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting transaction from queue") - return utils.ErrInfo(err) - } - - return nil -} - -// ProcessBadTransaction processes bad transactions -func (p *Parser) ProcessBadTransaction(err error) { - if p.TxHash != nil { - model.MarkTransactionUsed(p.DbTransaction, p.TxHash) - p.processBadTransaction(p.TxHash, err.Error()) - } -} - -// DeleteQueueTx deletes a transaction from the queue -func (p *Parser) DeleteQueueTx(hash []byte) error { - logger := p.GetLogger() - delQueueTx := &model.QueueTx{Hash: hash} - err := delQueueTx.DeleteTx(p.DbTransaction) - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": 
err}).Error("deleting transaction from queue") - return utils.ErrInfo(err) - } - // Because we process transactions with verified=0 in queue_parser_tx, after processing we need to delete them - _, err = model.DeleteTransactionIfUnused(p.DbTransaction, hash) - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting transaction if unused") - return utils.ErrInfo(err) - } - return nil -} - -// AllTxParser parses new transactions -func (p *Parser) AllTxParser() error { - logger := p.GetLogger() - all, err := model.GetAllUnverifiedAndUnusedTransactions() - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting all unverified and unused transactions") - return err - } - for _, data := range all { - err := p.TxParser(data.Hash, data.Data, false) - if err != nil { - return utils.ErrInfo(err) - } - logger.Debug("transaction parsed successfully") - } - return nil -} diff --git a/packages/parser/db.go b/packages/parser/db.go index aacec8f4f..d0e4893a2 100644 --- a/packages/parser/db.go +++ b/packages/parser/db.go @@ -3,6 +3,7 @@ package parser import ( "bytes" "encoding/json" + "errors" "fmt" "time" @@ -217,3 +218,134 @@ func GetBlockDataFromBlockChain(blockID int64) (*utils.BlockData, error) { BlockData.Hash = block.Hash return BlockData, nil } + +// DeleteQueueTx deletes a transaction from the queue +func DeleteQueueTx(dbTransaction *model.DbTransaction, hash []byte) error { + delQueueTx := &model.QueueTx{Hash: hash} + err := delQueueTx.DeleteTx(dbTransaction) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting transaction from queue") + return utils.ErrInfo(err) + } + // Because we process transactions with verified=0 in queue_parser_tx, after processing we need to delete them + _, err = model.DeleteTransactionIfUnused(dbTransaction, hash) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting transaction if unused") + return utils.ErrInfo(err) + } + return nil +} + +func MarkTransactionBad(dbTransaction *model.DbTransaction, hash []byte, errText string) error { + if hash == nil { + return nil + } + model.MarkTransactionUsed(dbTransaction, hash) + if len(errText) > 255 { + errText = errText[:255] + } + // looks like there is not hash in queue_tx in this moment + qtx := &model.QueueTx{} + _, err := qtx.GetByHash(dbTransaction, hash) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting tx by hash from queue") + } + + if qtx.FromGate == 0 { + m := &model.TransactionStatus{} + err = m.SetError(dbTransaction, errText, hash) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("setting transaction status error") + return utils.ErrInfo(err) + } + } + err = DeleteQueueTx(dbTransaction, hash) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting transaction from queue") + return utils.ErrInfo(err) + } + + return nil +} + +// TxParser writes transactions into the queue +func ProcessQueueTransaction(dbTransaction *model.DbTransaction, hash, binaryTx []byte, myTx bool) error { + // get parameters for "struct" transactions + txType, keyID := GetTxTypeAndUserID(binaryTx) + + header, err := CheckTransaction(binaryTx) + if err != nil { + MarkTransactionBad(dbTransaction, hash, err.Error()) + return err + } + + if !( /*txType > 127 ||*/ consts.IsStruct(int(txType))) { + if header == nil { + 
log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("tx header is nil") + return utils.ErrInfo(errors.New("header is nil")) + } + keyID = header.KeyID + } + + if keyID == 0 { + errStr := "undefined keyID" + MarkTransactionBad(dbTransaction, hash, errStr) + return errors.New(errStr) + } + + tx := &model.Transaction{} + _, err = tx.Get(hash) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting transaction by hash") + return utils.ErrInfo(err) + } + counter := tx.Counter + counter++ + _, err = model.DeleteTransactionByHash(hash) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting transaction by hash") + return utils.ErrInfo(err) + } + + // put with verified=1 + newTx := &model.Transaction{ + Hash: hash, + Data: binaryTx, + Type: int8(txType), + KeyID: keyID, + Counter: counter, + Verified: 1, + HighRate: tx.HighRate, + } + err = newTx.Create() + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating new transaction") + return utils.ErrInfo(err) + } + + // remove transaction from the queue (with verified=0) + err = DeleteQueueTx(dbTransaction, hash) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting transaction from queue") + return utils.ErrInfo(err) + } + + return nil +} + +// AllTxParser parses new transactions +func ProcessTransactionsQueue(dbTransaction *model.DbTransaction) error { + all, err := model.GetAllUnverifiedAndUnusedTransactions() + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting all unverified and unused transactions") + return err + } + for _, data := range all { + err := ProcessQueueTransaction(dbTransaction, data.Hash, data.Data, false) + if err != nil { + return utils.ErrInfo(err) + } + log.Debug("transaction parsed successfully") + } + return nil +} From b6dbc1d6bca6b228bd04c38438cd46bba73e93bc Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 12:40:33 +0300 Subject: [PATCH 032/169] move changes --- cmd/config.go | 4 +- packages/conf/conf.go | 26 +++ packages/conf/runmode.go | 40 ++++ packages/migration/{ => vde}/vde.go | 0 packages/smart/funcs.go | 41 ++++ packages/vdemanager/config.go | 65 +++++++ packages/vdemanager/manager.go | 286 ++++++++++++++++++++++++++++ 7 files changed, 460 insertions(+), 2 deletions(-) create mode 100644 packages/conf/runmode.go rename packages/migration/{ => vde}/vde.go (100%) create mode 100644 packages/vdemanager/config.go create mode 100644 packages/vdemanager/manager.go diff --git a/cmd/config.go b/cmd/config.go index c6779160a..90ace6d01 100644 --- a/cmd/config.go +++ b/cmd/config.go @@ -136,7 +136,7 @@ func init() { configCmd.Flags().StringVar(&conf.Config.TLSKey, "tls-key", "", "Filepath to the private key") configCmd.Flags().Int64Var(&conf.Config.MaxPageGenerationTime, "mpgt", 1000, "Max page generation time in ms") configCmd.Flags().StringSliceVar(&conf.Config.NodesAddr, "nodesAddr", []string{}, "List of addresses for downloading blockchain") - configCmd.Flags().BoolVar(&conf.Config.PrivateBlockchain, "privateBlockchain", false, "Is blockchain private") + configCmd.Flags().StringVar(&conf.Config.RunningMode, "runMode", "CommonBlockchain", "Node running mode") viper.BindPFlag("PidFilePath", configCmd.Flags().Lookup("pid")) viper.BindPFlag("LockFilePath", configCmd.Flags().Lookup("lock")) @@ -147,7 +147,7 @@ func init() { viper.BindPFlag("TLSCert", 
configCmd.Flags().Lookup("tls-cert")) viper.BindPFlag("TLSKey", configCmd.Flags().Lookup("tls-key")) viper.BindPFlag("MaxPageGenerationTime", configCmd.Flags().Lookup("mpgt")) - viper.BindPFlag("PrivateBlockchain", configCmd.Flags().Lookup("privateBlockchain")) viper.BindPFlag("TempDir", configCmd.Flags().Lookup("tempDir")) viper.BindPFlag("NodesAddr", configCmd.Flags().Lookup("nodesAddr")) + viper.BindPFlag("RunningMode", configCmd.Flags().Lookup("runMode")) } diff --git a/packages/conf/conf.go b/packages/conf/conf.go index 7ac76b6e8..2e175b01c 100644 --- a/packages/conf/conf.go +++ b/packages/conf/conf.go @@ -89,6 +89,7 @@ type GlobalConfig struct { TLS bool // TLS is on/off. It is required for https TLSCert string // TLSCert is a filepath of the fullchain of certificate. TLSKey string // TLSKey is a filepath of the private key. + RunningMode string MaxPageGenerationTime int64 // in milliseconds @@ -216,3 +217,28 @@ func FillRuntimeKey() error { func GetNodesAddr() []string { return Config.NodesAddr[:] } + +// IsPrivateBlockchain check running mode +func (c *GlobalConfig) IsPrivateBlockchain() bool { + return RunMode(c.RunningMode).IsPrivateBlockchain() +} + +// IsPublicBlockchain check running mode +func (c *GlobalConfig) IsPublicBlockchain() bool { + return RunMode(c.RunningMode).IsPublicBlockchain() +} + +// IsVDE check running mode +func (c *GlobalConfig) IsVDE() bool { + return RunMode(c.RunningMode).IsVDE() +} + +// IsVDEMaster check running mode +func (c *GlobalConfig) IsVDEMaster() bool { + return RunMode(c.RunningMode).IsVDEMaster() +} + +// IsSupportingVDE check running mode +func (c *GlobalConfig) IsSupportingVDE() bool { + return RunMode(c.RunningMode).IsSupportingVDE() +} diff --git a/packages/conf/runmode.go b/packages/conf/runmode.go new file mode 100644 index 000000000..a03f2aeb0 --- /dev/null +++ b/packages/conf/runmode.go @@ -0,0 +1,40 @@ +package conf + +// PrivateBlockchain const label for running mode +const privateBlockchain RunMode = "PrivateBlockchain" + +// PublicBlockchain const label for running mode +const publicBlockchain RunMode = "PublicBlockchain" + +// VDEManager const label for running mode +const vdeMaster RunMode = "VDEMaster" + +// VDE const label for running mode +const vde RunMode = "VDE" + +type RunMode string + +// IsPublicBlockchain returns true if mode equal PublicBlockchain +func (rm RunMode) IsPublicBlockchain() bool { + return rm == publicBlockchain +} + +// IsPrivateBlockchain returns true if mode equal PrivateBlockchain +func (rm RunMode) IsPrivateBlockchain() bool { + return rm == privateBlockchain +} + +// IsVDEMaster returns true if mode equal vdeMaster +func (rm RunMode) IsVDEMaster() bool { + return rm == vdeMaster +} + +// IsVDE returns true if mode equal vde +func (rm RunMode) IsVDE() bool { + return rm == vde +} + +// IsSupportingVDE returns true if mode support vde +func (rm RunMode) IsSupportingVDE() bool { + return rm.IsVDE() || rm.IsVDEMaster() +} diff --git a/packages/migration/vde.go b/packages/migration/vde/vde.go similarity index 100% rename from packages/migration/vde.go rename to packages/migration/vde/vde.go diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index df6e8dd22..8358cb003 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -46,6 +46,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/script" "github.com/GenesisKernel/go-genesis/packages/utils" "github.com/GenesisKernel/go-genesis/packages/utils/tx" + "github.com/GenesisKernel/go-genesis/packages/vdemanager" 
"github.com/satori/go.uuid" "github.com/shopspring/decimal" @@ -254,6 +255,21 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { f["UpdateCron"] = UpdateCron vmExtendCost(vm, getCost) vmFuncCallsDB(vm, funcCallsDB) + case script.VMTypeVDEMaster: + f["HTTPRequest"] = HTTPRequest + f["GetMapKeys"] = GetMapKeys + f["SortedKeys"] = SortedKeys + f["Date"] = Date + f["HTTPPostJSON"] = HTTPPostJSON + f["ValidateCron"] = ValidateCron + f["UpdateCron"] = UpdateCron + f["CreateVDE"] = CreateVDE + f["DeleteVDE"] = DeleteVDE + f["StartVDE"] = StartVDE + f["StopVDE"] = StopVDE + f["GetVDEList"] = GetVDEList + vmExtendCost(vm, getCost) + vmFuncCallsDB(vm, funcCallsDB) case script.VMTypeSmart: f["GetBlock"] = GetBlock f["UpdateNodesBan"] = UpdateNodesBan @@ -1670,3 +1686,28 @@ func StringToBytes(src string) []byte { func BytesToString(src []byte) string { return string(src) } + +// CreateVDE allow create new VDE throw vdemanager +func CreateVDE(sc *SmartContract, name, dbUser, dbPassword string, port int64) error { + return vdemanager.Manager.CreateVDE(name, dbUser, dbPassword, int(port)) +} + +// DeleteVDE delete vde +func DeleteVDE(sc *SmartContract, name string) error { + return vdemanager.Manager.DeleteVDE(name) +} + +// StartVDE run VDE process +func StartVDE(sc *SmartContract, name string) error { + return vdemanager.Manager.StartVDE(name) +} + +// StopVDE stops VDE process +func StopVDE(sc *SmartContract, name string) error { + return vdemanager.Manager.StopVDE(name) +} + +// GetVDEList returns list VDE process with statuses +func GetVDEList(sc *SmartContract, name string) (map[string]string, error) { + return vdemanager.Manager.ListProcess() +} diff --git a/packages/vdemanager/config.go b/packages/vdemanager/config.go new file mode 100644 index 000000000..450ff5aac --- /dev/null +++ b/packages/vdemanager/config.go @@ -0,0 +1,65 @@ +package vdemanager + +import ( + "fmt" + "os/exec" + "path/filepath" +) + +const ( + inidDBCommand = "initDatabase" + genKeysCommand = "generateKeys" + startCommand = "start" +) +// ChildVDEConfig struct to manage child entry +type ChildVDEConfig struct { + Executable string + Name string + Directory string + DBUser string + DBPassword string + ConfigFileName string + HTTPPort int +} + +func (c ChildVDEConfig) configCommand() *exec.Cmd { + + args := []string{ + "config", + fmt.Sprintf("--path=%s", c.configPath()), + fmt.Sprintf("--dbUser=%s", c.DBUser), + fmt.Sprintf("--dbPassword=%s", c.DBPassword), + fmt.Sprintf("--dbName=%s", c.Name), + fmt.Sprintf("--httpPort=%d", c.HTTPPort) + fmt.Sprintf("--dataDir=%s", c.Directory), + fmt.Sprintf("--keysDir=%s", c.Directory), + fmt.Sprintf("--runMode=VDE") + } + + return exec.Command(c.Executable, args...) +} + +func (c ChildVDEConfig) initDBCommand() exec.Cmd { + return getCommand(inidDBCommand) +} + +func (c ChildVDEConfig) generateKeysCommand() exec.Cmd { + return getCommand(genKeysCommand) +} + +func (c ChildVDEConfig) startCommand() exec.Cmd { + retturn getCommand(startCommand) +} + +func (c ChildVDEConfig) configPath() string { + return filepath.Join(c.Directory, ConfigFileName) +} + +func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { + return args := []string{ + commandName, + fmt.Sprintf("--config=%s", c.configPath()), + } + + return exec.Command(c.Executable, args...) 
+}
\ No newline at end of file
diff --git a/packages/vdemanager/manager.go b/packages/vdemanager/manager.go
new file mode 100644
index 000000000..d35362ce2
--- /dev/null
+++ b/packages/vdemanager/manager.go
@@ -0,0 +1,286 @@
+package vdemanager
+
+import (
+	"errors"
+	"fmt"
+	"io/ioutil"
+	"os"
+	"path"
+	"path/filepath"
+
+	"github.com/GenesisKernel/go-genesis/packages/conf"
+
+	"github.com/GenesisKernel/go-genesis/packages/consts"
+	"github.com/GenesisKernel/go-genesis/packages/model"
+	pConf "github.com/rpoletaev/supervisord/config"
+	"github.com/rpoletaev/supervisord/process"
+	log "github.com/sirupsen/logrus"
+)
+
+const (
+	childFolder        = "configs"
+	createRoleTemplate = `CREATE ROLE %s WITH ENCRYPTED PASSWORD '%s' NOSUPERUSER NOCREATEDB NOCREATEROLE INHERIT LOGIN`
+	createDBTemplate   = `CREATE DATABASE %s OWNER %s`
+
+	dropDBTemplate     = `DROP OWNED BY %s CASCADE`
+	dropDBRoleTemplate = `DROP ROLE IF EXISTS %s`
+	commandTemplate    = `%s -VDEMode=true -configPath=%s -workDir=%s`
+)
+
+var (
+	errWrongMode = errors.New("node must be running as VDEMaster")
+)
+
+// VDEManager manages child VDE processes
+type VDEManager struct {
+	processes *process.ProcessManager
+}
+
+var (
+	Manager          *VDEManager
+	childConfigsPath string
+)
+
+// InitVDEManager creates the instance of VDEManager
+func InitVDEManager() error {
+	if err := prepareWorkDir(); err != nil {
+		return err
+	}
+
+	return initProcessManager()
+}
+
+func prepareWorkDir() error {
+	childConfigsPath = path.Join(conf.Config.DataDir, childFolder)
+
+	if _, err := os.Stat(childConfigsPath); os.IsNotExist(err) {
+		if err := os.Mkdir(childConfigsPath, 0700); err != nil {
+			log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("creating configs directory")
+			return err
+		}
+	}
+
+	return nil
+}
+
+// CreateVDE creates one instance of VDE
+func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) error {
+	config := ChildVDEConfig{
+		Executable:     path.Join(conf.Config.DataDir, consts.NodeExecutableFileName),
+		Name:           name,
+		Directory:      path.Join(childConfigsPath, name),
+		DBUser:         dbUser,
+		DBPassword:     dbPassword,
+		ConfigFileName: consts.DefaultConfigFile,
+		HTTPPort:       port,
+	}
+
+	if mgr.processes == nil {
+		log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("creating new VDE")
+		return errWrongMode
+	}
+
+	if err := mgr.createVDEDB(name, dbUser, dbPassword); err != nil {
+		log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on creating VDE DB")
+		return err
+	}
+
+	if err := mgr.initVDEDir(name); err != nil {
+		log.WithFields(log.Fields{"type": consts.IOError, "DirName": name, "error": err}).Error("on init VDE dir")
+		return err
+	}
+
+	cmd := config.configCommand()
+	if err := cmd.Run(); err != nil {
+		log.WithFields(log.Fields{"type": consts.IOError, "args": cmd.Args}).Error("on run config command")
+		return err
+	}
+
+	if err := config.generateKeysCommand().Run(); err != nil {
+		log.WithFields(log.Fields{"type": consts.IOError, "args": cmd.Args}).Error("on run generateKeys command")
+		return err
+	}
+
+	if err := config.initDBCommand().Run(); err != nil {
+		log.WithFields(log.Fields{"type": consts.IOError, "args": cmd.Args}).Error("on run initDB command")
+		return err
+	}
+
+	procConfEntry := pConf.NewConfigEntry(config.Directory)
+	procConfEntry.Name = "program:" + name
+	command := fmt.Sprintf("%s --configPath=%s", config.Executable, config.Directory)
+	procConfEntry.AddKeyValue("command", command)
+	proc := process.NewProcess("vdeMaster", procConfEntry)
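+	// register the new VDE in the process manager under its name and start it immediately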
+	mgr.processes.Add(name, proc)
+	mgr.processes.Find(name).Start(true)
+	return nil
+}
+
+// ListProcess returns a list of process names with their states
+func (mgr *VDEManager) ListProcess() (map[string]string, error) {
+	if mgr.processes == nil {
+		log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("get VDE list")
+		return nil, errWrongMode
+	}
+
+	list := make(map[string]string)
+
+	mgr.processes.ForEachProcess(func(p *process.Process) {
+		list[p.GetName()] = p.GetState().String()
+	})
+
+	return list, nil
+}
+
+// DeleteVDE stops the VDE process and removes the VDE folder
+func (mgr *VDEManager) DeleteVDE(name string) error {
+	if mgr.processes == nil {
+		log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("deleting VDE")
+		return errWrongMode
+	}
+
+	p := mgr.processes.Find(name)
+	if p != nil {
+		p.Stop(true)
+	}
+
+	vdeDir := path.Join(childConfigsPath, name)
+	vdeConfigPath := filepath.Join(vdeDir, consts.DefaultConfigFile)
+	vdeConfig, err := conf.GetConfigFromPath(vdeConfigPath)
+	if err != nil {
+		log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Errorf("Getting config from path %s", vdeConfigPath)
+		return err
+	}
+
+	dropDBquery := fmt.Sprintf(dropDBTemplate, vdeConfig.DB.User)
+	if err := model.DBConn.Exec(dropDBquery).Error; err != nil {
+		log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("Deleting vde db")
+		return err
+	}
+
+	dropVDERoleQuery := fmt.Sprintf(dropDBRoleTemplate, vdeConfig.DB.User)
+	if err := model.DBConn.Exec(dropVDERoleQuery).Error; err != nil {
+		log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("Deleting vde db user")
+		return err
+	}
+
+	return os.RemoveAll(vdeDir)
+}
+
+// StartVDE finds the VDE process and starts it
+func (mgr *VDEManager) StartVDE(name string) error {
+	if mgr.processes == nil {
+		log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("starting VDE")
+		return errWrongMode
+	}
+
+	proc := mgr.processes.Find(name)
+	if proc == nil {
+		err := fmt.Errorf(`VDE '%s' does not exist`, name)
+		log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on find VDE process")
+		return err
+	}
+
+	state := proc.GetState()
+	if state == process.STOPPED ||
+		state == process.EXITED ||
+		state == process.FATAL {
+		proc.Start(true)
+		log.WithFields(log.Fields{"vde_name": name}).Info("VDE started")
+		return nil
+	}
+
+	err := fmt.Errorf("VDE '%s' is %s", name, state)
+	log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on starting VDE")
+	return err
+}
+
+// StopVDE finds the VDE process with the given name and stops it
+func (mgr *VDEManager) StopVDE(name string) error {
+	if mgr.processes == nil {
+		log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("on stopping VDE process")
+		return errWrongMode
+	}
+
+	proc := mgr.processes.Find(name)
+	if proc == nil {
+		err := fmt.Errorf(`VDE '%s' does not exist`, name)
+		log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on find VDE process")
+		return err
+	}
+
+	state := proc.GetState()
+	if state == process.RUNNING ||
+		state == process.STARTING {
+		proc.Stop(true)
+		log.WithFields(log.Fields{"vde_name": name}).Info("VDE is stopped")
+		return nil
+	}
+
+	err := fmt.Errorf("VDE '%s' is %s", name, state)
+	log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on stopping VDE")
+	return err
+}
+
+func (mgr *VDEManager)
createVDEDB(vdeName, login, pass string) error { + + if err := model.DBConn.Exec(fmt.Sprintf(createRoleTemplate, login, pass)).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating VDE DB User") + return err + } + + if err := model.DBConn.Exec(fmt.Sprintf(createDBTemplate, vdeName, login)).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating VDE DB") + return err + } + + return nil +} + +func (mgr *VDEManager) initVDEDir(vdeName string) error { + + vdeDirName := path.Join(childConfigsPath, vdeName) + if _, err := os.Stat(vdeDirName); os.IsNotExist(err) { + if err := os.Mkdir(vdeDirName, 0700); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("creating VDE directory") + return err + } + } + + return nil +} + +func initProcessManager() error { + Manager = &VDEManager{ + processes: process.NewProcessManager(), + } + + list, err := ioutil.ReadDir(childConfigsPath) + if err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err, "path": childConfigsPath}).Error("Initialising VDE list") + return err + } + + for _, item := range list { + if item.IsDir() { + procDir := path.Join(childConfigsPath, item.Name()) + commandStr := fmt.Sprintf(commandTemplate, bin(), filepath.Join(procDir, consts.DefaultConfigFile), procDir) + confEntry := pConf.NewConfigEntry(procDir) + confEntry.Name = "program:" + item.Name() + confEntry.AddKeyValue("command", commandStr) + confEntry.AddKeyValue("redirect_stderr", "true") + confEntry.AddKeyValue("autostart", "true") + confEntry.AddKeyValue("autorestart", "true") + + proc := process.NewProcess("vdeMaster", confEntry) + Manager.processes.Add(item.Name(), proc) + } + } + + return nil +} From 1b8cfd001ac59a8f939fbc133a6db55146d3d696 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:37:01 +0300 Subject: [PATCH 033/169] setup vde mode for vm in default handler --- packages/api/api.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/api/api.go b/packages/api/api.go index 1bdeb76d2..44a877c47 100644 --- a/packages/api/api.go +++ b/packages/api/api.go @@ -30,6 +30,7 @@ import ( hr "github.com/julienschmidt/httprouter" log "github.com/sirupsen/logrus" + "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/model" @@ -241,10 +242,8 @@ func fillToken(w http.ResponseWriter, r *http.Request, data *apiData, logger *lo func fillParams(params map[string]int) apiHandle { return func(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.Entry) error { - // Getting and validating request parameters - vde := r.FormValue(`vde`) - if vde == `1` || vde == `true` { - data.vm = smart.GetVM(true, data.ecosystemId) + if conf.Config.IsSupportingVDE() { + data.vm = smart.GetVM(true, consts.DefaultVDE) if data.vm == nil { return errorAPI(w, `E_VDE`, http.StatusBadRequest, data.ecosystemId) } @@ -252,6 +251,7 @@ func fillParams(params map[string]int) apiHandle { } else { data.vm = smart.GetVM(false, 0) } + for key, par := range params { val := r.FormValue(key) if par&pOptional == 0 && len(val) == 0 { From 3bd5d172983716fa5d4cd8b8b76e5d1aa0c14304 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:37:49 +0300 Subject: [PATCH 034/169] separate routes by vde --- packages/api/route.go | 26 
+++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/packages/api/route.go b/packages/api/route.go index 1fb9e45b0..be778c441 100644 --- a/packages/api/route.go +++ b/packages/api/route.go @@ -19,6 +19,7 @@ package api import ( "strings" + "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/utils/tx" @@ -50,14 +51,8 @@ func Route(route *hr.Router) { route.Handle(`OPTIONS`, consts.ApiPath+`*name`, optionsHandler()) route.Handle(`GET`, consts.ApiPath+`data/:table/:id/:column/:hash`, dataHandler()) - get(`appparam/:appid/:name`, `?ecosystem:int64`, authWallet, appParam) - get(`appparams/:appid`, `?ecosystem:int64,?names:string`, authWallet, appParams) - get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) get(`contract/:name`, ``, authWallet, getContract) get(`contracts`, `?limit ?offset:int64`, authWallet, getContracts) - get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) - get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) - get(`ecosystems`, ``, authWallet, ecosystems) get(`getuid`, ``, getUID) get(`list/:name`, `?limit ?offset:int64,?columns:string`, authWallet, list) get(`row/:name/:id`, `?columns:string`, authWallet, row) @@ -67,11 +62,7 @@ func Route(route *hr.Router) { get(`systemparams`, `?names:string`, authWallet, systemParams) get(`table/:name`, ``, authWallet, table) get(`tables`, `?limit ?offset:int64`, authWallet, tables) - get(`txstatus/:hash`, ``, authWallet, txstatus) get(`test/:name`, ``, getTest) - get(`history/:table/:id`, ``, authWallet, getHistory) - get(`block/:id`, ``, getBlockInfo) - get(`maxblockid`, ``, getMaxBlockID) get(`version`, ``, getVersion) get(`avatar/:ecosystem/:member`, ``, getAvatar) get(`config/:option`, ``, getConfigOption) @@ -80,7 +71,6 @@ func Route(route *hr.Router) { post(`content/page/:name`, `?lang:string`, authWallet, getPage) post(`content/menu/:name`, `?lang:string`, authWallet, getMenu) post(`content/hash/:name`, ``, getPageHash) - post(`vde/create`, ``, authWallet, vdeCreate) post(`login`, `?pubkey signature:hex,?key_id ?mobile:string,?ecosystem ?expire ?role_id:int64`, login) post(`prepare/:name`, `?token_ecosystem:int64,?max_sum ?payover:string`, authWallet, contractHandlers.prepareContract) post(`prepareMultiple`, `data:string`, authWallet, contractHandlers.prepareMultipleContract) @@ -93,6 +83,20 @@ func Route(route *hr.Router) { post(`updnotificator`, `ids:string`, updateNotificator) methodRoute(route, `POST`, `node/:name`, `?token_ecosystem:int64,?max_sum ?payover:string`, contractHandlers.nodeContract) + + if !conf.Config.IsSupportingVDE() { + get(`appparam/:appid/:name`, `?ecosystem:int64`, authWallet, appParam) + get(`appparams/:appid`, `?ecosystem:int64,?names:string`, authWallet, appParams) + get(`txstatus/:hash`, ``, authWallet, txstatus) + get(`history/:table/:id`, ``, authWallet, getHistory) + get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) + get(`block/:id`, ``, getBlockInfo) + get(`maxblockid`, ``, getMaxBlockID) + get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) + get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) + get(`systemparams`, `?names:string`, authWallet, systemParams) + get(`ecosystems`, ``, authWallet, ecosystems) + } } func processParams(input string) (params map[string]int) { From 94ac428c3b9dde7932368aaf3cb6535467779ec7 Mon Sep 
17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:38:36 +0300 Subject: [PATCH 035/169] separate vde migration to own package --- packages/migration/vde/vde.go | 2 +- packages/model/db.go | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde.go index b63cf858d..640338e93 100644 --- a/packages/migration/vde/vde.go +++ b/packages/migration/vde/vde.go @@ -1,4 +1,4 @@ -package migration +package vde var SchemaVDE = ` DROP TABLE IF EXISTS "%[1]d_vde_members"; diff --git a/packages/model/db.go b/packages/model/db.go index 4a6c7b0be..db9665979 100644 --- a/packages/model/db.go +++ b/packages/model/db.go @@ -10,6 +10,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/crypto" "github.com/GenesisKernel/go-genesis/packages/migration" + "github.com/GenesisKernel/go-genesis/packages/migration/vde" "github.com/jinzhu/gorm" log "github.com/sirupsen/logrus" @@ -155,7 +156,7 @@ func ExecSchemaEcosystem(db *DbTransaction, id int, wallet int64, name string, f // ExecSchemaLocalData is executing schema with local data func ExecSchemaLocalData(id int, wallet int64) error { - return DBConn.Exec(fmt.Sprintf(migration.SchemaVDE, id, wallet)).Error + return DBConn.Exec(fmt.Sprintf(vde.SchemaVDE, id, wallet)).Error } // ExecSchema is executing schema From 1bdabf41fab080d730e326c5ae534a2a35352230 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 15:59:38 +0300 Subject: [PATCH 036/169] temp commit --- packages/consts/log_types.go | 2 ++ packages/smart/smart.go | 10 +++++++++- packages/vdemanager/config.go | 29 +++++++++++++++-------------- packages/vdemanager/manager.go | 10 ++++++++-- 4 files changed, 34 insertions(+), 17 deletions(-) diff --git a/packages/consts/log_types.go b/packages/consts/log_types.go index d44b81bad..5f421a00b 100644 --- a/packages/consts/log_types.go +++ b/packages/consts/log_types.go @@ -54,4 +54,6 @@ const ( BCActualizationError = "BCActualizationError" SchedulerError = "SchedulerError" SyncProcess = "SyncProcess" + WrongModeError = "WrongModeError" + VDEManagerError = "VDEManagerError" ) diff --git a/packages/smart/smart.go b/packages/smart/smart.go index bee64083b..ea25a1c7d 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -486,7 +486,15 @@ func LoadVDEContracts(transaction *model.DbTransaction, prefix string) (err erro } state := converter.StrToInt64(prefix) vm := newVM() - EmbedFuncs(vm, script.VMTypeVDE) + + var vmt script.VMType + if conf.Config.IsVDE() { + vmt = script.VMTypeVDE + } else if conf.Config.IsVDEMaster() { + vmt = script.VMTypeVDEMaster + } + + EmbedFuncs(vm, vmt) smartVDE[state] = vm LoadSysFuncs(vm, int(state)) for _, item := range contracts { diff --git a/packages/vdemanager/config.go b/packages/vdemanager/config.go index 450ff5aac..bcafa10ff 100644 --- a/packages/vdemanager/config.go +++ b/packages/vdemanager/config.go @@ -7,10 +7,11 @@ import ( ) const ( - inidDBCommand = "initDatabase" + inidDBCommand = "initDatabase" genKeysCommand = "generateKeys" - startCommand = "start" + startCommand = "start" ) + // ChildVDEConfig struct to manage child entry type ChildVDEConfig struct { Executable string @@ -30,36 +31,36 @@ func (c ChildVDEConfig) configCommand() *exec.Cmd { fmt.Sprintf("--dbUser=%s", c.DBUser), fmt.Sprintf("--dbPassword=%s", c.DBPassword), fmt.Sprintf("--dbName=%s", c.Name), - fmt.Sprintf("--httpPort=%d", c.HTTPPort) + fmt.Sprintf("--httpPort=%d", c.HTTPPort), 
fmt.Sprintf("--dataDir=%s", c.Directory), fmt.Sprintf("--keysDir=%s", c.Directory), - fmt.Sprintf("--runMode=VDE") + "--runMode=VDE", } return exec.Command(c.Executable, args...) } -func (c ChildVDEConfig) initDBCommand() exec.Cmd { - return getCommand(inidDBCommand) +func (c ChildVDEConfig) initDBCommand() *exec.Cmd { + return c.getCommand(inidDBCommand) } -func (c ChildVDEConfig) generateKeysCommand() exec.Cmd { - return getCommand(genKeysCommand) +func (c ChildVDEConfig) generateKeysCommand() *exec.Cmd { + return c.getCommand(genKeysCommand) } -func (c ChildVDEConfig) startCommand() exec.Cmd { - retturn getCommand(startCommand) +func (c ChildVDEConfig) startCommand() *exec.Cmd { + return c.getCommand(startCommand) } func (c ChildVDEConfig) configPath() string { - return filepath.Join(c.Directory, ConfigFileName) + return filepath.Join(c.Directory, c.ConfigFileName) } -func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { - return args := []string{ +func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { + args := []string{ commandName, fmt.Sprintf("--config=%s", c.configPath()), } return exec.Command(c.Executable, args...) -} \ No newline at end of file +} diff --git a/packages/vdemanager/manager.go b/packages/vdemanager/manager.go index d35362ce2..d1a37d413 100644 --- a/packages/vdemanager/manager.go +++ b/packages/vdemanager/manager.go @@ -66,10 +66,16 @@ func prepareWorkDir() error { // CreateVDE creates one instance of VDE func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) error { + execPath, err := os.Executable() + if err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("on getting executable path") + return err + } + config := ChildVDEConfig{ - Executable: path.Join(conf.Config.DataDir, consts.NodeExecutableFileName), + Executable: execPath, Name: name, - Directory: path.Join(childConfigsPath, name) + Directory: path.Join(childConfigsPath, name), DBUser: dbUser, DBPassword: dbPassword, ConfigFileName: consts.DefaultConfigFile, From 5c986fb4c1292047390503694c0815b660109384 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Tue, 8 May 2018 09:59:10 +0300 Subject: [PATCH 037/169] temporary commit --- cmd/config.go | 2 +- packages/conf/conf.go | 35 +++++++++-- packages/consts/consts.go | 3 + packages/daemons/common.go | 13 ++++- packages/daylight/daemonsctl/daemonsctl.go | 27 ++++----- packages/daylight/start.go | 44 +++++++------- packages/script/vminit.go | 2 + packages/smart/smart.go | 6 +- packages/tcpserver/tcpserver.go | 6 ++ packages/vdemanager/manager.go | 68 ++++++++++------------ 10 files changed, 123 insertions(+), 83 deletions(-) diff --git a/cmd/config.go b/cmd/config.go index 90ace6d01..127f7c827 100644 --- a/cmd/config.go +++ b/cmd/config.go @@ -136,7 +136,7 @@ func init() { configCmd.Flags().StringVar(&conf.Config.TLSKey, "tls-key", "", "Filepath to the private key") configCmd.Flags().Int64Var(&conf.Config.MaxPageGenerationTime, "mpgt", 1000, "Max page generation time in ms") configCmd.Flags().StringSliceVar(&conf.Config.NodesAddr, "nodesAddr", []string{}, "List of addresses for downloading blockchain") - configCmd.Flags().StringVar(&conf.Config.RunningMode, "runMode", "CommonBlockchain", "Node running mode") + configCmd.Flags().StringVar(&conf.Config.RunningMode, "runMode", "PublicBlockchain", "Node running mode") viper.BindPFlag("PidFilePath", configCmd.Flags().Lookup("pid")) viper.BindPFlag("LockFilePath", configCmd.Flags().Lookup("lock")) diff --git a/packages/conf/conf.go 
b/packages/conf/conf.go
index 2e175b01c..b91be9b38 100644
--- a/packages/conf/conf.go
+++ b/packages/conf/conf.go
@@ -133,10 +133,33 @@ func LoadConfig(path string) error {
 	if err != nil {
 		return errors.Wrapf(err, "marshalling config to global struct variable")
 	}
-
 	return nil
 }
 
+// GetConfigFromPath reads the config file at the given path and returns a GlobalConfig struct
+func GetConfigFromPath(path string) (*GlobalConfig, error) {
+	log.WithFields(log.Fields{"path": path}).Info("Loading config")
+
+	_, err := os.Stat(path)
+	if os.IsNotExist(err) {
+		return nil, errors.Errorf("Unable to load config file %s", path)
+	}
+
+	viper.SetConfigFile(path)
+	err = viper.ReadInConfig()
+	if err != nil {
+		return nil, errors.Wrapf(err, "reading config")
+	}
+
+	c := &GlobalConfig{}
+	err = viper.Unmarshal(c)
+	if err != nil {
+		return c, errors.Wrapf(err, "marshalling config to global struct variable")
+	}
+
+	return c, nil
+}
+
 // SaveConfig save global parameters to configFile
 func SaveConfig(path string) error {
 	dir := filepath.Dir(path)
@@ -219,26 +242,26 @@ func GetNodesAddr() []string {
 }
 
 // IsPrivateBlockchain check running mode
-func (c *GlobalConfig) IsPrivateBlockchain() bool {
+func (c GlobalConfig) IsPrivateBlockchain() bool {
 	return RunMode(c.RunningMode).IsPrivateBlockchain()
 }
 
 // IsPublicBlockchain check running mode
-func (c *GlobalConfig) IsPublicBlockchain() bool {
+func (c GlobalConfig) IsPublicBlockchain() bool {
 	return RunMode(c.RunningMode).IsPublicBlockchain()
 }
 
 // IsVDE check running mode
-func (c *GlobalConfig) IsVDE() bool {
+func (c GlobalConfig) IsVDE() bool {
 	return RunMode(c.RunningMode).IsVDE()
 }
 
 // IsVDEMaster check running mode
-func (c *GlobalConfig) IsVDEMaster() bool {
+func (c GlobalConfig) IsVDEMaster() bool {
 	return RunMode(c.RunningMode).IsVDEMaster()
 }
 
 // IsSupportingVDE check running mode
-func (c *GlobalConfig) IsSupportingVDE() bool {
+func (c GlobalConfig) IsSupportingVDE() bool {
 	return RunMode(c.RunningMode).IsSupportingVDE()
 }
diff --git a/packages/consts/consts.go b/packages/consts/consts.go
index 9684221d7..45b07c9b6 100644
--- a/packages/consts/consts.go
+++ b/packages/consts/consts.go
@@ -157,3 +157,6 @@ const TxRequestExpire = 1 * time.Minute
 
 // DefaultTempDirName is default name of temporary directory
 const DefaultTempDirName = "genesis-temp"
+
+// DefaultVDE is always 1
+const DefaultVDE = 1
diff --git a/packages/daemons/common.go b/packages/daemons/common.go
index 861c03983..8f1bb4d6b 100644
--- a/packages/daemons/common.go
+++ b/packages/daemons/common.go
@@ -130,7 +130,7 @@ func StartDaemons() {
 	utils.CancelFunc = cancel
 	utils.ReturnCh = make(chan string)
 
-	daemonsToStart := serverList
+	daemonsToStart := getDaemonsToStart()
 	if conf.Config.TestRollBack {
 		daemonsToStart = rollbackList
 	}
@@ -156,3 +156,14 @@ func getHostPort(h string) string {
 	}
 	return fmt.Sprintf("%s:%d", h, consts.DEFAULT_TCP_PORT)
 }
+
+func getDaemonsToStart() []string {
+	if conf.Config.IsSupportingVDE() {
+		return []string{
+			"Notificator",
+			"Scheduler",
+		}
+	}
+
+	return serverList
+}
diff --git a/packages/daylight/daemonsctl/daemonsctl.go b/packages/daylight/daemonsctl/daemonsctl.go
index cdddac4d8..84cac3036 100644
--- a/packages/daylight/daemonsctl/daemonsctl.go
+++ b/packages/daylight/daemonsctl/daemonsctl.go
@@ -14,17 +14,19 @@ import (
 // RunAllDaemons start daemons, load contracts and tcpserver
 func RunAllDaemons() error {
-	logEntry := log.WithFields(log.Fields{"daemon_name": "block_collection"})
-
-	daemons.InitialLoad(logEntry)
-	err := syspar.SysUpdate(nil)
-	if err != nil {
-
log.Errorf("can't read system parameters: %s", utils.ErrInfo(err)) - return err - } - - if data, ok := parser.GetDataFromFirstBlock(); ok { - syspar.SetFirstBlockData(data) + if !conf.Config.IsSupportingVDE() { + logEntry := log.WithFields(log.Fields{"daemon_name": "block_collection"}) + + daemons.InitialLoad(logEntry) + err := syspar.SysUpdate(nil) + if err != nil { + log.Errorf("can't read system parameters: %s", utils.ErrInfo(err)) + return err + } + + if data, ok := parser.GetDataFromFirstBlock(); ok { + syspar.SetFirstBlockData(data) + } } log.Info("load contracts") @@ -36,8 +38,7 @@ func RunAllDaemons() error { log.Info("start daemons") daemons.StartDaemons() - err = tcpserver.TcpListener(conf.Config.TCPServer.Str()) - if err != nil { + if err := tcpserver.TcpListener(conf.Config.TCPServer.Str()); err != nil { log.Errorf("can't start tcp servers, stop") return err } diff --git a/packages/daylight/start.go b/packages/daylight/start.go index c2017942c..946523556 100644 --- a/packages/daylight/start.go +++ b/packages/daylight/start.go @@ -39,6 +39,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/service" "github.com/GenesisKernel/go-genesis/packages/statsd" "github.com/GenesisKernel/go-genesis/packages/utils" + "github.com/GenesisKernel/go-genesis/packages/vdemanager" "github.com/julienschmidt/httprouter" log "github.com/sirupsen/logrus" @@ -181,15 +182,6 @@ func initRoutes(listenHost string) { httpListener(listenHost, route) } -func logBlockchainMode() { - mode := "private" - if !conf.Config.PrivateBlockchain { - mode = "non private" - } - - log.WithFields(log.Fields{"mode": mode}).Error("Node running mode") -} - // Start starts the main code of the program func Start() { var err error @@ -218,7 +210,7 @@ func Start() { } } - logBlockchainMode() + log.WithFields(log.Fields{"mode": conf.Config.RunningMode}).Info("Node running mode") f := utils.LockOrDie(conf.Config.LockFilePath) defer f.Unlock() @@ -259,22 +251,28 @@ func Start() { os.Exit(1) } - var availableBCGap int64 = consts.AvailableBCGap - if syspar.GetRbBlocks1() > consts.AvailableBCGap { - availableBCGap = syspar.GetRbBlocks1() - consts.AvailableBCGap - } + if !conf.Config.IsSupportingVDE() { + var availableBCGap int64 = consts.AvailableBCGap + if syspar.GetRbBlocks1() > consts.AvailableBCGap { + availableBCGap = syspar.GetRbBlocks1() - consts.AvailableBCGap + } - blockGenerationDuration := time.Millisecond * time.Duration(syspar.GetMaxBlockGenerationTime()) - blocksGapDuration := time.Second * time.Duration(syspar.GetGapsBetweenBlocks()) - blockGenerationTime := blockGenerationDuration + blocksGapDuration + blockGenerationDuration := time.Millisecond * time.Duration(syspar.GetMaxBlockGenerationTime()) + blocksGapDuration := time.Second * time.Duration(syspar.GetGapsBetweenBlocks()) + blockGenerationTime := blockGenerationDuration + blocksGapDuration - checkingInterval := blockGenerationTime * time.Duration(syspar.GetRbBlocks1()-consts.DefaultNodesConnectDelay) - na := service.NewNodeRelevanceService(availableBCGap, checkingInterval) - na.Run() + checkingInterval := blockGenerationTime * time.Duration(syspar.GetRbBlocks1()-consts.DefaultNodesConnectDelay) + na := service.NewNodeRelevanceService(availableBCGap, checkingInterval) + na.Run() - err = service.InitNodesBanService() - if err != nil { - log.WithError(err).Fatal("Can't init ban service") + err = service.InitNodesBanService() + if err != nil { + log.WithError(err).Fatal("Can't init ban service") + } + } + + if conf.Config.IsVDEMaster() { + 
vdemanager.InitVDEManager() } } diff --git a/packages/script/vminit.go b/packages/script/vminit.go index 84d9a561b..a82309641 100644 --- a/packages/script/vminit.go +++ b/packages/script/vminit.go @@ -69,6 +69,8 @@ const ( VMTypeSmart VMType = 1 // VMTypeVDE is vde vm type VMTypeVDE VMType = 2 + // VMTypeVDEMaster is VDEMaster type + VMTypeVDEMaster VMType = 3 TagFile = "file" TagAddress = "address" diff --git a/packages/smart/smart.go b/packages/smart/smart.go index ea25a1c7d..612476eb3 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -877,7 +877,7 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { logger.WithFields(log.Fields{"type": consts.InvalidObject}).Error("incorrect sign") return retError(ErrIncorrectSign) } - if sc.TxSmart.EcosystemID > 0 && !sc.VDE && !conf.Config.PrivateBlockchain { + if sc.TxSmart.EcosystemID > 0 && !sc.VDE && !conf.Config.IsPrivateBlockchain() { if sc.TxSmart.TokenEcosystem == 0 { sc.TxSmart.TokenEcosystem = 1 } @@ -999,8 +999,8 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { result = result[:255] } } - if (flags&CallRollback) == 0 && (flags&CallAction) != 0 && sc.TxSmart.EcosystemID > 0 && - !sc.VDE && !conf.Config.PrivateBlockchain && sc.TxContract.Name != `@1NewUser` { + + if (flags&CallRollback) == 0 && (flags&CallAction) != 0 && sc.TxSmart.EcosystemID > 0 && !sc.VDE && !conf.Config.IsPrivateBlockchain() { apl := sc.TxUsedCost.Mul(fuelRate) wltAmount, ierr := decimal.NewFromString(payWallet.Amount) diff --git a/packages/tcpserver/tcpserver.go b/packages/tcpserver/tcpserver.go index c533456dc..1b11f111b 100644 --- a/packages/tcpserver/tcpserver.go +++ b/packages/tcpserver/tcpserver.go @@ -22,6 +22,8 @@ import ( "sync/atomic" "time" + "github.com/GenesisKernel/go-genesis/packages/conf" + "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/service" @@ -107,6 +109,10 @@ func HandleTCPRequest(rw net.Conn) { // TcpListener is listening tcp address func TcpListener(laddr string) error { + if conf.Config.IsSupportingVDE() { + return nil + } + if strings.HasPrefix(laddr, "127.") { log.Warn("Listening at local address: ", laddr) } diff --git a/packages/vdemanager/manager.go b/packages/vdemanager/manager.go index d1a37d413..4cca4ac8e 100644 --- a/packages/vdemanager/manager.go +++ b/packages/vdemanager/manager.go @@ -24,7 +24,7 @@ const ( dropDBTemplate = `DROP OWNED BY %s CASCADE` dropDBRoleTemplate = `DROP ROLE IF EXISTS %s` - commandTemplate = `%s -VDEMode=true -configPath=%s -workDir=%s` + commandTemplate = `%s start --config=%s` ) var ( @@ -33,49 +33,35 @@ var ( // VDEManager struct type VDEManager struct { - processes *process.ProcessManager + processes *process.ProcessManager + execPath string + childConfigsPath string } var ( - Manager *VDEManager - childConfigsPath string + Manager *VDEManager ) -// InitVDEManager create init instance of VDEManager -func InitVDEManager() error { - if err := prepareWorkDir(); err != nil { - return err - } - - return initProcessManager() -} - -func prepareWorkDir() error { - childConfigsPath = path.Join(conf.Config.DataDir, childFolder) +func prepareWorkDir() (string, error) { + childConfigsPath := path.Join(conf.Config.DataDir, childFolder) if _, err := os.Stat(childConfigsPath); os.IsNotExist(err) { if err := os.Mkdir(childConfigsPath, 0700); err != nil { log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("creating configs directory") - return err + return "", err } } - return nil + return 
childConfigsPath, nil
 }
 
 // CreateVDE creates one instance of VDE
 func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) error {
-
 	config := ChildVDEConfig{
-		Executable:     execPath,
+		Executable:     mgr.execPath,
 		Name:           name,
-		Directory:      path.Join(childConfigsPath, name),
+		Directory:      path.Join(mgr.childConfigsPath, name),
 		DBUser:         dbUser,
 		DBPassword:     dbPassword,
 		ConfigFileName: consts.DefaultConfigFile,
@@ -117,7 +103,7 @@ func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) erro
 	procConfEntry.Name = "program:" + name
 	command := fmt.Sprintf("%s --configPath=%s", config.Executable, config.Directory)
 	procConfEntry.AddKeyValue("command", command)
-	proc := process.NewProcess("vdeMaster", confEntry)
+	proc := process.NewProcess("vdeMaster", procConfEntry)
 
 	mgr.processes.Add(name, proc)
 	mgr.processes.Find(name).Start(true)
@@ -153,7 +139,7 @@ func (mgr *VDEManager) DeleteVDE(name string) error {
 		p.Stop(true)
 	}
 
-	vdeDir := path.Join(childConfigsPath, name)
+	vdeDir := path.Join(mgr.childConfigsPath, name)
 	vdeConfigPath := filepath.Join(vdeDir, consts.DefaultConfigFile)
 	vdeConfig, err := conf.GetConfigFromPath(vdeConfigPath)
 	if err != nil {
@@ -250,7 +236,7 @@ func (mgr *VDEManager) createVDEDB(vdeName, login, pass string) error {
 
 func (mgr *VDEManager) initVDEDir(vdeName string) error {
 
-	vdeDirName := path.Join(childConfigsPath, vdeName)
+	vdeDirName := path.Join(mgr.childConfigsPath, vdeName)
 	if _, err := os.Stat(vdeDirName); os.IsNotExist(err) {
 		if err := os.Mkdir(vdeDirName, 0700); err != nil {
 			log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("creating VDE directory")
@@ -261,21 +247,33 @@ func (mgr *VDEManager) initVDEDir(vdeName string) error {
 	return nil
 }
 
-func initProcessManager() error {
+func InitVDEManager() {
+
+	execPath, err := os.Executable()
+	if err != nil {
+		log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Fatal("on determining executable path")
+	}
+
+	childConfigsPath, err := prepareWorkDir()
+	if err != nil {
+		log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Fatal("on preparing child configs folder")
+	}
+
 	Manager = &VDEManager{
-		processes: process.NewProcessManager(),
+		processes:        process.NewProcessManager(),
+		execPath:         execPath,
+		childConfigsPath: childConfigsPath,
 	}
 
 	list, err := ioutil.ReadDir(childConfigsPath)
 	if err != nil {
-		log.WithFields(log.Fields{"type": consts.IOError, "error": err, "path": childConfigsPath}).Error("Initialising VDE list")
-		return err
+		log.WithFields(log.Fields{"type": consts.IOError, "error": err, "path": childConfigsPath}).Fatal("on reading child VDE directory")
 	}
 
 	for _, item := range list {
 		if item.IsDir() {
-			procDir := path.Join(childConfigsPath, item.Name())
-			commandStr := fmt.Sprintf(commandTemplate, bin(), filepath.Join(procDir, consts.DefaultConfigFile), procDir)
+			procDir := path.Join(Manager.childConfigsPath, item.Name())
+			commandStr := fmt.Sprintf(commandTemplate, Manager.execPath, filepath.Join(procDir, consts.DefaultConfigFile))
 			confEntry := pConf.NewConfigEntry(procDir)
 			confEntry.Name = "program:" + item.Name()
 			confEntry.AddKeyValue("command", commandStr)
@@ -287,6 +285,4 @@ func initProcessManager() error {
 			Manager.processes.Add(item.Name(), proc)
 		}
 	}
-
-	return nil
 }
From 215c0fd87fe17087286d73d244041bffb9a781ec Mon Sep 17 00:00:00 2001
From: Roman
Poletaev Date: Thu, 10 May 2018 17:15:56 +0300 Subject: [PATCH 038/169] temporary commit --- packages/api/api.go | 8 +- packages/api/login.go | 5 +- packages/api/vde.go | 4 +- packages/daemons/block_generator_tx.go | 2 +- packages/daylight/start.go | 8 + .../vde/{vde.go => vde_data_contracts.go} | 267 ++---------------- packages/migration/vde/vde_data_keys.go | 6 + packages/migration/vde/vde_data_members.go | 7 + packages/migration/vde/vde_data_menu.go | 45 +++ packages/migration/vde/vde_data_pages.go | 5 + packages/migration/vde/vde_data_parameters.go | 18 ++ packages/migration/vde/vde_data_tables.go | 68 +++++ packages/migration/vde/vde_schema.go | 143 ++++++++++ packages/model/db.go | 9 +- packages/parser/common.go | 2 +- packages/smart/smart.go | 14 +- packages/template/template.go | 2 +- 17 files changed, 343 insertions(+), 270 deletions(-) rename packages/migration/vde/{vde.go => vde_data_contracts.go} (60%) create mode 100644 packages/migration/vde/vde_data_keys.go create mode 100644 packages/migration/vde/vde_data_members.go create mode 100644 packages/migration/vde/vde_data_menu.go create mode 100644 packages/migration/vde/vde_data_pages.go create mode 100644 packages/migration/vde/vde_data_parameters.go create mode 100644 packages/migration/vde/vde_data_tables.go create mode 100644 packages/migration/vde/vde_schema.go diff --git a/packages/api/api.go b/packages/api/api.go index 44a877c47..c24d3260e 100644 --- a/packages/api/api.go +++ b/packages/api/api.go @@ -243,15 +243,11 @@ func fillToken(w http.ResponseWriter, r *http.Request, data *apiData, logger *lo func fillParams(params map[string]int) apiHandle { return func(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.Entry) error { if conf.Config.IsSupportingVDE() { - data.vm = smart.GetVM(true, consts.DefaultVDE) - if data.vm == nil { - return errorAPI(w, `E_VDE`, http.StatusBadRequest, data.ecosystemId) - } data.vde = true - } else { - data.vm = smart.GetVM(false, 0) } + data.vm = smart.GetVM() + for key, par := range params { val := r.FormValue(key) if par&pOptional == 0 && len(val) == 0 { diff --git a/packages/api/login.go b/packages/api/login.go index b55fe85c3..ef8114139 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -128,7 +128,8 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En params := make([]byte, 0) params = append(append(params, converter.EncodeLength(int64(len(hexPubKey)))...), hexPubKey...) 
-	vm := smart.GetVM(false, 0)
+	vm := smart.GetVM()
+
 	contract := smart.VMGetContract(vm, "NewUser", 1)
 	info := contract.Block.Info.(*script.ContractInfo)
@@ -207,7 +208,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En
 		Address: address,
 		IsOwner: founder == wallet,
 		IsNode:  conf.Config.KeyID == wallet,
-		IsVDE:   model.IsTable(fmt.Sprintf(`%d_vde_tables`, ecosystemID)),
+		IsVDE:   model.IsTable(fmt.Sprintf(`%d_vde_tables`, consts.DefaultVDE)),
 	}
 	data.result = &result
diff --git a/packages/api/vde.go b/packages/api/vde.go
index d494dba3e..cf83ec6b0 100644
--- a/packages/api/vde.go
+++ b/packages/api/vde.go
@@ -67,8 +67,8 @@ func InitSmartContract(sc *smart.SmartContract, data []byte) error {
 	if err := msgpack.Unmarshal(data, &sc.TxSmart); err != nil {
 		return err
 	}
-	sc.TxContract = smart.VMGetContractByID(smart.GetVM(sc.VDE, sc.TxSmart.EcosystemID),
-		int32(sc.TxSmart.Type))
+
+	sc.TxContract = smart.VMGetContractByID(smart.GetVM(), int32(sc.TxSmart.Type))
 	if sc.TxContract == nil {
 		return fmt.Errorf(`unknown contract %d`, sc.TxSmart.Type)
 	}
diff --git a/packages/daemons/block_generator_tx.go b/packages/daemons/block_generator_tx.go
index 9b5ddb977..d96e58f8c 100644
--- a/packages/daemons/block_generator_tx.go
+++ b/packages/daemons/block_generator_tx.go
@@ -45,7 +45,7 @@ func (dtx *DelayedTx) RunForBlockID(blockID int64) {
 }
 
 func (dtx *DelayedTx) createTx(delayedContactID, keyID int64) error {
-	vm := smart.GetVM(false, 0)
+	vm := smart.GetVM()
 	contract := smart.VMGetContract(vm, callDelayedContract, uint32(firstEcosystemID))
 	info := contract.Block.Info.(*script.ContractInfo)
diff --git a/packages/daylight/start.go b/packages/daylight/start.go
index 946523556..98394511e 100644
--- a/packages/daylight/start.go
+++ b/packages/daylight/start.go
@@ -37,6 +37,7 @@ import (
 	"github.com/GenesisKernel/go-genesis/packages/model"
 	"github.com/GenesisKernel/go-genesis/packages/publisher"
 	"github.com/GenesisKernel/go-genesis/packages/service"
+	"github.com/GenesisKernel/go-genesis/packages/smart"
 	"github.com/GenesisKernel/go-genesis/packages/statsd"
 	"github.com/GenesisKernel/go-genesis/packages/utils"
 	"github.com/GenesisKernel/go-genesis/packages/vdemanager"
@@ -271,6 +272,13 @@ func Start() {
 		}
 	}
 
+	if conf.Config.IsSupportingVDE() {
+		if err := smart.LoadVDEContracts(nil, converter.Int64ToStr(consts.DefaultVDE)); err != nil {
+			log.WithFields(log.Fields{"type": consts.VMError, "error": err}).Fatal("on loading vde virtual machine")
+			Exit(1)
+		}
+	}
+
 	if conf.Config.IsVDEMaster() {
 		vdemanager.InitVDEManager()
 	}
diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde_data_contracts.go
similarity index 60%
rename from packages/migration/vde/vde.go
rename to packages/migration/vde/vde_data_contracts.go
index 640338e93..4e5ca29ab 100644
--- a/packages/migration/vde/vde.go
+++ b/packages/migration/vde/vde_data_contracts.go
@@ -1,247 +1,6 @@
 package vde
 
-var SchemaVDE = `
-	DROP TABLE IF EXISTS "%[1]d_vde_members";
-	CREATE TABLE "%[1]d_vde_members" (
-		"id" bigint NOT NULL DEFAULT '0',
-		"member_name" varchar(255) NOT NULL DEFAULT '',
-		"image_id" bigint,
-		"member_info" jsonb
-	);
-	ALTER TABLE ONLY "%[1]d_vde_members" ADD CONSTRAINT "%[1]d_vde_members_pkey" PRIMARY KEY ("id");
-
-	INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('%[2]d', 'founder');
-	INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('4544233900443112470', 'guest');
-
-	DROP TABLE IF EXISTS "%[1]d_vde_languages"; CREATE TABLE "%[1]d_vde_languages" (
-		"id" bigint NOT
NULL DEFAULT '0', - "name" character varying(100) NOT NULL DEFAULT '', - "res" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_languages" ADD CONSTRAINT "%[1]d_vde_languages_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_languages_index_name" ON "%[1]d_vde_languages" (name); - - DROP TABLE IF EXISTS "%[1]d_vde_menu"; CREATE TABLE "%[1]d_vde_menu" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "title" character varying(255) NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_menu" ADD CONSTRAINT "%[1]d_vde_menu_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_menu_index_name" ON "%[1]d_vde_menu" (name); - - - INSERT INTO "%[1]d_vde_menu" ("id","name","title","value","conditions") VALUES('2','admin_menu','Admin menu','MenuItem( - Icon: "icon-screen-desktop", - Page: "interface", - Vde: "true", - Title: "Interface" -) -MenuItem( - Icon: "icon-docs", - Page: "tables", - Vde: "true", - Title: "Tables" -) -MenuItem( - Icon: "icon-briefcase", - Page: "contracts", - Vde: "true", - Title: "Smart Contracts" -) -MenuItem( - Icon: "icon-settings", - Page: "parameters", - Vde: "true", - Title: "Ecosystem parameters" -) -MenuItem( - Icon: "icon-globe", - Page: "languages", - Vde: "true", - Title: "Language resources" -) -MenuItem( - Icon: "icon-cloud-upload", - Page: "import", - Vde: "true", - Title: "Import" -) -MenuItem( - Icon: "icon-cloud-download", - Page: "export", - Vde: "true", - Title: "Export" -)','true'); - - DROP TABLE IF EXISTS "%[1]d_vde_pages"; CREATE TABLE "%[1]d_vde_pages" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "menu" character varying(255) NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '', - "validate_count" bigint NOT NULL DEFAULT '1', - "app_id" bigint NOT NULL DEFAULT '0', - "validate_mode" character(1) NOT NULL DEFAULT '0' - ); - ALTER TABLE ONLY "%[1]d_vde_pages" ADD CONSTRAINT "%[1]d_vde_pages_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_pages_index_name" ON "%[1]d_vde_pages" (name); - - INSERT INTO "%[1]d_vde_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); - - DROP TABLE IF EXISTS "%[1]d_vde_blocks"; CREATE TABLE "%[1]d_vde_blocks" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_blocks" ADD CONSTRAINT "%[1]d_vde_blocks_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_blocks_index_name" ON "%[1]d_vde_blocks" (name); - - DROP TABLE IF EXISTS "%[1]d_vde_signatures"; CREATE TABLE "%[1]d_vde_signatures" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(100) NOT NULL DEFAULT '', - "value" jsonb, - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_signatures" ADD CONSTRAINT "%[1]d_vde_signatures_pkey" PRIMARY KEY (name); - - CREATE TABLE "%[1]d_vde_contracts" ( - "id" bigint NOT NULL DEFAULT '0', - "name" text NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_contracts" ADD CONSTRAINT "%[1]d_vde_contracts_pkey" PRIMARY KEY (id); - - DROP TABLE IF EXISTS "%[1]d_vde_parameters"; - CREATE TABLE "%[1]d_vde_parameters" ( - "id" bigint NOT NULL DEFAULT '0', - "name" varchar(255) UNIQUE NOT NULL DEFAULT '', - 
"value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_parameters" ADD CONSTRAINT "%[1]d_vde_parameters_pkey" PRIMARY KEY ("id"); - CREATE INDEX "%[1]d_vde_parameters_index_name" ON "%[1]d_vde_parameters" (name); - - INSERT INTO "%[1]d_vde_parameters" ("id","name", "value", "conditions") VALUES - ('1','founder_account', '%[2]d', 'ContractConditions("MainCondition")'), - ('2','new_table', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('3','new_column', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('4','changing_tables', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('5','changing_language', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('6','changing_signature', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('7','changing_page', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('8','changing_menu', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('9','changing_contracts', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('10','stylesheet', 'body { - /* You can define your custom styles here or create custom CSS rules */ - }', 'ContractConditions("MainCondition")'), - ('11','changing_blocks', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'); - - DROP TABLE IF EXISTS "%[1]d_vde_cron"; - CREATE TABLE "%[1]d_vde_cron" ( - "id" bigint NOT NULL DEFAULT '0', - "owner" bigint NOT NULL DEFAULT '0', - "cron" varchar(255) NOT NULL DEFAULT '', - "contract" varchar(255) NOT NULL DEFAULT '', - "counter" bigint NOT NULL DEFAULT '0', - "till" timestamp NOT NULL DEFAULT timestamp '1970-01-01 00:00:00', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_cron" ADD CONSTRAINT "%[1]d_vde_cron_pkey" PRIMARY KEY ("id"); - - DROP TABLE IF EXISTS "%[1]d_vde_binaries"; - CREATE TABLE "%[1]d_vde_binaries" ( - "id" bigint NOT NULL DEFAULT '0', - "app_id" bigint NOT NULL DEFAULT '1', - "member_id" bigint NOT NULL DEFAULT '0', - "name" varchar(255) NOT NULL DEFAULT '', - "data" bytea NOT NULL DEFAULT '', - "hash" varchar(32) NOT NULL DEFAULT '', - "mime_type" varchar(255) NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_binaries" ADD CONSTRAINT "%[1]d_vde_binaries_pkey" PRIMARY KEY (id); - CREATE UNIQUE INDEX "%[1]d_vde_binaries_index_app_id_member_id_name" ON "%[1]d_vde_binaries" (app_id, member_id, name); - - CREATE TABLE "%[1]d_vde_tables" ( - "id" bigint NOT NULL DEFAULT '0', - "name" varchar(100) UNIQUE NOT NULL DEFAULT '', - "permissions" jsonb, - "columns" jsonb, - "conditions" text NOT NULL DEFAULT '', - "app_id" bigint NOT NULL DEFAULT '1' - ); - ALTER TABLE ONLY "%[1]d_vde_tables" ADD CONSTRAINT "%[1]d_vde_tables_pkey" PRIMARY KEY ("id"); - CREATE INDEX "%[1]d_vde_tables_index_name" ON "%[1]d_vde_tables" (name); - - INSERT INTO "%[1]d_vde_tables" ("id", "name", "permissions","columns", "conditions") VALUES ('1', 'contracts', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "false", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), - ('2', 'languages', - '{"insert": "ContractConditions(\"MainCondition\")", "update": 
"ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{ "name": "ContractConditions(\"MainCondition\")", - "res": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), - ('3', 'menu', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('4', 'pages', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "menu": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")", - "validate_count": "ContractConditions(\"MainCondition\")", - "validate_mode": "ContractConditions(\"MainCondition\")", - "app_id": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('5', 'blocks', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('6', 'signatures', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('7', 'cron', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"owner": "ContractConditions(\"MainCondition\")", - "cron": "ContractConditions(\"MainCondition\")", - "contract": "ContractConditions(\"MainCondition\")", - "counter": "ContractConditions(\"MainCondition\")", - "till": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractConditions("MainCondition")'), - ('8', 'binaries', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"app_id": "ContractConditions(\"MainCondition\")", - "member_id": "ContractConditions(\"MainCondition\")", - "name": "ContractConditions(\"MainCondition\")", - "data": "ContractConditions(\"MainCondition\")", - "hash": "ContractConditions(\"MainCondition\")", - "mime_type": "ContractConditions(\"MainCondition\")"}', - 'ContractConditions("MainCondition")'); - - INSERT INTO "%[1]d_vde_contracts" ("id", "name", "value", "conditions") VALUES +var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "conditions") VALUES ('1','MainCondition','contract MainCondition { conditions { if EcosysParam("founder_account")!=$key_id @@ -927,7 +686,7 @@ MenuItem( UpdateCron($Id) } }', 'ContractConditions("MainCondition")'), - ('23', 'UploadBinary', 
contract UploadBinary { + ('23', 'UploadBinary', 'contract UploadBinary { data { Name string Data bytes "file" @@ -954,5 +713,23 @@ MenuItem( $result = $Id } - }', 'ContractConditions("MainCondition")'); - ` + }', 'ContractConditions("MainCondition")'), + ('24', 'NewUser','contract NewUser { + data { + NewPubkey string + } + conditions { + $newId = PubToID($NewPubkey) + if $newId == 0 { + error "Wrong pubkey" + } + if DBFind("keys").Columns("id").WhereId($newId).One("id") != nil { + error "User already exists" + } + + $amount = Money(1000) * Money(1000000000000000000) + } + action { + DBInsert("keys", "id, pub", $newId, $NewPubKey) + } + }', 'ContractConditions("MainCondition")');` diff --git a/packages/migration/vde/vde_data_keys.go b/packages/migration/vde/vde_data_keys.go new file mode 100644 index 000000000..42e26c843 --- /dev/null +++ b/packages/migration/vde/vde_data_keys.go @@ -0,0 +1,6 @@ +package vde + +var keysDataSQL = ` +INSERT INTO "%[1]d_keys" (id, pub) +VALUES (4544233900443112470, '489347a1205c818d9a02f285faaedd0122a56138e3d985f5e1b4f6a9470f90f692a00a3453771dd7feea388ceb7aefeaf183e299c70ad1aecb7f870bfada3b86'); +` diff --git a/packages/migration/vde/vde_data_members.go b/packages/migration/vde/vde_data_members.go new file mode 100644 index 000000000..069f1ea2b --- /dev/null +++ b/packages/migration/vde/vde_data_members.go @@ -0,0 +1,7 @@ +package vde + +var membersDataSQL = ` +INSERT INTO "%[1]d_members" ("id", "member_name") +VALUES('%[2]d', 'founder'), +('4544233900443112470', 'guest'); +` diff --git a/packages/migration/vde/vde_data_menu.go b/packages/migration/vde/vde_data_menu.go new file mode 100644 index 000000000..b52a1699f --- /dev/null +++ b/packages/migration/vde/vde_data_menu.go @@ -0,0 +1,45 @@ +package vde + +var menuDataSQL = ` +INSERT INTO "%[1]d_menu" ("id","name","title","value","conditions") VALUES('2','admin_menu','Admin menu','MenuItem( + Icon: "icon-screen-desktop", + Page: "interface", + Vde: "true", + Title: "Interface" +) +MenuItem( + Icon: "icon-docs", + Page: "tables", + Vde: "true", + Title: "Tables" +) +MenuItem( + Icon: "icon-briefcase", + Page: "contracts", + Vde: "true", + Title: "Smart Contracts" +) +MenuItem( + Icon: "icon-settings", + Page: "parameters", + Vde: "true", + Title: "Ecosystem parameters" +) +MenuItem( + Icon: "icon-globe", + Page: "languages", + Vde: "true", + Title: "Language resources" +) +MenuItem( + Icon: "icon-cloud-upload", + Page: "import", + Vde: "true", + Title: "Import" +) +MenuItem( + Icon: "icon-cloud-download", + Page: "export", + Vde: "true", + Title: "Export" +)','true');` diff --git a/packages/migration/vde/vde_data_pages.go b/packages/migration/vde/vde_data_pages.go new file mode 100644 index 000000000..90ef6eab4 --- /dev/null +++ b/packages/migration/vde/vde_data_pages.go @@ -0,0 +1,5 @@ +package vde + +var pagesDataSQL = ` +INSERT INTO "%[1]d_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); +` diff --git a/packages/migration/vde/vde_data_parameters.go b/packages/migration/vde/vde_data_parameters.go new file mode 100644 index 000000000..3ba29e2f9 --- /dev/null +++ b/packages/migration/vde/vde_data_parameters.go @@ -0,0 +1,18 @@ +package vde + +var parametersDataSQL = ` +INSERT INTO "%[1]d_parameters" ("id","name", "value", "conditions") VALUES + ('1','founder_account', '%[2]d', 'ContractConditions("MainCondition")'), + ('2','new_table', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('3','new_column', 
'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('4','changing_tables', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('5','changing_language', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('6','changing_signature', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('7','changing_page', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('8','changing_menu', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('9','changing_contracts', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('10','stylesheet', 'body { + /* You can define your custom styles here or create custom CSS rules */ + }', 'ContractConditions("MainCondition")'), + ('11','changing_blocks', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'); +` diff --git a/packages/migration/vde/vde_data_tables.go b/packages/migration/vde/vde_data_tables.go new file mode 100644 index 000000000..4223e825a --- /dev/null +++ b/packages/migration/vde/vde_data_tables.go @@ -0,0 +1,68 @@ +package vde + +var tablesDataSQL = ` +INSERT INTO "%[1]d_tables" ("id", "name", "permissions","columns", "conditions") VALUES ('1', 'contracts', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "false", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), + ('2', 'languages', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{ "name": "ContractConditions(\"MainCondition\")", + "res": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), + ('3', 'menu', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('4', 'pages', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"menu": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")", +"validate_count": "ContractConditions(\"MainCondition\")", +"validate_mode": "ContractConditions(\"MainCondition\")", +"app_id": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('5', 'blocks', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('6', 'signatures', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + 
"new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('7', 'cron', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"owner": "ContractConditions(\"MainCondition\")", + "cron": "ContractConditions(\"MainCondition\")", + "contract": "ContractConditions(\"MainCondition\")", + "counter": "ContractConditions(\"MainCondition\")", + "till": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractConditions("MainCondition")'), + ('8', 'binaries', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"app_id": "ContractConditions(\"MainCondition\")", + "member_id": "ContractConditions(\"MainCondition\")", + "name": "ContractConditions(\"MainCondition\")", + "data": "ContractConditions(\"MainCondition\")", + "hash": "ContractConditions(\"MainCondition\")", + "mime_type": "ContractConditions(\"MainCondition\")"}', + 'ContractConditions("MainCondition")'); +` diff --git a/packages/migration/vde/vde_schema.go b/packages/migration/vde/vde_schema.go new file mode 100644 index 000000000..c3fda993f --- /dev/null +++ b/packages/migration/vde/vde_schema.go @@ -0,0 +1,143 @@ +package vde + +import ( + "strings" +) + +// GetVDEScript returns script for VDE schema +func GetVDEScript() string { + scripts := []string{ + schemaVDE, + membersDataSQL, + menuDataSQL, + pagesDataSQL, + parametersDataSQL, + tablesDataSQL, + contractsDataSQL, + keysDataSQL, + } + + return strings.Join(scripts, "\r\n") +} + +var schemaVDE = ` + DROP TABLE IF EXISTS "%[1]d_keys"; CREATE TABLE "%[1]d_keys" ( + "id" bigint NOT NULL DEFAULT '0', + "pub" bytea NOT NULL DEFAULT '', + "multi" bigint NOT NULL DEFAULT '0', + "deleted" bigint NOT NULL DEFAULT '0', + "blocked" bigint NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_keys" ADD CONSTRAINT "%[1]d_keys_pkey" PRIMARY KEY (id); + + DROP TABLE IF EXISTS "%[1]d_members"; + CREATE TABLE "%[1]d_members" ( + "id" bigint NOT NULL DEFAULT '0', + "member_name" varchar(255) NOT NULL DEFAULT '', + "image_id" bigint, + "member_info" jsonb + ); + ALTER TABLE ONLY "%[1]d_members" ADD CONSTRAINT "%[1]d_members_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_languages"; CREATE TABLE "%[1]d_languages" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(100) NOT NULL DEFAULT '', + "res" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_languages" ADD CONSTRAINT "%[1]d_languages_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_languages_index_name" ON "%[1]d_languages" (name); + + DROP TABLE IF EXISTS "%[1]d_menu"; CREATE TABLE "%[1]d_menu" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "title" character varying(255) NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_menu" ADD CONSTRAINT "%[1]d_menu_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_menu_index_name" ON "%[1]d_menu" (name); + + DROP TABLE IF EXISTS "%[1]d_pages"; CREATE TABLE "%[1]d_pages" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT 
NULL DEFAULT '', + "menu" character varying(255) NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '', + "validate_count" bigint NOT NULL DEFAULT '1', + "app_id" bigint NOT NULL DEFAULT '0', + "validate_mode" character(1) NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_pages" ADD CONSTRAINT "%[1]d_pages_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_pages_index_name" ON "%[1]d_pages" (name); + + DROP TABLE IF EXISTS "%[1]d_blocks"; CREATE TABLE "%[1]d_blocks" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_blocks" ADD CONSTRAINT "%[1]d_blocks_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_blocks_index_name" ON "%[1]d_blocks" (name); + + DROP TABLE IF EXISTS "%[1]d_signatures"; CREATE TABLE "%[1]d_signatures" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(100) NOT NULL DEFAULT '', + "value" jsonb, + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_signatures" ADD CONSTRAINT "%[1]d_signatures_pkey" PRIMARY KEY (name); + + CREATE TABLE "%[1]d_contracts" ( + "id" bigint NOT NULL DEFAULT '0', + "name" text NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_contracts" ADD CONSTRAINT "%[1]d_contracts_pkey" PRIMARY KEY (id); + + DROP TABLE IF EXISTS "%[1]d_parameters"; + CREATE TABLE "%[1]d_parameters" ( + "id" bigint NOT NULL DEFAULT '0', + "name" varchar(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_parameters" ADD CONSTRAINT "%[1]d_parameters_pkey" PRIMARY KEY ("id"); + CREATE INDEX "%[1]d_parameters_index_name" ON "%[1]d_parameters" (name); + + DROP TABLE IF EXISTS "%[1]d_cron"; + CREATE TABLE "%[1]d_cron" ( + "id" bigint NOT NULL DEFAULT '0', + "owner" bigint NOT NULL DEFAULT '0', + "cron" varchar(255) NOT NULL DEFAULT '', + "contract" varchar(255) NOT NULL DEFAULT '', + "counter" bigint NOT NULL DEFAULT '0', + "till" timestamp NOT NULL DEFAULT timestamp '1970-01-01 00:00:00', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_cron" ADD CONSTRAINT "%[1]d_cron_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_binaries"; + CREATE TABLE "%[1]d_binaries" ( + "id" bigint NOT NULL DEFAULT '0', + "app_id" bigint NOT NULL DEFAULT '1', + "member_id" bigint NOT NULL DEFAULT '0', + "name" varchar(255) NOT NULL DEFAULT '', + "data" bytea NOT NULL DEFAULT '', + "hash" varchar(32) NOT NULL DEFAULT '', + "mime_type" varchar(255) NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_binaries" ADD CONSTRAINT "%[1]d_binaries_pkey" PRIMARY KEY (id); + CREATE UNIQUE INDEX "%[1]d_binaries_index_app_id_member_id_name" ON "%[1]d_binaries" (app_id, member_id, name); + + CREATE TABLE "%[1]d_tables" ( + "id" bigint NOT NULL DEFAULT '0', + "name" varchar(100) UNIQUE NOT NULL DEFAULT '', + "permissions" jsonb, + "columns" jsonb, + "conditions" text NOT NULL DEFAULT '', + "app_id" bigint NOT NULL DEFAULT '1' + ); + ALTER TABLE ONLY "%[1]d_tables" ADD CONSTRAINT "%[1]d_tables_pkey" PRIMARY KEY ("id"); + CREATE INDEX "%[1]d_tables_index_name" ON "%[1]d_tables" (name); + ` diff --git a/packages/model/db.go b/packages/model/db.go index db9665979..7e32186ea 100644 --- a/packages/model/db.go +++ b/packages/model/db.go @@ -156,7 +156,7 @@ func ExecSchemaEcosystem(db *DbTransaction, id int, wallet int64, name string, f // 
ExecSchemaLocalData is executing schema with local data func ExecSchemaLocalData(id int, wallet int64) error { - return DBConn.Exec(fmt.Sprintf(vde.SchemaVDE, id, wallet)).Error + return DBConn.Exec(fmt.Sprintf(vde.GetVDEScript(), id, wallet)).Error } // ExecSchema is executing schema @@ -385,5 +385,12 @@ func InitDB(cfg conf.DBConfig) error { return err } + if conf.Config.IsSupportingVDE() { + if err := ExecSchemaLocalData(consts.DefaultVDE, conf.Config.KeyID); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating VDE schema") + return err + } + } + return nil } diff --git a/packages/parser/common.go b/packages/parser/common.go index d61617cf1..328c28dfb 100644 --- a/packages/parser/common.go +++ b/packages/parser/common.go @@ -506,7 +506,7 @@ func (p *Parser) CallContract(flags int) (resultContract string, err error) { VDE: false, Rollback: true, SysUpdate: false, - VM: smart.GetVM(false, 0), + VM: smart.GetVM(), TxSmart: *p.TxSmart, TxData: p.TxData, TxContract: p.TxContract, diff --git a/packages/smart/smart.go b/packages/smart/smart.go index 612476eb3..cf6f77696 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -66,7 +66,6 @@ const ( var ( smartVM *script.VM - smartVDE map[int64]*script.VM smartTest = make(map[string]string) ErrCurrentBalance = errors.New(`current balance is not enough`) @@ -118,17 +117,10 @@ func newVM() *script.VM { func init() { smartVM = newVM() - smartVDE = make(map[int64]*script.VM) } // GetVM is returning smart vm -func GetVM(vde bool, ecosystemID int64) *script.VM { - if vde { - if v, ok := smartVDE[ecosystemID]; ok { - return v - } - return nil - } +func GetVM() *script.VM { return smartVM } @@ -495,7 +487,6 @@ func LoadVDEContracts(transaction *model.DbTransaction, prefix string) (err erro } EmbedFuncs(vm, vmt) - smartVDE[state] = vm LoadSysFuncs(vm, int(state)) for _, item := range contracts { list, err := script.ContractsList(item[`value`]) @@ -828,7 +819,8 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { methods := []string{`init`, `conditions`, `action`, `rollback`} sc.AppendStack(sc.TxContract.Name) - sc.VM = GetVM(sc.VDE, sc.TxSmart.EcosystemID) + sc.VM = GetVM() + if (flags&CallRollback) == 0 && (flags&CallAction) != 0 { if !sc.VDE { toID = sc.BlockData.KeyID diff --git a/packages/template/template.go b/packages/template/template.go index 5c0dc1842..8beb4882b 100644 --- a/packages/template/template.go +++ b/packages/template/template.go @@ -692,7 +692,7 @@ func Template2JSON(input string, timeout *bool, vars *map[string]string) []byte isvde := (*vars)[`vde`] == `true` || (*vars)[`vde`] == `1` sc := smart.SmartContract{ VDE: isvde, - VM: smart.GetVM(isvde, converter.StrToInt64((*vars)[`ecosystem_id`])), + VM: smart.GetVM(), TxSmart: tx.SmartContract{ Header: tx.Header{ EcosystemID: converter.StrToInt64((*vars)[`ecosystem_id`]), From c1f4b9f8774bfb814c8de21ce4a4f969d7472d0f Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 10 May 2018 22:37:36 +0300 Subject: [PATCH 039/169] fix login --- packages/api/api.go | 37 ++++--- packages/api/login.go | 4 +- packages/migration/vde/vde_data_contracts.go | 111 +++++++++++++++---- packages/migration/vde/vde_data_pages.go | 2 +- packages/migration/vde/vde_schema.go | 29 +++++ packages/smart/smart.go | 8 +- 6 files changed, 148 insertions(+), 43 deletions(-) diff --git a/packages/api/api.go b/packages/api/api.go index c24d3260e..9e55102aa 100644 --- a/packages/api/api.go +++ b/packages/api/api.go @@ -133,9 +133,6 @@ func 
errorAPI(w http.ResponseWriter, err interface{}, code int, params ...interf func getPrefix(data *apiData) (prefix string) { prefix = converter.Int64ToStr(data.ecosystemId) - if data.vde { - prefix += `_vde` - } return } @@ -274,6 +271,10 @@ func fillParams(params map[string]int) apiHandle { } func checkEcosystem(w http.ResponseWriter, data *apiData, logger *log.Entry) (int64, string, error) { + if conf.Config.IsSupportingVDE() { + return consts.DefaultVDE, "1", nil + } + ecosystemID := data.ecosystemId if data.params[`ecosystem`].(int64) > 0 { ecosystemID = data.params[`ecosystem`].(int64) @@ -288,9 +289,9 @@ func checkEcosystem(w http.ResponseWriter, data *apiData, logger *log.Entry) (in } } prefix := converter.Int64ToStr(ecosystemID) - if data.vde { - prefix += `_vde` - } + // if data.vde { + // prefix += `_vde` + // } return ecosystemID, prefix, nil } @@ -299,18 +300,20 @@ func fillTokenData(data *apiData, claims *JWTClaims, logger *log.Entry) error { data.keyId = converter.StrToInt64(claims.KeyID) data.isMobile = claims.IsMobile data.roleId = converter.StrToInt64(claims.RoleID) - ecosystem := &model.Ecosystem{} - found, err := ecosystem.Get(data.ecosystemId) - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on getting ecosystem from db") - return err - } + if !conf.Config.IsSupportingVDE() { + ecosystem := &model.Ecosystem{} + found, err := ecosystem.Get(data.ecosystemId) + if err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on getting ecosystem from db") + return err + } - if !found { - err := fmt.Errorf("ecosystem not found") - logger.WithFields(log.Fields{"type": consts.NotFound, "id": data.ecosystemId, "error": err}).Error("ecosystem not found") - } + if !found { + err := fmt.Errorf("ecosystem not found") + logger.WithFields(log.Fields{"type": consts.NotFound, "id": data.ecosystemId, "error": err}).Error("ecosystem not found") + } - data.ecosystemName = ecosystem.Name + data.ecosystemName = ecosystem.Name + } return nil } diff --git a/packages/api/login.go b/packages/api/login.go index ef8114139..d9c7f8de6 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -128,9 +128,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En params := make([]byte, 0) params = append(append(params, converter.EncodeLength(int64(len(hexPubKey)))...), hexPubKey...) - vm := smart.GetVM() - - contract := smart.VMGetContract(vm, "NewUser", 1) + contract := smart.GetContract("NewUser", 1) info := contract.Block.Info.(*script.ContractInfo) err = tx.BuildTransaction(tx.SmartContract{ diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index 4e5ca29ab..ea83e591c 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -483,38 +483,113 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c PermColumn($TableName, $Name, $Permissions) } }', 'ContractConditions("MainCondition")'), - ('18','NewLang','contract NewLang { + ('18','NewLang', 'contract NewLang { data { - Name string - Trans string - AppID int + ApplicationId int "optional" + Name string + Trans string "optional" + Value array "optional" + IdLanguage array "optional" } + conditions { - EvalCondition("parameters", "changing_language", "value") - var row array - row = DBFind("languages").Columns("name").Where("name=? 
AND app_id=?", $Name, $AppID).Limit(1) - if Len(row) > 0 { - error Sprintf("The language resource %%s already exists", $Name) + if $ApplicationId == 0 { + warning "Application id cannot equal 0" + } + + if DBFind("languages").Columns("id").Where("name = ?", $Name).One("id") { + warning Sprintf( "Language resource %%s already exists", $Name) } + + var j int + while j < Len($IdLanguage) { + if $IdLanguage[j] == "" { + info("Locale empty") + } + if $Value[j] == "" { + info("Value empty") + } + j = j + 1 + } + EvalCondition("parameters", "changing_language", "value") } + action { - DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) + var i,len,lenshar int + var res,langarr string + len = Len($IdLanguage) + lenshar = Len($Value) + while i < len { + if i + 1 == len { + res = res + Sprintf("%%q: %%q",$IdLanguage[i],$Value[i]) + } else { + res = res + Sprintf("%%q: %%q,",$IdLanguage[i],$Value[i]) + } + i = i + 1 + } + if len > 0 { + langarr = Sprintf("{"+"%%v"+"}", res) + $Trans = langarr + } + $result = CreateLanguage($Name, $Trans, $ApplicationId) } }', 'ContractConditions("MainCondition")'), ('19','EditLang','contract EditLang { data { - Id int - Name string - Trans string - AppID int + Id int + Name string "optional" + ApplicationId int "optional" + Trans string "optional" + Value array "optional" + IdLanguage array "optional" } + conditions { + var j int + while j < Len($IdLanguage) { + if ($IdLanguage[j] == ""){ + info("Locale empty") + } + if ($Value[j] == ""){ + info("Value empty") + } + j = j + 1 + } EvalCondition("parameters", "changing_language", "value") } + action { - DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) + var i,len int + var res,langarr string + len = Len($IdLanguage) + while i < len { + if (i + 1 == len){ + res = res + Sprintf("%%q: %%q", $IdLanguage[i],$Value[i]) + } + else { + res = res + Sprintf("%%q: %%q, ", $IdLanguage[i],$Value[i]) + } + i = i + 1 + } + + $row = DBFind("languages").Columns("name,app_id").WhereId($Id).Row() + if !$row{ + warning "Language not found" + } + + if $ApplicationId == 0 { + $ApplicationId = Int($row["app_id"]) + } + if $Name == "" { + $Name = $row["name"] + } + + if (len > 0){ + langarr = Sprintf("{"+"%%v"+"}", res) + $Trans = langarr + + } + EditLanguage($Id, $Name, $Trans, $ApplicationId) } }', 'ContractConditions("MainCondition")'), ('20','Import','contract Import { @@ -726,8 +801,6 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c if DBFind("keys").Columns("id").WhereId($newId).One("id") != nil { error "User already exists" } - - $amount = Money(1000) * Money(1000000000000000000) } action { DBInsert("keys", "id, pub", $newId, $NewPubKey) diff --git a/packages/migration/vde/vde_data_pages.go b/packages/migration/vde/vde_data_pages.go index 90ef6eab4..b013166b1 100644 --- a/packages/migration/vde/vde_data_pages.go +++ b/packages/migration/vde/vde_data_pages.go @@ -1,5 +1,5 @@ package vde var pagesDataSQL = ` -INSERT INTO "%[1]d_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); +INSERT INTO "%[1]d_pages" ("id","name","value","menu","conditions") VALUES('1', 'default_page', '', 'admin_menu', 'true'),('2','admin_index','','admin_menu','true'); ` diff --git a/packages/migration/vde/vde_schema.go b/packages/migration/vde/vde_schema.go index c3fda993f..7edf5da94 100644 --- a/packages/migration/vde/vde_schema.go +++ b/packages/migration/vde/vde_schema.go @@ 
-140,4 +140,33 @@ var schemaVDE = ` ); ALTER TABLE ONLY "%[1]d_tables" ADD CONSTRAINT "%[1]d_tables_pkey" PRIMARY KEY ("id"); CREATE INDEX "%[1]d_tables_index_name" ON "%[1]d_tables" (name); + + DROP TABLE IF EXISTS "%[1]d_notifications"; + CREATE TABLE "%[1]d_notifications" ( + "id" bigint NOT NULL DEFAULT '0', + "recipient" jsonb, + "sender" jsonb, + "notification" jsonb, + "page_params" jsonb, + "processing_info" jsonb, + "page_name" varchar(255) NOT NULL DEFAULT '', + "date_created" timestamp, + "date_start_processing" timestamp, + "date_closed" timestamp, + "closed" bigint NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_notifications" ADD CONSTRAINT "%[1]d_notifications_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_roles_participants"; + CREATE TABLE "%[1]d_roles_participants" ( + "id" bigint NOT NULL DEFAULT '0', + "role" jsonb, + "member" jsonb, + "appointed" jsonb, + "date_created" timestamp, + "date_deleted" timestamp, + "deleted" bigint NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_roles_participants" ADD CONSTRAINT "%[1]d_roles_participants_pkey" PRIMARY KEY ("id"); + ` diff --git a/packages/smart/smart.go b/packages/smart/smart.go index cf6f77696..bb2a10a1f 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -174,6 +174,7 @@ func VMRun(vm *script.VM, block *script.Block, params []interface{}, extend *map func VMGetContract(vm *script.VM, name string, state uint32) *Contract { name = script.StateName(state, name) obj, ok := vm.Objects[name] + if ok && obj.Type == script.ObjContract { return &Contract{Name: name, Block: obj.Value.(*script.Block)} } @@ -469,15 +470,15 @@ func LoadContract(transaction *model.DbTransaction, prefix string) (err error) { func LoadVDEContracts(transaction *model.DbTransaction, prefix string) (err error) { var contracts []map[string]string - if !model.IsTable(prefix + `_vde_contracts`) { + if !model.IsTable(prefix + `_contracts`) { return } - contracts, err = model.GetAllTransaction(transaction, `select * from "`+prefix+`_vde_contracts" order by id`, -1) + contracts, err = model.GetAllTransaction(transaction, `select * from "`+prefix+`_contracts" order by id`, -1) if err != nil { return err } state := converter.StrToInt64(prefix) - vm := newVM() + vm := GetVM() var vmt script.VMType if conf.Config.IsVDE() { @@ -502,6 +503,7 @@ func LoadVDEContracts(transaction *model.DbTransaction, prefix string) (err erro WalletID: 0, TokenID: 0, } + if err = vmCompile(vm, item[`value`], &owner); err != nil { log.WithFields(log.Fields{"names": names, "error": err}).Error("Load VDE Contract") } else { From 612488ac3b019c216bad685bf8453ade2875f849 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 14 May 2018 09:18:14 +0300 Subject: [PATCH 040/169] temporary commit --- packages/api/login.go | 50 +++++++- packages/api/route.go | 6 +- packages/api/vde.go | 7 +- packages/api/vde_test.go | 120 ++----------------- packages/migration/vde/vde_data_contracts.go | 41 +++++++ packages/smart/smart.go | 1 + 6 files changed, 105 insertions(+), 120 deletions(-) diff --git a/packages/api/login.go b/packages/api/login.go index d9c7f8de6..9e0f9a07e 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -19,12 +19,14 @@ package api import ( "fmt" "net/http" + "strings" "time" "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/notificator" "github.com/GenesisKernel/go-genesis/packages/publisher" + msgpack 
"gopkg.in/vmihailenco/msgpack.v2" "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/crypto" @@ -131,20 +133,60 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En contract := smart.GetContract("NewUser", 1) info := contract.Block.Info.(*script.ContractInfo) - err = tx.BuildTransaction(tx.SmartContract{ + // scHeader, err := getHeader("NewUser", data) + if err != nil { + return errorAPI(w, "E_EMPTYOBJECT", http.StatusBadRequest) + } + + sc := tx.SmartContract{ Header: tx.Header{ Type: int(info.ID), Time: time.Now().Unix(), EcosystemID: 1, KeyID: conf.Config.KeyID, NetworkID: consts.NETWORK_ID, + PublicKey: pubkey, }, SignedBy: smart.PubToID(NodePublicKey), Data: params, - }, NodePrivateKey, NodePublicKey, string(hexPubKey)) - if err != nil { - log.WithFields(log.Fields{"type": consts.ContractError}).Error("Executing contract") } + + if conf.Config.IsSupportingVDE() { + + signPrms := []string{sc.ForSign()} + signPrms = append(signPrms, string(hexPubKey)) + signature, err := crypto.Sign( + NodePrivateKey, + strings.Join(signPrms, ","), + ) + if err != nil { + log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("signing by node private key") + return err + } + sc.BinSignatures = converter.EncodeLengthPlusData(signature) + serializedContract, err := msgpack.Marshal(sc) + if err != nil { + logger.WithFields(log.Fields{"type": consts.MarshallingError, "error": err}).Error("marshalling smart contract to msgpack") + return errorAPI(w, err, http.StatusInternalServerError) + } + // signature := data.params[`signature`].([]byte) + // if len(signature) == 0 { + // log.WithFields(log.Fields{"type": consts.EmptyObject, "params": data.params}).Error("signature is empty") + // } + + fmt.Println(len(signature)) + ret, err := VDEContract(serializedContract, data) + if err != nil { + return errorAPI(w, err, http.StatusInternalServerError) + } + data.result = ret + } else { + err = tx.BuildTransaction(sc, NodePrivateKey, NodePublicKey, string(hexPubKey)) + if err != nil { + log.WithFields(log.Fields{"type": consts.ContractError}).Error("Executing contract") + } + } + } if ecosystemID > 1 && len(pubkey) == 0 { diff --git a/packages/api/route.go b/packages/api/route.go index be778c441..16f4857a4 100644 --- a/packages/api/route.go +++ b/packages/api/route.go @@ -59,7 +59,7 @@ func Route(route *hr.Router) { get(`interface/page/:name`, ``, authWallet, getPageRow) get(`interface/menu/:name`, ``, authWallet, getMenuRow) get(`interface/block/:name`, ``, authWallet, getBlockInterfaceRow) - get(`systemparams`, `?names:string`, authWallet, systemParams) + // get(`systemparams`, `?names:string`, authWallet, systemParams) get(`table/:name`, ``, authWallet, table) get(`tables`, `?limit ?offset:int64`, authWallet, tables) get(`test/:name`, ``, getTest) @@ -81,7 +81,7 @@ func Route(route *hr.Router) { post(`test/:name`, ``, getTest) post(`content`, `template ?source:string`, jsonContent) post(`updnotificator`, `ids:string`, updateNotificator) - + get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) methodRoute(route, `POST`, `node/:name`, `?token_ecosystem:int64,?max_sum ?payover:string`, contractHandlers.nodeContract) if !conf.Config.IsSupportingVDE() { @@ -92,7 +92,7 @@ func Route(route *hr.Router) { get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) get(`block/:id`, ``, getBlockInfo) get(`maxblockid`, ``, getMaxBlockID) - get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, 
ecosystemParam) + get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) get(`systemparams`, `?names:string`, authWallet, systemParams) get(`ecosystems`, ``, authWallet, ecosystems) diff --git a/packages/api/vde.go b/packages/api/vde.go index cf83ec6b0..9891ffddb 100644 --- a/packages/api/vde.go +++ b/packages/api/vde.go @@ -173,17 +173,22 @@ func VDEContract(contractData []byte, data *apiData) (result *contractResult, er result.Message = &txstatusError{Type: "panic", Error: err.Error()} return } + if data.token != nil && data.token.Valid { if auth, err := data.token.SignedString([]byte(jwtSecret)); err == nil { sc.TxData[`auth_token`] = auth } } + if ret, err = sc.CallContract(smart.CallInit | smart.CallCondition | smart.CallAction); err == nil { result.Result = ret } else { if errResult := json.Unmarshal([]byte(err.Error()), &result.Message); errResult != nil { - log.WithFields(log.Fields{"type": consts.JSONUnmarshallError, "text": err.Error(), + log.WithFields(log.Fields{ + "type": consts.JSONUnmarshallError, + "text": err.Error(), "error": errResult}).Error("unmarshalling contract error") + result.Message = &txstatusError{Type: "panic", Error: errResult.Error()} } } diff --git a/packages/api/vde_test.go b/packages/api/vde_test.go index c0d6b7d68..bd32c97de 100644 --- a/packages/api/vde_test.go +++ b/packages/api/vde_test.go @@ -24,6 +24,7 @@ import ( "time" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" @@ -33,121 +34,16 @@ import ( ) func TestVDECreate(t *testing.T) { - var ( - err error - retid int64 - ret vdeCreateResult - ) - - assert.NoError(t, keyLogin(1)) - - if err = sendPost(`vde/create`, nil, &ret); err != nil && - err.Error() != `400 {"error": "E_VDECREATED", "msg": "Virtual Dedicated Ecosystem is already created" }` { - t.Error(err) - return - } - - rnd := `rnd` + crypto.RandSeq(6) - form := url.Values{`Value`: {`contract ` + rnd + ` { - data { - Par string - } - action { Test("active", $Par)}}`}, `Conditions`: {`ContractConditions("MainCondition")`}, `vde`: {`true`}} - - retid, _, err = postTxResult(`NewContract`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`contract ` + rnd + ` { - data { - Par string - } - action { Test("active 5", $Par)}}`}, `Conditions`: {`ContractConditions("MainCondition")`}, `vde`: {`true`}} - assert.NoError(t, postTx(`EditContract`, &form)) - - form = url.Values{`Name`: {rnd}, `Value`: {`Test value`}, `Conditions`: {`ContractConditions("MainCondition")`}, - `vde`: {`1`}} - - retid, _, err = postTxResult(`NewParameter`, &form) - assert.NoError(t, err) + require.NoError(t, keyLogin(1)) - form = url.Values{`Name`: {`new_table`}, `Value`: {`Test value`}, `Conditions`: {`ContractConditions("MainCondition")`}, - `vde`: {`1`}} - if err = postTx(`NewParameter`, &form); err != nil && err.Error() != - `500 {"error": "E_SERVER", "msg": "{\"type\":\"warning\",\"error\":\"Parameter new_table already exists\"}" }` { - t.Error(err) - return + form := url.Values{ + "VDEName": {"testvde"}, + "DBUser": {"vdeuser"}, + "DBPassword": {"vdepassword"}, + "VDEAPIPort": {"8000"}, } - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit value`}, `Conditions`: {`true`}, - `vde`: {`1`}} - - assert.NoError(t, postTx(`EditParameter`, &form)) - - form = url.Values{"Name": {`menu` + rnd}, "Value": {`first - second - third`}, "Title": 
{`My Menu`}, - "Conditions": {`true`}, `vde`: {`1`}} - retid, _, err = postTxResult(`NewMenu`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit value`}, - `Conditions`: {`true`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`EditMenu`, &form)) - - form = url.Values{"Id": {converter.Int64ToStr(retid)}, "Value": {`Span(Append)`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`AppendMenu`, &form)) - - form = url.Values{"Name": {`page` + rnd}, "Value": {`Page`}, "Menu": {`government`}, - "Conditions": {`true`}, `vde`: {`1`}} - retid, _, err = postTxResult(`NewPage`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit page value`}, - `Conditions`: {`true`}, "Menu": {`government`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`EditPage`, &form)) - - form = url.Values{"Id": {converter.Int64ToStr(retid)}, "Value": {`Span(Test Page)`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`AppendPage`, &form)) - - form = url.Values{"Name": {`block` + rnd}, "Value": {`Page block`}, "Conditions": {`true`}, `vde`: {`1`}} - retid, _, err = postTxResult(`NewBlock`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit block value`}, - `Conditions`: {`true`}, `vde`: {`1`}} - assert.NoError(t, postTx(`EditBlock`, &form)) - - name := randName(`tbl`) - form = url.Values{"Name": {name}, `vde`: {`true`}, "Columns": {`[{"name":"MyName","type":"varchar", "index": "1", - "conditions":"true"}, - {"name":"Amount", "type":"number","index": "0", "conditions":"true"}, - {"name":"Active", "type":"character","index": "0", "conditions":"true"}]`}, - "Permissions": {`{"insert": "true", "update" : "true", "new_column": "true"}`}} - assert.NoError(t, postTx(`NewTable`, &form)) - - form = url.Values{"Name": {name}, `vde`: {`true`}, - "Permissions": {`{"insert": "ContractConditions(\"MainCondition\")", - "update" : "true", "new_column": "ContractConditions(\"MainCondition\")"}`}} - assert.NoError(t, postTx(`EditTable`, &form)) - - form = url.Values{"TableName": {name}, "Name": {`newCol`}, `vde`: {`1`}, - "Type": {"varchar"}, "Index": {"0"}, "Permissions": {"true"}} - assert.NoError(t, postTx(`NewColumn`, &form)) - - form = url.Values{"TableName": {name}, "Name": {`newColRead`}, `vde`: {`1`}, - "Type": {"varchar"}, "Index": {"0"}, "Permissions": {`{"update":"true", "read":"false"}`}} - assert.NoError(t, postTx(`NewColumn`, &form)) - - form = url.Values{"TableName": {name}, "Name": {`newCol`}, `vde`: {`1`}, - "Permissions": {"ContractConditions(\"MainCondition\")"}} - assert.NoError(t, postTx(`EditColumn`, &form)) + require.NoError(t, postTx("NewVDE", &form)) - form = url.Values{"TableName": {name}, "Name": {`newCol`}, `vde`: {`1`}, - "Permissions": {`{"update":"true", "read":"false"}`}} - assert.NoError(t, postTx(`EditColumn`, &form)) } func TestVDEParams(t *testing.T) { diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index ea83e591c..755e626c7 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -794,6 +794,7 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c NewPubkey string } conditions { + Println($NewPubkey) $newId = PubToID($NewPubkey) if $newId == 0 { error "Wrong pubkey" @@ -805,4 +806,44 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c action { DBInsert("keys", "id, pub", 
$newId, $NewPubKey) } + }', 'ContractConditions("MainCondition")'), + ('25', 'NewVDE', 'contract NewVDE { + data { + VDEName string + DBUser string + DBPassword string + VDEAPIPort int + } + + conditions { + } + + action { + CreateVDE($VDEName, $DBUser, $DBPassword, $VDEAPIPort) + } + }', 'ContractConditions("MainCondition")'), + ('26', 'ListVDE', 'contract ListVDE { + data { + VDEName string + } + + conditions { + + } + + action { + GetVDEList($VDEName) + } + }', 'ContractConditions("MainCondition")'), + ('27', 'RunVDE', 'contract RunVDE { + data { + VDEName string + } + + conditions { + } + + action { + StartVDE($VDEName) + } }', 'ContractConditions("MainCondition")');` diff --git a/packages/smart/smart.go b/packages/smart/smart.go index bb2a10a1f..b0592009e 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -861,6 +861,7 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { return retError(ErrEmptyPublicKey) } sc.PublicKeys = append(sc.PublicKeys, public) + var CheckSignResult bool CheckSignResult, err = utils.CheckSign(sc.PublicKeys, sc.TxData[`forsign`].(string), sc.TxSmart.BinSignatures, false) if err != nil { From 91764d962a9e94bd2342374d658b465d22e26643 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 12:40:33 +0300 Subject: [PATCH 041/169] move changes --- packages/conf/conf.go | 10 +- packages/migration/vde/vde.go | 958 ++++++++++++++++++++++++++++++++++ packages/smart/funcs.go | 15 + packages/vdemanager/config.go | 22 +- 4 files changed, 989 insertions(+), 16 deletions(-) create mode 100644 packages/migration/vde/vde.go diff --git a/packages/conf/conf.go b/packages/conf/conf.go index b91be9b38..59887d12e 100644 --- a/packages/conf/conf.go +++ b/packages/conf/conf.go @@ -242,26 +242,26 @@ func GetNodesAddr() []string { } // IsPrivateBlockchain check running mode -func (c GlobalConfig) IsPrivateBlockchain() bool { +func (c *GlobalConfig) IsPrivateBlockchain() bool { return RunMode(c.RunningMode).IsPrivateBlockchain() } // IsPublicBlockchain check running mode -func (c GlobalConfig) IsPublicBlockchain() bool { +func (c *GlobalConfig) IsPublicBlockchain() bool { return RunMode(c.RunningMode).IsPublicBlockchain() } // IsVDE check running mode -func (c GlobalConfig) IsVDE() bool { +func (c *GlobalConfig) IsVDE() bool { return RunMode(c.RunningMode).IsVDE() } // IsVDEMaster check running mode -func (c GlobalConfig) IsVDEMaster() bool { +func (c *GlobalConfig) IsVDEMaster() bool { return RunMode(c.RunningMode).IsVDEMaster() } // IsSupportingVDE check running mode -func (c GlobalConfig) IsSupportingVDE() bool { +func (c *GlobalConfig) IsSupportingVDE() bool { return RunMode(c.RunningMode).IsSupportingVDE() } diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde.go new file mode 100644 index 000000000..b63cf858d --- /dev/null +++ b/packages/migration/vde/vde.go @@ -0,0 +1,958 @@ +package migration + +var SchemaVDE = ` + DROP TABLE IF EXISTS "%[1]d_vde_members"; + CREATE TABLE "%[1]d_vde_members" ( + "id" bigint NOT NULL DEFAULT '0', + "member_name" varchar(255) NOT NULL DEFAULT '', + "image_id" bigint, + "member_info" jsonb + ); + ALTER TABLE ONLY "%[1]d_vde_members" ADD CONSTRAINT "%[1]d_vde_members_pkey" PRIMARY KEY ("id"); + + INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('%[2]d', 'founder'); + INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('4544233900443112470', 'guest'); + + DROP TABLE IF EXISTS "%[1]d_vde_languages"; CREATE TABLE "%[1]d_vde_languages" ( + "id" bigint NOT NULL DEFAULT 
'0', + "name" character varying(100) NOT NULL DEFAULT '', + "res" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_languages" ADD CONSTRAINT "%[1]d_vde_languages_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_vde_languages_index_name" ON "%[1]d_vde_languages" (name); + + DROP TABLE IF EXISTS "%[1]d_vde_menu"; CREATE TABLE "%[1]d_vde_menu" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "title" character varying(255) NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_menu" ADD CONSTRAINT "%[1]d_vde_menu_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_vde_menu_index_name" ON "%[1]d_vde_menu" (name); + + + INSERT INTO "%[1]d_vde_menu" ("id","name","title","value","conditions") VALUES('2','admin_menu','Admin menu','MenuItem( + Icon: "icon-screen-desktop", + Page: "interface", + Vde: "true", + Title: "Interface" +) +MenuItem( + Icon: "icon-docs", + Page: "tables", + Vde: "true", + Title: "Tables" +) +MenuItem( + Icon: "icon-briefcase", + Page: "contracts", + Vde: "true", + Title: "Smart Contracts" +) +MenuItem( + Icon: "icon-settings", + Page: "parameters", + Vde: "true", + Title: "Ecosystem parameters" +) +MenuItem( + Icon: "icon-globe", + Page: "languages", + Vde: "true", + Title: "Language resources" +) +MenuItem( + Icon: "icon-cloud-upload", + Page: "import", + Vde: "true", + Title: "Import" +) +MenuItem( + Icon: "icon-cloud-download", + Page: "export", + Vde: "true", + Title: "Export" +)','true'); + + DROP TABLE IF EXISTS "%[1]d_vde_pages"; CREATE TABLE "%[1]d_vde_pages" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "menu" character varying(255) NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '', + "validate_count" bigint NOT NULL DEFAULT '1', + "app_id" bigint NOT NULL DEFAULT '0', + "validate_mode" character(1) NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_vde_pages" ADD CONSTRAINT "%[1]d_vde_pages_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_vde_pages_index_name" ON "%[1]d_vde_pages" (name); + + INSERT INTO "%[1]d_vde_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); + + DROP TABLE IF EXISTS "%[1]d_vde_blocks"; CREATE TABLE "%[1]d_vde_blocks" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_blocks" ADD CONSTRAINT "%[1]d_vde_blocks_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_vde_blocks_index_name" ON "%[1]d_vde_blocks" (name); + + DROP TABLE IF EXISTS "%[1]d_vde_signatures"; CREATE TABLE "%[1]d_vde_signatures" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(100) NOT NULL DEFAULT '', + "value" jsonb, + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_signatures" ADD CONSTRAINT "%[1]d_vde_signatures_pkey" PRIMARY KEY (name); + + CREATE TABLE "%[1]d_vde_contracts" ( + "id" bigint NOT NULL DEFAULT '0', + "name" text NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_contracts" ADD CONSTRAINT "%[1]d_vde_contracts_pkey" PRIMARY KEY (id); + + DROP TABLE IF EXISTS "%[1]d_vde_parameters"; + CREATE TABLE "%[1]d_vde_parameters" ( + "id" bigint NOT NULL DEFAULT '0', + "name" varchar(255) UNIQUE NOT NULL DEFAULT '', + "value" text 
NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_parameters" ADD CONSTRAINT "%[1]d_vde_parameters_pkey" PRIMARY KEY ("id"); + CREATE INDEX "%[1]d_vde_parameters_index_name" ON "%[1]d_vde_parameters" (name); + + INSERT INTO "%[1]d_vde_parameters" ("id","name", "value", "conditions") VALUES + ('1','founder_account', '%[2]d', 'ContractConditions("MainCondition")'), + ('2','new_table', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('3','new_column', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('4','changing_tables', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('5','changing_language', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('6','changing_signature', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('7','changing_page', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('8','changing_menu', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('9','changing_contracts', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('10','stylesheet', 'body { + /* You can define your custom styles here or create custom CSS rules */ + }', 'ContractConditions("MainCondition")'), + ('11','changing_blocks', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'); + + DROP TABLE IF EXISTS "%[1]d_vde_cron"; + CREATE TABLE "%[1]d_vde_cron" ( + "id" bigint NOT NULL DEFAULT '0', + "owner" bigint NOT NULL DEFAULT '0', + "cron" varchar(255) NOT NULL DEFAULT '', + "contract" varchar(255) NOT NULL DEFAULT '', + "counter" bigint NOT NULL DEFAULT '0', + "till" timestamp NOT NULL DEFAULT timestamp '1970-01-01 00:00:00', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_cron" ADD CONSTRAINT "%[1]d_vde_cron_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_vde_binaries"; + CREATE TABLE "%[1]d_vde_binaries" ( + "id" bigint NOT NULL DEFAULT '0', + "app_id" bigint NOT NULL DEFAULT '1', + "member_id" bigint NOT NULL DEFAULT '0', + "name" varchar(255) NOT NULL DEFAULT '', + "data" bytea NOT NULL DEFAULT '', + "hash" varchar(32) NOT NULL DEFAULT '', + "mime_type" varchar(255) NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_binaries" ADD CONSTRAINT "%[1]d_vde_binaries_pkey" PRIMARY KEY (id); + CREATE UNIQUE INDEX "%[1]d_vde_binaries_index_app_id_member_id_name" ON "%[1]d_vde_binaries" (app_id, member_id, name); + + CREATE TABLE "%[1]d_vde_tables" ( + "id" bigint NOT NULL DEFAULT '0', + "name" varchar(100) UNIQUE NOT NULL DEFAULT '', + "permissions" jsonb, + "columns" jsonb, + "conditions" text NOT NULL DEFAULT '', + "app_id" bigint NOT NULL DEFAULT '1' + ); + ALTER TABLE ONLY "%[1]d_vde_tables" ADD CONSTRAINT "%[1]d_vde_tables_pkey" PRIMARY KEY ("id"); + CREATE INDEX "%[1]d_vde_tables_index_name" ON "%[1]d_vde_tables" (name); + + INSERT INTO "%[1]d_vde_tables" ("id", "name", "permissions","columns", "conditions") VALUES ('1', 'contracts', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "false", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), + ('2', 'languages', + '{"insert": "ContractConditions(\"MainCondition\")", "update": 
"ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{ "name": "ContractConditions(\"MainCondition\")", + "res": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), + ('3', 'menu', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('4', 'pages', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", + "value": "ContractConditions(\"MainCondition\")", + "menu": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")", + "validate_count": "ContractConditions(\"MainCondition\")", + "validate_mode": "ContractConditions(\"MainCondition\")", + "app_id": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('5', 'blocks', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('6', 'signatures', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('7', 'cron', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"owner": "ContractConditions(\"MainCondition\")", + "cron": "ContractConditions(\"MainCondition\")", + "contract": "ContractConditions(\"MainCondition\")", + "counter": "ContractConditions(\"MainCondition\")", + "till": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractConditions("MainCondition")'), + ('8', 'binaries', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"app_id": "ContractConditions(\"MainCondition\")", + "member_id": "ContractConditions(\"MainCondition\")", + "name": "ContractConditions(\"MainCondition\")", + "data": "ContractConditions(\"MainCondition\")", + "hash": "ContractConditions(\"MainCondition\")", + "mime_type": "ContractConditions(\"MainCondition\")"}', + 'ContractConditions("MainCondition")'); + + INSERT INTO "%[1]d_vde_contracts" ("id", "name", "value", "conditions") VALUES + ('1','MainCondition','contract MainCondition { + conditions { + if EcosysParam("founder_account")!=$key_id + { + warning "Sorry, you do not have access to this action." 
+ } + } + }', 'ContractConditions("MainCondition")'), + ('2','NewContract','contract NewContract { + data { + Value string + Conditions string + Wallet string "optional" + TokenEcosystem int "optional" + ApplicationId int "optional" + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + $walletContract = $key_id + if $Wallet { + $walletContract = AddressToId($Wallet) + if $walletContract == 0 { + error Sprintf("wrong wallet %%s", $Wallet) + } + } + var list array + list = ContractsList($Value) + + if Len(list) == 0 { + error "must be the name" + } + + var i int + while i < Len(list) { + if IsObject(list[i], $ecosystem_id) { + warning Sprintf("Contract or function %%s exists", list[i] ) + } + i = i + 1 + } + + $contract_name = list[0] + if !$TokenEcosystem { + $TokenEcosystem = 1 + } else { + if !SysFuel($TokenEcosystem) { + warning Sprintf("Ecosystem %%d is not system", $TokenEcosystem ) + } + } + } + action { + var root, id int + root = CompileContract($Value, $ecosystem_id, $walletContract, $TokenEcosystem) + id = DBInsert("contracts", "name,value,conditions, wallet_id, token_id,app_id", + $contract_name, $Value, $Conditions, $walletContract, $TokenEcosystem, $ApplicationId) + FlushContract(root, id, false) + $result = id + } + func rollback() { + var list array + list = ContractsList($Value) + var i int + while i < Len(list) { + RollbackContract(list[i]) + i = i + 1 + } + } + func price() int { + return SysParamInt("contract_price") + } + }', 'ContractConditions("MainCondition")'), + ('3','EditContract','contract EditContract { + data { + Id int + Value string "optional" + Conditions string "optional" + } + + func onlyConditions() bool { + return $Conditions && !$Value + } + conditions { + RowConditions("contracts", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + + var row array + row = DBFind("contracts").Columns("id,value,conditions").WhereId($Id) + if !Len(row) { + error Sprintf("Contract %%d does not exist", $Id) + } + $cur = row[0] + if $Value { + var list, curlist array + list = ContractsList($Value) + curlist = ContractsList($cur["value"]) + if Len(list) != Len(curlist) { + error "Contracts cannot be removed or inserted" + } + var i int + while i < Len(list) { + var j int + var ok bool + while j < Len(curlist) { + if curlist[j] == list[i] { + ok = true + break + } + j = j + 1 + } + if !ok { + error "Contracts or functions names cannot be changed" + } + i = i + 1 + } + } + } + action { + var root int + var pars, vals array + + if $Value { + root = CompileContract($Value, $ecosystem_id, 0, 0) + pars[0] = "value" + vals[0] = $Value + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if Len(vals) > 0 { + DBUpdate("contracts", $Id, Join(pars, ","), vals...) 
+ } + if $Value { + FlushContract(root, $Id, false) + } + } + }', 'ContractConditions("MainCondition")'), + ('4','NewParameter','contract NewParameter { + data { + Name string + Value string + Conditions string + } + conditions { + var ret array + ValidateCondition($Conditions, $ecosystem_id) + ret = DBFind("parameters").Columns("id").Where("name=?", $Name).Limit(1) + if Len(ret) > 0 { + warning Sprintf( "Parameter %%s already exists", $Name) + } + } + action { + $result = DBInsert("parameters", "name,value,conditions", $Name, $Value, $Conditions ) + } + }', 'ContractConditions("MainCondition")'), + ('5','EditParameter','contract EditParameter { + data { + Id int + Value string + Conditions string + } + func onlyConditions() bool { + return $Conditions && !$Value + } + conditions { + RowConditions("parameters", $Id, onlyConditions()) + ValidateCondition($Conditions, $ecosystem_id) + } + action { + DBUpdate("parameters", $Id, "value,conditions", $Value, $Conditions ) + } + }', 'ContractConditions("MainCondition")'), + ('6', 'NewMenu','contract NewMenu { + data { + Name string + Value string + Title string "optional" + Conditions string + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + + var row map + row = DBRow("menu").Columns("id").Where("name = ?", $Name) + + if row { + warning Sprintf( "Menu %%s already exists", $Name) + } + } + action { + DBInsert("menu", "name,value,title,conditions", $Name, $Value, $Title, $Conditions ) + } + func price() int { + return SysParamInt("menu_price") + } + }', 'ContractConditions("MainCondition")'), + ('7','EditMenu','contract EditMenu { + data { + Id int + Value string "optional" + Title string "optional" + Conditions string "optional" + } + + func onlyConditions() bool { + return $Conditions && !$Value && !$Title + } + conditions { + RowConditions("menu", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + } + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Title { + pars[Len(pars)] = "title" + vals[Len(vals)] = $Title + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if Len(vals) > 0 { + DBUpdate("menu", $Id, Join(pars, ","), vals...) 
+ } + } + }', 'ContractConditions("MainCondition")'), + ('8','AppendMenu','contract AppendMenu { + data { + Id int + Value string + } + conditions { + RowConditions("menu", $Id, false) + } + action { + var row map + row = DBRow("menu").Columns("value").WhereId($Id) + DBUpdate("menu", $Id, "value", row["value"] + "\r\n" + $Value) + } + }', 'ContractConditions("MainCondition")'), + ('9','NewPage','contract NewPage { + data { + Name string + Value string + Menu string + Conditions string + ValidateCount int "optional" + ApplicationId int "optional" + ValidateMode int "optional" + } + func preparePageValidateCount(count int) int { + var min, max int + min = Int(EcosysParam("min_page_validate_count")) + max = Int(EcosysParam("max_page_validate_count")) + + if count < min { + count = min + } else { + if count > max { + count = max + } + } + + return count + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + + var row map + row = DBRow("pages").Columns("id").Where("name = ?", $Name) + + if row { + warning Sprintf( "Page %%s already exists", $Name) + } + + $ValidateCount = preparePageValidateCount($ValidateCount) + } + action { + DBInsert("pages", "name,value,menu,validate_count,conditions,app_id,validate_mode", + $Name, $Value, $Menu, $ValidateCount, $Conditions, $ApplicationId, $ValidateMode) + } + func price() int { + return SysParamInt("page_price") + } + }', 'ContractConditions("MainCondition")'), + ('10','EditPage','contract EditPage { + data { + Id int + Value string "optional" + Menu string "optional" + Conditions string "optional" + ValidateCount int "optional" + ValidateMode string "optional" + } + func onlyConditions() bool { + return $Conditions && !$Value && !$Menu + } + func preparePageValidateCount(count int) int { + var min, max int + min = Int(EcosysParam("min_page_validate_count")) + max = Int(EcosysParam("max_page_validate_count")) + + if count < min { + count = min + } else { + if count > max { + count = max + } + } + + return count + } + conditions { + RowConditions("pages", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + $ValidateCount = preparePageValidateCount($ValidateCount) + } + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Menu { + pars[Len(pars)] = "menu" + vals[Len(vals)] = $Menu + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if $ValidateCount { + pars[Len(pars)] = "validate_count" + vals[Len(vals)] = $ValidateCount + } + if $ValidateMode { + if $ValidateMode != "1" { + $ValidateMode = "0" + } + pars[Len(pars)] = "validate_mode" + vals[Len(vals)] = $ValidateMode + } + if Len(vals) > 0 { + DBUpdate("pages", $Id, Join(pars, ","), vals...) 
+ } + } + }', 'ContractConditions("MainCondition")'), + ('11','AppendPage','contract AppendPage { + data { + Id int + Value string + } + conditions { + RowConditions("pages", $Id, false) + } + action { + var row map + row = DBRow("pages").Columns("value").WhereId($Id) + DBUpdate("pages", $Id, "value", row["value"] + "\r\n" + $Value) + } + }', 'ContractConditions("MainCondition")'), + ('12','NewBlock','contract NewBlock { + data { + Name string + Value string + Conditions string + ApplicationId int "optional" + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + + var row map + row = DBRow("blocks").Columns("id").Where("name = ?", $Name) + + if row { + warning Sprintf( "Block %%s already exists", $Name) + } + } + action { + DBInsert("blocks", "name,value,conditions,app_id", $Name, $Value, $Conditions, $ApplicationId ) + } + }', 'ContractConditions("MainCondition")'), + ('13','EditBlock','contract EditBlock { + data { + Id int + Value string "optional" + Conditions string "optional" + } + + func onlyConditions() bool { + return $Conditions && !$Value + } + + conditions { + RowConditions("blocks", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + } + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if Len(vals) > 0 { + DBUpdate("blocks", $Id, Join(pars, ","), vals...) + } + } + }', 'ContractConditions("MainCondition")'), + ('14','NewTable','contract NewTable { + data { + Name string + Columns string + Permissions string + ApplicationId int "optional" + } + conditions { + TableConditions($Name, $Columns, $Permissions) + } + action { + CreateTable($Name, $Columns, $Permissions, $ApplicationId) + } + func rollback() { + RollbackTable($Name) + } + func price() int { + return SysParamInt("table_price") + } + }', 'ContractConditions("MainCondition")'), + ('15','EditTable','contract EditTable { + data { + Name string + Permissions string + } + conditions { + TableConditions($Name, "", $Permissions) + } + action { + PermTable($Name, $Permissions ) + } + }', 'ContractConditions("MainCondition")'), + ('16','NewColumn','contract NewColumn { + data { + TableName string + Name string + Type string + Permissions string + } + conditions { + ColumnCondition($TableName, $Name, $Type, $Permissions) + } + action { + CreateColumn($TableName, $Name, $Type, $Permissions) + } + }', 'ContractConditions("MainCondition")'), + ('17','EditColumn','contract EditColumn { + data { + TableName string + Name string + Permissions string + } + conditions { + ColumnCondition($TableName, $Name, "", $Permissions) + } + action { + PermColumn($TableName, $Name, $Permissions) + } + }', 'ContractConditions("MainCondition")'), + ('18','NewLang','contract NewLang { + data { + Name string + Trans string + AppID int + } + conditions { + EvalCondition("parameters", "changing_language", "value") + var row array + row = DBFind("languages").Columns("name").Where("name=? 
AND app_id=?", $Name, $AppID).Limit(1) + if Len(row) > 0 { + error Sprintf("The language resource %%s already exists", $Name) + } + } + action { + DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) + UpdateLang($AppID, $Name, $Trans) + } + }', 'ContractConditions("MainCondition")'), + ('19','EditLang','contract EditLang { + data { + Id int + Name string + Trans string + AppID int + } + conditions { + EvalCondition("parameters", "changing_language", "value") + } + action { + DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) + UpdateLang($AppID, $Name, $Trans) + } + }', 'ContractConditions("MainCondition")'), + ('20','Import','contract Import { + data { + Data string + } + conditions { + $list = JSONDecode($Data) + } + func ImportList(row array, cnt string) { + if !row { + return + } + var i int + while i < Len(row) { + var idata map + idata = row[i] + if(cnt == "pages"){ + $ret_page = DBFind("pages").Columns("id").Where("name=$", idata["Name"]) + $page_id = One($ret_page, "id") + if ($page_id != nil){ + idata["Id"] = Int($page_id) + CallContract("EditPage", idata) + } else { + CallContract("NewPage", idata) + } + } + if(cnt == "blocks"){ + $ret_block = DBFind("blocks").Columns("id").Where("name=$", idata["Name"]) + $block_id = One($ret_block, "id") + if ($block_id != nil){ + idata["Id"] = Int($block_id) + CallContract("EditBlock", idata) + } else { + CallContract("NewBlock", idata) + } + } + if(cnt == "menus"){ + $ret_menu = DBFind("menu").Columns("id,value").Where("name=$", idata["Name"]) + $menu_id = One($ret_menu, "id") + $menu_value = One($ret_menu, "value") + if ($menu_id != nil){ + idata["Id"] = Int($menu_id) + idata["Value"] = Str($menu_value) + "\n" + Str(idata["Value"]) + CallContract("EditMenu", idata) + } else { + CallContract("NewMenu", idata) + } + } + if(cnt == "parameters"){ + $ret_param = DBFind("parameters").Columns("id").Where("name=$", idata["Name"]) + $param_id = One($ret_param, "id") + if ($param_id != nil){ + idata["Id"] = Int($param_id) + CallContract("EditParameter", idata) + } else { + CallContract("NewParameter", idata) + } + } + if(cnt == "languages"){ + $ret_lang = DBFind("languages").Columns("id").Where("name=$", idata["Name"]) + $lang_id = One($ret_lang, "id") + if ($lang_id != nil){ + CallContract("EditLang", idata) + } else { + CallContract("NewLang", idata) + } + } + if(cnt == "contracts"){ + if IsObject(idata["Name"], $ecosystem_id){ + } else { + CallContract("NewContract", idata) + } + } + if(cnt == "tables"){ + $ret_table = DBFind("tables").Columns("id").Where("name=$", idata["Name"]) + $table_id = One($ret_table, "id") + if ($table_id != nil){ + } else { + CallContract("NewTable", idata) + } + } + i = i + 1 + } + } + func ImportData(row array) { + if !row { + return + } + var i int + while i < Len(row) { + var idata map + var list array + var tblname, columns string + idata = row[i] + i = i + 1 + tblname = idata["Table"] + columns = Join(idata["Columns"], ",") + list = idata["Data"] + if !list { + continue + } + var j int + while j < Len(list) { + var ilist array + ilist = list[j] + DBInsert(tblname, columns, ilist) + j=j+1 + } + } + } + action { + ImportList($list["pages"], "pages") + ImportList($list["blocks"], "blocks") + ImportList($list["menus"], "menus") + ImportList($list["parameters"], "parameters") + ImportList($list["languages"], "languages") + ImportList($list["contracts"], "contracts") + ImportList($list["tables"], "tables") + ImportData($list["data"]) + } + }', 'ContractConditions("MainCondition")'), + ('21', 
'NewCron','contract NewCron { + data { + Cron string + Contract string + Limit int "optional" + Till string "optional date" + Conditions string + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + ValidateCron($Cron) + } + action { + if !$Till { + $Till = "1970-01-01 00:00:00" + } + if !HasPrefix($Contract, "@") { + $Contract = "@" + Str($ecosystem_id) + $Contract + } + $result = DBInsert("cron", "owner,cron,contract,counter,till,conditions", + $key_id, $Cron, $Contract, $Limit, $Till, $Conditions) + UpdateCron($result) + } + }', 'ContractConditions("MainCondition")'), + ('22','EditCron','contract EditCron { + data { + Id int + Contract string + Cron string "optional" + Limit int "optional" + Till string "optional date" + Conditions string + } + conditions { + ConditionById("cron", true) + ValidateCron($Cron) + } + action { + if !$Till { + $Till = "1970-01-01 00:00:00" + } + if !HasPrefix($Contract, "@") { + $Contract = "@" + Str($ecosystem_id) + $Contract + } + DBUpdate("cron", $Id, "cron,contract,counter,till,conditions", + $Cron, $Contract, $Limit, $Till, $Conditions) + UpdateCron($Id) + } + }', 'ContractConditions("MainCondition")'), + ('23', 'UploadBinary', contract UploadBinary { + data { + Name string + Data bytes "file" + AppID int + DataMimeType string "optional" + MemberID int "optional" + } + conditions { + $Id = Int(DBFind("binaries").Columns("id").Where("app_id = ? AND member_id = ? AND name = ?", $AppID, $MemberID, $Name).One("id")) + } + action { + var hash string + hash = MD5($Data) + + if $DataMimeType == "" { + $DataMimeType = "application/octet-stream" + } + + if $Id != 0 { + DBUpdate("binaries", $Id, "data,hash,mime_type", $Data, hash, $DataMimeType) + } else { + $Id = DBInsert("binaries", "app_id,member_id,name,data,hash,mime_type", $AppID, $MemberID, $Name, $Data, hash, $DataMimeType) + } + + $result = $Id + } + }', 'ContractConditions("MainCondition")'); + ` diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 8358cb003..22861a3a7 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -270,6 +270,21 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { f["GetVDEList"] = GetVDEList vmExtendCost(vm, getCost) vmFuncCallsDB(vm, funcCallsDB) + case script.VMTypeVDEMaster: + f["HTTPRequest"] = HTTPRequest + f["GetMapKeys"] = GetMapKeys + f["SortedKeys"] = SortedKeys + f["Date"] = Date + f["HTTPPostJSON"] = HTTPPostJSON + f["ValidateCron"] = ValidateCron + f["UpdateCron"] = UpdateCron + f["CreateVDE"] = CreateVDE + f["DeleteVDE"] = DeleteVDE + f["StartVDE"] = StartVDE + f["StopVDE"] = StopVDE + f["GetVDEList"] = GetVDEList + vmExtendCost(vm, getCost) + vmFuncCallsDB(vm, funcCallsDB) case script.VMTypeSmart: f["GetBlock"] = GetBlock f["UpdateNodesBan"] = UpdateNodesBan diff --git a/packages/vdemanager/config.go b/packages/vdemanager/config.go index bcafa10ff..c5d06f741 100644 --- a/packages/vdemanager/config.go +++ b/packages/vdemanager/config.go @@ -31,33 +31,33 @@ func (c ChildVDEConfig) configCommand() *exec.Cmd { fmt.Sprintf("--dbUser=%s", c.DBUser), fmt.Sprintf("--dbPassword=%s", c.DBPassword), fmt.Sprintf("--dbName=%s", c.Name), - fmt.Sprintf("--httpPort=%d", c.HTTPPort), + fmt.Sprintf("--httpPort=%d", c.HTTPPort) fmt.Sprintf("--dataDir=%s", c.Directory), fmt.Sprintf("--keysDir=%s", c.Directory), - "--runMode=VDE", + fmt.Sprintf("--runMode=VDE") } return exec.Command(c.Executable, args...) 
} -func (c ChildVDEConfig) initDBCommand() *exec.Cmd { - return c.getCommand(inidDBCommand) +func (c ChildVDEConfig) initDBCommand() exec.Cmd { + return getCommand(inidDBCommand) } -func (c ChildVDEConfig) generateKeysCommand() *exec.Cmd { - return c.getCommand(genKeysCommand) +func (c ChildVDEConfig) generateKeysCommand() exec.Cmd { + return getCommand(genKeysCommand) } -func (c ChildVDEConfig) startCommand() *exec.Cmd { - return c.getCommand(startCommand) +func (c ChildVDEConfig) startCommand() exec.Cmd { + retturn getCommand(startCommand) } func (c ChildVDEConfig) configPath() string { - return filepath.Join(c.Directory, c.ConfigFileName) + return filepath.Join(c.Directory, ConfigFileName) } -func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { - args := []string{ +func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { + return args := []string{ commandName, fmt.Sprintf("--config=%s", c.configPath()), } From 0904cc8281b9d2006512fb99cdb714c53e5401cc Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:37:49 +0300 Subject: [PATCH 042/169] separate routes by vde --- packages/api/route.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/api/route.go b/packages/api/route.go index 16f4857a4..ef84e9637 100644 --- a/packages/api/route.go +++ b/packages/api/route.go @@ -85,14 +85,15 @@ func Route(route *hr.Router) { methodRoute(route, `POST`, `node/:name`, `?token_ecosystem:int64,?max_sum ?payover:string`, contractHandlers.nodeContract) if !conf.Config.IsSupportingVDE() { + get(`txstatus/:hash`, ``, authWallet, txstatus) + get(`txstatusMultiple`, `data:string`, authWallet, txstatusMulti) get(`appparam/:appid/:name`, `?ecosystem:int64`, authWallet, appParam) get(`appparams/:appid`, `?ecosystem:int64,?names:string`, authWallet, appParams) - get(`txstatus/:hash`, ``, authWallet, txstatus) get(`history/:table/:id`, ``, authWallet, getHistory) get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) get(`block/:id`, ``, getBlockInfo) get(`maxblockid`, ``, getMaxBlockID) - + get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) get(`systemparams`, `?names:string`, authWallet, systemParams) get(`ecosystems`, ``, authWallet, ecosystems) From a9350d4d2f616e6557da569a7c9a0224d91faecb Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:38:36 +0300 Subject: [PATCH 043/169] separate vde migration to own package --- packages/migration/vde/vde.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde.go index b63cf858d..640338e93 100644 --- a/packages/migration/vde/vde.go +++ b/packages/migration/vde/vde.go @@ -1,4 +1,4 @@ -package migration +package vde var SchemaVDE = ` DROP TABLE IF EXISTS "%[1]d_vde_members"; From 5d4d319c4c9a0abf21456305bb251a2eb76851b6 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 15:59:38 +0300 Subject: [PATCH 044/169] temp commit --- packages/vdemanager/config.go | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/packages/vdemanager/config.go b/packages/vdemanager/config.go index c5d06f741..bcafa10ff 100644 --- a/packages/vdemanager/config.go +++ b/packages/vdemanager/config.go @@ -31,33 +31,33 @@ func (c ChildVDEConfig) configCommand() *exec.Cmd { fmt.Sprintf("--dbUser=%s", c.DBUser), fmt.Sprintf("--dbPassword=%s", c.DBPassword), 
fmt.Sprintf("--dbName=%s", c.Name), - fmt.Sprintf("--httpPort=%d", c.HTTPPort) + fmt.Sprintf("--httpPort=%d", c.HTTPPort), fmt.Sprintf("--dataDir=%s", c.Directory), fmt.Sprintf("--keysDir=%s", c.Directory), - fmt.Sprintf("--runMode=VDE") + "--runMode=VDE", } return exec.Command(c.Executable, args...) } -func (c ChildVDEConfig) initDBCommand() exec.Cmd { - return getCommand(inidDBCommand) +func (c ChildVDEConfig) initDBCommand() *exec.Cmd { + return c.getCommand(inidDBCommand) } -func (c ChildVDEConfig) generateKeysCommand() exec.Cmd { - return getCommand(genKeysCommand) +func (c ChildVDEConfig) generateKeysCommand() *exec.Cmd { + return c.getCommand(genKeysCommand) } -func (c ChildVDEConfig) startCommand() exec.Cmd { - retturn getCommand(startCommand) +func (c ChildVDEConfig) startCommand() *exec.Cmd { + return c.getCommand(startCommand) } func (c ChildVDEConfig) configPath() string { - return filepath.Join(c.Directory, ConfigFileName) + return filepath.Join(c.Directory, c.ConfigFileName) } -func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { - return args := []string{ +func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { + args := []string{ commandName, fmt.Sprintf("--config=%s", c.configPath()), } From 6df11382bac5d0c5a9a8b1984b16a9e078fc86ec Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Tue, 8 May 2018 09:59:10 +0300 Subject: [PATCH 045/169] temporary commit --- packages/conf/conf.go | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/conf/conf.go b/packages/conf/conf.go index 59887d12e..b91be9b38 100644 --- a/packages/conf/conf.go +++ b/packages/conf/conf.go @@ -242,26 +242,26 @@ func GetNodesAddr() []string { } // IsPrivateBlockchain check running mode -func (c *GlobalConfig) IsPrivateBlockchain() bool { +func (c GlobalConfig) IsPrivateBlockchain() bool { return RunMode(c.RunningMode).IsPrivateBlockchain() } // IsPublicBlockchain check running mode -func (c *GlobalConfig) IsPublicBlockchain() bool { +func (c GlobalConfig) IsPublicBlockchain() bool { return RunMode(c.RunningMode).IsPublicBlockchain() } // IsVDE check running mode -func (c *GlobalConfig) IsVDE() bool { +func (c GlobalConfig) IsVDE() bool { return RunMode(c.RunningMode).IsVDE() } // IsVDEMaster check running mode -func (c *GlobalConfig) IsVDEMaster() bool { +func (c GlobalConfig) IsVDEMaster() bool { return RunMode(c.RunningMode).IsVDEMaster() } // IsSupportingVDE check running mode -func (c *GlobalConfig) IsSupportingVDE() bool { +func (c GlobalConfig) IsSupportingVDE() bool { return RunMode(c.RunningMode).IsSupportingVDE() } From babb7169e2d463112624cd943016e3130a103096 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 10 May 2018 17:15:56 +0300 Subject: [PATCH 046/169] temporary commit --- packages/daylight/start.go | 7 + packages/migration/vde/vde.go | 958 ------------------- packages/migration/vde/vde_data_contracts.go | 152 +-- 3 files changed, 26 insertions(+), 1091 deletions(-) delete mode 100644 packages/migration/vde/vde.go diff --git a/packages/daylight/start.go b/packages/daylight/start.go index 98394511e..aede916a8 100644 --- a/packages/daylight/start.go +++ b/packages/daylight/start.go @@ -279,6 +279,13 @@ func Start() { } } + if conf.Config.IsSupportingVDE() { + if err := smart.LoadVDEContracts(nil, converter.Int64ToStr(consts.DefaultVDE)); err != nil { + log.WithFields(log.Fields{"type": consts.VMError, "error": err}).Fatal("on loading vde virtual mashine") + Exit(1) + } + } + if conf.Config.IsVDEMaster() { 
vdemanager.InitVDEManager() } diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde.go deleted file mode 100644 index 640338e93..000000000 --- a/packages/migration/vde/vde.go +++ /dev/null @@ -1,958 +0,0 @@ -package vde - -var SchemaVDE = ` - DROP TABLE IF EXISTS "%[1]d_vde_members"; - CREATE TABLE "%[1]d_vde_members" ( - "id" bigint NOT NULL DEFAULT '0', - "member_name" varchar(255) NOT NULL DEFAULT '', - "image_id" bigint, - "member_info" jsonb - ); - ALTER TABLE ONLY "%[1]d_vde_members" ADD CONSTRAINT "%[1]d_vde_members_pkey" PRIMARY KEY ("id"); - - INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('%[2]d', 'founder'); - INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('4544233900443112470', 'guest'); - - DROP TABLE IF EXISTS "%[1]d_vde_languages"; CREATE TABLE "%[1]d_vde_languages" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(100) NOT NULL DEFAULT '', - "res" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_languages" ADD CONSTRAINT "%[1]d_vde_languages_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_languages_index_name" ON "%[1]d_vde_languages" (name); - - DROP TABLE IF EXISTS "%[1]d_vde_menu"; CREATE TABLE "%[1]d_vde_menu" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "title" character varying(255) NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_menu" ADD CONSTRAINT "%[1]d_vde_menu_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_menu_index_name" ON "%[1]d_vde_menu" (name); - - - INSERT INTO "%[1]d_vde_menu" ("id","name","title","value","conditions") VALUES('2','admin_menu','Admin menu','MenuItem( - Icon: "icon-screen-desktop", - Page: "interface", - Vde: "true", - Title: "Interface" -) -MenuItem( - Icon: "icon-docs", - Page: "tables", - Vde: "true", - Title: "Tables" -) -MenuItem( - Icon: "icon-briefcase", - Page: "contracts", - Vde: "true", - Title: "Smart Contracts" -) -MenuItem( - Icon: "icon-settings", - Page: "parameters", - Vde: "true", - Title: "Ecosystem parameters" -) -MenuItem( - Icon: "icon-globe", - Page: "languages", - Vde: "true", - Title: "Language resources" -) -MenuItem( - Icon: "icon-cloud-upload", - Page: "import", - Vde: "true", - Title: "Import" -) -MenuItem( - Icon: "icon-cloud-download", - Page: "export", - Vde: "true", - Title: "Export" -)','true'); - - DROP TABLE IF EXISTS "%[1]d_vde_pages"; CREATE TABLE "%[1]d_vde_pages" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "menu" character varying(255) NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '', - "validate_count" bigint NOT NULL DEFAULT '1', - "app_id" bigint NOT NULL DEFAULT '0', - "validate_mode" character(1) NOT NULL DEFAULT '0' - ); - ALTER TABLE ONLY "%[1]d_vde_pages" ADD CONSTRAINT "%[1]d_vde_pages_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_pages_index_name" ON "%[1]d_vde_pages" (name); - - INSERT INTO "%[1]d_vde_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); - - DROP TABLE IF EXISTS "%[1]d_vde_blocks"; CREATE TABLE "%[1]d_vde_blocks" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_blocks" ADD CONSTRAINT "%[1]d_vde_blocks_pkey" PRIMARY KEY (id); - CREATE INDEX 
"%[1]d_vde_blocks_index_name" ON "%[1]d_vde_blocks" (name); - - DROP TABLE IF EXISTS "%[1]d_vde_signatures"; CREATE TABLE "%[1]d_vde_signatures" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(100) NOT NULL DEFAULT '', - "value" jsonb, - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_signatures" ADD CONSTRAINT "%[1]d_vde_signatures_pkey" PRIMARY KEY (name); - - CREATE TABLE "%[1]d_vde_contracts" ( - "id" bigint NOT NULL DEFAULT '0', - "name" text NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_contracts" ADD CONSTRAINT "%[1]d_vde_contracts_pkey" PRIMARY KEY (id); - - DROP TABLE IF EXISTS "%[1]d_vde_parameters"; - CREATE TABLE "%[1]d_vde_parameters" ( - "id" bigint NOT NULL DEFAULT '0', - "name" varchar(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_parameters" ADD CONSTRAINT "%[1]d_vde_parameters_pkey" PRIMARY KEY ("id"); - CREATE INDEX "%[1]d_vde_parameters_index_name" ON "%[1]d_vde_parameters" (name); - - INSERT INTO "%[1]d_vde_parameters" ("id","name", "value", "conditions") VALUES - ('1','founder_account', '%[2]d', 'ContractConditions("MainCondition")'), - ('2','new_table', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('3','new_column', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('4','changing_tables', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('5','changing_language', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('6','changing_signature', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('7','changing_page', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('8','changing_menu', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('9','changing_contracts', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('10','stylesheet', 'body { - /* You can define your custom styles here or create custom CSS rules */ - }', 'ContractConditions("MainCondition")'), - ('11','changing_blocks', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'); - - DROP TABLE IF EXISTS "%[1]d_vde_cron"; - CREATE TABLE "%[1]d_vde_cron" ( - "id" bigint NOT NULL DEFAULT '0', - "owner" bigint NOT NULL DEFAULT '0', - "cron" varchar(255) NOT NULL DEFAULT '', - "contract" varchar(255) NOT NULL DEFAULT '', - "counter" bigint NOT NULL DEFAULT '0', - "till" timestamp NOT NULL DEFAULT timestamp '1970-01-01 00:00:00', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_cron" ADD CONSTRAINT "%[1]d_vde_cron_pkey" PRIMARY KEY ("id"); - - DROP TABLE IF EXISTS "%[1]d_vde_binaries"; - CREATE TABLE "%[1]d_vde_binaries" ( - "id" bigint NOT NULL DEFAULT '0', - "app_id" bigint NOT NULL DEFAULT '1', - "member_id" bigint NOT NULL DEFAULT '0', - "name" varchar(255) NOT NULL DEFAULT '', - "data" bytea NOT NULL DEFAULT '', - "hash" varchar(32) NOT NULL DEFAULT '', - "mime_type" varchar(255) NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_binaries" ADD CONSTRAINT "%[1]d_vde_binaries_pkey" PRIMARY KEY (id); - CREATE UNIQUE INDEX "%[1]d_vde_binaries_index_app_id_member_id_name" ON "%[1]d_vde_binaries" (app_id, member_id, name); - - CREATE TABLE "%[1]d_vde_tables" ( - "id" bigint NOT NULL DEFAULT '0', - 
"name" varchar(100) UNIQUE NOT NULL DEFAULT '', - "permissions" jsonb, - "columns" jsonb, - "conditions" text NOT NULL DEFAULT '', - "app_id" bigint NOT NULL DEFAULT '1' - ); - ALTER TABLE ONLY "%[1]d_vde_tables" ADD CONSTRAINT "%[1]d_vde_tables_pkey" PRIMARY KEY ("id"); - CREATE INDEX "%[1]d_vde_tables_index_name" ON "%[1]d_vde_tables" (name); - - INSERT INTO "%[1]d_vde_tables" ("id", "name", "permissions","columns", "conditions") VALUES ('1', 'contracts', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "false", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), - ('2', 'languages', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{ "name": "ContractConditions(\"MainCondition\")", - "res": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), - ('3', 'menu', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('4', 'pages', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "menu": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")", - "validate_count": "ContractConditions(\"MainCondition\")", - "validate_mode": "ContractConditions(\"MainCondition\")", - "app_id": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('5', 'blocks', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('6', 'signatures', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('7', 'cron', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"owner": "ContractConditions(\"MainCondition\")", - "cron": "ContractConditions(\"MainCondition\")", - "contract": "ContractConditions(\"MainCondition\")", - "counter": "ContractConditions(\"MainCondition\")", - "till": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractConditions("MainCondition")'), - ('8', 'binaries', - '{"insert": "ContractConditions(\"MainCondition\")", 
"update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"app_id": "ContractConditions(\"MainCondition\")", - "member_id": "ContractConditions(\"MainCondition\")", - "name": "ContractConditions(\"MainCondition\")", - "data": "ContractConditions(\"MainCondition\")", - "hash": "ContractConditions(\"MainCondition\")", - "mime_type": "ContractConditions(\"MainCondition\")"}', - 'ContractConditions("MainCondition")'); - - INSERT INTO "%[1]d_vde_contracts" ("id", "name", "value", "conditions") VALUES - ('1','MainCondition','contract MainCondition { - conditions { - if EcosysParam("founder_account")!=$key_id - { - warning "Sorry, you do not have access to this action." - } - } - }', 'ContractConditions("MainCondition")'), - ('2','NewContract','contract NewContract { - data { - Value string - Conditions string - Wallet string "optional" - TokenEcosystem int "optional" - ApplicationId int "optional" - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - $walletContract = $key_id - if $Wallet { - $walletContract = AddressToId($Wallet) - if $walletContract == 0 { - error Sprintf("wrong wallet %%s", $Wallet) - } - } - var list array - list = ContractsList($Value) - - if Len(list) == 0 { - error "must be the name" - } - - var i int - while i < Len(list) { - if IsObject(list[i], $ecosystem_id) { - warning Sprintf("Contract or function %%s exists", list[i] ) - } - i = i + 1 - } - - $contract_name = list[0] - if !$TokenEcosystem { - $TokenEcosystem = 1 - } else { - if !SysFuel($TokenEcosystem) { - warning Sprintf("Ecosystem %%d is not system", $TokenEcosystem ) - } - } - } - action { - var root, id int - root = CompileContract($Value, $ecosystem_id, $walletContract, $TokenEcosystem) - id = DBInsert("contracts", "name,value,conditions, wallet_id, token_id,app_id", - $contract_name, $Value, $Conditions, $walletContract, $TokenEcosystem, $ApplicationId) - FlushContract(root, id, false) - $result = id - } - func rollback() { - var list array - list = ContractsList($Value) - var i int - while i < Len(list) { - RollbackContract(list[i]) - i = i + 1 - } - } - func price() int { - return SysParamInt("contract_price") - } - }', 'ContractConditions("MainCondition")'), - ('3','EditContract','contract EditContract { - data { - Id int - Value string "optional" - Conditions string "optional" - } - - func onlyConditions() bool { - return $Conditions && !$Value - } - conditions { - RowConditions("contracts", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - - var row array - row = DBFind("contracts").Columns("id,value,conditions").WhereId($Id) - if !Len(row) { - error Sprintf("Contract %%d does not exist", $Id) - } - $cur = row[0] - if $Value { - var list, curlist array - list = ContractsList($Value) - curlist = ContractsList($cur["value"]) - if Len(list) != Len(curlist) { - error "Contracts cannot be removed or inserted" - } - var i int - while i < Len(list) { - var j int - var ok bool - while j < Len(curlist) { - if curlist[j] == list[i] { - ok = true - break - } - j = j + 1 - } - if !ok { - error "Contracts or functions names cannot be changed" - } - i = i + 1 - } - } - } - action { - var root int - var pars, vals array - - if $Value { - root = CompileContract($Value, $ecosystem_id, 0, 0) - pars[0] = "value" - vals[0] = $Value - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if Len(vals) > 0 { - DBUpdate("contracts", $Id, Join(pars, ","), vals...) 
- } - if $Value { - FlushContract(root, $Id, false) - } - } - }', 'ContractConditions("MainCondition")'), - ('4','NewParameter','contract NewParameter { - data { - Name string - Value string - Conditions string - } - conditions { - var ret array - ValidateCondition($Conditions, $ecosystem_id) - ret = DBFind("parameters").Columns("id").Where("name=?", $Name).Limit(1) - if Len(ret) > 0 { - warning Sprintf( "Parameter %%s already exists", $Name) - } - } - action { - $result = DBInsert("parameters", "name,value,conditions", $Name, $Value, $Conditions ) - } - }', 'ContractConditions("MainCondition")'), - ('5','EditParameter','contract EditParameter { - data { - Id int - Value string - Conditions string - } - func onlyConditions() bool { - return $Conditions && !$Value - } - conditions { - RowConditions("parameters", $Id, onlyConditions()) - ValidateCondition($Conditions, $ecosystem_id) - } - action { - DBUpdate("parameters", $Id, "value,conditions", $Value, $Conditions ) - } - }', 'ContractConditions("MainCondition")'), - ('6', 'NewMenu','contract NewMenu { - data { - Name string - Value string - Title string "optional" - Conditions string - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - - var row map - row = DBRow("menu").Columns("id").Where("name = ?", $Name) - - if row { - warning Sprintf( "Menu %%s already exists", $Name) - } - } - action { - DBInsert("menu", "name,value,title,conditions", $Name, $Value, $Title, $Conditions ) - } - func price() int { - return SysParamInt("menu_price") - } - }', 'ContractConditions("MainCondition")'), - ('7','EditMenu','contract EditMenu { - data { - Id int - Value string "optional" - Title string "optional" - Conditions string "optional" - } - - func onlyConditions() bool { - return $Conditions && !$Value && !$Title - } - conditions { - RowConditions("menu", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - } - action { - var pars, vals array - if $Value { - pars[0] = "value" - vals[0] = $Value - } - if $Title { - pars[Len(pars)] = "title" - vals[Len(vals)] = $Title - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if Len(vals) > 0 { - DBUpdate("menu", $Id, Join(pars, ","), vals...) 
- } - } - }', 'ContractConditions("MainCondition")'), - ('8','AppendMenu','contract AppendMenu { - data { - Id int - Value string - } - conditions { - RowConditions("menu", $Id, false) - } - action { - var row map - row = DBRow("menu").Columns("value").WhereId($Id) - DBUpdate("menu", $Id, "value", row["value"] + "\r\n" + $Value) - } - }', 'ContractConditions("MainCondition")'), - ('9','NewPage','contract NewPage { - data { - Name string - Value string - Menu string - Conditions string - ValidateCount int "optional" - ApplicationId int "optional" - ValidateMode int "optional" - } - func preparePageValidateCount(count int) int { - var min, max int - min = Int(EcosysParam("min_page_validate_count")) - max = Int(EcosysParam("max_page_validate_count")) - - if count < min { - count = min - } else { - if count > max { - count = max - } - } - - return count - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - - var row map - row = DBRow("pages").Columns("id").Where("name = ?", $Name) - - if row { - warning Sprintf( "Page %%s already exists", $Name) - } - - $ValidateCount = preparePageValidateCount($ValidateCount) - } - action { - DBInsert("pages", "name,value,menu,validate_count,conditions,app_id,validate_mode", - $Name, $Value, $Menu, $ValidateCount, $Conditions, $ApplicationId, $ValidateMode) - } - func price() int { - return SysParamInt("page_price") - } - }', 'ContractConditions("MainCondition")'), - ('10','EditPage','contract EditPage { - data { - Id int - Value string "optional" - Menu string "optional" - Conditions string "optional" - ValidateCount int "optional" - ValidateMode string "optional" - } - func onlyConditions() bool { - return $Conditions && !$Value && !$Menu - } - func preparePageValidateCount(count int) int { - var min, max int - min = Int(EcosysParam("min_page_validate_count")) - max = Int(EcosysParam("max_page_validate_count")) - - if count < min { - count = min - } else { - if count > max { - count = max - } - } - - return count - } - conditions { - RowConditions("pages", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - $ValidateCount = preparePageValidateCount($ValidateCount) - } - action { - var pars, vals array - if $Value { - pars[0] = "value" - vals[0] = $Value - } - if $Menu { - pars[Len(pars)] = "menu" - vals[Len(vals)] = $Menu - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if $ValidateCount { - pars[Len(pars)] = "validate_count" - vals[Len(vals)] = $ValidateCount - } - if $ValidateMode { - if $ValidateMode != "1" { - $ValidateMode = "0" - } - pars[Len(pars)] = "validate_mode" - vals[Len(vals)] = $ValidateMode - } - if Len(vals) > 0 { - DBUpdate("pages", $Id, Join(pars, ","), vals...) 
- } - } - }', 'ContractConditions("MainCondition")'), - ('11','AppendPage','contract AppendPage { - data { - Id int - Value string - } - conditions { - RowConditions("pages", $Id, false) - } - action { - var row map - row = DBRow("pages").Columns("value").WhereId($Id) - DBUpdate("pages", $Id, "value", row["value"] + "\r\n" + $Value) - } - }', 'ContractConditions("MainCondition")'), - ('12','NewBlock','contract NewBlock { - data { - Name string - Value string - Conditions string - ApplicationId int "optional" - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - - var row map - row = DBRow("blocks").Columns("id").Where("name = ?", $Name) - - if row { - warning Sprintf( "Block %%s already exists", $Name) - } - } - action { - DBInsert("blocks", "name,value,conditions,app_id", $Name, $Value, $Conditions, $ApplicationId ) - } - }', 'ContractConditions("MainCondition")'), - ('13','EditBlock','contract EditBlock { - data { - Id int - Value string "optional" - Conditions string "optional" - } - - func onlyConditions() bool { - return $Conditions && !$Value - } - - conditions { - RowConditions("blocks", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - } - action { - var pars, vals array - if $Value { - pars[0] = "value" - vals[0] = $Value - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if Len(vals) > 0 { - DBUpdate("blocks", $Id, Join(pars, ","), vals...) - } - } - }', 'ContractConditions("MainCondition")'), - ('14','NewTable','contract NewTable { - data { - Name string - Columns string - Permissions string - ApplicationId int "optional" - } - conditions { - TableConditions($Name, $Columns, $Permissions) - } - action { - CreateTable($Name, $Columns, $Permissions, $ApplicationId) - } - func rollback() { - RollbackTable($Name) - } - func price() int { - return SysParamInt("table_price") - } - }', 'ContractConditions("MainCondition")'), - ('15','EditTable','contract EditTable { - data { - Name string - Permissions string - } - conditions { - TableConditions($Name, "", $Permissions) - } - action { - PermTable($Name, $Permissions ) - } - }', 'ContractConditions("MainCondition")'), - ('16','NewColumn','contract NewColumn { - data { - TableName string - Name string - Type string - Permissions string - } - conditions { - ColumnCondition($TableName, $Name, $Type, $Permissions) - } - action { - CreateColumn($TableName, $Name, $Type, $Permissions) - } - }', 'ContractConditions("MainCondition")'), - ('17','EditColumn','contract EditColumn { - data { - TableName string - Name string - Permissions string - } - conditions { - ColumnCondition($TableName, $Name, "", $Permissions) - } - action { - PermColumn($TableName, $Name, $Permissions) - } - }', 'ContractConditions("MainCondition")'), - ('18','NewLang','contract NewLang { - data { - Name string - Trans string - AppID int - } - conditions { - EvalCondition("parameters", "changing_language", "value") - var row array - row = DBFind("languages").Columns("name").Where("name=? 
AND app_id=?", $Name, $AppID).Limit(1) - if Len(row) > 0 { - error Sprintf("The language resource %%s already exists", $Name) - } - } - action { - DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) - } - }', 'ContractConditions("MainCondition")'), - ('19','EditLang','contract EditLang { - data { - Id int - Name string - Trans string - AppID int - } - conditions { - EvalCondition("parameters", "changing_language", "value") - } - action { - DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) - } - }', 'ContractConditions("MainCondition")'), - ('20','Import','contract Import { - data { - Data string - } - conditions { - $list = JSONDecode($Data) - } - func ImportList(row array, cnt string) { - if !row { - return - } - var i int - while i < Len(row) { - var idata map - idata = row[i] - if(cnt == "pages"){ - $ret_page = DBFind("pages").Columns("id").Where("name=$", idata["Name"]) - $page_id = One($ret_page, "id") - if ($page_id != nil){ - idata["Id"] = Int($page_id) - CallContract("EditPage", idata) - } else { - CallContract("NewPage", idata) - } - } - if(cnt == "blocks"){ - $ret_block = DBFind("blocks").Columns("id").Where("name=$", idata["Name"]) - $block_id = One($ret_block, "id") - if ($block_id != nil){ - idata["Id"] = Int($block_id) - CallContract("EditBlock", idata) - } else { - CallContract("NewBlock", idata) - } - } - if(cnt == "menus"){ - $ret_menu = DBFind("menu").Columns("id,value").Where("name=$", idata["Name"]) - $menu_id = One($ret_menu, "id") - $menu_value = One($ret_menu, "value") - if ($menu_id != nil){ - idata["Id"] = Int($menu_id) - idata["Value"] = Str($menu_value) + "\n" + Str(idata["Value"]) - CallContract("EditMenu", idata) - } else { - CallContract("NewMenu", idata) - } - } - if(cnt == "parameters"){ - $ret_param = DBFind("parameters").Columns("id").Where("name=$", idata["Name"]) - $param_id = One($ret_param, "id") - if ($param_id != nil){ - idata["Id"] = Int($param_id) - CallContract("EditParameter", idata) - } else { - CallContract("NewParameter", idata) - } - } - if(cnt == "languages"){ - $ret_lang = DBFind("languages").Columns("id").Where("name=$", idata["Name"]) - $lang_id = One($ret_lang, "id") - if ($lang_id != nil){ - CallContract("EditLang", idata) - } else { - CallContract("NewLang", idata) - } - } - if(cnt == "contracts"){ - if IsObject(idata["Name"], $ecosystem_id){ - } else { - CallContract("NewContract", idata) - } - } - if(cnt == "tables"){ - $ret_table = DBFind("tables").Columns("id").Where("name=$", idata["Name"]) - $table_id = One($ret_table, "id") - if ($table_id != nil){ - } else { - CallContract("NewTable", idata) - } - } - i = i + 1 - } - } - func ImportData(row array) { - if !row { - return - } - var i int - while i < Len(row) { - var idata map - var list array - var tblname, columns string - idata = row[i] - i = i + 1 - tblname = idata["Table"] - columns = Join(idata["Columns"], ",") - list = idata["Data"] - if !list { - continue - } - var j int - while j < Len(list) { - var ilist array - ilist = list[j] - DBInsert(tblname, columns, ilist) - j=j+1 - } - } - } - action { - ImportList($list["pages"], "pages") - ImportList($list["blocks"], "blocks") - ImportList($list["menus"], "menus") - ImportList($list["parameters"], "parameters") - ImportList($list["languages"], "languages") - ImportList($list["contracts"], "contracts") - ImportList($list["tables"], "tables") - ImportData($list["data"]) - } - }', 'ContractConditions("MainCondition")'), - ('21', 
'NewCron','contract NewCron { - data { - Cron string - Contract string - Limit int "optional" - Till string "optional date" - Conditions string - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - ValidateCron($Cron) - } - action { - if !$Till { - $Till = "1970-01-01 00:00:00" - } - if !HasPrefix($Contract, "@") { - $Contract = "@" + Str($ecosystem_id) + $Contract - } - $result = DBInsert("cron", "owner,cron,contract,counter,till,conditions", - $key_id, $Cron, $Contract, $Limit, $Till, $Conditions) - UpdateCron($result) - } - }', 'ContractConditions("MainCondition")'), - ('22','EditCron','contract EditCron { - data { - Id int - Contract string - Cron string "optional" - Limit int "optional" - Till string "optional date" - Conditions string - } - conditions { - ConditionById("cron", true) - ValidateCron($Cron) - } - action { - if !$Till { - $Till = "1970-01-01 00:00:00" - } - if !HasPrefix($Contract, "@") { - $Contract = "@" + Str($ecosystem_id) + $Contract - } - DBUpdate("cron", $Id, "cron,contract,counter,till,conditions", - $Cron, $Contract, $Limit, $Till, $Conditions) - UpdateCron($Id) - } - }', 'ContractConditions("MainCondition")'), - ('23', 'UploadBinary', contract UploadBinary { - data { - Name string - Data bytes "file" - AppID int - DataMimeType string "optional" - MemberID int "optional" - } - conditions { - $Id = Int(DBFind("binaries").Columns("id").Where("app_id = ? AND member_id = ? AND name = ?", $AppID, $MemberID, $Name).One("id")) - } - action { - var hash string - hash = MD5($Data) - - if $DataMimeType == "" { - $DataMimeType = "application/octet-stream" - } - - if $Id != 0 { - DBUpdate("binaries", $Id, "data,hash,mime_type", $Data, hash, $DataMimeType) - } else { - $Id = DBInsert("binaries", "app_id,member_id,name,data,hash,mime_type", $AppID, $MemberID, $Name, $Data, hash, $DataMimeType) - } - - $result = $Id - } - }', 'ContractConditions("MainCondition")'); - ` diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index 755e626c7..4e5ca29ab 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -483,113 +483,38 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c PermColumn($TableName, $Name, $Permissions) } }', 'ContractConditions("MainCondition")'), - ('18','NewLang', 'contract NewLang { + ('18','NewLang','contract NewLang { data { - ApplicationId int "optional" - Name string - Trans string "optional" - Value array "optional" - IdLanguage array "optional" + Name string + Trans string + AppID int } - conditions { - if $ApplicationId == 0 { - warning "Application id cannot equal 0" - } - - if DBFind("languages").Columns("id").Where("name = ?", $Name).One("id") { - warning Sprintf( "Language resource %%s already exists", $Name) - } - - var j int - while j < Len($IdLanguage) { - if $IdLanguage[j] == "" { - info("Locale empty") - } - if $Value[j] == "" { - info("Value empty") - } - j = j + 1 - } EvalCondition("parameters", "changing_language", "value") + var row array + row = DBFind("languages").Columns("name").Where("name=? 
AND app_id=?", $Name, $AppID).Limit(1) + if Len(row) > 0 { + error Sprintf("The language resource %%s already exists", $Name) + } } - action { - var i,len,lenshar int - var res,langarr string - len = Len($IdLanguage) - lenshar = Len($Value) - while i < len { - if i + 1 == len { - res = res + Sprintf("%%q: %%q",$IdLanguage[i],$Value[i]) - } else { - res = res + Sprintf("%%q: %%q,",$IdLanguage[i],$Value[i]) - } - i = i + 1 - } - if len > 0 { - langarr = Sprintf("{"+"%%v"+"}", res) - $Trans = langarr - } - $result = CreateLanguage($Name, $Trans, $ApplicationId) + DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) + UpdateLang($AppID, $Name, $Trans) } }', 'ContractConditions("MainCondition")'), ('19','EditLang','contract EditLang { data { - Id int - Name string "optional" - ApplicationId int "optional" - Trans string "optional" - Value array "optional" - IdLanguage array "optional" + Id int + Name string + Trans string + AppID int } - conditions { - var j int - while j < Len($IdLanguage) { - if ($IdLanguage[j] == ""){ - info("Locale empty") - } - if ($Value[j] == ""){ - info("Value empty") - } - j = j + 1 - } EvalCondition("parameters", "changing_language", "value") } - action { - var i,len int - var res,langarr string - len = Len($IdLanguage) - while i < len { - if (i + 1 == len){ - res = res + Sprintf("%%q: %%q", $IdLanguage[i],$Value[i]) - } - else { - res = res + Sprintf("%%q: %%q, ", $IdLanguage[i],$Value[i]) - } - i = i + 1 - } - - $row = DBFind("languages").Columns("name,app_id").WhereId($Id).Row() - if !$row{ - warning "Language not found" - } - - if $ApplicationId == 0 { - $ApplicationId = Int($row["app_id"]) - } - if $Name == "" { - $Name = $row["name"] - } - - if (len > 0){ - langarr = Sprintf("{"+"%%v"+"}", res) - $Trans = langarr - - } - EditLanguage($Id, $Name, $Trans, $ApplicationId) + DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) + UpdateLang($AppID, $Name, $Trans) } }', 'ContractConditions("MainCondition")'), ('20','Import','contract Import { @@ -794,7 +719,6 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c NewPubkey string } conditions { - Println($NewPubkey) $newId = PubToID($NewPubkey) if $newId == 0 { error "Wrong pubkey" @@ -802,48 +726,10 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c if DBFind("keys").Columns("id").WhereId($newId).One("id") != nil { error "User already exists" } - } - action { - DBInsert("keys", "id, pub", $newId, $NewPubKey) - } - }', 'ContractConditions("MainCondition")'), - ('25', 'NewVDE', 'contract NewVDE { - data { - VDEName string - DBUser string - DBPassword string - VDEAPIPort int - } - conditions { + $amount = Money(1000) * Money(1000000000000000000) } - action { - CreateVDE($VDEName, $DBUser, $DBPassword, $VDEAPIPort) - } - }', 'ContractConditions("MainCondition")'), - ('26', 'ListVDE', 'contract ListVDE { - data { - VDEName string - } - - conditions { - - } - - action { - GetVDEList($VDEName) - } - }', 'ContractConditions("MainCondition")'), - ('27', 'RunVDE', 'contract RunVDE { - data { - VDEName string - } - - conditions { - } - - action { - StartVDE($VDEName) + DBInsert("keys", "id, pub", $newId, $NewPubKey) } }', 'ContractConditions("MainCondition")');` From 5e34d2a2b362375b9a1d9af4a7e441a94ad3a484 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 10 May 2018 22:37:36 +0300 Subject: [PATCH 047/169] fix login --- packages/migration/vde/vde_data_contracts.go | 111 +++++++++++++++---- 1 file changed, 92 insertions(+), 19 
deletions(-) diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index 4e5ca29ab..ea83e591c 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -483,38 +483,113 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c PermColumn($TableName, $Name, $Permissions) } }', 'ContractConditions("MainCondition")'), - ('18','NewLang','contract NewLang { + ('18','NewLang', 'contract NewLang { data { - Name string - Trans string - AppID int + ApplicationId int "optional" + Name string + Trans string "optional" + Value array "optional" + IdLanguage array "optional" } + conditions { - EvalCondition("parameters", "changing_language", "value") - var row array - row = DBFind("languages").Columns("name").Where("name=? AND app_id=?", $Name, $AppID).Limit(1) - if Len(row) > 0 { - error Sprintf("The language resource %%s already exists", $Name) + if $ApplicationId == 0 { + warning "Application id cannot equal 0" + } + + if DBFind("languages").Columns("id").Where("name = ?", $Name).One("id") { + warning Sprintf( "Language resource %%s already exists", $Name) } + + var j int + while j < Len($IdLanguage) { + if $IdLanguage[j] == "" { + info("Locale empty") + } + if $Value[j] == "" { + info("Value empty") + } + j = j + 1 + } + EvalCondition("parameters", "changing_language", "value") } + action { - DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) + var i,len,lenshar int + var res,langarr string + len = Len($IdLanguage) + lenshar = Len($Value) + while i < len { + if i + 1 == len { + res = res + Sprintf("%%q: %%q",$IdLanguage[i],$Value[i]) + } else { + res = res + Sprintf("%%q: %%q,",$IdLanguage[i],$Value[i]) + } + i = i + 1 + } + if len > 0 { + langarr = Sprintf("{"+"%%v"+"}", res) + $Trans = langarr + } + $result = CreateLanguage($Name, $Trans, $ApplicationId) } }', 'ContractConditions("MainCondition")'), ('19','EditLang','contract EditLang { data { - Id int - Name string - Trans string - AppID int + Id int + Name string "optional" + ApplicationId int "optional" + Trans string "optional" + Value array "optional" + IdLanguage array "optional" } + conditions { + var j int + while j < Len($IdLanguage) { + if ($IdLanguage[j] == ""){ + info("Locale empty") + } + if ($Value[j] == ""){ + info("Value empty") + } + j = j + 1 + } EvalCondition("parameters", "changing_language", "value") } + action { - DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) + var i,len int + var res,langarr string + len = Len($IdLanguage) + while i < len { + if (i + 1 == len){ + res = res + Sprintf("%%q: %%q", $IdLanguage[i],$Value[i]) + } + else { + res = res + Sprintf("%%q: %%q, ", $IdLanguage[i],$Value[i]) + } + i = i + 1 + } + + $row = DBFind("languages").Columns("name,app_id").WhereId($Id).Row() + if !$row{ + warning "Language not found" + } + + if $ApplicationId == 0 { + $ApplicationId = Int($row["app_id"]) + } + if $Name == "" { + $Name = $row["name"] + } + + if (len > 0){ + langarr = Sprintf("{"+"%%v"+"}", res) + $Trans = langarr + + } + EditLanguage($Id, $Name, $Trans, $ApplicationId) } }', 'ContractConditions("MainCondition")'), ('20','Import','contract Import { @@ -726,8 +801,6 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c if DBFind("keys").Columns("id").WhereId($newId).One("id") != nil { error "User already exists" } - - $amount = Money(1000) * 
Money(1000000000000000000) } action { DBInsert("keys", "id, pub", $newId, $NewPubKey) From b2705e2e57c404d0eb651affe8221068bb089f6d Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 14 May 2018 09:18:14 +0300 Subject: [PATCH 048/169] temporary commit --- packages/api/route.go | 2 +- packages/migration/vde/vde_data_contracts.go | 41 ++++++++++++++++++++ 2 files changed, 42 insertions(+), 1 deletion(-) diff --git a/packages/api/route.go b/packages/api/route.go index ef84e9637..56b547f61 100644 --- a/packages/api/route.go +++ b/packages/api/route.go @@ -93,7 +93,7 @@ func Route(route *hr.Router) { get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) get(`block/:id`, ``, getBlockInfo) get(`maxblockid`, ``, getMaxBlockID) - get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) + get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) get(`systemparams`, `?names:string`, authWallet, systemParams) get(`ecosystems`, ``, authWallet, ecosystems) diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index ea83e591c..755e626c7 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -794,6 +794,7 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c NewPubkey string } conditions { + Println($NewPubkey) $newId = PubToID($NewPubkey) if $newId == 0 { error "Wrong pubkey" @@ -805,4 +806,44 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c action { DBInsert("keys", "id, pub", $newId, $NewPubKey) } + }', 'ContractConditions("MainCondition")'), + ('25', 'NewVDE', 'contract NewVDE { + data { + VDEName string + DBUser string + DBPassword string + VDEAPIPort int + } + + conditions { + } + + action { + CreateVDE($VDEName, $DBUser, $DBPassword, $VDEAPIPort) + } + }', 'ContractConditions("MainCondition")'), + ('26', 'ListVDE', 'contract ListVDE { + data { + VDEName string + } + + conditions { + + } + + action { + GetVDEList($VDEName) + } + }', 'ContractConditions("MainCondition")'), + ('27', 'RunVDE', 'contract RunVDE { + data { + VDEName string + } + + conditions { + } + + action { + StartVDE($VDEName) + } }', 'ContractConditions("MainCondition")');` From 4f88bf1bd2417504e993d79cd658e61daf4d296c Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Tue, 15 May 2018 12:05:42 +0300 Subject: [PATCH 049/169] temp commit --- packages/api/login.go | 37 +++++++++----------- packages/migration/vde/vde_data_contracts.go | 3 +- packages/migration/vde/vde_data_tables.go | 10 +++++- packages/smart/funcs.go | 3 -- 4 files changed, 28 insertions(+), 25 deletions(-) diff --git a/packages/api/login.go b/packages/api/login.go index 9e0f9a07e..7882de84e 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -114,6 +114,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En } } else { pubkey = data.params[`pubkey`].([]byte) + fmt.Println(string(pubkey)) if len(pubkey) == 0 { logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("public key is empty") return errorAPI(w, `E_EMPTYPUBLIC`, http.StatusBadRequest) @@ -126,21 +127,16 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En return err } + pubkey = data.params[`pubkey`].([]byte) hexPubKey := hex.EncodeToString(pubkey) - params := make([]byte, 0) - params = append(append(params, converter.EncodeLength(int64(len(hexPubKey)))...), hexPubKey...) 
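Aside — the parameter blob attached to the forwarded NewUser call is length-prefixed: the encoded length of the hex public key is written first, then the key itself (converter.EncodeLength in this hunk). A toy, self-contained sketch of the same length-prefix idea follows; it deliberately uses a plain 4-byte big-endian length rather than the repository's actual EncodeLength encoding, and the sample key bytes are made up:

    // Toy illustration of length-prefixing a parameter before appending it
    // to a payload. NOT the repository's converter.EncodeLength.
    package main

    import (
    	"encoding/binary"
    	"encoding/hex"
    	"fmt"
    )

    func lengthPrefix(data []byte) []byte {
    	buf := make([]byte, 4+len(data))
    	binary.BigEndian.PutUint32(buf[:4], uint32(len(data))) // fixed-width length header
    	copy(buf[4:], data)
    	return buf
    }

    func main() {
    	pub := []byte{0x01, 0x02, 0x03} // hypothetical public key bytes
    	hexPub := hex.EncodeToString(pub)
    	params := lengthPrefix([]byte(hexPub))
    	fmt.Printf("%x\n", params) // 00000006303130323033
    }
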
+ params := converter.EncodeLength(int64(len(hexPubKey))) + params = append(params, hexPubKey...) contract := smart.GetContract("NewUser", 1) - info := contract.Block.Info.(*script.ContractInfo) - - // scHeader, err := getHeader("NewUser", data) - if err != nil { - return errorAPI(w, "E_EMPTYOBJECT", http.StatusBadRequest) - } sc := tx.SmartContract{ Header: tx.Header{ - Type: int(info.ID), + Type: int(contract.Block.Info.(*script.ContractInfo).ID), Time: time.Now().Unix(), EcosystemID: 1, KeyID: conf.Config.KeyID, @@ -154,34 +150,34 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En if conf.Config.IsSupportingVDE() { signPrms := []string{sc.ForSign()} - signPrms = append(signPrms, string(hexPubKey)) - signature, err := crypto.Sign( - NodePrivateKey, - strings.Join(signPrms, ","), - ) + signPrms = append(signPrms, hexPubKey) + signData := strings.Join(signPrms, ",") + signature, err := crypto.Sign(NodePrivateKey, signData) if err != nil { log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("signing by node private key") return err } + sc.BinSignatures = converter.EncodeLengthPlusData(signature) + + if sc.PublicKey, err = hex.DecodeString(NodePublicKey); err != nil { + log.WithFields(log.Fields{"type": consts.ConversionError, "error": err}).Error("decoding public key from hex") + return err + } + serializedContract, err := msgpack.Marshal(sc) if err != nil { logger.WithFields(log.Fields{"type": consts.MarshallingError, "error": err}).Error("marshalling smart contract to msgpack") return errorAPI(w, err, http.StatusInternalServerError) } - // signature := data.params[`signature`].([]byte) - // if len(signature) == 0 { - // log.WithFields(log.Fields{"type": consts.EmptyObject, "params": data.params}).Error("signature is empty") - // } - fmt.Println(len(signature)) ret, err := VDEContract(serializedContract, data) if err != nil { return errorAPI(w, err, http.StatusInternalServerError) } data.result = ret } else { - err = tx.BuildTransaction(sc, NodePrivateKey, NodePublicKey, string(hexPubKey)) + err = tx.BuildTransaction(sc, NodePrivateKey, NodePublicKey, hexPubKey) if err != nil { log.WithFields(log.Fields{"type": consts.ContractError}).Error("Executing contract") } @@ -216,6 +212,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En } } + fmt.Println(string(pubkey)) verify, err := crypto.CheckSign(pubkey, nonceSalt+msg, data.params[`signature`].([]byte)) if err != nil { logger.WithFields(log.Fields{"type": consts.CryptoError, "pubkey": pubkey, "msg": msg, "signature": string(data.params["signature"].([]byte))}).Error("checking signature") diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index 755e626c7..c26fa16d6 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -804,7 +804,8 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c } } action { - DBInsert("keys", "id, pub", $newId, $NewPubKey) + DBInsert("keys", "id", $newId) + SetPubKey($newId, StringToBytes($NewPubkey)) } }', 'ContractConditions("MainCondition")'), ('25', 'NewVDE', 'contract NewVDE { diff --git a/packages/migration/vde/vde_data_tables.go b/packages/migration/vde/vde_data_tables.go index 4223e825a..955514d55 100644 --- a/packages/migration/vde/vde_data_tables.go +++ b/packages/migration/vde/vde_data_tables.go @@ -64,5 +64,13 @@ INSERT INTO "%[1]d_tables" ("id", "name", "permissions","columns", 
"conditions") "data": "ContractConditions(\"MainCondition\")", "hash": "ContractConditions(\"MainCondition\")", "mime_type": "ContractConditions(\"MainCondition\")"}', - 'ContractConditions("MainCondition")'); + 'ContractConditions("MainCondition")'), + ('9', 'keys', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"pub": "ContractConditions(\"MainCondition\")", + "multi": "ContractConditions(\"MainCondition\")", + "deleted": "ContractConditions(\"MainCondition\")", + "blocked": "ContractConditions(\"MainCondition\")"}', + 'ContractConditions("MainCondition")'); ` diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 22861a3a7..cedfd6812 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -304,9 +304,6 @@ func GetTableName(sc *SmartContract, tblname string, ecosystem int64) string { return strings.ToLower(tblname[1:]) } prefix := converter.Int64ToStr(ecosystem) - if sc.VDE { - prefix += `_vde` - } return strings.ToLower(fmt.Sprintf(`%s_%s`, prefix, tblname)) } From 16d556557ed20e376995e9345076f1729fdad3cc Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Tue, 15 May 2018 21:28:09 +0300 Subject: [PATCH 050/169] remove fmt from login api handlers --- packages/api/login.go | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/api/login.go b/packages/api/login.go index 7882de84e..a6548fcfb 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -17,7 +17,6 @@ package api import ( - "fmt" "net/http" "strings" "time" @@ -114,7 +113,6 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En } } else { pubkey = data.params[`pubkey`].([]byte) - fmt.Println(string(pubkey)) if len(pubkey) == 0 { logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("public key is empty") return errorAPI(w, `E_EMPTYPUBLIC`, http.StatusBadRequest) @@ -212,7 +210,6 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En } } - fmt.Println(string(pubkey)) verify, err := crypto.CheckSign(pubkey, nonceSalt+msg, data.params[`signature`].([]byte)) if err != nil { logger.WithFields(log.Fields{"type": consts.CryptoError, "pubkey": pubkey, "msg": msg, "signature": string(data.params["signature"].([]byte))}).Error("checking signature") @@ -245,7 +242,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En Address: address, IsOwner: founder == wallet, IsNode: conf.Config.KeyID == wallet, - IsVDE: model.IsTable(fmt.Sprintf(`%d_vde_tables`, consts.DefaultVDE)), + IsVDE: conf.Config.IsSupportingVDE(), } data.result = &result From f32acac4e88b7b33f142ac4d593be74f05c0ea4a Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Wed, 16 May 2018 20:53:47 +0300 Subject: [PATCH 051/169] add drop db function --- packages/model/db.go | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/packages/model/db.go b/packages/model/db.go index 7e32186ea..fdf4d54aa 100644 --- a/packages/model/db.go +++ b/packages/model/db.go @@ -394,3 +394,25 @@ func InitDB(cfg conf.DBConfig) error { return nil } + +// DropDatabase kill all process and drop database +func DropDatabase(name string) error { + query := `SELECT + pg_terminate_backend (pg_stat_activity.pid) + FROM + pg_stat_activity + WHERE + pg_stat_activity.datname = ?` + + if err := DBConn.Exec(query, name).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err, "dbname": 
name}).Error("on kill db process") + return err + } + + if err := DBConn.Exec(fmt.Sprintf("DROP DATABASE IF EXISTS %s", name)).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err, "dbname": name}).Error("on drop db") + return err + } + + return nil +} From 7cb55609c3d1a11d4e017683d37e35768bbf74cb Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Wed, 16 May 2018 20:54:14 +0300 Subject: [PATCH 052/169] fix manager --- packages/api/vde_test.go | 36 +++++++++++++++++--- packages/migration/vde/vde_data_contracts.go | 25 +++++++++++--- packages/smart/funcs.go | 8 ++--- packages/vdemanager/manager.go | 18 +++++----- 4 files changed, 67 insertions(+), 20 deletions(-) diff --git a/packages/api/vde_test.go b/packages/api/vde_test.go index bd32c97de..990809436 100644 --- a/packages/api/vde_test.go +++ b/packages/api/vde_test.go @@ -37,15 +37,43 @@ func TestVDECreate(t *testing.T) { require.NoError(t, keyLogin(1)) form := url.Values{ - "VDEName": {"testvde"}, - "DBUser": {"vdeuser"}, + "VDEName": {"myvde3"}, + "DBUser": {"myvdeuser3"}, "DBPassword": {"vdepassword"}, - "VDEAPIPort": {"8000"}, + "VDEAPIPort": {"8004"}, } - require.NoError(t, postTx("NewVDE", &form)) + assert.NoError(t, postTx("NewVDE", &form)) +} + +func TestVDEList(t *testing.T) { + require.NoError(t, keyLogin(1)) + fmt.Println(postTx("ListVDE", nil)) } +func TestStopVDE(t *testing.T) { + require.NoError(t, keyLogin(1)) + form := url.Values{ + "VDEName": {"myvde3"}, + } + require.NoError(t, postTx("StopVDE", &form)) +} + +func TestRunVDE(t *testing.T) { + require.NoError(t, keyLogin(1)) + form := url.Values{ + "VDEName": {"myvde3"}, + } + require.NoError(t, postTx("RunVDE", &form)) +} + +func TestRemoveVDE(t *testing.T) { + require.NoError(t, keyLogin(1)) + form := url.Values{ + "VDEName": {"myvde3"}, + } + require.NoError(t, postTx("RemoveVDE", &form)) +} func TestVDEParams(t *testing.T) { assert.NoError(t, keyLogin(1)) diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index c26fa16d6..4297f287a 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -824,19 +824,27 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c } }', 'ContractConditions("MainCondition")'), ('26', 'ListVDE', 'contract ListVDE { + data {} + + conditions {} + + action { + GetVDEList() + } + }', 'ContractConditions("MainCondition")'), + ('27', 'RunVDE', 'contract RunVDE { data { VDEName string } conditions { - } action { - GetVDEList($VDEName) + StartVDE($VDEName) } }', 'ContractConditions("MainCondition")'), - ('27', 'RunVDE', 'contract RunVDE { + ('28', 'StopVDE', 'contract StopVDE { data { VDEName string } @@ -845,6 +853,15 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c } action { - StartVDE($VDEName) + StopVDEProcess($VDEName) + } + }', 'ContractConditions("MainCondition")'), + ('29', 'RemoveVDE', 'contract RemoveVDE { + data { + VDEName string + } + conditions {} + action{ + DeleteVDE($VDEName) } }', 'ContractConditions("MainCondition")');` diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index cedfd6812..002792d05 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -266,7 +266,7 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { f["CreateVDE"] = CreateVDE f["DeleteVDE"] = DeleteVDE f["StartVDE"] = StartVDE - f["StopVDE"] = StopVDE + f["StopVDEProcess"] = StopVDEProcess f["GetVDEList"] = GetVDEList vmExtendCost(vm, 
getCost) vmFuncCallsDB(vm, funcCallsDB) @@ -1714,12 +1714,12 @@ func StartVDE(sc *SmartContract, name string) error { return vdemanager.Manager.StartVDE(name) } -// StopVDE stops VDE process -func StopVDE(sc *SmartContract, name string) error { +// StopVDEProcess stops VDE process +func StopVDEProcess(sc *SmartContract, name string) error { return vdemanager.Manager.StopVDE(name) } // GetVDEList returns list VDE process with statuses -func GetVDEList(sc *SmartContract, name string) (map[string]string, error) { +func GetVDEList(sc *SmartContract) (map[string]string, error) { return vdemanager.Manager.ListProcess() } diff --git a/packages/vdemanager/manager.go b/packages/vdemanager/manager.go index 4cca4ac8e..0e628edca 100644 --- a/packages/vdemanager/manager.go +++ b/packages/vdemanager/manager.go @@ -7,6 +7,7 @@ import ( "os" "path" "path/filepath" + "time" "github.com/GenesisKernel/go-genesis/packages/conf" @@ -22,7 +23,8 @@ const ( createRoleTemplate = `CREATE ROLE %s WITH ENCRYPTED PASSWORD '%s' NOSUPERUSER NOCREATEDB NOCREATEROLE INHERIT LOGIN` createDBTemplate = `CREATE DATABASE %s OWNER %s` - dropDBTemplate = `DROP OWNED BY %s CASCADE` + dropDBTemplate = `DROP DATABASE IF EXISTS %s` + dropOwnedTemplate = `DROP OWNED BY %s CASCADE` dropDBRoleTemplate = `DROP ROLE IF EXISTS %s` commandTemplate = `%s start --config=%s` ) @@ -101,7 +103,8 @@ func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) erro procConfEntry := pConf.NewConfigEntry(config.Directory) procConfEntry.Name = "program:" + name - command := fmt.Sprintf("%s --configPath=%s", config.Executable, config.Directory) + command := fmt.Sprintf("%s start --config=%s", config.Executable, filepath.Join(config.Directory, consts.DefaultConfigFile)) + log.Infoln(command) procConfEntry.AddKeyValue("command", command) proc := process.NewProcess("vdeMaster", procConfEntry) @@ -134,10 +137,7 @@ func (mgr *VDEManager) DeleteVDE(name string) error { return errWrongMode } - p := mgr.processes.Find(name) - if p != nil { - p.Stop(true) - } + mgr.StopVDE(name) vdeDir := path.Join(mgr.childConfigsPath, name) vdeConfigPath := filepath.Join(vdeDir, consts.DefaultConfigFile) @@ -147,8 +147,8 @@ func (mgr *VDEManager) DeleteVDE(name string) error { return err } - dropDBquery := fmt.Sprintf(dropDBTemplate, vdeConfig.DB.User) - if err := model.DBConn.Exec(dropDBquery).Error; err != nil { + time.Sleep(1 * time.Second) + if err := model.DropDatabase(vdeConfig.DB.Name); err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("Deleting vde db") return err } @@ -274,6 +274,7 @@ func InitVDEManager() { if item.IsDir() { procDir := path.Join(Manager.childConfigsPath, item.Name()) commandStr := fmt.Sprintf(commandTemplate, Manager.execPath, filepath.Join(procDir, consts.DefaultConfigFile)) + log.Info(commandStr) confEntry := pConf.NewConfigEntry(procDir) confEntry.Name = "program:" + item.Name() confEntry.AddKeyValue("command", commandStr) @@ -283,6 +284,7 @@ func InitVDEManager() { proc := process.NewProcess("vdeMaster", confEntry) Manager.processes.Add(item.Name(), proc) + proc.Start(true) } } } From cf352742f85c4018ea3e66ec7d4596fa0ed57224 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Fri, 15 Jun 2018 14:26:20 +0300 Subject: [PATCH 053/169] fix rebase errors --- packages/api/login.go | 4 ---- packages/service/node_ban.go | 2 +- packages/smart/funcs.go | 30 ------------------------------ 3 files changed, 1 insertion(+), 35 deletions(-) diff --git a/packages/api/login.go b/packages/api/login.go index 
e3146ba6c..90cba58b4 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -168,10 +168,6 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En logger.WithFields(log.Fields{"type": consts.MarshallingError, "error": err}).Error("marshalling smart contract to msgpack") return errorAPI(w, err, http.StatusInternalServerError) } -<<<<<<< HEAD - -======= ->>>>>>> 6c85191376afc8462fb926256c57cb60f0bbda2a ret, err := VDEContract(serializedContract, data) if err != nil { return errorAPI(w, err, http.StatusInternalServerError) diff --git a/packages/service/node_ban.go b/packages/service/node_ban.go index e69810f77..d8292de5d 100644 --- a/packages/service/node_ban.go +++ b/packages/service/node_ban.go @@ -143,7 +143,7 @@ func (nbs *NodesBanService) newBadBlock(producer syspar.FullNode, blockId, block } params = append(append(params, converter.EncodeLength(int64(len(reason)))...), []byte(reason)...) - vm := smart.GetVM(false, 0) + vm := smart.GetVM() contract := smart.VMGetContract(vm, "NewBadBlock", 1) info := contract.Block.Info.(*script.ContractInfo) diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 277d19946..6bde1d49e 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -270,36 +270,6 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { f["GetVDEList"] = GetVDEList vmExtendCost(vm, getCost) vmFuncCallsDB(vm, funcCallsDB) - case script.VMTypeVDEMaster: - f["HTTPRequest"] = HTTPRequest - f["GetMapKeys"] = GetMapKeys - f["SortedKeys"] = SortedKeys - f["Date"] = Date - f["HTTPPostJSON"] = HTTPPostJSON - f["ValidateCron"] = ValidateCron - f["UpdateCron"] = UpdateCron - f["CreateVDE"] = CreateVDE - f["DeleteVDE"] = DeleteVDE - f["StartVDE"] = StartVDE - f["StopVDE"] = StopVDE - f["GetVDEList"] = GetVDEList - vmExtendCost(vm, getCost) - vmFuncCallsDB(vm, funcCallsDB) - case script.VMTypeVDEMaster: - f["HTTPRequest"] = HTTPRequest - f["GetMapKeys"] = GetMapKeys - f["SortedKeys"] = SortedKeys - f["Date"] = Date - f["HTTPPostJSON"] = HTTPPostJSON - f["ValidateCron"] = ValidateCron - f["UpdateCron"] = UpdateCron - f["CreateVDE"] = CreateVDE - f["DeleteVDE"] = DeleteVDE - f["StartVDE"] = StartVDE - f["StopVDEProcess"] = StopVDEProcess - f["GetVDEList"] = GetVDEList - vmExtendCost(vm, getCost) - vmFuncCallsDB(vm, funcCallsDB) case script.VMTypeSmart: f["GetBlock"] = GetBlock f["UpdateNodesBan"] = UpdateNodesBan From b2efa6e884954d87b0753b67b97844c77e165d64 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Fri, 15 Jun 2018 16:32:29 +0300 Subject: [PATCH 054/169] vendoring supervisord --- vendor/github.com/gorilla/rpc/LICENSE | 27 + vendor/github.com/gorilla/rpc/README.md | 7 + vendor/github.com/gorilla/rpc/doc.go | 81 ++ vendor/github.com/gorilla/rpc/map.go | 180 +++++ vendor/github.com/gorilla/rpc/server.go | 269 +++++++ vendor/github.com/ochinchina/go-ini/LICENSE | 21 + vendor/github.com/ochinchina/go-ini/README.md | 368 +++++++++ vendor/github.com/ochinchina/go-ini/doc.go | 49 ++ .../ochinchina/go-ini/env_replacer.go | 65 ++ vendor/github.com/ochinchina/go-ini/ini.go | 265 +++++++ vendor/github.com/ochinchina/go-ini/key.go | 282 +++++++ vendor/github.com/ochinchina/go-ini/loader.go | 349 ++++++++ .../ochinchina/go-ini/properties.go | 116 +++ .../github.com/ochinchina/go-ini/section.go | 177 +++++ .../ochinchina/gorilla-xmlrpc/LICENSE | 27 + .../ochinchina/gorilla-xmlrpc/xml/client.go | 26 + .../ochinchina/gorilla-xmlrpc/xml/doc.go | 50 ++ .../ochinchina/gorilla-xmlrpc/xml/fault.go | 51 ++ 
.../ochinchina/gorilla-xmlrpc/xml/rpc2xml.go | 149 ++++ .../ochinchina/gorilla-xmlrpc/xml/server.go | 118 +++ .../ochinchina/gorilla-xmlrpc/xml/xml2rpc.go | 219 +++++ .../rogpeppe/go-charset/charset/big5.go | 88 +++ .../rogpeppe/go-charset/charset/charset.go | 301 +++++++ .../rogpeppe/go-charset/charset/codepage.go | 133 ++++ .../rogpeppe/go-charset/charset/cp932.go | 195 +++++ .../rogpeppe/go-charset/charset/file.go | 40 + .../rogpeppe/go-charset/charset/local.go | 162 ++++ .../rogpeppe/go-charset/charset/utf16.go | 110 +++ .../rogpeppe/go-charset/charset/utf8.go | 51 ++ .../rogpeppe/go-charset/data/data_big5.dat.go | 18 + .../go-charset/data/data_charsets.json.go | 18 + .../go-charset/data/data_cp932.dat.go | 18 + .../go-charset/data/data_ibm437.cp.go | 18 + .../go-charset/data/data_ibm850.cp.go | 18 + .../go-charset/data/data_ibm866.cp.go | 18 + .../go-charset/data/data_iso-8859-1.cp.go | 18 + .../go-charset/data/data_iso-8859-10.cp.go | 18 + .../go-charset/data/data_iso-8859-15.cp.go | 18 + .../go-charset/data/data_iso-8859-2.cp.go | 18 + .../go-charset/data/data_iso-8859-3.cp.go | 18 + .../go-charset/data/data_iso-8859-4.cp.go | 18 + .../go-charset/data/data_iso-8859-5.cp.go | 18 + .../go-charset/data/data_iso-8859-6.cp.go | 18 + .../go-charset/data/data_iso-8859-7.cp.go | 18 + .../go-charset/data/data_iso-8859-8.cp.go | 18 + .../go-charset/data/data_iso-8859-9.cp.go | 18 + .../go-charset/data/data_jisx0201kana.dat.go | 18 + .../go-charset/data/data_koi8-r.cp.go | 18 + .../go-charset/data/data_windows-1250.cp.go | 18 + .../go-charset/data/data_windows-1251.cp.go | 18 + .../go-charset/data/data_windows-1252.cp.go | 18 + .../rogpeppe/go-charset/data/doc.go | 6 + .../rogpeppe/go-charset/data/generate.go | 97 +++ .../rpoletaev/supervisord/Gopkg.lock | 63 ++ .../rpoletaev/supervisord/Gopkg.toml | 46 ++ .../github.com/rpoletaev/supervisord/LICENSE | 21 + .../rpoletaev/supervisord/README.md | 161 ++++ .../rpoletaev/supervisord/circle.yml | 9 + .../rpoletaev/supervisord/config/config.go | 558 +++++++++++++ .../supervisord/config/process_group.go | 114 +++ .../supervisord/config/process_sort.go | 159 ++++ .../supervisord/config/string_expression.go | 88 +++ .../rpoletaev/supervisord/config_template.go | 137 ++++ .../rpoletaev/supervisord/content_checker.go | 149 ++++ .../github.com/rpoletaev/supervisord/ctl.go | 159 ++++ .../rpoletaev/supervisord/daemonize.go | 25 + .../supervisord/daemonize_windows.go | 7 + .../rpoletaev/supervisord/events/events.go | 745 ++++++++++++++++++ .../rpoletaev/supervisord/faults/faults.go | 30 + .../rpoletaev/supervisord/logger/log.go | 485 ++++++++++++ .../rpoletaev/supervisord/logger/log_unix.go | 16 + .../supervisord/logger/log_windows.go | 7 + .../github.com/rpoletaev/supervisord/main.go | 75 ++ .../supervisord/process/command_parser.go | 81 ++ .../rpoletaev/supervisord/process/path.go | 46 ++ .../supervisord/process/pdeathsig_linux.go | 12 + .../supervisord/process/pdeathsig_other.go | 12 + .../supervisord/process/pdeathsig_windows.go | 9 + .../rpoletaev/supervisord/process/process.go | 689 ++++++++++++++++ .../supervisord/process/process_manager.go | 160 ++++ .../supervisord/process/set_user_id.go | 11 + .../process/set_user_id_windows.go | 11 + .../rpoletaev/supervisord/signals/signal.go | 34 + .../supervisord/signals/signal_windows.go | 46 ++ .../rpoletaev/supervisord/supervisor.go | 586 ++++++++++++++ .../rpoletaev/supervisord/util/util.go | 64 ++ .../rpoletaev/supervisord/version.go | 24 + .../rpoletaev/supervisord/xmlrpc.go | 136 ++++ vendor/vendor.json 
| 78 ++ 89 files changed, 9528 insertions(+) create mode 100644 vendor/github.com/gorilla/rpc/LICENSE create mode 100644 vendor/github.com/gorilla/rpc/README.md create mode 100644 vendor/github.com/gorilla/rpc/doc.go create mode 100644 vendor/github.com/gorilla/rpc/map.go create mode 100644 vendor/github.com/gorilla/rpc/server.go create mode 100644 vendor/github.com/ochinchina/go-ini/LICENSE create mode 100644 vendor/github.com/ochinchina/go-ini/README.md create mode 100644 vendor/github.com/ochinchina/go-ini/doc.go create mode 100644 vendor/github.com/ochinchina/go-ini/env_replacer.go create mode 100644 vendor/github.com/ochinchina/go-ini/ini.go create mode 100644 vendor/github.com/ochinchina/go-ini/key.go create mode 100644 vendor/github.com/ochinchina/go-ini/loader.go create mode 100644 vendor/github.com/ochinchina/go-ini/properties.go create mode 100644 vendor/github.com/ochinchina/go-ini/section.go create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/LICENSE create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/xml/client.go create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/xml/doc.go create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/xml/fault.go create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/xml/rpc2xml.go create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/xml/server.go create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/xml/xml2rpc.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/big5.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/charset.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/codepage.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/cp932.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/file.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/local.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/utf16.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/utf8.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_big5.dat.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_charsets.json.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_cp932.dat.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_ibm437.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_ibm850.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_ibm866.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-1.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-10.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-15.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-2.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-3.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-4.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-5.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-6.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-7.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-8.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-9.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_jisx0201kana.dat.go create mode 
100644 vendor/github.com/rogpeppe/go-charset/data/data_koi8-r.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_windows-1250.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_windows-1251.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_windows-1252.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/doc.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/generate.go create mode 100644 vendor/github.com/rpoletaev/supervisord/Gopkg.lock create mode 100644 vendor/github.com/rpoletaev/supervisord/Gopkg.toml create mode 100644 vendor/github.com/rpoletaev/supervisord/LICENSE create mode 100644 vendor/github.com/rpoletaev/supervisord/README.md create mode 100644 vendor/github.com/rpoletaev/supervisord/circle.yml create mode 100644 vendor/github.com/rpoletaev/supervisord/config/config.go create mode 100644 vendor/github.com/rpoletaev/supervisord/config/process_group.go create mode 100644 vendor/github.com/rpoletaev/supervisord/config/process_sort.go create mode 100644 vendor/github.com/rpoletaev/supervisord/config/string_expression.go create mode 100644 vendor/github.com/rpoletaev/supervisord/config_template.go create mode 100644 vendor/github.com/rpoletaev/supervisord/content_checker.go create mode 100644 vendor/github.com/rpoletaev/supervisord/ctl.go create mode 100644 vendor/github.com/rpoletaev/supervisord/daemonize.go create mode 100644 vendor/github.com/rpoletaev/supervisord/daemonize_windows.go create mode 100644 vendor/github.com/rpoletaev/supervisord/events/events.go create mode 100644 vendor/github.com/rpoletaev/supervisord/faults/faults.go create mode 100644 vendor/github.com/rpoletaev/supervisord/logger/log.go create mode 100644 vendor/github.com/rpoletaev/supervisord/logger/log_unix.go create mode 100644 vendor/github.com/rpoletaev/supervisord/logger/log_windows.go create mode 100644 vendor/github.com/rpoletaev/supervisord/main.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/command_parser.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/path.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/pdeathsig_linux.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/pdeathsig_other.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/pdeathsig_windows.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/process.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/process_manager.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/set_user_id.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/set_user_id_windows.go create mode 100644 vendor/github.com/rpoletaev/supervisord/signals/signal.go create mode 100644 vendor/github.com/rpoletaev/supervisord/signals/signal_windows.go create mode 100644 vendor/github.com/rpoletaev/supervisord/supervisor.go create mode 100644 vendor/github.com/rpoletaev/supervisord/util/util.go create mode 100644 vendor/github.com/rpoletaev/supervisord/version.go create mode 100644 vendor/github.com/rpoletaev/supervisord/xmlrpc.go diff --git a/vendor/github.com/gorilla/rpc/LICENSE b/vendor/github.com/gorilla/rpc/LICENSE new file mode 100644 index 000000000..0e5fb8728 --- /dev/null +++ b/vendor/github.com/gorilla/rpc/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2012 Rodrigo Moraes. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/gorilla/rpc/README.md b/vendor/github.com/gorilla/rpc/README.md new file mode 100644 index 000000000..75c26eaa8 --- /dev/null +++ b/vendor/github.com/gorilla/rpc/README.md @@ -0,0 +1,7 @@ +rpc +=== +[![Build Status](https://travis-ci.org/gorilla/rpc.png?branch=master)](https://travis-ci.org/gorilla/rpc) + +gorilla/rpc is a foundation for RPC over HTTP services, providing access to the exported methods of an object through HTTP requests. + +Read the full documentation here: http://www.gorillatoolkit.org/pkg/rpc diff --git a/vendor/github.com/gorilla/rpc/doc.go b/vendor/github.com/gorilla/rpc/doc.go new file mode 100644 index 000000000..bc65b532a --- /dev/null +++ b/vendor/github.com/gorilla/rpc/doc.go @@ -0,0 +1,81 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Copyright 2012 The Gorilla Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package gorilla/rpc is a foundation for RPC over HTTP services, providing +access to the exported methods of an object through HTTP requests. + +This package derives from the standard net/rpc package but uses a single HTTP +request per call instead of persistent connections. Other differences +compared to net/rpc: + + - Multiple codecs can be registered in the same server. + - A codec is chosen based on the "Content-Type" header from the request. + - Service methods also receive http.Request as parameter. + - This package can be used on Google App Engine. + +Let's setup a server and register a codec and service: + + import ( + "http" + "github.com/gorilla/rpc" + "github.com/gorilla/rpc/json" + ) + + func init() { + s := rpc.NewServer() + s.RegisterCodec(json.NewCodec(), "application/json") + s.RegisterService(new(HelloService), "") + http.Handle("/rpc", s) + } + +This server handles requests to the "/rpc" path using a JSON codec. +A codec is tied to a content type. In the example above, the JSON codec is +registered to serve requests with "application/json" as the value for the +"Content-Type" header. 
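+The content-type lookup is case-insensitive: codecs are registered and looked up under the lower-cased media type.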
If the header includes a charset definition, it is +ignored; only the media-type part is taken into account. + +A service can be registered using a name. If the name is empty, like in the +example above, it will be inferred from the service type. + +That's all about the server setup. Now let's define a simple service: + + type HelloArgs struct { + Who string + } + + type HelloReply struct { + Message string + } + + type HelloService struct {} + + func (h *HelloService) Say(r *http.Request, args *HelloArgs, reply *HelloReply) error { + reply.Message = "Hello, " + args.Who + "!" + return nil + } + +The example above defines a service with a method "HelloService.Say" and +the arguments and reply related to that method. + +The service must be exported (begin with an upper case letter) or local +(defined in the package registering the service). + +When a service is registered, the server inspects the service methods +and make available the ones that follow these rules: + + - The method name is exported. + - The method has three arguments: *http.Request, *args, *reply. + - All three arguments are pointers. + - The second and third arguments are exported or local. + - The method has return type error. + +All other methods are ignored. + +Gorilla has packages with common RPC codecs. Check out their documentation: + + JSON: http://gorilla-web.appspot.com/pkg/rpc/json +*/ +package rpc diff --git a/vendor/github.com/gorilla/rpc/map.go b/vendor/github.com/gorilla/rpc/map.go new file mode 100644 index 000000000..433f275b8 --- /dev/null +++ b/vendor/github.com/gorilla/rpc/map.go @@ -0,0 +1,180 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Copyright 2012 The Gorilla Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package rpc + +import ( + "fmt" + "net/http" + "reflect" + "strings" + "sync" + "unicode" + "unicode/utf8" +) + +var ( + // Precompute the reflect.Type of error and http.Request + typeOfError = reflect.TypeOf((*error)(nil)).Elem() + typeOfRequest = reflect.TypeOf((*http.Request)(nil)).Elem() +) + +// ---------------------------------------------------------------------------- +// service +// ---------------------------------------------------------------------------- + +type service struct { + name string // name of service + rcvr reflect.Value // receiver of methods for the service + rcvrType reflect.Type // type of the receiver + methods map[string]*serviceMethod // registered methods + passReq bool +} + +type serviceMethod struct { + method reflect.Method // receiver method + argsType reflect.Type // type of the request argument + replyType reflect.Type // type of the response argument +} + +// ---------------------------------------------------------------------------- +// serviceMap +// ---------------------------------------------------------------------------- + +// serviceMap is a registry for services. +type serviceMap struct { + mutex sync.Mutex + services map[string]*service +} + +// register adds a new service using reflection to extract its methods. +func (m *serviceMap) register(rcvr interface{}, name string, passReq bool) error { + // Setup service. 
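+	// The receiver value and its dynamic type are captured here so that its exported methods can be enumerated and later invoked via reflection.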
+ s := &service{ + name: name, + rcvr: reflect.ValueOf(rcvr), + rcvrType: reflect.TypeOf(rcvr), + methods: make(map[string]*serviceMethod), + passReq: passReq, + } + if name == "" { + s.name = reflect.Indirect(s.rcvr).Type().Name() + if !isExported(s.name) { + return fmt.Errorf("rpc: type %q is not exported", s.name) + } + } + if s.name == "" { + return fmt.Errorf("rpc: no service name for type %q", + s.rcvrType.String()) + } + // Setup methods. + for i := 0; i < s.rcvrType.NumMethod(); i++ { + method := s.rcvrType.Method(i) + mtype := method.Type + + // offset the parameter indexes by one if the + // service methods accept an HTTP request pointer + var paramOffset int + if passReq { + paramOffset = 1 + } else { + paramOffset = 0 + } + + // Method must be exported. + if method.PkgPath != "" { + continue + } + // Method needs four ins: receiver, *http.Request, *args, *reply. + if mtype.NumIn() != 3+paramOffset { + continue + } + + // If the service methods accept an HTTP request pointer + if passReq { + // First argument must be a pointer and must be http.Request. + reqType := mtype.In(1) + if reqType.Kind() != reflect.Ptr || reqType.Elem() != typeOfRequest { + continue + } + } + // Next argument must be a pointer and must be exported. + args := mtype.In(1 + paramOffset) + if args.Kind() != reflect.Ptr || !isExportedOrBuiltin(args) { + continue + } + // Next argument must be a pointer and must be exported. + reply := mtype.In(2 + paramOffset) + if reply.Kind() != reflect.Ptr || !isExportedOrBuiltin(reply) { + continue + } + // Method needs one out: error. + if mtype.NumOut() != 1 { + continue + } + if returnType := mtype.Out(0); returnType != typeOfError { + continue + } + s.methods[method.Name] = &serviceMethod{ + method: method, + argsType: args.Elem(), + replyType: reply.Elem(), + } + } + if len(s.methods) == 0 { + return fmt.Errorf("rpc: %q has no exported methods of suitable type", + s.name) + } + // Add to the map. + m.mutex.Lock() + defer m.mutex.Unlock() + if m.services == nil { + m.services = make(map[string]*service) + } else if _, ok := m.services[s.name]; ok { + return fmt.Errorf("rpc: service already defined: %q", s.name) + } + m.services[s.name] = s + return nil +} + +// get returns a registered service given a method name. +// +// The method name uses a dotted notation as in "Service.Method". +func (m *serviceMap) get(method string) (*service, *serviceMethod, error) { + parts := strings.Split(method, ".") + if len(parts) != 2 { + err := fmt.Errorf("rpc: service/method request ill-formed: %q", method) + return nil, nil, err + } + m.mutex.Lock() + service := m.services[parts[0]] + m.mutex.Unlock() + if service == nil { + err := fmt.Errorf("rpc: can't find service %q", method) + return nil, nil, err + } + serviceMethod := service.methods[parts[1]] + if serviceMethod == nil { + err := fmt.Errorf("rpc: can't find method %q", method) + return nil, nil, err + } + return service, serviceMethod, nil +} + +// isExported returns true of a string is an exported (upper case) name. +func isExported(name string) bool { + rune, _ := utf8.DecodeRuneInString(name) + return unicode.IsUpper(rune) +} + +// isExportedOrBuiltin returns true if a type is exported or a builtin. +func isExportedOrBuiltin(t reflect.Type) bool { + for t.Kind() == reflect.Ptr { + t = t.Elem() + } + // PkgPath will be non-empty even for an exported type, + // so we need to check the type name as well. 
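+	// Builtin types (e.g. int, string) have an empty PkgPath, which is why the second check accepts them.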
+ return isExported(t.Name()) || t.PkgPath() == "" +} diff --git a/vendor/github.com/gorilla/rpc/server.go b/vendor/github.com/gorilla/rpc/server.go new file mode 100644 index 000000000..d61b5eaa9 --- /dev/null +++ b/vendor/github.com/gorilla/rpc/server.go @@ -0,0 +1,269 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Copyright 2012 The Gorilla Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package rpc + +import ( + "fmt" + "net/http" + "reflect" + "strings" +) + +// ---------------------------------------------------------------------------- +// Codec +// ---------------------------------------------------------------------------- + +// Codec creates a CodecRequest to process each request. +type Codec interface { + NewRequest(*http.Request) CodecRequest +} + +// CodecRequest decodes a request and encodes a response using a specific +// serialization scheme. +type CodecRequest interface { + // Reads request and returns the RPC method name. + Method() (string, error) + // Reads request filling the RPC method args. + ReadRequest(interface{}) error + // Writes response using the RPC method reply. The error parameter is + // the error returned by the method call, if any. + WriteResponse(http.ResponseWriter, interface{}, error) error +} + +// ---------------------------------------------------------------------------- +// Server +// ---------------------------------------------------------------------------- + +// NewServer returns a new RPC server. +func NewServer() *Server { + return &Server{ + codecs: make(map[string]Codec), + services: new(serviceMap), + } +} + +// RequestInfo contains all the information we pass to before/after functions +type RequestInfo struct { + Method string + Error error + Request *http.Request + StatusCode int +} + +// Server serves registered RPC services using registered codecs. +type Server struct { + codecs map[string]Codec + services *serviceMap + interceptFunc func(i *RequestInfo) *http.Request + beforeFunc func(i *RequestInfo) + afterFunc func(i *RequestInfo) +} + +// RegisterCodec adds a new codec to the server. +// +// Codecs are defined to process a given serialization scheme, e.g., JSON or +// XML. A codec is chosen based on the "Content-Type" header from the request, +// excluding the charset definition. +func (s *Server) RegisterCodec(codec Codec, contentType string) { + s.codecs[strings.ToLower(contentType)] = codec +} + +// RegisterService adds a new service to the server. +// +// The name parameter is optional: if empty it will be inferred from +// the receiver type name. +// +// Methods from the receiver will be extracted if these rules are satisfied: +// +// - The receiver is exported (begins with an upper case letter) or local +// (defined in the package registering the service). +// - The method name is exported. +// - The method has three arguments: *http.Request, *args, *reply. +// - All three arguments are pointers. +// - The second and third arguments are exported or local. +// - The method has return type error. +// +// All other methods are ignored. +func (s *Server) RegisterService(receiver interface{}, name string) error { + return s.services.register(receiver, name, true) +} + +// RegisterTCPService adds a new TCP service to the server. +// No HTTP request struct will be passed to the service methods. +// +// The name parameter is optional: if empty it will be inferred from +// the receiver type name. 
+// +// Methods from the receiver will be extracted if these rules are satisfied: +// +// - The receiver is exported (begins with an upper case letter) or local +// (defined in the package registering the service). +// - The method name is exported. +// - The method has two arguments: *args, *reply. +// - Both arguments are pointers. +// - Both arguments are exported or local. +// - The method has return type error. +// +// All other methods are ignored. +func (s *Server) RegisterTCPService(receiver interface{}, name string) error { + return s.services.register(receiver, name, false) +} + +// HasMethod returns true if the given method is registered. +// +// The method uses a dotted notation as in "Service.Method". +func (s *Server) HasMethod(method string) bool { + if _, _, err := s.services.get(method); err == nil { + return true + } + return false +} + +// RegisterInterceptFunc registers the specified function as the function +// that will be called before every request. The function is allowed to intercept +// the request e.g. add values to the context. +// +// Note: Only one function can be registered, subsequent calls to this +// method will overwrite all the previous functions. +func (s *Server) RegisterInterceptFunc(f func(i *RequestInfo) *http.Request) { + s.interceptFunc = f +} + +// RegisterBeforeFunc registers the specified function as the function +// that will be called before every request. +// +// Note: Only one function can be registered, subsequent calls to this +// method will overwrite all the previous functions. +func (s *Server) RegisterBeforeFunc(f func(i *RequestInfo)) { + s.beforeFunc = f +} + +// RegisterAfterFunc registers the specified function as the function +// that will be called after every request +// +// Note: Only one function can be registered, subsequent calls to this +// method will overwrite all the previous functions. +func (s *Server) RegisterAfterFunc(f func(i *RequestInfo)) { + s.afterFunc = f +} + +// ServeHTTP +func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) { + if r.Method != "POST" { + s.writeError(w, 405, "rpc: POST method required, received "+r.Method) + return + } + contentType := r.Header.Get("Content-Type") + idx := strings.Index(contentType, ";") + if idx != -1 { + contentType = contentType[:idx] + } + var codec Codec + if contentType == "" && len(s.codecs) == 1 { + // If Content-Type is not set and only one codec has been registered, + // then default to that codec. + for _, c := range s.codecs { + codec = c + } + } else if codec = s.codecs[strings.ToLower(contentType)]; codec == nil { + s.writeError(w, 415, "rpc: unrecognized Content-Type: "+contentType) + return + } + // Create a new codec request. + codecReq := codec.NewRequest(r) + // Get service method to be called. + method, errMethod := codecReq.Method() + if errMethod != nil { + s.writeError(w, 400, errMethod.Error()) + return + } + serviceSpec, methodSpec, errGet := s.services.get(method) + if errGet != nil { + s.writeError(w, 400, errGet.Error()) + return + } + // Decode the args. 
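+	// reflect.New allocates a pointer to a zero value of the registered args type; the codec then fills it from the request body.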
+ args := reflect.New(methodSpec.argsType) + if errRead := codecReq.ReadRequest(args.Interface()); errRead != nil { + s.writeError(w, 400, errRead.Error()) + return + } + + // Call the registered Intercept Function + if s.interceptFunc != nil { + req := s.interceptFunc(&RequestInfo{ + Request: r, + Method: method, + }) + if req != nil { + r = req + } + } + // Call the registered Before Function + if s.beforeFunc != nil { + s.beforeFunc(&RequestInfo{ + Request: r, + Method: method, + }) + } + + // Call the service method. + reply := reflect.New(methodSpec.replyType) + + // omit the HTTP request if the service method doesn't accept it + var errValue []reflect.Value + if serviceSpec.passReq { + errValue = methodSpec.method.Func.Call([]reflect.Value{ + serviceSpec.rcvr, + reflect.ValueOf(r), + args, + reply, + }) + } else { + errValue = methodSpec.method.Func.Call([]reflect.Value{ + serviceSpec.rcvr, + args, + reply, + }) + } + + // Cast the result to error if needed. + var errResult error + errInter := errValue[0].Interface() + if errInter != nil { + errResult = errInter.(error) + } + + // Prevents Internet Explorer from MIME-sniffing a response away + // from the declared content-type + w.Header().Set("x-content-type-options", "nosniff") + // Encode the response. + if errWrite := codecReq.WriteResponse(w, reply.Interface(), errResult); errWrite != nil { + s.writeError(w, 400, errWrite.Error()) + } else { + // Call the registered After Function + if s.afterFunc != nil { + s.afterFunc(&RequestInfo{ + Request: r, + Method: method, + Error: errResult, + StatusCode: 200, + }) + } + } +} + +func (s *Server) writeError(w http.ResponseWriter, status int, msg string) { + w.WriteHeader(status) + w.Header().Set("Content-Type", "text/plain; charset=utf-8") + fmt.Fprint(w, msg) + if s.afterFunc != nil { + s.afterFunc(&RequestInfo{ + Error: fmt.Errorf(msg), + StatusCode: status, + }) + } +} diff --git a/vendor/github.com/ochinchina/go-ini/LICENSE b/vendor/github.com/ochinchina/go-ini/LICENSE new file mode 100644 index 000000000..6713cd967 --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Steven Ou + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/ochinchina/go-ini/README.md b/vendor/github.com/ochinchina/go-ini/README.md new file mode 100644 index 000000000..6c67d5c1e --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/README.md @@ -0,0 +1,368 @@ +# Overview + +This is a golang library for reading/writing the .ini format file. 
The description on .ini file can be found at https://en.wikipedia.org/wiki/INI_file + +# Supported .ini format + +A .ini file contains one or more sections and each section contains one or more key/value pair. Following is an example of .ini file + +```ini +# this is a comment line +; this is also a comment line + +[section1] + +key1 = value1 + +[section2] + +key2 = value2 +``` + +## Comments +### Comments line + +A comments line is started with char '#' or ';' and it will be ignored when processing the .ini file. + +```ini + +# this is a comment line +; this is also a comment line + +``` + +### inline comments + +A comment can be appended in a tail of line. The inline comments must be started with ';' or '#' and its previous char must be a space. + +```ini +[section1] +key1 = value1 ;this is a inline comment +key2 = value2;this is not a inline comment +``` + +## Multiline value + +if a value is multiple line value, the value can be put between """ and """, an example: + +```ini + +[section1] + +multi-line-key = """this is a multi-line example, +multiple line can be put in a value, +this is multiple line is just for test""" + +single-line-key = this is a normal value +``` + +## Continuation line + +If a line is too long, user can devide one line to multiple line and on the end of line the char '\\' should be put: + +```ini +[section1] +key1 = this line is too long, \ +we need to write it to multiple line, \ +but actually it is one line from the point of user + +``` + +## Escape char + +This library supports the escape char, the escape char is started with char \\ + +|Common escape sequences Sequence | Meaning | +|---------------------------------|-----------------------------------------------------| +|\\\\ |\ (a single backslash, escaping the escape character)| +|\0 |Null character | +|\a |Bell/Alert/Audible | +|\b |Backspace, Bell character for some applications | +|\t |Tab character | +|\r |Carriage return | +|\n |Line feed | +|\\; |Semicolon | +|\\# |Number sign | +|\\= |Equals sign | +|\\: |Colon | +|\\x???? |Unicode character with hexadecimal code point | + + +## Environemnt variable support + +Environment variable can be embeded in the value of the key and the environment variable will be replaced. For example: + +```ini +[section1] +key1 = this value has env ${HOME} +key2 = this value has env with default ${SOME_ENV:-test},hihi +``` + +In the above example, the environment variable HOME is in the value of key1. So if the value of environment variable HOME is "/home/test", the value of key1 is "this value has env /home/test". + +For the key2, the environemnt SOME_ENV is included and if the environment variable SOME_ENV does not exist, its value will be "test" otherwise it will be the value of SOME_ENV environment variable. + +# API + +## import the library + +The go-ini library should be imported before using this library: + +```go +import ( + ini "github.com/ochinchina/go-ini" +) +``` +## Load .ini file + +.ini format file or string can be loaded by the method: + +### Load from a file + +```go +//Load the .ini from a file +ini := ini.Load( "fileName" ) + +``` + +### Load from a string or byte array in .ini format + +```go +ini_str := `[section1] +key1 = value1 +key2 = value 2 +` + +ini := ini.Load( ini_str ) +//load from a byte array + +ini = ini.Load( []byte(ini_str) ) + +``` + +### Load from a io.Reader + +```go + +var reader io.Reader = ... + +ini := ini.Load( reader ) + +``` + +### Load .ini from multiple source + +The Load() method can load .ini from multiple mixed sources. 
+ +``` go +//load multiple sources: fileName, string, reader and byte array in one statement + +ini := ini.Load( "fileName", ini_str, reader ) +``` + +### Load the .ini in Ini object + +The Ini class also provide a method named Load(), this method can be called multiple times and the later loaded .ini will be appended to the Ini object. + +```go +//first load the .ini from a file +ini := ini.Load( "fileName" ) + +//append the .ini from string to the ini object +ini_str := `[section1] +key1 = value1 +key2 = value 2 +` +ini.Load( ini_str ) + +//append the .ini from a reader to the ini object +var reader io.Reader = ... +ini.Load( reader ) + +``` + +## Access the value of key in the .ini file + +After loading the .ini from a file/string/reader, we can access a keya under a section. This library provides three level API to access the value of a key in a section. + +### Access the value of key in Ini class level + +The value of key can be accessed in Ini class level. + +```go +ini := ini.Load(...) + +value, err := ini.GetValue( "section1", "key1") + +// if err is nil, the value is ok +if err == nil { + //the value exists and DO something according to the value +} +``` + +Sometimes we need to provide a default value if the key in the section does not exist, at this time the user can provide a default value by GetValueWithDefault() method. + +```go +ini := ini.Load(...) + +//if the section1 or key1 does not exist, return a default value(empty string) +value := ini.GetValueWithDefault( "section1", "key1", "" ) +``` +### Access the value of key in Section class level + +Call the GetSection() method by the section name on the Ini object at frist, and then call GetValue() on the section to get the value of key. + +```go +ini := ini.Load(...) + +section, err := ini.GetSection( "section1" ) + +if err == nil { + value, err := section.GetValue( "key1" ) + if err == nil { + //the value of key1 exists + } +} +``` + +The method GetValueWithDefault() ask user provide a default value if the key under section does not exist, the user provided default value will be returned. + +```go +ini := ini.Load(...) + +section, err := ini.GetSection( "section1" ) + +if err == nil { + //get the value of key1 and if the key1 does not exists, return the default empty string + value := section.GetValueWithDefault("key1", "" ) +} +``` + +### Access the value of key in Key class level + +The value of a key can be acccessed in the Key class level also. The method Key() on the section with keyname can be called even if the key does not exist. After getting a Key object, user can call Value() method to get the value of key. +```go +ini := ini.Load(...) + +section, err := ini.GetSection( "section1" ) +if err == nil { + //the Key() method always returns a Key object even if the key does not exist + value, err := section.Key( "key1" ).Value() + if err == nul { + //the value in key1 exists + } +} +``` +User can provide a default value to method ValueWithDefault() on the Key object to get the value of key and if the key does not exist the default value will be returned. + + +```go +ini := ini.Load(...) 
+ +section, err := ini.GetSection( "section1" ) +if err == nil { + //the Key() method always returns a Key object even if the key does not exist + value:= section.Key( "key1" ).ValueWithDefault("") +} +``` + +## Convert the string value to desired types + +Except for getting a string value of a key, you can also ask the library convert the string to one of following types: + +- bool +- int +- int64 +- uint64 +- float32 +- float64 + +For each data type, this library provides two methods GetXXX() and GetXXXWithDefault() on the Ini&Section class level where the XXX stands for the Bool, Int, Int64, Uint64, Float32, Float64. + +An example to ask the library convert the key to a int data type in Ini level: + +```go + +ini := ini.Load(...) + +value, err := ini.GetInt( "section1", "key1" ) + +if err == nil { + //at this time, the value of key1 exists and can be converted to integer +} + +value = ini.GetIntWithDefault( "section1", "key1", 0 ) + +``` + +An example to ask the library convert the key to a int data type in Section level: +```go + +ini := ini.Load(...) + +section, err := ini.GetSection( "section1" ) + +if err == nil { + value, err = section.GetInt( "key1" ) + if err == nil { + //at this time the key1 exists and its value can be converted to int + } + + value = section.GetIntWithDefault("key1", 0 ) +} +``` + +An example to ask the library convert the key to a int data type in Key level: +```go + +ini := ini.Load(...) +section, err := ini.GetSection( "section1" ) +if err == nil { + value, err := section.Key( "key1" ).Int() + if err == nil { + //at this time the key1 exists and its value can be converted to int + } + + //get with default value + value = section.Key( "key1" ).IntWithDefault( 0 ) +} +``` + +## Add the key&value to .ini file + +This library also provides API to add key&value to the .ini file. + +```go + +ini := ini.NewIni() + +section := ini.NewSection( "section1" ) +section.Add( "key1", "value1" ) +``` + +## Save the .ini to the file + +User can call the Write() method on Ini object to write the .ini contents to a io.Writer + +```go + +ini := ini.NewIni() +section := ini.NewSection( "section1" ) +section.Add( "key1", "value1" ) + +buf := bytes.NewBufferString("") +ini.Write( buf ) +``` + +If want to write to the file, there is a convinent API WriteToFile() with filename on the Ini object to write the .ini content to the file. + + +```go + +ini := ini.NewIni() +section := ini.NewSection( "section1" ) +section.Add( "key1", "value1" ) + +ini.WriteToFile( "test.ini" ) + +``` diff --git a/vendor/github.com/ochinchina/go-ini/doc.go b/vendor/github.com/ochinchina/go-ini/doc.go new file mode 100644 index 000000000..105ca3e98 --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/doc.go @@ -0,0 +1,49 @@ +/* +A golang implemented library to read/write .ini format files. + +With this library, you can load the .ini file a string, a byte array, a file and a io.Reader. + + import ( + ini "github.com/ochinchina/go-ini" + ) + + + func main() { + //load from .ini file + ini := ini.Load( "myfile.ini") + //load from .ini format string + str_data := "[section1]\nkey1=value1\n[section2]\nkey2=value2" + ini = ini.Load( str_data ) + + //load .ini format byte array + ini = ini.Load( []byte(str_data) ) + + //load from io.Reader + var reader io.Reader = ... 
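+    //an *os.File, a bytes.Buffer or any other io.Reader can be used here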
+ + ini = ini.Load( reader ) + + //load from multiple source in one Load method + ini = ini.Load( "myfile.ini", reader, str_data, bytes_data ) + } + +The loaded Ini includes sections, you can access section: + + //get all the sections in the .ini + var sections []*Section = ini.Sections() + + //get a section by Name + var section *Section = ini.GetSection( sectionName ) + + +Then the key in a section can be accessed by method GetXXX() and GetXXXWithDefault(defValue): + //get the value of key + value, err := section.GetValue( "key1") + value = section.GetValueWithDefault("key1", "") + + //get value of key as int + i, err := section.GetInt( "key2" ) + i = section.GetIntWithDefault( "key2" ) + +*/ +package ini diff --git a/vendor/github.com/ochinchina/go-ini/env_replacer.go b/vendor/github.com/ochinchina/go-ini/env_replacer.go new file mode 100644 index 000000000..efbd5d92d --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/env_replacer.go @@ -0,0 +1,65 @@ +package ini + +import ( + "bytes" + "os" + "strings" +) + +func get_env_value(env string) (string, bool) { + pos := strings.Index(env, ":") + if pos == -1 { + return os.LookupEnv(env) + } + + real_env := env[0:pos] + def_value := env[pos+1:] + if len(def_value) > 0 && def_value[0] == '-' { + def_value = def_value[1:] + } + if value, ok := os.LookupEnv(real_env); ok { + return value, ok + } else { + return def_value, true + } +} + +func replace_env(s string) string { + n := len(s) + env_start_pos := -1 + result := bytes.NewBuffer(make([]byte, 0)) + + for i := 0; i < n; i++ { + //if env start flag "${" is found but env end flag "}" is not found + if env_start_pos >= 0 && s[i] != '}' { + continue + } + switch s[i] { + case '\\': + result.WriteByte(s[i]) + if i+1 < n { + i++ + result.WriteByte(s[i]) + } + case '$': + if i+1 < n && s[i+1] == '{' { + env_start_pos = i + i++ + } else { + result.WriteByte(s[i]) + } + case '}': + if env_start_pos >= 0 { + if env_value, ok := get_env_value(s[env_start_pos+2 : i]); ok { + result.WriteString(env_value) + } + env_start_pos = -1 + } else { + result.WriteByte(s[i]) + } + default: + result.WriteByte(s[i]) + } + } + return result.String() +} diff --git a/vendor/github.com/ochinchina/go-ini/ini.go b/vendor/github.com/ochinchina/go-ini/ini.go new file mode 100644 index 000000000..f628ae5a6 --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/ini.go @@ -0,0 +1,265 @@ +package ini + +import ( + "bytes" + "fmt" + "io" + "os" +) + +// manage all the sections and their key values defined in the .ini file +// +type Ini struct { + defaultSectionName string + sections map[string]*Section +} + +func NewIni() *Ini { + return &Ini{defaultSectionName: "default", + sections: make(map[string]*Section)} +} + +func (ini *Ini) GetDefaultSectionName() string { + return ini.defaultSectionName +} + +func (ini *Ini) SetDefaultSectionName(defSectionName string) { + ini.defaultSectionName = defSectionName +} + +// create a new section if the section with name does not exist +// or return the exist one if the section with name already exists +// +func (ini *Ini) NewSection(name string) *Section { + if section, ok := ini.sections[name]; ok { + return section + } + section := NewSection(name) + ini.sections[name] = section + return section +} + +// add a section to the .ini file and overwrite the exist section +// with same name +func (ini *Ini) AddSection(section *Section) { + ini.sections[section.Name] = section +} + +// Get all the section name in the ini +// +// return all the section names +func (ini *Ini) Sections() 
[]*Section { + r := make([]*Section, 0) + for _, section := range ini.sections { + r = append(r, section) + } + return r +} + +// check if a key exists or not in the Ini +// +// return true if the key in section exists +func (ini *Ini) HasKey(sectionName, key string) bool { + if section, ok := ini.sections[sectionName]; ok { + return section.HasKey(key) + } + return false +} + +// get section by section name +// +// return: section or nil +func (ini *Ini) GetSection(name string) (*Section, error) { + if section, ok := ini.sections[name]; ok { + return section, nil + } + return nil, noSuchSection(name) +} + +// return true if the section with name exists +// return false if the section with name does not exist +func (ini *Ini) HasSection(name string) bool { + _, err := ini.GetSection(name) + return err == nil +} + +// get the value of key in section +func (ini *Ini) GetValue(sectionName, key string) (string, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetValue(key) + } + return "", noSuchSection(sectionName) +} + +// get the value of the key in section +// if the key does not exist, return the defValue +func (ini *Ini) GetValueWithDefault(sectionName, key string, defValue string) string { + if section, ok := ini.sections[sectionName]; ok { + return section.GetValueWithDefault(key, defValue) + } + return defValue +} + +// get the value of key in section as bool. +// return true if the value of the key is one of following(case insensitive): +// - true +// - yes +// - t +// - y +// - 1 +// return false for all other values +func (ini *Ini) GetBool(sectionName, key string) (bool, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetBool(key) + } + return false, noSuchSection(sectionName) +} + +// get the value of key as bool and return the default value if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetBoolWithDefault(sectionName, key string, defValue bool) bool { + if section, ok := ini.sections[sectionName]; ok { + return section.GetBoolWithDefault(key, defValue) + } + return defValue +} + +// get the value of key in the section as int +func (ini *Ini) GetInt(sectionName, key string) (int, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetInt(key) + } + return 0, noSuchSection(sectionName) +} + +// get the value of key in the section as int and return defValue if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetIntWithDefault(sectionName, key string, defValue int) int { + if section, ok := ini.sections[sectionName]; ok { + return section.GetIntWithDefault(key, defValue) + } + return defValue +} + +// get the value of key in the section as uint +func (ini *Ini) GetUint(sectionName, key string) (uint, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetUint(key) + } + return 0, noSuchSection(sectionName) +} + +// get the value of key in the section as int and return defValue if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetUintWithDefault(sectionName, key string, defValue uint) uint { + if section, ok := ini.sections[sectionName]; ok { + return section.GetUintWithDefault(key, defValue) + } + return defValue +} + +// get the value of key in the section as int64 +func (ini *Ini) GetInt64(sectionName, key string) (int64, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetInt64(key) + } + return 0, 
noSuchSection(sectionName) +} + +// get the value of key in the section as int64 and return defValue if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetInt64WithDefault(sectionName, key string, defValue int64) int64 { + if section, ok := ini.sections[sectionName]; ok { + return section.GetInt64WithDefault(key, defValue) + } + return defValue +} + +// get the value of key in the section as uint64 +func (ini *Ini) GetUint64(sectionName, key string) (uint64, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetUint64(key) + } + return 0, noSuchSection(sectionName) +} + +// get the value of key in the section as uint64 and return defValue if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetUint64WithDefault(sectionName, key string, defValue uint64) uint64 { + if section, ok := ini.sections[sectionName]; ok { + return section.GetUint64WithDefault(key, defValue) + } + return defValue +} + +// get the value of key in the section as float32 +func (ini *Ini) GetFloat32(sectionName, key string) (float32, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetFloat32(key) + } + return 0, noSuchSection(sectionName) +} + +// get the value of key in the section as float32 and return defValue if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetFloat32WithDefault(sectionName, key string, defValue float32) float32 { + if section, ok := ini.sections[sectionName]; ok { + return section.GetFloat32WithDefault(key, defValue) + } + return defValue +} + +// get the value of key in the section as float64 +func (ini *Ini) GetFloat64(sectionName, key string) (float64, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetFloat64(key) + } + return 0, noSuchSection(sectionName) +} + +// get the value of key in the section as float64 and return defValue if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetFloat64WithDefault(sectionName, key string, defValue float64) float64 { + if section, ok := ini.sections[sectionName]; ok { + return section.GetFloat64WithDefault(key, defValue) + } + return defValue +} + +func noSuchSection(sectionName string) error { + return fmt.Errorf("no such section:%s", sectionName) +} + +func (ini *Ini) String() string { + buf := bytes.NewBuffer(make([]byte, 0)) + ini.Write(buf) + return buf.String() +} + +// write the content of the .ini in the .ini file format, e.g. 
in following format: +// +// [section1] +// key1 = value1 +// key2 = value2 +// [section2] +// key3 = value3 +// key4 = value4 +func (ini *Ini) Write(writer io.Writer) error { + for _, section := range ini.sections { + err := section.Write(writer) + if err != nil { + return err + } + } + return nil +} + +// Write the conents of ini to a file +func (ini *Ini) WriteToFile(fileName string) error { + file, err := os.Create(fileName) + if err == nil { + defer file.Close() + return ini.Write(file) + } + return err +} diff --git a/vendor/github.com/ochinchina/go-ini/key.go b/vendor/github.com/ochinchina/go-ini/key.go new file mode 100644 index 000000000..a3dd881a0 --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/key.go @@ -0,0 +1,282 @@ +package ini + +import ( + "fmt" + "strconv" + "strings" +) + +// represents the pair stored in the +// section of the .ini file +// +type Key interface { + // get name of the key + Name() string + + // get value of the key + Value() (string, error) + + //get the value of key and return defValue if + //the value does not exist + ValueWithDefault(defValue string) string + + // get the value as bool + // return true if the value is one of following(case insensitive): + // - true + // - yes + // - T + // - Y + // - 1 + // Any other value will return false + Bool() (bool, error) + + // get the value as bool and return the defValue if the + // value of the key does not exist + BoolWithDefault(defValue bool) bool + // get the value as int + Int() (int, error) + + // get value as int and return defValue if the + // value of the key does not exist + IntWithDefault(defValue int) int + + //get value as uint + Uint() (uint, error) + + //get value as uint and return defValue if the + //key does not exist or it is not uint format + UintWithDefault(defValue uint) uint + + // get the value as int64 + Int64() (int64, error) + + // get the value as int64 and return defValue + // if the value of the key does not exist + Int64WithDefault(defValue int64) int64 + + // get the value as uint64 + Uint64() (uint64, error) + + // get the value as uint64 and return defValue + // if the value of the key does not exist + Uint64WithDefault(defValue uint64) uint64 + + // get the value as float32 + Float32() (float32, error) + + // get the value as float32 and return defValue + // if the value of the key does not exist + Float32WithDefault(defValue float32) float32 + + // get the value as float64 + Float64() (float64, error) + + // get the value as the float64 and return defValue + // if the value of the key does not exist + Float64WithDefault(defValue float64) float64 + + // return a string as "key=value" format + // and if no value return empty string + String() string +} + +type nonExistKey struct { + name string +} + +func newNonExistKey(name string) *nonExistKey { + return &nonExistKey{name: name} +} + +func (nek *nonExistKey) Name() string { + return nek.name +} + +func (nek *nonExistKey) Value() (string, error) { + return "", nek.noSuchKey() +} + +func (nek *nonExistKey) ValueWithDefault(defValue string) string { + return defValue +} + +func (nek *nonExistKey) Bool() (bool, error) { + return false, nek.noSuchKey() +} + +func (nek *nonExistKey) BoolWithDefault(defValue bool) bool { + return defValue +} + +func (nek *nonExistKey) Int() (int, error) { + return 0, nek.noSuchKey() +} + +func (nek *nonExistKey) IntWithDefault(defValue int) int { + return defValue +} + +func (nek *nonExistKey) Uint() (uint, error) { + return 0, nek.noSuchKey() +} + +func (nek *nonExistKey) 
UintWithDefault(defValue uint) uint { + return defValue +} + +func (nek *nonExistKey) Int64() (int64, error) { + return 0, nek.noSuchKey() +} + +func (nek *nonExistKey) Int64WithDefault(defValue int64) int64 { + return defValue +} + +func (nek *nonExistKey) Uint64() (uint64, error) { + return 0, nek.noSuchKey() +} + +func (nek *nonExistKey) Uint64WithDefault(defValue uint64) uint64 { + return defValue +} + +func (nek *nonExistKey) Float32() (float32, error) { + return .0, nek.noSuchKey() +} + +func (nek *nonExistKey) Float32WithDefault(defValue float32) float32 { + return defValue +} + +func (nek *nonExistKey) Float64() (float64, error) { + return .0, nek.noSuchKey() +} + +func (nek *nonExistKey) Float64WithDefault(defValue float64) float64 { + return defValue +} + +func (nek *nonExistKey) String() string { + return "" +} + +func (nek *nonExistKey) noSuchKey() error { + return fmt.Errorf("no such key:%s", nek.name) +} + +type normalKey struct { + name string + value string +} + +var trueBoolValue = map[string]bool{"true": true, "t": true, "yes": true, "y": true, "1": true} + +func newNormalKey(name, value string) *normalKey { + return &normalKey{name: name, value: replace_env(value)} +} + +func (k *normalKey) Name() string { + return k.name +} + +func (k *normalKey) Value() (string, error) { + return k.value, nil +} + +func (k *normalKey) ValueWithDefault(defValue string) string { + return k.value +} + +func (k *normalKey) Bool() (bool, error) { + if _, ok := trueBoolValue[strings.ToLower(k.value)]; ok { + return true, nil + } + return false, nil +} + +func (k *normalKey) BoolWithDefault(defValue bool) bool { + v, err := k.Bool() + if err == nil { + return v + } + return defValue +} + +func (k *normalKey) Int() (int, error) { + return strconv.Atoi(k.value) +} + +func (k *normalKey) IntWithDefault(defValue int) int { + i, err := strconv.Atoi(k.value) + if err == nil { + return i + } + return defValue +} + +func (k *normalKey) Uint() (uint, error) { + v, err := strconv.ParseUint(k.value, 0, 32) + return uint(v), err +} + +func (k *normalKey) UintWithDefault(defValue uint) uint { + i, err := k.Uint() + if err == nil { + return i + } + return defValue + +} + +func (k *normalKey) Int64() (int64, error) { + return strconv.ParseInt(k.value, 0, 64) +} + +func (k *normalKey) Int64WithDefault(defValue int64) int64 { + i, err := strconv.ParseInt(k.value, 0, 64) + if err == nil { + return i + } + return defValue +} + +func (k *normalKey) Uint64() (uint64, error) { + return strconv.ParseUint(k.value, 0, 64) +} + +func (k *normalKey) Uint64WithDefault(defValue uint64) uint64 { + i, err := strconv.ParseUint(k.value, 0, 64) + if err == nil { + return i + } + return defValue +} + +func (k *normalKey) Float32() (float32, error) { + f, err := strconv.ParseFloat(k.value, 32) + return float32(f), err +} + +func (k *normalKey) Float32WithDefault(defValue float32) float32 { + f, err := strconv.ParseFloat(k.value, 32) + if err == nil { + return float32(f) + } + return defValue +} + +func (k *normalKey) Float64() (float64, error) { + return strconv.ParseFloat(k.value, 64) +} + +func (k *normalKey) Float64WithDefault(defValue float64) float64 { + f, err := strconv.ParseFloat(k.value, 64) + if err == nil { + return f + } + return defValue +} + +func (k *normalKey) String() string { + return fmt.Sprintf("%s=%s", k.name, toEscape(k.value)) +} diff --git a/vendor/github.com/ochinchina/go-ini/loader.go b/vendor/github.com/ochinchina/go-ini/loader.go new file mode 100644 index 000000000..059b8c26a --- /dev/null +++ 
b/vendor/github.com/ochinchina/go-ini/loader.go @@ -0,0 +1,349 @@ +package ini + +import ( + "bufio" + "bytes" + "errors" + "fmt" + "io" + "os" + "strconv" + "strings" + "unicode" +) + +// remove inline comments +// +// inline comments must start with ';' or '#' +// and the char before the ';' or '#' must be a space +// +func removeComments(value string) string { + n := len( value ) + i := 0 + for ;i < n; i++ { + if value[i] == '\\' { + i++ + } else if value[i] == ';' || value[i] == '#' { + if i > 0 && unicode.IsSpace( rune( value[i-1] ) ) { + return strings.TrimSpace( value[0:i] ) + } + } + } + return strings.TrimSpace( value ) +} + +// check if it is a oct char,e.g. must be char '0' to '7' +// +func isOctChar(ch byte) bool { + return ch >= '0' && ch <= '7' +} + +// check if the char is a hex char, e.g. the char +// must be '0'..'9' or 'a'..'f' or 'A'..'F' +// +func isHexChar(ch byte) bool { + return ch >= '0' && ch <= '9' || + ch >= 'a' && ch <= 'f' || + ch >= 'A' && ch <= 'F' +} + +func fromEscape(value string) string { + if strings.Index(value, "\\") == -1 { + return value + } + + r := "" + n := len(value) + for i := 0; i < n; i++ { + if value[i] == '\\' { + if i+1 < n { + i++ + //if is it oct + if i+2 < n && isOctChar(value[i]) && isOctChar(value[i+1]) && isOctChar(value[i+2]) { + t, err := strconv.ParseInt(value[i:i+3], 8, 32) + if err == nil { + r = r + string(rune(t)) + } + i += 2 + continue + } + switch value[i] { + case '0': + r = r + string(byte(0)) + case 'a': + r = r + "\a" + case 'b': + r = r + "\b" + case 'f': + r = r + "\f" + case 't': + r = r + "\t" + case 'r': + r = r + "\r" + case 'n': + r = r + "\n" + case 'v': + r = r + "\v" + case 'x': + i++ + if i+3 < n && isHexChar(value[i]) && + isHexChar(value[i+1]) && + isHexChar(value[i+2]) && + isHexChar(value[i+3]) { + + t, err := strconv.ParseInt(value[i:i+4], 16, 32) + if err == nil { + r = r + string(rune(t)) + } + i += 3 + } + default: + r = fmt.Sprintf("%s%c", r, value[i]) + } + } + } else { + r = fmt.Sprintf("%s%c", r, value[i]) + } + } + return r +} + +func toEscape(s string) string { + result := bytes.NewBuffer(make([]byte, 0)) + + n := len(s) + + for i := 0; i < n; i++ { + switch s[i] { + case 0: + result.WriteString("\\0") + case '\\': + result.WriteString("\\\\") + case '\a': + result.WriteString("\\a") + case '\b': + result.WriteString("\\b") + case '\t': + result.WriteString("\\t") + case '\r': + result.WriteString("\\r") + case '\n': + result.WriteString("\\n") + case ';': + result.WriteString("\\;") + case '#': + result.WriteString("\\#") + case '=': + result.WriteString("\\=") + case ':': + result.WriteString("\\:") + default: + result.WriteByte(s[i]) + } + } + return result.String() +} +func removeContinuationSuffix(value string) (string, bool) { + pos := strings.LastIndex(value, "\\") + n := len(value) + if pos == -1 || pos != n-1 { + return "", false + } + for pos >= 0 { + if value[pos] != '\\' { + return "", false + } + pos-- + if pos < 0 || value[pos] != '\\' { + return value[0 : n-1], true + } + pos-- + } + return "", false +} + +type lineReader struct { + reader *bufio.Scanner +} + +func newLineReader(reader io.Reader) *lineReader { + return &lineReader{reader: bufio.NewScanner(reader)} +} + +func (lr *lineReader) readLine() (string, error) { + if lr.reader.Scan() { + return lr.reader.Text(), nil + } + return "", errors.New("No data") + +} + +func readLinesUntilSuffix(lineReader *lineReader, suffix string) string { + r := "" + for { + line, err := lineReader.readLine() + if err != nil { + break + } + t 
:= strings.TrimRightFunc(line, unicode.IsSpace) + if strings.HasSuffix(t, suffix) { + r = r + t[0:len(t)-len(suffix)] + break + } else { + r = r + line + "\n" + } + } + return r +} + +func readContinuationLines(lineReader *lineReader) string { + r := "" + for { + line, err := lineReader.readLine() + if err != nil { + break + } + line = strings.TrimRightFunc(line, unicode.IsSpace) + if t, continuation := removeContinuationSuffix(line); continuation { + r = r + t + } else { + r = r + line + break + } + } + return r +} + +/* +Load from the sources, the source can be one of: + - fileName + - a string includes .ini + - io.Reader the reader to load the .ini contents + - byte array incldues .ini content +*/ +func (ini *Ini) Load(sources ...interface{}) { + for _, source := range sources { + switch source.(type) { + case string: + s, _ := source.(string) + if _, err := os.Stat(s); err == nil { + ini.LoadFile(s) + } else { + ini.LoadString(s) + } + case io.Reader: + reader, _ := source.(io.Reader) + ini.LoadReader(reader) + case []byte: + b, _ := source.([]byte) + ini.LoadBytes(b) + } + } + +} + +// Explicitly loads .ini from a reader +// +func (ini *Ini) LoadReader(reader io.Reader) { + lineReader := newLineReader(reader) + var curSection *Section = nil + for { + line, err := lineReader.readLine() + if err != nil { + break + } + line = strings.TrimSpace(line) + + //empty line or comments line + if len(line) <= 0 || line[0] == ';' || line[0] == '#' { + continue + } + //if it is a section + if strings.HasPrefix(line, "[") && strings.HasSuffix(line, "]") { + sectionName := strings.TrimSpace(line[1 : len(line)-1]) + if len(sectionName) > 0 { + curSection = ini.NewSection(sectionName) + } + continue + } + pos := strings.IndexAny(line, "=;") + if pos != -1 { + key := strings.TrimSpace(line[0:pos]) + value := strings.TrimLeftFunc(line[pos+1:], unicode.IsSpace) + //if it is a multiline indicator + if strings.HasPrefix(value, "\"\"\"") { + t := strings.TrimRightFunc(value, unicode.IsSpace) + //if the end multiline indicator is found + if strings.HasSuffix(t, "\"\"\"") { + value = t[3 : len(t)-3] + } else { //read lines until end multiline indicator is found + value = value[3:] + "\n" + readLinesUntilSuffix(lineReader, "\"\"\"") + } + } else { + value = strings.TrimRightFunc(value, unicode.IsSpace) + //if is it a continuation line + if t, continuation := removeContinuationSuffix(value); continuation { + value = t + readContinuationLines(lineReader) + } + } + + if len(key) > 0 { + if curSection == nil && len(ini.defaultSectionName) > 0 { + curSection = ini.NewSection(ini.defaultSectionName) + } + if curSection != nil { + //remove the comments and convert escape char to real + curSection.Add(key, strings.TrimSpace(fromEscape(removeComments(value)))) + } + } + } + } +} + +// Load ini file from file named fileName +// +func (ini *Ini) LoadFile(fileName string) { + f, err := os.Open(fileName) + if err == nil { + defer f.Close() + ini.Load(f) + } +} + +var defaultSectionName string = "default" + +func SetDefaultSectionName(defSectionName string) { + defaultSectionName = defSectionName +} + +// load ini from the content which contains the .ini formated string +// +func (ini *Ini) LoadString(content string) { + ini.Load(bytes.NewBufferString(content)) +} + +// load .ini from a byte array which contains the .ini formated content +func (ini *Ini) LoadBytes(content []byte) { + ini.Load(bytes.NewBuffer(content)) +} + +/* +Load the .ini from one of following resource: + - file + - string in .ini format + - byte array in 
.ini format + - io.Reader a reader to load .ini content + +One or more source can be provided in this Load method, such as: + var reader1 io.Reader = ... + var reader2 io.Reader = ... + ini.Load( "./my.ini", "[section]\nkey=1", "./my2.ini", reader1, reader2 ) +*/ +func Load(sources ...interface{}) *Ini { + ini := NewIni() + ini.SetDefaultSectionName(defaultSectionName) + for _, source := range sources { + ini.Load(source) + } + return ini +} diff --git a/vendor/github.com/ochinchina/go-ini/properties.go b/vendor/github.com/ochinchina/go-ini/properties.go new file mode 100644 index 000000000..bf02b6e5c --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/properties.go @@ -0,0 +1,116 @@ +package ini + +type Properties struct { + ini *Ini +} + +func NewProperties() *Properties { + return &Properties{ini: NewIni()} +} + +func (p *Properties) Load(sources ...interface{}) { + p.ini.Load(sources) +} + +func (p *Properties) GetProperty(key string) (string, error) { + return p.ini.GetValue(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetPropertyWithDefault(key string, defValue string) string { + v, err := p.GetProperty(key) + if err == nil { + return v + } + return defValue +} + +func (p *Properties) GetBool(key string) (bool, error) { + return p.ini.GetBool(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetBoolWithDefault(key string, defValue bool) bool{ + v, err := p.GetBool(key) + if err == nil { + return v + } else { + return defValue + } +} + +func (p *Properties) GetInt(key string) (int, error) { + return p.ini.GetInt(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetIntWithDefault(key string, defValue int) int { + v, err := p.GetInt(key) + if err == nil { + return v + } else { + return defValue + } +} + +func (p *Properties) GetInt64(key string) (int64, error) { + return p.ini.GetInt64(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetInt64WithDefault(key string, defValue int64) int64 { + v, err := p.GetInt64(key) + if err == nil { + return v + } else { + return defValue + } +} + +func (p *Properties) GetUint64(key string) (uint64, error) { + return p.ini.GetUint64(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetUint64WithDefault(key string, defValue uint64) uint64 { + v, err := p.GetUint64(key) + if err == nil { + return v + } else { + return defValue + } +} + +func (p *Properties) GetUint(key string) (uint, error) { + return p.ini.GetUint(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetUintWithDefault(key string, defValue uint) uint { + v, err := p.GetUint(key) + if err == nil { + return v + } else { + return defValue + } +} + +func (p *Properties) GetFloat32(key string) (float32, error) { + return p.ini.GetFloat32(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetFloat32WithDefault(key string, defValue float32) float32 { + v, err := p.GetFloat32(key) + if err == nil { + return v + } else { + return defValue + } +} + +func (p *Properties) GetFloat64(key string) (float64, error) { + return p.ini.GetFloat64(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetFloat64WithDefault(key string, defValue float64) float64 { + v, err := p.GetFloat64(key) + if err == nil { + return v + } else { + return defValue + } +} diff --git a/vendor/github.com/ochinchina/go-ini/section.go b/vendor/github.com/ochinchina/go-ini/section.go new file mode 100644 index 000000000..e04ba4277 --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/section.go @@ -0,0 +1,177 @@ +package ini 
+ +import ( + "bytes" + "fmt" + "io" +) + +// manages all the key/value defined in the .ini file format +type Section struct { + //Name of the section + Name string + //key values + keyValues map[string]Key +} + +// construct a new section with section name +func NewSection(name string) *Section { + return &Section{Name: name, + keyValues: make(map[string]Key)} +} + +// add key/value to the section and overwrite the old one +func (section *Section) Add(key, value string) { + section.keyValues[key] = newNormalKey(key, value) +} + +// check if the key is in the section +// +// return true if the section contains the key +func (section *Section) HasKey(key string) bool { + _, ok := section.keyValues[key] + return ok +} + +// Get all the keys in the section +// +// return: all keys in the section +func (section *Section) Keys() []Key { + r := make([]Key, 0) + for _, v := range section.keyValues { + r = append(r, v) + } + return r +} + +// Get the key. +// +// This method can be called even if the key is not in the +// section. +func (section *Section) Key(key string) Key { + if v, ok := section.keyValues[key]; ok { + return v + } + return newNonExistKey(key) +} + +// Get value of key as string +func (section *Section) GetValue(key string) (string, error) { + return section.Key(key).Value() +} + +// Get value of key and if the key does not exist, return the defValue +func (section *Section) GetValueWithDefault(key string, defValue string) string { + return section.Key(key).ValueWithDefault(defValue) +} + +// Get the value of key as bool, it will return true if the value of the key is one +// of following( case insensitive): +// - true +// - yes +// - t +// - y +// - 1 +func (section *Section) GetBool(key string) (bool, error) { + return section.Key(key).Bool() +} + +// Get the value of key as bool and if the key does not exist, return the +// default value +func (section *Section) GetBoolWithDefault(key string, defValue bool) bool { + return section.Key(key).BoolWithDefault(defValue) +} + +// Get the value of the key as int +func (section *Section) GetInt(key string) (int, error) { + return section.Key(key).Int() +} + +// Get the value of the key as int and if the key does not exist return +// the default value +func (section *Section) GetIntWithDefault(key string, defValue int) int { + return section.Key(key).IntWithDefault(defValue) +} + +// Get the value of the key as uint +func (section *Section) GetUint(key string) (uint, error) { + return section.Key(key).Uint() +} + +// Get the value of the key as int and if the key does not exist return +// the default value +func (section *Section) GetUintWithDefault(key string, defValue uint) uint { + return section.Key(key).UintWithDefault(defValue) +} + +// Get the value of the key as int64 +func (section *Section) GetInt64(key string) (int64, error) { + return section.Key(key).Int64() +} + +// Get the value of the key as int64 and if the key does not exist return +// the default value +func (section *Section) GetInt64WithDefault(key string, defValue int64) int64 { + return section.Key(key).Int64WithDefault(defValue) +} + +// Get the value of the key as uint64 +func (section *Section) GetUint64(key string) (uint64, error) { + return section.Key(key).Uint64() +} + +// Get the value of the key as uint64 and if the key does not exist return +// the default value +func (section *Section) GetUint64WithDefault(key string, defValue uint64) uint64 { + return section.Key(key).Uint64WithDefault(defValue) +} + +// Get the value of the key as float32 +func 
(section *Section) GetFloat32(key string) (float32, error) { + return section.Key(key).Float32() +} + +// Get the value of the key as float32 and if the key does not exist return +// the default value +func (section *Section) GetFloat32WithDefault(key string, defValue float32) float32 { + return section.Key(key).Float32WithDefault(defValue) +} + +// Get the value of the key as float64 +func (section *Section) GetFloat64(key string) (float64, error) { + return section.Key(key).Float64() +} + +// Get the value of the key as float64 and if the key does not exist return +// the default value +func (section *Section) GetFloat64WithDefault(key string, defValue float64) float64 { + return section.Key(key).Float64WithDefault(defValue) +} + +// convert the section content to the .ini section format, so the section content will +// be converted to following format: +// +// [sectionx] +// key1 = value1 +// key2 = value2 +// +func (section *Section) String() string { + buf := bytes.NewBuffer(make([]byte, 0)) + section.Write(buf) + return buf.String() +} + +// write the section content to the writer with .ini section format. +func (section *Section) Write(writer io.Writer) error { + _, err := fmt.Fprintf(writer, "[%s]\n", section.Name) + if err != nil { + return err + } + for _, v := range section.keyValues { + _, err = fmt.Fprintf(writer, "%s\n", v.String()) + if err != nil { + return err + } + } + return nil +} diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/LICENSE b/vendor/github.com/ochinchina/gorilla-xmlrpc/LICENSE new file mode 100644 index 000000000..2e907e487 --- /dev/null +++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2013, Ivan Daniluk +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. + +* Neither the name of the {organization} nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
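The vendored go-ini package above exposes its configuration through Ini, Section and Key accessors with *WithDefault fallbacks. As an illustrative sketch only (not part of the patch): the snippet below consumes the API exactly as shown in the vendored sources, using the package-level Load helper and the Ini-level getters; the section and key names ("server", "port", "retry", "host") are made-up example data.

    package main

    import (
    	"fmt"

    	ini "github.com/ochinchina/go-ini"
    )

    func main() {
    	// Load accepts file names, .ini-formatted strings, io.Readers or byte
    	// slices; a literal string keeps the example self-contained.
    	cfg := ini.Load("[server]\nport = 9001\nretry = 2.5\n")

    	// The *WithDefault getters fall back to the supplied value when the
    	// section or key is missing, so no error handling is required.
    	port := cfg.GetInt64WithDefault("server", "port", 8080)
    	retry := cfg.GetFloat64WithDefault("server", "retry", 1.0)

    	// GetValue instead reports a missing section or key as an error.
    	host, err := cfg.GetValue("server", "host")
    	if err != nil {
    		host = "localhost"
    	}

    	fmt.Println(port, retry, host) // 9001 2.5 localhost
    }

For flat, section-less files, the Properties wrapper shown in properties.go offers the same getters keyed only by name, routing everything through the default section.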
diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/client.go b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/client.go new file mode 100644 index 000000000..d8cb0a4cf --- /dev/null +++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/client.go @@ -0,0 +1,26 @@ +// Copyright 2013 Ivan Danyliuk +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xml + +import ( + "io" + "io/ioutil" +) + +// EncodeClientRequest encodes parameters for a XML-RPC client request. +func EncodeClientRequest(method string, args interface{}) ([]byte, error) { + xml, err := rpcRequest2XML(method, args) + return []byte(xml), err +} + +// DecodeClientResponse decodes the response body of a client request into +// the interface reply. +func DecodeClientResponse(r io.Reader, reply interface{}) error { + rawxml, err := ioutil.ReadAll(r) + if err != nil { + return FaultSystemError + } + return xml2RPC(string(rawxml), reply) +} diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/doc.go b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/doc.go new file mode 100644 index 000000000..eebf2e8ae --- /dev/null +++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/doc.go @@ -0,0 +1,50 @@ +/* +XML-RPC implementation for the Gorilla/RPC toolkit. + +It's built on top of gorilla/rpc package in Go(Golang) language and implements XML-RPC, according to it's specification. Unlike net/rpc from Go strlib, gorilla/rpc allows usage of HTTP POST requests for RPC. + +XML-RPC spec: http://xmlrpc.scripting.com/spec.html + +Installation + +Assuming you already imported gorilla/rpc, use the following command: + + go get github.com/divan/gorilla-xmlrpc/xml + +Implementation details + +The main objective was to use standard encoding/xml package for XML marshalling/unmarshalling. Unfortunately, in current implementation there is no graceful way to implement common structre for marshal and unmarshal functions - marshalling doesn't handle interface{} types so far (though, it could be changed in the future). So, marshalling is implemented manually. + +Unmarshalling code first creates temporary structure for unmarshalling XML into, then converts it into the passed variable using reflect package. If XML struct member's name is lowercased, it's first letter will be uppercased, as in Go/Gorilla field name must be exported(first-letter uppercased). + +Marshalling code converts rpc directly to the string XML representation. + +For the better understanding, I use terms 'rpc2xml' and 'xml2rpc' instead of 'marshal' and 'unmarshall'. + +Types + +The following types are supported: + + XML-RPC Golang + ------- ------ + int, i4 int + double float64 + boolean bool + stringi string + dateTime.iso8601 time.Time + base64 []byte + struct struct + array []interface{} + nil nil + +TODO + +TODO list: + * Add more corner cases tests + +Examples + +Checkout examples in examples/ directory. + +*/ +package xml diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/fault.go b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/fault.go new file mode 100644 index 000000000..4a24efb66 --- /dev/null +++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/fault.go @@ -0,0 +1,51 @@ +// Copyright 2013 Ivan Danyliuk +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xml + +import ( + "fmt" + "io" +) + +// Default Faults +// NOTE: XMLRPC spec doesn't specify any Fault codes. 
+// These codes seems to be widely accepted, and taken from the http://xmlrpc-epi.sourceforge.net/specs/rfc.fault_codes.php +var ( + FaultInvalidParams = Fault{Code: -32602, String: "Invalid Method Parameters"} + FaultWrongArgumentsNumber = Fault{Code: -32602, String: "Wrong Arguments Number"} + FaultInternalError = Fault{Code: -32603, String: "Internal Server Error"} + FaultApplicationError = Fault{Code: -32500, String: "Application Error"} + FaultSystemError = Fault{Code: -32400, String: "System Error"} + FaultDecode = Fault{Code: -32700, String: "Parsing error: not well formed"} +) + +// Fault represents XML-RPC Fault. +type Fault struct { + Code int `xml:"faultCode"` + String string `xml:"faultString"` +} + +// Error satisifies error interface for Fault. +func (f Fault) Error() string { + return fmt.Sprintf("%d: %s", f.Code, f.String) +} + +// Fault2XML is a quick 'marshalling' replacemnt for the Fault case. +func fault2XML(fault Fault, buffer io.Writer) { + fmt.Fprintf(buffer, "") + rpc2XML(fault, buffer) + fmt.Fprintf(buffer, "") +} + +type faultValue struct { + Value value `xml:"value"` +} + +// IsEmpty returns true if faultValue contain fault. +// +// faultValue should be a struct with 2 members. +func (f faultValue) IsEmpty() bool { + return len(f.Value.Struct) == 0 +} diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/rpc2xml.go b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/rpc2xml.go new file mode 100644 index 000000000..6c17e5a2f --- /dev/null +++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/rpc2xml.go @@ -0,0 +1,149 @@ +// Copyright 2013 Ivan Danyliuk +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xml + +import ( + "bytes" + "encoding/base64" + "fmt" + "io" + "reflect" + "strings" + "time" +) + +func rpcRequest2XML(method string, rpc interface{}) (string, error) { + buffer := bytes.NewBuffer(make([]byte, 0)) + fmt.Fprintf(buffer, "%s", method) + err := rpcParams2XML(rpc, buffer) + fmt.Fprintf(buffer, "") + return buffer.String(), err +} + +func rpcResponse2XMLStr(rpc interface{}) (string, error) { + buffer := bytes.NewBuffer(make([]byte, 0)) + err := rpcResponse2XML(rpc, buffer) + return buffer.String(), err +} + +func rpcResponse2XML(rpc interface{}, writer io.Writer) error { + fmt.Fprintf(writer, "") + err := rpcParams2XML(rpc, writer) + fmt.Fprintf(writer, "") + return err +} + +func rpcParams2XML(rpc interface{}, writer io.Writer) error { + var err error + fmt.Fprintf(writer, "") + for i := 0; i < reflect.ValueOf(rpc).Elem().NumField(); i++ { + fmt.Fprintf(writer, "") + err = rpc2XML(reflect.ValueOf(rpc).Elem().Field(i).Interface(), writer) + fmt.Fprintf(writer, "") + } + fmt.Fprintf(writer, "") + return err +} + +func rpc2XML(value interface{}, writer io.Writer) error { + fmt.Fprintf(writer, "") + switch reflect.ValueOf(value).Kind() { + case reflect.Int: + fmt.Fprintf(writer, "%d", value.(int)) + case reflect.Float64: + fmt.Fprintf(writer, "%f", value.(float64)) + case reflect.String: + string2XML(value.(string), writer) + case reflect.Bool: + bool2XML(value.(bool), writer) + case reflect.Struct: + if reflect.TypeOf(value).String() != "time.Time" { + struct2XML(value, writer) + } else { + time2XML(value.(time.Time), writer) + } + case reflect.Slice, reflect.Array: + // FIXME: is it the best way to recognize '[]byte'? 
+ if reflect.TypeOf(value).String() != "[]uint8" { + array2XML(value, writer) + } else { + base642XML(value.([]byte), writer) + } + case reflect.Ptr: + if reflect.ValueOf(value).IsNil() { + fmt.Fprintf(writer, "") + } + } + fmt.Fprintf(writer, "") + return nil +} + +func bool2XML(value bool, writer io.Writer) { + var b string + if value { + b = "1" + } else { + b = "0" + } + fmt.Fprintf(writer, "%s", b) +} + +func string2XML(value string, writer io.Writer) { + value = strings.Replace(value, "&", "&", -1) + value = strings.Replace(value, "\"", """, -1) + value = strings.Replace(value, "<", "<", -1) + value = strings.Replace(value, ">", ">", -1) + fmt.Fprintf(writer, "%s", value) +} + +func struct2XML(value interface{}, writer io.Writer) { + fmt.Fprintf(writer, "") + for i := 0; i < reflect.TypeOf(value).NumField(); i++ { + field := reflect.ValueOf(value).Field(i) + field_type := reflect.TypeOf(value).Field(i) + var name string + if field_type.Tag.Get("xml") != "" { + name = field_type.Tag.Get("xml") + } else { + name = field_type.Name + } + fmt.Fprintf(writer, "") + fmt.Fprintf(writer, "%s", name) + rpc2XML(field.Interface(), writer) + fmt.Fprintf(writer, "") + } + fmt.Fprintf(writer, "") + return +} + +func array2XML(value interface{}, writer io.Writer) { + fmt.Fprintf(writer, "") + for i := 0; i < reflect.ValueOf(value).Len(); i++ { + rpc2XML(reflect.ValueOf(value).Index(i).Interface(), writer) + } + fmt.Fprintf(writer, "") +} + +func time2XML(t time.Time, writer io.Writer) { + /* + // TODO: find out whether we need to deal + // here with TZ + var tz string; + zone, offset := t.Zone() + if zone == "UTC" { + tz = "Z" + } else { + tz = fmt.Sprintf("%03d00", offset / 3600 ) + } + */ + fmt.Fprintf(writer, "%04d%02d%02dT%02d:%02d:%02d", + t.Year(), t.Month(), t.Day(), + t.Hour(), t.Minute(), t.Second()) +} + +func base642XML(data []byte, writer io.Writer) { + str := base64.StdEncoding.EncodeToString(data) + fmt.Fprintf(writer, "%s", str) +} diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/server.go b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/server.go new file mode 100644 index 000000000..a1eb0a7e0 --- /dev/null +++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/server.go @@ -0,0 +1,118 @@ +// Copyright 2013 Ivan Danyliuk +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xml + +import ( + "bytes" + "encoding/xml" + "fmt" + "io/ioutil" + "net/http" + + "github.com/gorilla/rpc" +) + +// ---------------------------------------------------------------------------- +// Codec +// ---------------------------------------------------------------------------- + +// NewCodec returns a new XML-RPC Codec. +func NewCodec() *Codec { + return &Codec{ + aliases: make(map[string]string), + } +} + +// Codec creates a CodecRequest to process each request. +type Codec struct { + aliases map[string]string +} + +// RegisterAlias creates a method alias +func (c *Codec) RegisterAlias(alias, method string) { + c.aliases[alias] = method +} + +// NewRequest returns a CodecRequest. 
+func (c *Codec) NewRequest(r *http.Request) rpc.CodecRequest { + rawxml, err := ioutil.ReadAll(r.Body) + if err != nil { + return &CodecRequest{err: err} + } + defer r.Body.Close() + + var request ServerRequest + if err := xml.Unmarshal(rawxml, &request); err != nil { + return &CodecRequest{err: err} + } + request.rawxml = string(rawxml) + if method, ok := c.aliases[request.Method]; ok { + request.Method = method + } + return &CodecRequest{request: &request} +} + +// ---------------------------------------------------------------------------- +// CodecRequest +// ---------------------------------------------------------------------------- + +type ServerRequest struct { + Name xml.Name `xml:"methodCall"` + Method string `xml:"methodName"` + rawxml string +} + +// CodecRequest decodes and encodes a single request. +type CodecRequest struct { + request *ServerRequest + err error +} + +// Method returns the RPC method for the current request. +// +// The method uses a dotted notation as in "Service.Method". +func (c *CodecRequest) Method() (string, error) { + if c.err == nil { + return c.request.Method, nil + } + return "", c.err +} + +// ReadRequest fills the request object for the RPC method. +// +// args is the pointer to the Service.Args structure +// it gets populated from temporary XML structure +func (c *CodecRequest) ReadRequest(args interface{}) error { + c.err = xml2RPC(c.request.rawxml, args) + return nil +} + +// WriteResponse encodes the response and writes it to the ResponseWriter. +// +// response is the pointer to the Service.Response structure +// it gets encoded into the XML-RPC xml string +func (c *CodecRequest) WriteResponse(w http.ResponseWriter, response interface{}, methodErr error) error { + if c.err == nil { + c.err = methodErr + } + buffer := bytes.NewBuffer(make([]byte, 0)) + if c.err != nil { + var fault Fault + switch c.err.(type) { + case Fault: + fault = c.err.(Fault) + default: + fault = FaultApplicationError + fault.String += fmt.Sprintf(": %v", c.err) + } + fault2XML(fault, buffer) + } else { + rpcResponse2XML(response, buffer) + } + + w.Header().Set("Content-Type", "text/xml; charset=utf-8") + buffer.WriteTo(w) + return nil +} diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/xml2rpc.go b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/xml2rpc.go new file mode 100644 index 000000000..48b08536f --- /dev/null +++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/xml2rpc.go @@ -0,0 +1,219 @@ +// Copyright 2013 Ivan Danyliuk +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package xml + +import ( + "bytes" + "encoding/base64" + "encoding/xml" + "fmt" + "reflect" + "strconv" + "time" + "unicode" + "unicode/utf8" + + "github.com/rogpeppe/go-charset/charset" + _ "github.com/rogpeppe/go-charset/data" +) + +// Types used for unmarshalling +type response struct { + Name xml.Name `xml:"methodResponse"` + Params []param `xml:"params>param"` + Fault faultValue `xml:"fault,omitempty"` +} + +type param struct { + Value value `xml:"value"` +} + +type value struct { + Array []value `xml:"array>data>value"` + Struct []member `xml:"struct>member"` + String string `xml:"string"` + Int string `xml:"int"` + Int4 string `xml:"i4"` + Double string `xml:"double"` + Boolean string `xml:"boolean"` + DateTime string `xml:"dateTime.iso8601"` + Base64 string `xml:"base64"` + Raw string `xml:",innerxml"` // the value can be defualt string +} + +type member struct { + Name string `xml:"name"` + Value value `xml:"value"` +} + +func xml2RPC(xmlraw string, rpc interface{}) error { + // Unmarshal raw XML into the temporal structure + var ret response + decoder := xml.NewDecoder(bytes.NewReader([]byte(xmlraw))) + decoder.CharsetReader = charset.NewReader + err := decoder.Decode(&ret) + if err != nil { + return FaultDecode + } + + if !ret.Fault.IsEmpty() { + return getFaultResponse(ret.Fault) + } + + // Now, convert temporal structure into the + // passed rpc variable, according to it's structure + fieldNum := reflect.TypeOf(rpc).Elem().NumField() + //for i, param := range ret.Params { + for i := 0; i < fieldNum; i += 1 { + field := reflect.ValueOf(rpc).Elem().Field(i) + if len(ret.Params) > i { + err = value2Field(ret.Params[i].Value, &field) + } else if reflect.TypeOf(rpc).Elem().Field(i).Tag.Get("default") != "" { + err = value2Field(createValue(reflect.TypeOf(rpc).Elem().Field(i).Type.Kind(), reflect.TypeOf(rpc).Elem().Field(i).Tag.Get("default")), &field) + } + if err != nil { + return err + } + } + + return nil +} + +func createValue(kind reflect.Kind, val string) value { + v := value{} + if kind == reflect.Bool { + v.Boolean = val + } else if kind == reflect.Int { + v.Int = val + } + return v +} + +// getFaultResponse converts faultValue to Fault. 
+func getFaultResponse(fault faultValue) Fault { + var ( + code int + str string + ) + + for _, field := range fault.Value.Struct { + if field.Name == "faultCode" { + code, _ = strconv.Atoi(field.Value.Int) + } else if field.Name == "faultString" { + str = field.Value.String + if str == "" { + str = field.Value.Raw + } + } + } + + return Fault{Code: code, String: str} +} + +func value2Field(value value, field *reflect.Value) error { + if !field.CanSet() { + return FaultApplicationError + } + + var ( + err error + val interface{} + ) + + switch { + case value.Int != "": + val, _ = strconv.Atoi(value.Int) + case value.Int4 != "": + val, _ = strconv.Atoi(value.Int4) + case value.Double != "": + val, _ = strconv.ParseFloat(value.Double, 64) + case value.String != "": + val = value.String + case value.Boolean != "": + val = xml2Bool(value.Boolean) + case value.DateTime != "": + val, err = xml2DateTime(value.DateTime) + case value.Base64 != "": + val, err = xml2Base64(value.Base64) + case len(value.Struct) != 0: + if field.Kind() != reflect.Struct { + fault := FaultInvalidParams + fault.String += fmt.Sprintf("structure fields mismatch: %s != %s", field.Kind(), reflect.Struct.String()) + return fault + } + s := value.Struct + for i := 0; i < len(s); i++ { + // Uppercase first letter for field name to deal with + // methods in lowercase, which cannot be used + field_name := uppercaseFirst(s[i].Name) + f := field.FieldByName(field_name) + err = value2Field(s[i].Value, &f) + } + case len(value.Array) != 0: + a := value.Array + f := *field + slice := reflect.MakeSlice(reflect.TypeOf(f.Interface()), + len(a), len(a)) + for i := 0; i < len(a); i++ { + item := slice.Index(i) + err = value2Field(a[i], &item) + } + f = reflect.AppendSlice(f, slice) + val = f.Interface() + + default: + // value field is default to string, see http://en.wikipedia.org/wiki/XML-RPC#Data_types + // also can be + if value.Raw != "" { + val = value.Raw + } + } + + if val != nil { + if reflect.TypeOf(val) != reflect.TypeOf(field.Interface()) { + fault := FaultInvalidParams + fault.String += fmt.Sprintf(": fields type mismatch: %s != %s", + reflect.TypeOf(val), + reflect.TypeOf(field.Interface())) + return fault + } + + field.Set(reflect.ValueOf(val)) + } + + return err +} + +func xml2Bool(value string) bool { + var b bool + switch value { + case "1", "true", "TRUE", "True": + b = true + case "0", "false", "FALSE", "False": + b = false + } + return b +} + +func xml2DateTime(value string) (time.Time, error) { + var ( + year, month, day int + hour, minute, second int + ) + _, err := fmt.Sscanf(value, "%04d%02d%02dT%02d:%02d:%02d", + &year, &month, &day, + &hour, &minute, &second) + t := time.Date(year, time.Month(month), day, hour, minute, second, 0, time.Local) + return t, err +} + +func xml2Base64(value string) ([]byte, error) { + return base64.StdEncoding.DecodeString(value) +} + +func uppercaseFirst(in string) (out string) { + r, n := utf8.DecodeRuneInString(in) + return string(unicode.ToUpper(r)) + in[n:] +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/big5.go b/vendor/github.com/rogpeppe/go-charset/charset/big5.go new file mode 100644 index 000000000..e01fa1afd --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/big5.go @@ -0,0 +1,88 @@ +package charset + +import ( + "fmt" + "unicode/utf8" +) + +func init() { + registerClass("big5", fromBig5, nil) +} + +// Big5 consists of 89 fonts of 157 chars each +const ( + big5Max = 13973 + big5Font = 157 + big5Data = "big5.dat" +) + +type translateFromBig5 
struct { + font int + scratch []byte + big5map []rune +} + +func (p *translateFromBig5) Translate(data []byte, eof bool) (int, []byte, error) { + p.scratch = p.scratch[:0] + n := 0 + for len(data) > 0 { + c := int(data[0]) + data = data[1:] + n++ + if p.font == -1 { + // idle state + if c >= 0xa1 { + p.font = c + continue + } + if c == 26 { + c = '\n' + } + continue + } + f := p.font + p.font = -1 + r := utf8.RuneError + switch { + case c >= 64 && c <= 126: + c -= 64 + case c >= 161 && c <= 254: + c = c - 161 + 63 + default: + // bad big5 char + f = 255 + } + if f <= 254 { + f -= 161 + ix := f*big5Font + c + if ix < len(p.big5map) { + r = p.big5map[ix] + } + if r == -1 { + r = utf8.RuneError + } + } + p.scratch = appendRune(p.scratch, r) + } + return n, p.scratch, nil +} + +type big5Key bool + +func fromBig5(arg string) (Translator, error) { + big5map, err := cache(big5Key(false), func() (interface{}, error) { + data, err := readFile(big5Data) + if err != nil { + return nil, fmt.Errorf("charset: cannot open big5 data file: %v", err) + } + big5map := []rune(string(data)) + if len(big5map) != big5Max { + return nil, fmt.Errorf("charset: corrupt big5 data") + } + return big5map, nil + }) + if err != nil { + return nil, err + } + return &translateFromBig5{big5map: big5map.([]rune), font: -1}, nil +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/charset.go b/vendor/github.com/rogpeppe/go-charset/charset/charset.go new file mode 100644 index 000000000..a7af30ee6 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/charset.go @@ -0,0 +1,301 @@ +// The charset package implements translation between character sets. +// It uses Unicode as the intermediate representation. +// Because it can be large, the character set data is separated +// from the charset package. It can be embedded in the Go +// executable by importing the data package: +// +// import _ "code.google.com/p/go-charset/data" +// +// It can also made available in a data directory (by settting CharsetDir). +package charset + +import ( + "io" + "strings" + "unicode/utf8" +) + +// Charset holds information about a given character set. +type Charset struct { + Name string // Canonical name of character set. + Aliases []string // Known aliases. + Desc string // Description. + NoFrom bool // Not possible to translate from this charset. + NoTo bool // Not possible to translate to this charset. +} + +// Translator represents a character set converter. +// The Translate method translates the given data, +// and returns the number of bytes of data consumed, +// a slice containing the converted data (which may be +// overwritten on the next call to Translate), and any +// conversion error. If eof is true, the data represents +// the final bytes of the input. +type Translator interface { + Translate(data []byte, eof bool) (n int, cdata []byte, err error) +} + +// A Factory can be used to make character set translators. +type Factory interface { + // TranslatorFrom creates a translator that will translate from the named character + // set to UTF-8. + TranslatorFrom(name string) (Translator, error) // Create a Translator from this character set to. + + // TranslatorTo creates a translator that will translate from UTF-8 to the named character set. + TranslatorTo(name string) (Translator, error) // Create a Translator To this character set. + + // Names returns all the character set names accessibile through the factory. + Names() []string + + // Info returns information on the named character set. 
It returns nil if the + // factory doesn't recognise the given name. + Info(name string) *Charset +} + +var factories = []Factory{localFactory{}} + +// Register registers a new Factory which will be consulted when NewReader +// or NewWriter needs a character set translator for a given name. +func Register(factory Factory) { + factories = append(factories, factory) +} + +// NewReader returns a new Reader that translates from the named +// character set to UTF-8 as it reads r. +func NewReader(charset string, r io.Reader) (io.Reader, error) { + tr, err := TranslatorFrom(charset) + if err != nil { + return nil, err + } + return NewTranslatingReader(r, tr), nil +} + +// NewWriter returns a new WriteCloser writing to w. It converts writes +// of UTF-8 text into writes on w of text in the named character set. +// The Close is necessary to flush any remaining partially translated +// characters to the output. +func NewWriter(charset string, w io.Writer) (io.WriteCloser, error) { + tr, err := TranslatorTo(charset) + if err != nil { + return nil, err + } + return NewTranslatingWriter(w, tr), nil +} + +// Info returns information about a character set, or nil +// if the character set is not found. +func Info(name string) *Charset { + for _, f := range factories { + if info := f.Info(name); info != nil { + return info + } + } + return nil +} + +// Names returns the canonical names of all supported character sets, in alphabetical order. +func Names() []string { + // TODO eliminate duplicates + var names []string + for _, f := range factories { + names = append(names, f.Names()...) + } + return names +} + +// TranslatorFrom returns a translator that will translate from +// the named character set to UTF-8. +func TranslatorFrom(charset string) (Translator, error) { + var err error + var tr Translator + for _, f := range factories { + tr, err = f.TranslatorFrom(charset) + if err == nil { + break + } + } + if tr == nil { + return nil, err + } + return tr, nil +} + +// TranslatorTo returns a translator that will translate from UTF-8 +// to the named character set. +func TranslatorTo(charset string) (Translator, error) { + var err error + var tr Translator + for _, f := range factories { + tr, err = f.TranslatorTo(charset) + if err == nil { + break + } + } + if tr == nil { + return nil, err + } + return tr, nil +} + +func normalizedChar(c rune) rune { + switch { + case c >= 'A' && c <= 'Z': + c = c - 'A' + 'a' + case c == '_': + c = '-' + } + return c +} + +// NormalisedName returns s with all Roman capitals +// mapped to lower case, and '_' mapped to '-' +func NormalizedName(s string) string { + return strings.Map(normalizedChar, s) +} + +type translatingWriter struct { + w io.Writer + tr Translator + buf []byte // unconsumed data from writer. +} + +// NewTranslatingWriter returns a new WriteCloser writing to w. +// It passes the written bytes through the given Translator. +func NewTranslatingWriter(w io.Writer, tr Translator) io.WriteCloser { + return &translatingWriter{w: w, tr: tr} +} + +func (w *translatingWriter) Write(data []byte) (rn int, rerr error) { + wdata := data + if len(w.buf) > 0 { + w.buf = append(w.buf, data...) + wdata = w.buf + } + n, cdata, err := w.tr.Translate(wdata, false) + if err != nil { + // TODO + } + if n > 0 { + _, err = w.w.Write(cdata) + if err != nil { + return 0, err + } + } + w.buf = w.buf[:0] + if n < len(wdata) { + w.buf = append(w.buf, wdata[n:]...) 
+ } + return len(data), nil +} + +func (p *translatingWriter) Close() error { + for { + n, data, err := p.tr.Translate(p.buf, true) + p.buf = p.buf[n:] + if err != nil { + // TODO + } + // If the Translator produces no data + // at EOF, then assume that it never will. + if len(data) == 0 { + break + } + n, err = p.w.Write(data) + if err != nil { + return err + } + if n < len(data) { + return io.ErrShortWrite + } + if len(p.buf) == 0 { + break + } + } + return nil +} + +type translatingReader struct { + r io.Reader + tr Translator + cdata []byte // unconsumed data from converter. + rdata []byte // unconverted data from reader. + err error // final error from reader. +} + +// NewTranslatingReader returns a new Reader that +// translates data using the given Translator as it reads r. +func NewTranslatingReader(r io.Reader, tr Translator) io.Reader { + return &translatingReader{r: r, tr: tr} +} + +func (r *translatingReader) Read(buf []byte) (int, error) { + for { + if len(r.cdata) > 0 { + n := copy(buf, r.cdata) + r.cdata = r.cdata[n:] + return n, nil + } + if r.err == nil { + r.rdata = ensureCap(r.rdata, len(r.rdata)+len(buf)) + n, err := r.r.Read(r.rdata[len(r.rdata):cap(r.rdata)]) + // Guard against non-compliant Readers. + if n == 0 && err == nil { + err = io.EOF + } + r.rdata = r.rdata[0 : len(r.rdata)+n] + r.err = err + } else if len(r.rdata) == 0 { + break + } + nc, cdata, cvterr := r.tr.Translate(r.rdata, r.err != nil) + if cvterr != nil { + // TODO + } + r.cdata = cdata + + // Ensure that we consume all bytes at eof + // if the converter refuses them. + if nc == 0 && r.err != nil { + nc = len(r.rdata) + } + + // Copy unconsumed data to the start of the rdata buffer. + r.rdata = r.rdata[0:copy(r.rdata, r.rdata[nc:])] + } + return 0, r.err +} + +// ensureCap returns s with a capacity of at least n bytes. +// If cap(s) < n, then it returns a new copy of s with the +// required capacity. +func ensureCap(s []byte, n int) []byte { + if n <= cap(s) { + return s + } + // logic adapted from appendslice1 in runtime + m := cap(s) + if m == 0 { + m = n + } else { + for { + if m < 1024 { + m += m + } else { + m += m / 4 + } + if m >= n { + break + } + } + } + t := make([]byte, len(s), m) + copy(t, s) + return t +} + +func appendRune(buf []byte, r rune) []byte { + n := len(buf) + buf = ensureCap(buf, n+utf8.UTFMax) + nu := utf8.EncodeRune(buf[n:n+utf8.UTFMax], r) + return buf[0 : n+nu] +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/codepage.go b/vendor/github.com/rogpeppe/go-charset/charset/codepage.go new file mode 100644 index 000000000..6864c8753 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/codepage.go @@ -0,0 +1,133 @@ +package charset + +import ( + "fmt" + "unicode/utf8" +) + +func init() { + registerClass("cp", fromCodePage, toCodePage) +} + +type translateFromCodePage struct { + byte2rune *[256]rune + scratch []byte +} + +type cpKeyFrom string +type cpKeyTo string + +func (p *translateFromCodePage) Translate(data []byte, eof bool) (int, []byte, error) { + p.scratch = ensureCap(p.scratch, len(data)*utf8.UTFMax)[:0] + buf := p.scratch + for _, x := range data { + r := p.byte2rune[x] + if r < utf8.RuneSelf { + buf = append(buf, byte(r)) + continue + } + size := utf8.EncodeRune(buf[len(buf):cap(buf)], r) + buf = buf[0 : len(buf)+size] + } + return len(data), buf, nil +} + +type toCodePageInfo struct { + rune2byte map[rune]byte + // same gives the number of runes at start of code page that map exactly to + // unicode. 
+ same rune +} + +type translateToCodePage struct { + toCodePageInfo + scratch []byte +} + +func (p *translateToCodePage) Translate(data []byte, eof bool) (int, []byte, error) { + p.scratch = ensureCap(p.scratch, len(data)) + buf := p.scratch[:0] + + for i := 0; i < len(data); { + r := rune(data[i]) + size := 1 + if r >= utf8.RuneSelf { + r, size = utf8.DecodeRune(data[i:]) + if size == 1 && !eof && !utf8.FullRune(data[i:]) { + return i, buf, nil + } + } + + var b byte + if r < p.same { + b = byte(r) + } else { + var ok bool + b, ok = p.rune2byte[r] + if !ok { + b = '?' + } + } + buf = append(buf, b) + i += size + } + return len(data), buf, nil +} + +func fromCodePage(arg string) (Translator, error) { + runes, err := cache(cpKeyFrom(arg), func() (interface{}, error) { + data, err := readFile(arg) + if err != nil { + return nil, err + } + runes := []rune(string(data)) + if len(runes) != 256 { + return nil, fmt.Errorf("charset: %q has wrong rune count (%d)", arg, len(runes)) + } + r := new([256]rune) + copy(r[:], runes) + return r, nil + }) + if err != nil { + return nil, err + } + return &translateFromCodePage{byte2rune: runes.(*[256]rune)}, nil +} + +func toCodePage(arg string) (Translator, error) { + m, err := cache(cpKeyTo(arg), func() (interface{}, error) { + data, err := readFile(arg) + if err != nil { + return nil, err + } + + info := toCodePageInfo{ + rune2byte: make(map[rune]byte), + same: 256, + } + atStart := true + i := rune(0) + for _, r := range string(data) { + if atStart { + if r == i { + i++ + continue + } + info.same = i + atStart = false + } + info.rune2byte[r] = byte(i) + i++ + } + // TODO fix tables + // fmt.Printf("%s, same = %d\n", arg, info.same) + if i != 256 { + return nil, fmt.Errorf("charset: %q has wrong rune count (%d)", arg, i) + } + return info, nil + }) + if err != nil { + return nil, err + } + return &translateToCodePage{toCodePageInfo: m.(toCodePageInfo)}, nil +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/cp932.go b/vendor/github.com/rogpeppe/go-charset/charset/cp932.go new file mode 100644 index 000000000..9f46262ba --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/cp932.go @@ -0,0 +1,195 @@ +package charset + +import ( + "fmt" + "unicode/utf8" +) + +func init() { + registerClass("cp932", fromCP932, nil) +} + +// encoding details +// (Traditional) Shift-JIS +// +// 00..1f control characters +// 20 space +// 21..7f JIS X 0201:1976/1997 roman (see notes) +// 80 undefined +// 81..9f lead byte of JIS X 0208-1983 or JIS X 0202:1990/1997 +// a0 undefined +// a1..df JIS X 0201:1976/1997 katakana +// e0..ea lead byte of JIS X 0208-1983 or JIS X 0202:1990/1997 +// eb..ff undefined +// +// CP932 (windows-31J) +// +// this encoding scheme extends Shift-JIS in the following way +// +// eb..ec undefined (marked as lead bytes - see notes below) +// ed..ee lead byte of NEC-selected IBM extended characters +// ef undefined (marked as lead byte - see notes below) +// f0..f9 lead byte of User defined GAIJI (see note below) +// fa..fc lead byte of IBM extended characters +// fd..ff undefined +// +// +// Notes +// +// JISX 0201:1976/1997 roman +// this is the same as ASCII but with 0x5c (ASCII code for '\') +// representing the Yen currency symbol '¥' (U+00a5) +// This mapping is contentious, some conversion packages implent it +// others do not. 
+// The mapping files from The Unicode Consortium show cp932 mapping +// plain ascii in the range 00..7f whereas shift-jis maps 0x5c ('\') to the yen +// symbol (¥) and 0x7e ('~') to overline (¯) +// +// CP932 double-byte character codes: +// +// eb-ec, ef, f0-f9: +// Marked as DBCS LEAD BYTEs in the unicode mapping data +// obtained from: +// https://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP932.TXT +// +// but there are no defined mappings for codes in this range. +// It is not clear whether or not an implementation should +// consume one or two bytes before emitting an error char. + +const ( + kanaPages = 1 + kanaPageSize = 63 + kanaChar0 = 0xa1 + + cp932Pages = 45 // 81..84, 87..9f, e0..ea, ed..ee, fa..fc + cp932PageSize = 189 // 40..fc (including 7f) + cp932Char0 = 0x40 +) + +type jisTables struct { + page0 [256]rune + dbcsoff [256]int + cp932 []rune +} + +type translateFromCP932 struct { + tables *jisTables + scratch []byte +} + +func (p *translateFromCP932) Translate(data []byte, eof bool) (int, []byte, error) { + tables := p.tables + p.scratch = p.scratch[:0] + n := 0 + for i := 0; i < len(data); i++ { + b := data[i] + r := tables.page0[b] + if r != -1 { + p.scratch = appendRune(p.scratch, r) + n++ + continue + } + // DBCS + i++ + if i >= len(data) { + break + } + pnum := tables.dbcsoff[b] + ix := int(data[i]) - cp932Char0 + if pnum == -1 || ix < 0 || ix >= cp932PageSize { + r = utf8.RuneError + } else { + r = tables.cp932[pnum*cp932PageSize+ix] + } + p.scratch = appendRune(p.scratch, r) + n += 2 + } + return n, p.scratch, nil +} + +type cp932Key bool + +func fromCP932(arg string) (Translator, error) { + shiftJIS := arg == "shiftjis" + tables, err := cache(cp932Key(shiftJIS), func() (interface{}, error) { + tables := new(jisTables) + kana, err := jisGetMap("jisx0201kana.dat", kanaPageSize, kanaPages) + if err != nil { + return nil, err + } + tables.cp932, err = jisGetMap("cp932.dat", cp932PageSize, cp932Pages) + if err != nil { + return nil, err + } + + // jisx0201kana is mapped into 0xA1..0xDF + for i := 0; i < kanaPageSize; i++ { + tables.page0[i+kanaChar0] = kana[i] + } + + // 00..7f same as ascii in cp932 + for i := rune(0); i < 0x7f; i++ { + tables.page0[i] = i + } + + if shiftJIS { + // shift-jis uses JIS X 0201 for the ASCII range + // this is the same as ASCII apart from + // 0x5c ('\') maps to yen symbol (¥) and 0x7e ('~') maps to overline (¯) + tables.page0['\\'] = '¥' + tables.page0['~'] = '¯' + } + + // pre-calculate DBCS page numbers to mapping file page numbers + // and mark codes in page0 that are DBCS lead bytes + pnum := 0 + for i := 0x81; i <= 0x84; i++ { + tables.page0[i] = -1 + tables.dbcsoff[i] = pnum + pnum++ + } + for i := 0x87; i <= 0x9f; i++ { + tables.page0[i] = -1 + tables.dbcsoff[i] = pnum + pnum++ + } + for i := 0xe0; i <= 0xea; i++ { + tables.page0[i] = -1 + tables.dbcsoff[i] = pnum + pnum++ + } + if shiftJIS { + return tables, nil + } + // add in cp932 extensions + for i := 0xed; i <= 0xee; i++ { + tables.page0[i] = -1 + tables.dbcsoff[i] = pnum + pnum++ + } + for i := 0xfa; i <= 0xfc; i++ { + tables.page0[i] = -1 + tables.dbcsoff[i] = pnum + pnum++ + } + return tables, nil + }) + + if err != nil { + return nil, err + } + + return &translateFromCP932{tables: tables.(*jisTables)}, nil +} + +func jisGetMap(name string, pgsize, npages int) ([]rune, error) { + data, err := readFile(name) + if err != nil { + return nil, err + } + m := []rune(string(data)) + if len(m) != pgsize*npages { + return nil, fmt.Errorf("%q: incorrect length data", 
name) + } + return m, nil +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/file.go b/vendor/github.com/rogpeppe/go-charset/charset/file.go new file mode 100644 index 000000000..a0c26225e --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/file.go @@ -0,0 +1,40 @@ +package charset + +import ( + "io" + "io/ioutil" + "os" + "path/filepath" +) + +var files = make(map[string]func() (io.ReadCloser, error)) + +// RegisterDataFile registers the existence of a given data +// file with the given name that may be used by a character-set converter. +// It is intended to be used by packages that wish to embed +// data in the executable binary, and should not be +// used normally. +func RegisterDataFile(name string, open func() (io.ReadCloser, error)) { + files[name] = open +} + +// CharsetDir gives the location of the default data file directory. +// This directory will be used for files with names that have not +// been registered with RegisterDataFile. +var CharsetDir = "/usr/local/lib/go-charset/datafiles" + +func readFile(name string) (data []byte, err error) { + var r io.ReadCloser + if open := files[name]; open != nil { + r, err = open() + if err != nil { + return + } + } else { + r, err = os.Open(filepath.Join(CharsetDir, name)) + if err != nil { + return + } + } + return ioutil.ReadAll(r) +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/local.go b/vendor/github.com/rogpeppe/go-charset/charset/local.go new file mode 100644 index 000000000..9776b962f --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/local.go @@ -0,0 +1,162 @@ +package charset + +import ( + "encoding/json" + "fmt" + "os" + "sync" +) + +var ( + readLocalCharsetsOnce sync.Once + localCharsets = make(map[string]*localCharset) +) + +type localCharset struct { + Charset + arg string + *class +} + +// A class of character sets. +// Each class can be instantiated with an argument specified in the config file. +// Many character sets can use a single class. +type class struct { + from, to func(arg string) (Translator, error) +} + +// The set of classes, indexed by class name. +var classes = make(map[string]*class) + +func registerClass(charset string, from, to func(arg string) (Translator, error)) { + classes[charset] = &class{from, to} +} + +type localFactory struct{} + +func (f localFactory) TranslatorFrom(name string) (Translator, error) { + f.init() + name = NormalizedName(name) + cs := localCharsets[name] + if cs == nil { + return nil, fmt.Errorf("character set %q not found", name) + } + if cs.from == nil { + return nil, fmt.Errorf("cannot translate from %q", name) + } + return cs.from(cs.arg) +} + +func (f localFactory) TranslatorTo(name string) (Translator, error) { + f.init() + name = NormalizedName(name) + cs := localCharsets[name] + if cs == nil { + return nil, fmt.Errorf("character set %q not found", name) + } + if cs.to == nil { + return nil, fmt.Errorf("cannot translate to %q", name) + } + return cs.to(cs.arg) +} + +func (f localFactory) Names() []string { + f.init() + var names []string + for name, cs := range localCharsets { + // add names only for non-aliases. + if localCharsets[cs.Name] == cs { + names = append(names, name) + } + } + return names +} + +func (f localFactory) Info(name string) *Charset { + f.init() + lcs := localCharsets[NormalizedName(name)] + if lcs == nil { + return nil + } + // copy the charset info so that callers can't mess with it. 
+ cs := lcs.Charset + return &cs +} + +func (f localFactory) init() { + readLocalCharsetsOnce.Do(readLocalCharsets) +} + +// charsetEntry is the data structure for one entry in the JSON config file. +// If Alias is non-empty, it should be the canonical name of another +// character set; otherwise Class should be the name +// of an entry in classes, and Arg is the argument for +// instantiating it. +type charsetEntry struct { + Aliases []string + Desc string + Class string + Arg string +} + +// readCharsets reads the JSON config file. +// It's done once only, when first needed. +func readLocalCharsets() { + csdata, err := readFile("charsets.json") + if err != nil { + fmt.Fprintf(os.Stderr, "charset: cannot open \"charsets.json\": %v\n", err) + return + } + + var entries map[string]charsetEntry + err = json.Unmarshal(csdata, &entries) + if err != nil { + fmt.Fprintf(os.Stderr, "charset: cannot decode config file: %v\n", err) + } + for name, e := range entries { + class := classes[e.Class] + if class == nil { + continue + } + name = NormalizedName(name) + for i, a := range e.Aliases { + e.Aliases[i] = NormalizedName(a) + } + cs := &localCharset{ + Charset: Charset{ + Name: name, + Aliases: e.Aliases, + Desc: e.Desc, + NoFrom: class.from == nil, + NoTo: class.to == nil, + }, + arg: e.Arg, + class: class, + } + localCharsets[cs.Name] = cs + for _, a := range cs.Aliases { + localCharsets[a] = cs + } + } +} + +// A general cache store that local character set translators +// can use for persistent storage of data. +var ( + cacheMutex sync.Mutex + cacheStore = make(map[interface{}]interface{}) +) + +func cache(key interface{}, f func() (interface{}, error)) (interface{}, error) { + cacheMutex.Lock() + defer cacheMutex.Unlock() + if x := cacheStore[key]; x != nil { + return x, nil + } + x, err := f() + if err != nil { + return nil, err + } + cacheStore[key] = x + return x, err +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/utf16.go b/vendor/github.com/rogpeppe/go-charset/charset/utf16.go new file mode 100644 index 000000000..ebde794c9 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/utf16.go @@ -0,0 +1,110 @@ +package charset + +import ( + "encoding/binary" + "errors" + "unicode/utf8" +) + +func init() { + registerClass("utf16", fromUTF16, toUTF16) +} + +type translateFromUTF16 struct { + first bool + endian binary.ByteOrder + scratch []byte +} + +func (p *translateFromUTF16) Translate(data []byte, eof bool) (int, []byte, error) { + data = data[0 : len(data)&^1] // round to even number of bytes. 
+ if len(data) < 2 { + return 0, nil, nil + } + n := 0 + if p.first && p.endian == nil { + switch binary.BigEndian.Uint16(data) { + case 0xfeff: + p.endian = binary.BigEndian + data = data[2:] + n += 2 + case 0xfffe: + p.endian = binary.LittleEndian + data = data[2:] + n += 2 + default: + p.endian = guessEndian(data) + } + p.first = false + } + + p.scratch = p.scratch[:0] + for ; len(data) > 0; data = data[2:] { + p.scratch = appendRune(p.scratch, rune(p.endian.Uint16(data))) + n += 2 + } + return n, p.scratch, nil +} + +func guessEndian(data []byte) binary.ByteOrder { + // XXX TODO + return binary.LittleEndian +} + +type translateToUTF16 struct { + first bool + endian binary.ByteOrder + scratch []byte +} + +func (p *translateToUTF16) Translate(data []byte, eof bool) (int, []byte, error) { + p.scratch = ensureCap(p.scratch[:0], (len(data)+1)*2) + if p.first { + p.scratch = p.scratch[0:2] + p.endian.PutUint16(p.scratch, 0xfeff) + p.first = false + } + n := 0 + for len(data) > 0 { + if !utf8.FullRune(data) && !eof { + break + } + r, size := utf8.DecodeRune(data) + // TODO if r > 65535? + + slen := len(p.scratch) + p.scratch = p.scratch[0 : slen+2] + p.endian.PutUint16(p.scratch[slen:], uint16(r)) + data = data[size:] + n += size + } + return n, p.scratch, nil +} + +func getEndian(arg string) (binary.ByteOrder, error) { + switch arg { + case "le": + return binary.LittleEndian, nil + case "be": + return binary.BigEndian, nil + case "": + return nil, nil + } + return nil, errors.New("charset: unknown utf16 endianness") +} + +func fromUTF16(arg string) (Translator, error) { + endian, err := getEndian(arg) + if err != nil { + return nil, err + } + return &translateFromUTF16{first: true, endian: endian}, nil +} + +func toUTF16(arg string) (Translator, error) { + endian, err := getEndian(arg) + if err != nil { + return nil, err + } + return &translateToUTF16{first: false, endian: endian}, nil +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/utf8.go b/vendor/github.com/rogpeppe/go-charset/charset/utf8.go new file mode 100644 index 000000000..23980b334 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/utf8.go @@ -0,0 +1,51 @@ +package charset + +import ( + "unicode/utf8" +) + +func init() { + registerClass("utf8", toUTF8, toUTF8) +} + +type translateToUTF8 struct { + scratch []byte +} + +var errorBytes = []byte(string(utf8.RuneError)) + +const errorRuneLen = len(string(utf8.RuneError)) + +func (p *translateToUTF8) Translate(data []byte, eof bool) (int, []byte, error) { + p.scratch = ensureCap(p.scratch, (len(data))*errorRuneLen) + buf := p.scratch[:0] + for i := 0; i < len(data); { + // fast path for ASCII + if b := data[i]; b < utf8.RuneSelf { + buf = append(buf, b) + i++ + continue + } + _, size := utf8.DecodeRune(data[i:]) + if size == 1 { + if !eof && !utf8.FullRune(data) { + // When DecodeRune has converted only a single + // byte, we know there must be some kind of error + // because we know the byte's not ASCII. + // If we aren't at EOF, and it's an incomplete + // rune encoding, then we return to process + // the final bytes in a subsequent call. + return i, buf, nil + } + buf = append(buf, errorBytes...) + } else { + buf = append(buf, data[i:i+size]...) 
+ } + i += size + } + return len(data), buf, nil +} + +func toUTF8(arg string) (Translator, error) { + return new(translateToUTF8), nil +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_big5.dat.go b/vendor/github.com/rogpeppe/go-charset/data/data_big5.dat.go new file mode 100644 index 000000000..398ebe339 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_big5.dat.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("big5.dat", func() (io.ReadCloser, error) { + r := strings.NewReader("\u3000,、。.・;:?!︰…‥﹐﹑﹒·﹔﹕﹖﹗︲–︱—︳�︴﹏()︵︶{}︷︸〔〕︹︺【】︻︼《》︽︾〈〉︿﹀「」﹁﹂『』﹃﹄﹙﹚﹛﹜﹝﹞‘’“”〝〞‵′#&*※§〃○●△▲◎☆★◇◆□■▽▼㊣℅‾�_�﹉﹊﹍﹎﹋﹌#&*+-×÷±√<>=≤≥≠∞≒≡﹢﹣﹤﹥﹦∼∩∪⊥∠∟⊿㏒㏑∫∮∵∴♀♂♁☉↑↓←→↖↗↙↘∥∣��/\$¥〒¢£%@℃℉$%@㏕㎜㎝㎞㏎㎡㎎㎏㏄°兙兛兞兝兡兣嗧瓩糎▁▂▃▄▅▆▇█▏▎▍▌▋▊▉┼┴┬┤├▔─│▕┌┐└┘╭╮╰╯═╞╪╡◢◣◥◤╱╲╳0123456789ⅠⅡⅢⅣⅤⅥⅦⅧⅨⅩ〡〢〣〤〥〦〧〨〩�卄�ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩαβγδεζηθικλμνξοπρστυφχψωㄅㄆㄇㄈㄉㄊㄋㄌㄍㄎㄏㄐㄑㄒㄓㄔㄕㄖㄗㄘㄙㄚㄛㄜㄝㄞㄟㄠㄡㄢㄣㄤㄥㄦㄧㄨㄩ˙ˉˊˇˋ���������������������������������������������������������������一乙丁七乃九了二人儿入八几刀刁力匕十卜又三下丈上丫丸凡久么也乞于亡兀刃勺千叉口土士夕大女子孑孓寸小尢尸山川工己已巳巾干廾弋弓才丑丐不中丰丹之尹予云井互五亢仁什仃仆仇仍今介仄元允內六兮公冗凶分切刈勻勾勿化匹午升卅卞厄友及反壬天夫太夭孔少尤尺屯巴幻廿弔引心戈戶手扎支文斗斤方日曰月木欠止歹毋比毛氏水火爪父爻片牙牛犬王丙世丕且丘主乍乏乎以付仔仕他仗代令仙仞充兄冉冊冬凹出凸刊加功包匆北匝仟半卉卡占卯卮去可古右召叮叩叨叼司叵叫另只史叱台句叭叻四囚外央失奴奶孕它尼巨巧左市布平幼弁弘弗必戊打扔扒扑斥旦朮本未末札正母民氐永汁汀氾犯玄玉瓜瓦甘生用甩田由甲申疋白皮皿目矛矢石示禾穴立丞丟乒乓乩亙交亦亥仿伉伙伊伕伍伐休伏仲件任仰仳份企伋光兇兆先全共再冰列刑划刎刖劣匈匡匠印危吉吏同吊吐吁吋各向名合吃后吆吒因回囝圳地在圭圬圯圩夙多夷夸妄奸妃好她如妁字存宇守宅安寺尖屹州帆并年式弛忙忖戎戌戍成扣扛托收早旨旬旭曲曳有朽朴朱朵次此死氖汝汗汙江池汐汕污汛汍汎灰牟牝百竹米糸缶羊羽老考而耒耳聿肉肋肌臣自至臼舌舛舟艮色艾虫血行衣西阡串亨位住佇佗佞伴佛何估佐佑伽伺伸佃佔似但佣作你伯低伶余佝佈佚兌克免兵冶冷別判利刪刨劫助努劬匣即卵吝吭吞吾否呎吧呆呃吳呈呂君吩告吹吻吸吮吵吶吠吼呀吱含吟听囪困囤囫坊坑址坍均坎圾坐坏圻壯夾妝妒妨妞妣妙妖妍妤妓妊妥孝孜孚孛完宋宏尬局屁尿尾岐岑岔岌巫希序庇床廷弄弟彤形彷役忘忌志忍忱快忸忪戒我抄抗抖技扶抉扭把扼找批扳抒扯折扮投抓抑抆改攻攸旱更束李杏材村杜杖杞杉杆杠杓杗步每求汞沙沁沈沉沅沛汪決沐汰沌汨沖沒汽沃汲汾汴沆汶沍沔沘沂灶灼災灸牢牡牠狄狂玖甬甫男甸皂盯矣私秀禿究系罕肖肓肝肘肛肚育良芒芋芍見角言谷豆豕貝赤走足身車辛辰迂迆迅迄巡邑邢邪邦那酉釆里防阮阱阪阬並乖乳事些亞享京佯依侍佳使佬供例來侃佰併侈佩佻侖佾侏侑佺兔兒兕兩具其典冽函刻券刷刺到刮制剁劾劻卒協卓卑卦卷卸卹取叔受味呵咖呸咕咀呻呷咄咒咆呼咐呱呶和咚呢周咋命咎固垃坷坪坩坡坦坤坼夜奉奇奈奄奔妾妻委妹妮姑姆姐姍始姓姊妯妳姒姅孟孤季宗定官宜宙宛尚屈居屆岷岡岸岩岫岱岳帘帚帖帕帛帑幸庚店府底庖延弦弧弩往征彿彼忝忠忽念忿怏怔怯怵怖怪怕怡性怩怫怛或戕房戾所承拉拌拄抿拂抹拒招披拓拔拋拈抨抽押拐拙拇拍抵拚抱拘拖拗拆抬拎放斧於旺昔易昌昆昂明昀昏昕昊昇服朋杭枋枕東果杳杷枇枝林杯杰板枉松析杵枚枓杼杪杲欣武歧歿氓氛泣注泳沱泌泥河沽沾沼波沫法泓沸泄油況沮泗泅泱沿治泡泛泊沬泯泜泖泠炕炎炒炊炙爬爭爸版牧物狀狎狙狗狐玩玨玟玫玥甽疝疙疚的盂盲直知矽社祀祁秉秈空穹竺糾罔羌羋者肺肥肢肱股肫肩肴肪肯臥臾舍芳芝芙芭芽芟芹花芬芥芯芸芣芰芾芷虎虱初表軋迎返近邵邸邱邶采金長門阜陀阿阻附陂隹雨青非亟亭亮信侵侯便俠俑俏保促侶俘俟俊俗侮俐俄係俚俎俞侷兗冒冑冠剎剃削前剌剋則勇勉勃勁匍南卻厚叛咬哀咨哎哉咸咦咳哇哂咽咪品哄哈咯咫咱咻咩咧咿囿垂型垠垣垢城垮垓奕契奏奎奐姜姘姿姣姨娃姥姪姚姦威姻孩宣宦室客宥封屎屏屍屋峙峒巷帝帥帟幽庠度建弈弭彥很待徊律徇後徉怒思怠急怎怨恍恰恨恢恆恃恬恫恪恤扁拜挖按拼拭持拮拽指拱拷拯括拾拴挑挂政故斫施既春昭映昧是星昨昱昤曷柿染柱柔某柬架枯柵柩柯柄柑枴柚查枸柏柞柳枰柙柢柝柒歪殃殆段毒毗氟泉洋洲洪流津洌洱洞洗活洽派洶洛泵洹洧洸洩洮洵洎洫炫為炳炬炯炭炸炮炤爰牲牯牴狩狠狡玷珊玻玲珍珀玳甚甭畏界畎畋疫疤疥疢疣癸皆皇皈盈盆盃盅省盹相眉看盾盼眇矜砂研砌砍祆祉祈祇禹禺科秒秋穿突竿竽籽紂紅紀紉紇約紆缸美羿耄耐耍耑耶胖胥胚胃胄背胡胛胎胞胤胝致舢苧范茅苣苛苦茄若茂茉苒苗英茁苜苔苑苞苓苟苯茆虐虹虻虺衍衫要觔計訂訃貞負赴赳趴軍軌述迦迢迪迥迭迫迤迨郊郎郁郃酋酊重閂限陋陌降面革韋韭音頁風飛食首香乘亳倌倍倣俯倦倥俸倩倖倆值借倚倒們俺倀倔倨俱倡個候倘俳修倭倪俾倫倉兼冤冥冢凍凌准凋剖剜剔剛剝匪卿原厝叟哨唐唁唷哼哥哲唆哺唔哩哭員唉哮哪哦唧唇哽唏圃圄埂埔埋埃堉夏套奘奚娑娘娜娟娛娓姬娠娣娩娥娌娉孫屘宰害家宴宮宵容宸射屑展屐峭峽峻峪峨峰島崁峴差席師庫庭座弱徒徑徐恙恣恥恐恕恭恩息悄悟悚悍悔悌悅悖扇拳挈拿捎挾振捕捂捆捏捉挺捐挽挪挫挨捍捌效敉料旁旅時晉晏晃晒晌晅晁書朔朕朗校核案框桓根桂桔栩梳栗桌桑栽柴桐桀格桃株桅栓栘桁殊殉殷氣氧氨氦氤泰浪涕消涇浦浸海浙涓浬涉浮浚浴浩涌涊浹涅浥涔烊烘烤烙烈烏爹特狼狹狽狸狷玆班琉珮珠珪珞畔畝畜畚留疾病症疲疳疽疼疹痂疸皋皰益盍盎眩真眠眨矩砰砧砸砝破砷砥砭砠砟砲祕祐祠祟祖神祝祗祚秤秣秧租秦秩秘窄窈站笆笑粉紡紗紋紊素索純紐紕級紜納紙紛缺罟羔翅翁耆耘耕耙耗耽耿胱脂胰脅胭胴脆胸胳脈能脊胼胯臭臬舀舐航舫舨般芻茫荒荔荊茸荐草茵茴荏茲茹茶茗荀茱茨荃虔蚊蚪蚓蚤蚩蚌蚣蚜衰衷袁袂衽衹記訐討訌訕訊託訓訖訏訑豈豺豹財貢起躬軒軔軏辱送逆迷退迺迴逃追逅迸邕郡郝郢酒配酌釘針釗釜釙閃院陣陡陛陝除陘陞隻飢馬骨高鬥鬲鬼乾偺偽停假偃偌做偉健偶偎偕偵側偷偏倏偯偭兜冕凰剪副勒務勘動匐匏匙匿區匾參曼商啪啦啄啞啡啃啊唱啖問啕唯啤唸售啜唬啣唳啁啗圈國圉域堅堊堆埠埤基堂堵執培夠奢娶婁婉婦婪婀娼婢婚婆婊孰寇寅寄寂宿密尉專將屠屜屝崇崆崎崛崖崢崑崩崔崙崤崧崗巢常帶帳帷康庸庶庵庾張強彗彬彩彫得徙從徘御徠徜恿患悉悠您惋悴惦悽情悻悵惜悼惘惕惆惟悸惚惇戚戛扈掠控捲掖探接捷捧掘措捱掩掉掃掛捫推掄授掙採掬排掏掀捻捩捨捺敝敖救教敗啟敏敘敕敔斜斛斬族旋旌旎晝晚晤晨晦晞曹勗望梁梯梢梓梵桿桶梱梧梗械梃棄梭梆梅梔條梨梟梡梂欲殺毫毬氫涎涼淳淙液淡淌淤添淺清淇淋涯淑涮淞淹涸混淵淅淒渚涵淚淫淘淪深淮淨淆淄涪淬涿淦烹焉焊烽烯爽牽犁猜猛猖猓猙率琅琊球理現琍瓠瓶瓷甜產略畦畢異疏痔痕疵痊痍皎盔盒盛眷眾眼眶眸眺硫硃硎祥票祭移窒窕笠笨笛第符笙笞笮粒粗粕絆絃統紮紹紼絀細紳組累終紲紱缽羞羚翌翎習耜聊聆脯脖脣脫脩脰脤舂舵舷舶船莎莞莘荸莢莖莽莫莒莊莓
莉莠荷荻荼莆莧處彪蛇蛀蚶蛄蚵蛆蛋蚱蚯蛉術袞袈被袒袖袍袋覓規訪訝訣訥許設訟訛訢豉豚販責貫貨貪貧赧赦趾趺軛軟這逍通逗連速逝逐逕逞造透逢逖逛途部郭都酗野釵釦釣釧釭釩閉陪陵陳陸陰陴陶陷陬雀雪雩章竟頂頃魚鳥鹵鹿麥麻傢傍傅備傑傀傖傘傚最凱割剴創剩勞勝勛博厥啻喀喧啼喊喝喘喂喜喪喔喇喋喃喳單喟唾喲喚喻喬喱啾喉喫喙圍堯堪場堤堰報堡堝堠壹壺奠婷媚婿媒媛媧孳孱寒富寓寐尊尋就嵌嵐崴嵇巽幅帽幀幃幾廊廁廂廄弼彭復循徨惑惡悲悶惠愜愣惺愕惰惻惴慨惱愎惶愉愀愒戟扉掣掌描揀揩揉揆揍插揣提握揖揭揮捶援揪換摒揚揹敞敦敢散斑斐斯普晰晴晶景暑智晾晷曾替期朝棺棕棠棘棗椅棟棵森棧棹棒棲棣棋棍植椒椎棉棚楮棻款欺欽殘殖殼毯氮氯氬港游湔渡渲湧湊渠渥渣減湛湘渤湖湮渭渦湯渴湍渺測湃渝渾滋溉渙湎湣湄湲湩湟焙焚焦焰無然煮焜牌犄犀猶猥猴猩琺琪琳琢琥琵琶琴琯琛琦琨甥甦畫番痢痛痣痙痘痞痠登發皖皓皴盜睏短硝硬硯稍稈程稅稀窘窗窖童竣等策筆筐筒答筍筋筏筑粟粥絞結絨絕紫絮絲絡給絢絰絳善翔翕耋聒肅腕腔腋腑腎脹腆脾腌腓腴舒舜菩萃菸萍菠菅萋菁華菱菴著萊菰萌菌菽菲菊萸萎萄菜萇菔菟虛蛟蛙蛭蛔蛛蛤蛐蛞街裁裂袱覃視註詠評詞証詁詔詛詐詆訴診訶詖象貂貯貼貳貽賁費賀貴買貶貿貸越超趁跎距跋跚跑跌跛跆軻軸軼辜逮逵週逸進逶鄂郵鄉郾酣酥量鈔鈕鈣鈉鈞鈍鈐鈇鈑閔閏開閑間閒閎隊階隋陽隅隆隍陲隄雁雅雄集雇雯雲韌項順須飧飪飯飩飲飭馮馭黃黍黑亂傭債傲傳僅傾催傷傻傯僇剿剷剽募勦勤勢勣匯嗟嗨嗓嗦嗎嗜嗇嗑嗣嗤嗯嗚嗡嗅嗆嗥嗉園圓塞塑塘塗塚塔填塌塭塊塢塒塋奧嫁嫉嫌媾媽媼媳嫂媲嵩嵯幌幹廉廈弒彙徬微愚意慈感想愛惹愁愈慎慌慄慍愾愴愧愍愆愷戡戢搓搾搞搪搭搽搬搏搜搔損搶搖搗搆敬斟新暗暉暇暈暖暄暘暍會榔業楚楷楠楔極椰概楊楨楫楞楓楹榆楝楣楛歇歲毀殿毓毽溢溯滓溶滂源溝滇滅溥溘溼溺溫滑準溜滄滔溪溧溴煎煙煩煤煉照煜煬煦煌煥煞煆煨煖爺牒猷獅猿猾瑯瑚瑕瑟瑞瑁琿瑙瑛瑜當畸瘀痰瘁痲痱痺痿痴痳盞盟睛睫睦睞督睹睪睬睜睥睨睢矮碎碰碗碘碌碉硼碑碓硿祺祿禁萬禽稜稚稠稔稟稞窟窠筷節筠筮筧粱粳粵經絹綑綁綏絛置罩罪署義羨群聖聘肆肄腱腰腸腥腮腳腫腹腺腦舅艇蒂葷落萱葵葦葫葉葬葛萼萵葡董葩葭葆虞虜號蛹蜓蜈蜇蜀蛾蛻蜂蜃蜆蜊衙裟裔裙補裘裝裡裊裕裒覜解詫該詳試詩詰誇詼詣誠話誅詭詢詮詬詹詻訾詨豢貊貉賊資賈賄貲賃賂賅跡跟跨路跳跺跪跤跦躲較載軾輊辟農運遊道遂達逼違遐遇遏過遍遑逾遁鄒鄗酬酪酩釉鈷鉗鈸鈽鉀鈾鉛鉋鉤鉑鈴鉉鉍鉅鈹鈿鉚閘隘隔隕雍雋雉雊雷電雹零靖靴靶預頑頓頊頒頌飼飴飽飾馳馱馴髡鳩麂鼎鼓鼠僧僮僥僖僭僚僕像僑僱僎僩兢凳劃劂匱厭嗾嘀嘛嘗嗽嘔嘆嘉嘍嘎嗷嘖嘟嘈嘐嗶團圖塵塾境墓墊塹墅塽壽夥夢夤奪奩嫡嫦嫩嫗嫖嫘嫣孵寞寧寡寥實寨寢寤察對屢嶄嶇幛幣幕幗幔廓廖弊彆彰徹慇愿態慷慢慣慟慚慘慵截撇摘摔撤摸摟摺摑摧搴摭摻敲斡旗旖暢暨暝榜榨榕槁榮槓構榛榷榻榫榴槐槍榭槌榦槃榣歉歌氳漳演滾漓滴漩漾漠漬漏漂漢滿滯漆漱漸漲漣漕漫漯澈漪滬漁滲滌滷熔熙煽熊熄熒爾犒犖獄獐瑤瑣瑪瑰瑭甄疑瘧瘍瘋瘉瘓盡監瞄睽睿睡磁碟碧碳碩碣禎福禍種稱窪窩竭端管箕箋筵算箝箔箏箸箇箄粹粽精綻綰綜綽綾綠緊綴網綱綺綢綿綵綸維緒緇綬罰翠翡翟聞聚肇腐膀膏膈膊腿膂臧臺與舔舞艋蓉蒿蓆蓄蒙蒞蒲蒜蓋蒸蓀蓓蒐蒼蓑蓊蜿蜜蜻蜢蜥蜴蜘蝕蜷蜩裳褂裴裹裸製裨褚裯誦誌語誣認誡誓誤說誥誨誘誑誚誧豪貍貌賓賑賒赫趙趕跼輔輒輕輓辣遠遘遜遣遙遞遢遝遛鄙鄘鄞酵酸酷酴鉸銀銅銘銖鉻銓銜銨鉼銑閡閨閩閣閥閤隙障際雌雒需靼鞅韶頗領颯颱餃餅餌餉駁骯骰髦魁魂鳴鳶鳳麼鼻齊億儀僻僵價儂儈儉儅凜劇劈劉劍劊勰厲嘮嘻嘹嘲嘿嘴嘩噓噎噗噴嘶嘯嘰墀墟增墳墜墮墩墦奭嬉嫻嬋嫵嬌嬈寮寬審寫層履嶝嶔幢幟幡廢廚廟廝廣廠彈影德徵慶慧慮慝慕憂慼慰慫慾憧憐憫憎憬憚憤憔憮戮摩摯摹撞撲撈撐撰撥撓撕撩撒撮播撫撚撬撙撢撳敵敷數暮暫暴暱樣樟槨樁樞標槽模樓樊槳樂樅槭樑歐歎殤毅毆漿潼澄潑潦潔澆潭潛潸潮澎潺潰潤澗潘滕潯潠潟熟熬熱熨牖犛獎獗瑩璋璃瑾璀畿瘠瘩瘟瘤瘦瘡瘢皚皺盤瞎瞇瞌瞑瞋磋磅確磊碾磕碼磐稿稼穀稽稷稻窯窮箭箱範箴篆篇篁箠篌糊締練緯緻緘緬緝編緣線緞緩綞緙緲緹罵罷羯翩耦膛膜膝膠膚膘蔗蔽蔚蓮蔬蔭蔓蔑蔣蔡蔔蓬蔥蓿蔆螂蝴蝶蝠蝦蝸蝨蝙蝗蝌蝓衛衝褐複褒褓褕褊誼諒談諄誕請諸課諉諂調誰論諍誶誹諛豌豎豬賠賞賦賤賬賭賢賣賜質賡赭趟趣踫踐踝踢踏踩踟踡踞躺輝輛輟輩輦輪輜輞輥適遮遨遭遷鄰鄭鄧鄱醇醉醋醃鋅銻銷鋪銬鋤鋁銳銼鋒鋇鋰銲閭閱霄霆震霉靠鞍鞋鞏頡頫頜颳養餓餒餘駝駐駟駛駑駕駒駙骷髮髯鬧魅魄魷魯鴆鴉鴃麩麾黎墨齒儒儘儔儐儕冀冪凝劑劓勳噙噫噹噩噤噸噪器噥噱噯噬噢噶壁墾壇壅奮嬝嬴學寰導彊憲憑憩憊懍憶憾懊懈戰擅擁擋撻撼據擄擇擂操撿擒擔撾整曆曉暹曄曇暸樽樸樺橙橫橘樹橄橢橡橋橇樵機橈歙歷氅濂澱澡濃澤濁澧澳激澹澶澦澠澴熾燉燐燒燈燕熹燎燙燜燃燄獨璜璣璘璟璞瓢甌甍瘴瘸瘺盧盥瞠瞞瞟瞥磨磚磬磧禦積穎穆穌穋窺篙簑築篤篛篡篩篦糕糖縊縑縈縛縣縞縝縉縐罹羲翰翱翮耨膳膩膨臻興艘艙蕊蕙蕈蕨蕩蕃蕉蕭蕪蕞螃螟螞螢融衡褪褲褥褫褡親覦諦諺諫諱謀諜諧諮諾謁謂諷諭諳諶諼豫豭貓賴蹄踱踴蹂踹踵輻輯輸輳辨辦遵遴選遲遼遺鄴醒錠錶鋸錳錯錢鋼錫錄錚錐錦錡錕錮錙閻隧隨險雕霎霑霖霍霓霏靛靜靦鞘頰頸頻頷頭頹頤餐館餞餛餡餚駭駢駱骸骼髻髭鬨鮑鴕鴣鴦鴨鴒鴛默黔龍龜優償儡儲勵嚎嚀嚐嚅嚇嚏壕壓壑壎嬰嬪嬤孺尷屨嶼嶺嶽嶸幫彌徽應懂懇懦懋戲戴擎擊擘擠擰擦擬擱擢擭斂斃曙曖檀檔檄檢檜櫛檣橾檗檐檠歜殮毚氈濘濱濟濠濛濤濫濯澀濬濡濩濕濮濰燧營燮燦燥燭燬燴燠爵牆獰獲璩環璦璨癆療癌盪瞳瞪瞰瞬瞧瞭矯磷磺磴磯礁禧禪穗窿簇簍篾篷簌篠糠糜糞糢糟糙糝縮績繆縷縲繃縫總縱繅繁縴縹繈縵縿縯罄翳翼聱聲聰聯聳臆臃膺臂臀膿膽臉膾臨舉艱薪薄蕾薜薑薔薯薛薇薨薊虧蟀蟑螳蟒蟆螫螻螺蟈蟋褻褶襄褸褽覬謎謗謙講謊謠謝謄謐豁谿豳賺賽購賸賻趨蹉蹋蹈蹊轄輾轂轅輿避遽還邁邂邀鄹醣醞醜鍍鎂錨鍵鍊鍥鍋錘鍾鍬鍛鍰鍚鍔闊闋闌闈闆隱隸雖霜霞鞠韓顆颶餵騁駿鮮鮫鮪鮭鴻鴿麋黏點黜黝黛鼾齋叢嚕嚮壙壘嬸彝懣戳擴擲擾攆擺擻擷斷曜朦檳檬櫃檻檸櫂檮檯歟歸殯瀉瀋濾瀆濺瀑瀏燻燼燾燸獷獵璧璿甕癖癘癒瞽瞿瞻瞼礎禮穡穢穠竄竅簫簧簪簞簣簡糧織繕繞繚繡繒繙罈翹翻職聶臍臏舊藏薩藍藐藉薰薺薹薦蟯蟬蟲蟠覆覲觴謨謹謬謫豐贅蹙蹣蹦蹤蹟蹕軀轉轍邇邃邈醫醬釐鎔鎊鎖鎢鎳鎮鎬鎰鎘鎚鎗闔闖闐闕離雜雙雛雞霤鞣鞦鞭韹額顏題顎顓颺餾餿餽餮馥騎髁鬃鬆魏魎魍鯊鯉鯽鯈鯀鵑鵝鵠黠鼕鼬儳嚥壞壟壢寵龐廬懲懷懶懵攀攏曠曝櫥櫝櫚櫓瀛瀟瀨瀚瀝瀕瀘爆爍牘犢獸獺璽瓊瓣疇疆癟癡矇礙禱穫穩簾簿簸簽簷籀繫繭繹繩繪羅繳羶羹羸臘藩藝藪藕藤藥藷蟻蠅蠍蟹蟾襠襟襖襞譁譜識證譚譎譏譆譙贈贊蹼蹲躇蹶蹬蹺蹴轔轎辭邊邋醱醮鏡鏑鏟鏃鏈鏜鏝鏖鏢鏍鏘鏤鏗鏨關隴難霪霧靡韜韻類願顛颼饅饉騖騙鬍鯨鯧鯖鯛鶉鵡鵲鵪鵬麒麗麓麴勸嚨嚷嚶嚴嚼壤孀孃孽寶巉懸懺攘攔攙曦朧櫬瀾瀰瀲爐獻瓏癢癥礦礪礬礫竇競籌籃籍糯糰辮繽繼纂罌耀臚艦藻藹蘑藺蘆蘋蘇蘊蠔蠕襤覺觸議譬警譯譟譫贏贍躉躁躅躂醴釋鐘鐃鏽闡霰飄饒饑馨騫騰騷騵鰓鰍鹹麵黨鼯齟齣齡儷儸囁囀囂夔屬巍懼懾攝攜斕曩櫻欄櫺殲灌爛犧瓖瓔癩矓籐纏續羼蘗蘭蘚蠣蠢蠡蠟襪襬覽譴護譽贓躊躍躋轟辯醺鐮鐳鐵鐺鐸鐲鐫闢霸霹露響顧顥饗驅驃驀騾髏魔魑鰭鰥鶯鶴鷂鶸麝黯鼙齜齦齧儼儻囈囊囉孿巔巒彎懿攤權歡灑灘玀瓤疊癮癬禳籠籟聾聽臟襲襯觼讀贖贗躑躓轡酈鑄鑑鑒霽霾韃韁顫饕驕驍髒鬚鱉鰱鰾鰻鷓鷗鼴齬齪龔囌巖戀攣攫攪曬欐瓚竊籤籣籥纓纖纔臢蘸蘿蠱變邐邏鑣鑠鑤靨顯饜驚驛驗髓體髑鱔鱗鱖鷥麟黴囑壩攬灞癱癲矗罐羈蠶蠹衢讓讒讖艷贛釀鑪靂靈靄韆顰驟鬢魘鱟鷹鷺鹼鹽鼇齷齲廳欖灣籬籮蠻觀躡釁鑲鑰顱饞髖鬣黌灤矚讚鑷韉驢驥纜讜躪釅鑽鑾鑼鱷鱸黷豔鑿鸚爨驪鬱鸛鸞籲ヾゝゞ々ぁあぃいぅうぇえぉおかがきぎくぐけげこごさざしじすずせぜそぞただちぢっつづてでとどなにぬねのはばぱひびぴふぶぷへべぺほぼぽまみむめもゃやゅゆょよらりるれろゎわゐゑをんァアィイゥウェエォオカガキギクグケゲコゴサザシジスズセゼソゾタダチヂッツヅテデトドナニヌネノハバパヒビピフブプヘベペホボポマミムメモャヤュユョヨラリルレロヮワヰヱヲンヴヵヶДЕЁЖЗИЙКЛМУФХЦЧШЩЪЫЬЭЮЯабвгдеёжзийклмнопрстуфхцчшщъыьэюя①②③④⑤⑥⑦⑧⑨⑩⑴⑵⑶⑷⑸⑹⑺⑻⑼⑽���������������������������������������������������������������������������������������������������������������������������������������������������������������乂乜凵匚厂万丌乇亍囗兀屮彳丏冇与丮亓仂仉仈冘勼卬厹圠夃夬尐巿旡殳毌气爿丱丼仨仜仩仡仝仚刌匜卌圢圣夗夯宁宄尒尻屴屳帄庀庂忉戉扐氕氶汃氿氻犮犰玊禸肊阞伎优伬仵伔仱伀价伈伝伂伅伢伓伄仴伒冱刓刉刐劦匢
匟卍厊吇囡囟圮圪圴夼妀奼妅奻奾奷奿孖尕尥屼屺屻屾巟幵庄异弚彴忕忔忏扜扞扤扡扦扢扙扠扚扥旯旮朾朹朸朻机朿朼朳氘汆汒汜汏汊汔汋汌灱牞犴犵玎甪癿穵网艸艼芀艽艿虍襾邙邗邘邛邔阢阤阠阣佖伻佢佉体佤伾佧佒佟佁佘伭伳伿佡冏冹刜刞刡劭劮匉卣卲厎厏吰吷吪呔呅吙吜吥吘吽呏呁吨吤呇囮囧囥坁坅坌坉坋坒夆奀妦妘妠妗妎妢妐妏妧妡宎宒尨尪岍岏岈岋岉岒岊岆岓岕巠帊帎庋庉庌庈庍弅弝彸彶忒忑忐忭忨忮忳忡忤忣忺忯忷忻怀忴戺抃抌抎抏抔抇扱扻扺扰抁抈扷扽扲扴攷旰旴旳旲旵杅杇杙杕杌杈杝杍杚杋毐氙氚汸汧汫沄沋沏汱汯汩沚汭沇沕沜汦汳汥汻沎灴灺牣犿犽狃狆狁犺狅玕玗玓玔玒町甹疔疕皁礽耴肕肙肐肒肜芐芏芅芎芑芓芊芃芄豸迉辿邟邡邥邞邧邠阰阨阯阭丳侘佼侅佽侀侇佶佴侉侄佷佌侗佪侚佹侁佸侐侜侔侞侒侂侕佫佮冞冼冾刵刲刳剆刱劼匊匋匼厒厔咇呿咁咑咂咈呫呺呾呥呬呴呦咍呯呡呠咘呣呧呤囷囹坯坲坭坫坱坰坶垀坵坻坳坴坢坨坽夌奅妵妺姏姎妲姌姁妶妼姃姖妱妽姀姈妴姇孢孥宓宕屄屇岮岤岠岵岯岨岬岟岣岭岢岪岧岝岥岶岰岦帗帔帙弨弢弣弤彔徂彾彽忞忥怭怦怙怲怋怴怊怗怳怚怞怬怢怍怐怮怓怑怌怉怜戔戽抭抴拑抾抪抶拊抮抳抯抻抩抰抸攽斨斻昉旼昄昒昈旻昃昋昍昅旽昑昐曶朊枅杬枎枒杶杻枘枆构杴枍枌杺枟枑枙枃杽极杸杹枔欥殀歾毞氝沓泬泫泮泙沶泔沭泧沷泐泂沺泃泆泭泲泒泝沴沊沝沀泞泀洰泍泇沰泹泏泩泑炔炘炅炓炆炄炑炖炂炚炃牪狖狋狘狉狜狒狔狚狌狑玤玡玭玦玢玠玬玝瓝瓨甿畀甾疌疘皯盳盱盰盵矸矼矹矻矺矷祂礿秅穸穻竻籵糽耵肏肮肣肸肵肭舠芠苀芫芚芘芛芵芧芮芼芞芺芴芨芡芩苂芤苃芶芢虰虯虭虮豖迒迋迓迍迖迕迗邲邴邯邳邰阹阽阼阺陃俍俅俓侲俉俋俁俔俜俙侻侳俛俇俖侺俀侹俬剄剉勀勂匽卼厗厖厙厘咺咡咭咥哏哃茍咷咮哖咶哅哆咠呰咼咢咾呲哞咰垵垞垟垤垌垗垝垛垔垘垏垙垥垚垕壴复奓姡姞姮娀姱姝姺姽姼姶姤姲姷姛姩姳姵姠姾姴姭宨屌峐峘峌峗峋峛峞峚峉峇峊峖峓峔峏峈峆峎峟峸巹帡帢帣帠帤庰庤庢庛庣庥弇弮彖徆怷怹恔恲恞恅恓恇恉恛恌恀恂恟怤恄恘恦恮扂扃拏挍挋拵挎挃拫拹挏挌拸拶挀挓挔拺挕拻拰敁敃斪斿昶昡昲昵昜昦昢昳昫昺昝昴昹昮朏朐柁柲柈枺柜枻柸柘柀枷柅柫柤柟枵柍枳柷柶柮柣柂枹柎柧柰枲柼柆柭柌枮柦柛柺柉柊柃柪柋欨殂殄殶毖毘毠氠氡洨洴洭洟洼洿洒洊泚洳洄洙洺洚洑洀洝浂洁洘洷洃洏浀洇洠洬洈洢洉洐炷炟炾炱炰炡炴炵炩牁牉牊牬牰牳牮狊狤狨狫狟狪狦狣玅珌珂珈珅玹玶玵玴珫玿珇玾珃珆玸珋瓬瓮甮畇畈疧疪癹盄眈眃眄眅眊盷盻盺矧矨砆砑砒砅砐砏砎砉砃砓祊祌祋祅祄秕种秏秖秎窀穾竑笀笁籺籸籹籿粀粁紃紈紁罘羑羍羾耇耎耏耔耷胘胇胠胑胈胂胐胅胣胙胜胊胕胉胏胗胦胍臿舡芔苙苾苹茇苨茀苕茺苫苖苴苬苡苲苵茌苻苶苰苪苤苠苺苳苭虷虴虼虳衁衎衧衪衩觓訄訇赲迣迡迮迠郱邽邿郕郅邾郇郋郈釔釓陔陏陑陓陊陎倞倅倇倓倢倰倛俵俴倳倷倬俶俷倗倜倠倧倵倯倱倎党冔冓凊凄凅凈凎剡剚剒剞剟剕剢勍匎厞唦哢唗唒哧哳哤唚哿唄唈哫唑唅哱唊哻哷哸哠唎唃唋圁圂埌堲埕埒垺埆垽垼垸垶垿埇埐垹埁夎奊娙娖娭娮娕娏娗娊娞娳孬宧宭宬尃屖屔峬峿峮峱峷崀峹帩帨庨庮庪庬弳弰彧恝恚恧恁悢悈悀悒悁悝悃悕悛悗悇悜悎戙扆拲挐捖挬捄捅挶捃揤挹捋捊挼挩捁挴捘捔捙挭捇挳捚捑挸捗捀捈敊敆旆旃旄旂晊晟晇晑朒朓栟栚桉栲栳栻桋桏栖栱栜栵栫栭栯桎桄栴栝栒栔栦栨栮桍栺栥栠欬欯欭欱欴歭肂殈毦毤毨毣毢毧氥浺浣浤浶洍浡涒浘浢浭浯涑涍淯浿涆浞浧浠涗浰浼浟涂涘洯浨涋浾涀涄洖涃浻浽浵涐烜烓烑烝烋缹烢烗烒烞烠烔烍烅烆烇烚烎烡牂牸牷牶猀狺狴狾狶狳狻猁珓珙珥珖玼珧珣珩珜珒珛珔珝珚珗珘珨瓞瓟瓴瓵甡畛畟疰痁疻痄痀疿疶疺皊盉眝眛眐眓眒眣眑眕眙眚眢眧砣砬砢砵砯砨砮砫砡砩砳砪砱祔祛祏祜祓祒祑秫秬秠秮秭秪秜秞秝窆窉窅窋窌窊窇竘笐笄笓笅笏笈笊笎笉笒粄粑粊粌粈粍粅紞紝紑紎紘紖紓紟紒紏紌罜罡罞罠罝罛羖羒翃翂翀耖耾耹胺胲胹胵脁胻脀舁舯舥茳茭荄茙荑茥荖茿荁茦茜茢荂荎茛茪茈茼荍茖茤茠茷茯茩荇荅荌荓茞茬荋茧荈虓虒蚢蚨蚖蚍蚑蚞蚇蚗蚆蚋蚚蚅蚥蚙蚡蚧蚕蚘蚎蚝蚐蚔衃衄衭衵衶衲袀衱衿衯袃衾衴衼訒豇豗豻貤貣赶赸趵趷趶軑軓迾迵适迿迻逄迼迶郖郠郙郚郣郟郥郘郛郗郜郤酐酎酏釕釢釚陜陟隼飣髟鬯乿偰偪偡偞偠偓偋偝偲偈偍偁偛偊偢倕偅偟偩偫偣偤偆偀偮偳偗偑凐剫剭剬剮勖勓匭厜啵啶唼啍啐唴唪啑啢唶唵唰啒啅唌唲啥啎唹啈唭唻啀啋圊圇埻堔埢埶埜埴堀埭埽堈埸堋埳埏堇埮埣埲埥埬埡堎埼堐埧堁堌埱埩埰堍堄奜婠婘婕婧婞娸娵婭婐婟婥婬婓婤婗婃婝婒婄婛婈媎娾婍娹婌婰婩婇婑婖婂婜孲孮寁寀屙崞崋崝崚崠崌崨崍崦崥崏崰崒崣崟崮帾帴庱庴庹庲庳弶弸徛徖徟悊悐悆悾悰悺惓惔惏惤惙惝惈悱惛悷惊悿惃惍惀挲捥掊掂捽掽掞掭掝掗掫掎捯掇掐据掯捵掜捭掮捼掤挻掟捸掅掁掑掍捰敓旍晥晡晛晙晜晢朘桹梇梐梜桭桮梮梫楖桯梣梬梩桵桴梲梏桷梒桼桫桲梪梀桱桾梛梖梋梠梉梤桸桻梑梌梊桽欶欳欷欸殑殏殍殎殌氪淀涫涴涳湴涬淩淢涷淶淔渀淈淠淟淖涾淥淜淝淛淴淊涽淭淰涺淕淂淏淉淐淲淓淽淗淍淣涻烺焍烷焗烴焌烰焄烳焐烼烿焆焓焀烸烶焋焂焎牾牻牼牿猝猗猇猑猘猊猈狿猏猞玈珶珸珵琄琁珽琇琀珺珼珿琌琋珴琈畤畣痎痒痏痋痌痑痐皏皉盓眹眯眭眱眲眴眳眽眥眻眵硈硒硉硍硊硌砦硅硐祤祧祩祪祣祫祡离秺秸秶秷窏窔窐笵筇笴笥笰笢笤笳笘笪笝笱笫笭笯笲笸笚笣粔粘粖粣紵紽紸紶紺絅紬紩絁絇紾紿絊紻紨罣羕羜羝羛翊翋翍翐翑翇翏翉耟耞耛聇聃聈脘脥脙脛脭脟脬脞脡脕脧脝脢舑舸舳舺舴舲艴莐莣莨莍荺荳莤荴莏莁莕莙荵莔莩荽莃莌莝莛莪莋荾莥莯莈莗莰荿莦莇莮荶莚虙虖蚿蚷蛂蛁蛅蚺蚰蛈蚹蚳蚸蛌蚴蚻蚼蛃蚽蚾衒袉袕袨袢袪袚袑袡袟袘袧袙袛袗袤袬袌袓袎覂觖觙觕訰訧訬訞谹谻豜豝豽貥赽赻赹趼跂趹趿跁軘軞軝軜軗軠軡逤逋逑逜逌逡郯郪郰郴郲郳郔郫郬郩酖酘酚酓酕釬釴釱釳釸釤釹釪釫釷釨釮镺閆閈陼陭陫陱陯隿靪頄飥馗傛傕傔傞傋傣傃傌傎傝偨傜傒傂傇兟凔匒匑厤厧喑喨喥喭啷噅喢喓喈喏喵喁喣喒喤啽喌喦啿喕喡喎圌堩堷堙堞堧堣堨埵塈堥堜堛堳堿堶堮堹堸堭堬堻奡媯媔媟婺媢媞婸媦婼媥媬媕媮娷媄媊媗媃媋媩婻婽媌媜媏媓媝寪寍寋寔寑寊寎尌尰崷嵃嵫嵁嵋崿崵嵑嵎嵕崳崺嵒崽崱嵙嵂崹嵉崸崼崲崶嵀嵅幄幁彘徦徥徫惉悹惌惢惎惄愔惲愊愖愅惵愓惸惼惾惁愃愘愝愐惿愄愋扊掔掱掰揎揥揨揯揃撝揳揊揠揶揕揲揵摡揟掾揝揜揄揘揓揂揇揌揋揈揰揗揙攲敧敪敤敜敨敥斌斝斞斮旐旒晼晬晻暀晱晹晪晲朁椌棓椄棜椪棬棪棱椏棖棷棫棤棶椓椐棳棡椇棌椈楰梴椑棯棆椔棸棐棽棼棨椋椊椗棎棈棝棞棦棴棑椆棔棩椕椥棇欹欻欿欼殔殗殙殕殽毰毲毳氰淼湆湇渟湉溈渼渽湅湢渫渿湁湝湳渜渳湋湀湑渻渃渮湞湨湜湡渱渨湠湱湫渹渢渰湓湥渧湸湤湷湕湹湒湦渵渶湚焠焞焯烻焮焱焣焥焢焲焟焨焺焛牋牚犈犉犆犅犋猒猋猰猢猱猳猧猲猭猦猣猵猌琮琬琰琫琖琚琡琭琱琤琣琝琩琠琲瓻甯畯畬痧痚痡痦痝痟痤痗皕皒盚睆睇睄睍睅睊睎睋睌矞矬硠硤硥硜硭硱硪确硰硩硨硞硢祴祳祲祰稂稊稃稌稄窙竦竤筊笻筄筈筌筎筀筘筅粢粞粨粡絘絯絣絓絖絧絪絏絭絜絫絒絔絩絑絟絎缾缿罥罦羢羠羡翗聑聏聐胾胔腃腊腒腏腇脽腍脺臦臮臷臸臹舄舼舽舿艵茻菏菹萣菀菨萒菧菤菼菶萐菆菈菫菣莿萁菝菥菘菿菡菋菎菖菵菉萉萏菞萑萆菂菳菕菺菇菑菪萓菃菬菮菄菻菗菢萛菛菾蛘蛢蛦蛓蛣蛚蛪蛝蛫蛜蛬蛩蛗蛨蛑衈衖衕袺裗袹袸裀袾袶袼袷袽袲褁裉覕覘覗觝觚觛詎詍訹詙詀詗詘詄詅詒詈詑詊詌詏豟貁貀貺貾貰貹貵趄趀趉跘跓跍跇跖跜跏跕跙跈跗跅軯軷軺軹軦軮軥軵軧軨軶軫軱軬軴軩逭逴逯鄆鄬鄄郿郼鄈郹郻鄁鄀鄇鄅鄃酡酤酟酢酠鈁鈊鈥鈃鈚鈦鈏鈌鈀鈒釿釽鈆鈄鈧鈂鈜鈤鈙鈗鈅鈖镻閍閌閐隇陾隈隉隃隀雂雈雃雱雰靬靰靮頇颩飫鳦黹亃亄亶傽傿僆傮僄僊傴僈僂傰僁傺傱僋僉傶傸凗剺剸剻剼嗃嗛嗌嗐嗋嗊嗝嗀嗔嗄嗩喿嗒喍嗏嗕嗢嗖嗈嗲嗍嗙嗂圔塓塨塤塏塍塉塯塕塎塝塙塥塛堽塣塱壼嫇嫄嫋媺媸媱媵媰媿嫈媻嫆媷嫀嫊媴媶嫍媹媐寖寘寙尟尳嵱嵣嵊嵥嵲嵬嵞嵨嵧嵢巰幏幎幊幍幋廅廌廆廋廇彀徯徭惷慉慊愫慅愶愲愮慆愯慏愩慀戠酨戣戥戤揅揱揫搐搒搉搠搤搳摃搟搕搘搹搷搢搣搌搦搰搨摁搵搯搊搚摀搥搧搋揧搛搮搡搎敯斒旓暆暌暕暐暋暊暙暔晸朠楦楟椸楎楢楱椿楅楪椹楂楗楙楺楈楉椵楬椳椽楥棰楸椴楩楀楯楄楶楘楁楴楌椻楋椷楜楏楑椲楒椯楻椼歆歅歃歂歈歁殛嗀毻毼毹毷毸溛滖滈溏滀溟溓溔溠溱溹滆滒溽滁溞滉溷溰滍溦滏溲溾滃滜滘溙溒溎溍溤溡溿溳滐滊溗溮溣煇煔煒煣煠煁煝煢煲煸煪煡煂煘煃煋煰煟煐煓煄煍煚牏犍犌犑犐犎猼獂猻猺獀獊獉瑄瑊瑋瑒瑑瑗瑀瑏瑐瑎瑂瑆瑍瑔瓡瓿瓾瓽甝畹畷榃痯瘏瘃痷痾痼痹痸瘐痻痶痭痵痽皙皵盝睕睟睠睒睖睚睩睧睔睙睭矠碇碚碔碏碄碕碅碆碡碃硹碙碀碖硻祼禂祽祹稑稘稙稒稗稕稢稓稛稐窣窢窞竫筦筤筭筴筩筲筥筳筱筰筡筸筶筣粲粴粯綈綆綀綍絿綅絺綎絻綃絼綌綔綄絽綒罭罫罧罨罬羦羥羧翛翜耡腤腠腷腜腩腛腢腲朡腞腶腧腯腄腡舝艉艄艀艂艅蓱萿葖葶葹蒏蒍葥葑葀蒆葧萰葍葽葚葙葴葳葝蔇葞萷萺萴葺葃葸萲葅萩菙葋萯葂萭葟葰萹葎葌葒葯蓅蒎萻葇萶萳葨葾葄萫葠葔葮葐蜋蜄蛷蜌蛺蛖蛵蝍蛸蜎蜉蜁蛶蜍蜅裖裋裍裎裞裛裚裌裐覅覛觟觥觤觡觠觢觜触詶誆詿詡訿詷誂誄詵誃誁詴詺谼豋豊豥豤豦貆貄貅賌赨赩趑趌趎趏趍趓趔趐趒跰
跠跬跱跮跐跩跣跢跧跲跫跴輆軿輁輀輅輇輈輂輋遒逿遄遉逽鄐鄍鄏鄑鄖鄔鄋鄎酮酯鉈鉒鈰鈺鉦鈳鉥鉞銃鈮鉊鉆鉭鉬鉏鉠鉧鉯鈶鉡鉰鈱鉔鉣鉐鉲鉎鉓鉌鉖鈲閟閜閞閛隒隓隑隗雎雺雽雸雵靳靷靸靲頏頍頎颬飶飹馯馲馰馵骭骫魛鳪鳭鳧麀黽僦僔僗僨僳僛僪僝僤僓僬僰僯僣僠凘劀劁勩勫匰厬嘧嘕嘌嘒嗼嘏嘜嘁嘓嘂嗺嘝嘄嗿嗹墉塼墐墘墆墁塿塴墋塺墇墑墎塶墂墈塻墔墏壾奫嫜嫮嫥嫕嫪嫚嫭嫫嫳嫢嫠嫛嫬嫞嫝嫙嫨嫟孷寠寣屣嶂嶀嵽嶆嵺嶁嵷嶊嶉嶈嵾嵼嶍嵹嵿幘幙幓廘廑廗廎廜廕廙廒廔彄彃彯徶愬愨慁慞慱慳慒慓慲慬憀慴慔慺慛慥愻慪慡慖戩戧戫搫摍摛摝摴摶摲摳摽摵摦撦摎撂摞摜摋摓摠摐摿搿摬摫摙摥摷敳斠暡暠暟朅朄朢榱榶槉榠槎榖榰榬榼榑榙榎榧榍榩榾榯榿槄榽榤槔榹槊榚槏榳榓榪榡榞槙榗榐槂榵榥槆歊歍歋殞殟殠毃毄毾滎滵滱漃漥滸漷滻漮漉潎漙漚漧漘漻漒滭漊漶潳滹滮漭潀漰漼漵滫漇漎潃漅滽滶漹漜滼漺漟漍漞漈漡熇熐熉熀熅熂熏煻熆熁熗牄牓犗犕犓獃獍獑獌瑢瑳瑱瑵瑲瑧瑮甀甂甃畽疐瘖瘈瘌瘕瘑瘊瘔皸瞁睼瞅瞂睮瞀睯睾瞃碲碪碴碭碨硾碫碞碥碠碬碢碤禘禊禋禖禕禔禓禗禈禒禐稫穊稰稯稨稦窨窫窬竮箈箜箊箑箐箖箍箌箛箎箅箘劄箙箤箂粻粿粼粺綧綷緂綣綪緁緀緅綝緎緄緆緋緌綯綹綖綼綟綦綮綩綡緉罳翢翣翥翞耤聝聜膉膆膃膇膍膌膋舕蒗蒤蒡蒟蒺蓎蓂蒬蒮蒫蒹蒴蓁蓍蒪蒚蒱蓐蒝蒧蒻蒢蒔蓇蓌蒛蒩蒯蒨蓖蒘蒶蓏蒠蓗蓔蓒蓛蒰蒑虡蜳蜣蜨蝫蝀蜮蜞蜡蜙蜛蝃蜬蝁蜾蝆蜠蜲蜪蜭蜼蜒蜺蜱蜵蝂蜦蜧蜸蜤蜚蜰蜑裷裧裱裲裺裾裮裼裶裻裰裬裫覝覡覟覞觩觫觨誫誙誋誒誏誖谽豨豩賕賏賗趖踉踂跿踍跽踊踃踇踆踅跾踀踄輐輑輎輍鄣鄜鄠鄢鄟鄝鄚鄤鄡鄛酺酲酹酳銥銤鉶銛鉺銠銔銪銍銦銚銫鉹銗鉿銣鋮銎銂銕銢鉽銈銡銊銆銌銙銧鉾銇銩銝銋鈭隞隡雿靘靽靺靾鞃鞀鞂靻鞄鞁靿韎韍頖颭颮餂餀餇馝馜駃馹馻馺駂馽駇骱髣髧鬾鬿魠魡魟鳱鳲鳵麧僿儃儰僸儆儇僶僾儋儌僽儊劋劌勱勯噈噂噌嘵噁噊噉噆噘噚噀嘳嘽嘬嘾嘸嘪嘺圚墫墝墱墠墣墯墬墥墡壿嫿嫴嫽嫷嫶嬃嫸嬂嫹嬁嬇嬅嬏屧嶙嶗嶟嶒嶢嶓嶕嶠嶜嶡嶚嶞幩幝幠幜緳廛廞廡彉徲憋憃慹憱憰憢憉憛憓憯憭憟憒憪憡憍慦憳戭摮摰撖撠撅撗撜撏撋撊撌撣撟摨撱撘敶敺敹敻斲斳暵暰暩暲暷暪暯樀樆樗槥槸樕槱槤樠槿槬槢樛樝槾樧槲槮樔槷槧橀樈槦槻樍槼槫樉樄樘樥樏槶樦樇槴樖歑殥殣殢殦氁氀毿氂潁漦潾澇濆澒澍澉澌潢潏澅潚澖潶潬澂潕潲潒潐潗澔澓潝漀潡潫潽潧澐潓澋潩潿澕潣潷潪潻熲熯熛熰熠熚熩熵熝熥熞熤熡熪熜熧熳犘犚獘獒獞獟獠獝獛獡獚獙獢璇璉璊璆璁瑽璅璈瑼瑹甈甇畾瘥瘞瘙瘝瘜瘣瘚瘨瘛皜皝皞皛瞍瞏瞉瞈磍碻磏磌磑磎磔磈磃磄磉禚禡禠禜禢禛歶稹窲窴窳箷篋箾箬篎箯箹篊箵糅糈糌糋緷緛緪緧緗緡縃緺緦緶緱緰緮緟罶羬羰羭翭翫翪翬翦翨聤聧膣膟膞膕膢膙膗舖艏艓艒艐艎艑蔤蔻蔏蔀蔩蔎蔉蔍蔟蔊蔧蔜蓻蔫蓺蔈蔌蓴蔪蓲蔕蓷蓫蓳蓼蔒蓪蓩蔖蓾蔨蔝蔮蔂蓽蔞蓶蔱蔦蓧蓨蓰蓯蓹蔘蔠蔰蔋蔙蔯虢蝖蝣蝤蝷蟡蝳蝘蝔蝛蝒蝡蝚蝑蝞蝭蝪蝐蝎蝟蝝蝯蝬蝺蝮蝜蝥蝏蝻蝵蝢蝧蝩衚褅褌褔褋褗褘褙褆褖褑褎褉覢覤覣觭觰觬諏諆誸諓諑諔諕誻諗誾諀諅諘諃誺誽諙谾豍貏賥賟賙賨賚賝賧趠趜趡趛踠踣踥踤踮踕踛踖踑踙踦踧踔踒踘踓踜踗踚輬輤輘輚輠輣輖輗遳遰遯遧遫鄯鄫鄩鄪鄲鄦鄮醅醆醊醁醂醄醀鋐鋃鋄鋀鋙銶鋏鋱鋟鋘鋩鋗鋝鋌鋯鋂鋨鋊鋈鋎鋦鋍鋕鋉鋠鋞鋧鋑鋓銵鋡鋆銴镼閬閫閮閰隤隢雓霅霈霂靚鞊鞎鞈韐韏頞頝頦頩頨頠頛頧颲餈飺餑餔餖餗餕駜駍駏駓駔駎駉駖駘駋駗駌骳髬髫髳髲髱魆魃魧魴魱魦魶魵魰魨魤魬鳼鳺鳽鳿鳷鴇鴀鳹鳻鴈鴅鴄麃黓鼏鼐儜儓儗儚儑凞匴叡噰噠噮噳噦噣噭噲噞噷圜圛壈墽壉墿墺壂墼壆嬗嬙嬛嬡嬔嬓嬐嬖嬨嬚嬠嬞寯嶬嶱嶩嶧嶵嶰嶮嶪嶨嶲嶭嶯嶴幧幨幦幯廩廧廦廨廥彋徼憝憨憖懅憴懆懁懌憺憿憸憌擗擖擐擏擉撽撉擃擛擳擙攳敿敼斢曈暾曀曊曋曏暽暻暺曌朣樴橦橉橧樲橨樾橝橭橶橛橑樨橚樻樿橁橪橤橐橏橔橯橩橠樼橞橖橕橍橎橆歕歔歖殧殪殫毈毇氄氃氆澭濋澣濇澼濎濈潞濄澽澞濊澨瀄澥澮澺澬澪濏澿澸澢濉澫濍澯澲澰燅燂熿熸燖燀燁燋燔燊燇燏熽燘熼燆燚燛犝犞獩獦獧獬獥獫獪瑿璚璠璔璒璕璡甋疀瘯瘭瘱瘽瘳瘼瘵瘲瘰皻盦瞚瞝瞡瞜瞛瞢瞣瞕瞙瞗磝磩磥磪磞磣磛磡磢磭磟磠禤穄穈穇窶窸窵窱窷篞篣篧篝篕篥篚篨篹篔篪篢篜篫篘篟糒糔糗糐糑縒縡縗縌縟縠縓縎縜縕縚縢縋縏縖縍縔縥縤罃罻罼罺羱翯耪耩聬膱膦膮膹膵膫膰膬膴膲膷膧臲艕艖艗蕖蕅蕫蕍蕓蕡蕘蕀蕆蕤蕁蕢蕄蕑蕇蕣蔾蕛蕱蕎蕮蕵蕕蕧蕠薌蕦蕝蕔蕥蕬虣虥虤螛螏螗螓螒螈螁螖螘蝹螇螣螅螐螑螝螄螔螜螚螉褞褦褰褭褮褧褱褢褩褣褯褬褟觱諠諢諲諴諵諝謔諤諟諰諈諞諡諨諿諯諻貑貒貐賵賮賱賰賳赬赮趥趧踳踾踸蹀蹅踶踼踽蹁踰踿躽輶輮輵輲輹輷輴遶遹遻邆郺鄳鄵鄶醓醐醑醍醏錧錞錈錟錆錏鍺錸錼錛錣錒錁鍆錭錎錍鋋錝鋺錥錓鋹鋷錴錂錤鋿錩錹錵錪錔錌錋鋾錉錀鋻錖閼闍閾閹閺閶閿閵閽隩雔霋霒霐鞙鞗鞔韰韸頵頯頲餤餟餧餩馞駮駬駥駤駰駣駪駩駧骹骿骴骻髶髺髹髷鬳鮀鮅鮇魼魾魻鮂鮓鮒鮐魺鮕魽鮈鴥鴗鴠鴞鴔鴩鴝鴘鴢鴐鴙鴟麈麆麇麮麭黕黖黺鼒鼽儦儥儢儤儠儩勴嚓嚌嚍嚆嚄嚃噾嚂噿嚁壖壔壏壒嬭嬥嬲嬣嬬嬧嬦嬯嬮孻寱寲嶷幬幪徾徻懃憵憼懧懠懥懤懨懞擯擩擣擫擤擨斁斀斶旚曒檍檖檁檥檉檟檛檡檞檇檓檎檕檃檨檤檑橿檦檚檅檌檒歛殭氉濌澩濴濔濣濜濭濧濦濞濲濝濢濨燡燱燨燲燤燰燢獳獮獯璗璲璫璐璪璭璱璥璯甐甑甒甏疄癃癈癉癇皤盩瞵瞫瞲瞷瞶瞴瞱瞨矰磳磽礂磻磼磲礅磹磾礄禫禨穜穛穖穘穔穚窾竀竁簅簏篲簀篿篻簎篴簋篳簂簉簃簁篸篽簆篰篱簐簊糨縭縼繂縳顈縸縪繉繀繇縩繌縰縻縶繄縺罅罿罾罽翴翲耬膻臄臌臊臅臇膼臩艛艚艜薃薀薏薧薕薠薋薣蕻薤薚薞蕷蕼薉薡蕺蕸蕗薎薖薆薍薙薝薁薢薂薈薅蕹蕶薘薐薟虨螾螪螭蟅螰螬螹螵螼螮蟉蟃蟂蟌螷螯蟄蟊螴螶螿螸螽蟞螲褵褳褼褾襁襒褷襂覭覯覮觲觳謞謘謖謑謅謋謢謏謒謕謇謍謈謆謜謓謚豏豰豲豱豯貕貔賹赯蹎蹍蹓蹐蹌蹇轃轀邅遾鄸醚醢醛醙醟醡醝醠鎡鎃鎯鍤鍖鍇鍼鍘鍜鍶鍉鍐鍑鍠鍭鎏鍌鍪鍹鍗鍕鍒鍏鍱鍷鍻鍡鍞鍣鍧鎀鍎鍙闇闀闉闃闅閷隮隰隬霠霟霘霝霙鞚鞡鞜鞞鞝韕韔韱顁顄顊顉顅顃餥餫餬餪餳餲餯餭餱餰馘馣馡騂駺駴駷駹駸駶駻駽駾駼騃骾髾髽鬁髼魈鮚鮨鮞鮛鮦鮡鮥鮤鮆鮢鮠鮯鴳鵁鵧鴶鴮鴯鴱鴸鴰鵅鵂鵃鴾鴷鵀鴽翵鴭麊麉麍麰黈黚黻黿鼤鼣鼢齔龠儱儭儮嚘嚜嚗嚚嚝嚙奰嬼屩屪巀幭幮懘懟懭懮懱懪懰懫懖懩擿攄擽擸攁攃擼斔旛曚曛曘櫅檹檽櫡櫆檺檶檷櫇檴檭歞毉氋瀇瀌瀍瀁瀅瀔瀎濿瀀濻瀦濼濷瀊爁燿燹爃燽獶璸瓀璵瓁璾璶璻瓂甔甓癜癤癙癐癓癗癚皦皽盬矂瞺磿礌礓礔礉礐礒礑禭禬穟簜簩簙簠簟簭簝簦簨簢簥簰繜繐繖繣繘繢繟繑繠繗繓羵羳翷翸聵臑臒臐艟艞薴藆藀藃藂薳薵薽藇藄薿藋藎藈藅薱薶藒蘤薸薷薾虩蟧蟦蟢蟛蟫蟪蟥蟟蟳蟤蟔蟜蟓蟭蟘蟣螤蟗蟙蠁蟴蟨蟝襓襋襏襌襆襐襑襉謪謧謣謳謰謵譇謯謼謾謱謥謷謦謶謮謤謻謽謺豂豵貙貘貗賾贄贂贀蹜蹢蹠蹗蹖蹞蹥蹧蹛蹚蹡蹝蹩蹔轆轇轈轋鄨鄺鄻鄾醨醥醧醯醪鎵鎌鎒鎷鎛鎝鎉鎧鎎鎪鎞鎦鎕鎈鎙鎟鎍鎱鎑鎲鎤鎨鎴鎣鎥闒闓闑隳雗雚巂雟雘雝霣霢霥鞬鞮鞨鞫鞤鞪鞢鞥韗韙韖韘韺顐顑顒颸饁餼餺騏騋騉騍騄騑騊騅騇騆髀髜鬈鬄鬅鬩鬵魊魌魋鯇鯆鯃鮿鯁鮵鮸鯓鮶鯄鮹鮽鵜鵓鵏鵊鵛鵋鵙鵖鵌鵗鵒鵔鵟鵘鵚麎麌黟鼁鼀鼖鼥鼫鼪鼩鼨齌齕儴儵劖勷厴嚫嚭嚦嚧嚪嚬壚壝壛夒嬽嬾嬿巃幰徿懻攇攐攍攉攌攎斄旞旝曞櫧櫠櫌櫑櫙櫋櫟櫜櫐櫫櫏櫍櫞歠殰氌瀙瀧瀠瀖瀫瀡瀢瀣瀩瀗瀤瀜瀪爌爊爇爂爅犥犦犤犣犡瓋瓅璷瓃甖癠矉矊矄矱礝礛礡礜礗礞禰穧穨簳簼簹簬簻糬糪繶繵繸繰繷繯繺繲繴繨罋罊羃羆羷翽翾聸臗臕艤艡艣藫藱藭藙藡藨藚藗藬藲藸藘藟藣藜藑藰藦藯藞藢蠀蟺蠃蟶蟷蠉蠌蠋蠆蟼蠈蟿蠊蠂襢襚襛襗襡襜襘襝襙覈覷覶觶譐譈譊譀譓譖譔譋譕譑譂譒譗豃豷豶貚贆贇贉趬趪趭趫蹭蹸蹳蹪蹯蹻軂轒轑轏轐轓辴酀鄿醰醭鏞鏇鏏鏂鏚鏐鏹鏬鏌鏙鎩鏦鏊鏔鏮鏣鏕鏄鏎鏀鏒鏧镽闚闛雡霩霫霬霨霦鞳鞷鞶韝韞韟顜顙顝顗颿颽颻颾饈饇饃馦馧騚騕騥騝騤騛騢騠騧騣騞騜騔髂鬋鬊鬎鬌鬷鯪鯫鯠鯞鯤鯦鯢鯰鯔鯗鯬鯜鯙鯥鯕鯡鯚鵷鶁鶊鶄鶈鵱鶀鵸鶆鶋鶌鵽鵫鵴鵵鵰鵩鶅鵳鵻鶂鵯鵹鵿鶇鵨麔麑黀黼鼭齀齁齍齖齗齘匷嚲嚵嚳壣孅巆巇廮廯忀忁懹攗攖攕攓旟曨曣曤櫳櫰櫪櫨櫹櫱櫮櫯瀼瀵瀯瀷瀴瀱灂瀸瀿瀺瀹灀瀻瀳灁爓爔犨獽獼璺皫皪皾盭矌矎矏矍矲礥礣礧礨礤礩禲穮穬穭竷籉籈籊籇籅糮繻繾纁纀羺翿聹臛臙舋艨艩蘢藿蘁藾蘛蘀藶蘄蘉蘅蘌藽蠙蠐蠑蠗蠓蠖襣襦覹觷譠譪譝譨譣譥譧譭趮躆躈躄轙轖轗轕轘轚邍酃酁醷醵醲醳鐋鐓鏻鐠鐏鐔鏾鐕鐐鐨鐙鐍鏵鐀鏷鐇鐎鐖鐒鏺鐉鏸鐊鏿鏼鐌鏶鐑鐆闞闠闟霮霯鞹鞻韽韾顠顢顣顟飁飂饐饎饙饌饋饓騲騴騱騬騪騶騩騮騸騭髇髊髆鬐鬒鬑鰋鰈鯷鰅鰒鯸鱀鰇鰎鰆鰗鰔鰉鶟鶙鶤鶝鶒鶘鶐鶛鶠鶔鶜鶪鶗鶡鶚鶢鶨鶞鶣鶿鶩鶖鶦鶧麙麛麚黥黤黧黦鼰鼮齛齠齞齝齙龑儺儹劘劗囃嚽嚾孈孇巋巏廱懽攛欂櫼欃櫸欀灃灄灊灈灉灅灆爝爚爙獾甗癪矐礭礱礯籔籓糲纊纇纈纋纆纍罍羻耰臝蘘蘪蘦蘟蘣蘜蘙蘧蘮蘡蘠蘩蘞蘥蠩蠝蠛蠠蠤蠜蠫衊襭襩襮襫觺譹譸譅譺譻贐贔趯躎躌轞轛轝酆酄酅醹鐿鐻鐶鐩鐽鐼鐰鐹鐪鐷鐬鑀鐱闥闤闣霵霺鞿韡顤飉飆飀饘饖騹騽驆驄驂驁騺騿髍鬕鬗鬘鬖鬺魒鰫鰝鰜鰬鰣鰨鰩鰤鰡鶷鶶鶼鷁鷇鷊鷏鶾鷅鷃鶻鶵鷎鶹鶺鶬鷈鶱鶭鷌鶳鷍鶲鹺麜黫黮黭鼛鼘鼚鼱齎齥齤龒亹囆囅囋奱孋孌巕巑廲攡攠攦攢欋欈欉氍灕灖灗灒爞爟犩
獿瓘瓕瓙瓗癭皭礵禴穰穱籗籜籙籛籚糴糱纑罏羇臞艫蘴蘵蘳蘬蘲蘶蠬蠨蠦蠪蠥襱覿覾觻譾讄讂讆讅譿贕躕躔躚躒躐躖躗轠轢酇鑌鑐鑊鑋鑏鑇鑅鑈鑉鑆霿韣顪顩飋饔饛驎驓驔驌驏驈驊驉驒驐髐鬙鬫鬻魖魕鱆鱈鰿鱄鰹鰳鱁鰼鰷鰴鰲鰽鰶鷛鷒鷞鷚鷋鷐鷜鷑鷟鷩鷙鷘鷖鷵鷕鷝麶黰鼵鼳鼲齂齫龕龢儽劙壨壧奲孍巘蠯彏戁戃戄攩攥斖曫欑欒欏毊灛灚爢玂玁玃癰矔籧籦纕艬蘺虀蘹蘼蘱蘻蘾蠰蠲蠮蠳襶襴襳觾讌讎讋讈豅贙躘轤轣醼鑢鑕鑝鑗鑞韄韅頀驖驙鬞鬟鬠鱒鱘鱐鱊鱍鱋鱕鱙鱌鱎鷻鷷鷯鷣鷫鷸鷤鷶鷡鷮鷦鷲鷰鷢鷬鷴鷳鷨鷭黂黐黲黳鼆鼜鼸鼷鼶齃齏齱齰齮齯囓囍孎屭攭曭曮欓灟灡灝灠爣瓛瓥矕礸禷禶籪纗羉艭虃蠸蠷蠵衋讔讕躞躟躠躝醾醽釂鑫鑨鑩雥靆靃靇韇韥驞髕魙鱣鱧鱦鱢鱞鱠鸂鷾鸇鸃鸆鸅鸀鸁鸉鷿鷽鸄麠鼞齆齴齵齶囔攮斸欘欙欗欚灢爦犪矘矙礹籩籫糶纚纘纛纙臠臡虆虇虈襹襺襼襻觿讘讙躥躤躣鑮鑭鑯鑱鑳靉顲饟鱨鱮鱭鸋鸍鸐鸏鸒鸑麡黵鼉齇齸齻齺齹圞灦籯蠼趲躦釃鑴鑸鑶鑵驠鱴鱳鱱鱵鸔鸓黶鼊龤灨灥糷虪蠾蠽蠿讞貜躩軉靋顳顴飌饡馫驤驦驧鬤鸕鸗齈戇欞爧虌躨钂钀钁驩驨鬮鸙爩虋讟钃鱹麷癵驫鱺鸝灩灪麤齾齉龘�����������������������������������������") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_charsets.json.go b/vendor/github.com/rogpeppe/go-charset/data/data_charsets.json.go new file mode 100644 index 000000000..a2e578d4a --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_charsets.json.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("charsets.json", func() (io.ReadCloser, error) { + r := strings.NewReader("{\n\"8bit\": {\n\t\"Desc\": \"raw 8-bit data\",\n\t\"Class\": \"8bit\",\n\t\"Comment\": \"special class for raw 8bit data that has been converted to utf-8\"\n},\n\"big5\": {\n\t\"Desc\": \"Big 5 (HKU)\",\n\t\"Class\": \"big5\",\n\t\"Comment\": \"Traditional Chinese\"\n},\n\"euc-jp\": {\n\t\"Aliases\":[\"x-euc-jp\"],\n\t\"Desc\": \"Japanese Extended UNIX Code\",\n\t\"Class\": \"euc-jp\"\n},\n\"gb2312\": {\n\t\"Aliases\":[\"iso-ir-58\", \"chinese\", \"gb_2312-80\"],\n\t\"Desc\": \"Chinese mixed one byte\",\n\t\"Class\": \"gb2312\"\n},\n\"ibm437\": {\n\t\"Aliases\":[\"437\", \"cp437\"],\n\t\"Desc\": \"IBM PC: CP 437\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"ibm437.cp\",\n\t\"Comment\": \"originally from jhelling@cs.ruu.nl (Jeroen Hellingman)\"\n},\n\"ibm850\": {\n\t\"Aliases\":[\"850\", \"cp850\"],\n\t\"Desc\": \"IBM PS/2: CP 850\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"ibm850.cp\",\n\t\"Comment\": \"originally from jhelling@cs.ruu.nl (Jeroen Hellingman)\"\n},\n\"ibm866\": {\n\t\"Aliases\":[\"cp866\", \"866\"],\n\t\"Desc\": \"Russian MS-DOS CP 866\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"ibm866.cp\"\n},\n\"iso-8859-1\": {\n\t\"Aliases\":[\"iso-ir-100\", \"ibm819\", \"l1\", \"iso8859-1\", \"iso-latin-1\", \"iso_8859-1:1987\", \"cp819\", \"iso_8859-1\", \"iso8859_1\", \"latin1\"],\n\t\"Desc\": \"Latin-1\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-1.cp\"\n},\n\"iso-8859-10\": {\n\t\"Aliases\":[\"iso_8859-10:1992\", \"l6\", \"iso-ir-157\", \"latin6\"],\n\t\"Desc\": \"Latin-6\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-10.cp\",\n\t\"Comment\": \"originally from dkuug.dk:i18n/charmaps/ISO_8859-10:1993\"\n},\n\"iso-8859-15\": {\n\t\"Aliases\":[\"l9-iso-8859-15\", \"latin9\"],\n\t\"Desc\": \"Latin-9\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-15.cp\"\n},\n\"iso-8859-2\": {\n\t\"Aliases\":[\"iso-ir-101\", \"iso_8859-2:1987\", \"l2\", \"iso_8859-2\", \"latin2\"],\n\t\"Desc\": \"Latin-2\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-2.cp\"\n},\n\"iso-8859-3\": {\n\t\"Aliases\":[\"iso-ir-109\", \"l3\", \"iso_8859-3:1988\", \"iso_8859-3\", \"latin3\"],\n\t\"Desc\": \"Latin-3\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-3.cp\"\n},\n\"iso-8859-4\": {\n\t\"Aliases\":[\"iso-ir-110\", \"iso_8859-4:1988\", \"l4\", \"iso_8859-4\", \"latin4\"],\n\t\"Desc\": \"Latin-4\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-4.cp\"\n},\n\"iso-8859-5\": {\n\t\"Aliases\":[\"cyrillic\", 
\"iso_8859-5\", \"iso-ir-144\", \"iso_8859-5:1988\"],\n\t\"Desc\": \"Part 5 (Cyrillic)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-5.cp\"\n},\n\"iso-8859-6\": {\n\t\"Aliases\":[\"ecma-114\", \"iso_8859-6:1987\", \"arabic\", \"iso_8859-6\", \"asmo-708\", \"iso-ir-127\"],\n\t\"Desc\": \"Part 6 (Arabic)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-6.cp\"\n},\n\"iso-8859-7\": {\n\t\"Aliases\":[\"greek8\", \"elot_928\", \"ecma-118\", \"greek\", \"iso_8859-7\", \"iso_8859-7:1987\", \"iso-ir-126\"],\n\t\"Desc\": \"Part 7 (Greek)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-7.cp\"\n},\n\"iso-8859-8\": {\n\t\"Aliases\":[\"iso_8859-8:1988\", \"hebrew\", \"iso_8859-8\", \"iso-ir-138\"],\n\t\"Desc\": \"Part 8 (Hebrew)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-8.cp\"\n},\n\"iso-8859-9\": {\n\t\"Aliases\":[\"l5\", \"iso_8859-9:1989\", \"iso_8859-9\", \"iso-ir-148\", \"latin5\"],\n\t\"Desc\": \"Latin-5\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-9.cp\"\n},\n\"koi8-r\": {\n\t\"Desc\": \"KOI8-R (RFC1489)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"koi8-r.cp\"\n},\n\"shift_jis\": {\n\t\"Aliases\":[\"sjis\", \"ms_kanji\", \"x-sjis\"],\n\t\"Desc\": \"Shift-JIS Japanese\",\n\t\"Class\": \"cp932\",\n\t\"Arg\": \"shiftjis\"\n},\n\"utf-16\": {\n\t\"Aliases\":[\"utf16\"],\n\t\"Desc\": \"Unicode UTF-16\",\n\t\"Class\": \"utf16\"\n},\n\"utf-16be\": {\n\t\"Aliases\":[\"utf16be\"],\n\t\"Desc\": \"Unicode UTF-16 big endian\",\n\t\"Class\": \"utf16\",\n\t\"Arg\": \"be\"\n},\n\"utf-16le\": {\n\t\"Aliases\":[\"utf16le\"],\n\t\"Desc\": \"Unicode UTF-16 little endian\",\n\t\"Class\": \"utf16\",\n\t\"Arg\": \"le\"\n},\n\"utf-8\": {\n\t\"Aliases\":[\"utf8\", \"ascii\", \"us-ascii\"],\n\t\"Desc\": \"Unicode UTF-8\",\n\t\"Class\": \"utf8\"\n},\n\"windows-1250\": {\n\t\"Desc\": \"MS Windows CP 1250 (Central Europe)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"windows-1250.cp\"\n},\n\"windows-1251\": {\n\t\"Desc\": \"MS Windows CP 1251 (Cyrillic)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"windows-1251.cp\"\n},\n\"windows-1252\": {\n\t\"Desc\": \"MS Windows CP 1252 (Latin 1)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"windows-1252.cp\"\n},\n\"windows-31j\": {\n\t\"Aliases\":[\"cp932\"],\n\t\"Desc\": \"MS-Windows Japanese (cp932)\",\n\t\"Class\": \"cp932\",\n\t\"Arg\": \"cp932\"\n}\n}\n") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_cp932.dat.go b/vendor/github.com/rogpeppe/go-charset/data/data_cp932.dat.go new file mode 100644 index 000000000..0e53a5c1e --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_cp932.dat.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("cp932.dat", func() (io.ReadCloser, error) { + r := strings.NewReader("\u3000、。,.・:;?!゛゜´`¨^ ̄_ヽヾゝゞ〃仝々〆〇ー―‐/\~∥|…‥‘’“”()〔〕[]{}〈〉《》「」『』【】+-±×�÷=≠<>≦≧∞∴♂♀°′″℃¥$¢£%#&*@§☆★○●◎◇◆□■△▲▽▼※〒→←↑↓〓�����������∈∋⊆⊇⊂⊃∪∩��������∧∨¬⇒⇔∀∃�����������∠⊥⌒∂∇≡≒≪≫√∽∝∵∫∬�������ʼn♯♭♪†‡¶����◯���������������0123456789�������ABCDEFGHIJKLMNOPQRSTUVWXYZ�������abcdefghijklmnopqrstuvwxyz����ぁあぃいぅうぇえぉおかがきぎくぐけげこごさざしじすずせぜそぞただちぢっつづてでとどなにぬねのはばぱひびぴふぶぷへべぺほぼぽまみむめもゃやゅゆょよらりるれろゎわゐゑをん�����������ァアィイゥウェエォオカガキギクグケゲコゴサザシジスズセゼソゾタダチヂッツヅテデトドナニヌネノハバパヒビピフブプヘベペホボポマミ�ムメモャヤュユョヨラリルレロヮワヰヱヲンヴヵヶ��������ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩ��������αβγδεζηθικλμνξοπρστυφχψω��������������������������������������АБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ���������������абвгдеёжзийклмн�опрстуфхцчшщъыьэюя�������������─│┌┐┘└├┬┤┴┼━┃┏┓┛┗┣┳┫┻╋┠┯┨┷┿┝┰┥┸╂��������������������������������������������������������������①②③④⑤⑥⑦⑧⑨⑩⑪⑫⑬⑭⑮⑯⑰⑱⑲⑳ⅠⅡⅢⅣⅤⅥⅦⅧⅨⅩ�㍉㌔㌢㍍㌘㌧㌃㌶㍑㍗㌍㌦㌣㌫㍊㌻㎜㎝㎞㎎㎏㏄㎡��������㍻�〝〟№㏍℡㊤㊥㊦㊧㊨㈱㈲㈹㍾㍽㍼≒≡∫∮∑√⊥∠∟⊿∵∩∪�����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������亜唖娃阿哀愛挨姶逢葵茜穐悪握渥旭葦芦鯵梓圧斡扱宛姐虻飴絢綾鮎或粟袷安庵按暗案闇鞍杏以伊位依偉囲夷委威尉惟意慰易椅為畏異移維緯胃萎衣謂違遺医井亥域育郁磯一壱溢逸稲茨芋鰯允印咽員因姻引飲淫胤蔭院陰隠韻吋右宇烏羽迂雨卯鵜窺丑碓臼渦嘘唄欝蔚鰻姥厩浦瓜閏噂云運雲荏餌叡営嬰影映曳栄永泳洩瑛盈穎頴英衛詠鋭液疫益駅悦謁越閲榎厭円�園堰奄宴延怨掩援沿演炎焔煙燕猿縁艶苑薗遠鉛鴛塩於汚甥凹央奥往応押旺横欧殴王翁襖鴬鴎黄岡沖荻億屋憶臆桶牡乙俺卸恩温穏音下化仮何伽価佳加可嘉夏嫁家寡科暇果架歌河火珂禍禾稼箇花苛茄荷華菓蝦課嘩貨迦過霞蚊俄峨我牙画臥芽蛾賀雅餓駕介会解回塊壊廻快怪悔恢懐戒拐改魁晦械海灰界皆絵芥蟹開階貝凱劾外咳害崖慨概涯碍蓋街該鎧骸浬馨蛙垣柿蛎鈎劃嚇各廓拡撹格核殻獲確穫覚角赫較郭閣隔革学岳楽額顎掛笠樫�橿梶鰍潟割喝恰括活渇滑葛褐轄且鰹叶椛樺鞄株兜竃蒲釜鎌噛鴨栢茅萱粥刈苅瓦乾侃冠寒刊勘勧巻喚堪姦完官寛干幹患感慣憾換敢柑桓棺款歓汗漢澗潅環甘監看竿管簡緩缶翰肝艦莞観諌貫還鑑間閑関陥韓館舘丸含岸巌玩癌眼岩翫贋雁頑顔願企伎危喜器基奇嬉寄岐希幾忌揮机旗既期棋棄機帰毅気汽畿祈季稀紀徽規記貴起軌輝飢騎鬼亀偽儀妓宜戯技擬欺犠疑祇義蟻誼議掬菊鞠吉吃喫桔橘詰砧杵黍却客脚虐逆丘久仇休及吸宮弓急救�朽求汲泣灸球究窮笈級糾給旧牛去居巨拒拠挙渠虚許距鋸漁禦魚亨享京供侠僑兇競共凶協匡卿叫喬境峡強彊怯恐恭挟教橋況狂狭矯胸脅興蕎郷鏡響饗驚仰凝尭暁業局曲極玉桐粁僅勤均巾錦斤欣欽琴禁禽筋緊芹菌衿襟謹近金吟銀九倶句区狗玖矩苦躯駆駈駒具愚虞喰空偶寓遇隅串櫛釧屑屈掘窟沓靴轡窪熊隈粂栗繰桑鍬勲君薫訓群軍郡卦袈祁係傾刑兄啓圭珪型契形径恵慶慧憩掲携敬景桂渓畦稽系経継繋罫茎荊蛍計詣警軽頚鶏芸迎鯨�劇戟撃激隙桁傑欠決潔穴結血訣月件倹倦健兼券剣喧圏堅嫌建憲懸拳捲検権牽犬献研硯絹県肩見謙賢軒遣鍵険顕験鹸元原厳幻弦減源玄現絃舷言諺限乎個古呼固姑孤己庫弧戸故枯湖狐糊袴股胡菰虎誇跨鈷雇顧鼓五互伍午呉吾娯後御悟梧檎瑚碁語誤護醐乞鯉交佼侯候倖光公功効勾厚口向后喉坑垢好孔孝宏工巧巷幸広庚康弘恒慌抗拘控攻昂晃更杭校梗構江洪浩港溝甲皇硬稿糠紅紘絞綱耕考肯肱腔膏航荒行衡講貢購郊酵鉱砿鋼閤降�項香高鴻剛劫号合壕拷濠豪轟麹克刻告国穀酷鵠黒獄漉腰甑忽惚骨狛込此頃今困坤墾婚恨懇昏昆根梱混痕紺艮魂些佐叉唆嵯左差査沙瑳砂詐鎖裟坐座挫債催再最哉塞妻宰彩才採栽歳済災采犀砕砦祭斎細菜裁載際剤在材罪財冴坂阪堺榊肴咲崎埼碕鷺作削咋搾昨朔柵窄策索錯桜鮭笹匙冊刷察拶撮擦札殺薩雑皐鯖捌錆鮫皿晒三傘参山惨撒散桟燦珊産算纂蚕讃賛酸餐斬暫残仕仔伺使刺司史嗣四士始姉姿子屍市師志思指支孜斯施旨枝止�死氏獅祉私糸紙紫肢脂至視詞詩試誌諮資賜雌飼歯事似侍児字寺慈持時次滋治爾璽痔磁示而耳自蒔辞汐鹿式識鴫竺軸宍雫七叱執失嫉室悉湿漆疾質実蔀篠偲柴芝屡蕊縞舎写射捨赦斜煮社紗者謝車遮蛇邪借勺尺杓灼爵酌釈錫若寂弱惹主取守手朱殊狩珠種腫趣酒首儒受呪寿授樹綬需囚収周宗就州修愁拾洲秀秋終繍習臭舟蒐衆襲讐蹴輯週酋酬集醜什住充十従戎柔汁渋獣縦重銃叔夙宿淑祝縮粛塾熟出術述俊峻春瞬竣舜駿准循旬楯殉淳�準潤盾純巡遵醇順処初所暑曙渚庶緒署書薯藷諸助叙女序徐恕鋤除傷償勝匠升召哨商唱嘗奨妾娼宵将小少尚庄床廠彰承抄招掌捷昇昌昭晶松梢樟樵沼消渉湘焼焦照症省硝礁祥称章笑粧紹肖菖蒋蕉衝裳訟証詔詳象賞醤鉦鍾鐘障鞘上丈丞乗冗剰城場壌嬢常情擾条杖浄状畳穣蒸譲醸錠嘱埴飾拭植殖燭織職色触食蝕辱尻伸信侵唇娠寝審心慎振新晋森榛浸深申疹真神秦紳臣芯薪親診身辛進針震人仁刃塵壬尋甚尽腎訊迅陣靭笥諏須酢図厨�逗吹垂帥推水炊睡粋翠衰遂酔錐錘随瑞髄崇嵩数枢趨雛据杉椙菅頗雀裾澄摺寸世瀬畝是凄制勢姓征性成政整星晴棲栖正清牲生盛精聖声製西誠誓請逝醒青静斉税脆隻席惜戚斥昔析石積籍績脊責赤跡蹟碩切拙接摂折設窃節説雪絶舌蝉仙先千占宣専尖川戦扇撰栓栴泉浅洗染潜煎煽旋穿箭線繊羨腺舛船薦詮賎践選遷銭銑閃鮮前善漸然全禅繕膳糎噌塑岨措曾曽楚狙疏疎礎祖租粗素組蘇訴阻遡鼠僧創双叢倉喪壮奏爽宋層匝惣想捜掃挿掻�操早曹巣槍槽漕燥争痩相窓糟総綜聡草荘葬蒼藻装走送遭鎗霜騒像増憎臓蔵贈造促側則即息捉束測足速俗属賊族続卒袖其揃存孫尊損村遜他多太汰詑唾堕妥惰打柁舵楕陀駄騨体堆対耐岱帯待怠態戴替泰滞胎腿苔袋貸退逮隊黛鯛代台大第醍題鷹滝瀧卓啄宅托択拓沢濯琢託鐸濁諾茸凧蛸只叩但達辰奪脱巽竪辿棚谷狸鱈樽誰丹単嘆坦担探旦歎淡湛炭短端箪綻耽胆蛋誕鍛団壇弾断暖檀段男談値知地弛恥智池痴稚置致蜘遅馳築畜竹筑蓄�逐秩窒茶嫡着中仲宙忠抽昼柱注虫衷註酎鋳駐樗瀦猪苧著貯丁兆凋喋寵帖帳庁弔張彫徴懲挑暢朝潮牒町眺聴脹腸蝶調諜超跳銚長頂鳥勅捗直朕沈珍賃鎮陳津墜椎槌追鎚痛通塚栂掴槻佃漬柘辻蔦綴鍔椿潰坪壷嬬紬爪吊釣鶴亭低停偵剃貞呈堤定帝底庭廷弟悌抵挺提梯汀碇禎程締艇訂諦蹄逓邸鄭釘鼎泥摘擢敵滴的笛適鏑溺哲徹撤轍迭鉄典填天展店添纏甜貼転顛点伝殿澱田電兎吐堵塗妬屠徒斗杜渡登菟賭途都鍍砥砺努度土奴怒倒党冬�凍刀唐塔塘套宕島嶋悼投搭東桃梼棟盗淘湯涛灯燈当痘祷等答筒糖統到董蕩藤討謄豆踏逃透鐙陶頭騰闘働動同堂導憧撞洞瞳童胴萄道銅峠鴇匿得徳涜特督禿篤毒独読栃橡凸突椴届鳶苫寅酉瀞噸屯惇敦沌豚遁頓呑曇鈍奈那内乍凪薙謎灘捺鍋楢馴縄畷南楠軟難汝二尼弐迩匂賑肉虹廿日乳入如尿韮任妊忍認濡禰祢寧葱猫熱年念捻撚燃粘乃廼之埜嚢悩濃納能脳膿農覗蚤巴把播覇杷波派琶破婆罵芭馬俳廃拝排敗杯盃牌背肺輩配倍培媒梅�楳煤狽買売賠陪這蝿秤矧萩伯剥博拍柏泊白箔粕舶薄迫曝漠爆縛莫駁麦函箱硲箸肇筈櫨幡肌畑畠八鉢溌発醗髪伐罰
抜筏閥鳩噺塙蛤隼伴判半反叛帆搬斑板氾汎版犯班畔繁般藩販範釆煩頒飯挽晩番盤磐蕃蛮匪卑否妃庇彼悲扉批披斐比泌疲皮碑秘緋罷肥被誹費避非飛樋簸備尾微枇毘琵眉美鼻柊稗匹疋髭彦膝菱肘弼必畢筆逼桧姫媛紐百謬俵彪標氷漂瓢票表評豹廟描病秒苗錨鋲蒜蛭鰭品彬斌浜瀕貧賓頻敏瓶不付埠夫婦富冨布府怖扶敷�斧普浮父符腐膚芙譜負賦赴阜附侮撫武舞葡蕪部封楓風葺蕗伏副復幅服福腹複覆淵弗払沸仏物鮒分吻噴墳憤扮焚奮粉糞紛雰文聞丙併兵塀幣平弊柄並蔽閉陛米頁僻壁癖碧別瞥蔑箆偏変片篇編辺返遍便勉娩弁鞭保舗鋪圃捕歩甫補輔穂募墓慕戊暮母簿菩倣俸包呆報奉宝峰峯崩庖抱捧放方朋法泡烹砲縫胞芳萌蓬蜂褒訪豊邦鋒飽鳳鵬乏亡傍剖坊妨帽忘忙房暴望某棒冒紡肪膨謀貌貿鉾防吠頬北僕卜墨撲朴牧睦穆釦勃没殆堀幌奔本翻凡盆�摩磨魔麻埋妹昧枚毎哩槙幕膜枕鮪柾鱒桝亦俣又抹末沫迄侭繭麿万慢満漫蔓味未魅巳箕岬密蜜湊蓑稔脈妙粍民眠務夢無牟矛霧鵡椋婿娘冥名命明盟迷銘鳴姪牝滅免棉綿緬面麺摸模茂妄孟毛猛盲網耗蒙儲木黙目杢勿餅尤戻籾貰問悶紋門匁也冶夜爺耶野弥矢厄役約薬訳躍靖柳薮鑓愉愈油癒諭輸唯佑優勇友宥幽悠憂揖有柚湧涌猶猷由祐裕誘遊邑郵雄融夕予余与誉輿預傭幼妖容庸揚揺擁曜楊様洋溶熔用窯羊耀葉蓉要謡踊遥陽養慾抑欲�沃浴翌翼淀羅螺裸来莱頼雷洛絡落酪乱卵嵐欄濫藍蘭覧利吏履李梨理璃痢裏裡里離陸律率立葎掠略劉流溜琉留硫粒隆竜龍侶慮旅虜了亮僚両凌寮料梁涼猟療瞭稜糧良諒遼量陵領力緑倫厘林淋燐琳臨輪隣鱗麟瑠塁涙累類令伶例冷励嶺怜玲礼苓鈴隷零霊麗齢暦歴列劣烈裂廉恋憐漣煉簾練聯蓮連錬呂魯櫓炉賂路露労婁廊弄朗楼榔浪漏牢狼篭老聾蝋郎六麓禄肋録論倭和話歪賄脇惑枠鷲亙亘鰐詫藁蕨椀湾碗腕��������������������������������������������弌丐丕个丱丶丼丿乂乖乘亂亅豫亊舒弍于亞亟亠亢亰亳亶从仍仄仆仂仗仞仭仟价伉佚估佛佝佗佇佶侈侏侘佻佩佰侑佯來侖儘俔俟俎俘俛俑俚俐俤俥倚倨倔倪倥倅伜俶倡倩倬俾俯們倆偃假會偕偐偈做偖偬偸傀傚傅傴傲僉僊傳僂僖僞僥僭僣僮價僵儉儁儂儖儕儔儚儡儺儷儼儻儿兀兒兌兔兢竸兩兪兮冀冂囘册冉冏冑冓冕冖冤冦冢冩冪冫决冱冲冰况冽凅凉凛几處凩凭�凰凵凾刄刋刔刎刧刪刮刳刹剏剄剋剌剞剔剪剴剩剳剿剽劍劔劒剱劈劑辨辧劬劭劼劵勁勍勗勞勣勦飭勠勳勵勸勹匆匈甸匍匐匏匕匚匣匯匱匳匸區卆卅丗卉卍凖卞卩卮夘卻卷厂厖厠厦厥厮厰厶參簒雙叟曼燮叮叨叭叺吁吽呀听吭吼吮吶吩吝呎咏呵咎呟呱呷呰咒呻咀呶咄咐咆哇咢咸咥咬哄哈咨咫哂咤咾咼哘哥哦唏唔哽哮哭哺哢唹啀啣啌售啜啅啖啗唸唳啝喙喀咯喊喟啻啾喘喞單啼喃喩喇喨嗚嗅嗟嗄嗜嗤嗔嘔嗷嘖嗾嗽嘛嗹噎噐營嘴嘶嘲嘸�噫噤嘯噬噪嚆嚀嚊嚠嚔嚏嚥嚮嚶嚴囂嚼囁囃囀囈囎囑囓囗囮囹圀囿圄圉圈國圍圓團圖嗇圜圦圷圸坎圻址坏坩埀垈坡坿垉垓垠垳垤垪垰埃埆埔埒埓堊埖埣堋堙堝塲堡塢塋塰毀塒堽塹墅墹墟墫墺壞墻墸墮壅壓壑壗壙壘壥壜壤壟壯壺壹壻壼壽夂夊夐夛梦夥夬夭夲夸夾竒奕奐奎奚奘奢奠奧奬奩奸妁妝佞侫妣妲姆姨姜妍姙姚娥娟娑娜娉娚婀婬婉娵娶婢婪媚媼媾嫋嫂媽嫣嫗嫦嫩嫖嫺嫻嬌嬋嬖嬲嫐嬪嬶嬾孃孅孀孑孕孚孛孥孩孰孳孵學斈孺宀�它宦宸寃寇寉寔寐寤實寢寞寥寫寰寶寳尅將專對尓尠尢尨尸尹屁屆屎屓屐屏孱屬屮乢屶屹岌岑岔妛岫岻岶岼岷峅岾峇峙峩峽峺峭嶌峪崋崕崗嵜崟崛崑崔崢崚崙崘嵌嵒嵎嵋嵬嵳嵶嶇嶄嶂嶢嶝嶬嶮嶽嶐嶷嶼巉巍巓巒巖巛巫已巵帋帚帙帑帛帶帷幄幃幀幎幗幔幟幢幤幇幵并幺麼广庠廁廂廈廐廏廖廣廝廚廛廢廡廨廩廬廱廳廰廴廸廾弃弉彝彜弋弑弖弩弭弸彁彈彌彎弯彑彖彗彙彡彭彳彷徃徂彿徊很徑徇從徙徘徠徨徭徼忖忻忤忸忱忝悳忿怡恠�怙怐怩怎怱怛怕怫怦怏怺恚恁恪恷恟恊恆恍恣恃恤恂恬恫恙悁悍惧悃悚悄悛悖悗悒悧悋惡悸惠惓悴忰悽惆悵惘慍愕愆惶惷愀惴惺愃愡惻惱愍愎慇愾愨愧慊愿愼愬愴愽慂慄慳慷慘慙慚慫慴慯慥慱慟慝慓慵憙憖憇憬憔憚憊憑憫憮懌懊應懷懈懃懆憺懋罹懍懦懣懶懺懴懿懽懼懾戀戈戉戍戌戔戛戞戡截戮戰戲戳扁扎扞扣扛扠扨扼抂抉找抒抓抖拔抃抔拗拑抻拏拿拆擔拈拜拌拊拂拇抛拉挌拮拱挧挂挈拯拵捐挾捍搜捏掖掎掀掫捶掣掏掉掟掵捫�捩掾揩揀揆揣揉插揶揄搖搴搆搓搦搶攝搗搨搏摧摯摶摎攪撕撓撥撩撈撼據擒擅擇撻擘擂擱擧舉擠擡抬擣擯攬擶擴擲擺攀擽攘攜攅攤攣攫攴攵攷收攸畋效敖敕敍敘敞敝敲數斂斃變斛斟斫斷旃旆旁旄旌旒旛旙无旡旱杲昊昃旻杳昵昶昴昜晏晄晉晁晞晝晤晧晨晟晢晰暃暈暎暉暄暘暝曁暹曉暾暼曄暸曖曚曠昿曦曩曰曵曷朏朖朞朦朧霸朮朿朶杁朸朷杆杞杠杙杣杤枉杰枩杼杪枌枋枦枡枅枷柯枴柬枳柩枸柤柞柝柢柮枹柎柆柧檜栞框栩桀桍栲桎�梳栫桙档桷桿梟梏梭梔條梛梃檮梹桴梵梠梺椏梍桾椁棊椈棘椢椦棡椌棍棔棧棕椶椒椄棗棣椥棹棠棯椨椪椚椣椡棆楹楷楜楸楫楔楾楮椹楴椽楙椰楡楞楝榁楪榲榮槐榿槁槓榾槎寨槊槝榻槃榧樮榑榠榜榕榴槞槨樂樛槿權槹槲槧樅榱樞槭樔槫樊樒櫁樣樓橄樌橲樶橸橇橢橙橦橈樸樢檐檍檠檄檢檣檗蘗檻櫃櫂檸檳檬櫞櫑櫟檪櫚櫪櫻欅蘖櫺欒欖鬱欟欸欷盜欹飮歇歃歉歐歙歔歛歟歡歸歹歿殀殄殃殍殘殕殞殤殪殫殯殲殱殳殷殼毆毋毓毟毬毫毳毯�麾氈氓气氛氤氣汞汕汢汪沂沍沚沁沛汾汨汳沒沐泄泱泓沽泗泅泝沮沱沾沺泛泯泙泪洟衍洶洫洽洸洙洵洳洒洌浣涓浤浚浹浙涎涕濤涅淹渕渊涵淇淦涸淆淬淞淌淨淒淅淺淙淤淕淪淮渭湮渮渙湲湟渾渣湫渫湶湍渟湃渺湎渤滿渝游溂溪溘滉溷滓溽溯滄溲滔滕溏溥滂溟潁漑灌滬滸滾漿滲漱滯漲滌漾漓滷澆潺潸澁澀潯潛濳潭澂潼潘澎澑濂潦澳澣澡澤澹濆澪濟濕濬濔濘濱濮濛瀉瀋濺瀑瀁瀏濾瀛瀚潴瀝瀘瀟瀰瀾瀲灑灣炙炒炯烱炬炸炳炮烟烋烝�烙焉烽焜焙煥煕熈煦煢煌煖煬熏燻熄熕熨熬燗熹熾燒燉燔燎燠燬燧燵燼燹燿爍爐爛爨爭爬爰爲爻爼爿牀牆牋牘牴牾犂犁犇犒犖犢犧犹犲狃狆狄狎狒狢狠狡狹狷倏猗猊猜猖猝猴猯猩猥猾獎獏默獗獪獨獰獸獵獻獺珈玳珎玻珀珥珮珞璢琅瑯琥珸琲琺瑕琿瑟瑙瑁瑜瑩瑰瑣瑪瑶瑾璋璞璧瓊瓏瓔珱瓠瓣瓧瓩瓮瓲瓰瓱瓸瓷甄甃甅甌甎甍甕甓甞甦甬甼畄畍畊畉畛畆畚畩畤畧畫畭畸當疆疇畴疊疉疂疔疚疝疥疣痂疳痃疵疽疸疼疱痍痊痒痙痣痞痾痿�痼瘁痰痺痲痳瘋瘍瘉瘟瘧瘠瘡瘢瘤瘴瘰瘻癇癈癆癜癘癡癢癨癩癪癧癬癰癲癶癸發皀皃皈皋皎皖皓皙皚皰皴皸皹皺盂盍盖盒盞盡盥盧盪蘯盻眈眇眄眩眤眞眥眦眛眷眸睇睚睨睫睛睥睿睾睹瞎瞋瞑瞠瞞瞰瞶瞹瞿瞼瞽瞻矇矍矗矚矜矣矮矼砌砒礦砠礪硅碎硴碆硼碚碌碣碵碪碯磑磆磋磔碾碼磅磊磬磧磚磽磴礇礒礑礙礬礫祀祠祗祟祚祕祓祺祿禊禝禧齋禪禮禳禹禺秉秕秧秬秡秣稈稍稘稙稠稟禀稱稻稾稷穃穗穉穡穢穩龝穰穹穽窈窗窕窘窖窩竈窰�窶竅竄窿邃竇竊竍竏竕竓站竚竝竡竢竦竭竰笂笏笊笆笳笘笙笞笵笨笶筐筺笄筍笋筌筅筵筥筴筧筰筱筬筮箝箘箟箍箜箚箋箒箏筝箙篋篁篌篏箴篆篝篩簑簔篦篥籠簀簇簓篳篷簗簍篶簣簧簪簟簷簫簽籌籃籔籏籀籐籘籟籤籖籥籬籵粃粐粤粭粢粫粡粨粳粲粱粮粹粽糀糅糂糘糒糜糢鬻糯糲糴糶糺紆紂紜紕紊絅絋紮紲紿紵絆絳絖絎絲絨絮絏絣經綉絛綏絽綛綺綮綣綵緇綽綫總綢綯緜綸綟綰緘緝緤緞緻緲緡縅縊縣縡縒縱縟縉縋縢繆繦縻縵縹繃縷�縲縺繧繝繖繞繙繚繹繪繩繼繻纃緕繽辮繿纈纉續纒纐纓纔纖纎纛纜缸缺罅罌罍罎罐网罕罔罘罟罠罨罩罧罸羂羆羃羈羇羌羔羞羝羚羣羯羲羹羮羶羸譱翅翆翊翕翔翡翦翩翳翹飜耆耄耋耒耘耙耜耡耨耿耻聊聆聒聘聚聟聢聨聳聲聰聶聹聽聿肄肆肅肛肓肚肭冐肬胛胥胙胝胄胚胖脉胯胱脛脩脣脯腋隋腆脾腓腑胼腱腮腥腦腴膃膈膊膀膂膠膕膤膣腟膓膩膰膵膾膸膽臀臂膺臉臍臑臙臘臈臚臟臠臧臺臻臾舁舂舅與舊舍舐舖舩舫舸舳艀艙艘艝艚艟艤�艢艨艪艫舮艱艷艸艾芍芒芫芟芻芬苡苣苟苒苴苳苺莓范苻苹苞茆苜茉苙茵茴茖茲茱荀茹荐荅茯茫茗茘莅莚莪莟莢莖茣莎莇莊荼莵荳荵莠莉莨菴萓菫菎菽萃菘萋菁菷萇菠菲萍萢萠莽萸蔆菻葭萪萼蕚蒄葷葫蒭葮蒂葩葆萬葯葹萵蓊葢蒹蒿蒟蓙蓍蒻蓚蓐蓁蓆蓖蒡蔡蓿蓴蔗蔘蔬蔟蔕蔔蓼蕀蕣蕘蕈蕁蘂蕋蕕薀薤薈薑薊薨蕭薔薛藪薇薜蕷蕾薐藉薺藏薹藐藕藝藥藜藹蘊蘓蘋藾藺蘆蘢蘚蘰蘿虍乕虔號虧虱蚓蚣蚩蚪蚋蚌蚶蚯蛄蛆蚰蛉蠣蚫蛔蛞蛩蛬�蛟蛛蛯蜒蜆蜈蜀蜃蛻蜑蜉蜍蛹蜊蜴蜿蜷蜻蜥蜩蜚蝠蝟蝸蝌蝎蝴蝗蝨蝮蝙蝓蝣蝪蠅螢螟螂螯蟋螽蟀蟐雖螫蟄螳蟇蟆螻蟯蟲蟠蠏蠍蟾蟶蟷蠎蟒蠑蠖蠕蠢蠡蠱蠶蠹蠧蠻衄衂衒衙衞衢衫袁衾袞衵衽袵衲袂袗袒袮袙袢袍袤袰袿袱裃裄裔裘裙裝裹褂裼裴裨裲褄褌褊褓襃褞褥褪褫襁襄褻褶褸襌褝襠襞襦襤襭襪襯襴襷襾覃覈覊覓覘覡覩覦覬覯覲覺覽覿觀觚觜觝觧觴觸訃訖訐訌訛訝訥訶詁詛詒詆詈詼詭詬詢誅誂誄誨誡誑誥誦誚誣諄諍諂諚諫諳諧�諤諱謔諠諢諷諞諛謌謇謚諡謖謐謗謠謳鞫謦謫謾謨譁譌譏譎證譖譛譚譫譟譬譯譴譽讀讌讎讒讓讖讙讚谺豁谿豈豌豎豐豕豢豬豸豺貂貉貅貊貍貎貔豼貘戝貭貪貽貲貳貮貶賈賁賤賣賚賽賺賻贄贅贊贇贏贍贐齎贓賍贔贖赧赭赱赳趁趙跂趾趺跏跚跖跌跛跋跪跫跟跣跼踈踉跿踝踞踐踟蹂踵踰踴蹊蹇蹉蹌蹐蹈蹙蹤蹠踪蹣蹕蹶蹲蹼躁躇躅躄躋躊躓躑躔躙躪躡躬躰軆躱躾軅軈軋軛軣軼軻軫軾輊輅輕輒輙輓輜輟輛輌輦輳輻輹轅轂輾轌轉轆轎轗轜�轢轣轤辜辟辣辭辯辷迚迥迢迪迯
邇迴逅迹迺逑逕逡逍逞逖逋逧逶逵逹迸遏遐遑遒逎遉逾遖遘遞遨遯遶隨遲邂遽邁邀邊邉邏邨邯邱邵郢郤扈郛鄂鄒鄙鄲鄰酊酖酘酣酥酩酳酲醋醉醂醢醫醯醪醵醴醺釀釁釉釋釐釖釟釡釛釼釵釶鈞釿鈔鈬鈕鈑鉞鉗鉅鉉鉤鉈銕鈿鉋鉐銜銖銓銛鉚鋏銹銷鋩錏鋺鍄錮錙錢錚錣錺錵錻鍜鍠鍼鍮鍖鎰鎬鎭鎔鎹鏖鏗鏨鏥鏘鏃鏝鏐鏈鏤鐚鐔鐓鐃鐇鐐鐶鐫鐵鐡鐺鑁鑒鑄鑛鑠鑢鑞鑪鈩鑰鑵鑷鑽鑚鑼鑾钁鑿閂閇閊閔閖閘閙�閠閨閧閭閼閻閹閾闊濶闃闍闌闕闔闖關闡闥闢阡阨阮阯陂陌陏陋陷陜陞陝陟陦陲陬隍隘隕隗險隧隱隲隰隴隶隸隹雎雋雉雍襍雜霍雕雹霄霆霈霓霎霑霏霖霙霤霪霰霹霽霾靄靆靈靂靉靜靠靤靦靨勒靫靱靹鞅靼鞁靺鞆鞋鞏鞐鞜鞨鞦鞣鞳鞴韃韆韈韋韜韭齏韲竟韶韵頏頌頸頤頡頷頽顆顏顋顫顯顰顱顴顳颪颯颱颶飄飃飆飩飫餃餉餒餔餘餡餝餞餤餠餬餮餽餾饂饉饅饐饋饑饒饌饕馗馘馥馭馮馼駟駛駝駘駑駭駮駱駲駻駸騁騏騅駢騙騫騷驅驂驀驃�騾驕驍驛驗驟驢驥驤驩驫驪骭骰骼髀髏髑髓體髞髟髢髣髦髯髫髮髴髱髷髻鬆鬘鬚鬟鬢鬣鬥鬧鬨鬩鬪鬮鬯鬲魄魃魏魍魎魑魘魴鮓鮃鮑鮖鮗鮟鮠鮨鮴鯀鯊鮹鯆鯏鯑鯒鯣鯢鯤鯔鯡鰺鯲鯱鯰鰕鰔鰉鰓鰌鰆鰈鰒鰊鰄鰮鰛鰥鰤鰡鰰鱇鰲鱆鰾鱚鱠鱧鱶鱸鳧鳬鳰鴉鴈鳫鴃鴆鴪鴦鶯鴣鴟鵄鴕鴒鵁鴿鴾鵆鵈鵝鵞鵤鵑鵐鵙鵲鶉鶇鶫鵯鵺鶚鶤鶩鶲鷄鷁鶻鶸鶺鷆鷏鷂鷙鷓鷸鷦鷭鷯鷽鸚鸛鸞鹵鹹鹽麁麈麋麌麒麕麑麝麥麩麸麪麭靡黌黎黏黐黔黜點黝黠黥黨黯�黴黶黷黹黻黼黽鼇鼈皷鼕鼡鼬鼾齊齒齔齣齟齠齡齦齧齬齪齷齲齶龕龜龠堯槇遙瑤凜熙����������������������������������������������������������������������������������������纊褜鍈銈蓜俉炻昱棈鋹曻彅丨仡仼伀伃伹佖侒侊侚侔俍偀倢俿倞偆偰偂傔僴僘兊兤冝冾凬刕劜劦勀勛匀匇匤卲厓厲叝﨎咜咊咩哿喆坙坥垬埈埇﨏�塚增墲夋奓奛奝奣妤妺孖寀甯寘寬尞岦岺峵崧嵓﨑嵂嵭嶸嶹巐弡弴彧德忞恝悅悊惞惕愠惲愑愷愰憘戓抦揵摠撝擎敎昀昕昻昉昮昞昤晥晗晙晴晳暙暠暲暿曺朎朗杦枻桒柀栁桄棏﨓楨﨔榘槢樰橫橆橳橾櫢櫤毖氿汜沆汯泚洄涇浯涖涬淏淸淲淼渹湜渧渼溿澈澵濵瀅瀇瀨炅炫焏焄煜煆煇凞燁燾犱犾猤猪獷玽珉珖珣珒琇珵琦琪琩琮瑢璉璟甁畯皂皜皞皛皦益睆劯砡硎硤硺礰礼神祥禔福禛竑竧靖竫箞精絈絜綷綠緖繒罇羡羽茁荢荿菇菶葈蒴蕓蕙�蕫﨟薰蘒﨡蠇裵訒訷詹誧誾諟諸諶譓譿賰賴贒赶﨣軏﨤逸遧郞都鄕鄧釚釗釞釭釮釤釥鈆鈐鈊鈺鉀鈼鉎鉙鉑鈹鉧銧鉷鉸鋧鋗鋙鋐﨧鋕鋠鋓錥錡鋻﨨錞鋿錝錂鍰鍗鎤鏆鏞鏸鐱鑅鑈閒隆﨩隝隯霳霻靃靍靏靑靕顗顥飯飼餧館馞驎髙髜魵魲鮏鮱鮻鰀鵰鵫鶴鸙黑��ⅰⅱⅲⅳⅴⅵⅶⅷⅸⅹ¬¦'"ⅰⅱⅲⅳⅴⅵⅶⅷⅸⅹⅠⅡⅢⅣⅤⅥⅦⅧⅨⅩ¬¦'"㈱№℡∵纊褜鍈銈蓜俉炻昱棈鋹曻彅丨仡仼伀伃伹佖侒侊侚侔俍偀倢俿倞偆偰偂傔僴僘兊�兤冝冾凬刕劜劦勀勛匀匇匤卲厓厲叝﨎咜咊咩哿喆坙坥垬埈埇﨏塚增墲夋奓奛奝奣妤妺孖寀甯寘寬尞岦岺峵崧嵓﨑嵂嵭嶸嶹巐弡弴彧德忞恝悅悊惞惕愠惲愑愷愰憘戓抦揵摠撝擎敎昀昕昻昉昮昞昤晥晗晙晴晳暙暠暲暿曺朎朗杦枻桒柀栁桄棏﨓楨﨔榘槢樰橫橆橳橾櫢櫤毖氿汜沆汯泚洄涇浯涖涬淏淸淲淼渹湜渧渼溿澈澵濵瀅瀇瀨炅炫焏焄煜煆煇凞燁燾犱犾猤猪獷玽珉珖珣珒琇珵琦琪琩琮瑢璉璟甁畯皂皜皞皛皦益睆劯砡硎硤硺礰礼神�祥禔福禛竑竧靖竫箞精絈絜綷綠緖繒罇羡羽茁荢荿菇菶葈蒴蕓蕙蕫﨟薰蘒﨡蠇裵訒訷詹誧誾諟諸諶譓譿賰賴贒赶﨣軏﨤逸遧郞都鄕鄧釚釗釞釭釮釤釥鈆鈐鈊鈺鉀鈼鉎鉙鉑鈹鉧銧鉷鉸鋧鋗鋙鋐﨧鋕鋠鋓錥錡鋻﨨錞鋿錝錂鍰鍗鎤鏆鏞鏸鐱鑅鑈閒隆﨩隝隯霳霻靃靍靏靑靕顗顥飯飼餧館馞驎髙髜魵魲鮏鮱鮻鰀鵰鵫鶴鸙黑���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_ibm437.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_ibm437.cp.go new file mode 100644 index 000000000..ecd0631ef --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_ibm437.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("ibm437.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007fÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞∅∈∩≡±≥≤⌠⌡÷≈°•·√ⁿ²∎\u00a0") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_ibm850.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_ibm850.cp.go new file mode 100644 index 000000000..ea833fa4c --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_ibm850.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("ibm850.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007fÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø׃áíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈıÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´\u00ad±‗¾¶§÷¸°¨·¹³²∎\u00a0") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_ibm866.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_ibm866.cp.go new file mode 100644 index 000000000..338c99c93 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_ibm866.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("ibm866.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007fАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп������������������������������������������������рстуфхцчшщъыьэюяЁё��������������") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-1.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-1.cp.go new file mode 100644 index 000000000..dab316145 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-1.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-1.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0¡¢£¤¥¦§¨©ª«¬\u00ad®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-10.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-10.cp.go new file mode 100644 index 000000000..252aef1e1 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-10.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-10.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0ĄĒĢĪĨĶ§ĻĐŠŦŽ\u00adŪŊ°ąēģīĩķ·ļĐšŧž—ūŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎÏÐŅŌÓÔÕÖŨØŲÚÛÜÝÞßāáâãäåæįčéęëėíîïðņōóôõöũøųúûüýþĸ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-15.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-15.cp.go new file mode 100644 index 000000000..26e0764ac --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-15.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-15.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0¡¢£€¥Š§š©ª«¬\u00ad®¯°±²³Žµ¶·ž¹º»ŒœŸ¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-2.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-2.cp.go new file mode 100644 index 000000000..d8a5f95ed --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-2.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-2.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0Ą˘Ł¤ĽŚ§¨ŠŞŤŹ\u00adŽŻ°ą˛ł´ľśˇ¸šşťź˝žżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-3.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-3.cp.go new file mode 100644 index 000000000..d632e8707 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-3.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-3.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0Ħ˘£¤�Ĥ§¨İŞĞĴ\u00ad�Ż°ħ²³´µĥ·¸ışğĵ½�żÀÁÂ�ÄĊĈÇÈÉÊËÌÍÎÏ�ÑÒÓÔĠÖ×ĜÙÚÛÜŬŜßàáâ�äċĉçèéêëìíîï�ñòóôġö÷ĝùúûüŭŝ˙") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-4.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-4.cp.go new file mode 100644 index 000000000..f9874c17f --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-4.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-4.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0ĄĸŖ¤ĨĻ§¨ŠĒĢŦ\u00adŽ¯°ą˛ŗ´ĩļˇ¸šēģŧŊžŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎĪĐŅŌĶÔÕÖ×ØŲÚÛÜŨŪßāáâãäåæįčéęëėíîīđņōķôõö÷øųúûüũū˙") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-5.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-5.cp.go new file mode 100644 index 000000000..0ab027a0a --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-5.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-5.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0ЁЂЃЄЅІЇЈЉЊЋЌ\u00adЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђѓєѕіїјљњћќ§ўџ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-6.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-6.cp.go new file mode 100644 index 000000000..b1eaf9677 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-6.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-6.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0���¤�������،\u00ad�������������؛���؟�ءآأؤإئابةتثجحخدذرزسشصضطظعغ�����ـفقكلمنهوىيًٌٍَُِّْ�������������") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-7.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-7.cp.go new file mode 100644 index 000000000..1057692b8 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-7.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-7.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0‘’£��¦§¨©�«¬\u00ad�―°±²³΄΅Ά·ΈΉΊ»Ό½ΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡ�ΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύώ�") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-8.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-8.cp.go new file mode 100644 index 000000000..e0e035aaa --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-8.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-8.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0�¢£¤¥¦§¨©×«¬\u00ad®‾°±²³´µ¶·¸¹÷»¼½¾��������������������������������‗אבגדהוזחטיךכלםמןנסעףפץצקרשת�����") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-9.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-9.cp.go new file mode 100644 index 000000000..1c1a5bc68 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-9.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-9.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0¡¢£¤¥¦§¨©ª«¬\u00ad®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖ×ØÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüışÿ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_jisx0201kana.dat.go b/vendor/github.com/rogpeppe/go-charset/data/data_jisx0201kana.dat.go new file mode 100644 index 000000000..a26c174db --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_jisx0201kana.dat.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("jisx0201kana.dat", func() (io.ReadCloser, error) { + r := strings.NewReader("。「」、・ヲァィゥェォャュョッーアイウエオカキクケコサシスセソタチツテトナニヌネノハヒフヘホマミムメモヤユヨラリルレロワン゙゚") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_koi8-r.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_koi8-r.cp.go new file mode 100644 index 000000000..831fae5c4 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_koi8-r.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("koi8-r.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥\u00a0⌡°²·÷═║╒ё╓╔╕╖╗╘╙╚╛╜╝╞╟╠╡Ё╢╣╤╥╦╧╨╩╪╫╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_windows-1250.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_windows-1250.cp.go new file mode 100644 index 000000000..5147af073 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_windows-1250.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("windows-1250.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f€�‚�„…†‡�‰Š‹ŚŤŽŹ�‘’“”•–—�™š›śťžź\u00a0ˇ˘Ł¤Ą¦§¨©Ş«¬\u00ad®Ż°±˛ł´µ¶·¸ąş»Ľ˝ľżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_windows-1251.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_windows-1251.cp.go new file mode 100644 index 000000000..2722b19b8 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_windows-1251.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("windows-1251.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007fЂЃ‚ѓ„…†‡�‰Љ‹ЊЌЋЏђ‘’“”•–—�™љ›њќћџ\u00a0ЎўЈ¤Ґ¦§Ё©Є«¬\u00ad®Ї°±Ііґµ¶·ё№є»јЅѕїАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_windows-1252.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_windows-1252.cp.go new file mode 100644 index 000000000..bf3b67e02 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_windows-1252.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("windows-1252.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f€�‚ƒ„…†‡ˆ‰Š‹Œ�Ž��‘’“”•–—˜™š›œ�žŸ\u00a0¡¢£¤¥¦§¨©ª«¬\u00ad®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/doc.go b/vendor/github.com/rogpeppe/go-charset/data/doc.go new file mode 100644 index 000000000..630a83d53 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/doc.go @@ -0,0 +1,6 @@ +// The data package embeds all the charset +// data files as Go data. It registers the data with the charset +// package as a side effect of its import. 
To use: +// +// import _ "code.google.com/p/go-charset" +package data diff --git a/vendor/github.com/rogpeppe/go-charset/data/generate.go b/vendor/github.com/rogpeppe/go-charset/data/generate.go new file mode 100644 index 000000000..156ee2c63 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/generate.go @@ -0,0 +1,97 @@ +// +build ignore + +// go run generate.go && go fmt + +// The generate-charset-data command generates the Go source code +// for code.google.com/p/go-charset/data from the data files +// found in code.google.com/p/go-charset/datafiles. +// It should be run in the go-charset root directory. +// The resulting Go files will need gofmt'ing. +package main + +import ( + "fmt" + "io/ioutil" + "os" + "path/filepath" + "text/template" +) + +type info struct { + Path string +} + +var tfuncs = template.FuncMap{ + "basename": func(s string) string { + return filepath.Base(s) + }, + "read": func(path string) ([]byte, error) { + return ioutil.ReadFile(path) + }, +} + +var tmpl = template.Must(template.New("").Funcs(tfuncs).Parse(` + // This file is automatically generated by generate-charset-data. + // Do not hand-edit. + + package data + import ( + "code.google.com/p/go-charset/charset" + "io" + "io/ioutil" + "strings" + ) + + func init() { + charset.RegisterDataFile({{basename .Path | printf "%q"}}, func() (io.ReadCloser, error) { + r := strings.NewReader({{read .Path | printf "%q"}}) + return ioutil.NopCloser(r), nil + }) + } +`)) + +var docTmpl = template.Must(template.New("").Funcs(tfuncs).Parse(` + // This file is automatically generated by generate-charset-data. + // Do not hand-edit. + + // The {{basename .Package}} package embeds all the charset + // data files as Go data. It registers the data with the charset + // package as a side effect of its import. To use: + // + // import _ "code.google.com/p/go-charset" + package {{basename .Package}} +`)) + +func main() { + dataDir := filepath.Join("..", "datafiles") + d, err := os.Open(dataDir) + if err != nil { + fatalf("%v", err) + } + names, err := d.Readdirnames(0) + if err != nil { + fatalf("cannot read datafiles dir: %v", err) + } + for _, name := range names { + writeFile("data_"+name+".go", tmpl, info{ + Path: filepath.Join(dataDir, name), + }) + } +} + +func writeFile(name string, t *template.Template, data interface{}) { + w, err := os.Create(name) + if err != nil { + fatalf("cannot create output file: %v", err) + } + defer w.Close() + err = t.Execute(w, data) + if err != nil { + fatalf("template execute %q: %v", name, err) + } +} + +func fatalf(f string, a ...interface{}) { + fmt.Fprintf(os.Stderr, "%s\n", fmt.Sprintf(f, a...)) + os.Exit(2) +} diff --git a/vendor/github.com/rpoletaev/supervisord/Gopkg.lock b/vendor/github.com/rpoletaev/supervisord/Gopkg.lock new file mode 100644 index 000000000..0bbb9ad99 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/Gopkg.lock @@ -0,0 +1,63 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
+ + +[[projects]] + name = "github.com/sirupsen/logrus" + packages = ["."] + revision = "a3f95b5c423586578a4e099b11a46c2479628cac" + version = "1.0.2" + +[[projects]] + name = "github.com/go-ini/ini" + packages = ["."] + revision = "20b96f641a5ea98f2f8619ff4f3e061cff4833bd" + version = "v1.28.2" + +[[projects]] + name = "github.com/gorilla/rpc" + packages = ["."] + revision = "22c016f3df3febe0c1f6727598b6389507e03a18" + version = "v1.1.0" + +[[projects]] + name = "github.com/jessevdk/go-flags" + packages = ["."] + revision = "96dc06278ce32a0e9d957d590bb987c81ee66407" + version = "v1.3.0" + +[[projects]] + branch = "master" + name = "github.com/kardianos/osext" + packages = ["."] + revision = "ae77be60afb1dcacde03767a8c37337fad28ac14" + +[[projects]] + branch = "master" + name = "github.com/rpoletaev/gorilla-xmlrpc" + packages = ["xml"] + revision = "d37a0d21ebabd0d9b59cd868499d6af4673ee9fe" + +[[projects]] + branch = "master" + name = "github.com/rogpeppe/go-charset" + packages = ["charset","data"] + revision = "e9ff06f347d3f5d0013d59ed83754f0e88de10d4" + +[[projects]] + name = "github.com/sevlyar/go-daemon" + packages = ["."] + revision = "1ae26ef5036ad04968706917222a23c535673d8c" + version = "v0.1.1" + +[[projects]] + branch = "master" + name = "golang.org/x/sys" + packages = ["unix"] + revision = "c84c1ab9fd18cdd4c23dd021c10f5f46dea95e46" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + inputs-digest = "d765dd3cd60e45504031ba3cbd8f4b516e314572e0047939f23118993a985b67" + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/vendor/github.com/rpoletaev/supervisord/Gopkg.toml b/vendor/github.com/rpoletaev/supervisord/Gopkg.toml new file mode 100644 index 000000000..b9b3db7f3 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/Gopkg.toml @@ -0,0 +1,46 @@ + +# Gopkg.toml example +# +# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md +# for detailed Gopkg.toml documentation. 
+# +# required = ["github.com/user/thing/cmd/thing"] +# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"] +# +# [[constraint]] +# name = "github.com/user/project" +# version = "1.0.0" +# +# [[constraint]] +# name = "github.com/user/project2" +# branch = "dev" +# source = "github.com/myfork/project2" +# +# [[override]] +# name = "github.com/x/y" +# version = "2.4.0" + + +[[constraint]] + name = "github.com/sirupsen/logrus" + version = "1.0.2" + +[[constraint]] + name = "github.com/go-ini/ini" + version = "1.28.2" + +[[constraint]] + name = "github.com/gorilla/rpc" + version = "1.1.0" + +[[constraint]] + name = "github.com/jessevdk/go-flags" + version = "1.3.0" + +[[constraint]] + branch = "master" + name = "github.com/rpoletaev/gorilla-xmlrpc" + +[[constraint]] + name = "github.com/sevlyar/go-daemon" + version = "0.1.1" diff --git a/vendor/github.com/rpoletaev/supervisord/LICENSE b/vendor/github.com/rpoletaev/supervisord/LICENSE new file mode 100644 index 000000000..6713cd967 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Steven Ou + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/rpoletaev/supervisord/README.md b/vendor/github.com/rpoletaev/supervisord/README.md new file mode 100644 index 000000000..2d4384a5e --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/README.md @@ -0,0 +1,161 @@ +# Why this project? + +The python script supervisord is a powerful tool used by a lot of guys to manage the processes. I like the tool supervisord also. + +But this tool requires us to install the big python environment. In some situation, for example in the docker environment, the python is too big for us. + +In this project, the supervisord is re-implemented in go-lang. The compiled supervisord is very suitable for these environment that the python is not installed. + +# Compile the supervisord + +Before compiling the supervisord, make sure the go-lang is installed in your environement. 
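+
+For example, you can confirm that a Go toolchain is available before proceeding (the exact version reported will depend on your installation):
+
+```shell
+# verify that the go tool is on the PATH and print its version
+$ go version
+```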
+
+To compile the Go version of supervisord, run the following commands:
+
+```shell
+$ mkdir ~/go-supervisor
+$ export GOPATH=~/go-supervisor
+$ go get -u github.com/ochinchina/supervisord
+```
+
+# Run the supervisord
+
+After the supervisord binary is generated, create a supervisord configuration file and start supervisord like below:
+
+```shell
+$ cat supervisor.conf
+[program:test]
+command = /your/program args
+$ supervisord -c supervisor.conf
+```
+# Run as daemon
+Add the inet interface to your configuration:
+```ini
+[inet_http_server]
+port=127.0.0.1:9001
+```
+then run
+```shell
+$ supervisord -c supervisor.conf -d
+```
+To control the daemon, use the `$ supervisord ctl` subcommand; available commands are: `status`, `start`, `stop`, `shutdown`, `reload`.
+
+```shell
+$ supervisord ctl status
+$ supervisord ctl stop
+$ supervisord ctl stop all
+$ supervisord ctl start
+$ supervisord ctl start all
+$ supervisord ctl shutdown
+$ supervisord ctl reload
+$ supervisord ctl signal ...
+$ supervisord ctl signal all
+```
+
+The URL of the supervisord server used by the "supervisord ctl" subcommand is detected in the following order:
+
+- if the -s or --serverurl option is present, use this URL
+- if the -c option is present and "serverurl" is set in the "supervisorctl" section, use that "serverurl"
+- otherwise, return http://localhost:9001
+
+# Check the version
+
+The "version" command shows the current supervisord version.
+
+```shell
+$ supervisord version
+```
+
+# Supported features
+
+## http server
+
+Both the unix socket and the TCP HTTP server are supported, and basic auth is supported.
+
+The unix socket setting is in the "unix_http_server" section.
+The TCP HTTP server setting is in the "inet_http_server" section.
+
+If neither "inet_http_server" nor "unix_http_server" is configured in the configuration file, no HTTP server will be started.
+
+## supervisord information
+
+The log and pid file of the supervisord process are configured in the "supervisord" section.
+
+## program
+
+The following settings are supported in the "program:x" section:
+
+- program command
+- process name
+- numprocs
+- numprocs_start
+- autostart
+- startsecs
+- startretries
+- autorestart
+- exitcodes
+- stopsignal
+- stopwaitsecs
+- stdout_logfile
+- stdout_logfile_maxbytes
+- stdout_logfile_backups
+- redirect_stderr
+- stderr_logfile
+- stderr_logfile_maxbytes
+- stderr_logfile_backups
+- environment
+- priority
+- user
+- directory
+
+### program extends
+
+The following new keys are supported in the [program:xxx] section:
+
+- depends_on: defines the program dependency information. If program A depends on programs B and C, programs B and C will be started before program A. Example:
+
+```ini
+[program:A]
+depends_on = B, C
+
+[program:B]
+...
+[program:C]
+...
+```
+
+- user: the "user" key in the "program:xxx" section is extended to support a group with the format "user[:group]", so "user" can be configured as:
+
+```ini
+[program:xxx]
+user = user_name
+...
+```
+or
+```ini
+[program:xxx]
+user = user_name:group_name
+...
+```
+## Group
+The "group" section is supported and the "programs" item can be set.
+
+## Events
+
+The events defined by supervisor 3.x are partially supported.
Now it supports following events: + +- all process state related events +- process communication event +- remote communication event +- tick related events +- process log related events + +# The MIT License (MIT) + +Copyright (c) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/rpoletaev/supervisord/circle.yml b/vendor/github.com/rpoletaev/supervisord/circle.yml new file mode 100644 index 000000000..41481db33 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/circle.yml @@ -0,0 +1,9 @@ +deployment: + master: + branch: [master] + commands: + - go version + - go get github.com/mitchellh/gox + - go get github.com/tcnksm/ghr + - gox -output "dist/supervisord_{{.OS}}_{{.Arch}}" -osarch="linux/amd64 linux/386 darwin/amd64" + - ghr -t $GITHUB_TOKEN -u $CIRCLE_PROJECT_USERNAME -r $CIRCLE_PROJECT_REPONAME --replace v1.0.0 dist/ \ No newline at end of file diff --git a/vendor/github.com/rpoletaev/supervisord/config/config.go b/vendor/github.com/rpoletaev/supervisord/config/config.go new file mode 100644 index 000000000..b1e9a6076 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/config/config.go @@ -0,0 +1,558 @@ +package config + +import ( + "bytes" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "regexp" + "strconv" + "strings" + + ini "github.com/ochinchina/go-ini" + log "github.com/sirupsen/logrus" +) + +type ConfigEntry struct { + ConfigDir string + Group string + Name string + keyValues map[string]string +} + +func (c *ConfigEntry) IsProgram() bool { + return strings.HasPrefix(c.Name, "program:") +} + +func (c *ConfigEntry) GetProgramName() string { + if strings.HasPrefix(c.Name, "program:") { + return c.Name[len("program:"):] + } + return "" +} + +func (c *ConfigEntry) IsEventListener() bool { + return strings.HasPrefix(c.Name, "eventlistener:") +} + +func (c *ConfigEntry) GetEventListenerName() string { + if strings.HasPrefix(c.Name, "eventlistener:") { + return c.Name[len("eventlistener:"):] + } + return "" +} + +func (c *ConfigEntry) IsGroup() bool { + return strings.HasPrefix(c.Name, "group:") +} + +// get the group name if this entry is group +func (c *ConfigEntry) GetGroupName() string { + if strings.HasPrefix(c.Name, "group:") { + return c.Name[len("group:"):] + } + return "" +} + +// get the programs from the group +func (c *ConfigEntry) GetPrograms() []string { + if c.IsGroup() { + r := c.GetStringArray("programs", ",") + for i, p := range r { + r[i] = strings.TrimSpace(p) + } + return r + } + return make([]string, 0) +} + +// add key value entry +func (c *ConfigEntry) 
AddKeyValue(key, value string) { + c.keyValues[key] = value +} + +func (c *ConfigEntry) setGroup(group string) { + c.Group = group +} + +// dump the configuration as string +func (c *ConfigEntry) String() string { + buf := bytes.NewBuffer(make([]byte, 0)) + fmt.Fprintf(buf, "configDir=%s\n", c.ConfigDir) + fmt.Fprintf(buf, "group=%s\n", c.Group) + for k, v := range c.keyValues { + fmt.Fprintf(buf, "%s=%s\n", k, v) + } + return buf.String() + +} + +type Config struct { + configFile string + //mapping between the section name and the configure + entries map[string]*ConfigEntry + + ProgramGroup *ProcessGroup +} + +func NewConfigEntry(configDir string) *ConfigEntry { + return &ConfigEntry{configDir, "", "", make(map[string]string)} +} + +func NewConfig(configFile string) *Config { + return &Config{configFile, make(map[string]*ConfigEntry), NewProcessGroup()} +} + +//create a new entry or return the already-exist entry +func (c *Config) createEntry(name string, configDir string) *ConfigEntry { + entry, ok := c.entries[name] + + if !ok { + entry = NewConfigEntry(configDir) + c.entries[name] = entry + } + return entry +} + +// +// return the loaded programs +func (c *Config) Load() ([]string, error) { + ini := ini.NewIni() + c.ProgramGroup = NewProcessGroup() + ini.LoadFile(c.configFile) + + includeFiles := c.getIncludeFiles(ini) + for _, f := range includeFiles { + ini.LoadFile(f) + } + return c.parse(ini), nil +} + +func (c *Config) getIncludeFiles(cfg *ini.Ini) []string { + result := make([]string, 0) + if includeSection, err := cfg.GetSection("include"); err == nil { + key, err := includeSection.GetValue("files") + if err == nil { + env := NewStringExpression("here", c.GetConfigFileDir()) + files := strings.Fields(key) + for _, f_raw := range files { + dir := c.GetConfigFileDir() + f, err := env.Eval(f_raw) + if err != nil { + continue + } + if filepath.IsAbs(f) { + dir = filepath.Dir(f) + } + fileInfos, err := ioutil.ReadDir(dir) + if err == nil { + goPattern := toRegexp(filepath.Base(f)) + for _, fileInfo := range fileInfos { + if matched, err := regexp.MatchString(goPattern, fileInfo.Name()); matched && err == nil { + result = append(result, filepath.Join(dir, fileInfo.Name())) + } + } + } + + } + } + } + return result + +} + +func (c *Config) parse(cfg *ini.Ini) []string { + c.parseGroup(cfg) + loaded_programs := c.parseProgram(cfg) + + //parse non-group,non-program and non-eventlistener sections + for _, section := range cfg.Sections() { + if !strings.HasPrefix(section.Name, "group:") && !strings.HasPrefix(section.Name, "program:") && !strings.HasPrefix(section.Name, "eventlistener:") { + entry := c.createEntry(section.Name, c.GetConfigFileDir()) + c.entries[section.Name] = entry + entry.parse(section) + } + } + return loaded_programs +} + +func (c *Config) GetConfigFileDir() string { + return filepath.Dir(c.configFile) +} + +//convert supervisor file pattern to the go regrexp +func toRegexp(pattern string) string { + tmp := strings.Split(pattern, ".") + for i, t := range tmp { + s := strings.Replace(t, "*", ".*", -1) + tmp[i] = strings.Replace(s, "?", ".", -1) + } + return strings.Join(tmp, "\\.") +} + +//get the unix_http_server section +func (c *Config) GetUnixHttpServer() (*ConfigEntry, bool) { + entry, ok := c.entries["unix_http_server"] + + return entry, ok +} + +//get the supervisord section +func (c *Config) GetSupervisord() (*ConfigEntry, bool) { + entry, ok := c.entries["supervisord"] + return entry, ok +} + +// Get the inet_http_server configuration section +func (c *Config) 
GetInetHttpServer() (*ConfigEntry, bool) { + entry, ok := c.entries["inet_http_server"] + return entry, ok +} + +func (c *Config) GetSupervisorctl() (*ConfigEntry, bool) { + entry, ok := c.entries["supervisorctl"] + return entry, ok +} +func (c *Config) GetEntries(filterFunc func(entry *ConfigEntry) bool) []*ConfigEntry { + result := make([]*ConfigEntry, 0) + for _, entry := range c.entries { + if filterFunc(entry) { + result = append(result, entry) + } + } + return result +} +func (c *Config) GetGroups() []*ConfigEntry { + return c.GetEntries(func(entry *ConfigEntry) bool { + return entry.IsGroup() + }) +} + +func (c *Config) GetPrograms() []*ConfigEntry { + programs := c.GetEntries(func(entry *ConfigEntry) bool { + return entry.IsProgram() + }) + + return sortProgram(programs) +} + +func (c *Config) GetEventListeners() []*ConfigEntry { + eventListeners := c.GetEntries(func(entry *ConfigEntry) bool { + return entry.IsEventListener() + }) + + return eventListeners +} + +func (c *Config) GetProgramNames() []string { + result := make([]string, 0) + programs := c.GetPrograms() + + programs = sortProgram(programs) + for _, entry := range programs { + result = append(result, entry.GetProgramName()) + } + return result +} + +//return the proram configure entry or nil +func (c *Config) GetProgram(name string) *ConfigEntry { + for _, entry := range c.entries { + if entry.IsProgram() && entry.GetProgramName() == name { + return entry + } + } + return nil +} + +// get value of key as bool +func (c *ConfigEntry) GetBool(key string, defValue bool) bool { + value, ok := c.keyValues[key] + + if ok { + b, err := strconv.ParseBool(value) + if err == nil { + return b + } + } + return defValue +} + +// check if has parameter +func (c *ConfigEntry) HasParameter(key string) bool { + _, ok := c.keyValues[key] + return ok +} + +func toInt(s string, factor int, defValue int) int { + i, err := strconv.Atoi(s) + if err == nil { + return i * factor + } + return defValue +} + +// get the value of the key as int +func (c *ConfigEntry) GetInt(key string, defValue int) int { + value, ok := c.keyValues[key] + + if ok { + return toInt(value, 1, defValue) + } + return defValue +} + +// get the value of key as environment setting. 
An enviroment string example: +// environment = A="env 1",B="this is a test" +func (c *ConfigEntry) GetEnv(key string) []string { + value, ok := c.keyValues[key] + env := make([]string, 0) + + if ok { + start := 0 + n := len(value) + var i int + for { + for i = start; i < n && value[i] != '='; { + i++ + } + key := value[start:i] + start = i + 1 + if value[start] == '"' { + for i = start + 1; i < n && value[i] != '"'; { + i++ + } + if i < n { + env = append(env, fmt.Sprintf("%s=%s", key, value[start+1:i])) + } + if i+1 < n && value[i+1] == ',' { + start = i + 2 + } else { + break + } + } else { + for i = start; i < n && value[i] != ','; { + i++ + } + if i < n { + env = append(env, fmt.Sprintf("%s=%s", key, value[start:i])) + start = i + 1 + } else { + env = append(env, fmt.Sprintf("%s=%s", key, value[start:])) + break + } + } + } + } + + result := make([]string, 0) + for i := 0; i < len(env); i++ { + tmp, err := NewStringExpression("program_name", c.GetProgramName(), + "process_num", c.GetString("process_num", "0"), + "group_name", c.GetGroupName(), + "here", c.ConfigDir).Eval(env[i]) + if err == nil { + result = append(result, tmp) + } + } + return result +} + +//get the value of key as string +func (c *ConfigEntry) GetString(key string, defValue string) string { + s, ok := c.keyValues[key] + + if ok { + env := NewStringExpression("here", c.ConfigDir) + rep_s, err := env.Eval(s) + if err == nil { + return rep_s + } else { + log.WithFields(log.Fields{ + log.ErrorKey: err, + "program": c.GetProgramName(), + "key": key, + }).Warn("Unable to parse expression") + } + } + return defValue +} + +//get the value of key as string and attempt to parse it with StringExpression +func (c *ConfigEntry) GetStringExpression(key string, defValue string) string { + s, ok := c.keyValues[key] + if !ok || s == "" { + return "" + } + + host_name, err := os.Hostname() + if err != nil { + host_name = "Unknown" + } + result, err := NewStringExpression("program_name", c.GetProgramName(), + "process_num", c.GetString("process_num", "0"), + "group_name", c.GetGroupName(), + "here", c.ConfigDir, + "host_node_name", host_name).Eval(s) + + if err != nil { + log.WithFields(log.Fields{ + log.ErrorKey: err, + "program": c.GetProgramName(), + "key": key, + }).Warn("unable to parse expression") + return s + } + + return result +} + +func (c *ConfigEntry) GetStringArray(key string, sep string) []string { + s, ok := c.keyValues[key] + + if ok { + return strings.Split(s, sep) + } + return make([]string, 0) +} + +// get the value of key as the bytes setting. 
+// +// logSize=1MB +// logSize=1GB +// logSize=1KB +// logSize=1024 +// +func (c *ConfigEntry) GetBytes(key string, defValue int) int { + v, ok := c.keyValues[key] + + if ok { + if len(v) > 2 { + lastTwoBytes := v[len(v)-2:] + if lastTwoBytes == "MB" { + return toInt(v[:len(v)-2], 1024*1024, defValue) + } else if lastTwoBytes == "GB" { + return toInt(v[:len(v)-2], 1024*1024*1024, defValue) + } else if lastTwoBytes == "KB" { + return toInt(v[:len(v)-2], 1024, defValue) + } + } + return toInt(v, 1, defValue) + } + return defValue +} + +func (c *ConfigEntry) parse(section *ini.Section) { + c.Name = section.Name + for _, key := range section.Keys() { + c.keyValues[key.Name()] = key.ValueWithDefault("") + } +} + +func (c *Config) parseGroup(cfg *ini.Ini) { + + //parse the group at first + for _, section := range cfg.Sections() { + if strings.HasPrefix(section.Name, "group:") { + entry := c.createEntry(section.Name, c.GetConfigFileDir()) + entry.parse(section) + groupName := entry.GetGroupName() + programs := entry.GetPrograms() + for _, program := range programs { + c.ProgramGroup.Add(groupName, program) + } + } + } +} + +func (c *Config) isProgramOrEventListener(section *ini.Section) (bool, string) { + //check if it is a program or event listener section + is_program := strings.HasPrefix(section.Name, "program:") + is_event_listener := strings.HasPrefix(section.Name, "eventlistener:") + prefix := "" + if is_program { + prefix = "program:" + } else if is_event_listener { + prefix = "eventlistener:" + } + return is_program || is_event_listener, prefix +} + +// parse the sections starts with "program:" prefix. +// +// Return all the parsed program names in the ini +func (c *Config) parseProgram(cfg *ini.Ini) []string { + loaded_programs := make([]string, 0) + for _, section := range cfg.Sections() { + + program_or_event_listener, prefix := c.isProgramOrEventListener(section) + + //if it is program or event listener + if program_or_event_listener { + //get the number of processes + numProcs, err := section.GetInt("numprocs") + programName := section.Name[len(prefix):] + if err != nil { + numProcs = 1 + } + procName, err := section.GetValue("process_name") + if numProcs > 1 { + if err != nil || strings.Index(procName, "%(process_num)") == -1 { + log.WithFields(log.Fields{ + "numprocs": numProcs, + "process_name": procName, + }).Error("no process_num in process name") + } + } + originalProcName := programName + if err == nil { + originalProcName = procName + } + + for i := 1; i <= numProcs; i++ { + envs := NewStringExpression("program_name", programName, + "process_num", fmt.Sprintf("%d", i), + "group_name", c.ProgramGroup.GetGroup(programName, programName), + "here", c.GetConfigFileDir()) + cmd, err := envs.Eval(section.GetValueWithDefault("command", "")) + if err != nil { + continue + } + section.Add("command", cmd) + + procName, err := envs.Eval(originalProcName) + if err != nil { + continue + } + + section.Add("process_name", procName) + section.Add("numprocs_start", fmt.Sprintf("%d", (i-1))) + section.Add("process_num", fmt.Sprintf("%d", i)) + entry := c.createEntry(procName, c.GetConfigFileDir()) + entry.parse(section) + entry.Name = prefix + procName + group := c.ProgramGroup.GetGroup(programName, programName) + entry.Group = group + loaded_programs = append(loaded_programs, procName) + } + } + } + return loaded_programs + +} + +func (c *Config) String() string { + buf := bytes.NewBuffer(make([]byte, 0)) + fmt.Fprintf(buf, "configFile:%s\n", c.configFile) + for k, v := range c.entries { + 
fmt.Fprintf(buf, "[program:%s]\n", k) + fmt.Fprintf(buf, "%s\n", v.String()) + } + return buf.String() +} + +func (c *Config) RemoveProgram(programName string) { + delete(c.entries, fmt.Sprintf("program:%s", programName)) + c.ProgramGroup.Remove(programName) +} diff --git a/vendor/github.com/rpoletaev/supervisord/config/process_group.go b/vendor/github.com/rpoletaev/supervisord/config/process_group.go new file mode 100644 index 000000000..264995bdf --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/config/process_group.go @@ -0,0 +1,114 @@ +package config + +import ( + "bytes" + "strings" + + "github.com/rpoletaev/supervisord/util" +) + +type ProcessGroup struct { + //mapping between the program and its group + processGroup map[string]string +} + +func NewProcessGroup() *ProcessGroup { + return &ProcessGroup{processGroup: make(map[string]string)} +} + +// clone the process group +func (pg *ProcessGroup) Clone() *ProcessGroup { + new_pg := NewProcessGroup() + for k, v := range pg.processGroup { + new_pg.processGroup[k] = v + } + return new_pg +} + +func (pg *ProcessGroup) Sub(other *ProcessGroup) (added []string, changed []string, removed []string) { + thisGroup := pg.GetAllGroup() + otherGroup := other.GetAllGroup() + added = util.Sub(thisGroup, otherGroup) + changed = make([]string, 0) + removed = util.Sub(otherGroup, thisGroup) + + for _, group := range thisGroup { + proc_1 := pg.GetAllProcess(group) + proc_2 := other.GetAllProcess(group) + if len(proc_2) > 0 && !util.IsSameStringArray(proc_1, proc_2) { + changed = append(changed, group) + } + } + return +} + +//add a process to a group +func (pg *ProcessGroup) Add(group string, procName string) { + pg.processGroup[procName] = group +} + +//remove a process +func (pg *ProcessGroup) Remove(procName string) { + delete(pg.processGroup, procName) +} + +//get all the groups +func (pg *ProcessGroup) GetAllGroup() []string { + groups := make(map[string]bool) + for _, group := range pg.processGroup { + groups[group] = true + } + + result := make([]string, 0) + for group, _ := range groups { + result = append(result, group) + } + return result +} + +// get all the processes in a group +func (pg *ProcessGroup) GetAllProcess(group string) []string { + result := make([]string, 0) + for procName, groupName := range pg.processGroup { + if group == groupName { + result = append(result, procName) + } + } + return result +} + +// check if a process belongs to a group or not +func (pg *ProcessGroup) InGroup(procName string, group string) bool { + groupName, ok := pg.processGroup[procName] + if ok && group == groupName { + return true + } + return false +} + +func (pg *ProcessGroup) ForEachProcess(procFunc func(group string, procName string)) { + for procName, groupName := range pg.processGroup { + procFunc(groupName, procName) + } +} + +func (pg *ProcessGroup) GetGroup(procName string, defGroup string) string { + group, ok := pg.processGroup[procName] + + if ok { + return group + } + pg.processGroup[procName] = defGroup + return defGroup +} + +func (pg *ProcessGroup) String() string { + buf := bytes.NewBuffer(make([]byte, 0)) + for _, group := range pg.GetAllGroup() { + buf.WriteString(group) + buf.WriteString(":") + buf.WriteString(strings.Join(pg.GetAllProcess(group), ",")) + buf.WriteString(";") + } + return buf.String() +} diff --git a/vendor/github.com/rpoletaev/supervisord/config/process_sort.go b/vendor/github.com/rpoletaev/supervisord/config/process_sort.go new file mode 100644 index 000000000..bbd902f49 --- /dev/null +++ 
b/vendor/github.com/rpoletaev/supervisord/config/process_sort.go @@ -0,0 +1,159 @@ +package config + +import ( + "sort" + "strings" +) + +type ProgramByPriority []*ConfigEntry + +func (p ProgramByPriority) Len() int { + return len(p) +} + +func (p ProgramByPriority) Swap(i, j int) { + p[i], p[j] = p[j], p[i] +} + +func (p ProgramByPriority) Less(i, j int) bool { + return p[i].GetInt("priority", 999) < p[j].GetInt("priority", 999) +} + +type ProcessSorter struct { + depends_on_gragh map[string][]string + procs_without_depends []*ConfigEntry +} + +func NewProcessSorter() *ProcessSorter { + return &ProcessSorter{depends_on_gragh: make(map[string][]string), + procs_without_depends: make([]*ConfigEntry, 0)} +} + +func (p *ProcessSorter) initDepends(program_configs []*ConfigEntry) { + //sort by depends_on + for _, config := range program_configs { + if config.IsProgram() && config.HasParameter("depends_on") { + depends_on := config.GetString("depends_on", "") + prog_name := config.GetProgramName() + for _, depends_on_prog := range strings.Split(depends_on, ",") { + depends_on_prog = strings.TrimSpace(depends_on_prog) + if depends_on_prog != "" { + if _, ok := p.depends_on_gragh[prog_name]; !ok { + p.depends_on_gragh[prog_name] = make([]string, 0) + } + p.depends_on_gragh[prog_name] = append(p.depends_on_gragh[prog_name], depends_on_prog) + + } + } + } + } + +} + +func (p *ProcessSorter) initProgramWithoutDepends(program_configs []*ConfigEntry) { + depends_on_programs := p.getDependsOnInfo() + for _, config := range program_configs { + if config.IsProgram() { + if _, ok := depends_on_programs[config.GetProgramName()]; !ok { + p.procs_without_depends = append(p.procs_without_depends, config) + } + } + } +} + +func (p *ProcessSorter) getDependsOnInfo() map[string]string { + depends_on_programs := make(map[string]string) + + for k, v := range p.depends_on_gragh { + depends_on_programs[k] = k + for _, t := range v { + depends_on_programs[t] = t + } + } + + return depends_on_programs +} + +func (p *ProcessSorter) sortDepends() []string { + finished_programs := make(map[string]string) + progs_with_depends_info := p.getDependsOnInfo() + progs_start_order := make([]string, 0) + + //get all process without depends + for prog_name, _ := range progs_with_depends_info { + if _, ok := p.depends_on_gragh[prog_name]; !ok { + finished_programs[prog_name] = prog_name + progs_start_order = append(progs_start_order, prog_name) + } + } + + for len(finished_programs) < len(progs_with_depends_info) { + for prog_name, _ := range p.depends_on_gragh { + if _, ok := finished_programs[prog_name]; !ok && p.inFinishedPrograms(prog_name, finished_programs) { + finished_programs[prog_name] = prog_name + progs_start_order = append(progs_start_order, prog_name) + } + } + } + + return progs_start_order +} + +func (p *ProcessSorter) inFinishedPrograms(program_name string, finished_programs map[string]string) bool { + if depends_on, ok := p.depends_on_gragh[program_name]; ok { + for _, depend_program := range depends_on { + if _, finished := finished_programs[depend_program]; !finished { + return false + } + } + } + return true +} + +/*func (p *ProcessSorter) SortProcess(procs []*Process) []*Process { + prog_configs := make([]*ConfigEntry, 0) + for _, proc := range procs { + if proc.config.IsProgram() { + prog_configs = append(prog_configs, proc.config) + } + } + + result := make([]*Process, 0) + for _, config := range p.SortProgram(prog_configs) { + for _, proc := range procs { + if proc.config == config { + result = 
append(result, proc) + } + } + } + + return result +}*/ + +func (p *ProcessSorter) SortProgram(program_configs []*ConfigEntry) []*ConfigEntry { + p.initDepends(program_configs) + p.initProgramWithoutDepends(program_configs) + result := make([]*ConfigEntry, 0) + + for _, prog := range p.sortDepends() { + for _, config := range program_configs { + if config.IsProgram() && config.GetProgramName() == prog { + result = append(result, config) + } + } + } + + sort.Sort(ProgramByPriority(p.procs_without_depends)) + for _, p := range p.procs_without_depends { + result = append(result, p) + } + return result +} + +/*func sortProcess(procs []*Process) []*Process { + return NewProcessSorter().SortProcess(procs) +}*/ + +func sortProgram(configs []*ConfigEntry) []*ConfigEntry { + return NewProcessSorter().SortProgram(configs) +} diff --git a/vendor/github.com/rpoletaev/supervisord/config/string_expression.go b/vendor/github.com/rpoletaev/supervisord/config/string_expression.go new file mode 100644 index 000000000..30933e44a --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/config/string_expression.go @@ -0,0 +1,88 @@ +package config + +import ( + "fmt" + "os" + "strconv" + "strings" +) + +type StringExpression struct { + env map[string]string +} + +func NewStringExpression(envs ...string) *StringExpression { + se := &StringExpression{env: make(map[string]string)} + + for _, env := range os.Environ() { + t := strings.Split(env, "=") + se.env["ENV_"+t[0]] = t[1] + } + n := len(envs) + for i := 0; i+1 < n; i += 2 { + se.env[envs[i]] = envs[i+1] + } + + hostname, err := os.Hostname() + if err == nil { + se.env["host_node_name"] = hostname + } + + return se + +} + +func (se *StringExpression) Add(key string, value string) *StringExpression { + se.env[key] = value + return se +} + +func (se *StringExpression) Eval(s string) (string, error) { + for { + //find variable start indicator + start := strings.Index(s, "%(") + + if start == -1 { + return s, nil + } + + end := start + 1 + n := len(s) + + //find variable end indicator + for end < n && s[end] != ')' { + end++ + } + + //find the type of the variable + typ := end + 1 + for typ < n && !((s[typ] >= 'a' && s[typ] <= 'z') || (s[typ] >= 'A' && s[typ] <= 'Z')) { + typ++ + } + + //evaluate the variable + if typ < n { + varName := s[start+2 : end] + + varValue, ok := se.env[varName] + + if !ok { + return "", fmt.Errorf("fail to find the environment variable %s", varName) + } + if s[typ] == 'd' { + i, err := strconv.Atoi(varValue) + if err != nil { + return "", fmt.Errorf("can't convert %s to integer", varValue) + } + s = s[0:start] + fmt.Sprintf("%"+s[end+1:typ+1], i) + s[typ+1:] + } else if s[typ] == 's' { + s = s[0:start] + varValue + s[typ+1:] + } else { + return "", fmt.Errorf("not implement type:%v", s[typ]) + } + } else { + return "", fmt.Errorf("invalid string expression format") + } + } + +} diff --git a/vendor/github.com/rpoletaev/supervisord/config_template.go b/vendor/github.com/rpoletaev/supervisord/config_template.go new file mode 100644 index 000000000..760cc4174 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/config_template.go @@ -0,0 +1,137 @@ +package main + +import ( + "io" + "os" +) + +var config_template = `[unix_http_server] +file=/tmp/supervisord.sock +#chmod=not support +#chown=not support +username=test1 +password={SHA}82ab876d1387bfafe46cc1c8a2ef074eae50cb1d + +[inet_http_server] +port=127.0.0.1:9001 +username=test1 +password=thepassword + +[supervisord] +logfile=%(here)s/supervisord.log +logfile_maxbytes=50MB 
+logfile_backups=10 +loglevel=info +pidfile=%(here)s/supervisord.pid +#umask=not support +#nodaemon=not support +#minfds=not support +#minprocs=not support +#nocleanup=not support +#childlogdir=not support +#user=not support +#directory=not support +#strip_ansi=not support +#environment=not support +identifier=supervisor + +[program:x] +command=/bin/cat +process_name=%(program_name)s +numprocs=1 +#numprocs_start=not support +autostart=true +startsecs=3 +startretries=3 +autorestart=true +exitcodes=0,2 +stopsignal=TERM +stopwaitsecs=10 +#stopasgroup=not support +#killasgroup=not support +user=user1 +redirect_stderr=false +stdout_logfile=AUTO +stdout_logfile_maxbytes=50MB +stdout_logfile_backups=10 +stdout_capture_maxbytes=0 +stdout_events_enabled=true +stderr_logfile=AUTO +stderr_logfile_maxbytes=50MB +stderr_logfile_backups=10 +stderr_capture_maxbytes=0 +stderr_events_enabled=false +environment=KEY="val",KEY2="val2" +directory=/tmp +#umask=not support +serverurl=AUTO + +[include] +files=/an/absolute/filename.conf /an/absolute/*.conf foo.conf config??.conf + +[group:x] +programs=bar,baz +priority=999 + +[eventlistener:x] +command=/bin/eventlistener +process_name=%(program_name)s +numprocs=1 +#numprocs_start=not support +autostart=true +startsecs=3 +startretries=3 +autorestart=true +exitcodes=0,2 +stopsignal=TERM +stopwaitsecs=10 +#stopasgroup=not support +#killasgroup=not support +user=user1 +redirect_stderr=false +stdout_logfile=AUTO +stdout_logfile_maxbytes=50MB +stdout_logfile_backups=10 +stdout_capture_maxbytes=0 +stdout_events_enabled=true +stderr_logfile=AUTO +stderr_logfile_maxbytes=50MB +stderr_logfile_backups=10 +stderr_capture_maxbytes=0 +stderr_events_enabled=false +environment=KEY="val",KEY2="val2" +directory=/tmp +#umask=not support +serverurl=AUTO +buffer_size=10240 +events=PROCESS_STATE +#result_handler=not support +` + +type InitTemplateCommand struct { + OutFile string `short:"o" long:"output" description:"the output file name" required:"true"` +} + +var initTemplateCommand InitTemplateCommand + +func (x *InitTemplateCommand) Execute(args []string) error { + f, err := os.Create(x.OutFile) + if err != nil { + return err + } + defer f.Close() + return GenTemplate(f) +} + +func GenTemplate(writer io.Writer) error { + _, err := writer.Write([]byte(config_template)) + return err +} + +func init() { + parser.AddCommand("init", + "initialize a template", + "The init subcommand writes the supported configurations to specified file", + &initTemplateCommand) + +} diff --git a/vendor/github.com/rpoletaev/supervisord/content_checker.go b/vendor/github.com/rpoletaev/supervisord/content_checker.go new file mode 100644 index 000000000..6c0b7d3e3 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/content_checker.go @@ -0,0 +1,149 @@ +package main + +import ( + "fmt" + "net" + "net/http" + "os/exec" + "strings" + "time" +) + +type ContentChecker interface { + Check() bool +} + +type BaseChecker struct { + data string + includes []string + //timeout in second + timeoutTime time.Time + notifyChannel chan string +} + +func NewBaseChecker(includes []string, timeout int) *BaseChecker { + return &BaseChecker{data: "", + includes: includes, + timeoutTime: time.Now().Add(time.Duration(timeout) * time.Second), + notifyChannel: make(chan string, 1)} +} + +func (bc *BaseChecker) Write(b []byte) (int, error) { + bc.notifyChannel <- string(b) + return len(b), nil +} + +func (bc *BaseChecker) isReady() bool { + find_all := true + for _, include := range bc.includes { + if strings.Index(bc.data, 
include) == -1 { + find_all = false + break + } + } + return find_all +} +func (bc *BaseChecker) Check() bool { + d := bc.timeoutTime.Sub(time.Now()) + if d < 0 { + return false + } + timeoutSignal := time.After(d) + + for { + select { + case data := <-bc.notifyChannel: + bc.data = bc.data + data + if bc.isReady() { + return true + } + case <-timeoutSignal: + return false + } + } +} + +type ScriptChecker struct { + args []string +} + +func NewScriptChecker(args []string) *ScriptChecker { + return &ScriptChecker{args: args} +} + +func (sc *ScriptChecker) Check() bool { + cmd := exec.Command(sc.args[0]) + if len(sc.args) > 1 { + cmd.Args = sc.args + } + err := cmd.Run() + return err == nil && cmd.ProcessState != nil && cmd.ProcessState.Success() +} + +type TcpChecker struct { + host string + port int + conn net.Conn + baseChecker *BaseChecker +} + +func NewTcpChecker(host string, port int, includes []string, timeout int) *TcpChecker { + checker := &TcpChecker{host: host, + port: port, + baseChecker: NewBaseChecker(includes, timeout)} + checker.start() + return checker +} + +func (tc *TcpChecker) start() { + go func() { + b := make([]byte, 1024) + var err error = nil + for { + tc.conn, err = net.Dial("tcp", fmt.Sprintf("%s:%d", tc.host, tc.port)) + if err == nil || tc.baseChecker.timeoutTime.Before(time.Now()) { + break + } + } + + if err == nil { + for { + n, err := tc.conn.Read(b) + if err != nil { + break + } + tc.baseChecker.Write(b[0:n]) + } + } + }() +} + +func (tc *TcpChecker) Check() bool { + ret := tc.baseChecker.Check() + if tc.conn != nil { + tc.conn.Close() + } + return ret +} + +type HttpChecker struct { + url string + timeoutTime time.Time +} + +func NewHttpChecker(url string, timeout int) *HttpChecker { + return &HttpChecker{url: url, + timeoutTime: time.Now().Add(time.Duration(timeout) * time.Second)} +} + +func (hc *HttpChecker) Check() bool { + for { + if hc.timeoutTime.After(time.Now()) { + resp, err := http.Get(hc.url) + if err == nil { + return resp.StatusCode >= 200 && resp.StatusCode < 300 + } + } + } + return false +} diff --git a/vendor/github.com/rpoletaev/supervisord/ctl.go b/vendor/github.com/rpoletaev/supervisord/ctl.go new file mode 100644 index 000000000..977dc3ba9 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/ctl.go @@ -0,0 +1,159 @@ +package main + +import ( + "fmt" + "os" + "strings" + + "github.com/rpoletaev/supervisord/config" + "github.com/rpoletaev/supervisord/xmlrpcclient" +) + +type CtlCommand struct { + ServerUrl string `short:"s" long:"serverurl" description:"URL on which supervisord server is listening"` +} + +var ctlCommand CtlCommand + +func (x *CtlCommand) getServerUrl() string { + fmt.Printf("%v\n", options) + if x.ServerUrl != "" { + return x.ServerUrl + } else if _, err := os.Stat(options.Configuration); err == nil { + config := config.NewConfig(options.Configuration) + config.Load() + if entry, ok := config.GetSupervisorctl(); ok { + serverurl := entry.GetString("serverurl", "") + if serverurl != "" { + return serverurl + } + } + } + return "http://localhost:9001" +} +func (x *CtlCommand) Execute(args []string) error { + if len(args) == 0 { + return nil + } + + rpcc := xmlrpcclient.NewXmlRPCClient(x.getServerUrl()) + verb := args[0] + + switch verb { + + //////////////////////////////////////////////////////////////////////////////// + // STATUS + //////////////////////////////////////////////////////////////////////////////// + case "status": + processes := args[1:] + processesMap := make(map[string]bool) + for _, process := 
range processes { + processesMap[strings.ToLower(process)] = true + } + if reply, err := rpcc.GetAllProcessInfo(); err == nil { + x.showProcessInfo(&reply, processesMap) + } + + //////////////////////////////////////////////////////////////////////////////// + // START or STOP + //////////////////////////////////////////////////////////////////////////////// + case "start", "stop": + state := map[string]string{ + "start": "started", + "stop": "stopped", + } + processes := args[1:] + if len(processes) <= 0 { + fmt.Printf("Please specify process for %s\n", verb) + } + for _, pname := range processes { + if pname == "all" { + reply, err := rpcc.ChangeAllProcessState(verb) + if err == nil { + x.showProcessInfo(&reply, make(map[string]bool)) + } else { + fmt.Printf("Fail to change all process state to %s", state) + } + } else { + if reply, err := rpcc.ChangeProcessState(verb, pname); err == nil { + fmt.Printf("%s: ", pname) + if !reply.Value { + fmt.Printf("not ") + } + fmt.Printf("%s\n", state[verb]) + } else { + fmt.Printf("%s: failed [%v]\n", pname, err) + } + } + } + + //////////////////////////////////////////////////////////////////////////////// + // SHUTDOWN + //////////////////////////////////////////////////////////////////////////////// + case "shutdown": + if reply, err := rpcc.Shutdown(); err == nil { + if reply.Value { + fmt.Printf("Shut Down\n") + } else { + fmt.Printf("Hmmm! Something gone wrong?!\n") + } + } + case "reload": + if reply, err := rpcc.ReloadConfig(); err == nil { + + if len(reply.AddedGroup) > 0 { + fmt.Printf("Added Groups: %s\n", strings.Join(reply.AddedGroup, ",")) + } + if len(reply.ChangedGroup) > 0 { + fmt.Printf("Changed Groups: %s\n", strings.Join(reply.ChangedGroup, ",")) + } + if len(reply.RemovedGroup) > 0 { + fmt.Printf("Removed Groups: %s\n", strings.Join(reply.RemovedGroup, ",")) + } + } + case "signal": + sig_name, processes := args[1], args[2:] + for _, process := range processes { + if process == "all" { + reply, err := rpcc.SignalAll(process) + if err == nil { + x.showProcessInfo(&reply, make(map[string]bool)) + } else { + fmt.Printf("Fail to send signal %s to all process", sig_name) + } + } else { + reply, err := rpcc.SignalProcess(sig_name, process) + if err == nil && reply.Success { + fmt.Printf("Succeed to send signal %s to process %s\n", sig_name, process) + } else { + fmt.Printf("Fail to send signal %s to process %s\n", sig_name, process) + } + } + } + + default: + fmt.Println("unknown command") + } + + return nil +} + +func (x *CtlCommand) showProcessInfo(reply *xmlrpcclient.AllProcessInfoReply, processesMap map[string]bool) { + for _, pinfo := range reply.Value { + name := strings.ToLower(pinfo.Name) + description := pinfo.Description + if strings.ToLower(description) == "" { + description = "" + } + if len(processesMap) <= 0 || processesMap[name] { + fmt.Printf("%-33s%-10s%s\n", name, pinfo.Statename, description) + } + } +} + +func init() { + parser.AddCommand("ctl", + "Control a running daemon", + "The ctl subcommand resembles supervisorctl command of original daemon.", + &ctlCommand) +} diff --git a/vendor/github.com/rpoletaev/supervisord/daemonize.go b/vendor/github.com/rpoletaev/supervisord/daemonize.go new file mode 100644 index 000000000..682b1e88c --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/daemonize.go @@ -0,0 +1,25 @@ +// +build !windows + +package main + +import ( + log "github.com/sirupsen/logrus" + "github.com/sevlyar/go-daemon" +) + +func Deamonize(proc func()) { + context := new(daemon.Context) + + child, 
err := context.Reborn() + if err != nil { + log.WithFields(log.Fields{"err": err}).Fatal("Unable to run") + } + if child != nil { + return + } + defer context.Release() + + log.Info("daemon started") + + proc() +} diff --git a/vendor/github.com/rpoletaev/supervisord/daemonize_windows.go b/vendor/github.com/rpoletaev/supervisord/daemonize_windows.go new file mode 100644 index 000000000..9fe9d204d --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/daemonize_windows.go @@ -0,0 +1,7 @@ +package main + +// +build windows + +func Deamonize(proc func()) { + proc() +} diff --git a/vendor/github.com/rpoletaev/supervisord/events/events.go b/vendor/github.com/rpoletaev/supervisord/events/events.go new file mode 100644 index 000000000..ad3c55137 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/events/events.go @@ -0,0 +1,745 @@ +package events + +import ( + "bufio" + "bytes" + "container/list" + "fmt" + log "github.com/sirupsen/logrus" + "io" + "strconv" + "strings" + "sync" + "sync/atomic" + "time" +) + +const ( + EVENT_SYS_VERSION = "3.0" + PROC_COMMON_BEGIN_STR = "" + PROC_COMMON_END_STR = "" +) + +type Event interface { + GetSerial() uint64 + GetType() string + GetBody() string +} + +type BaseEvent struct { + serial uint64 + eventType string +} + +func (be *BaseEvent) GetSerial() uint64 { + return be.serial +} + +func (be *BaseEvent) GetType() string { + return be.eventType +} + +type EventListenerManager struct { + //mapping between the event listener name and the listener + namedListeners map[string]*EventListener + //mapping between the event name and the event listeners + eventListeners map[string]map[*EventListener]bool +} + +type EventPoolSerial struct { + sync.Mutex + poolserial map[string]uint64 +} + +func NewEventPoolSerial() *EventPoolSerial { + return &EventPoolSerial{poolserial: make(map[string]uint64)} +} + +func (eps *EventPoolSerial) nextSerial(pool string) uint64 { + eps.Lock() + defer eps.Unlock() + + r, ok := eps.poolserial[pool] + if !ok { + r = 1 + } + eps.poolserial[pool] = r + 1 + return r +} + +type EventListener struct { + pool string + server string + mutex sync.Mutex + events *list.List + stdin *bufio.Reader + stdout io.Writer + buffer_size int +} + +func NewEventListener(pool string, + server string, + stdin io.Reader, + stdout io.Writer, + buffer_size int) *EventListener { + evtListener := &EventListener{pool: pool, + server: server, + events: list.New(), + stdin: bufio.NewReader(stdin), + stdout: stdout, + buffer_size: buffer_size} + evtListener.start() + return evtListener +} + +func (el *EventListener) getFirstEvent() ([]byte, bool) { + el.mutex.Lock() + defer el.mutex.Unlock() + + if el.events.Len() > 0 { + elem := el.events.Front() + value := elem.Value + b, ok := value.([]byte) + return b, ok + } + return nil, false +} + +func (el *EventListener) removeFirstEvent() { + el.mutex.Lock() + defer el.mutex.Unlock() + if el.events.Len() > 0 { + el.events.Remove(el.events.Front()) + } +} + +func (el *EventListener) start() { + go func() { + for { + //read if it is ready + err := el.waitForReady() + if err != nil { + log.WithFields(log.Fields{"eventListener": el.pool}).Warn("fail to read from event listener, the event listener may exit") + break + } + for { + if b, ok := el.getFirstEvent(); ok { + _, err := el.stdout.Write(b) + if err != nil { + log.WithFields(log.Fields{"eventListener": el.pool}).Warn("fail to send event") + break + } + result, err := el.readResult() + if err != nil { + log.WithFields(log.Fields{"eventListener": el.pool}).Warn("fail 
to read result") + break + } + if result == "OK" { //remove the event if succeed + log.WithFields(log.Fields{"eventListener": el.pool}).Info("succeed to send the event") + el.removeFirstEvent() + break + } else if result == "FAIL" { + log.WithFields(log.Fields{"eventListener": el.pool}).Warn("fail to send the event") + break + } else { + log.WithFields(log.Fields{"eventListener": el.pool, "result": result}).Warn("unknown result from listener") + } + } + } + } + }() +} + +func (el *EventListener) waitForReady() error { + log.Debug("start to check if event listener program is ready") + for { + line, err := el.stdin.ReadString('\n') + if err != nil { + return err + } + if line == "READY\n" { + log.WithFields(log.Fields{"eventListener": el.pool}).Debug("the event listener is ready") + return nil + } + } +} + +func (el *EventListener) readResult() (string, error) { + s, err := el.stdin.ReadString('\n') + if err != nil { + return s, err + } + fields := strings.Fields(s) + if len(fields) == 2 && fields[0] == "RESULT" { + //try to get the length of result + n, err := strconv.Atoi(fields[1]) + if err != nil { + //return if fail to get the length + return "", err + } + if n < 0 { + return "", fmt.Errorf("Fail to read the result because the result bytes is less than 0") + } + //read n bytes + b := make([]byte, n) + for i := 0; i < n; i++ { + b[i], err = el.stdin.ReadByte() + if err != nil { + return "", err + } + } + //ok, get the n bytes + return string(b), nil + } else { + return "", fmt.Errorf("Fail to read the result") + } + +} + +func (el *EventListener) HandleEvent(event Event) { + encodedEvent := el.encodeEvent(event) + el.mutex.Lock() + defer el.mutex.Unlock() + if el.events.Len() <= el.buffer_size { + el.events.PushBack(encodedEvent) + } else { + log.WithFields(log.Fields{"eventListener": el.pool}).Error("events reaches the buffer_size, discard the events") + } +} + +func (el *EventListener) encodeEvent(event Event) []byte { + body := []byte(event.GetBody()) + + //header + s := fmt.Sprintf("ver:%s server:%s serial:%d pool:%s poolserial:%d eventname:%s len:%d\n", + EVENT_SYS_VERSION, + el.server, + event.GetSerial(), + el.pool, + eventPoolSerial.nextSerial(el.pool), + event.GetType(), + len(body)) + //write the header & body to buffer + r := bytes.NewBuffer([]byte(s)) + r.Write(body) + + return r.Bytes() +} + +var eventTypeDerives = map[string][]string{ + "PROCESS_STATE_STARTING": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_RUNNING": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_BACKOFF": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_STOPPING": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_EXITED": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_STOPPED": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_FATAL": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_UNKNOWN": []string{"EVENT", "PROCESS_STATE"}, + "REMOTE_COMMUNICATION": []string{"EVENT"}, + "PROCESS_LOG_STDOUT": []string{"EVENT", "PROCESS_LOG"}, + "PROCESS_LOG_STDERR": []string{"EVENT", "PROCESS_LOG"}, + "PROCESS_COMMUNICATION_STDOUT": []string{"EVENT", "PROCESS_COMMUNICATION"}, + "PROCESS_COMMUNICATION_STDERR": []string{"EVENT", "PROCESS_COMMUNICATION"}, + "SUPERVISOR_STATE_CHANGE_RUNNING": []string{"EVENT", "SUPERVISOR_STATE_CHANGE"}, + "SUPERVISOR_STATE_CHANGE_STOPPING": []string{"EVENT", "SUPERVISOR_STATE_CHANGE"}, + "TICK_5": []string{"EVENT", "TICK"}, + "TICK_60": []string{"EVENT", "TICK"}, + "TICK_3600": []string{"EVENT", "TICK"}, + "PROCESS_GROUP_ADDED": []string{"EVENT", 
"PROCESS_GROUP"}, + "PROCESS_GROUP_REMOVED": []string{"EVENT", "PROCESS_GROUP"}} +var eventSerial uint64 = 0 +var eventListenerManager = NewEventListenerManager() +var eventPoolSerial = NewEventPoolSerial() + +func init() { + startTickTimer() +} + +func startTickTimer() { + tickConfigs := map[string]int64{"TICK_5": 5, + "TICK_60": 60, + "TICK_3600": 3600} + + //start a Tick timer + go func() { + lastTickSlice := make(map[string]int64) + + c := time.Tick(1 * time.Second) + for now := range c { + for tickType, period := range tickConfigs { + time_slice := now.Unix() / period + last_time_slice, ok := lastTickSlice[tickType] + if !ok { + lastTickSlice[tickType] = time_slice + } else if last_time_slice != time_slice { + lastTickSlice[tickType] = time_slice + EmitEvent(NewTickEvent(tickType, now.Unix())) + } + } + } + }() +} + +func nextEventSerial() uint64 { + return atomic.AddUint64(&eventSerial, 1) +} + +func NewEventListenerManager() *EventListenerManager { + return &EventListenerManager{namedListeners: make(map[string]*EventListener), + eventListeners: make(map[string]map[*EventListener]bool)} +} + +func (em *EventListenerManager) registerEventListener(eventListenerName string, + events []string, + listener *EventListener) { + + em.namedListeners[eventListenerName] = listener + all_events := make(map[string]bool) + for _, event := range events { + for k, values := range eventTypeDerives { + if event == k { //if it is a final event + all_events[k] = true + } else { //if it is an abstract event, add all its derived events + for _, val := range values { + if val == event { + all_events[k] = true + } + } + } + } + } + for event, _ := range all_events { + log.WithFields(log.Fields{"eventListener": eventListenerName, "event": event}).Info("register event listener") + if _, ok := em.eventListeners[event]; !ok { + em.eventListeners[event] = make(map[*EventListener]bool) + } + em.eventListeners[event][listener] = true + } +} + +func RegisterEventListener(eventListenerName string, + events []string, + listener *EventListener) { + eventListenerManager.registerEventListener( eventListenerName, events, listener ) +} + +func (em *EventListenerManager) unregisterEventListener(eventListenerName string) *EventListener { + listener, ok := em.namedListeners[eventListenerName] + if ok { + delete(em.namedListeners, eventListenerName) + for event, listeners := range em.eventListeners { + if _, ok = listeners[listener]; ok { + log.WithFields(log.Fields{"eventListener": eventListenerName, "event": event}).Info("unregister event listener") + } + + delete(listeners, listener) + } + return listener + } + return nil +} + +func UnregisterEventListener(eventListenerName string) *EventListener { + return eventListenerManager.unregisterEventListener( eventListenerName ) +} + +func (em *EventListenerManager) EmitEvent(event Event) { + listeners, ok := em.eventListeners[event.GetType()] + if ok { + log.WithFields(log.Fields{"event": event.GetType()}).Info("process event") + for listener, _ := range listeners { + log.WithFields(log.Fields{"eventListener": listener.pool, "event": event.GetType()}).Info("receive event on listener") + listener.HandleEvent(event) + } + } +} + +type RemoteCommunicationEvent struct { + BaseEvent + typ string + data string +} + +func NewRemoteCommunicationEvent(typ string, data string) *RemoteCommunicationEvent { + r := &RemoteCommunicationEvent{typ: typ, data: data} + r.eventType = "REMOTE_COMMUNICATION" + r.serial = nextEventSerial() + return r +} + +func (r *RemoteCommunicationEvent) GetBody() 
string { + return fmt.Sprintf("type:%s\n%s", r.typ, r.data) +} + +type ProcCommEvent struct { + BaseEvent + processName string + groupName string + pid int + data string +} + +func NewProcCommEvent(eventType string, + procName string, + groupName string, + pid int, + data string) *ProcCommEvent { + return &ProcCommEvent{BaseEvent: BaseEvent{eventType: eventType, serial: nextEventSerial()}, + processName: procName, + groupName: groupName, + pid: pid, + data: data} +} + +func (p *ProcCommEvent) GetBody() string { + return fmt.Sprintf("processname:%s groupname:%s pid:%d\n%s", p.processName, p.groupName, p.pid, p.data) +} + +func EmitEvent(event Event) { + eventListenerManager.EmitEvent(event) +} + +type TickEvent struct { + BaseEvent + when int64 +} + +func NewTickEvent(tickType string, when int64) *TickEvent { + r := &TickEvent{when: when} + r.eventType = tickType + r.serial = nextEventSerial() + return r +} + +func (te *TickEvent) GetBody() string { + return fmt.Sprintf("when:%d", te.when) +} + +type ProcCommEventCapture struct { + reader io.Reader + captureMaxBytes int + stdType string + procName string + groupName string + pid int + eventBuffer string + eventBeginPos int +} + +func NewProcCommEventCapture(reader io.Reader, + captureMaxBytes int, + stdType string, + procName string, + groupName string) *ProcCommEventCapture { + pec := &ProcCommEventCapture{reader: reader, + captureMaxBytes: captureMaxBytes, + stdType: stdType, + procName: procName, + groupName: groupName, + pid: -1, + eventBuffer: "", + eventBeginPos: -1} + pec.startCapture() + return pec +} + +func (pec *ProcCommEventCapture) SetPid(pid int) { + pec.pid = pid +} +func (pec *ProcCommEventCapture) startCapture() { + go func() { + buf := make([]byte, 10240) + for { + n, err := pec.reader.Read(buf) + if err != nil { + break + } + pec.eventBuffer += string(buf[0:n]) + for { + event := pec.captureEvent() + if event == nil { + break + } + EmitEvent(event) + } + } + }() +} + +func (pec *ProcCommEventCapture) captureEvent() Event { + pec.findBeginStr() + end_pos := pec.findEndStr() + if end_pos == -1 { + return nil + } + data := pec.eventBuffer[pec.eventBeginPos+len(PROC_COMMON_BEGIN_STR) : end_pos] + pec.eventBuffer = pec.eventBuffer[end_pos+len(PROC_COMMON_END_STR):] + pec.eventBeginPos = -1 + return NewProcCommEvent(pec.stdType, + pec.procName, + pec.groupName, + pec.pid, + data) +} + +func (pec *ProcCommEventCapture) findBeginStr() { + if pec.eventBeginPos == -1 { + pec.eventBeginPos = strings.Index(pec.eventBuffer, PROC_COMMON_BEGIN_STR) + if pec.eventBeginPos == -1 { + //remove some string + n := len(pec.eventBuffer) + if n > len(PROC_COMMON_BEGIN_STR) { + pec.eventBuffer = pec.eventBuffer[n-len(PROC_COMMON_BEGIN_STR):] + } + } + } +} + +func (pec *ProcCommEventCapture) findEndStr() int { + if pec.eventBeginPos == -1 { + return -1 + } + end_pos := strings.Index(pec.eventBuffer, PROC_COMMON_END_STR) + if end_pos == -1 { + if len(pec.eventBuffer) > pec.captureMaxBytes { + log.WithFields(log.Fields{"program": pec.procName}).Warn("The capture buffer is overflow, discard the content") + pec.eventBeginPos = -1 + pec.eventBuffer = "" + } + } + return end_pos +} + +type ProcessStateEvent struct { + BaseEvent + process_name string + group_name string + from_state string + tries int + expected int + pid int +} + +func CreateProcessStartingEvent(process string, + group string, + from_state string, + tries int) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: 
tries, + expected: -1, + pid: 0} + r.eventType = "PROCESS_STATE_STARTING" + r.serial = nextEventSerial() + return r +} + +func CreateProcessRunningEvent(process string, + group string, + from_state string, + pid int) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: -1, + expected: -1, + pid: pid} + r.eventType = "PROCESS_STATE_RUNNING" + r.serial = nextEventSerial() + return r +} + +func CreateProcessBackoffEvent(process string, + group string, + from_state string, + tries int) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: tries, + expected: -1, + pid: 0} + r.eventType = "PROCESS_STATE_BACKOFF" + r.serial = nextEventSerial() + return r +} + +func CreateProcessStoppingEvent(process string, + group string, + from_state string, + pid int) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: -1, + expected: -1, + pid: pid} + r.eventType = "PROCESS_STATE_STOPPING" + r.serial = nextEventSerial() + return r +} + +func CreateProcessExitedEvent(process string, + group string, + from_state string, + expected int, + pid int) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: -1, + expected: expected, + pid: pid} + r.eventType = "PROCESS_STATE_EXITED" + r.serial = nextEventSerial() + return r +} + +func CreateProcessStoppedEvent(process string, + group string, + from_state string, + pid int) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: -1, + expected: -1, + pid: pid} + r.eventType = "PROCESS_STATE_STOPPED" + r.serial = nextEventSerial() + return r +} + +func CreateProcessFatalEvent(process string, + group string, + from_state string) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: -1, + expected: -1, + pid: 0} + r.eventType = "PROCESS_STATE_FATAL" + r.serial = nextEventSerial() + return r +} + +func CreateProcessUnknownEvent(process string, + group string, + from_state string) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: -1, + expected: -1, + pid: 0} + r.eventType = "PROCESS_STATE_UNKNOWN" + r.serial = nextEventSerial() + return r +} + +func (pse *ProcessStateEvent) GetBody() string { + body := fmt.Sprintf("processname:%s groupname:%s from_state:%s", pse.process_name, pse.group_name, pse.from_state) + if pse.tries >= 0 { + body = fmt.Sprintf("%s tries:%d", body, pse.tries) + } + + if pse.expected != -1 { + body = fmt.Sprintf("%s expected:%d", body, pse.expected) + } + + if pse.pid != 0 { + body = fmt.Sprintf("%s pid:%d", body, pse.pid) + } + return body +} + +type SupervisorStateChangeEvent struct { + BaseEvent +} + +func (s *SupervisorStateChangeEvent) GetBody() string { + return "" +} + +func CreateSupervisorStateChangeRunning() *SupervisorStateChangeEvent { + r := &SupervisorStateChangeEvent{} + r.eventType = "SUPERVISOR_STATE_CHANGE_RUNNING" + r.serial = nextEventSerial() + return r +} + +func createSupervisorStateChangeStopping() *SupervisorStateChangeEvent { + r := &SupervisorStateChangeEvent{} + r.eventType = "SUPERVISOR_STATE_CHANGE_STOPPING" + r.serial = nextEventSerial() + return r +} + +type ProcessLogEvent struct { + BaseEvent + process_name string + 
group_name string + pid int + data string +} + +func (pe *ProcessLogEvent) GetBody() string { + return fmt.Sprintf("processname:%s groupname:%s pid:%d\n%s", + pe.process_name, + pe.group_name, + pe.pid, + pe.data) +} + +func CreateProcessLogStdoutEvent(process_name string, + group_name string, + pid int, + data string) *ProcessLogEvent { + r := &ProcessLogEvent{process_name: process_name, + group_name: group_name, + pid: pid, + data: data} + r.eventType = "PROCESS_LOG_STDOUT" + r.serial = nextEventSerial() + return r +} + +func CreateProcessLogStderrEvent(process_name string, + group_name string, + pid int, + data string) *ProcessLogEvent { + r := &ProcessLogEvent{process_name: process_name, + group_name: group_name, + pid: pid, + data: data} + r.eventType = "PROCESS_LOG_STDERR" + r.serial = nextEventSerial() + return r +} + +type ProcessGroupEvent struct { + BaseEvent + group_name string +} + +func (pe *ProcessGroupEvent) GetBody() string { + return fmt.Sprintf("groupname:%s", pe.group_name) +} + +func CreateProcessGroupAddedEvent(group_name string) *ProcessGroupEvent { + r := &ProcessGroupEvent{group_name: group_name} + + r.eventType = "PROCESS_GROUP_ADDED" + r.serial = nextEventSerial() + return r +} + +func CreateProcessGroupRemovedEvent(group_name string) *ProcessGroupEvent { + r := &ProcessGroupEvent{group_name: group_name} + + r.eventType = "PROCESS_GROUP_REMOVED" + r.serial = nextEventSerial() + return r +} diff --git a/vendor/github.com/rpoletaev/supervisord/faults/faults.go b/vendor/github.com/rpoletaev/supervisord/faults/faults.go new file mode 100644 index 000000000..2bc730ee2 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/faults/faults.go @@ -0,0 +1,30 @@ +package faults + +import ( + xmlrpc "github.com/ochinchina/gorilla-xmlrpc/xml" +) + +const ( + UNKNOWN_METHOD = 1 + INCORRECT_PARAMETERS = 2 + BAD_ARGUMENTS = 3 + SIGNATURE_UNSUPPORTED = 4 + SHUTDOWN_STATE = 6 + BAD_NAME = 10 + BAD_SIGNAL = 11 + NO_FILE = 20 + NOT_EXECUTABLE = 21 + FAILED = 30 + ABNORMAL_TERMINATION = 40 + SPAWN_ERROR = 50 + ALREADY_STARTED = 60 + NOT_RUNNING = 70 + SUCCESS = 80 + ALREADY_ADDED = 90 + STILL_RUNNING = 91 + CANT_REREAD = 92 +) + +func NewFault(code int, desc string) error { + return &xmlrpc.Fault{Code: code, String: desc} +} diff --git a/vendor/github.com/rpoletaev/supervisord/logger/log.go b/vendor/github.com/rpoletaev/supervisord/logger/log.go new file mode 100644 index 000000000..189879015 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/logger/log.go @@ -0,0 +1,485 @@ +package logger + +import ( + "errors" + "fmt" + "io" + "io/ioutil" + "os" + "path" + "strconv" + "strings" + "sync" + + "github.com/rpoletaev/supervisord/events" + "github.com/rpoletaev/supervisord/faults" +) + +//implements io.Writer interface + +type Logger interface { + io.WriteCloser + SetPid(pid int) + ReadLog(offset int64, length int64) (string, error) + ReadTailLog(offset int64, length int64) (string, int64, bool, error) + ClearCurLogFile() error + ClearAllLogFile() error +} + +type LogEventEmitter interface { + emitLogEvent(data string) +} + +type FileLogger struct { + name string + maxSize int64 + backups int + curRotate int + fileSize int64 + file *os.File + logEventEmitter LogEventEmitter + locker sync.Locker +} + +type SysLogger struct { + NullLogger + logWriter io.WriteCloser + logEventEmitter LogEventEmitter +} + +type NullLogger struct { + logEventEmitter LogEventEmitter +} + +type NullLocker struct { +} + +func NewFileLogger(name string, maxSize int64, backups int, logEventEmitter 
LogEventEmitter, locker sync.Locker) *FileLogger { + logger := &FileLogger{name: name, + maxSize: maxSize, + backups: backups, + curRotate: -1, + fileSize: 0, + file: nil, + logEventEmitter: logEventEmitter, + locker: locker} + logger.updateLatestLog() + return logger +} + +func (l *FileLogger) SetPid(pid int) { + //NOTHING TO DO +} + +// return the next log file name +func (l *FileLogger) nextLogFile() { + l.curRotate++ + if l.curRotate >= l.backups { + l.curRotate = 0 + } +} + +func (l *FileLogger) updateLatestLog() { + dir := path.Dir(l.name) + files, err := ioutil.ReadDir(dir) + baseName := path.Base(l.name) + + if err != nil { + l.curRotate = 0 + } else { + //find all the rotate files + var latestFile os.FileInfo + latestNum := -1 + for _, fileInfo := range files { + if !fileInfo.IsDir() && strings.HasPrefix(fileInfo.Name(), baseName+".") { + n, err := strconv.Atoi(fileInfo.Name()[len(baseName)+1:]) + if err == nil && n >= 0 && n < l.backups { + if latestFile == nil || latestFile.ModTime().Before(fileInfo.ModTime()) { + latestFile = fileInfo + latestNum = n + } + } + } + } + l.curRotate = latestNum + if latestFile != nil { + l.fileSize = latestFile.Size() + } else { + l.fileSize = int64(0) + } + if l.fileSize >= l.maxSize || latestFile == nil { + l.nextLogFile() + l.openFile(true) + } else { + l.openFile(false) + } + } +} + +// open the file and truncate the file if trunc is true +func (l *FileLogger) openFile(trunc bool) error { + if l.file != nil { + l.file.Close() + } + var err error + fileName := l.GetCurrentLogFile() + if trunc { + l.file, err = os.Create(fileName) + } else { + l.file, err = os.OpenFile(fileName, os.O_RDWR|os.O_APPEND, 0666) + } + return err +} + +// get the name of current log file +func (l *FileLogger) GetCurrentLogFile() string { + return l.getLogFileName(l.curRotate) +} + +// get the name of previous log file +func (l *FileLogger) GetPrevLogFile() string { + i := (l.curRotate - 1 + l.backups) % l.backups + + return l.getLogFileName(i) +} + +func (l *FileLogger) getLogFileName(index int) string { + return fmt.Sprintf("%s.%d", l.name, index) +} + +// clear the current log file contents +func (l *FileLogger) ClearCurLogFile() error { + l.locker.Lock() + defer l.locker.Unlock() + + return l.openFile(true) +} + +func (l *FileLogger) ClearAllLogFile() error { + l.locker.Lock() + defer l.locker.Unlock() + + for i := 0; i < l.backups && i <= l.curRotate; i++ { + logFile := l.getLogFileName(i) + err := os.Remove(logFile) + if err != nil { + return faults.NewFault(faults.FAILED, err.Error()) + } + } + l.curRotate = 0 + err := l.openFile(true) + if err != nil { + return faults.NewFault(faults.FAILED, err.Error()) + } + return nil +} + +func (l *FileLogger) ReadLog(offset int64, length int64) (string, error) { + if offset < 0 && length != 0 { + return "", faults.NewFault(faults.BAD_ARGUMENTS, "BAD_ARGUMENTS") + } + if offset >= 0 && length < 0 { + return "", faults.NewFault(faults.BAD_ARGUMENTS, "BAD_ARGUMENTS") + } + + l.locker.Lock() + defer l.locker.Unlock() + f, err := os.Open(l.GetCurrentLogFile()) + + if err != nil { + return "", faults.NewFault(faults.FAILED, "FAILED") + } + defer f.Close() + + //check the length of file + statInfo, err := f.Stat() + if err != nil { + return "", faults.NewFault(faults.FAILED, "FAILED") + } + + fileLen := statInfo.Size() + + if offset < 0 { //offset < 0 && length == 0 + offset = fileLen + offset + if offset < 0 { + offset = 0 + } + length = fileLen - offset + } else if length == 0 { //offset >= 0 && length == 0 + if offset > 
fileLen { + return "", nil + } + length = fileLen - offset + } else { //offset >= 0 && length > 0 + + //if the offset exceeds the length of file + if offset >= fileLen { + return "", nil + } + + //compute actual bytes should be read + + if offset+length > fileLen { + length = fileLen - offset + } + } + + b := make([]byte, length) + n, err := f.ReadAt(b, offset) + if err != nil { + return "", faults.NewFault(faults.FAILED, "FAILED") + } + return string(b[:n]), nil +} + +func (l *FileLogger) ReadTailLog(offset int64, length int64) (string, int64, bool, error) { + if offset < 0 { + return "", offset, false, fmt.Errorf("offset should not be less than 0") + } + if length < 0 { + return "", offset, false, fmt.Errorf("length should be not be less than 0") + } + l.locker.Lock() + defer l.locker.Unlock() + + //open the file + f, err := os.Open(l.GetCurrentLogFile()) + if err != nil { + return "", 0, false, err + } + + defer f.Close() + + //get the length of file + statInfo, err := f.Stat() + if err != nil { + return "", 0, false, err + } + + fileLen := statInfo.Size() + + //check if offset exceeds the length of file + if offset >= fileLen { + return "", fileLen, true, nil + } + + //get the length + if offset+length > fileLen { + length = fileLen - offset + } + + b := make([]byte, length) + n, err := f.ReadAt(b, offset) + if err != nil { + return "", offset, false, err + } + return string(b[:n]), offset + int64(n), false, nil + +} + +// Override the function in io.Writer +func (l *FileLogger) Write(p []byte) (int, error) { + l.locker.Lock() + defer l.locker.Unlock() + + n, err := l.file.Write(p) + + if err != nil { + return n, err + } + l.logEventEmitter.emitLogEvent(string(p)) + l.fileSize += int64(n) + if l.fileSize >= l.maxSize { + fileInfo, errStat := os.Stat(fmt.Sprintf("%s.%d", l.name, l.curRotate)) + if errStat == nil { + l.fileSize = fileInfo.Size() + } else { + return n, errStat + } + } + if l.fileSize >= l.maxSize { + l.nextLogFile() + l.openFile(true) + } + return n, err +} + +func (l *FileLogger) Close() error { + if l.file != nil { + return l.file.Close() + } + return nil +} + +func (sl *SysLogger) Write(b []byte) (int, error) { + sl.logEventEmitter.emitLogEvent(string(b)) + if sl.logWriter != nil { + return sl.logWriter.Write(b) + } else { + return 0, errors.New("not connect to syslog server") + } +} + +func (sl *SysLogger) Close() error { + if sl.logWriter != nil { + return sl.logWriter.Close() + } else { + return errors.New("not connect to syslog server") + } +} +func NewNullLogger(logEventEmitter LogEventEmitter) *NullLogger { + return &NullLogger{logEventEmitter: logEventEmitter} +} + +func (l *NullLogger) SetPid(pid int) { + //NOTHING TO DO +} + +func (l *NullLogger) Write(p []byte) (int, error) { + l.logEventEmitter.emitLogEvent(string(p)) + return len(p), nil +} + +func (l *NullLogger) Close() error { + return nil +} + +func (l *NullLogger) ReadLog(offset int64, length int64) (string, error) { + return "", faults.NewFault(faults.NO_FILE, "NO_FILE") +} + +func (l *NullLogger) ReadTailLog(offset int64, length int64) (string, int64, bool, error) { + return "", 0, false, faults.NewFault(faults.NO_FILE, "NO_FILE") +} + +func (l *NullLogger) ClearCurLogFile() error { + return fmt.Errorf("No log") +} + +func (l *NullLogger) ClearAllLogFile() error { + return faults.NewFault(faults.NO_FILE, "NO_FILE") +} + +func NewNullLocker() *NullLocker { + return &NullLocker{} +} + +func (l *NullLocker) Lock() { +} + +func (l *NullLocker) Unlock() { +} + +type StdLogger struct { + NullLogger + 
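+	//the embedded NullLogger supplies no-op implementations of the remaining Logger methods; only Write is overridden below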
logEventEmitter LogEventEmitter + writer io.Writer +} + +func NewStdoutLogger(logEventEmitter LogEventEmitter) *StdLogger { + return &StdLogger{logEventEmitter: logEventEmitter, + writer: os.Stdout} +} + +func (l *StdLogger) Write(p []byte) (int, error) { + n, err := l.writer.Write(p) + if err != nil { + l.logEventEmitter.emitLogEvent(string(p)) + } + return n, err +} + +func NewStderrLogger(logEventEmitter LogEventEmitter) *StdLogger { + return &StdLogger{logEventEmitter: logEventEmitter, + writer: os.Stdout} +} + +type LogCaptureLogger struct { + underlineLogger Logger + procCommEventCapWriter io.Writer + procCommEventCapture *events.ProcCommEventCapture +} + +func NewLogCaptureLogger(underlineLogger Logger, + captureMaxBytes int, + stdType string, + procName string, + groupName string) *LogCaptureLogger { + r, w := io.Pipe() + eventCapture := events.NewProcCommEventCapture(r, + captureMaxBytes, + stdType, + procName, + groupName) + return &LogCaptureLogger{underlineLogger: underlineLogger, + procCommEventCapWriter: w, + procCommEventCapture: eventCapture} +} + +func (l *LogCaptureLogger) SetPid(pid int) { + l.procCommEventCapture.SetPid(pid) +} + +func (l *LogCaptureLogger) Write(p []byte) (int, error) { + l.procCommEventCapWriter.Write(p) + return l.underlineLogger.Write(p) +} + +func (l *LogCaptureLogger) Close() error { + return l.underlineLogger.Close() +} + +func (l *LogCaptureLogger) ReadLog(offset int64, length int64) (string, error) { + return l.underlineLogger.ReadLog(offset, length) +} + +func (l *LogCaptureLogger) ReadTailLog(offset int64, length int64) (string, int64, bool, error) { + return l.underlineLogger.ReadTailLog(offset, length) +} + +func (l *LogCaptureLogger) ClearCurLogFile() error { + return l.underlineLogger.ClearCurLogFile() +} + +func (l *LogCaptureLogger) ClearAllLogFile() error { + return l.underlineLogger.ClearAllLogFile() +} + +type NullLogEventEmitter struct { +} + +func NewNullLogEventEmitter() *NullLogEventEmitter { + return &NullLogEventEmitter{} +} + +func (ne *NullLogEventEmitter) emitLogEvent(data string) { +} + +type StdLogEventEmitter struct { + Type string + process_name string + group_name string + pidFunc func() int +} + +func NewStdoutLogEventEmitter(process_name string, group_name string, procPidFunc func() int) *StdLogEventEmitter { + return &StdLogEventEmitter{Type: "stdout", + process_name: process_name, + group_name: group_name, + pidFunc: procPidFunc} +} + +func NewStderrLogEventEmitter(process_name string, group_name string, procPidFunc func() int) *StdLogEventEmitter { + return &StdLogEventEmitter{Type: "stderr", + process_name: process_name, + group_name: group_name, + pidFunc: procPidFunc} +} + +func (se *StdLogEventEmitter) emitLogEvent(data string) { + if se.Type == "stdout" { + events.EmitEvent(events.CreateProcessLogStdoutEvent(se.process_name, se.group_name, se.pidFunc(), data)) + } else { + events.EmitEvent(events.CreateProcessLogStderrEvent(se.process_name, se.group_name, se.pidFunc(), data)) + } +} diff --git a/vendor/github.com/rpoletaev/supervisord/logger/log_unix.go b/vendor/github.com/rpoletaev/supervisord/logger/log_unix.go new file mode 100644 index 000000000..da683520c --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/logger/log_unix.go @@ -0,0 +1,16 @@ +// +build !windows,!nacl,!plan9 + +package logger + +import ( + "log/syslog" +) + +func NewSysLogger(name string, logEventEmitter LogEventEmitter) *SysLogger { + writer, err := syslog.New(syslog.LOG_DEBUG, name) + logger := &SysLogger{logEventEmitter: 
logEventEmitter} + if err == nil { + logger.logWriter = writer + } + return logger +} diff --git a/vendor/github.com/rpoletaev/supervisord/logger/log_windows.go b/vendor/github.com/rpoletaev/supervisord/logger/log_windows.go new file mode 100644 index 000000000..cf4609f57 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/logger/log_windows.go @@ -0,0 +1,7 @@ +// +build windows plan9 nacl + +package logger + +func NewSysLogger(name string, logEventEmitter LogEventEmitter) *SysLogger { + return &SysLogger{logEventEmitter: logEventEmitter, logWriter: nil} +} diff --git a/vendor/github.com/rpoletaev/supervisord/main.go b/vendor/github.com/rpoletaev/supervisord/main.go new file mode 100644 index 000000000..dfc491df6 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/main.go @@ -0,0 +1,75 @@ +package main + +import ( + "fmt" + "os" + "os/signal" + "runtime" + "syscall" + + log "github.com/sirupsen/logrus" + "github.com/jessevdk/go-flags" +) + +type Options struct { + Configuration string `short:"c" long:"configuration" description:"the configuration file" default:"supervisord.conf"` + Daemon bool `short:"d" long:"daemon" description:"run as daemon"` +} + +func init() { + log.SetOutput(os.Stdout) + if runtime.GOOS == "windows" { + log.SetFormatter(&log.TextFormatter{DisableColors: true, FullTimestamp: true}) + } else { + log.SetFormatter(&log.TextFormatter{DisableColors: false, FullTimestamp: true}) + } + log.SetLevel(log.DebugLevel) +} + +func initSignals(s *Supervisor) { + sigs := make(chan os.Signal, 1) + signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) + go func() { + sig := <-sigs + log.WithFields(log.Fields{"signal": sig}).Info("receive a signal to stop all process & exit") + s.procMgr.StopAllProcesses() + os.Exit(-1) + }() + +} + +var options Options +var parser = flags.NewParser(&options, flags.Default & ^flags.PrintErrors) + +func RunServer() { + // infinite loop for handling Restart ('reload' command) + for true { + s := NewSupervisor(options.Configuration) + initSignals(s) + if sErr, _, _, _ := s.Reload(); sErr != nil { + panic(sErr) + } + s.WaitForExit() + } +} + +func main() { + if _, err := parser.Parse(); err != nil { + flagsErr, ok := err.(*flags.Error) + if ok { + switch flagsErr.Type { + case flags.ErrHelp: + fmt.Fprintln(os.Stdout, err) + os.Exit(0) + case flags.ErrCommandRequired: + if options.Daemon { + Deamonize(RunServer) + } else { + RunServer() + } + default: + panic(err) + } + } + } +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/command_parser.go b/vendor/github.com/rpoletaev/supervisord/process/command_parser.go new file mode 100644 index 000000000..b711bafdb --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/command_parser.go @@ -0,0 +1,81 @@ +package process + +import ( + "fmt" + "unicode" +) + +// find the position of byte ch in the string s start from offset +// +// return: -1 if byte ch is not found, >= offset if the ch is found +// in the string s from offset +func findChar(s string, offset int, ch byte) int { + for i := offset; i < len(s); i++ { + if s[i] == '\\' { + i++ + } else if s[i] == ch { + return i + } + } + return -1 +} + +// skip all the white space and return the first position of non-space char +// +// return: the first position of non-space char or -1 if all the char +// from offset are space +func skipSpace(s string, offset int) int { + for i := offset; i < len(s); i++ { + if !unicode.IsSpace(rune(s[i])) { + return i + } + } + return -1 +} + +func appendArgument(arg string, args []string) 
[]string { + if arg[0] == '"' || arg[0] == '\'' { + return append(args, arg[1:len(arg)-1]) + } + return append(args, arg) +} + +func parseCommand(command string) ([]string, error) { + args := make([]string, 0) + cmdLen := len(command) + for i := 0; i < cmdLen; { + //find the first non-space char + j := skipSpace(command, i) + if j == -1 { + break + } + i = j + for ; j < cmdLen; j++ { + if unicode.IsSpace(rune(command[j])) { + args = appendArgument(command[i:j], args) + i = j + 1 + break + } else if command[j] == '\\' { + j++ + } else if command[j] == '"' || command[j] == '\'' { + k := findChar(command, j+1, command[j]) + if k == -1 { + args = appendArgument(command[i:], args) + i = cmdLen + } else { + args = appendArgument(command[i:k+1], args) + i = k + 1 + } + break + } + } + if j >= cmdLen { + args = appendArgument(command[i:], args) + i = cmdLen + } + } + if len(args) <= 0 { + return nil, fmt.Errorf("no command from empty string") + } + return args, nil +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/path.go b/vendor/github.com/rpoletaev/supervisord/process/path.go new file mode 100644 index 000000000..82b2fbd1d --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/path.go @@ -0,0 +1,46 @@ +package process + +import ( + "os/user" + "path/filepath" +) + +func path_split(path string) []string { + r := make([]string, 0) + cur_path := path + for { + dir, file := filepath.Split(cur_path) + if len(file) > 0 { + r = append(r, file) + } + if len(dir) <= 0 { + break + } + cur_path = dir[0 : len(dir)-1] + } + for i, j := 0, len(r)-1; i < j; i, j = i+1, j-1 { + r[i], r[j] = r[j], r[i] + } + return r +} +func Path_expand(path string) (string, error) { + pathList := path_split(path) + + if len(pathList) > 0 && len(pathList[0]) > 0 && pathList[0][0] == '~' { + var usr *user.User = nil + var err error = nil + + if pathList[0] == "~" { + usr, err = user.Current() + } else { + usr, err = user.Lookup(pathList[0][1:]) + } + + if err != nil { + return "", err + } + pathList[0] = usr.HomeDir + return filepath.Join(pathList...), nil + } + return path, nil +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_linux.go b/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_linux.go new file mode 100644 index 000000000..90e23f40b --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_linux.go @@ -0,0 +1,12 @@ +// +build linux + +package process + +import ( + "syscall" +) + +func set_deathsig(sysProcAttr *syscall.SysProcAttr) { + sysProcAttr.Setpgid = true + sysProcAttr.Pdeathsig = syscall.SIGKILL +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_other.go b/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_other.go new file mode 100644 index 000000000..da0042ecc --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_other.go @@ -0,0 +1,12 @@ +// +build !linux +// +build !windows + +package process + +import ( + "syscall" +) + +func set_deathsig(sysProcAttr *syscall.SysProcAttr) { + sysProcAttr.Setpgid = true +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_windows.go b/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_windows.go new file mode 100644 index 000000000..e19e3d78c --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_windows.go @@ -0,0 +1,9 @@ +// +build windows +package process + +import ( + "syscall" +) + +func set_deathsig(_ *syscall.SysProcAttr) { +} diff --git 
a/vendor/github.com/rpoletaev/supervisord/process/process.go b/vendor/github.com/rpoletaev/supervisord/process/process.go new file mode 100644 index 000000000..f016dc3a6 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/process.go @@ -0,0 +1,689 @@ +package process + +import ( + "fmt" + "io" + "os" + "os/exec" + "os/user" + "strconv" + "strings" + "sync" + "syscall" + "time" + + "github.com/rpoletaev/supervisord/config" + "github.com/rpoletaev/supervisord/events" + "github.com/rpoletaev/supervisord/logger" + "github.com/rpoletaev/supervisord/signals" + log "github.com/sirupsen/logrus" +) + +type ProcessState int + +const ( + STOPPED ProcessState = iota + STARTING = 10 + RUNNING = 20 + BACKOFF = 30 + STOPPING = 40 + EXITED = 100 + FATAL = 200 + UNKNOWN = 1000 +) + +func (p ProcessState) String() string { + switch p { + case STOPPED: + return "STOPPED" + case STARTING: + return "STARTING" + case RUNNING: + return "RUNNING" + case BACKOFF: + return "BACKOFF" + case STOPPING: + return "STOPPING" + case EXITED: + return "EXITED" + case FATAL: + return "FATAL" + default: + return "UNKNOWN" + } +} + +type Process struct { + supervisor_id string + config *config.ConfigEntry + cmd *exec.Cmd + startTime time.Time + stopTime time.Time + state ProcessState + //true if process is starting + inStart bool + //true if the process is stopped by user + stopByUser bool + retryTimes int + lock sync.RWMutex + stdin io.WriteCloser + StdoutLog logger.Logger + StderrLog logger.Logger +} + +func NewProcess(supervisor_id string, config *config.ConfigEntry) *Process { + proc := &Process{supervisor_id: supervisor_id, + config: config, + cmd: nil, + startTime: time.Unix(0, 0), + stopTime: time.Unix(0, 0), + state: STOPPED, + inStart: false, + stopByUser: false, + retryTimes: 0} + proc.config = config + proc.cmd = nil + + //start the process if autostart is set to true + //if proc.isAutoStart() { + // proc.Start(false) + //} + + return proc +} + +func (p *Process) Start(wait bool) { + log.WithFields(log.Fields{"program": p.GetName()}).Info("try to start program") + p.lock.Lock() + if p.inStart { + log.WithFields(log.Fields{"program": p.GetName()}).Info("Don't start program again, program is already started") + p.lock.Unlock() + return + } + + p.inStart = true + p.stopByUser = false + p.lock.Unlock() + + var runCond *sync.Cond = nil + finished := false + if wait { + runCond = sync.NewCond(&sync.Mutex{}) + runCond.L.Lock() + } + + go func() { + p.retryTimes = 0 + + for { + if wait { + runCond.L.Lock() + } + p.run(func() { + finished = true + if wait { + runCond.L.Unlock() + runCond.Signal() + } + }) + if (p.stopTime.Unix() - p.startTime.Unix()) < int64(p.getStartSeconds()) { + p.retryTimes++ + } else { + p.retryTimes = 0 + } + if p.stopByUser { + log.WithFields(log.Fields{"program": p.GetName()}).Info("Stopped by user, don't start it again") + break + } + if !p.isAutoRestart() { + log.WithFields(log.Fields{"program": p.GetName()}).Info("Don't start the stopped program because its autorestart flag is false") + break + } + if p.retryTimes >= p.getStartRetries() { + log.WithFields(log.Fields{"program": p.GetName()}).Info("Don't start the stopped program because its retry times ", p.retryTimes, " is greater than start retries ", p.getStartRetries()) + break + } + } + p.lock.Lock() + p.inStart = false + p.lock.Unlock() + }() + if wait && !finished { + runCond.Wait() + runCond.L.Unlock() + } +} + +func (p *Process) GetName() string { + if p.config.IsProgram() { + return p.config.GetProgramName() + } else if 
p.config.IsEventListener() { + return p.config.GetEventListenerName() + } else { + return "" + } +} + +func (p *Process) GetGroup() string { + return p.config.Group +} + +func (p *Process) GetDescription() string { + p.lock.Lock() + defer p.lock.Unlock() + if p.state == RUNNING { + seconds := int(time.Now().Sub(p.startTime).Seconds()) + minutes := seconds / 60 + hours := minutes / 60 + days := hours / 24 + if days > 0 { + return fmt.Sprintf("pid %d, uptime %d days, %d:%02d:%02d", p.cmd.Process.Pid, days, hours%24, minutes%60, seconds%60) + } else { + return fmt.Sprintf("pid %d, uptime %d:%02d:%02d", p.cmd.Process.Pid, hours%24, minutes%60, seconds%60) + } + } else if p.state != STOPPED { + return p.stopTime.String() + } + return "" +} + +func (p *Process) GetExitstatus() int { + p.lock.Lock() + defer p.lock.Unlock() + + if p.state == EXITED || p.state == BACKOFF { + if p.cmd.ProcessState == nil { + return 0 + } + status, ok := p.cmd.ProcessState.Sys().(syscall.WaitStatus) + if ok { + return status.ExitStatus() + } + } + return 0 +} + +func (p *Process) GetPid() int { + p.lock.Lock() + defer p.lock.Unlock() + + if p.state == STOPPED || p.state == FATAL || p.state == UNKNOWN || p.state == EXITED || p.state == BACKOFF { + return 0 + } + return p.cmd.Process.Pid +} + +// Get the process state +func (p *Process) GetState() ProcessState { + return p.state +} + +func (p *Process) GetStartTime() time.Time { + return p.startTime +} + +func (p *Process) GetStopTime() time.Time { + switch p.state { + case STARTING: + fallthrough + case RUNNING: + fallthrough + case STOPPING: + return time.Unix(0, 0) + default: + return p.stopTime + } +} + +func (p *Process) GetStdoutLogfile() string { + file_name := p.config.GetStringExpression("stdout_logfile", "/dev/null") + expand_file, err := Path_expand(file_name) + if err == nil { + return expand_file + } else { + return file_name + } +} + +func (p *Process) GetStderrLogfile() string { + file_name := p.config.GetStringExpression("stderr_logfile", "/dev/null") + expand_file, err := Path_expand(file_name) + if err == nil { + return expand_file + } else { + return file_name + } +} + +func (p *Process) getStartSeconds() int { + return p.config.GetInt("startsecs", 1) +} + +func (p *Process) getStartRetries() int { + return p.config.GetInt("startretries", 3) +} + +func (p *Process) isAutoStart() bool { + return p.config.GetString("autostart", "true") == "true" +} + +func (p *Process) GetPriority() int { + return p.config.GetInt("priority", 999) +} + +func (p *Process) getNumberProcs() int { + return p.config.GetInt("numprocs", 1) +} + +func (p *Process) SendProcessStdin(chars string) error { + if p.stdin != nil { + _, err := p.stdin.Write([]byte(chars)) + return err + } + return fmt.Errorf("NO_FILE") +} + +// check if the process should be +func (p *Process) isAutoRestart() bool { + autoRestart := p.config.GetString("autorestart", "unexpected") + + if autoRestart == "false" { + return false + } else if autoRestart == "true" { + return true + } else { + p.lock.Lock() + defer p.lock.Unlock() + if p.cmd != nil && p.cmd.ProcessState != nil { + exitCode, err := p.getExitCode() + return err == nil && p.inExitCodes(exitCode) + } + } + return false + +} + +func (p *Process) inExitCodes(exitCode int) bool { + for _, code := range p.getExitCodes() { + if code == exitCode { + return true + } + } + return false +} + +func (p *Process) getExitCode() (int, error) { + if p.cmd.ProcessState == nil { + return -1, fmt.Errorf("no exit code") + } + if status, ok := 
p.cmd.ProcessState.Sys().(syscall.WaitStatus); ok { + return status.ExitStatus(), nil + } + + return -1, fmt.Errorf("no exit code") + +} + +func (p *Process) getExitCodes() []int { + strExitCodes := strings.Split(p.config.GetString("exitcodes", "0,2"), ",") + result := make([]int, 0) + for _, val := range strExitCodes { + i, err := strconv.Atoi(val) + if err == nil { + result = append(result, i) + } + } + return result +} + +func (p *Process) run(finishCb func()) { + args, err := parseCommand(p.config.GetStringExpression("command", "")) + + if err != nil { + log.Error("the command is empty string") + finishCb() + return + } + p.lock.Lock() + if p.cmd != nil && p.cmd.ProcessState != nil { + status := p.cmd.ProcessState.Sys().(syscall.WaitStatus) + if status.Continued() { + log.WithFields(log.Fields{"program": p.GetName()}).Info("Don't start program because it is running") + p.lock.Unlock() + finishCb() + return + } + } + p.cmd = exec.Command(args[0]) + if len(args) > 1 { + p.cmd.Args = args + } + p.cmd.SysProcAttr = &syscall.SysProcAttr{} + if p.setUser() != nil { + log.WithFields(log.Fields{"user": p.config.GetString("user", "")}).Error("fail to run as user") + p.lock.Unlock() + finishCb() + return + } + set_deathsig(p.cmd.SysProcAttr) + p.setEnv() + p.setDir() + p.setLog() + + p.stdin, _ = p.cmd.StdinPipe() + p.startTime = time.Now() + p.changeStateTo(STARTING) + err = p.cmd.Start() + if err != nil { + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Errorf("fail to start program with error:%v", err) + p.changeStateTo(FATAL) + p.stopTime = time.Now() + p.lock.Unlock() + finishCb() + } else { + if p.StdoutLog != nil { + p.StdoutLog.SetPid(p.cmd.Process.Pid) + } + if p.StderrLog != nil { + p.StderrLog.SetPid(p.cmd.Process.Pid) + } + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Info("success to start program") + startSecs := p.config.GetInt("startsecs", 1) + //Set startsec to 0 to indicate that the program needn't stay + //running for any particular amount of time. 
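+	//with a positive startsecs, wait that long and only mark the program RUNNING if its process can still be found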
+ if startSecs <= 0 { + p.changeStateTo(RUNNING) + + } else { + time.Sleep(time.Duration(startSecs) * time.Second) + if tmpProc, err := os.FindProcess(p.cmd.Process.Pid); err == nil && tmpProc != nil { + p.changeStateTo(RUNNING) + } + } + p.lock.Unlock() + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Debug("wait program exit") + finishCb() + err = p.cmd.Wait() + if err == nil { + if p.cmd.ProcessState != nil { + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Infof("program stopped with status:%v", p.cmd.ProcessState) + } else { + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Info("program stopped") + } + } else { + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Errorf("program stopped with error:%v", err) + } + + p.lock.Lock() + p.stopTime = time.Now() + if p.stopTime.Unix()-p.startTime.Unix() < int64(startSecs) { + p.changeStateTo(BACKOFF) + } else { + p.changeStateTo(EXITED) + } + p.lock.Unlock() + } + +} + +func (p *Process) changeStateTo(procState ProcessState) { + if p.config.IsProgram() { + progName := p.config.GetProgramName() + groupName := p.config.GetGroupName() + if procState == STARTING { + events.EmitEvent(events.CreateProcessStartingEvent(progName, groupName, p.state.String(), p.retryTimes)) + } else if procState == RUNNING { + events.EmitEvent(events.CreateProcessRunningEvent(progName, groupName, p.state.String(), p.cmd.Process.Pid)) + } else if procState == BACKOFF { + events.EmitEvent(events.CreateProcessBackoffEvent(progName, groupName, p.state.String(), p.retryTimes)) + } else if procState == STOPPING { + events.EmitEvent(events.CreateProcessStoppingEvent(progName, groupName, p.state.String(), p.cmd.Process.Pid)) + } else if procState == EXITED { + exitCode, err := p.getExitCode() + expected := 0 + if err == nil && p.inExitCodes(exitCode) { + expected = 1 + } + events.EmitEvent(events.CreateProcessExitedEvent(progName, groupName, p.state.String(), expected, p.cmd.Process.Pid)) + } else if procState == FATAL { + events.EmitEvent(events.CreateProcessFatalEvent(progName, groupName, p.state.String())) + } else if procState == STOPPED { + events.EmitEvent(events.CreateProcessStoppedEvent(progName, groupName, p.state.String(), p.cmd.Process.Pid)) + } else if procState == UNKNOWN { + events.EmitEvent(events.CreateProcessUnknownEvent(progName, groupName, p.state.String())) + } + } + p.state = procState +} + +func (p *Process) Signal(sig os.Signal) error { + p.lock.Lock() + defer p.lock.Unlock() + + return p.sendSignal(sig) +} + +func (p *Process) sendSignal(sig os.Signal) error { + if p.cmd != nil && p.cmd.Process != nil { + err := signals.Kill(p.cmd.Process, sig) + return err + } + return fmt.Errorf("process is not started") +} + +func (p *Process) setEnv() { + env := p.config.GetEnv("environment") + if len(env) != 0 { + p.cmd.Env = append(os.Environ(), env...) 
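+	//configured environment entries are appended on top of the inherited parent environment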
+ } else { + p.cmd.Env = os.Environ() + } +} + +func (p *Process) setDir() { + dir := p.config.GetStringExpression("directory", "") + if dir != "" { + p.cmd.Dir = dir + } +} + +func (p *Process) setLog() { + if p.config.IsProgram() { + p.StdoutLog = p.createLogger(p.GetStdoutLogfile(), + int64(p.config.GetBytes("stdout_logfile_maxbytes", 50*1024*1024)), + p.config.GetInt("stdout_logfile_backups", 10), + p.createStdoutLogEventEmitter()) + capture_bytes := p.config.GetBytes("stdout_capture_maxbytes", 0) + if capture_bytes > 0 { + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Info("capture stdout process communication") + p.StdoutLog = logger.NewLogCaptureLogger(p.StdoutLog, + capture_bytes, + "PROCESS_COMMUNICATION_STDOUT", + p.GetName(), + p.GetGroup()) + } + + p.cmd.Stdout = p.StdoutLog + + if p.config.GetBool("redirect_stderr", false) { + p.StderrLog = p.StdoutLog + } else { + p.StderrLog = p.createLogger(p.GetStderrLogfile(), + int64(p.config.GetBytes("stderr_logfile_maxbytes", 50*1024*1024)), + p.config.GetInt("stderr_logfile_backups", 10), + p.createStderrLogEventEmitter()) + } + + capture_bytes = p.config.GetBytes("stderr_capture_maxbytes", 0) + + if capture_bytes > 0 { + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Info("capture stderr process communication") + p.StderrLog = logger.NewLogCaptureLogger(p.StdoutLog, + capture_bytes, + "PROCESS_COMMUNICATION_STDERR", + p.GetName(), + p.GetGroup()) + } + + p.cmd.Stderr = p.StderrLog + + } else if p.config.IsEventListener() { + in, err := p.cmd.StdoutPipe() + if err != nil { + log.WithFields(log.Fields{"eventListener": p.config.GetEventListenerName()}).Error("fail to get stdin") + return + } + out, err := p.cmd.StdinPipe() + if err != nil { + log.WithFields(log.Fields{"eventListener": p.config.GetEventListenerName()}).Error("fail to get stdout") + return + } + events := strings.Split(p.config.GetString("events", ""), ",") + for i, event := range events { + events[i] = strings.TrimSpace(event) + } + + p.registerEventListener(p.config.GetEventListenerName(), + events, + in, + out) + } +} + +func (p *Process) createStdoutLogEventEmitter() logger.LogEventEmitter { + if p.config.GetBytes("stdout_capture_maxbytes", 0) <= 0 && p.config.GetBool("stdout_events_enabled", false) { + return logger.NewStdoutLogEventEmitter(p.config.GetProgramName(), p.config.GetGroupName(), func() int { + return p.GetPid() + }) + } else { + return logger.NewNullLogEventEmitter() + } +} + +func (p *Process) createStderrLogEventEmitter() logger.LogEventEmitter { + if p.config.GetBytes("stderr_capture_maxbytes", 0) <= 0 && p.config.GetBool("stderr_events_enabled", false) { + return logger.NewStdoutLogEventEmitter(p.config.GetProgramName(), p.config.GetGroupName(), func() int { + return p.GetPid() + }) + } else { + return logger.NewNullLogEventEmitter() + } +} + +func (p *Process) registerEventListener(eventListenerName string, + _events []string, + stdin io.Reader, + stdout io.Writer) { + eventListener := events.NewEventListener(eventListenerName, + p.supervisor_id, + stdin, + stdout, + p.config.GetInt("buffer_size", 100)) + events.RegisterEventListener(eventListenerName, _events, eventListener) +} + +func (p *Process) unregisterEventListener(eventListenerName string) { + events.UnregisterEventListener(eventListenerName) +} + +func (p *Process) createLogger(logFile string, maxBytes int64, backups int, logEventEmitter logger.LogEventEmitter) logger.Logger { + var mylogger logger.Logger + mylogger = 
logger.NewNullLogger(logEventEmitter) + + if logFile == "/dev/stdout" { + mylogger = logger.NewStdoutLogger(logEventEmitter) + } else if logFile == "/dev/stderr" { + mylogger = logger.NewStderrLogger(logEventEmitter) + } else if logFile == "syslog" { + mylogger = logger.NewSysLogger(p.GetName(), logEventEmitter) + } else if len(logFile) > 0 { + mylogger = logger.NewFileLogger(logFile, maxBytes, backups, logEventEmitter, logger.NewNullLocker()) + } + return mylogger +} + +func (p *Process) setUser() error { + userName := p.config.GetString("user", "") + if len(userName) == 0 { + return nil + } + + //check if group is provided + pos := strings.Index(userName, ":") + groupName := "" + if pos != -1 { + groupName = userName[pos+1:] + userName = userName[0:pos] + } + u, err := user.Lookup(userName) + if err != nil { + return err + } + uid, err := strconv.ParseUint(u.Uid, 10, 32) + if err != nil { + return err + } + gid, err := strconv.ParseUint(u.Gid, 10, 32) + if err != nil && groupName == "" { + return err + } + if groupName != "" { + g, err := user.LookupGroup(groupName) + if err != nil { + return err + } + gid, err = strconv.ParseUint(g.Gid, 10, 32) + if err != nil { + return err + } + } + set_user_id(p.cmd.SysProcAttr, uint32(uid), uint32(gid)) + return nil +} + +//send signal to process to stop it +func (p *Process) Stop(wait bool) { + p.lock.RLock() + p.stopByUser = true + p.lock.RUnlock() + log.WithFields(log.Fields{"program": p.GetName()}).Info("stop the program") + sig, err := signals.ToSignal(p.config.GetString("stopsignal", "")) + if err == nil { + p.Signal(sig) + } + waitsecs := time.Duration(p.config.GetInt("stopwaitsecs", 10)) * time.Second + endTime := time.Now().Add(waitsecs) + go func() { + //wait at most "stopwaitsecs" seconds + for { + //if it already exits + if p.state != STARTING && p.state != RUNNING && p.state != STOPPING { + break + } + //if endTime reaches, raise signal syscall.SIGKILL + if endTime.Before(time.Now()) { + log.WithFields(log.Fields{"program": p.GetName()}).Info("force to kill the program") + p.Signal(syscall.SIGKILL) + break + } else { + time.Sleep(1 * time.Second) + } + } + }() + if wait { + for { + // if the program exits + if p.state != STARTING && p.state != RUNNING && p.state != STOPPING { + break + } + time.Sleep(1 * time.Second) + } + } +} + +func (p *Process) GetStatus() string { + if p.cmd.ProcessState.Exited() { + return p.cmd.ProcessState.String() + } + return "running" +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/process_manager.go b/vendor/github.com/rpoletaev/supervisord/process/process_manager.go new file mode 100644 index 000000000..c99e72481 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/process_manager.go @@ -0,0 +1,160 @@ +package process + +import ( + "strings" + "sync" + + "github.com/rpoletaev/supervisord/config" + log "github.com/sirupsen/logrus" +) + +type ProcessManager struct { + procs map[string]*Process + eventListeners map[string]*Process + lock sync.Mutex +} + +func NewProcessManager() *ProcessManager { + return &ProcessManager{procs: make(map[string]*Process), + eventListeners: make(map[string]*Process), + } +} + +func (pm *ProcessManager) CreateProcess(supervisor_id string, config *config.ConfigEntry) *Process { + pm.lock.Lock() + defer pm.lock.Unlock() + if config.IsProgram() { + return pm.createProgram(supervisor_id, config) + } else if config.IsEventListener() { + return pm.createEventListener(supervisor_id, config) + } else { + return nil + } +} + +func (pm *ProcessManager) 
StartAutoStartPrograms() { + pm.ForEachProcess(func(proc *Process) { + if proc.isAutoStart() { + proc.Start(false) + } + }) +} + +func (pm *ProcessManager) createProgram(supervisor_id string, config *config.ConfigEntry) *Process { + procName := config.GetProgramName() + + proc, ok := pm.procs[procName] + + if !ok { + proc = NewProcess(supervisor_id, config) + pm.procs[procName] = proc + } + log.Info("create process:", procName) + return proc +} + +func (pm *ProcessManager) createEventListener(supervisor_id string, config *config.ConfigEntry) *Process { + eventListenerName := config.GetEventListenerName() + + evtListener, ok := pm.eventListeners[eventListenerName] + + if !ok { + evtListener = NewProcess(supervisor_id, config) + pm.eventListeners[eventListenerName] = evtListener + } + log.Info("create event listener:", eventListenerName) + return evtListener +} + +func (pm *ProcessManager) Add(name string, proc *Process) { + pm.lock.Lock() + defer pm.lock.Unlock() + pm.procs[name] = proc + log.Info("add process:", name) +} + +// remove the process from the manager +// +// Arguments: +// name - the name of program +// +// Return the process or nil +func (pm *ProcessManager) Remove(name string) *Process { + pm.lock.Lock() + defer pm.lock.Unlock() + proc, _ := pm.procs[name] + delete(pm.procs, name) + log.Info("remove process:", name) + return proc +} + +// return process if found or nil if not found +func (pm *ProcessManager) Find(name string) *Process { + pm.lock.Lock() + defer pm.lock.Unlock() + proc, ok := pm.procs[name] + if ok { + log.Debug("succeed to find process:", name) + } else { + //remove group field if it is included + if pos := strings.Index(name, ":"); pos != -1 { + proc, ok = pm.procs[name[pos+1:]] + } + if !ok { + log.Info("fail to find process:", name) + } + } + return proc +} + +// clear all the processes +func (pm *ProcessManager) Clear() { + pm.lock.Lock() + defer pm.lock.Unlock() + pm.procs = make(map[string]*Process) +} + +func (pm *ProcessManager) ForEachProcess(procFunc func(p *Process)) { + pm.lock.Lock() + defer pm.lock.Unlock() + + procs := pm.getAllProcess() + for _, proc := range procs { + procFunc(proc) + } +} + +func (pm *ProcessManager) getAllProcess() []*Process { + tmpProcs := make([]*Process, 0) + for _, proc := range pm.procs { + tmpProcs = append(tmpProcs, proc) + } + return sortProcess(tmpProcs) +} + +func (pm *ProcessManager) StopAllProcesses() { + pm.ForEachProcess(func(proc *Process) { + proc.Stop(true) + }) +} + +func sortProcess(procs []*Process) []*Process { + prog_configs := make([]*config.ConfigEntry, 0) + for _, proc := range procs { + if proc.config.IsProgram() { + prog_configs = append(prog_configs, proc.config) + } + } + + result := make([]*Process, 0) + p := config.NewProcessSorter() + for _, config := range p.SortProgram(prog_configs) { + for _, proc := range procs { + if proc.config == config { + result = append(result, proc) + } + } + } + + return result +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/set_user_id.go b/vendor/github.com/rpoletaev/supervisord/process/set_user_id.go new file mode 100644 index 000000000..ec28fb5c4 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/set_user_id.go @@ -0,0 +1,11 @@ +// +build !windows + +package process + +import ( + "syscall" +) + +func set_user_id(procAttr *syscall.SysProcAttr, uid uint32, gid uint32) { + procAttr.Credential = &syscall.Credential{Uid: uid, Gid: gid, NoSetGroups: true} +} diff --git 
a/vendor/github.com/rpoletaev/supervisord/process/set_user_id_windows.go b/vendor/github.com/rpoletaev/supervisord/process/set_user_id_windows.go new file mode 100644 index 000000000..d114e4068 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/set_user_id_windows.go @@ -0,0 +1,11 @@ +// +build windows + +package process + +import ( + "syscall" +) + +func set_user_id(_ *syscall.SysProcAttr, _ uint32, _ uint32) { + +} diff --git a/vendor/github.com/rpoletaev/supervisord/signals/signal.go b/vendor/github.com/rpoletaev/supervisord/signals/signal.go new file mode 100644 index 000000000..16483a1d6 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/signals/signal.go @@ -0,0 +1,34 @@ +// +build !windows + +package signals + +import ( + "os" + "syscall" +) + +//convert a signal name to signal +func ToSignal(signalName string) (os.Signal, error) { + if signalName == "HUP" { + return syscall.SIGHUP, nil + } else if signalName == "INT" { + return syscall.SIGINT, nil + } else if signalName == "QUIT" { + return syscall.SIGQUIT, nil + } else if signalName == "KILL" { + return syscall.SIGKILL, nil + } else if signalName == "USR1" { + return syscall.SIGUSR1, nil + } else if signalName == "USR2" { + return syscall.SIGUSR2, nil + } else { + return syscall.SIGTERM, nil + + } + +} + +func Kill(process *os.Process, sig os.Signal) error { + localSig := sig.(syscall.Signal) + return syscall.Kill(-process.Pid, localSig) +} diff --git a/vendor/github.com/rpoletaev/supervisord/signals/signal_windows.go b/vendor/github.com/rpoletaev/supervisord/signals/signal_windows.go new file mode 100644 index 000000000..5c3e539ac --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/signals/signal_windows.go @@ -0,0 +1,46 @@ +// +build windows + +package signals + +import ( + "errors" + "fmt" + log "github.com/sirupsen/logrus" + "os" + "os/exec" + "syscall" +) + +//convert a signal name to signal +func ToSignal(signalName string) (os.Signal, error) { + if signalName == "HUP" { + return syscall.SIGHUP, nil + } else if signalName == "INT" { + return syscall.SIGINT, nil + } else if signalName == "QUIT" { + return syscall.SIGQUIT, nil + } else if signalName == "KILL" { + return syscall.SIGKILL, nil + } else if signalName == "USR1" { + log.Warn("signal USR1 is not supported in windows") + return nil, errors.New("signal USR1 is not supported in windows") + } else if signalName == "USR2" { + log.Warn("signal USR2 is not supported in windows") + return nil, errors.New("signal USR2 is not supported in windows") + } else { + return syscall.SIGTERM, nil + + } + +} + +func Kill(process *os.Process, sig os.Signal) error { + //Signal command can't kill children processes, call taskkill command to kill them + cmd := exec.Command("taskkill", "/F", "/T", "/PID", fmt.Sprintf("%d", process.Pid)) + err := cmd.Start() + if err == nil { + return cmd.Wait() + } + //if fail to find taskkill, fallback to normal signal + return process.Signal(sig) +} diff --git a/vendor/github.com/rpoletaev/supervisord/supervisor.go b/vendor/github.com/rpoletaev/supervisord/supervisor.go new file mode 100644 index 000000000..cc47db0f0 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/supervisor.go @@ -0,0 +1,586 @@ +package main + +import ( + "fmt" + "net/http" + "os" + "strings" + "sync" + "time" + + "github.com/rpoletaev/supervisord/config" + "github.com/rpoletaev/supervisord/events" + "github.com/rpoletaev/supervisord/faults" + "github.com/rpoletaev/supervisord/logger" + "github.com/rpoletaev/supervisord/process" + 
"github.com/rpoletaev/supervisord/signals" + "github.com/rpoletaev/supervisord/types" + "github.com/rpoletaev/supervisord/util" + + log "github.com/sirupsen/logrus" +) + +const ( + SUPERVISOR_VERSION = "3.0" +) + +type Supervisor struct { + config *config.Config + procMgr *process.ProcessManager + xmlRPC *XmlRPC + logger logger.Logger + restarting bool +} + +type StartProcessArgs struct { + Name string + Wait bool `default:"true"` +} + +type ProcessStdin struct { + Name string + Chars string +} + +type RemoteCommEvent struct { + Type string + Data string +} + +type StateInfo struct { + Statecode int `xml:"statecode"` + Statename string `xml:"statename"` +} + +type RpcTaskResult struct { + Name string `xml:"name"` + Group string `xml:"group"` + Status int `xml:"status"` + Description string `xml:"description"` +} + +type LogReadInfo struct { + Offset int + Length int +} + +type ProcessLogReadInfo struct { + Name string + Offset int + Length int +} + +type ProcessTailLog struct { + LogData string + Offset int64 + Overflow bool +} + +func NewSupervisor(configFile string) *Supervisor { + return &Supervisor{config: config.NewConfig(configFile), + procMgr: process.NewProcessManager(), + xmlRPC: NewXmlRPC(), + restarting: false} +} + +func (s *Supervisor) GetConfig() *config.Config { + return s.config +} + +func (s *Supervisor) GetVersion(r *http.Request, args *struct{}, reply *struct{ Version string }) error { + reply.Version = SUPERVISOR_VERSION + return nil +} + +func (s *Supervisor) GetSupervisorVersion(r *http.Request, args *struct{}, reply *struct{ Version string }) error { + reply.Version = SUPERVISOR_VERSION + return nil +} + +func (s *Supervisor) GetIdentification(r *http.Request, args *struct{}, reply *struct{ Id string }) error { + reply.Id = s.GetSupervisorId() + return nil +} + +func (s *Supervisor) GetSupervisorId() string { + entry, ok := s.config.GetSupervisord() + if ok { + return entry.GetString("identifier", "supervisor") + } else { + return "supervisor" + } +} + +func (s *Supervisor) GetState(r *http.Request, args *struct{}, reply *struct{ StateInfo StateInfo }) error { + //statecode statename + //======================= + // 2 FATAL + // 1 RUNNING + // 0 RESTARTING + // -1 SHUTDOWN + log.Debug("Get state") + reply.StateInfo.Statecode = 1 + reply.StateInfo.Statename = "RUNNING" + return nil +} + +func (s *Supervisor) GetPID(r *http.Request, args *struct{}, reply *struct{ Pid int }) error { + reply.Pid = os.Getpid() + return nil +} + +func (s *Supervisor) ReadLog(r *http.Request, args *LogReadInfo, reply *struct{ Log string }) error { + data, err := s.logger.ReadLog(int64(args.Offset), int64(args.Length)) + reply.Log = data + return err +} + +func (s *Supervisor) ClearLog(r *http.Request, args *struct{}, reply *struct{ Ret bool }) error { + err := s.logger.ClearAllLogFile() + reply.Ret = err == nil + return err +} + +func (s *Supervisor) Shutdown(r *http.Request, args *struct{}, reply *struct{ Ret bool }) error { + reply.Ret = true + log.Info("received rpc request to stop all processes & exit") + s.procMgr.StopAllProcesses() + go func() { + time.Sleep(1 * time.Second) + os.Exit(0) + }() + return nil +} + +func (s *Supervisor) Restart(r *http.Request, args *struct{}, reply *struct{ Ret bool }) error { + log.Info("Receive instruction to restart") + s.restarting = true + reply.Ret = true + return nil +} + +func (s *Supervisor) IsRestarting() bool { + return s.restarting +} + +func getProcessInfo(proc *process.Process) *types.ProcessInfo { + return &types.ProcessInfo{Name: 
proc.GetName(), + Group: proc.GetGroup(), + Description: proc.GetDescription(), + Start: int(proc.GetStartTime().Unix()), + Stop: int(proc.GetStopTime().Unix()), + Now: int(time.Now().Unix()), + State: int(proc.GetState()), + Statename: proc.GetState().String(), + Spawnerr: "", + Exitstatus: proc.GetExitstatus(), + Logfile: proc.GetStdoutLogfile(), + Stdout_logfile: proc.GetStdoutLogfile(), + Stderr_logfile: proc.GetStderrLogfile(), + Pid: proc.GetPid()} + +} + +func (s *Supervisor) GetAllProcessInfo(r *http.Request, args *struct{}, reply *struct{ AllProcessInfo []types.ProcessInfo }) error { + reply.AllProcessInfo = make([]types.ProcessInfo, 0) + s.procMgr.ForEachProcess(func(proc *process.Process) { + procInfo := getProcessInfo(proc) + reply.AllProcessInfo = append(reply.AllProcessInfo, *procInfo) + }) + + return nil +} + +func (s *Supervisor) GetProcessInfo(r *http.Request, args *struct{ Name string }, reply *struct{ ProcInfo types.ProcessInfo }) error { + log.Debug("Get process info of: ", args.Name) + proc := s.procMgr.Find(args.Name) + if proc == nil { + return fmt.Errorf("no process named %s", args.Name) + } + + reply.ProcInfo = *getProcessInfo(proc) + return nil +} + +func (s *Supervisor) StartProcess(r *http.Request, args *StartProcessArgs, reply *struct{ Success bool }) error { + proc := s.procMgr.Find(args.Name) + + if proc == nil { + return fmt.Errorf("fail to find process %s", args.Name) + } + proc.Start(args.Wait) + reply.Success = true + return nil +} + +func (s *Supervisor) StartAllProcesses(r *http.Request, args *struct { + Wait bool `default:"true"` +}, reply *struct{ RpcTaskResults []RpcTaskResult }) error { + s.procMgr.ForEachProcess(func(proc *process.Process) { + proc.Start(args.Wait) + processInfo := *getProcessInfo(proc) + reply.RpcTaskResults = append(reply.RpcTaskResults, RpcTaskResult{ + Name: processInfo.Name, + Group: processInfo.Group, + Status: faults.SUCCESS, + Description: "OK", + }) + }) + return nil +} + +func (s *Supervisor) StartProcessGroup(r *http.Request, args *StartProcessArgs, reply *struct{ AllProcessInfo []types.ProcessInfo }) error { + log.WithFields(log.Fields{"group": args.Name}).Info("start process group") + s.procMgr.ForEachProcess(func(proc *process.Process) { + if proc.GetGroup() == args.Name { + proc.Start(args.Wait) + reply.AllProcessInfo = append(reply.AllProcessInfo, *getProcessInfo(proc)) + } + }) + + return nil +} + +func (s *Supervisor) StopProcess(r *http.Request, args *StartProcessArgs, reply *struct{ Success bool }) error { + log.WithFields(log.Fields{"program": args.Name}).Info("stop process") + proc := s.procMgr.Find(args.Name) + if proc == nil { + return fmt.Errorf("fail to find process %s", args.Name) + } + proc.Stop(args.Wait) + reply.Success = true + return nil +} + +func (s *Supervisor) StopProcessGroup(r *http.Request, args *StartProcessArgs, reply *struct{ AllProcessInfo []types.ProcessInfo }) error { + log.WithFields(log.Fields{"group": args.Name}).Info("stop process group") + s.procMgr.ForEachProcess(func(proc *process.Process) { + if proc.GetGroup() == args.Name { + proc.Stop(args.Wait) + reply.AllProcessInfo = append(reply.AllProcessInfo, *getProcessInfo(proc)) + } + }) + return nil +} + +func (s *Supervisor) StopAllProcesses(r *http.Request, args *struct { + Wait bool `default:"true"` +}, reply *struct{ RpcTaskResults []RpcTaskResult }) error { + s.procMgr.ForEachProcess(func(proc *process.Process) { + proc.Stop(args.Wait) + processInfo := *getProcessInfo(proc) + reply.RpcTaskResults = append(reply.RpcTaskResults, 
RpcTaskResult{ + Name: processInfo.Name, + Group: processInfo.Group, + Status: faults.SUCCESS, + Description: "OK", + }) + }) + return nil +} + +func (s *Supervisor) SignalProcess(r *http.Request, args *types.ProcessSignal, reply *struct{ Success bool }) error { + proc := s.procMgr.Find(args.Name) + if proc == nil { + reply.Success = false + return fmt.Errorf("No process named %s", args.Name) + } + sig, err := signals.ToSignal(args.Signal) + if err == nil { + proc.Signal(sig) + } + reply.Success = true + return nil +} + +func (s *Supervisor) SignalProcessGroup(r *http.Request, args *types.ProcessSignal, reply *struct{ AllProcessInfo []types.ProcessInfo }) error { + s.procMgr.ForEachProcess(func(proc *process.Process) { + if proc.GetGroup() == args.Name { + sig, err := signals.ToSignal(args.Signal) + if err == nil { + proc.Signal(sig) + } + } + }) + + s.procMgr.ForEachProcess(func(proc *process.Process) { + if proc.GetGroup() == args.Name { + reply.AllProcessInfo = append(reply.AllProcessInfo, *getProcessInfo(proc)) + } + }) + return nil +} + +func (s *Supervisor) SignalAllProcesses(r *http.Request, args *types.ProcessSignal, reply *struct{ AllProcessInfo []types.ProcessInfo }) error { + s.procMgr.ForEachProcess(func(proc *process.Process) { + sig, err := signals.ToSignal(args.Signal) + if err == nil { + proc.Signal(sig) + } + }) + s.procMgr.ForEachProcess(func(proc *process.Process) { + reply.AllProcessInfo = append(reply.AllProcessInfo, *getProcessInfo(proc)) + }) + return nil +} + +func (s *Supervisor) SendProcessStdin(r *http.Request, args *ProcessStdin, reply *struct{ Success bool }) error { + proc := s.procMgr.Find(args.Name) + if proc == nil { + log.WithFields(log.Fields{"program": args.Name}).Error("program does not exist") + return fmt.Errorf("NOT_RUNNING") + } + if proc.GetState() != process.RUNNING { + log.WithFields(log.Fields{"program": args.Name}).Error("program does not run") + return fmt.Errorf("NOT_RUNNING") + } + err := proc.SendProcessStdin(args.Chars) + if err == nil { + reply.Success = true + } else { + reply.Success = false + } + return err +} + +func (s *Supervisor) SendRemoteCommEvent(r *http.Request, args *RemoteCommEvent, reply *struct{ Success bool }) error { + events.EmitEvent(events.NewRemoteCommunicationEvent(args.Type, args.Data)) + reply.Success = true + return nil +} + +func (s *Supervisor) Reload() (error, []string, []string, []string) { + //get the previous loaded programs + prevPrograms := s.config.GetProgramNames() + prevProgGroup := s.config.ProgramGroup.Clone() + + loaded_programs, err := s.config.Load() + + if err == nil { + s.setSupervisordInfo() + s.startEventListeners() + s.createPrograms(prevPrograms) + s.startHttpServer() + s.startAutoStartPrograms() + } + removedPrograms := util.Sub(prevPrograms, loaded_programs) + for _, removedProg := range removedPrograms { + log.WithFields(log.Fields{"program": removedProg}).Info("the program is removed and will be stopped") + s.config.RemoveProgram(removedProg) + proc := s.procMgr.Remove(removedProg) + if proc != nil { + proc.Stop(false) + } + + } + addedGroup, changedGroup, removedGroup := s.config.ProgramGroup.Sub(prevProgGroup) + return err, addedGroup, changedGroup, removedGroup + +} + +func (s *Supervisor) WaitForExit() { + for { + if s.IsRestarting() { + s.procMgr.StopAllProcesses() + break + } + time.Sleep(10 * time.Second) + } +} + +func (s *Supervisor) createPrograms(prevPrograms []string) { + + programs := s.config.GetProgramNames() + for _, entry := range s.config.GetPrograms() { + 
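		// Reviewer note (not patch content): this loop registers a process with
		// the process manager for every program defined in the freshly loaded
		// config; programs that were loaded before the reload but are no longer
		// configured are collected just below with util.Sub(prevPrograms, programs)
		// and removed from the manager.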
s.procMgr.CreateProcess(s.GetSupervisorId(), entry) + } + removedPrograms := util.Sub(prevPrograms, programs) + for _, p := range removedPrograms { + s.procMgr.Remove(p) + } +} + +func (s *Supervisor) startAutoStartPrograms() { + s.procMgr.StartAutoStartPrograms() +} + +func (s *Supervisor) startEventListeners() { + eventListeners := s.config.GetEventListeners() + for _, entry := range eventListeners { + s.procMgr.CreateProcess(s.GetSupervisorId(), entry) + } + + if len(eventListeners) > 0 { + time.Sleep(1 * time.Second) + } +} + +func (s *Supervisor) startHttpServer() { + httpServerConfig, ok := s.config.GetInetHttpServer() + if ok { + addr := httpServerConfig.GetString("port", "") + if addr != "" { + go s.xmlRPC.StartInetHttpServer(httpServerConfig.GetString("username", ""), httpServerConfig.GetString("password", ""), addr, s) + } + } + + httpServerConfig, ok = s.config.GetUnixHttpServer() + if ok { + env := config.NewStringExpression("here", s.config.GetConfigFileDir()) + sockFile, err := env.Eval(httpServerConfig.GetString("file", "/tmp/supervisord.sock")) + if err == nil { + go s.xmlRPC.StartUnixHttpServer(httpServerConfig.GetString("username", ""), httpServerConfig.GetString("password", ""), sockFile, s) + } + } + +} + +func (s *Supervisor) setSupervisordInfo() { + supervisordConf, ok := s.config.GetSupervisord() + if ok { + //set supervisord log + + env := config.NewStringExpression("here", s.config.GetConfigFileDir()) + logFile, err := env.Eval(supervisordConf.GetString("logfile", "supervisord.log")) + logFile, err = process.Path_expand(logFile) + logEventEmitter := logger.NewNullLogEventEmitter() + s.logger = logger.NewNullLogger(logEventEmitter) + if err == nil { + logfile_maxbytes := int64(supervisordConf.GetBytes("logfile_maxbytes", 50*1024*1024)) + logfile_backups := supervisordConf.GetInt("logfile_backups", 10) + loglevel := supervisordConf.GetString("loglevel", "info") + switch logFile { + case "/dev/null": + s.logger = logger.NewNullLogger(logEventEmitter) + case "syslog": + s.logger = logger.NewSysLogger("supervisord", logEventEmitter) + case "/dev/stdout": + s.logger = logger.NewStdoutLogger(logEventEmitter) + case "/dev/stderr": + s.logger = logger.NewStderrLogger(logEventEmitter) + case "": + s.logger = logger.NewNullLogger(logEventEmitter) + default: + s.logger = logger.NewFileLogger(logFile, logfile_maxbytes, logfile_backups, logEventEmitter, &sync.Mutex{}) + } + log.SetOutput(s.logger) + log.SetLevel(toLogLevel(loglevel)) + log.SetFormatter(&log.TextFormatter{DisableColors: true}) + } + //set the pid + pidfile, err := env.Eval(supervisordConf.GetString("pidfile", "supervisord.pid")) + if err == nil { + f, err := os.Create(pidfile) + if err == nil { + fmt.Fprintf(f, "%d", os.Getpid()) + f.Close() + } + } + } +} + +func toLogLevel(level string) log.Level { + switch strings.ToLower(level) { + case "critical": + return log.FatalLevel + case "error": + return log.ErrorLevel + case "warn": + return log.WarnLevel + case "info": + return log.InfoLevel + default: + return log.DebugLevel + } +} + +func (s *Supervisor) ReloadConfig(r *http.Request, args *struct{}, reply *types.ReloadConfigResult) error { + log.Info("start to reload config") + err, addedGroup, changedGroup, removedGroup := s.Reload() + if len(addedGroup) > 0 { + log.WithFields(log.Fields{"groups": strings.Join(addedGroup, ",")}).Info("added groups") + } + + if len(changedGroup) > 0 { + log.WithFields(log.Fields{"groups": strings.Join(changedGroup, ",")}).Info("changed groups") + } + + if len(removedGroup) > 0 { 
+ log.WithFields(log.Fields{"groups": strings.Join(removedGroup, ",")}).Info("removed groups") + } + reply.AddedGroup = addedGroup + reply.ChangedGroup = changedGroup + reply.RemovedGroup = removedGroup + return err +} + +func (s *Supervisor) AddProcessGroup(r *http.Request, args *struct{ Name string }, reply *struct{ Success bool }) error { + reply.Success = false + return nil +} + +func (s *Supervisor) RemoveProcessGroup(r *http.Request, args *struct{ Name string }, reply *struct{ Success bool }) error { + reply.Success = false + return nil +} + +func (s *Supervisor) ReadProcessStdoutLog(r *http.Request, args *ProcessLogReadInfo, reply *struct{ LogData string }) error { + proc := s.procMgr.Find(args.Name) + if proc == nil { + return fmt.Errorf("No such process %s", args.Name) + } + var err error + reply.LogData, err = proc.StdoutLog.ReadLog(int64(args.Offset), int64(args.Length)) + return err +} + +func (s *Supervisor) ReadProcessStderrLog(r *http.Request, args *ProcessLogReadInfo, reply *struct{ LogData string }) error { + proc := s.procMgr.Find(args.Name) + if proc == nil { + return fmt.Errorf("No such process %s", args.Name) + } + var err error + reply.LogData, err = proc.StderrLog.ReadLog(int64(args.Offset), int64(args.Length)) + return err +} + +func (s *Supervisor) TailProcessStdoutLog(r *http.Request, args *ProcessLogReadInfo, reply *ProcessTailLog) error { + proc := s.procMgr.Find(args.Name) + if proc == nil { + return fmt.Errorf("No such process %s", args.Name) + } + var err error + reply.LogData, reply.Offset, reply.Overflow, err = proc.StdoutLog.ReadTailLog(int64(args.Offset), int64(args.Length)) + return err +} + +func (s *Supervisor) ClearProcessLogs(r *http.Request, args *struct{ Name string }, reply *struct{ Success bool }) error { + proc := s.procMgr.Find(args.Name) + if proc == nil { + return fmt.Errorf("No such process %s", args.Name) + } + err1 := proc.StdoutLog.ClearAllLogFile() + err2 := proc.StderrLog.ClearAllLogFile() + reply.Success = err1 == nil && err2 == nil + if err1 != nil { + return err1 + } + return err2 +} + +func (s *Supervisor) ClearAllProcessLogs(r *http.Request, args *struct{}, reply *struct{ RpcTaskResults []RpcTaskResult }) error { + + s.procMgr.ForEachProcess(func(proc *process.Process) { + proc.StdoutLog.ClearAllLogFile() + proc.StderrLog.ClearAllLogFile() + procInfo := getProcessInfo(proc) + reply.RpcTaskResults = append(reply.RpcTaskResults, RpcTaskResult{ + Name: procInfo.Name, + Group: procInfo.Group, + Status: faults.SUCCESS, + Description: "OK", + }) + }) + + return nil +} diff --git a/vendor/github.com/rpoletaev/supervisord/util/util.go b/vendor/github.com/rpoletaev/supervisord/util/util.go new file mode 100644 index 000000000..e103b0ecc --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/util/util.go @@ -0,0 +1,64 @@ +package util + +// return true if the elem is in the array arr +func InArray(elem interface{}, arr []interface{}) bool { + for _, e := range arr { + if e == elem { + return true + } + } + return false +} + +//return true if the array arr1 contains all elements of array arr2 +func HasAllElements(arr1 []interface{}, arr2 []interface{}) bool { + for _, e2 := range arr2 { + if !InArray(e2, arr1) { + return false + } + } + return true +} + +func StringArrayToInterfacArray(arr []string) []interface{} { + result := make([]interface{}, 0) + for _, s := range arr { + result = append(result, s) + } + return result +} + +func Sub(arr_1 []string, arr_2 []string) []string { + result := make([]string, 0) + for _, s := range arr_1 { 
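	// Reviewer note (not patch content): Sub is a set difference over string
	// slices: it returns the elements of arr_1 that do not occur in arr_2, so
	// Sub([]string{"a", "b"}, []string{"b"}) yields []string{"a"}. Reload uses
	// it above to work out which previously loaded programs were removed from
	// the configuration.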
+ exist := false + for _, s2 := range arr_2 { + if s == s2 { + exist = true + } + } + if !exist { + result = append(result, s) + } + } + return result +} + +func IsSameStringArray(arr_1 []string, arr_2 []string) bool { + if len(arr_1) != len(arr_2) { + return false + } + for _, s := range arr_1 { + exist := false + for _, s2 := range arr_2 { + if s2 == s { + exist = true + break + } + } + if !exist { + return false + } + } + return true +} diff --git a/vendor/github.com/rpoletaev/supervisord/version.go b/vendor/github.com/rpoletaev/supervisord/version.go new file mode 100644 index 000000000..12dc42aee --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/version.go @@ -0,0 +1,24 @@ +package main + +import ( + "fmt" +) + +const VERSION = "1.0.008" + +type VersionCommand struct { +} + +var versionCommand VersionCommand + +func (v VersionCommand) Execute(args []string) error { + fmt.Println(VERSION) + return nil +} + +func init() { + parser.AddCommand("version", + "show the version of supervisor", + "display the supervisor version", + &versionCommand) +} diff --git a/vendor/github.com/rpoletaev/supervisord/xmlrpc.go b/vendor/github.com/rpoletaev/supervisord/xmlrpc.go new file mode 100644 index 000000000..e4381000f --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/xmlrpc.go @@ -0,0 +1,136 @@ +package main + +import ( + "crypto/sha1" + "encoding/hex" + "io" + "net" + "net/http" + "os" + "strings" + + "github.com/gorilla/rpc" + "github.com/ochinchina/gorilla-xmlrpc/xml" + log "github.com/sirupsen/logrus" +) + +type XmlRPC struct { + listeners map[string]net.Listener + // true if RPC is started + started bool +} + +type httpBasicAuth struct { + user string + password string + handler http.Handler +} + +func NewHttpBasicAuth(user string, password string, handler http.Handler) *httpBasicAuth { + if user != "" && password != "" { + log.Debug("require authentication") + } + return &httpBasicAuth{user: user, password: password, handler: handler} +} + +func (h *httpBasicAuth) ServeHTTP(w http.ResponseWriter, r *http.Request) { + if h.user == "" || h.password == "" { + log.Debug("no auth required") + h.handler.ServeHTTP(w, r) + return + } + username, password, ok := r.BasicAuth() + if ok && username == h.user { + if strings.HasPrefix(h.password, "{SHA}") { + log.Debug("auth with SHA") + hash := sha1.New() + io.WriteString(hash, password) + if hex.EncodeToString(hash.Sum(nil)) == h.password[5:] { + h.handler.ServeHTTP(w, r) + return + } + } else if password == h.password { + log.Debug("Auth with normal password") + h.handler.ServeHTTP(w, r) + return + } + } + w.Header().Set("WWW-Authenticate", "Basic realm=\"supervisor\"") + w.WriteHeader(401) +} + +func NewXmlRPC() *XmlRPC { + return &XmlRPC{listeners: make(map[string]net.Listener), started: false} +} + +func (p *XmlRPC) Stop() { + for _, listener := range p.listeners { + listener.Close() + } +} + +func (p *XmlRPC) StartUnixHttpServer(user string, password string, listenAddr string, s *Supervisor) { + os.Remove(listenAddr) + p.startHttpServer(user, password, "unix", listenAddr, s) +} + +func (p *XmlRPC) StartInetHttpServer(user string, password string, listenAddr string, s *Supervisor) { + p.startHttpServer(user, password, "tcp", listenAddr, s) +} + +func (p *XmlRPC) startHttpServer(user string, password string, protocol string, listenAddr string, s *Supervisor) { + if p.started { + return + } + p.started = true + mux := http.NewServeMux() + mux.Handle("/RPC2", NewHttpBasicAuth(user, password, p.createRPCServer(s))) + listener, err := 
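	// Reviewer note (not patch content): ServeHTTP above accepts either a
	// plain-text password or a supervisor-style "{SHA}" value, i.e. the literal
	// prefix "{SHA}" followed by the hex-encoded SHA-1 digest of the password.
	// A minimal sketch of producing such a stored value (illustrative only;
	// sha1 and hex are already imported in this file):
	//
	//	sum := sha1.Sum([]byte("secret"))
	//	stored := "{SHA}" + hex.EncodeToString(sum[:])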
net.Listen(protocol, listenAddr) + if err == nil { + p.listeners[protocol] = listener + http.Serve(listener, mux) + } else { + log.WithFields(log.Fields{"addr": listenAddr, "protocol": protocol}).Error("fail to listen on address") + } + +} +func (p *XmlRPC) createRPCServer(s *Supervisor) *rpc.Server { + RPC := rpc.NewServer() + xmlrpcCodec := xml.NewCodec() + RPC.RegisterCodec(xmlrpcCodec, "text/xml") + RPC.RegisterService(s, "") + + xmlrpcCodec.RegisterAlias("supervisor.getVersion", "Supervisor.GetVersion") + xmlrpcCodec.RegisterAlias("supervisor.getAPIVersion", "Supervisor.GetVersion") + xmlrpcCodec.RegisterAlias("supervisor.getIdentification", "Supervisor.GetIdentification") + xmlrpcCodec.RegisterAlias("supervisor.getState", "Supervisor.GetState") + xmlrpcCodec.RegisterAlias("supervisor.getPID", "Supervisor.GetPID") + xmlrpcCodec.RegisterAlias("supervisor.readLog", "Supervisor.ReadLog") + xmlrpcCodec.RegisterAlias("supervisor.clearLog", "Supervisor.ClearLog") + xmlrpcCodec.RegisterAlias("supervisor.shutdown", "Supervisor.Shutdown") + xmlrpcCodec.RegisterAlias("supervisor.restart", "Supervisor.Restart") + xmlrpcCodec.RegisterAlias("supervisor.getProcessInfo", "Supervisor.GetProcessInfo") + xmlrpcCodec.RegisterAlias("supervisor.getSupervisorVersion", "Supervisor.GetVersion") + xmlrpcCodec.RegisterAlias("supervisor.getAllProcessInfo", "Supervisor.GetAllProcessInfo") + xmlrpcCodec.RegisterAlias("supervisor.startProcess", "Supervisor.StartProcess") + xmlrpcCodec.RegisterAlias("supervisor.startAllProcesses", "Supervisor.StartAllProcesses") + xmlrpcCodec.RegisterAlias("supervisor.startProcessGroup", "Supervisor.StartProcessGroup") + xmlrpcCodec.RegisterAlias("supervisor.stopProcess", "Supervisor.StopProcess") + xmlrpcCodec.RegisterAlias("supervisor.stopProcessGroup", "Supervisor.StopProcessGroup") + xmlrpcCodec.RegisterAlias("supervisor.stopAllProcesses", "Supervisor.StopAllProcesses") + xmlrpcCodec.RegisterAlias("supervisor.signalProcess", "Supervisor.SignalProcess") + xmlrpcCodec.RegisterAlias("supervisor.signalProcessGroup", "Supervisor.SignalProcessGroup") + xmlrpcCodec.RegisterAlias("supervisor.signalAllProcesses", "Supervisor.SignalAllProcesses") + xmlrpcCodec.RegisterAlias("supervisor.sendProcessStdin", "Supervisor.SendProcessStdin") + xmlrpcCodec.RegisterAlias("supervisor.sendRemoteCommEvent", "Supervisor.SendRemoteCommEvent") + xmlrpcCodec.RegisterAlias("supervisor.reloadConfig", "Supervisor.ReloadConfig") + xmlrpcCodec.RegisterAlias("supervisor.addProcessGroup", "Supervisor.AddProcessGroup") + xmlrpcCodec.RegisterAlias("supervisor.removeProcessGroup", "Supervisor.RemoveProcessGroup") + xmlrpcCodec.RegisterAlias("supervisor.readProcessStdoutLog", "Supervisor.ReadProcessStdoutLog") + xmlrpcCodec.RegisterAlias("supervisor.readProcessStderrLog", "Supervisor.ReadProcessStderrLog") + xmlrpcCodec.RegisterAlias("supervisor.tailProcessStdoutLog", "Supervisor.TailProcessStdoutLog") + xmlrpcCodec.RegisterAlias("supervisor.tailProcessStderrLog", "Supervisor.TailProcessStderrLog") + xmlrpcCodec.RegisterAlias("supervisor.clearProcessLogs", "Supervisor.ClearProcessLogs") + xmlrpcCodec.RegisterAlias("supervisor.clearAllProcessLogs", "Supervisor.ClearAllProcessLogs") + return RPC +} diff --git a/vendor/vendor.json b/vendor/vendor.json index a8bbc84e1..1565ffc5c 100644 --- a/vendor/vendor.json +++ b/vendor/vendor.json @@ -75,6 +75,12 @@ "revision": "1e59b77b52bf8e4b449a57e6f79f21226d571845", "revisionTime": "2017-11-13T18:07:20Z" }, + { + "checksumSHA1": "UcxIsr0IzcSKDqGVnK1HsxnSSVU=", + "path": 
"github.com/gorilla/rpc", + "revision": "22c016f3df3febe0c1f6727598b6389507e03a18", + "revisionTime": "2016-09-23T22:06:01Z" + }, { "checksumSHA1": "SGSXlSU1TFtg5aTlVA9v4Ka86lU=", "origin": "github.com/centrifugal/centrifugo/vendor/github.com/gorilla/securecookie", @@ -220,6 +226,18 @@ "revision": "179d4d0c4d8d407a32af483c2354df1d2c91e6c3", "revisionTime": "2013-12-21T20:05:32Z" }, + { + "checksumSHA1": "8p5uEwUdi9/xh/XpF1ULlZ00k2w=", + "path": "github.com/ochinchina/go-ini", + "revision": "4dcbd5514a9220bb68c8d45eabf79b25479ea2d1", + "revisionTime": "2018-03-10T02:35:15Z" + }, + { + "checksumSHA1": "E3jV3ILrMbnBTbR4GXkRf7rXc5Q=", + "path": "github.com/ochinchina/gorilla-xmlrpc/xml", + "revision": "ecf2fe693a2ca10ce68d2c7d4c559f1a57d2c845", + "revisionTime": "2017-10-12T05:53:24Z" + }, { "checksumSHA1": "BoXdUBWB8UnSlFlbnuTQaPqfCGk=", "path": "github.com/op/go-logging", @@ -257,6 +275,66 @@ "revision": "2315d5715e36303a941d907f038da7f7c44c773b", "revisionTime": "2017-11-01T20:10:47Z" }, + { + "checksumSHA1": "sESN0ZfX2JIOB6pcxXuw5yXo4+E=", + "path": "github.com/rogpeppe/go-charset/charset", + "revision": "e9ff06f347d3f5d0013d59ed83754f0e88de10d4", + "revisionTime": "2015-06-15T17:25:32Z" + }, + { + "checksumSHA1": "MyUzunzysfhOlm/yJfV89oC+mO4=", + "path": "github.com/rogpeppe/go-charset/data", + "revision": "e9ff06f347d3f5d0013d59ed83754f0e88de10d4", + "revisionTime": "2015-06-15T17:25:32Z" + }, + { + "checksumSHA1": "4yHilxHn118WAI/J+/uQd+lVky0=", + "path": "github.com/rpoletaev/supervisord", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "UHnNA1Cx5MtPY68fBrM/ank3bUY=", + "path": "github.com/rpoletaev/supervisord/config", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "7079G1HzEBpr9xCFA7S7OYtw3F4=", + "path": "github.com/rpoletaev/supervisord/events", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "A+d9lhIE1xvY1fdypT5GdO9C3wY=", + "path": "github.com/rpoletaev/supervisord/faults", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "YLXHdj1snMGqXXiFNQciKtUqTgM=", + "path": "github.com/rpoletaev/supervisord/logger", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "HHkvC6JQjF9hZ4RblCEEKV6k3Is=", + "path": "github.com/rpoletaev/supervisord/process", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "TKy4P7QqrqVfiKwGbXBkP5XNPY4=", + "path": "github.com/rpoletaev/supervisord/signals", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "F4x0/vDYzuOYgOMp3NlFbbTX1Vg=", + "path": "github.com/rpoletaev/supervisord/util", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, { "checksumSHA1": "eDQ6f1EsNf+frcRO/9XukSEchm8=", "path": "github.com/satori/go.uuid", From 3c1261755b1ca6e39ba956198ba3ed95235c074d Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Fri, 15 Jun 2018 17:02:51 +0300 Subject: [PATCH 055/169] requested changes --- packages/tcpserver/type1.go | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/tcpserver/type1.go b/packages/tcpserver/type1.go index 5cbb48c17..12d179b30 100644 --- 
a/packages/tcpserver/type1.go +++ b/packages/tcpserver/type1.go @@ -225,7 +225,6 @@ func saveNewTransactions(r *DisRequest) error { } queue = append(queue, &model.QueueTx{Hash: hash, Data: txBinData, FromGate: 1}) - // err = queueTx.Create() } if err := model.BatchInsert(queue, []string{"hash", "data", "from_gate"}); err != nil { From 654cb9b160ab6cef0033ed0ae915c39557eec2a3 Mon Sep 17 00:00:00 2001 From: Dmitriy Chertkov Date: Mon, 18 Jun 2018 16:59:10 +0500 Subject: [PATCH 056/169] Added Content-Disposition header for binary files --- packages/api/data.go | 5 +- packages/api/template_test.go | 88 +++++++++++++++++++---------------- 2 files changed, 51 insertions(+), 42 deletions(-) diff --git a/packages/api/data.go b/packages/api/data.go index 54e741bd2..88912efa5 100644 --- a/packages/api/data.go +++ b/packages/api/data.go @@ -45,7 +45,8 @@ func dataHandler() hr.Handle { return } - data, err := model.GetColumnByID(tblname, column, ps.ByName(`id`)) + id := ps.ByName(`id`) + data, err := model.GetColumnByID(tblname, column, id) if err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("selecting data from table") errorAPI(w, `E_NOTFOUND`, http.StatusNotFound) @@ -59,6 +60,7 @@ func dataHandler() hr.Handle { } w.Header().Set("Content-Type", "application/octet-stream") + w.Header().Set("Content-Disposition", "attachment") w.Header().Set("Access-Control-Allow-Origin", "*") w.Write([]byte(data)) return @@ -88,6 +90,7 @@ func binary(w http.ResponseWriter, r *http.Request, ps hr.Params) { } w.Header().Set("Content-Type", bin.MimeType) + w.Header().Set("Content-Disposition", fmt.Sprintf(`attachment; filename="%s"`, bin.Name)) w.Header().Set("Access-Control-Allow-Origin", "*") w.Write(bin.Data) return diff --git a/packages/api/template_test.go b/packages/api/template_test.go index ea73bf31e..cbedbf6b8 100644 --- a/packages/api/template_test.go +++ b/packages/api/template_test.go @@ -19,12 +19,17 @@ package api import ( "crypto/md5" "encoding/base64" + "encoding/hex" "fmt" + "io/ioutil" + "net/http" "net/url" "strings" "testing" "time" + "github.com/GenesisKernel/go-genesis/packages/consts" + "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/crypto" "github.com/stretchr/testify/assert" ) @@ -168,10 +173,8 @@ func TestMobile(t *testing.T) { } func TestCutoff(t *testing.T) { - if err := keyLogin(1); err != nil { - t.Error(err) - return - } + assert.NoError(t, keyLogin(1)) + name := randName(`tbl`) form := url.Values{ "Name": {name}, @@ -180,13 +183,10 @@ func TestCutoff(t *testing.T) { {"name":"long_text", "type":"text", "index":"0", "conditions":"true"}, {"name":"short_text", "type":"varchar", "index":"0", "conditions":"true"} ]`}, - "Permissions": {`{"insert": "true", "update" : "true", "new_column": "true"}`}, - } - err := postTx(`NewTable`, &form) - if err != nil { - t.Error(err) - return + "Permissions": {`{"insert": "true", "update" : "true", "new_column": "true"}`}, + "ApplicationId": {"1"}, } + assert.NoError(t, postTx(`NewTable`, &form)) form = url.Values{ "Name": {name}, "Value": {` @@ -200,48 +200,35 @@ func TestCutoff(t *testing.T) { } } `}, - "Conditions": {`true`}, - } - if err := postTx(`NewContract`, &form); err != nil { - t.Error(err) - return + "Conditions": {`true`}, + "ApplicationId": {"1"}, } + assert.NoError(t, postTx(`NewContract`, &form)) shortText := crypto.RandSeq(30) longText := crypto.RandSeq(100) - err = postTx(name, &url.Values{ + assert.NoError(t, postTx(name, &url.Values{ "ShortText": 
{shortText}, "LongText": {longText}, - }) - if err != nil { - t.Error(err) - return - } + })) + var ret contentResult template := `DBFind(Name: ` + name + `, Source: mysrc).Cutoff("short_text,long_text")` start := time.Now() - err = sendPost(`content`, &url.Values{`template`: {template}}, &ret) + assert.NoError(t, sendPost(`content`, &url.Values{`template`: {template}}, &ret)) duration := time.Since(start) - if err != nil { - t.Error(err) - return - } if int(duration.Seconds()) > 0 { t.Errorf(`Too much time for template parsing`) return } - err = postTx(name, &url.Values{ + assert.NoError(t, postTx(name, &url.Values{ "ShortText": {shortText}, "LongText": {longText}, - }) + })) template = `DBFind("` + name + `", mysrc).Columns("id,name,short_text,long_text").Cutoff("short_text,long_text").WhereId(2).Vars(prefix)` - err = sendPost(`content`, &url.Values{`template`: {template}}, &ret) - if err != nil { - t.Error(err) - return - } + assert.NoError(t, sendPost(`content`, &url.Values{`template`: {template}}, &ret)) linkLongText := fmt.Sprintf("/data/1_%s/2/long_text/%x", name, md5.Sum([]byte(longText))) @@ -250,14 +237,18 @@ func TestCutoff(t *testing.T) { t.Errorf("Wrong image tree %s != %s", RawToString(ret.Tree), want) } - data, err := sendRawRequest("GET", linkLongText, nil) + resp, err := http.Get(apiAddress + consts.ApiPath + linkLongText) if err != nil { t.Error(err) return } - if string(data) != longText { - t.Errorf("Wrong text %s", data) - } + defer resp.Body.Close() + + data, err := ioutil.ReadAll(resp.Body) + assert.NoError(t, err) + + assert.Equal(t, "attachment", resp.Header.Get("Content-Disposition")) + assert.Equal(t, longText, string(data)) } var imageData = `iVBORw0KGgoAAAANSUhEUgAAADIAAAAyCAIAAACRXR/mAAAACXBIWXMAAAsTAAALEwEAmpwYAAAARklEQVRYw+3OMQ0AIBAEwQOzaCLBBQZfAd0XFLMCNjOyb1o7q2Ey82VYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYrwqjmwKzLUjCbwAAAABJRU5ErkJggg==` @@ -326,6 +317,8 @@ func TestStringToBinary(t *testing.T) { contract := randName("binary") content := randName("content") + filename := randName("file") + mimeType := "text/plain" form := url.Values{ "Value": {` @@ -335,19 +328,23 @@ func TestStringToBinary(t *testing.T) { } conditions {} action { - UploadBinary("Name,AppID,Data,DataMimeType", "test", 1, StringToBytes($Content), "text/plain") + UploadBinary("Name,ApplicationId,Data,DataMimeType", "` + filename + `", 1, StringToBytes($Content), "` + mimeType + `") } } `}, - "Conditions": {"true"}, + "ApplicationId": {"1"}, + "Conditions": {"true"}, } assert.NoError(t, postTx("NewContract", &form)) form = url.Values{"Content": {content}} assert.NoError(t, postTx(contract, &form)) + pubKey, err := hex.DecodeString(gPublic) + assert.NoError(t, err) + keyID := converter.Int64ToStr(crypto.Address(pubKey)) form = url.Values{ - "template": {`SetVar(link, Binary(Name: test, AppID: 1)) #link#`}, + "template": {`SetVar(link, Binary(Name: ` + filename + `, AppID: 1, MemberID: ` + keyID + `)) #link#`}, } var ret struct { Tree []struct { @@ -356,7 +353,16 @@ func TestStringToBinary(t *testing.T) { } assert.NoError(t, sendPost(`content`, &form, &ret)) - data, err := sendRawRequest("GET", strings.TrimSpace(ret.Tree[0].Link), nil) + resp, err := http.Get(apiAddress + consts.ApiPath + strings.TrimSpace(ret.Tree[0].Link)) + if err != nil { + t.Error(err) + return + } + defer resp.Body.Close() + + data, err := ioutil.ReadAll(resp.Body) assert.NoError(t, err) assert.Equal(t, content, string(data)) + assert.Equal(t, mimeType, resp.Header.Get("Content-Type")) + assert.Equal(t, `attachment; 
filename="`+filename+`"`, resp.Header.Get("Content-Disposition")) } From a55284bd4672096d1e64a7b0456212313c367a83 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 18 Jun 2018 15:52:12 +0300 Subject: [PATCH 057/169] change update permissions for notifications table --- packages/migration/tables_data.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/migration/tables_data.go b/packages/migration/tables_data.go index 71bf76a8d..be8177b61 100644 --- a/packages/migration/tables_data.go +++ b/packages/migration/tables_data.go @@ -98,7 +98,7 @@ var tablesDataSQL = `INSERT INTO "%[1]d_tables" ("id", "name", "permissions","co 'ContractConditions("MainCondition")'), ('12', 'notifications', '{"insert":"ContractAccess(\"notifications_Send\", \"CheckNodesBan\")", - "update":"ContractConditions(\"MainCondition\")", + "update":"ContractAccess(\"notifications_Send\", \"notifications_Close\", \"notifications_Process\")", "new_column":"ContractConditions(\"MainCondition\")"}', '{"date_closed":"ContractAccess(\"notifications_Close\")", "sender":"false", From e76e726a267c9df672edc933376e2bea509b1ed8 Mon Sep 17 00:00:00 2001 From: Dmitriy Chertkov Date: Mon, 18 Jun 2018 17:57:57 +0500 Subject: [PATCH 058/169] Fixed changing schema of system_parameters table --- packages/conf/syspar/syspar.go | 4 ++-- packages/daemons/block_generator.go | 2 +- packages/model/system_parameters.go | 5 +++++ packages/parser/common.go | 2 +- packages/parser/common_parse_data_full.go | 2 +- packages/utils/utils.go | 4 ++-- 6 files changed, 12 insertions(+), 7 deletions(-) diff --git a/packages/conf/syspar/syspar.go b/packages/conf/syspar/syspar.go index abb88aa6d..96b7706ca 100644 --- a/packages/conf/syspar/syspar.go +++ b/packages/conf/syspar/syspar.go @@ -202,9 +202,9 @@ func GetNumberOfNodes() int64 { return int64(len(nodesByPosition)) } -func GetNumberOfNodesFromDB() int64 { +func GetNumberOfNodesFromDB(transaction *model.DbTransaction) int64 { sp := &model.SystemParameter{} - sp.Get(FullNodes) + sp.GetTransaction(transaction, FullNodes) var fullNodes []map[string]interface{} if len(sp.Value) > 0 { if err := json.Unmarshal([]byte(sp.Value), &fullNodes); err != nil { diff --git a/packages/daemons/block_generator.go b/packages/daemons/block_generator.go index daa70067c..e6dfefb8e 100644 --- a/packages/daemons/block_generator.go +++ b/packages/daemons/block_generator.go @@ -60,7 +60,7 @@ func BlockGenerator(ctx context.Context, d *daemon) error { return err } - blockTimeCalculator, err := utils.BuildBlockTimeCalculator() + blockTimeCalculator, err := utils.BuildBlockTimeCalculator(nil) if err != nil { d.logger.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("building block time calculator") return err diff --git a/packages/model/system_parameters.go b/packages/model/system_parameters.go index f3137aa99..3ed2c7f91 100644 --- a/packages/model/system_parameters.go +++ b/packages/model/system_parameters.go @@ -22,6 +22,11 @@ func (sp *SystemParameter) Get(name string) (bool, error) { return isFound(DBConn.Where("name = ?", name).First(sp)) } +// GetTransaction is retrieving model from database using transaction +func (sp *SystemParameter) GetTransaction(transaction *DbTransaction, name string) (bool, error) { + return isFound(GetDB(transaction).Where("name = ?", name).First(sp)) +} + // GetJSONField returns fields as json func (sp *SystemParameter) GetJSONField(jsonField string, name string) (string, error) { var result string diff --git a/packages/parser/common.go 
b/packages/parser/common.go index d61617cf1..c53941457 100644 --- a/packages/parser/common.go +++ b/packages/parser/common.go @@ -303,7 +303,7 @@ func InsertIntoBlockchain(transaction *model.DbTransaction, block *Block) error RollbacksHash: rollbackTxsHash, Tx: int32(len(block.Parsers)), } - blockTimeCalculator, err := utils.BuildBlockTimeCalculator() + blockTimeCalculator, err := utils.BuildBlockTimeCalculator(transaction) if err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating block") return err diff --git a/packages/parser/common_parse_data_full.go b/packages/parser/common_parse_data_full.go index 8ef68d753..88c0a8410 100644 --- a/packages/parser/common_parse_data_full.go +++ b/packages/parser/common_parse_data_full.go @@ -763,7 +763,7 @@ func (b *Block) CheckBlock() error { // skip time validation for first block if b.Header.BlockID > 1 { - blockTimeCalculator, err := utils.BuildBlockTimeCalculator() + blockTimeCalculator, err := utils.BuildBlockTimeCalculator(nil) if err != nil { logger.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("building block time calculator") return err diff --git a/packages/utils/utils.go b/packages/utils/utils.go index 1f6e66c97..ffd352e94 100644 --- a/packages/utils/utils.go +++ b/packages/utils/utils.go @@ -534,7 +534,7 @@ func GetHostPort(h string) string { return fmt.Sprintf("%s:%d", h, consts.DEFAULT_TCP_PORT) } -func BuildBlockTimeCalculator() (BlockTimeCalculator, error) { +func BuildBlockTimeCalculator(transaction *model.DbTransaction) (BlockTimeCalculator, error) { var btc BlockTimeCalculator firstBlock := model.Block{} found, err := firstBlock.Get(1) @@ -554,7 +554,7 @@ func BuildBlockTimeCalculator() (BlockTimeCalculator, error) { btc = NewBlockTimeCalculator(time.Unix(firstBlock.Time, 0), blockGenerationDuration, blocksGapDuration, - syspar.GetNumberOfNodesFromDB(), + syspar.GetNumberOfNodesFromDB(transaction), ) return btc, nil } From 8ac71e286f57ad2df343ae014ad64c987ed70a90 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 18 Jun 2018 16:16:35 +0300 Subject: [PATCH 059/169] add reles_access for 'Apla Consensus asbl' --- packages/migration/first_ecosystems_data.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/migration/first_ecosystems_data.go b/packages/migration/first_ecosystems_data.go index 45c1a4614..20ce06909 100644 --- a/packages/migration/first_ecosystems_data.go +++ b/packages/migration/first_ecosystems_data.go @@ -5,7 +5,7 @@ INSERT INTO "1_ecosystems" ("id", "name", "is_valued") VALUES ('1', 'platform ec INSERT INTO "1_roles" ("id", "default_page", "role_name", "deleted", "role_type", "date_created","creator","roles_access") VALUES - ('3','', 'Apla Consensus asbl', '0', '3', NOW(), '{}', '{}'), + ('3','', 'Apla Consensus asbl', '0', '3', NOW(), '{}', '{"rids": "1"}'), ('4','', 'Candidate for validators', '0', '3', NOW(), '{}', '{}'), ('5','', 'Validator', '0', '3', NOW(), '{}', '{}'), ('6','', 'Investor with voting rights', '0', '3', NOW(), '{}', '{}'), From 775251a8ba1a6bb78be37c0927d8dbec29862a26 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 12:40:33 +0300 Subject: [PATCH 060/169] move changes --- cmd/config.go | 4 +- packages/conf/conf.go | 26 +++ packages/conf/runmode.go | 40 ++++ packages/migration/{ => vde}/vde.go | 0 packages/smart/funcs.go | 41 ++++ packages/vdemanager/config.go | 65 +++++++ packages/vdemanager/manager.go | 286 ++++++++++++++++++++++++++++ 7 files changed, 460 insertions(+), 2 deletions(-) 
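Reviewer note (not part of the patch): this commit introduces a RunningMode
setting with helper predicates on GlobalConfig ("PublicBlockchain",
"PrivateBlockchain", "VDE", "VDEMaster"), VDE management contract functions
(CreateVDE, DeleteVDE, StartVDE, StopVDE, GetVDEList), and a vdemanager package
that drives child VDE nodes through the vendored supervisord process manager.
A minimal sketch of how the new predicates are meant to gate behaviour
(illustrative only):

	if conf.Config.IsVDEMaster() {
		// register the VDE management functions in the contract VM
	}
	if conf.Config.IsSupportingVDE() {
		// VDE and VDEMaster nodes skip the blockchain-only API routes
		// (see the route.go change in PATCH 062 below)
	}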
create mode 100644 packages/conf/runmode.go rename packages/migration/{ => vde}/vde.go (100%) create mode 100644 packages/vdemanager/config.go create mode 100644 packages/vdemanager/manager.go diff --git a/cmd/config.go b/cmd/config.go index c6779160a..90ace6d01 100644 --- a/cmd/config.go +++ b/cmd/config.go @@ -136,7 +136,7 @@ func init() { configCmd.Flags().StringVar(&conf.Config.TLSKey, "tls-key", "", "Filepath to the private key") configCmd.Flags().Int64Var(&conf.Config.MaxPageGenerationTime, "mpgt", 1000, "Max page generation time in ms") configCmd.Flags().StringSliceVar(&conf.Config.NodesAddr, "nodesAddr", []string{}, "List of addresses for downloading blockchain") - configCmd.Flags().BoolVar(&conf.Config.PrivateBlockchain, "privateBlockchain", false, "Is blockchain private") + configCmd.Flags().StringVar(&conf.Config.RunningMode, "runMode", "CommonBlockchain", "Node running mode") viper.BindPFlag("PidFilePath", configCmd.Flags().Lookup("pid")) viper.BindPFlag("LockFilePath", configCmd.Flags().Lookup("lock")) @@ -147,7 +147,7 @@ func init() { viper.BindPFlag("TLSCert", configCmd.Flags().Lookup("tls-cert")) viper.BindPFlag("TLSKey", configCmd.Flags().Lookup("tls-key")) viper.BindPFlag("MaxPageGenerationTime", configCmd.Flags().Lookup("mpgt")) - viper.BindPFlag("PrivateBlockchain", configCmd.Flags().Lookup("privateBlockchain")) viper.BindPFlag("TempDir", configCmd.Flags().Lookup("tempDir")) viper.BindPFlag("NodesAddr", configCmd.Flags().Lookup("nodesAddr")) + viper.BindPFlag("RunningMode", configCmd.Flags().Lookup("runMode")) } diff --git a/packages/conf/conf.go b/packages/conf/conf.go index 7ac76b6e8..2e175b01c 100644 --- a/packages/conf/conf.go +++ b/packages/conf/conf.go @@ -89,6 +89,7 @@ type GlobalConfig struct { TLS bool // TLS is on/off. It is required for https TLSCert string // TLSCert is a filepath of the fullchain of certificate. TLSKey string // TLSKey is a filepath of the private key. 
+ RunningMode string MaxPageGenerationTime int64 // in milliseconds @@ -216,3 +217,28 @@ func FillRuntimeKey() error { func GetNodesAddr() []string { return Config.NodesAddr[:] } + +// IsPrivateBlockchain check running mode +func (c *GlobalConfig) IsPrivateBlockchain() bool { + return RunMode(c.RunningMode).IsPrivateBlockchain() +} + +// IsPublicBlockchain check running mode +func (c *GlobalConfig) IsPublicBlockchain() bool { + return RunMode(c.RunningMode).IsPublicBlockchain() +} + +// IsVDE check running mode +func (c *GlobalConfig) IsVDE() bool { + return RunMode(c.RunningMode).IsVDE() +} + +// IsVDEMaster check running mode +func (c *GlobalConfig) IsVDEMaster() bool { + return RunMode(c.RunningMode).IsVDEMaster() +} + +// IsSupportingVDE check running mode +func (c *GlobalConfig) IsSupportingVDE() bool { + return RunMode(c.RunningMode).IsSupportingVDE() +} diff --git a/packages/conf/runmode.go b/packages/conf/runmode.go new file mode 100644 index 000000000..a03f2aeb0 --- /dev/null +++ b/packages/conf/runmode.go @@ -0,0 +1,40 @@ +package conf + +// PrivateBlockchain const label for running mode +const privateBlockchain RunMode = "PrivateBlockchain" + +// PublicBlockchain const label for running mode +const publicBlockchain RunMode = "PublicBlockchain" + +// VDEManager const label for running mode +const vdeMaster RunMode = "VDEMaster" + +// VDE const label for running mode +const vde RunMode = "VDE" + +type RunMode string + +// IsPublicBlockchain returns true if mode equal PublicBlockchain +func (rm RunMode) IsPublicBlockchain() bool { + return rm == publicBlockchain +} + +// IsPrivateBlockchain returns true if mode equal PrivateBlockchain +func (rm RunMode) IsPrivateBlockchain() bool { + return rm == privateBlockchain +} + +// IsVDEMaster returns true if mode equal vdeMaster +func (rm RunMode) IsVDEMaster() bool { + return rm == vdeMaster +} + +// IsVDE returns true if mode equal vde +func (rm RunMode) IsVDE() bool { + return rm == vde +} + +// IsSupportingVDE returns true if mode support vde +func (rm RunMode) IsSupportingVDE() bool { + return rm.IsVDE() || rm.IsVDEMaster() +} diff --git a/packages/migration/vde.go b/packages/migration/vde/vde.go similarity index 100% rename from packages/migration/vde.go rename to packages/migration/vde/vde.go diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index df6e8dd22..8358cb003 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -46,6 +46,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/script" "github.com/GenesisKernel/go-genesis/packages/utils" "github.com/GenesisKernel/go-genesis/packages/utils/tx" + "github.com/GenesisKernel/go-genesis/packages/vdemanager" "github.com/satori/go.uuid" "github.com/shopspring/decimal" @@ -254,6 +255,21 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { f["UpdateCron"] = UpdateCron vmExtendCost(vm, getCost) vmFuncCallsDB(vm, funcCallsDB) + case script.VMTypeVDEMaster: + f["HTTPRequest"] = HTTPRequest + f["GetMapKeys"] = GetMapKeys + f["SortedKeys"] = SortedKeys + f["Date"] = Date + f["HTTPPostJSON"] = HTTPPostJSON + f["ValidateCron"] = ValidateCron + f["UpdateCron"] = UpdateCron + f["CreateVDE"] = CreateVDE + f["DeleteVDE"] = DeleteVDE + f["StartVDE"] = StartVDE + f["StopVDE"] = StopVDE + f["GetVDEList"] = GetVDEList + vmExtendCost(vm, getCost) + vmFuncCallsDB(vm, funcCallsDB) case script.VMTypeSmart: f["GetBlock"] = GetBlock f["UpdateNodesBan"] = UpdateNodesBan @@ -1670,3 +1686,28 @@ func StringToBytes(src string) []byte { func BytesToString(src []byte) string { 
return string(src) } + +// CreateVDE allow create new VDE throw vdemanager +func CreateVDE(sc *SmartContract, name, dbUser, dbPassword string, port int64) error { + return vdemanager.Manager.CreateVDE(name, dbUser, dbPassword, int(port)) +} + +// DeleteVDE delete vde +func DeleteVDE(sc *SmartContract, name string) error { + return vdemanager.Manager.DeleteVDE(name) +} + +// StartVDE run VDE process +func StartVDE(sc *SmartContract, name string) error { + return vdemanager.Manager.StartVDE(name) +} + +// StopVDE stops VDE process +func StopVDE(sc *SmartContract, name string) error { + return vdemanager.Manager.StopVDE(name) +} + +// GetVDEList returns list VDE process with statuses +func GetVDEList(sc *SmartContract, name string) (map[string]string, error) { + return vdemanager.Manager.ListProcess() +} diff --git a/packages/vdemanager/config.go b/packages/vdemanager/config.go new file mode 100644 index 000000000..450ff5aac --- /dev/null +++ b/packages/vdemanager/config.go @@ -0,0 +1,65 @@ +package vdemanager + +import ( + "fmt" + "os/exec" + "path/filepath" +) + +const ( + inidDBCommand = "initDatabase" + genKeysCommand = "generateKeys" + startCommand = "start" +) +// ChildVDEConfig struct to manage child entry +type ChildVDEConfig struct { + Executable string + Name string + Directory string + DBUser string + DBPassword string + ConfigFileName string + HTTPPort int +} + +func (c ChildVDEConfig) configCommand() *exec.Cmd { + + args := []string{ + "config", + fmt.Sprintf("--path=%s", c.configPath()), + fmt.Sprintf("--dbUser=%s", c.DBUser), + fmt.Sprintf("--dbPassword=%s", c.DBPassword), + fmt.Sprintf("--dbName=%s", c.Name), + fmt.Sprintf("--httpPort=%d", c.HTTPPort) + fmt.Sprintf("--dataDir=%s", c.Directory), + fmt.Sprintf("--keysDir=%s", c.Directory), + fmt.Sprintf("--runMode=VDE") + } + + return exec.Command(c.Executable, args...) +} + +func (c ChildVDEConfig) initDBCommand() exec.Cmd { + return getCommand(inidDBCommand) +} + +func (c ChildVDEConfig) generateKeysCommand() exec.Cmd { + return getCommand(genKeysCommand) +} + +func (c ChildVDEConfig) startCommand() exec.Cmd { + retturn getCommand(startCommand) +} + +func (c ChildVDEConfig) configPath() string { + return filepath.Join(c.Directory, ConfigFileName) +} + +func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { + return args := []string{ + commandName, + fmt.Sprintf("--config=%s", c.configPath()), + } + + return exec.Command(c.Executable, args...) 
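// Reviewer note (not patch content): as first committed, this file does not
// compile: the args literal in configCommand is missing commas after the
// --httpPort and --runMode entries, initDBCommand/generateKeysCommand/
// startCommand return exec.Cmd instead of *exec.Cmd and call getCommand
// without the receiver ("retturn" is also a typo), configPath refers to
// ConfigFileName instead of c.ConfigFileName, and getCommand uses the invalid
// statement "return args := ...". These issues are corrected later in this
// series by "[PATCH 064/169] temp commit".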
+} \ No newline at end of file diff --git a/packages/vdemanager/manager.go b/packages/vdemanager/manager.go new file mode 100644 index 000000000..d35362ce2 --- /dev/null +++ b/packages/vdemanager/manager.go @@ -0,0 +1,286 @@ +package vdemanager + +import ( + "errors" + "fmt" + "io/ioutil" + "os" + "path" + "path/filepath" + + "github.com/GenesisKernel/go-genesis/packages/conf" + + "github.com/GenesisKernel/go-genesis/packages/consts" + "github.com/GenesisKernel/go-genesis/packages/model" + pConf "github.com/rpoletaev/supervisord/config" + "github.com/rpoletaev/supervisord/process" + log "github.com/sirupsen/logrus" +) + +const ( + childFolder = "configs" + createRoleTemplate = `CREATE ROLE %s WITH ENCRYPTED PASSWORD '%s' NOSUPERUSER NOCREATEDB NOCREATEROLE INHERIT LOGIN` + createDBTemplate = `CREATE DATABASE %s OWNER %s` + + dropDBTemplate = `DROP OWNED BY %s CASCADE` + dropDBRoleTemplate = `DROP ROLE IF EXISTS %s` + commandTemplate = `%s -VDEMode=true -configPath=%s -workDir=%s` +) + +var ( + errWrongMode = errors.New("node must be running as VDEMaster") +) + +// VDEManager struct +type VDEManager struct { + processes *process.ProcessManager +} + +var ( + Manager *VDEManager + childConfigsPath string +) + +// InitVDEManager create init instance of VDEManager +func InitVDEManager() error { + if err := prepareWorkDir(); err != nil { + return err + } + + return initProcessManager() +} + +func prepareWorkDir() error { + childConfigsPath = path.Join(conf.Config.DataDir, childFolder) + + if _, err := os.Stat(childConfigsPath); os.IsNotExist(err) { + if err := os.Mkdir(childConfigsPath, 0700); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("creating configs directory") + return err + } + } + + return nil +} + +// CreateVDE creates one instance of VDE +func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) error { + + config := ChildVDEConfig{ + Executable: path.Join(conf.Config.DataDir, consts.NodeExecutableFileName), + Name: name, + Directory: path.Join(childConfigsPath, name) + DBUser: dbUser, + DBPassword: dbPassword, + ConfigFileName: consts.DefaultConfigFile, + HTTPPort: port, + } + + if mgr.processes == nil { + log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("creating new VDE") + return errWrongMode + } + + if err := mgr.createVDEDB(name, dbUser, dbPassword); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on creating VDE DB") + return err + } + + if err := mgr.initVDEDir(name); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "DirName": name, "error": err}).Error("on init VDE dir") + return err + } + + cmd := config.configCommand() + if err := cmd.Run(); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "args": cmd.Args}).Error("on run config command") + return err + } + + if err := config.generateKeysCommand().Run(); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "args": cmd.Args}).Error("on run generateKeys command") + return err + } + + if err := config.initDBCommand().Run(); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "args": cmd.Args}).Error("on run initDB command") + return err + } + + procConfEntry := pConf.NewConfigEntry(config.Directory) + procConfEntry.Name = "program:" + name + command := fmt.Sprintf("%s --configPath=%s", config.Executable, config.Directory) + procConfEntry.AddKeyValue("command", command) + proc := process.NewProcess("vdeMaster", confEntry) + + 
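	// Reviewer note (not patch content): CreateVDE as committed here also does
	// not compile: the ChildVDEConfig literal above is missing a comma after
	// the Directory field, and NewProcess is called with confEntry although the
	// entry built a few lines up is named procConfEntry. The missing comma and
	// the hard-coded Executable path are corrected by "[PATCH 064/169] temp
	// commit" further down.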
mgr.processes.Add(name, proc) + mgr.processes.Find(name).Start(true) + return nil +} + +// ListProcess returns list of process names with state of process +func (mgr *VDEManager) ListProcess() (map[string]string, error) { + if mgr.processes == nil { + log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("get VDE list") + return nil, errWrongMode + } + + list := make(map[string]string) + + mgr.processes.ForEachProcess(func(p *process.Process) { + list[p.GetName()] = p.GetState().String() + }) + + return list, nil +} + +// DeleteVDE stop VDE process and remove VDE folder +func (mgr *VDEManager) DeleteVDE(name string) error { + + if mgr.processes == nil { + log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("deleting VDE") + return errWrongMode + } + + p := mgr.processes.Find(name) + if p != nil { + p.Stop(true) + } + + vdeDir := path.Join(childConfigsPath, name) + vdeConfigPath := filepath.Join(vdeDir, consts.DefaultConfigFile) + vdeConfig, err := conf.GetConfigFromPath(vdeConfigPath) + if err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Errorf("Getting config from path %s", vdeConfigPath) + return err + } + + dropDBquery := fmt.Sprintf(dropDBTemplate, vdeConfig.DB.User) + if err := model.DBConn.Exec(dropDBquery).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("Deleting vde db") + return err + } + + dropVDERoleQuery := fmt.Sprintf(dropDBRoleTemplate, vdeConfig.DB.User) + if err := model.DBConn.Exec(dropVDERoleQuery).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("Deleting vde db user") + return err + } + + return os.RemoveAll(vdeDir) +} + +// StartVDE find process and then start him +func (mgr *VDEManager) StartVDE(name string) error { + + if mgr.processes == nil { + log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("starting VDE") + return errWrongMode + } + + proc := mgr.processes.Find(name) + if proc == nil { + err := fmt.Errorf(`VDE '%s' is not exists`, name) + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on find VDE process") + return err + } + + state := proc.GetState() + if state == process.STOPPED || + state == process.EXITED || + state == process.FATAL { + proc.Start(true) + log.WithFields(log.Fields{"vde_name": name}).Info("VDE started") + return nil + } + + err := fmt.Errorf("VDE '%s' is %s", name, state) + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on starting VDE") + return err +} + +// StopVDE find process with definded name and then stop him +func (mgr *VDEManager) StopVDE(name string) error { + + if mgr.processes == nil { + log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("on stopping VDE process") + return errWrongMode + } + + proc := mgr.processes.Find(name) + if proc == nil { + err := fmt.Errorf(`VDE '%s' is not exists`, name) + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on find VDE process") + return err + } + + state := proc.GetState() + if state == process.RUNNING || + state == process.STARTING { + proc.Stop(true) + log.WithFields(log.Fields{"vde_name": name}).Info("VDE is stoped") + return nil + } + + err := fmt.Errorf("VDE '%s' is %s", name, state) + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on stoping VDE") + return err +} + +func (mgr *VDEManager) 
createVDEDB(vdeName, login, pass string) error { + + if err := model.DBConn.Exec(fmt.Sprintf(createRoleTemplate, login, pass)).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating VDE DB User") + return err + } + + if err := model.DBConn.Exec(fmt.Sprintf(createDBTemplate, vdeName, login)).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating VDE DB") + return err + } + + return nil +} + +func (mgr *VDEManager) initVDEDir(vdeName string) error { + + vdeDirName := path.Join(childConfigsPath, vdeName) + if _, err := os.Stat(vdeDirName); os.IsNotExist(err) { + if err := os.Mkdir(vdeDirName, 0700); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("creating VDE directory") + return err + } + } + + return nil +} + +func initProcessManager() error { + Manager = &VDEManager{ + processes: process.NewProcessManager(), + } + + list, err := ioutil.ReadDir(childConfigsPath) + if err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err, "path": childConfigsPath}).Error("Initialising VDE list") + return err + } + + for _, item := range list { + if item.IsDir() { + procDir := path.Join(childConfigsPath, item.Name()) + commandStr := fmt.Sprintf(commandTemplate, bin(), filepath.Join(procDir, consts.DefaultConfigFile), procDir) + confEntry := pConf.NewConfigEntry(procDir) + confEntry.Name = "program:" + item.Name() + confEntry.AddKeyValue("command", commandStr) + confEntry.AddKeyValue("redirect_stderr", "true") + confEntry.AddKeyValue("autostart", "true") + confEntry.AddKeyValue("autorestart", "true") + + proc := process.NewProcess("vdeMaster", confEntry) + Manager.processes.Add(item.Name(), proc) + } + } + + return nil +} From 55159b5afea76592c7ff824d9e939d72f8066516 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:37:01 +0300 Subject: [PATCH 061/169] setup vde mode for vm in default handler --- packages/api/api.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/api/api.go b/packages/api/api.go index 1bdeb76d2..44a877c47 100644 --- a/packages/api/api.go +++ b/packages/api/api.go @@ -30,6 +30,7 @@ import ( hr "github.com/julienschmidt/httprouter" log "github.com/sirupsen/logrus" + "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/model" @@ -241,10 +242,8 @@ func fillToken(w http.ResponseWriter, r *http.Request, data *apiData, logger *lo func fillParams(params map[string]int) apiHandle { return func(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.Entry) error { - // Getting and validating request parameters - vde := r.FormValue(`vde`) - if vde == `1` || vde == `true` { - data.vm = smart.GetVM(true, data.ecosystemId) + if conf.Config.IsSupportingVDE() { + data.vm = smart.GetVM(true, consts.DefaultVDE) if data.vm == nil { return errorAPI(w, `E_VDE`, http.StatusBadRequest, data.ecosystemId) } @@ -252,6 +251,7 @@ func fillParams(params map[string]int) apiHandle { } else { data.vm = smart.GetVM(false, 0) } + for key, par := range params { val := r.FormValue(key) if par&pOptional == 0 && len(val) == 0 { From 0a5a2d0772a78b368c52da2c0a5a79ecfb46a819 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:37:49 +0300 Subject: [PATCH 062/169] separate routes by vde --- packages/api/route.go | 26 
+++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/packages/api/route.go b/packages/api/route.go index 1fb9e45b0..be778c441 100644 --- a/packages/api/route.go +++ b/packages/api/route.go @@ -19,6 +19,7 @@ package api import ( "strings" + "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/utils/tx" @@ -50,14 +51,8 @@ func Route(route *hr.Router) { route.Handle(`OPTIONS`, consts.ApiPath+`*name`, optionsHandler()) route.Handle(`GET`, consts.ApiPath+`data/:table/:id/:column/:hash`, dataHandler()) - get(`appparam/:appid/:name`, `?ecosystem:int64`, authWallet, appParam) - get(`appparams/:appid`, `?ecosystem:int64,?names:string`, authWallet, appParams) - get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) get(`contract/:name`, ``, authWallet, getContract) get(`contracts`, `?limit ?offset:int64`, authWallet, getContracts) - get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) - get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) - get(`ecosystems`, ``, authWallet, ecosystems) get(`getuid`, ``, getUID) get(`list/:name`, `?limit ?offset:int64,?columns:string`, authWallet, list) get(`row/:name/:id`, `?columns:string`, authWallet, row) @@ -67,11 +62,7 @@ func Route(route *hr.Router) { get(`systemparams`, `?names:string`, authWallet, systemParams) get(`table/:name`, ``, authWallet, table) get(`tables`, `?limit ?offset:int64`, authWallet, tables) - get(`txstatus/:hash`, ``, authWallet, txstatus) get(`test/:name`, ``, getTest) - get(`history/:table/:id`, ``, authWallet, getHistory) - get(`block/:id`, ``, getBlockInfo) - get(`maxblockid`, ``, getMaxBlockID) get(`version`, ``, getVersion) get(`avatar/:ecosystem/:member`, ``, getAvatar) get(`config/:option`, ``, getConfigOption) @@ -80,7 +71,6 @@ func Route(route *hr.Router) { post(`content/page/:name`, `?lang:string`, authWallet, getPage) post(`content/menu/:name`, `?lang:string`, authWallet, getMenu) post(`content/hash/:name`, ``, getPageHash) - post(`vde/create`, ``, authWallet, vdeCreate) post(`login`, `?pubkey signature:hex,?key_id ?mobile:string,?ecosystem ?expire ?role_id:int64`, login) post(`prepare/:name`, `?token_ecosystem:int64,?max_sum ?payover:string`, authWallet, contractHandlers.prepareContract) post(`prepareMultiple`, `data:string`, authWallet, contractHandlers.prepareMultipleContract) @@ -93,6 +83,20 @@ func Route(route *hr.Router) { post(`updnotificator`, `ids:string`, updateNotificator) methodRoute(route, `POST`, `node/:name`, `?token_ecosystem:int64,?max_sum ?payover:string`, contractHandlers.nodeContract) + + if !conf.Config.IsSupportingVDE() { + get(`appparam/:appid/:name`, `?ecosystem:int64`, authWallet, appParam) + get(`appparams/:appid`, `?ecosystem:int64,?names:string`, authWallet, appParams) + get(`txstatus/:hash`, ``, authWallet, txstatus) + get(`history/:table/:id`, ``, authWallet, getHistory) + get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) + get(`block/:id`, ``, getBlockInfo) + get(`maxblockid`, ``, getMaxBlockID) + get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) + get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) + get(`systemparams`, `?names:string`, authWallet, systemParams) + get(`ecosystems`, ``, authWallet, ecosystems) + } } func processParams(input string) (params map[string]int) { From f1e9f313dececda7d53bba0a24d301c5d1716ebe Mon Sep 
17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:38:36 +0300 Subject: [PATCH 063/169] separate vde migration to own package --- packages/migration/vde/vde.go | 2 +- packages/model/db.go | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde.go index b63cf858d..640338e93 100644 --- a/packages/migration/vde/vde.go +++ b/packages/migration/vde/vde.go @@ -1,4 +1,4 @@ -package migration +package vde var SchemaVDE = ` DROP TABLE IF EXISTS "%[1]d_vde_members"; diff --git a/packages/model/db.go b/packages/model/db.go index 4a6c7b0be..db9665979 100644 --- a/packages/model/db.go +++ b/packages/model/db.go @@ -10,6 +10,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/crypto" "github.com/GenesisKernel/go-genesis/packages/migration" + "github.com/GenesisKernel/go-genesis/packages/migration/vde" "github.com/jinzhu/gorm" log "github.com/sirupsen/logrus" @@ -155,7 +156,7 @@ func ExecSchemaEcosystem(db *DbTransaction, id int, wallet int64, name string, f // ExecSchemaLocalData is executing schema with local data func ExecSchemaLocalData(id int, wallet int64) error { - return DBConn.Exec(fmt.Sprintf(migration.SchemaVDE, id, wallet)).Error + return DBConn.Exec(fmt.Sprintf(vde.SchemaVDE, id, wallet)).Error } // ExecSchema is executing schema From febb028de4509ab08216b064e028b12042aa7335 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 15:59:38 +0300 Subject: [PATCH 064/169] temp commit --- packages/consts/log_types.go | 2 ++ packages/smart/smart.go | 10 +++++++++- packages/vdemanager/config.go | 29 +++++++++++++++-------------- packages/vdemanager/manager.go | 10 ++++++++-- 4 files changed, 34 insertions(+), 17 deletions(-) diff --git a/packages/consts/log_types.go b/packages/consts/log_types.go index d44b81bad..5f421a00b 100644 --- a/packages/consts/log_types.go +++ b/packages/consts/log_types.go @@ -54,4 +54,6 @@ const ( BCActualizationError = "BCActualizationError" SchedulerError = "SchedulerError" SyncProcess = "SyncProcess" + WrongModeError = "WrongModeError" + VDEManagerError = "VDEManagerError" ) diff --git a/packages/smart/smart.go b/packages/smart/smart.go index bee64083b..ea25a1c7d 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -486,7 +486,15 @@ func LoadVDEContracts(transaction *model.DbTransaction, prefix string) (err erro } state := converter.StrToInt64(prefix) vm := newVM() - EmbedFuncs(vm, script.VMTypeVDE) + + var vmt script.VMType + if conf.Config.IsVDE() { + vmt = script.VMTypeVDE + } else if conf.Config.IsVDEMaster() { + vmt = script.VMTypeVDEMaster + } + + EmbedFuncs(vm, vmt) smartVDE[state] = vm LoadSysFuncs(vm, int(state)) for _, item := range contracts { diff --git a/packages/vdemanager/config.go b/packages/vdemanager/config.go index 450ff5aac..bcafa10ff 100644 --- a/packages/vdemanager/config.go +++ b/packages/vdemanager/config.go @@ -7,10 +7,11 @@ import ( ) const ( - inidDBCommand = "initDatabase" + inidDBCommand = "initDatabase" genKeysCommand = "generateKeys" - startCommand = "start" + startCommand = "start" ) + // ChildVDEConfig struct to manage child entry type ChildVDEConfig struct { Executable string @@ -30,36 +31,36 @@ func (c ChildVDEConfig) configCommand() *exec.Cmd { fmt.Sprintf("--dbUser=%s", c.DBUser), fmt.Sprintf("--dbPassword=%s", c.DBPassword), fmt.Sprintf("--dbName=%s", c.Name), - fmt.Sprintf("--httpPort=%d", c.HTTPPort) + fmt.Sprintf("--httpPort=%d", c.HTTPPort), 
fmt.Sprintf("--dataDir=%s", c.Directory), fmt.Sprintf("--keysDir=%s", c.Directory), - fmt.Sprintf("--runMode=VDE") + "--runMode=VDE", } return exec.Command(c.Executable, args...) } -func (c ChildVDEConfig) initDBCommand() exec.Cmd { - return getCommand(inidDBCommand) +func (c ChildVDEConfig) initDBCommand() *exec.Cmd { + return c.getCommand(inidDBCommand) } -func (c ChildVDEConfig) generateKeysCommand() exec.Cmd { - return getCommand(genKeysCommand) +func (c ChildVDEConfig) generateKeysCommand() *exec.Cmd { + return c.getCommand(genKeysCommand) } -func (c ChildVDEConfig) startCommand() exec.Cmd { - retturn getCommand(startCommand) +func (c ChildVDEConfig) startCommand() *exec.Cmd { + return c.getCommand(startCommand) } func (c ChildVDEConfig) configPath() string { - return filepath.Join(c.Directory, ConfigFileName) + return filepath.Join(c.Directory, c.ConfigFileName) } -func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { - return args := []string{ +func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { + args := []string{ commandName, fmt.Sprintf("--config=%s", c.configPath()), } return exec.Command(c.Executable, args...) -} \ No newline at end of file +} diff --git a/packages/vdemanager/manager.go b/packages/vdemanager/manager.go index d35362ce2..d1a37d413 100644 --- a/packages/vdemanager/manager.go +++ b/packages/vdemanager/manager.go @@ -66,10 +66,16 @@ func prepareWorkDir() error { // CreateVDE creates one instance of VDE func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) error { + execPath, err := os.Executable() + if err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("on getting executable path") + return err + } + config := ChildVDEConfig{ - Executable: path.Join(conf.Config.DataDir, consts.NodeExecutableFileName), + Executable: execPath, Name: name, - Directory: path.Join(childConfigsPath, name) + Directory: path.Join(childConfigsPath, name), DBUser: dbUser, DBPassword: dbPassword, ConfigFileName: consts.DefaultConfigFile, From e7c0a7e37a4e1b36f175033b3c34cfd26f828f93 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Tue, 8 May 2018 09:59:10 +0300 Subject: [PATCH 065/169] temporary commit --- cmd/config.go | 2 +- packages/conf/conf.go | 35 +++++++++-- packages/consts/consts.go | 3 + packages/daemons/common.go | 13 ++++- packages/daylight/daemonsctl/daemonsctl.go | 27 ++++----- packages/daylight/start.go | 44 +++++++------- packages/script/vminit.go | 2 + packages/smart/smart.go | 6 +- packages/tcpserver/tcpserver.go | 6 ++ packages/vdemanager/manager.go | 68 ++++++++++------------ 10 files changed, 123 insertions(+), 83 deletions(-) diff --git a/cmd/config.go b/cmd/config.go index 90ace6d01..127f7c827 100644 --- a/cmd/config.go +++ b/cmd/config.go @@ -136,7 +136,7 @@ func init() { configCmd.Flags().StringVar(&conf.Config.TLSKey, "tls-key", "", "Filepath to the private key") configCmd.Flags().Int64Var(&conf.Config.MaxPageGenerationTime, "mpgt", 1000, "Max page generation time in ms") configCmd.Flags().StringSliceVar(&conf.Config.NodesAddr, "nodesAddr", []string{}, "List of addresses for downloading blockchain") - configCmd.Flags().StringVar(&conf.Config.RunningMode, "runMode", "CommonBlockchain", "Node running mode") + configCmd.Flags().StringVar(&conf.Config.RunningMode, "runMode", "PublicBlockchain", "Node running mode") viper.BindPFlag("PidFilePath", configCmd.Flags().Lookup("pid")) viper.BindPFlag("LockFilePath", configCmd.Flags().Lookup("lock")) diff --git a/packages/conf/conf.go 
b/packages/conf/conf.go index 2e175b01c..b91be9b38 100644 --- a/packages/conf/conf.go +++ b/packages/conf/conf.go @@ -133,10 +133,33 @@ func LoadConfig(path string) error { if err != nil { return errors.Wrapf(err, "marshalling config to global struct variable") } - return nil } +// GetConfigFromPath reads config from path and returns GlobalConfig struct +func GetConfigFromPath(path string) (*GlobalConfig, error) { + log.WithFields(log.Fields{"path": path}).Info("Loading config") + + _, err := os.Stat(path) + if os.IsNotExist(err) { + return nil, errors.Errorf("Unable to load config file %s", path) + } + + viper.SetConfigFile(path) + err = viper.ReadInConfig() + if err != nil { + return nil, errors.Wrapf(err, "reading config") + } + + c := &GlobalConfig{} + err = viper.Unmarshal(c) + if err != nil { + return c, errors.Wrapf(err, "marshalling config to global struct variable") + } + + return c, nil +} + // SaveConfig save global parameters to configFile func SaveConfig(path string) error { dir := filepath.Dir(path) @@ -219,26 +242,26 @@ func GetNodesAddr() []string { } // IsPrivateBlockchain check running mode -func (c *GlobalConfig) IsPrivateBlockchain() bool { +func (c GlobalConfig) IsPrivateBlockchain() bool { return RunMode(c.RunningMode).IsPrivateBlockchain() } // IsPublicBlockchain check running mode -func (c *GlobalConfig) IsPublicBlockchain() bool { +func (c GlobalConfig) IsPublicBlockchain() bool { return RunMode(c.RunningMode).IsPublicBlockchain() } // IsVDE check running mode -func (c *GlobalConfig) IsVDE() bool { +func (c GlobalConfig) IsVDE() bool { return RunMode(c.RunningMode).IsVDE() } // IsVDEMaster check running mode -func (c *GlobalConfig) IsVDEMaster() bool { +func (c GlobalConfig) IsVDEMaster() bool { return RunMode(c.RunningMode).IsVDEMaster() } // IsSupportingVDE check running mode -func (c *GlobalConfig) IsSupportingVDE() bool { +func (c GlobalConfig) IsSupportingVDE() bool { return RunMode(c.RunningMode).IsSupportingVDE() } diff --git a/packages/consts/consts.go index 9684221d7..45b07c9b6 100644 --- a/packages/consts/consts.go +++ b/packages/consts/consts.go @@ -157,3 +157,6 @@ const TxRequestExpire = 1 * time.Minute // DefaultTempDirName is default name of temporary directory const DefaultTempDirName = "genesis-temp" + +// DefaultVDE is always 1 +const DefaultVDE = 1 diff --git a/packages/daemons/common.go index 861c03983..8f1bb4d6b 100644 --- a/packages/daemons/common.go +++ b/packages/daemons/common.go @@ -130,7 +130,7 @@ func StartDaemons() { utils.CancelFunc = cancel utils.ReturnCh = make(chan string) - daemonsToStart := serverList + daemonsToStart := getDaemonsToStart() if conf.Config.TestRollBack { daemonsToStart = rollbackList } @@ -156,3 +156,14 @@ func getHostPort(h string) string { } return fmt.Sprintf("%s:%d", h, consts.DEFAULT_TCP_PORT) } + +func getDaemonsToStart() []string { + if conf.Config.IsSupportingVDE() { + return []string{ + "Notificator", + "Scheduler", + } + } + + return serverList +} diff --git a/packages/daylight/daemonsctl/daemonsctl.go index cdddac4d8..84cac3036 100644 --- a/packages/daylight/daemonsctl/daemonsctl.go +++ b/packages/daylight/daemonsctl/daemonsctl.go @@ -14,17 +14,19 @@ import ( // RunAllDaemons start daemons, load contracts and tcpserver func RunAllDaemons() error { - logEntry := log.WithFields(log.Fields{"daemon_name": "block_collection"}) - - daemons.InitialLoad(logEntry) - err := syspar.SysUpdate(nil) - if err != nil { -
log.Errorf("can't read system parameters: %s", utils.ErrInfo(err)) - return err - } - - if data, ok := parser.GetDataFromFirstBlock(); ok { - syspar.SetFirstBlockData(data) + if !conf.Config.IsSupportingVDE() { + logEntry := log.WithFields(log.Fields{"daemon_name": "block_collection"}) + + daemons.InitialLoad(logEntry) + err := syspar.SysUpdate(nil) + if err != nil { + log.Errorf("can't read system parameters: %s", utils.ErrInfo(err)) + return err + } + + if data, ok := parser.GetDataFromFirstBlock(); ok { + syspar.SetFirstBlockData(data) + } } log.Info("load contracts") @@ -36,8 +38,7 @@ func RunAllDaemons() error { log.Info("start daemons") daemons.StartDaemons() - err = tcpserver.TcpListener(conf.Config.TCPServer.Str()) - if err != nil { + if err := tcpserver.TcpListener(conf.Config.TCPServer.Str()); err != nil { log.Errorf("can't start tcp servers, stop") return err } diff --git a/packages/daylight/start.go b/packages/daylight/start.go index c2017942c..946523556 100644 --- a/packages/daylight/start.go +++ b/packages/daylight/start.go @@ -39,6 +39,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/service" "github.com/GenesisKernel/go-genesis/packages/statsd" "github.com/GenesisKernel/go-genesis/packages/utils" + "github.com/GenesisKernel/go-genesis/packages/vdemanager" "github.com/julienschmidt/httprouter" log "github.com/sirupsen/logrus" @@ -181,15 +182,6 @@ func initRoutes(listenHost string) { httpListener(listenHost, route) } -func logBlockchainMode() { - mode := "private" - if !conf.Config.PrivateBlockchain { - mode = "non private" - } - - log.WithFields(log.Fields{"mode": mode}).Error("Node running mode") -} - // Start starts the main code of the program func Start() { var err error @@ -218,7 +210,7 @@ func Start() { } } - logBlockchainMode() + log.WithFields(log.Fields{"mode": conf.Config.RunningMode}).Info("Node running mode") f := utils.LockOrDie(conf.Config.LockFilePath) defer f.Unlock() @@ -259,22 +251,28 @@ func Start() { os.Exit(1) } - var availableBCGap int64 = consts.AvailableBCGap - if syspar.GetRbBlocks1() > consts.AvailableBCGap { - availableBCGap = syspar.GetRbBlocks1() - consts.AvailableBCGap - } + if !conf.Config.IsSupportingVDE() { + var availableBCGap int64 = consts.AvailableBCGap + if syspar.GetRbBlocks1() > consts.AvailableBCGap { + availableBCGap = syspar.GetRbBlocks1() - consts.AvailableBCGap + } - blockGenerationDuration := time.Millisecond * time.Duration(syspar.GetMaxBlockGenerationTime()) - blocksGapDuration := time.Second * time.Duration(syspar.GetGapsBetweenBlocks()) - blockGenerationTime := blockGenerationDuration + blocksGapDuration + blockGenerationDuration := time.Millisecond * time.Duration(syspar.GetMaxBlockGenerationTime()) + blocksGapDuration := time.Second * time.Duration(syspar.GetGapsBetweenBlocks()) + blockGenerationTime := blockGenerationDuration + blocksGapDuration - checkingInterval := blockGenerationTime * time.Duration(syspar.GetRbBlocks1()-consts.DefaultNodesConnectDelay) - na := service.NewNodeRelevanceService(availableBCGap, checkingInterval) - na.Run() + checkingInterval := blockGenerationTime * time.Duration(syspar.GetRbBlocks1()-consts.DefaultNodesConnectDelay) + na := service.NewNodeRelevanceService(availableBCGap, checkingInterval) + na.Run() - err = service.InitNodesBanService() - if err != nil { - log.WithError(err).Fatal("Can't init ban service") + err = service.InitNodesBanService() + if err != nil { + log.WithError(err).Fatal("Can't init ban service") + } + } + + if conf.Config.IsVDEMaster() { + 
vdemanager.InitVDEManager() } } diff --git a/packages/script/vminit.go b/packages/script/vminit.go index 84d9a561b..a82309641 100644 --- a/packages/script/vminit.go +++ b/packages/script/vminit.go @@ -69,6 +69,8 @@ const ( VMTypeSmart VMType = 1 // VMTypeVDE is vde vm type VMTypeVDE VMType = 2 + // VMTypeVDEMaster is VDEMaster type + VMTypeVDEMaster VMType = 3 TagFile = "file" TagAddress = "address" diff --git a/packages/smart/smart.go b/packages/smart/smart.go index ea25a1c7d..612476eb3 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -877,7 +877,7 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { logger.WithFields(log.Fields{"type": consts.InvalidObject}).Error("incorrect sign") return retError(ErrIncorrectSign) } - if sc.TxSmart.EcosystemID > 0 && !sc.VDE && !conf.Config.PrivateBlockchain { + if sc.TxSmart.EcosystemID > 0 && !sc.VDE && !conf.Config.IsPrivateBlockchain() { if sc.TxSmart.TokenEcosystem == 0 { sc.TxSmart.TokenEcosystem = 1 } @@ -999,8 +999,8 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { result = result[:255] } } - if (flags&CallRollback) == 0 && (flags&CallAction) != 0 && sc.TxSmart.EcosystemID > 0 && - !sc.VDE && !conf.Config.PrivateBlockchain && sc.TxContract.Name != `@1NewUser` { + + if (flags&CallRollback) == 0 && (flags&CallAction) != 0 && sc.TxSmart.EcosystemID > 0 && !sc.VDE && !conf.Config.IsPrivateBlockchain() { apl := sc.TxUsedCost.Mul(fuelRate) wltAmount, ierr := decimal.NewFromString(payWallet.Amount) diff --git a/packages/tcpserver/tcpserver.go b/packages/tcpserver/tcpserver.go index c533456dc..1b11f111b 100644 --- a/packages/tcpserver/tcpserver.go +++ b/packages/tcpserver/tcpserver.go @@ -22,6 +22,8 @@ import ( "sync/atomic" "time" + "github.com/GenesisKernel/go-genesis/packages/conf" + "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/service" @@ -107,6 +109,10 @@ func HandleTCPRequest(rw net.Conn) { // TcpListener is listening tcp address func TcpListener(laddr string) error { + if conf.Config.IsSupportingVDE() { + return nil + } + if strings.HasPrefix(laddr, "127.") { log.Warn("Listening at local address: ", laddr) } diff --git a/packages/vdemanager/manager.go b/packages/vdemanager/manager.go index d1a37d413..4cca4ac8e 100644 --- a/packages/vdemanager/manager.go +++ b/packages/vdemanager/manager.go @@ -24,7 +24,7 @@ const ( dropDBTemplate = `DROP OWNED BY %s CASCADE` dropDBRoleTemplate = `DROP ROLE IF EXISTS %s` - commandTemplate = `%s -VDEMode=true -configPath=%s -workDir=%s` + commandTemplate = `%s start --config=%s` ) var ( @@ -33,49 +33,35 @@ var ( // VDEManager struct type VDEManager struct { - processes *process.ProcessManager + processes *process.ProcessManager + execPath string + childConfigsPath string } var ( - Manager *VDEManager - childConfigsPath string + Manager *VDEManager ) -// InitVDEManager create init instance of VDEManager -func InitVDEManager() error { - if err := prepareWorkDir(); err != nil { - return err - } - - return initProcessManager() -} - -func prepareWorkDir() error { - childConfigsPath = path.Join(conf.Config.DataDir, childFolder) +func prepareWorkDir() (string, error) { + childConfigsPath := path.Join(conf.Config.DataDir, childFolder) if _, err := os.Stat(childConfigsPath); os.IsNotExist(err) { if err := os.Mkdir(childConfigsPath, 0700); err != nil { log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("creating configs directory") - return err + return "", err } } - return nil + return 
childConfigsPath, nil } // CreateVDE creates one instance of VDE func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) error { - execPath, err := os.Executable() - if err != nil { - log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("on getting executable path") - return err - } - config := ChildVDEConfig{ - Executable: execPath, + Executable: mgr.execPath, Name: name, - Directory: path.Join(childConfigsPath, name), + Directory: path.Join(mgr.childConfigsPath, name), DBUser: dbUser, DBPassword: dbPassword, ConfigFileName: consts.DefaultConfigFile, @@ -117,7 +103,7 @@ func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) erro procConfEntry.Name = "program:" + name command := fmt.Sprintf("%s --configPath=%s", config.Executable, config.Directory) procConfEntry.AddKeyValue("command", command) - proc := process.NewProcess("vdeMaster", confEntry) + proc := process.NewProcess("vdeMaster", procConfEntry) mgr.processes.Add(name, proc) mgr.processes.Find(name).Start(true) @@ -153,7 +139,7 @@ func (mgr *VDEManager) DeleteVDE(name string) error { p.Stop(true) } - vdeDir := path.Join(childConfigsPath, name) + vdeDir := path.Join(mgr.childConfigsPath, name) vdeConfigPath := filepath.Join(vdeDir, consts.DefaultConfigFile) vdeConfig, err := conf.GetConfigFromPath(vdeConfigPath) if err != nil { @@ -250,7 +236,7 @@ func (mgr *VDEManager) createVDEDB(vdeName, login, pass string) error { func (mgr *VDEManager) initVDEDir(vdeName string) error { - vdeDirName := path.Join(childConfigsPath, vdeName) + vdeDirName := path.Join(mgr.childConfigsPath, vdeName) if _, err := os.Stat(vdeDirName); os.IsNotExist(err) { if err := os.Mkdir(vdeDirName, 0700); err != nil { log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("creating VDE directory") @@ -261,21 +247,33 @@ func (mgr *VDEManager) initVDEDir(vdeName string) error { return nil } -func initProcessManager() error { +func InitVDEManager() { + + execPath, err := os.Executable() + if err != nil { + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Fatal("on determine executable path") + } + + childConfigsPath, err := prepareWorkDir() + if err != nil { + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Fatal("on prepare child configs folder") + } + Manager = &VDEManager{ - processes: process.NewProcessManager(), + processes: process.NewProcessManager(), + execPath: execPath, + childConfigsPath: childConfigsPath, } list, err := ioutil.ReadDir(childConfigsPath) if err != nil { - log.WithFields(log.Fields{"type": consts.IOError, "error": err, "path": childConfigsPath}).Error("Initialising VDE list") - return err + log.WithFields(log.Fields{"type": consts.IOError, "error": err, "path": childConfigsPath}).Fatal("on read child VDE directory") } for _, item := range list { if item.IsDir() { - procDir := path.Join(childConfigsPath, item.Name()) - commandStr := fmt.Sprintf(commandTemplate, bin(), filepath.Join(procDir, consts.DefaultConfigFile), procDir) + procDir := path.Join(Manager.childConfigsPath, item.Name()) + commandStr := fmt.Sprintf(commandTemplate, Manager.execPath, filepath.Join(procDir, consts.DefaultConfigFile)) confEntry := pConf.NewConfigEntry(procDir) confEntry.Name = "program:" + item.Name() confEntry.AddKeyValue("command", commandStr) @@ -287,6 +285,4 @@ func initProcessManager() error { Manager.processes.Add(item.Name(), proc) } } - - return nil } From 685ab4b7926905ad6dff145a23b99812d9eb6e82 Mon Sep 17 00:00:00 2001 From: Roman 
Poletaev Date: Thu, 10 May 2018 17:15:56 +0300 Subject: [PATCH 066/169] temporary commit --- packages/api/api.go | 8 +- packages/api/login.go | 5 +- packages/api/vde.go | 4 +- packages/daemons/block_generator_tx.go | 2 +- packages/daylight/start.go | 8 + .../vde/{vde.go => vde_data_contracts.go} | 267 ++---------------- packages/migration/vde/vde_data_keys.go | 6 + packages/migration/vde/vde_data_members.go | 7 + packages/migration/vde/vde_data_menu.go | 45 +++ packages/migration/vde/vde_data_pages.go | 5 + packages/migration/vde/vde_data_parameters.go | 18 ++ packages/migration/vde/vde_data_tables.go | 68 +++++ packages/migration/vde/vde_schema.go | 143 ++++++++++ packages/model/db.go | 9 +- packages/parser/common.go | 2 +- packages/smart/smart.go | 13 +- packages/template/template.go | 2 +- 17 files changed, 342 insertions(+), 270 deletions(-) rename packages/migration/vde/{vde.go => vde_data_contracts.go} (60%) create mode 100644 packages/migration/vde/vde_data_keys.go create mode 100644 packages/migration/vde/vde_data_members.go create mode 100644 packages/migration/vde/vde_data_menu.go create mode 100644 packages/migration/vde/vde_data_pages.go create mode 100644 packages/migration/vde/vde_data_parameters.go create mode 100644 packages/migration/vde/vde_data_tables.go create mode 100644 packages/migration/vde/vde_schema.go diff --git a/packages/api/api.go b/packages/api/api.go index 44a877c47..c24d3260e 100644 --- a/packages/api/api.go +++ b/packages/api/api.go @@ -243,15 +243,11 @@ func fillToken(w http.ResponseWriter, r *http.Request, data *apiData, logger *lo func fillParams(params map[string]int) apiHandle { return func(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.Entry) error { if conf.Config.IsSupportingVDE() { - data.vm = smart.GetVM(true, consts.DefaultVDE) - if data.vm == nil { - return errorAPI(w, `E_VDE`, http.StatusBadRequest, data.ecosystemId) - } data.vde = true - } else { - data.vm = smart.GetVM(false, 0) } + data.vm = smart.GetVM() + for key, par := range params { val := r.FormValue(key) if par&pOptional == 0 && len(val) == 0 { diff --git a/packages/api/login.go b/packages/api/login.go index b55fe85c3..ef8114139 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -128,7 +128,8 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En params := make([]byte, 0) params = append(append(params, converter.EncodeLength(int64(len(hexPubKey)))...), hexPubKey...) 
- vm := smart.GetVM(false, 0) + vm := smart.GetVM() + + contract := smart.VMGetContract(vm, "NewUser", 1) info := contract.Block.Info.(*script.ContractInfo) @@ -207,7 +208,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En Address: address, IsOwner: founder == wallet, IsNode: conf.Config.KeyID == wallet, - IsVDE: model.IsTable(fmt.Sprintf(`%d_vde_tables`, ecosystemID)), + IsVDE: model.IsTable(fmt.Sprintf(`%d_vde_tables`, consts.DefaultVDE)), } data.result = &result diff --git a/packages/api/vde.go b/packages/api/vde.go index d494dba3e..cf83ec6b0 100644 --- a/packages/api/vde.go +++ b/packages/api/vde.go @@ -67,8 +67,8 @@ func InitSmartContract(sc *smart.SmartContract, data []byte) error { if err := msgpack.Unmarshal(data, &sc.TxSmart); err != nil { return err } - sc.TxContract = smart.VMGetContractByID(smart.GetVM(sc.VDE, sc.TxSmart.EcosystemID), - int32(sc.TxSmart.Type)) + + sc.TxContract = smart.VMGetContractByID(smart.GetVM(), int32(sc.TxSmart.Type)) if sc.TxContract == nil { return fmt.Errorf(`unknown contract %d`, sc.TxSmart.Type) } diff --git a/packages/daemons/block_generator_tx.go b/packages/daemons/block_generator_tx.go index 9b5ddb977..d96e58f8c 100644 --- a/packages/daemons/block_generator_tx.go +++ b/packages/daemons/block_generator_tx.go @@ -45,7 +45,7 @@ func (dtx *DelayedTx) RunForBlockID(blockID int64) { } func (dtx *DelayedTx) createTx(delayedContactID, keyID int64) error { - vm := smart.GetVM(false, 0) + vm := smart.GetVM() contract := smart.VMGetContract(vm, callDelayedContract, uint32(firstEcosystemID)) info := contract.Block.Info.(*script.ContractInfo) diff --git a/packages/daylight/start.go b/packages/daylight/start.go index 946523556..98394511e 100644 --- a/packages/daylight/start.go +++ b/packages/daylight/start.go @@ -37,6 +37,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/model" "github.com/GenesisKernel/go-genesis/packages/publisher" "github.com/GenesisKernel/go-genesis/packages/service" + "github.com/GenesisKernel/go-genesis/packages/smart" "github.com/GenesisKernel/go-genesis/packages/statsd" "github.com/GenesisKernel/go-genesis/packages/utils" "github.com/GenesisKernel/go-genesis/packages/vdemanager" @@ -271,6 +272,13 @@ func Start() { } } + if conf.Config.IsSupportingVDE() { + if err := smart.LoadVDEContracts(nil, converter.Int64ToStr(consts.DefaultVDE)); err != nil { + log.WithFields(log.Fields{"type": consts.VMError, "error": err}).Fatal("on loading vde virtual machine") + Exit(1) + } + } + if conf.Config.IsVDEMaster() { vdemanager.InitVDEManager() } diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde_data_contracts.go similarity index 60% rename from packages/migration/vde/vde.go rename to packages/migration/vde/vde_data_contracts.go index 640338e93..4e5ca29ab 100644 --- a/packages/migration/vde/vde.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -1,247 +1,6 @@ package vde -var SchemaVDE = ` - DROP TABLE IF EXISTS "%[1]d_vde_members"; - CREATE TABLE "%[1]d_vde_members" ( - "id" bigint NOT NULL DEFAULT '0', - "member_name" varchar(255) NOT NULL DEFAULT '', - "image_id" bigint, - "member_info" jsonb - ); - ALTER TABLE ONLY "%[1]d_vde_members" ADD CONSTRAINT "%[1]d_vde_members_pkey" PRIMARY KEY ("id"); - - INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('%[2]d', 'founder'); - INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('4544233900443112470', 'guest'); - - DROP TABLE IF EXISTS "%[1]d_vde_languages"; CREATE TABLE "%[1]d_vde_languages" ( - "id" bigint NOT
NULL DEFAULT '0', - "name" character varying(100) NOT NULL DEFAULT '', - "res" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_languages" ADD CONSTRAINT "%[1]d_vde_languages_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_languages_index_name" ON "%[1]d_vde_languages" (name); - - DROP TABLE IF EXISTS "%[1]d_vde_menu"; CREATE TABLE "%[1]d_vde_menu" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "title" character varying(255) NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_menu" ADD CONSTRAINT "%[1]d_vde_menu_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_menu_index_name" ON "%[1]d_vde_menu" (name); - - - INSERT INTO "%[1]d_vde_menu" ("id","name","title","value","conditions") VALUES('2','admin_menu','Admin menu','MenuItem( - Icon: "icon-screen-desktop", - Page: "interface", - Vde: "true", - Title: "Interface" -) -MenuItem( - Icon: "icon-docs", - Page: "tables", - Vde: "true", - Title: "Tables" -) -MenuItem( - Icon: "icon-briefcase", - Page: "contracts", - Vde: "true", - Title: "Smart Contracts" -) -MenuItem( - Icon: "icon-settings", - Page: "parameters", - Vde: "true", - Title: "Ecosystem parameters" -) -MenuItem( - Icon: "icon-globe", - Page: "languages", - Vde: "true", - Title: "Language resources" -) -MenuItem( - Icon: "icon-cloud-upload", - Page: "import", - Vde: "true", - Title: "Import" -) -MenuItem( - Icon: "icon-cloud-download", - Page: "export", - Vde: "true", - Title: "Export" -)','true'); - - DROP TABLE IF EXISTS "%[1]d_vde_pages"; CREATE TABLE "%[1]d_vde_pages" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "menu" character varying(255) NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '', - "validate_count" bigint NOT NULL DEFAULT '1', - "app_id" bigint NOT NULL DEFAULT '0', - "validate_mode" character(1) NOT NULL DEFAULT '0' - ); - ALTER TABLE ONLY "%[1]d_vde_pages" ADD CONSTRAINT "%[1]d_vde_pages_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_pages_index_name" ON "%[1]d_vde_pages" (name); - - INSERT INTO "%[1]d_vde_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); - - DROP TABLE IF EXISTS "%[1]d_vde_blocks"; CREATE TABLE "%[1]d_vde_blocks" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_blocks" ADD CONSTRAINT "%[1]d_vde_blocks_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_blocks_index_name" ON "%[1]d_vde_blocks" (name); - - DROP TABLE IF EXISTS "%[1]d_vde_signatures"; CREATE TABLE "%[1]d_vde_signatures" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(100) NOT NULL DEFAULT '', - "value" jsonb, - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_signatures" ADD CONSTRAINT "%[1]d_vde_signatures_pkey" PRIMARY KEY (name); - - CREATE TABLE "%[1]d_vde_contracts" ( - "id" bigint NOT NULL DEFAULT '0', - "name" text NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_contracts" ADD CONSTRAINT "%[1]d_vde_contracts_pkey" PRIMARY KEY (id); - - DROP TABLE IF EXISTS "%[1]d_vde_parameters"; - CREATE TABLE "%[1]d_vde_parameters" ( - "id" bigint NOT NULL DEFAULT '0', - "name" varchar(255) UNIQUE NOT NULL DEFAULT '', - 
"value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_parameters" ADD CONSTRAINT "%[1]d_vde_parameters_pkey" PRIMARY KEY ("id"); - CREATE INDEX "%[1]d_vde_parameters_index_name" ON "%[1]d_vde_parameters" (name); - - INSERT INTO "%[1]d_vde_parameters" ("id","name", "value", "conditions") VALUES - ('1','founder_account', '%[2]d', 'ContractConditions("MainCondition")'), - ('2','new_table', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('3','new_column', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('4','changing_tables', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('5','changing_language', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('6','changing_signature', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('7','changing_page', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('8','changing_menu', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('9','changing_contracts', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('10','stylesheet', 'body { - /* You can define your custom styles here or create custom CSS rules */ - }', 'ContractConditions("MainCondition")'), - ('11','changing_blocks', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'); - - DROP TABLE IF EXISTS "%[1]d_vde_cron"; - CREATE TABLE "%[1]d_vde_cron" ( - "id" bigint NOT NULL DEFAULT '0', - "owner" bigint NOT NULL DEFAULT '0', - "cron" varchar(255) NOT NULL DEFAULT '', - "contract" varchar(255) NOT NULL DEFAULT '', - "counter" bigint NOT NULL DEFAULT '0', - "till" timestamp NOT NULL DEFAULT timestamp '1970-01-01 00:00:00', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_cron" ADD CONSTRAINT "%[1]d_vde_cron_pkey" PRIMARY KEY ("id"); - - DROP TABLE IF EXISTS "%[1]d_vde_binaries"; - CREATE TABLE "%[1]d_vde_binaries" ( - "id" bigint NOT NULL DEFAULT '0', - "app_id" bigint NOT NULL DEFAULT '1', - "member_id" bigint NOT NULL DEFAULT '0', - "name" varchar(255) NOT NULL DEFAULT '', - "data" bytea NOT NULL DEFAULT '', - "hash" varchar(32) NOT NULL DEFAULT '', - "mime_type" varchar(255) NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_binaries" ADD CONSTRAINT "%[1]d_vde_binaries_pkey" PRIMARY KEY (id); - CREATE UNIQUE INDEX "%[1]d_vde_binaries_index_app_id_member_id_name" ON "%[1]d_vde_binaries" (app_id, member_id, name); - - CREATE TABLE "%[1]d_vde_tables" ( - "id" bigint NOT NULL DEFAULT '0', - "name" varchar(100) UNIQUE NOT NULL DEFAULT '', - "permissions" jsonb, - "columns" jsonb, - "conditions" text NOT NULL DEFAULT '', - "app_id" bigint NOT NULL DEFAULT '1' - ); - ALTER TABLE ONLY "%[1]d_vde_tables" ADD CONSTRAINT "%[1]d_vde_tables_pkey" PRIMARY KEY ("id"); - CREATE INDEX "%[1]d_vde_tables_index_name" ON "%[1]d_vde_tables" (name); - - INSERT INTO "%[1]d_vde_tables" ("id", "name", "permissions","columns", "conditions") VALUES ('1', 'contracts', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "false", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), - ('2', 'languages', - '{"insert": "ContractConditions(\"MainCondition\")", "update": 
"ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{ "name": "ContractConditions(\"MainCondition\")", - "res": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), - ('3', 'menu', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('4', 'pages', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "menu": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")", - "validate_count": "ContractConditions(\"MainCondition\")", - "validate_mode": "ContractConditions(\"MainCondition\")", - "app_id": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('5', 'blocks', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('6', 'signatures', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('7', 'cron', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"owner": "ContractConditions(\"MainCondition\")", - "cron": "ContractConditions(\"MainCondition\")", - "contract": "ContractConditions(\"MainCondition\")", - "counter": "ContractConditions(\"MainCondition\")", - "till": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractConditions("MainCondition")'), - ('8', 'binaries', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"app_id": "ContractConditions(\"MainCondition\")", - "member_id": "ContractConditions(\"MainCondition\")", - "name": "ContractConditions(\"MainCondition\")", - "data": "ContractConditions(\"MainCondition\")", - "hash": "ContractConditions(\"MainCondition\")", - "mime_type": "ContractConditions(\"MainCondition\")"}', - 'ContractConditions("MainCondition")'); - - INSERT INTO "%[1]d_vde_contracts" ("id", "name", "value", "conditions") VALUES +var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "conditions") VALUES ('1','MainCondition','contract MainCondition { conditions { if EcosysParam("founder_account")!=$key_id @@ -927,7 +686,7 @@ MenuItem( UpdateCron($Id) } }', 'ContractConditions("MainCondition")'), - ('23', 'UploadBinary', 
contract UploadBinary { + ('23', 'UploadBinary', 'contract UploadBinary { data { Name string Data bytes "file" @@ -954,5 +713,23 @@ MenuItem( $result = $Id } - }', 'ContractConditions("MainCondition")'); - ` + }', 'ContractConditions("MainCondition")'), + ('24', 'NewUser','contract NewUser { + data { + NewPubkey string + } + conditions { + $newId = PubToID($NewPubkey) + if $newId == 0 { + error "Wrong pubkey" + } + if DBFind("keys").Columns("id").WhereId($newId).One("id") != nil { + error "User already exists" + } + + $amount = Money(1000) * Money(1000000000000000000) + } + action { + DBInsert("keys", "id, pub", $newId, $NewPubKey) + } + }', 'ContractConditions("MainCondition")');` diff --git a/packages/migration/vde/vde_data_keys.go b/packages/migration/vde/vde_data_keys.go new file mode 100644 index 000000000..42e26c843 --- /dev/null +++ b/packages/migration/vde/vde_data_keys.go @@ -0,0 +1,6 @@ +package vde + +var keysDataSQL = ` +INSERT INTO "%[1]d_keys" (id, pub) +VALUES (4544233900443112470, '489347a1205c818d9a02f285faaedd0122a56138e3d985f5e1b4f6a9470f90f692a00a3453771dd7feea388ceb7aefeaf183e299c70ad1aecb7f870bfada3b86'); +` diff --git a/packages/migration/vde/vde_data_members.go b/packages/migration/vde/vde_data_members.go new file mode 100644 index 000000000..069f1ea2b --- /dev/null +++ b/packages/migration/vde/vde_data_members.go @@ -0,0 +1,7 @@ +package vde + +var membersDataSQL = ` +INSERT INTO "%[1]d_members" ("id", "member_name") +VALUES('%[2]d', 'founder'), +('4544233900443112470', 'guest'); +` diff --git a/packages/migration/vde/vde_data_menu.go b/packages/migration/vde/vde_data_menu.go new file mode 100644 index 000000000..b52a1699f --- /dev/null +++ b/packages/migration/vde/vde_data_menu.go @@ -0,0 +1,45 @@ +package vde + +var menuDataSQL = ` +INSERT INTO "%[1]d_menu" ("id","name","title","value","conditions") VALUES('2','admin_menu','Admin menu','MenuItem( + Icon: "icon-screen-desktop", + Page: "interface", + Vde: "true", + Title: "Interface" +) +MenuItem( + Icon: "icon-docs", + Page: "tables", + Vde: "true", + Title: "Tables" +) +MenuItem( + Icon: "icon-briefcase", + Page: "contracts", + Vde: "true", + Title: "Smart Contracts" +) +MenuItem( + Icon: "icon-settings", + Page: "parameters", + Vde: "true", + Title: "Ecosystem parameters" +) +MenuItem( + Icon: "icon-globe", + Page: "languages", + Vde: "true", + Title: "Language resources" +) +MenuItem( + Icon: "icon-cloud-upload", + Page: "import", + Vde: "true", + Title: "Import" +) +MenuItem( + Icon: "icon-cloud-download", + Page: "export", + Vde: "true", + Title: "Export" +)','true');` diff --git a/packages/migration/vde/vde_data_pages.go b/packages/migration/vde/vde_data_pages.go new file mode 100644 index 000000000..90ef6eab4 --- /dev/null +++ b/packages/migration/vde/vde_data_pages.go @@ -0,0 +1,5 @@ +package vde + +var pagesDataSQL = ` +INSERT INTO "%[1]d_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); +` diff --git a/packages/migration/vde/vde_data_parameters.go b/packages/migration/vde/vde_data_parameters.go new file mode 100644 index 000000000..3ba29e2f9 --- /dev/null +++ b/packages/migration/vde/vde_data_parameters.go @@ -0,0 +1,18 @@ +package vde + +var parametersDataSQL = ` +INSERT INTO "%[1]d_parameters" ("id","name", "value", "conditions") VALUES + ('1','founder_account', '%[2]d', 'ContractConditions("MainCondition")'), + ('2','new_table', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('3','new_column', 
'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('4','changing_tables', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('5','changing_language', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('6','changing_signature', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('7','changing_page', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('8','changing_menu', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('9','changing_contracts', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('10','stylesheet', 'body { + /* You can define your custom styles here or create custom CSS rules */ + }', 'ContractConditions("MainCondition")'), + ('11','changing_blocks', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'); +` diff --git a/packages/migration/vde/vde_data_tables.go b/packages/migration/vde/vde_data_tables.go new file mode 100644 index 000000000..4223e825a --- /dev/null +++ b/packages/migration/vde/vde_data_tables.go @@ -0,0 +1,68 @@ +package vde + +var tablesDataSQL = ` +INSERT INTO "%[1]d_tables" ("id", "name", "permissions","columns", "conditions") VALUES ('1', 'contracts', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "false", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), + ('2', 'languages', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{ "name": "ContractConditions(\"MainCondition\")", + "res": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), + ('3', 'menu', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('4', 'pages', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"menu": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")", +"validate_count": "ContractConditions(\"MainCondition\")", +"validate_mode": "ContractConditions(\"MainCondition\")", +"app_id": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('5', 'blocks', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('6', 'signatures', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + 
"new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('7', 'cron', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"owner": "ContractConditions(\"MainCondition\")", + "cron": "ContractConditions(\"MainCondition\")", + "contract": "ContractConditions(\"MainCondition\")", + "counter": "ContractConditions(\"MainCondition\")", + "till": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractConditions("MainCondition")'), + ('8', 'binaries', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"app_id": "ContractConditions(\"MainCondition\")", + "member_id": "ContractConditions(\"MainCondition\")", + "name": "ContractConditions(\"MainCondition\")", + "data": "ContractConditions(\"MainCondition\")", + "hash": "ContractConditions(\"MainCondition\")", + "mime_type": "ContractConditions(\"MainCondition\")"}', + 'ContractConditions("MainCondition")'); +` diff --git a/packages/migration/vde/vde_schema.go b/packages/migration/vde/vde_schema.go new file mode 100644 index 000000000..c3fda993f --- /dev/null +++ b/packages/migration/vde/vde_schema.go @@ -0,0 +1,143 @@ +package vde + +import ( + "strings" +) + +// GetVDEScript returns script for VDE schema +func GetVDEScript() string { + scripts := []string{ + schemaVDE, + membersDataSQL, + menuDataSQL, + pagesDataSQL, + parametersDataSQL, + tablesDataSQL, + contractsDataSQL, + keysDataSQL, + } + + return strings.Join(scripts, "\r\n") +} + +var schemaVDE = ` + DROP TABLE IF EXISTS "%[1]d_keys"; CREATE TABLE "%[1]d_keys" ( + "id" bigint NOT NULL DEFAULT '0', + "pub" bytea NOT NULL DEFAULT '', + "multi" bigint NOT NULL DEFAULT '0', + "deleted" bigint NOT NULL DEFAULT '0', + "blocked" bigint NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_keys" ADD CONSTRAINT "%[1]d_keys_pkey" PRIMARY KEY (id); + + DROP TABLE IF EXISTS "%[1]d_members"; + CREATE TABLE "%[1]d_members" ( + "id" bigint NOT NULL DEFAULT '0', + "member_name" varchar(255) NOT NULL DEFAULT '', + "image_id" bigint, + "member_info" jsonb + ); + ALTER TABLE ONLY "%[1]d_members" ADD CONSTRAINT "%[1]d_members_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_languages"; CREATE TABLE "%[1]d_languages" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(100) NOT NULL DEFAULT '', + "res" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_languages" ADD CONSTRAINT "%[1]d_languages_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_languages_index_name" ON "%[1]d_languages" (name); + + DROP TABLE IF EXISTS "%[1]d_menu"; CREATE TABLE "%[1]d_menu" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "title" character varying(255) NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_menu" ADD CONSTRAINT "%[1]d_menu_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_menu_index_name" ON "%[1]d_menu" (name); + + DROP TABLE IF EXISTS "%[1]d_pages"; CREATE TABLE "%[1]d_pages" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT 
NULL DEFAULT '', + "menu" character varying(255) NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '', + "validate_count" bigint NOT NULL DEFAULT '1', + "app_id" bigint NOT NULL DEFAULT '0', + "validate_mode" character(1) NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_pages" ADD CONSTRAINT "%[1]d_pages_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_pages_index_name" ON "%[1]d_pages" (name); + + DROP TABLE IF EXISTS "%[1]d_blocks"; CREATE TABLE "%[1]d_blocks" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_blocks" ADD CONSTRAINT "%[1]d_blocks_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_blocks_index_name" ON "%[1]d_blocks" (name); + + DROP TABLE IF EXISTS "%[1]d_signatures"; CREATE TABLE "%[1]d_signatures" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(100) NOT NULL DEFAULT '', + "value" jsonb, + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_signatures" ADD CONSTRAINT "%[1]d_signatures_pkey" PRIMARY KEY (name); + + CREATE TABLE "%[1]d_contracts" ( + "id" bigint NOT NULL DEFAULT '0', + "name" text NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_contracts" ADD CONSTRAINT "%[1]d_contracts_pkey" PRIMARY KEY (id); + + DROP TABLE IF EXISTS "%[1]d_parameters"; + CREATE TABLE "%[1]d_parameters" ( + "id" bigint NOT NULL DEFAULT '0', + "name" varchar(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_parameters" ADD CONSTRAINT "%[1]d_parameters_pkey" PRIMARY KEY ("id"); + CREATE INDEX "%[1]d_parameters_index_name" ON "%[1]d_parameters" (name); + + DROP TABLE IF EXISTS "%[1]d_cron"; + CREATE TABLE "%[1]d_cron" ( + "id" bigint NOT NULL DEFAULT '0', + "owner" bigint NOT NULL DEFAULT '0', + "cron" varchar(255) NOT NULL DEFAULT '', + "contract" varchar(255) NOT NULL DEFAULT '', + "counter" bigint NOT NULL DEFAULT '0', + "till" timestamp NOT NULL DEFAULT timestamp '1970-01-01 00:00:00', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_cron" ADD CONSTRAINT "%[1]d_cron_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_binaries"; + CREATE TABLE "%[1]d_binaries" ( + "id" bigint NOT NULL DEFAULT '0', + "app_id" bigint NOT NULL DEFAULT '1', + "member_id" bigint NOT NULL DEFAULT '0', + "name" varchar(255) NOT NULL DEFAULT '', + "data" bytea NOT NULL DEFAULT '', + "hash" varchar(32) NOT NULL DEFAULT '', + "mime_type" varchar(255) NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_binaries" ADD CONSTRAINT "%[1]d_binaries_pkey" PRIMARY KEY (id); + CREATE UNIQUE INDEX "%[1]d_binaries_index_app_id_member_id_name" ON "%[1]d_binaries" (app_id, member_id, name); + + CREATE TABLE "%[1]d_tables" ( + "id" bigint NOT NULL DEFAULT '0', + "name" varchar(100) UNIQUE NOT NULL DEFAULT '', + "permissions" jsonb, + "columns" jsonb, + "conditions" text NOT NULL DEFAULT '', + "app_id" bigint NOT NULL DEFAULT '1' + ); + ALTER TABLE ONLY "%[1]d_tables" ADD CONSTRAINT "%[1]d_tables_pkey" PRIMARY KEY ("id"); + CREATE INDEX "%[1]d_tables_index_name" ON "%[1]d_tables" (name); + ` diff --git a/packages/model/db.go b/packages/model/db.go index db9665979..7e32186ea 100644 --- a/packages/model/db.go +++ b/packages/model/db.go @@ -156,7 +156,7 @@ func ExecSchemaEcosystem(db *DbTransaction, id int, wallet int64, name string, f // 
ExecSchemaLocalData is executing schema with local data func ExecSchemaLocalData(id int, wallet int64) error { - return DBConn.Exec(fmt.Sprintf(vde.SchemaVDE, id, wallet)).Error + return DBConn.Exec(fmt.Sprintf(vde.GetVDEScript(), id, wallet)).Error } // ExecSchema is executing schema @@ -385,5 +385,12 @@ func InitDB(cfg conf.DBConfig) error { return err } + if conf.Config.IsSupportingVDE() { + if err := ExecSchemaLocalData(consts.DefaultVDE, conf.Config.KeyID); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating VDE schema") + return err + } + } + return nil } diff --git a/packages/parser/common.go b/packages/parser/common.go index d61617cf1..328c28dfb 100644 --- a/packages/parser/common.go +++ b/packages/parser/common.go @@ -506,7 +506,7 @@ func (p *Parser) CallContract(flags int) (resultContract string, err error) { VDE: false, Rollback: true, SysUpdate: false, - VM: smart.GetVM(false, 0), + VM: smart.GetVM(), TxSmart: *p.TxSmart, TxData: p.TxData, TxContract: p.TxContract, diff --git a/packages/smart/smart.go b/packages/smart/smart.go index 612476eb3..bb358e96e 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -66,7 +66,6 @@ const ( var ( smartVM *script.VM - smartVDE map[int64]*script.VM smartTest = make(map[string]string) ErrCurrentBalance = errors.New(`current balance is not enough`) @@ -118,17 +117,10 @@ func newVM() *script.VM { func init() { smartVM = newVM() - smartVDE = make(map[int64]*script.VM) } // GetVM is returning smart vm -func GetVM(vde bool, ecosystemID int64) *script.VM { - if vde { - if v, ok := smartVDE[ecosystemID]; ok { - return v - } - return nil - } +func GetVM() *script.VM { return smartVM } @@ -495,7 +487,6 @@ func LoadVDEContracts(transaction *model.DbTransaction, prefix string) (err erro } EmbedFuncs(vm, vmt) - smartVDE[state] = vm LoadSysFuncs(vm, int(state)) for _, item := range contracts { list, err := script.ContractsList(item[`value`]) @@ -828,7 +819,7 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { methods := []string{`init`, `conditions`, `action`, `rollback`} sc.AppendStack(sc.TxContract.Name) - sc.VM = GetVM(sc.VDE, sc.TxSmart.EcosystemID) + sc.VM = GetVM() if (flags&CallRollback) == 0 && (flags&CallAction) != 0 { if !sc.VDE { toID = sc.BlockData.KeyID diff --git a/packages/template/template.go b/packages/template/template.go index 5c0dc1842..8beb4882b 100644 --- a/packages/template/template.go +++ b/packages/template/template.go @@ -692,7 +692,7 @@ func Template2JSON(input string, timeout *bool, vars *map[string]string) []byte isvde := (*vars)[`vde`] == `true` || (*vars)[`vde`] == `1` sc := smart.SmartContract{ VDE: isvde, - VM: smart.GetVM(isvde, converter.StrToInt64((*vars)[`ecosystem_id`])), + VM: smart.GetVM(), TxSmart: tx.SmartContract{ Header: tx.Header{ EcosystemID: converter.StrToInt64((*vars)[`ecosystem_id`]), From 3249822355fa7cf011992021f6dbe9eb4cd151ed Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 10 May 2018 22:37:36 +0300 Subject: [PATCH 067/169] fix login --- packages/api/api.go | 37 ++++--- packages/api/login.go | 4 +- packages/migration/vde/vde_data_contracts.go | 111 +++++++++++++++---- packages/migration/vde/vde_data_pages.go | 2 +- packages/migration/vde/vde_schema.go | 29 +++++ packages/smart/smart.go | 8 +- 6 files changed, 148 insertions(+), 43 deletions(-) diff --git a/packages/api/api.go b/packages/api/api.go index c24d3260e..9e55102aa 100644 --- a/packages/api/api.go +++ b/packages/api/api.go @@ -133,9 +133,6 @@ func errorAPI(w 
http.ResponseWriter, err interface{}, code int, params ...interf func getPrefix(data *apiData) (prefix string) { prefix = converter.Int64ToStr(data.ecosystemId) - if data.vde { - prefix += `_vde` - } return } @@ -274,6 +271,10 @@ func fillParams(params map[string]int) apiHandle { } func checkEcosystem(w http.ResponseWriter, data *apiData, logger *log.Entry) (int64, string, error) { + if conf.Config.IsSupportingVDE() { + return consts.DefaultVDE, "1", nil + } + ecosystemID := data.ecosystemId if data.params[`ecosystem`].(int64) > 0 { ecosystemID = data.params[`ecosystem`].(int64) @@ -288,9 +289,9 @@ func checkEcosystem(w http.ResponseWriter, data *apiData, logger *log.Entry) (in } } prefix := converter.Int64ToStr(ecosystemID) - if data.vde { - prefix += `_vde` - } + // if data.vde { + // prefix += `_vde` + // } return ecosystemID, prefix, nil } @@ -299,18 +300,20 @@ func fillTokenData(data *apiData, claims *JWTClaims, logger *log.Entry) error { data.keyId = converter.StrToInt64(claims.KeyID) data.isMobile = claims.IsMobile data.roleId = converter.StrToInt64(claims.RoleID) - ecosystem := &model.Ecosystem{} - found, err := ecosystem.Get(data.ecosystemId) - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on getting ecosystem from db") - return err - } + if !conf.Config.IsSupportingVDE() { + ecosystem := &model.Ecosystem{} + found, err := ecosystem.Get(data.ecosystemId) + if err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on getting ecosystem from db") + return err + } - if !found { - err := fmt.Errorf("ecosystem not found") - logger.WithFields(log.Fields{"type": consts.NotFound, "id": data.ecosystemId, "error": err}).Error("ecosystem not found") - } + if !found { + err := fmt.Errorf("ecosystem not found") + logger.WithFields(log.Fields{"type": consts.NotFound, "id": data.ecosystemId, "error": err}).Error("ecosystem not found") + } - data.ecosystemName = ecosystem.Name + data.ecosystemName = ecosystem.Name + } return nil } diff --git a/packages/api/login.go b/packages/api/login.go index ef8114139..d9c7f8de6 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -128,9 +128,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En params := make([]byte, 0) params = append(append(params, converter.EncodeLength(int64(len(hexPubKey)))...), hexPubKey...) - vm := smart.GetVM() - - contract := smart.VMGetContract(vm, "NewUser", 1) + contract := smart.GetContract("NewUser", 1) info := contract.Block.Info.(*script.ContractInfo) err = tx.BuildTransaction(tx.SmartContract{ diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index 4e5ca29ab..ea83e591c 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -483,38 +483,113 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c PermColumn($TableName, $Name, $Permissions) } }', 'ContractConditions("MainCondition")'), - ('18','NewLang','contract NewLang { + ('18','NewLang', 'contract NewLang { data { - Name string - Trans string - AppID int + ApplicationId int "optional" + Name string + Trans string "optional" + Value array "optional" + IdLanguage array "optional" } + conditions { - EvalCondition("parameters", "changing_language", "value") - var row array - row = DBFind("languages").Columns("name").Where("name=? 
AND app_id=?", $Name, $AppID).Limit(1) - if Len(row) > 0 { - error Sprintf("The language resource %%s already exists", $Name) + if $ApplicationId == 0 { + warning "Application id cannot equal 0" + } + + if DBFind("languages").Columns("id").Where("name = ?", $Name).One("id") { + warning Sprintf( "Language resource %%s already exists", $Name) } + + var j int + while j < Len($IdLanguage) { + if $IdLanguage[j] == "" { + info("Locale empty") + } + if $Value[j] == "" { + info("Value empty") + } + j = j + 1 + } + EvalCondition("parameters", "changing_language", "value") } + action { - DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) + var i,len,lenshar int + var res,langarr string + len = Len($IdLanguage) + lenshar = Len($Value) + while i < len { + if i + 1 == len { + res = res + Sprintf("%%q: %%q",$IdLanguage[i],$Value[i]) + } else { + res = res + Sprintf("%%q: %%q,",$IdLanguage[i],$Value[i]) + } + i = i + 1 + } + if len > 0 { + langarr = Sprintf("{"+"%%v"+"}", res) + $Trans = langarr + } + $result = CreateLanguage($Name, $Trans, $ApplicationId) } }', 'ContractConditions("MainCondition")'), ('19','EditLang','contract EditLang { data { - Id int - Name string - Trans string - AppID int + Id int + Name string "optional" + ApplicationId int "optional" + Trans string "optional" + Value array "optional" + IdLanguage array "optional" } + conditions { + var j int + while j < Len($IdLanguage) { + if ($IdLanguage[j] == ""){ + info("Locale empty") + } + if ($Value[j] == ""){ + info("Value empty") + } + j = j + 1 + } EvalCondition("parameters", "changing_language", "value") } + action { - DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) + var i,len int + var res,langarr string + len = Len($IdLanguage) + while i < len { + if (i + 1 == len){ + res = res + Sprintf("%%q: %%q", $IdLanguage[i],$Value[i]) + } + else { + res = res + Sprintf("%%q: %%q, ", $IdLanguage[i],$Value[i]) + } + i = i + 1 + } + + $row = DBFind("languages").Columns("name,app_id").WhereId($Id).Row() + if !$row{ + warning "Language not found" + } + + if $ApplicationId == 0 { + $ApplicationId = Int($row["app_id"]) + } + if $Name == "" { + $Name = $row["name"] + } + + if (len > 0){ + langarr = Sprintf("{"+"%%v"+"}", res) + $Trans = langarr + + } + EditLanguage($Id, $Name, $Trans, $ApplicationId) } }', 'ContractConditions("MainCondition")'), ('20','Import','contract Import { @@ -726,8 +801,6 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c if DBFind("keys").Columns("id").WhereId($newId).One("id") != nil { error "User already exists" } - - $amount = Money(1000) * Money(1000000000000000000) } action { DBInsert("keys", "id, pub", $newId, $NewPubKey) diff --git a/packages/migration/vde/vde_data_pages.go b/packages/migration/vde/vde_data_pages.go index 90ef6eab4..b013166b1 100644 --- a/packages/migration/vde/vde_data_pages.go +++ b/packages/migration/vde/vde_data_pages.go @@ -1,5 +1,5 @@ package vde var pagesDataSQL = ` -INSERT INTO "%[1]d_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); +INSERT INTO "%[1]d_pages" ("id","name","value","menu","conditions") VALUES('1', 'default_page', '', 'admin_menu', 'true'),('2','admin_index','','admin_menu','true'); ` diff --git a/packages/migration/vde/vde_schema.go b/packages/migration/vde/vde_schema.go index c3fda993f..7edf5da94 100644 --- a/packages/migration/vde/vde_schema.go +++ b/packages/migration/vde/vde_schema.go @@ 
-140,4 +140,33 @@ var schemaVDE = ` ); ALTER TABLE ONLY "%[1]d_tables" ADD CONSTRAINT "%[1]d_tables_pkey" PRIMARY KEY ("id"); CREATE INDEX "%[1]d_tables_index_name" ON "%[1]d_tables" (name); + + DROP TABLE IF EXISTS "%[1]d_notifications"; + CREATE TABLE "%[1]d_notifications" ( + "id" bigint NOT NULL DEFAULT '0', + "recipient" jsonb, + "sender" jsonb, + "notification" jsonb, + "page_params" jsonb, + "processing_info" jsonb, + "page_name" varchar(255) NOT NULL DEFAULT '', + "date_created" timestamp, + "date_start_processing" timestamp, + "date_closed" timestamp, + "closed" bigint NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_notifications" ADD CONSTRAINT "%[1]d_notifications_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_roles_participants"; + CREATE TABLE "%[1]d_roles_participants" ( + "id" bigint NOT NULL DEFAULT '0', + "role" jsonb, + "member" jsonb, + "appointed" jsonb, + "date_created" timestamp, + "date_deleted" timestamp, + "deleted" bigint NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_roles_participants" ADD CONSTRAINT "%[1]d_roles_participants_pkey" PRIMARY KEY ("id"); + ` diff --git a/packages/smart/smart.go b/packages/smart/smart.go index bb358e96e..7a42e7190 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -174,6 +174,7 @@ func VMRun(vm *script.VM, block *script.Block, params []interface{}, extend *map func VMGetContract(vm *script.VM, name string, state uint32) *Contract { name = script.StateName(state, name) obj, ok := vm.Objects[name] + if ok && obj.Type == script.ObjContract { return &Contract{Name: name, Block: obj.Value.(*script.Block)} } @@ -469,15 +470,15 @@ func LoadContract(transaction *model.DbTransaction, prefix string) (err error) { func LoadVDEContracts(transaction *model.DbTransaction, prefix string) (err error) { var contracts []map[string]string - if !model.IsTable(prefix + `_vde_contracts`) { + if !model.IsTable(prefix + `_contracts`) { return } - contracts, err = model.GetAllTransaction(transaction, `select * from "`+prefix+`_vde_contracts" order by id`, -1) + contracts, err = model.GetAllTransaction(transaction, `select * from "`+prefix+`_contracts" order by id`, -1) if err != nil { return err } state := converter.StrToInt64(prefix) - vm := newVM() + vm := GetVM() var vmt script.VMType if conf.Config.IsVDE() { @@ -502,6 +503,7 @@ func LoadVDEContracts(transaction *model.DbTransaction, prefix string) (err erro WalletID: 0, TokenID: 0, } + if err = vmCompile(vm, item[`value`], &owner); err != nil { log.WithFields(log.Fields{"names": names, "error": err}).Error("Load VDE Contract") } else { From fedf6dc319a58711b2d1e892dac8644eb78ec3f1 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 14 May 2018 09:18:14 +0300 Subject: [PATCH 068/169] temporary commit --- packages/api/login.go | 50 +++++++- packages/api/route.go | 6 +- packages/api/vde.go | 7 +- packages/api/vde_test.go | 120 ++----------------- packages/migration/vde/vde_data_contracts.go | 41 +++++++ packages/smart/smart.go | 1 + 6 files changed, 105 insertions(+), 120 deletions(-) diff --git a/packages/api/login.go b/packages/api/login.go index d9c7f8de6..9e0f9a07e 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -19,12 +19,14 @@ package api import ( "fmt" "net/http" + "strings" "time" "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/notificator" "github.com/GenesisKernel/go-genesis/packages/publisher" + msgpack 
"gopkg.in/vmihailenco/msgpack.v2" "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/crypto" @@ -131,20 +133,60 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En contract := smart.GetContract("NewUser", 1) info := contract.Block.Info.(*script.ContractInfo) - err = tx.BuildTransaction(tx.SmartContract{ + // scHeader, err := getHeader("NewUser", data) + if err != nil { + return errorAPI(w, "E_EMPTYOBJECT", http.StatusBadRequest) + } + + sc := tx.SmartContract{ Header: tx.Header{ Type: int(info.ID), Time: time.Now().Unix(), EcosystemID: 1, KeyID: conf.Config.KeyID, NetworkID: consts.NETWORK_ID, + PublicKey: pubkey, }, SignedBy: smart.PubToID(NodePublicKey), Data: params, - }, NodePrivateKey, NodePublicKey, string(hexPubKey)) - if err != nil { - log.WithFields(log.Fields{"type": consts.ContractError}).Error("Executing contract") } + + if conf.Config.IsSupportingVDE() { + + signPrms := []string{sc.ForSign()} + signPrms = append(signPrms, string(hexPubKey)) + signature, err := crypto.Sign( + NodePrivateKey, + strings.Join(signPrms, ","), + ) + if err != nil { + log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("signing by node private key") + return err + } + sc.BinSignatures = converter.EncodeLengthPlusData(signature) + serializedContract, err := msgpack.Marshal(sc) + if err != nil { + logger.WithFields(log.Fields{"type": consts.MarshallingError, "error": err}).Error("marshalling smart contract to msgpack") + return errorAPI(w, err, http.StatusInternalServerError) + } + // signature := data.params[`signature`].([]byte) + // if len(signature) == 0 { + // log.WithFields(log.Fields{"type": consts.EmptyObject, "params": data.params}).Error("signature is empty") + // } + + fmt.Println(len(signature)) + ret, err := VDEContract(serializedContract, data) + if err != nil { + return errorAPI(w, err, http.StatusInternalServerError) + } + data.result = ret + } else { + err = tx.BuildTransaction(sc, NodePrivateKey, NodePublicKey, string(hexPubKey)) + if err != nil { + log.WithFields(log.Fields{"type": consts.ContractError}).Error("Executing contract") + } + } + } if ecosystemID > 1 && len(pubkey) == 0 { diff --git a/packages/api/route.go b/packages/api/route.go index be778c441..16f4857a4 100644 --- a/packages/api/route.go +++ b/packages/api/route.go @@ -59,7 +59,7 @@ func Route(route *hr.Router) { get(`interface/page/:name`, ``, authWallet, getPageRow) get(`interface/menu/:name`, ``, authWallet, getMenuRow) get(`interface/block/:name`, ``, authWallet, getBlockInterfaceRow) - get(`systemparams`, `?names:string`, authWallet, systemParams) + // get(`systemparams`, `?names:string`, authWallet, systemParams) get(`table/:name`, ``, authWallet, table) get(`tables`, `?limit ?offset:int64`, authWallet, tables) get(`test/:name`, ``, getTest) @@ -81,7 +81,7 @@ func Route(route *hr.Router) { post(`test/:name`, ``, getTest) post(`content`, `template ?source:string`, jsonContent) post(`updnotificator`, `ids:string`, updateNotificator) - + get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) methodRoute(route, `POST`, `node/:name`, `?token_ecosystem:int64,?max_sum ?payover:string`, contractHandlers.nodeContract) if !conf.Config.IsSupportingVDE() { @@ -92,7 +92,7 @@ func Route(route *hr.Router) { get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) get(`block/:id`, ``, getBlockInfo) get(`maxblockid`, ``, getMaxBlockID) - get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, 
ecosystemParam) + get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) get(`systemparams`, `?names:string`, authWallet, systemParams) get(`ecosystems`, ``, authWallet, ecosystems) diff --git a/packages/api/vde.go b/packages/api/vde.go index cf83ec6b0..9891ffddb 100644 --- a/packages/api/vde.go +++ b/packages/api/vde.go @@ -173,17 +173,22 @@ func VDEContract(contractData []byte, data *apiData) (result *contractResult, er result.Message = &txstatusError{Type: "panic", Error: err.Error()} return } + if data.token != nil && data.token.Valid { if auth, err := data.token.SignedString([]byte(jwtSecret)); err == nil { sc.TxData[`auth_token`] = auth } } + if ret, err = sc.CallContract(smart.CallInit | smart.CallCondition | smart.CallAction); err == nil { result.Result = ret } else { if errResult := json.Unmarshal([]byte(err.Error()), &result.Message); errResult != nil { - log.WithFields(log.Fields{"type": consts.JSONUnmarshallError, "text": err.Error(), + log.WithFields(log.Fields{ + "type": consts.JSONUnmarshallError, + "text": err.Error(), "error": errResult}).Error("unmarshalling contract error") + result.Message = &txstatusError{Type: "panic", Error: errResult.Error()} } } diff --git a/packages/api/vde_test.go b/packages/api/vde_test.go index c0d6b7d68..bd32c97de 100644 --- a/packages/api/vde_test.go +++ b/packages/api/vde_test.go @@ -24,6 +24,7 @@ import ( "time" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" @@ -33,121 +34,16 @@ import ( ) func TestVDECreate(t *testing.T) { - var ( - err error - retid int64 - ret vdeCreateResult - ) - - assert.NoError(t, keyLogin(1)) - - if err = sendPost(`vde/create`, nil, &ret); err != nil && - err.Error() != `400 {"error": "E_VDECREATED", "msg": "Virtual Dedicated Ecosystem is already created" }` { - t.Error(err) - return - } - - rnd := `rnd` + crypto.RandSeq(6) - form := url.Values{`Value`: {`contract ` + rnd + ` { - data { - Par string - } - action { Test("active", $Par)}}`}, `Conditions`: {`ContractConditions("MainCondition")`}, `vde`: {`true`}} - - retid, _, err = postTxResult(`NewContract`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`contract ` + rnd + ` { - data { - Par string - } - action { Test("active 5", $Par)}}`}, `Conditions`: {`ContractConditions("MainCondition")`}, `vde`: {`true`}} - assert.NoError(t, postTx(`EditContract`, &form)) - - form = url.Values{`Name`: {rnd}, `Value`: {`Test value`}, `Conditions`: {`ContractConditions("MainCondition")`}, - `vde`: {`1`}} - - retid, _, err = postTxResult(`NewParameter`, &form) - assert.NoError(t, err) + require.NoError(t, keyLogin(1)) - form = url.Values{`Name`: {`new_table`}, `Value`: {`Test value`}, `Conditions`: {`ContractConditions("MainCondition")`}, - `vde`: {`1`}} - if err = postTx(`NewParameter`, &form); err != nil && err.Error() != - `500 {"error": "E_SERVER", "msg": "{\"type\":\"warning\",\"error\":\"Parameter new_table already exists\"}" }` { - t.Error(err) - return + form := url.Values{ + "VDEName": {"testvde"}, + "DBUser": {"vdeuser"}, + "DBPassword": {"vdepassword"}, + "VDEAPIPort": {"8000"}, } - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit value`}, `Conditions`: {`true`}, - `vde`: {`1`}} - - assert.NoError(t, postTx(`EditParameter`, &form)) - - form = url.Values{"Name": {`menu` + rnd}, "Value": {`first - second - third`}, "Title": 
{`My Menu`}, - "Conditions": {`true`}, `vde`: {`1`}} - retid, _, err = postTxResult(`NewMenu`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit value`}, - `Conditions`: {`true`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`EditMenu`, &form)) - - form = url.Values{"Id": {converter.Int64ToStr(retid)}, "Value": {`Span(Append)`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`AppendMenu`, &form)) - - form = url.Values{"Name": {`page` + rnd}, "Value": {`Page`}, "Menu": {`government`}, - "Conditions": {`true`}, `vde`: {`1`}} - retid, _, err = postTxResult(`NewPage`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit page value`}, - `Conditions`: {`true`}, "Menu": {`government`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`EditPage`, &form)) - - form = url.Values{"Id": {converter.Int64ToStr(retid)}, "Value": {`Span(Test Page)`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`AppendPage`, &form)) - - form = url.Values{"Name": {`block` + rnd}, "Value": {`Page block`}, "Conditions": {`true`}, `vde`: {`1`}} - retid, _, err = postTxResult(`NewBlock`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit block value`}, - `Conditions`: {`true`}, `vde`: {`1`}} - assert.NoError(t, postTx(`EditBlock`, &form)) - - name := randName(`tbl`) - form = url.Values{"Name": {name}, `vde`: {`true`}, "Columns": {`[{"name":"MyName","type":"varchar", "index": "1", - "conditions":"true"}, - {"name":"Amount", "type":"number","index": "0", "conditions":"true"}, - {"name":"Active", "type":"character","index": "0", "conditions":"true"}]`}, - "Permissions": {`{"insert": "true", "update" : "true", "new_column": "true"}`}} - assert.NoError(t, postTx(`NewTable`, &form)) - - form = url.Values{"Name": {name}, `vde`: {`true`}, - "Permissions": {`{"insert": "ContractConditions(\"MainCondition\")", - "update" : "true", "new_column": "ContractConditions(\"MainCondition\")"}`}} - assert.NoError(t, postTx(`EditTable`, &form)) - - form = url.Values{"TableName": {name}, "Name": {`newCol`}, `vde`: {`1`}, - "Type": {"varchar"}, "Index": {"0"}, "Permissions": {"true"}} - assert.NoError(t, postTx(`NewColumn`, &form)) - - form = url.Values{"TableName": {name}, "Name": {`newColRead`}, `vde`: {`1`}, - "Type": {"varchar"}, "Index": {"0"}, "Permissions": {`{"update":"true", "read":"false"}`}} - assert.NoError(t, postTx(`NewColumn`, &form)) - - form = url.Values{"TableName": {name}, "Name": {`newCol`}, `vde`: {`1`}, - "Permissions": {"ContractConditions(\"MainCondition\")"}} - assert.NoError(t, postTx(`EditColumn`, &form)) + require.NoError(t, postTx("NewVDE", &form)) - form = url.Values{"TableName": {name}, "Name": {`newCol`}, `vde`: {`1`}, - "Permissions": {`{"update":"true", "read":"false"}`}} - assert.NoError(t, postTx(`EditColumn`, &form)) } func TestVDEParams(t *testing.T) { diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index ea83e591c..755e626c7 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -794,6 +794,7 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c NewPubkey string } conditions { + Println($NewPubkey) $newId = PubToID($NewPubkey) if $newId == 0 { error "Wrong pubkey" @@ -805,4 +806,44 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c action { DBInsert("keys", "id, pub", 
$newId, $NewPubKey) } + }', 'ContractConditions("MainCondition")'), + ('25', 'NewVDE', 'contract NewVDE { + data { + VDEName string + DBUser string + DBPassword string + VDEAPIPort int + } + + conditions { + } + + action { + CreateVDE($VDEName, $DBUser, $DBPassword, $VDEAPIPort) + } + }', 'ContractConditions("MainCondition")'), + ('26', 'ListVDE', 'contract ListVDE { + data { + VDEName string + } + + conditions { + + } + + action { + GetVDEList($VDEName) + } + }', 'ContractConditions("MainCondition")'), + ('27', 'RunVDE', 'contract RunVDE { + data { + VDEName string + } + + conditions { + } + + action { + StartVDE($VDEName) + } }', 'ContractConditions("MainCondition")');` diff --git a/packages/smart/smart.go b/packages/smart/smart.go index 7a42e7190..8da0cc30b 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -860,6 +860,7 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { return retError(ErrEmptyPublicKey) } sc.PublicKeys = append(sc.PublicKeys, public) + var CheckSignResult bool CheckSignResult, err = utils.CheckSign(sc.PublicKeys, sc.TxData[`forsign`].(string), sc.TxSmart.BinSignatures, false) if err != nil { From 5d08f8383c4ca4504b203bc0c2294a8d439ba101 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 12:40:33 +0300 Subject: [PATCH 069/169] move changes --- packages/migration/vde/vde.go | 958 ++++++++++++++++++++++++++++++++++ packages/smart/funcs.go | 15 + packages/vdemanager/config.go | 4 +- 3 files changed, 975 insertions(+), 2 deletions(-) create mode 100644 packages/migration/vde/vde.go diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde.go new file mode 100644 index 000000000..b63cf858d --- /dev/null +++ b/packages/migration/vde/vde.go @@ -0,0 +1,958 @@ +package migration + +var SchemaVDE = ` + DROP TABLE IF EXISTS "%[1]d_vde_members"; + CREATE TABLE "%[1]d_vde_members" ( + "id" bigint NOT NULL DEFAULT '0', + "member_name" varchar(255) NOT NULL DEFAULT '', + "image_id" bigint, + "member_info" jsonb + ); + ALTER TABLE ONLY "%[1]d_vde_members" ADD CONSTRAINT "%[1]d_vde_members_pkey" PRIMARY KEY ("id"); + + INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('%[2]d', 'founder'); + INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('4544233900443112470', 'guest'); + + DROP TABLE IF EXISTS "%[1]d_vde_languages"; CREATE TABLE "%[1]d_vde_languages" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(100) NOT NULL DEFAULT '', + "res" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_languages" ADD CONSTRAINT "%[1]d_vde_languages_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_vde_languages_index_name" ON "%[1]d_vde_languages" (name); + + DROP TABLE IF EXISTS "%[1]d_vde_menu"; CREATE TABLE "%[1]d_vde_menu" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "title" character varying(255) NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_menu" ADD CONSTRAINT "%[1]d_vde_menu_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_vde_menu_index_name" ON "%[1]d_vde_menu" (name); + + + INSERT INTO "%[1]d_vde_menu" ("id","name","title","value","conditions") VALUES('2','admin_menu','Admin menu','MenuItem( + Icon: "icon-screen-desktop", + Page: "interface", + Vde: "true", + Title: "Interface" +) +MenuItem( + Icon: "icon-docs", + Page: "tables", + Vde: "true", + Title: "Tables" +) +MenuItem( + Icon: "icon-briefcase", + Page: "contracts", + Vde: "true", + Title: 
"Smart Contracts" +) +MenuItem( + Icon: "icon-settings", + Page: "parameters", + Vde: "true", + Title: "Ecosystem parameters" +) +MenuItem( + Icon: "icon-globe", + Page: "languages", + Vde: "true", + Title: "Language resources" +) +MenuItem( + Icon: "icon-cloud-upload", + Page: "import", + Vde: "true", + Title: "Import" +) +MenuItem( + Icon: "icon-cloud-download", + Page: "export", + Vde: "true", + Title: "Export" +)','true'); + + DROP TABLE IF EXISTS "%[1]d_vde_pages"; CREATE TABLE "%[1]d_vde_pages" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "menu" character varying(255) NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '', + "validate_count" bigint NOT NULL DEFAULT '1', + "app_id" bigint NOT NULL DEFAULT '0', + "validate_mode" character(1) NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_vde_pages" ADD CONSTRAINT "%[1]d_vde_pages_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_vde_pages_index_name" ON "%[1]d_vde_pages" (name); + + INSERT INTO "%[1]d_vde_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); + + DROP TABLE IF EXISTS "%[1]d_vde_blocks"; CREATE TABLE "%[1]d_vde_blocks" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_blocks" ADD CONSTRAINT "%[1]d_vde_blocks_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_vde_blocks_index_name" ON "%[1]d_vde_blocks" (name); + + DROP TABLE IF EXISTS "%[1]d_vde_signatures"; CREATE TABLE "%[1]d_vde_signatures" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(100) NOT NULL DEFAULT '', + "value" jsonb, + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_signatures" ADD CONSTRAINT "%[1]d_vde_signatures_pkey" PRIMARY KEY (name); + + CREATE TABLE "%[1]d_vde_contracts" ( + "id" bigint NOT NULL DEFAULT '0', + "name" text NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_contracts" ADD CONSTRAINT "%[1]d_vde_contracts_pkey" PRIMARY KEY (id); + + DROP TABLE IF EXISTS "%[1]d_vde_parameters"; + CREATE TABLE "%[1]d_vde_parameters" ( + "id" bigint NOT NULL DEFAULT '0', + "name" varchar(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_parameters" ADD CONSTRAINT "%[1]d_vde_parameters_pkey" PRIMARY KEY ("id"); + CREATE INDEX "%[1]d_vde_parameters_index_name" ON "%[1]d_vde_parameters" (name); + + INSERT INTO "%[1]d_vde_parameters" ("id","name", "value", "conditions") VALUES + ('1','founder_account', '%[2]d', 'ContractConditions("MainCondition")'), + ('2','new_table', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('3','new_column', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('4','changing_tables', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('5','changing_language', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('6','changing_signature', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('7','changing_page', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('8','changing_menu', 'ContractConditions("MainCondition")', 
'ContractConditions("MainCondition")'), + ('9','changing_contracts', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('10','stylesheet', 'body { + /* You can define your custom styles here or create custom CSS rules */ + }', 'ContractConditions("MainCondition")'), + ('11','changing_blocks', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'); + + DROP TABLE IF EXISTS "%[1]d_vde_cron"; + CREATE TABLE "%[1]d_vde_cron" ( + "id" bigint NOT NULL DEFAULT '0', + "owner" bigint NOT NULL DEFAULT '0', + "cron" varchar(255) NOT NULL DEFAULT '', + "contract" varchar(255) NOT NULL DEFAULT '', + "counter" bigint NOT NULL DEFAULT '0', + "till" timestamp NOT NULL DEFAULT timestamp '1970-01-01 00:00:00', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_cron" ADD CONSTRAINT "%[1]d_vde_cron_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_vde_binaries"; + CREATE TABLE "%[1]d_vde_binaries" ( + "id" bigint NOT NULL DEFAULT '0', + "app_id" bigint NOT NULL DEFAULT '1', + "member_id" bigint NOT NULL DEFAULT '0', + "name" varchar(255) NOT NULL DEFAULT '', + "data" bytea NOT NULL DEFAULT '', + "hash" varchar(32) NOT NULL DEFAULT '', + "mime_type" varchar(255) NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_binaries" ADD CONSTRAINT "%[1]d_vde_binaries_pkey" PRIMARY KEY (id); + CREATE UNIQUE INDEX "%[1]d_vde_binaries_index_app_id_member_id_name" ON "%[1]d_vde_binaries" (app_id, member_id, name); + + CREATE TABLE "%[1]d_vde_tables" ( + "id" bigint NOT NULL DEFAULT '0', + "name" varchar(100) UNIQUE NOT NULL DEFAULT '', + "permissions" jsonb, + "columns" jsonb, + "conditions" text NOT NULL DEFAULT '', + "app_id" bigint NOT NULL DEFAULT '1' + ); + ALTER TABLE ONLY "%[1]d_vde_tables" ADD CONSTRAINT "%[1]d_vde_tables_pkey" PRIMARY KEY ("id"); + CREATE INDEX "%[1]d_vde_tables_index_name" ON "%[1]d_vde_tables" (name); + + INSERT INTO "%[1]d_vde_tables" ("id", "name", "permissions","columns", "conditions") VALUES ('1', 'contracts', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "false", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), + ('2', 'languages', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{ "name": "ContractConditions(\"MainCondition\")", + "res": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), + ('3', 'menu', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('4', 'pages', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", + "value": "ContractConditions(\"MainCondition\")", + "menu": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")", + "validate_count": 
"ContractConditions(\"MainCondition\")", + "validate_mode": "ContractConditions(\"MainCondition\")", + "app_id": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('5', 'blocks', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('6', 'signatures', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('7', 'cron', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"owner": "ContractConditions(\"MainCondition\")", + "cron": "ContractConditions(\"MainCondition\")", + "contract": "ContractConditions(\"MainCondition\")", + "counter": "ContractConditions(\"MainCondition\")", + "till": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractConditions("MainCondition")'), + ('8', 'binaries', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"app_id": "ContractConditions(\"MainCondition\")", + "member_id": "ContractConditions(\"MainCondition\")", + "name": "ContractConditions(\"MainCondition\")", + "data": "ContractConditions(\"MainCondition\")", + "hash": "ContractConditions(\"MainCondition\")", + "mime_type": "ContractConditions(\"MainCondition\")"}', + 'ContractConditions("MainCondition")'); + + INSERT INTO "%[1]d_vde_contracts" ("id", "name", "value", "conditions") VALUES + ('1','MainCondition','contract MainCondition { + conditions { + if EcosysParam("founder_account")!=$key_id + { + warning "Sorry, you do not have access to this action." 
+ } + } + }', 'ContractConditions("MainCondition")'), + ('2','NewContract','contract NewContract { + data { + Value string + Conditions string + Wallet string "optional" + TokenEcosystem int "optional" + ApplicationId int "optional" + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + $walletContract = $key_id + if $Wallet { + $walletContract = AddressToId($Wallet) + if $walletContract == 0 { + error Sprintf("wrong wallet %%s", $Wallet) + } + } + var list array + list = ContractsList($Value) + + if Len(list) == 0 { + error "must be the name" + } + + var i int + while i < Len(list) { + if IsObject(list[i], $ecosystem_id) { + warning Sprintf("Contract or function %%s exists", list[i] ) + } + i = i + 1 + } + + $contract_name = list[0] + if !$TokenEcosystem { + $TokenEcosystem = 1 + } else { + if !SysFuel($TokenEcosystem) { + warning Sprintf("Ecosystem %%d is not system", $TokenEcosystem ) + } + } + } + action { + var root, id int + root = CompileContract($Value, $ecosystem_id, $walletContract, $TokenEcosystem) + id = DBInsert("contracts", "name,value,conditions, wallet_id, token_id,app_id", + $contract_name, $Value, $Conditions, $walletContract, $TokenEcosystem, $ApplicationId) + FlushContract(root, id, false) + $result = id + } + func rollback() { + var list array + list = ContractsList($Value) + var i int + while i < Len(list) { + RollbackContract(list[i]) + i = i + 1 + } + } + func price() int { + return SysParamInt("contract_price") + } + }', 'ContractConditions("MainCondition")'), + ('3','EditContract','contract EditContract { + data { + Id int + Value string "optional" + Conditions string "optional" + } + + func onlyConditions() bool { + return $Conditions && !$Value + } + conditions { + RowConditions("contracts", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + + var row array + row = DBFind("contracts").Columns("id,value,conditions").WhereId($Id) + if !Len(row) { + error Sprintf("Contract %%d does not exist", $Id) + } + $cur = row[0] + if $Value { + var list, curlist array + list = ContractsList($Value) + curlist = ContractsList($cur["value"]) + if Len(list) != Len(curlist) { + error "Contracts cannot be removed or inserted" + } + var i int + while i < Len(list) { + var j int + var ok bool + while j < Len(curlist) { + if curlist[j] == list[i] { + ok = true + break + } + j = j + 1 + } + if !ok { + error "Contracts or functions names cannot be changed" + } + i = i + 1 + } + } + } + action { + var root int + var pars, vals array + + if $Value { + root = CompileContract($Value, $ecosystem_id, 0, 0) + pars[0] = "value" + vals[0] = $Value + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if Len(vals) > 0 { + DBUpdate("contracts", $Id, Join(pars, ","), vals...) 
+ } + if $Value { + FlushContract(root, $Id, false) + } + } + }', 'ContractConditions("MainCondition")'), + ('4','NewParameter','contract NewParameter { + data { + Name string + Value string + Conditions string + } + conditions { + var ret array + ValidateCondition($Conditions, $ecosystem_id) + ret = DBFind("parameters").Columns("id").Where("name=?", $Name).Limit(1) + if Len(ret) > 0 { + warning Sprintf( "Parameter %%s already exists", $Name) + } + } + action { + $result = DBInsert("parameters", "name,value,conditions", $Name, $Value, $Conditions ) + } + }', 'ContractConditions("MainCondition")'), + ('5','EditParameter','contract EditParameter { + data { + Id int + Value string + Conditions string + } + func onlyConditions() bool { + return $Conditions && !$Value + } + conditions { + RowConditions("parameters", $Id, onlyConditions()) + ValidateCondition($Conditions, $ecosystem_id) + } + action { + DBUpdate("parameters", $Id, "value,conditions", $Value, $Conditions ) + } + }', 'ContractConditions("MainCondition")'), + ('6', 'NewMenu','contract NewMenu { + data { + Name string + Value string + Title string "optional" + Conditions string + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + + var row map + row = DBRow("menu").Columns("id").Where("name = ?", $Name) + + if row { + warning Sprintf( "Menu %%s already exists", $Name) + } + } + action { + DBInsert("menu", "name,value,title,conditions", $Name, $Value, $Title, $Conditions ) + } + func price() int { + return SysParamInt("menu_price") + } + }', 'ContractConditions("MainCondition")'), + ('7','EditMenu','contract EditMenu { + data { + Id int + Value string "optional" + Title string "optional" + Conditions string "optional" + } + + func onlyConditions() bool { + return $Conditions && !$Value && !$Title + } + conditions { + RowConditions("menu", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + } + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Title { + pars[Len(pars)] = "title" + vals[Len(vals)] = $Title + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if Len(vals) > 0 { + DBUpdate("menu", $Id, Join(pars, ","), vals...) 
+ } + } + }', 'ContractConditions("MainCondition")'), + ('8','AppendMenu','contract AppendMenu { + data { + Id int + Value string + } + conditions { + RowConditions("menu", $Id, false) + } + action { + var row map + row = DBRow("menu").Columns("value").WhereId($Id) + DBUpdate("menu", $Id, "value", row["value"] + "\r\n" + $Value) + } + }', 'ContractConditions("MainCondition")'), + ('9','NewPage','contract NewPage { + data { + Name string + Value string + Menu string + Conditions string + ValidateCount int "optional" + ApplicationId int "optional" + ValidateMode int "optional" + } + func preparePageValidateCount(count int) int { + var min, max int + min = Int(EcosysParam("min_page_validate_count")) + max = Int(EcosysParam("max_page_validate_count")) + + if count < min { + count = min + } else { + if count > max { + count = max + } + } + + return count + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + + var row map + row = DBRow("pages").Columns("id").Where("name = ?", $Name) + + if row { + warning Sprintf( "Page %%s already exists", $Name) + } + + $ValidateCount = preparePageValidateCount($ValidateCount) + } + action { + DBInsert("pages", "name,value,menu,validate_count,conditions,app_id,validate_mode", + $Name, $Value, $Menu, $ValidateCount, $Conditions, $ApplicationId, $ValidateMode) + } + func price() int { + return SysParamInt("page_price") + } + }', 'ContractConditions("MainCondition")'), + ('10','EditPage','contract EditPage { + data { + Id int + Value string "optional" + Menu string "optional" + Conditions string "optional" + ValidateCount int "optional" + ValidateMode string "optional" + } + func onlyConditions() bool { + return $Conditions && !$Value && !$Menu + } + func preparePageValidateCount(count int) int { + var min, max int + min = Int(EcosysParam("min_page_validate_count")) + max = Int(EcosysParam("max_page_validate_count")) + + if count < min { + count = min + } else { + if count > max { + count = max + } + } + + return count + } + conditions { + RowConditions("pages", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + $ValidateCount = preparePageValidateCount($ValidateCount) + } + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Menu { + pars[Len(pars)] = "menu" + vals[Len(vals)] = $Menu + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if $ValidateCount { + pars[Len(pars)] = "validate_count" + vals[Len(vals)] = $ValidateCount + } + if $ValidateMode { + if $ValidateMode != "1" { + $ValidateMode = "0" + } + pars[Len(pars)] = "validate_mode" + vals[Len(vals)] = $ValidateMode + } + if Len(vals) > 0 { + DBUpdate("pages", $Id, Join(pars, ","), vals...) 
+ } + } + }', 'ContractConditions("MainCondition")'), + ('11','AppendPage','contract AppendPage { + data { + Id int + Value string + } + conditions { + RowConditions("pages", $Id, false) + } + action { + var row map + row = DBRow("pages").Columns("value").WhereId($Id) + DBUpdate("pages", $Id, "value", row["value"] + "\r\n" + $Value) + } + }', 'ContractConditions("MainCondition")'), + ('12','NewBlock','contract NewBlock { + data { + Name string + Value string + Conditions string + ApplicationId int "optional" + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + + var row map + row = DBRow("blocks").Columns("id").Where("name = ?", $Name) + + if row { + warning Sprintf( "Block %%s already exists", $Name) + } + } + action { + DBInsert("blocks", "name,value,conditions,app_id", $Name, $Value, $Conditions, $ApplicationId ) + } + }', 'ContractConditions("MainCondition")'), + ('13','EditBlock','contract EditBlock { + data { + Id int + Value string "optional" + Conditions string "optional" + } + + func onlyConditions() bool { + return $Conditions && !$Value + } + + conditions { + RowConditions("blocks", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + } + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if Len(vals) > 0 { + DBUpdate("blocks", $Id, Join(pars, ","), vals...) + } + } + }', 'ContractConditions("MainCondition")'), + ('14','NewTable','contract NewTable { + data { + Name string + Columns string + Permissions string + ApplicationId int "optional" + } + conditions { + TableConditions($Name, $Columns, $Permissions) + } + action { + CreateTable($Name, $Columns, $Permissions, $ApplicationId) + } + func rollback() { + RollbackTable($Name) + } + func price() int { + return SysParamInt("table_price") + } + }', 'ContractConditions("MainCondition")'), + ('15','EditTable','contract EditTable { + data { + Name string + Permissions string + } + conditions { + TableConditions($Name, "", $Permissions) + } + action { + PermTable($Name, $Permissions ) + } + }', 'ContractConditions("MainCondition")'), + ('16','NewColumn','contract NewColumn { + data { + TableName string + Name string + Type string + Permissions string + } + conditions { + ColumnCondition($TableName, $Name, $Type, $Permissions) + } + action { + CreateColumn($TableName, $Name, $Type, $Permissions) + } + }', 'ContractConditions("MainCondition")'), + ('17','EditColumn','contract EditColumn { + data { + TableName string + Name string + Permissions string + } + conditions { + ColumnCondition($TableName, $Name, "", $Permissions) + } + action { + PermColumn($TableName, $Name, $Permissions) + } + }', 'ContractConditions("MainCondition")'), + ('18','NewLang','contract NewLang { + data { + Name string + Trans string + AppID int + } + conditions { + EvalCondition("parameters", "changing_language", "value") + var row array + row = DBFind("languages").Columns("name").Where("name=? 
AND app_id=?", $Name, $AppID).Limit(1) + if Len(row) > 0 { + error Sprintf("The language resource %%s already exists", $Name) + } + } + action { + DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) + UpdateLang($AppID, $Name, $Trans) + } + }', 'ContractConditions("MainCondition")'), + ('19','EditLang','contract EditLang { + data { + Id int + Name string + Trans string + AppID int + } + conditions { + EvalCondition("parameters", "changing_language", "value") + } + action { + DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) + UpdateLang($AppID, $Name, $Trans) + } + }', 'ContractConditions("MainCondition")'), + ('20','Import','contract Import { + data { + Data string + } + conditions { + $list = JSONDecode($Data) + } + func ImportList(row array, cnt string) { + if !row { + return + } + var i int + while i < Len(row) { + var idata map + idata = row[i] + if(cnt == "pages"){ + $ret_page = DBFind("pages").Columns("id").Where("name=$", idata["Name"]) + $page_id = One($ret_page, "id") + if ($page_id != nil){ + idata["Id"] = Int($page_id) + CallContract("EditPage", idata) + } else { + CallContract("NewPage", idata) + } + } + if(cnt == "blocks"){ + $ret_block = DBFind("blocks").Columns("id").Where("name=$", idata["Name"]) + $block_id = One($ret_block, "id") + if ($block_id != nil){ + idata["Id"] = Int($block_id) + CallContract("EditBlock", idata) + } else { + CallContract("NewBlock", idata) + } + } + if(cnt == "menus"){ + $ret_menu = DBFind("menu").Columns("id,value").Where("name=$", idata["Name"]) + $menu_id = One($ret_menu, "id") + $menu_value = One($ret_menu, "value") + if ($menu_id != nil){ + idata["Id"] = Int($menu_id) + idata["Value"] = Str($menu_value) + "\n" + Str(idata["Value"]) + CallContract("EditMenu", idata) + } else { + CallContract("NewMenu", idata) + } + } + if(cnt == "parameters"){ + $ret_param = DBFind("parameters").Columns("id").Where("name=$", idata["Name"]) + $param_id = One($ret_param, "id") + if ($param_id != nil){ + idata["Id"] = Int($param_id) + CallContract("EditParameter", idata) + } else { + CallContract("NewParameter", idata) + } + } + if(cnt == "languages"){ + $ret_lang = DBFind("languages").Columns("id").Where("name=$", idata["Name"]) + $lang_id = One($ret_lang, "id") + if ($lang_id != nil){ + CallContract("EditLang", idata) + } else { + CallContract("NewLang", idata) + } + } + if(cnt == "contracts"){ + if IsObject(idata["Name"], $ecosystem_id){ + } else { + CallContract("NewContract", idata) + } + } + if(cnt == "tables"){ + $ret_table = DBFind("tables").Columns("id").Where("name=$", idata["Name"]) + $table_id = One($ret_table, "id") + if ($table_id != nil){ + } else { + CallContract("NewTable", idata) + } + } + i = i + 1 + } + } + func ImportData(row array) { + if !row { + return + } + var i int + while i < Len(row) { + var idata map + var list array + var tblname, columns string + idata = row[i] + i = i + 1 + tblname = idata["Table"] + columns = Join(idata["Columns"], ",") + list = idata["Data"] + if !list { + continue + } + var j int + while j < Len(list) { + var ilist array + ilist = list[j] + DBInsert(tblname, columns, ilist) + j=j+1 + } + } + } + action { + ImportList($list["pages"], "pages") + ImportList($list["blocks"], "blocks") + ImportList($list["menus"], "menus") + ImportList($list["parameters"], "parameters") + ImportList($list["languages"], "languages") + ImportList($list["contracts"], "contracts") + ImportList($list["tables"], "tables") + ImportData($list["data"]) + } + }', 'ContractConditions("MainCondition")'), + ('21', 
'NewCron','contract NewCron { + data { + Cron string + Contract string + Limit int "optional" + Till string "optional date" + Conditions string + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + ValidateCron($Cron) + } + action { + if !$Till { + $Till = "1970-01-01 00:00:00" + } + if !HasPrefix($Contract, "@") { + $Contract = "@" + Str($ecosystem_id) + $Contract + } + $result = DBInsert("cron", "owner,cron,contract,counter,till,conditions", + $key_id, $Cron, $Contract, $Limit, $Till, $Conditions) + UpdateCron($result) + } + }', 'ContractConditions("MainCondition")'), + ('22','EditCron','contract EditCron { + data { + Id int + Contract string + Cron string "optional" + Limit int "optional" + Till string "optional date" + Conditions string + } + conditions { + ConditionById("cron", true) + ValidateCron($Cron) + } + action { + if !$Till { + $Till = "1970-01-01 00:00:00" + } + if !HasPrefix($Contract, "@") { + $Contract = "@" + Str($ecosystem_id) + $Contract + } + DBUpdate("cron", $Id, "cron,contract,counter,till,conditions", + $Cron, $Contract, $Limit, $Till, $Conditions) + UpdateCron($Id) + } + }', 'ContractConditions("MainCondition")'), + ('23', 'UploadBinary', contract UploadBinary { + data { + Name string + Data bytes "file" + AppID int + DataMimeType string "optional" + MemberID int "optional" + } + conditions { + $Id = Int(DBFind("binaries").Columns("id").Where("app_id = ? AND member_id = ? AND name = ?", $AppID, $MemberID, $Name).One("id")) + } + action { + var hash string + hash = MD5($Data) + + if $DataMimeType == "" { + $DataMimeType = "application/octet-stream" + } + + if $Id != 0 { + DBUpdate("binaries", $Id, "data,hash,mime_type", $Data, hash, $DataMimeType) + } else { + $Id = DBInsert("binaries", "app_id,member_id,name,data,hash,mime_type", $AppID, $MemberID, $Name, $Data, hash, $DataMimeType) + } + + $result = $Id + } + }', 'ContractConditions("MainCondition")'); + ` diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 8358cb003..22861a3a7 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -270,6 +270,21 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { f["GetVDEList"] = GetVDEList vmExtendCost(vm, getCost) vmFuncCallsDB(vm, funcCallsDB) + case script.VMTypeVDEMaster: + f["HTTPRequest"] = HTTPRequest + f["GetMapKeys"] = GetMapKeys + f["SortedKeys"] = SortedKeys + f["Date"] = Date + f["HTTPPostJSON"] = HTTPPostJSON + f["ValidateCron"] = ValidateCron + f["UpdateCron"] = UpdateCron + f["CreateVDE"] = CreateVDE + f["DeleteVDE"] = DeleteVDE + f["StartVDE"] = StartVDE + f["StopVDE"] = StopVDE + f["GetVDEList"] = GetVDEList + vmExtendCost(vm, getCost) + vmFuncCallsDB(vm, funcCallsDB) case script.VMTypeSmart: f["GetBlock"] = GetBlock f["UpdateNodesBan"] = UpdateNodesBan diff --git a/packages/vdemanager/config.go b/packages/vdemanager/config.go index bcafa10ff..a3f6c30b8 100644 --- a/packages/vdemanager/config.go +++ b/packages/vdemanager/config.go @@ -31,10 +31,10 @@ func (c ChildVDEConfig) configCommand() *exec.Cmd { fmt.Sprintf("--dbUser=%s", c.DBUser), fmt.Sprintf("--dbPassword=%s", c.DBPassword), fmt.Sprintf("--dbName=%s", c.Name), - fmt.Sprintf("--httpPort=%d", c.HTTPPort), + fmt.Sprintf("--httpPort=%d", c.HTTPPort) fmt.Sprintf("--dataDir=%s", c.Directory), fmt.Sprintf("--keysDir=%s", c.Directory), - "--runMode=VDE", + fmt.Sprintf("--runMode=VDE") } return exec.Command(c.Executable, args...) 
From e380f79a2573e4869aa50e77707885c7257fd21f Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:37:49 +0300 Subject: [PATCH 070/169] separate routes by vde --- packages/api/route.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/api/route.go b/packages/api/route.go index 16f4857a4..ef84e9637 100644 --- a/packages/api/route.go +++ b/packages/api/route.go @@ -85,14 +85,15 @@ func Route(route *hr.Router) { methodRoute(route, `POST`, `node/:name`, `?token_ecosystem:int64,?max_sum ?payover:string`, contractHandlers.nodeContract) if !conf.Config.IsSupportingVDE() { + get(`txstatus/:hash`, ``, authWallet, txstatus) + get(`txstatusMultiple`, `data:string`, authWallet, txstatusMulti) get(`appparam/:appid/:name`, `?ecosystem:int64`, authWallet, appParam) get(`appparams/:appid`, `?ecosystem:int64,?names:string`, authWallet, appParams) - get(`txstatus/:hash`, ``, authWallet, txstatus) get(`history/:table/:id`, ``, authWallet, getHistory) get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) get(`block/:id`, ``, getBlockInfo) get(`maxblockid`, ``, getMaxBlockID) - + get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) get(`systemparams`, `?names:string`, authWallet, systemParams) get(`ecosystems`, ``, authWallet, ecosystems) From 300996a20a3ada64c5188b89a8cc382d2e822058 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:38:36 +0300 Subject: [PATCH 071/169] separate vde migration to own package --- packages/migration/vde/vde.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde.go index b63cf858d..640338e93 100644 --- a/packages/migration/vde/vde.go +++ b/packages/migration/vde/vde.go @@ -1,4 +1,4 @@ -package migration +package vde var SchemaVDE = ` DROP TABLE IF EXISTS "%[1]d_vde_members"; From 98c9fb28b3b9ef90fae4ed03417168feea10d4b7 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 15:59:38 +0300 Subject: [PATCH 072/169] temp commit --- packages/vdemanager/config.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/vdemanager/config.go b/packages/vdemanager/config.go index a3f6c30b8..bcafa10ff 100644 --- a/packages/vdemanager/config.go +++ b/packages/vdemanager/config.go @@ -31,10 +31,10 @@ func (c ChildVDEConfig) configCommand() *exec.Cmd { fmt.Sprintf("--dbUser=%s", c.DBUser), fmt.Sprintf("--dbPassword=%s", c.DBPassword), fmt.Sprintf("--dbName=%s", c.Name), - fmt.Sprintf("--httpPort=%d", c.HTTPPort) + fmt.Sprintf("--httpPort=%d", c.HTTPPort), fmt.Sprintf("--dataDir=%s", c.Directory), fmt.Sprintf("--keysDir=%s", c.Directory), - fmt.Sprintf("--runMode=VDE") + "--runMode=VDE", } return exec.Command(c.Executable, args...) 
From a6fbc3881d4309f7ef44bedc25509de2d70e71ad Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 10 May 2018 17:15:56 +0300 Subject: [PATCH 073/169] temporary commit --- packages/daylight/start.go | 7 + packages/migration/vde/vde.go | 958 ------------------- packages/migration/vde/vde_data_contracts.go | 152 +-- 3 files changed, 26 insertions(+), 1091 deletions(-) delete mode 100644 packages/migration/vde/vde.go diff --git a/packages/daylight/start.go b/packages/daylight/start.go index 98394511e..aede916a8 100644 --- a/packages/daylight/start.go +++ b/packages/daylight/start.go @@ -279,6 +279,13 @@ func Start() { } } + if conf.Config.IsSupportingVDE() { + if err := smart.LoadVDEContracts(nil, converter.Int64ToStr(consts.DefaultVDE)); err != nil { + log.WithFields(log.Fields{"type": consts.VMError, "error": err}).Fatal("on loading vde virtual mashine") + Exit(1) + } + } + if conf.Config.IsVDEMaster() { vdemanager.InitVDEManager() } diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde.go deleted file mode 100644 index 640338e93..000000000 --- a/packages/migration/vde/vde.go +++ /dev/null @@ -1,958 +0,0 @@ -package vde - -var SchemaVDE = ` - DROP TABLE IF EXISTS "%[1]d_vde_members"; - CREATE TABLE "%[1]d_vde_members" ( - "id" bigint NOT NULL DEFAULT '0', - "member_name" varchar(255) NOT NULL DEFAULT '', - "image_id" bigint, - "member_info" jsonb - ); - ALTER TABLE ONLY "%[1]d_vde_members" ADD CONSTRAINT "%[1]d_vde_members_pkey" PRIMARY KEY ("id"); - - INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('%[2]d', 'founder'); - INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('4544233900443112470', 'guest'); - - DROP TABLE IF EXISTS "%[1]d_vde_languages"; CREATE TABLE "%[1]d_vde_languages" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(100) NOT NULL DEFAULT '', - "res" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_languages" ADD CONSTRAINT "%[1]d_vde_languages_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_languages_index_name" ON "%[1]d_vde_languages" (name); - - DROP TABLE IF EXISTS "%[1]d_vde_menu"; CREATE TABLE "%[1]d_vde_menu" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "title" character varying(255) NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_menu" ADD CONSTRAINT "%[1]d_vde_menu_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_menu_index_name" ON "%[1]d_vde_menu" (name); - - - INSERT INTO "%[1]d_vde_menu" ("id","name","title","value","conditions") VALUES('2','admin_menu','Admin menu','MenuItem( - Icon: "icon-screen-desktop", - Page: "interface", - Vde: "true", - Title: "Interface" -) -MenuItem( - Icon: "icon-docs", - Page: "tables", - Vde: "true", - Title: "Tables" -) -MenuItem( - Icon: "icon-briefcase", - Page: "contracts", - Vde: "true", - Title: "Smart Contracts" -) -MenuItem( - Icon: "icon-settings", - Page: "parameters", - Vde: "true", - Title: "Ecosystem parameters" -) -MenuItem( - Icon: "icon-globe", - Page: "languages", - Vde: "true", - Title: "Language resources" -) -MenuItem( - Icon: "icon-cloud-upload", - Page: "import", - Vde: "true", - Title: "Import" -) -MenuItem( - Icon: "icon-cloud-download", - Page: "export", - Vde: "true", - Title: "Export" -)','true'); - - DROP TABLE IF EXISTS "%[1]d_vde_pages"; CREATE TABLE "%[1]d_vde_pages" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL 
DEFAULT '', - "menu" character varying(255) NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '', - "validate_count" bigint NOT NULL DEFAULT '1', - "app_id" bigint NOT NULL DEFAULT '0', - "validate_mode" character(1) NOT NULL DEFAULT '0' - ); - ALTER TABLE ONLY "%[1]d_vde_pages" ADD CONSTRAINT "%[1]d_vde_pages_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_pages_index_name" ON "%[1]d_vde_pages" (name); - - INSERT INTO "%[1]d_vde_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); - - DROP TABLE IF EXISTS "%[1]d_vde_blocks"; CREATE TABLE "%[1]d_vde_blocks" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_blocks" ADD CONSTRAINT "%[1]d_vde_blocks_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_blocks_index_name" ON "%[1]d_vde_blocks" (name); - - DROP TABLE IF EXISTS "%[1]d_vde_signatures"; CREATE TABLE "%[1]d_vde_signatures" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(100) NOT NULL DEFAULT '', - "value" jsonb, - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_signatures" ADD CONSTRAINT "%[1]d_vde_signatures_pkey" PRIMARY KEY (name); - - CREATE TABLE "%[1]d_vde_contracts" ( - "id" bigint NOT NULL DEFAULT '0', - "name" text NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_contracts" ADD CONSTRAINT "%[1]d_vde_contracts_pkey" PRIMARY KEY (id); - - DROP TABLE IF EXISTS "%[1]d_vde_parameters"; - CREATE TABLE "%[1]d_vde_parameters" ( - "id" bigint NOT NULL DEFAULT '0', - "name" varchar(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_parameters" ADD CONSTRAINT "%[1]d_vde_parameters_pkey" PRIMARY KEY ("id"); - CREATE INDEX "%[1]d_vde_parameters_index_name" ON "%[1]d_vde_parameters" (name); - - INSERT INTO "%[1]d_vde_parameters" ("id","name", "value", "conditions") VALUES - ('1','founder_account', '%[2]d', 'ContractConditions("MainCondition")'), - ('2','new_table', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('3','new_column', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('4','changing_tables', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('5','changing_language', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('6','changing_signature', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('7','changing_page', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('8','changing_menu', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('9','changing_contracts', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('10','stylesheet', 'body { - /* You can define your custom styles here or create custom CSS rules */ - }', 'ContractConditions("MainCondition")'), - ('11','changing_blocks', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'); - - DROP TABLE IF EXISTS "%[1]d_vde_cron"; - CREATE TABLE "%[1]d_vde_cron" ( - "id" bigint NOT NULL DEFAULT '0', - "owner" bigint NOT NULL DEFAULT '0', - "cron" varchar(255) NOT NULL DEFAULT '', - "contract" varchar(255) NOT NULL DEFAULT '', - "counter" 
bigint NOT NULL DEFAULT '0', - "till" timestamp NOT NULL DEFAULT timestamp '1970-01-01 00:00:00', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_cron" ADD CONSTRAINT "%[1]d_vde_cron_pkey" PRIMARY KEY ("id"); - - DROP TABLE IF EXISTS "%[1]d_vde_binaries"; - CREATE TABLE "%[1]d_vde_binaries" ( - "id" bigint NOT NULL DEFAULT '0', - "app_id" bigint NOT NULL DEFAULT '1', - "member_id" bigint NOT NULL DEFAULT '0', - "name" varchar(255) NOT NULL DEFAULT '', - "data" bytea NOT NULL DEFAULT '', - "hash" varchar(32) NOT NULL DEFAULT '', - "mime_type" varchar(255) NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_binaries" ADD CONSTRAINT "%[1]d_vde_binaries_pkey" PRIMARY KEY (id); - CREATE UNIQUE INDEX "%[1]d_vde_binaries_index_app_id_member_id_name" ON "%[1]d_vde_binaries" (app_id, member_id, name); - - CREATE TABLE "%[1]d_vde_tables" ( - "id" bigint NOT NULL DEFAULT '0', - "name" varchar(100) UNIQUE NOT NULL DEFAULT '', - "permissions" jsonb, - "columns" jsonb, - "conditions" text NOT NULL DEFAULT '', - "app_id" bigint NOT NULL DEFAULT '1' - ); - ALTER TABLE ONLY "%[1]d_vde_tables" ADD CONSTRAINT "%[1]d_vde_tables_pkey" PRIMARY KEY ("id"); - CREATE INDEX "%[1]d_vde_tables_index_name" ON "%[1]d_vde_tables" (name); - - INSERT INTO "%[1]d_vde_tables" ("id", "name", "permissions","columns", "conditions") VALUES ('1', 'contracts', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "false", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), - ('2', 'languages', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{ "name": "ContractConditions(\"MainCondition\")", - "res": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), - ('3', 'menu', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('4', 'pages', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "menu": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")", - "validate_count": "ContractConditions(\"MainCondition\")", - "validate_mode": "ContractConditions(\"MainCondition\")", - "app_id": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('5', 'blocks', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('6', 'signatures', - '{"insert": "ContractConditions(\"MainCondition\")", "update": 
"ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('7', 'cron', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"owner": "ContractConditions(\"MainCondition\")", - "cron": "ContractConditions(\"MainCondition\")", - "contract": "ContractConditions(\"MainCondition\")", - "counter": "ContractConditions(\"MainCondition\")", - "till": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractConditions("MainCondition")'), - ('8', 'binaries', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"app_id": "ContractConditions(\"MainCondition\")", - "member_id": "ContractConditions(\"MainCondition\")", - "name": "ContractConditions(\"MainCondition\")", - "data": "ContractConditions(\"MainCondition\")", - "hash": "ContractConditions(\"MainCondition\")", - "mime_type": "ContractConditions(\"MainCondition\")"}', - 'ContractConditions("MainCondition")'); - - INSERT INTO "%[1]d_vde_contracts" ("id", "name", "value", "conditions") VALUES - ('1','MainCondition','contract MainCondition { - conditions { - if EcosysParam("founder_account")!=$key_id - { - warning "Sorry, you do not have access to this action." - } - } - }', 'ContractConditions("MainCondition")'), - ('2','NewContract','contract NewContract { - data { - Value string - Conditions string - Wallet string "optional" - TokenEcosystem int "optional" - ApplicationId int "optional" - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - $walletContract = $key_id - if $Wallet { - $walletContract = AddressToId($Wallet) - if $walletContract == 0 { - error Sprintf("wrong wallet %%s", $Wallet) - } - } - var list array - list = ContractsList($Value) - - if Len(list) == 0 { - error "must be the name" - } - - var i int - while i < Len(list) { - if IsObject(list[i], $ecosystem_id) { - warning Sprintf("Contract or function %%s exists", list[i] ) - } - i = i + 1 - } - - $contract_name = list[0] - if !$TokenEcosystem { - $TokenEcosystem = 1 - } else { - if !SysFuel($TokenEcosystem) { - warning Sprintf("Ecosystem %%d is not system", $TokenEcosystem ) - } - } - } - action { - var root, id int - root = CompileContract($Value, $ecosystem_id, $walletContract, $TokenEcosystem) - id = DBInsert("contracts", "name,value,conditions, wallet_id, token_id,app_id", - $contract_name, $Value, $Conditions, $walletContract, $TokenEcosystem, $ApplicationId) - FlushContract(root, id, false) - $result = id - } - func rollback() { - var list array - list = ContractsList($Value) - var i int - while i < Len(list) { - RollbackContract(list[i]) - i = i + 1 - } - } - func price() int { - return SysParamInt("contract_price") - } - }', 'ContractConditions("MainCondition")'), - ('3','EditContract','contract EditContract { - data { - Id int - Value string "optional" - Conditions string "optional" - } - - func onlyConditions() bool { - return $Conditions && !$Value - } - conditions { - RowConditions("contracts", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - - var row array - row = 
DBFind("contracts").Columns("id,value,conditions").WhereId($Id) - if !Len(row) { - error Sprintf("Contract %%d does not exist", $Id) - } - $cur = row[0] - if $Value { - var list, curlist array - list = ContractsList($Value) - curlist = ContractsList($cur["value"]) - if Len(list) != Len(curlist) { - error "Contracts cannot be removed or inserted" - } - var i int - while i < Len(list) { - var j int - var ok bool - while j < Len(curlist) { - if curlist[j] == list[i] { - ok = true - break - } - j = j + 1 - } - if !ok { - error "Contracts or functions names cannot be changed" - } - i = i + 1 - } - } - } - action { - var root int - var pars, vals array - - if $Value { - root = CompileContract($Value, $ecosystem_id, 0, 0) - pars[0] = "value" - vals[0] = $Value - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if Len(vals) > 0 { - DBUpdate("contracts", $Id, Join(pars, ","), vals...) - } - if $Value { - FlushContract(root, $Id, false) - } - } - }', 'ContractConditions("MainCondition")'), - ('4','NewParameter','contract NewParameter { - data { - Name string - Value string - Conditions string - } - conditions { - var ret array - ValidateCondition($Conditions, $ecosystem_id) - ret = DBFind("parameters").Columns("id").Where("name=?", $Name).Limit(1) - if Len(ret) > 0 { - warning Sprintf( "Parameter %%s already exists", $Name) - } - } - action { - $result = DBInsert("parameters", "name,value,conditions", $Name, $Value, $Conditions ) - } - }', 'ContractConditions("MainCondition")'), - ('5','EditParameter','contract EditParameter { - data { - Id int - Value string - Conditions string - } - func onlyConditions() bool { - return $Conditions && !$Value - } - conditions { - RowConditions("parameters", $Id, onlyConditions()) - ValidateCondition($Conditions, $ecosystem_id) - } - action { - DBUpdate("parameters", $Id, "value,conditions", $Value, $Conditions ) - } - }', 'ContractConditions("MainCondition")'), - ('6', 'NewMenu','contract NewMenu { - data { - Name string - Value string - Title string "optional" - Conditions string - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - - var row map - row = DBRow("menu").Columns("id").Where("name = ?", $Name) - - if row { - warning Sprintf( "Menu %%s already exists", $Name) - } - } - action { - DBInsert("menu", "name,value,title,conditions", $Name, $Value, $Title, $Conditions ) - } - func price() int { - return SysParamInt("menu_price") - } - }', 'ContractConditions("MainCondition")'), - ('7','EditMenu','contract EditMenu { - data { - Id int - Value string "optional" - Title string "optional" - Conditions string "optional" - } - - func onlyConditions() bool { - return $Conditions && !$Value && !$Title - } - conditions { - RowConditions("menu", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - } - action { - var pars, vals array - if $Value { - pars[0] = "value" - vals[0] = $Value - } - if $Title { - pars[Len(pars)] = "title" - vals[Len(vals)] = $Title - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if Len(vals) > 0 { - DBUpdate("menu", $Id, Join(pars, ","), vals...) 
- } - } - }', 'ContractConditions("MainCondition")'), - ('8','AppendMenu','contract AppendMenu { - data { - Id int - Value string - } - conditions { - RowConditions("menu", $Id, false) - } - action { - var row map - row = DBRow("menu").Columns("value").WhereId($Id) - DBUpdate("menu", $Id, "value", row["value"] + "\r\n" + $Value) - } - }', 'ContractConditions("MainCondition")'), - ('9','NewPage','contract NewPage { - data { - Name string - Value string - Menu string - Conditions string - ValidateCount int "optional" - ApplicationId int "optional" - ValidateMode int "optional" - } - func preparePageValidateCount(count int) int { - var min, max int - min = Int(EcosysParam("min_page_validate_count")) - max = Int(EcosysParam("max_page_validate_count")) - - if count < min { - count = min - } else { - if count > max { - count = max - } - } - - return count - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - - var row map - row = DBRow("pages").Columns("id").Where("name = ?", $Name) - - if row { - warning Sprintf( "Page %%s already exists", $Name) - } - - $ValidateCount = preparePageValidateCount($ValidateCount) - } - action { - DBInsert("pages", "name,value,menu,validate_count,conditions,app_id,validate_mode", - $Name, $Value, $Menu, $ValidateCount, $Conditions, $ApplicationId, $ValidateMode) - } - func price() int { - return SysParamInt("page_price") - } - }', 'ContractConditions("MainCondition")'), - ('10','EditPage','contract EditPage { - data { - Id int - Value string "optional" - Menu string "optional" - Conditions string "optional" - ValidateCount int "optional" - ValidateMode string "optional" - } - func onlyConditions() bool { - return $Conditions && !$Value && !$Menu - } - func preparePageValidateCount(count int) int { - var min, max int - min = Int(EcosysParam("min_page_validate_count")) - max = Int(EcosysParam("max_page_validate_count")) - - if count < min { - count = min - } else { - if count > max { - count = max - } - } - - return count - } - conditions { - RowConditions("pages", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - $ValidateCount = preparePageValidateCount($ValidateCount) - } - action { - var pars, vals array - if $Value { - pars[0] = "value" - vals[0] = $Value - } - if $Menu { - pars[Len(pars)] = "menu" - vals[Len(vals)] = $Menu - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if $ValidateCount { - pars[Len(pars)] = "validate_count" - vals[Len(vals)] = $ValidateCount - } - if $ValidateMode { - if $ValidateMode != "1" { - $ValidateMode = "0" - } - pars[Len(pars)] = "validate_mode" - vals[Len(vals)] = $ValidateMode - } - if Len(vals) > 0 { - DBUpdate("pages", $Id, Join(pars, ","), vals...) 
- } - } - }', 'ContractConditions("MainCondition")'), - ('11','AppendPage','contract AppendPage { - data { - Id int - Value string - } - conditions { - RowConditions("pages", $Id, false) - } - action { - var row map - row = DBRow("pages").Columns("value").WhereId($Id) - DBUpdate("pages", $Id, "value", row["value"] + "\r\n" + $Value) - } - }', 'ContractConditions("MainCondition")'), - ('12','NewBlock','contract NewBlock { - data { - Name string - Value string - Conditions string - ApplicationId int "optional" - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - - var row map - row = DBRow("blocks").Columns("id").Where("name = ?", $Name) - - if row { - warning Sprintf( "Block %%s already exists", $Name) - } - } - action { - DBInsert("blocks", "name,value,conditions,app_id", $Name, $Value, $Conditions, $ApplicationId ) - } - }', 'ContractConditions("MainCondition")'), - ('13','EditBlock','contract EditBlock { - data { - Id int - Value string "optional" - Conditions string "optional" - } - - func onlyConditions() bool { - return $Conditions && !$Value - } - - conditions { - RowConditions("blocks", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - } - action { - var pars, vals array - if $Value { - pars[0] = "value" - vals[0] = $Value - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if Len(vals) > 0 { - DBUpdate("blocks", $Id, Join(pars, ","), vals...) - } - } - }', 'ContractConditions("MainCondition")'), - ('14','NewTable','contract NewTable { - data { - Name string - Columns string - Permissions string - ApplicationId int "optional" - } - conditions { - TableConditions($Name, $Columns, $Permissions) - } - action { - CreateTable($Name, $Columns, $Permissions, $ApplicationId) - } - func rollback() { - RollbackTable($Name) - } - func price() int { - return SysParamInt("table_price") - } - }', 'ContractConditions("MainCondition")'), - ('15','EditTable','contract EditTable { - data { - Name string - Permissions string - } - conditions { - TableConditions($Name, "", $Permissions) - } - action { - PermTable($Name, $Permissions ) - } - }', 'ContractConditions("MainCondition")'), - ('16','NewColumn','contract NewColumn { - data { - TableName string - Name string - Type string - Permissions string - } - conditions { - ColumnCondition($TableName, $Name, $Type, $Permissions) - } - action { - CreateColumn($TableName, $Name, $Type, $Permissions) - } - }', 'ContractConditions("MainCondition")'), - ('17','EditColumn','contract EditColumn { - data { - TableName string - Name string - Permissions string - } - conditions { - ColumnCondition($TableName, $Name, "", $Permissions) - } - action { - PermColumn($TableName, $Name, $Permissions) - } - }', 'ContractConditions("MainCondition")'), - ('18','NewLang','contract NewLang { - data { - Name string - Trans string - AppID int - } - conditions { - EvalCondition("parameters", "changing_language", "value") - var row array - row = DBFind("languages").Columns("name").Where("name=? 
AND app_id=?", $Name, $AppID).Limit(1) - if Len(row) > 0 { - error Sprintf("The language resource %%s already exists", $Name) - } - } - action { - DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) - } - }', 'ContractConditions("MainCondition")'), - ('19','EditLang','contract EditLang { - data { - Id int - Name string - Trans string - AppID int - } - conditions { - EvalCondition("parameters", "changing_language", "value") - } - action { - DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) - } - }', 'ContractConditions("MainCondition")'), - ('20','Import','contract Import { - data { - Data string - } - conditions { - $list = JSONDecode($Data) - } - func ImportList(row array, cnt string) { - if !row { - return - } - var i int - while i < Len(row) { - var idata map - idata = row[i] - if(cnt == "pages"){ - $ret_page = DBFind("pages").Columns("id").Where("name=$", idata["Name"]) - $page_id = One($ret_page, "id") - if ($page_id != nil){ - idata["Id"] = Int($page_id) - CallContract("EditPage", idata) - } else { - CallContract("NewPage", idata) - } - } - if(cnt == "blocks"){ - $ret_block = DBFind("blocks").Columns("id").Where("name=$", idata["Name"]) - $block_id = One($ret_block, "id") - if ($block_id != nil){ - idata["Id"] = Int($block_id) - CallContract("EditBlock", idata) - } else { - CallContract("NewBlock", idata) - } - } - if(cnt == "menus"){ - $ret_menu = DBFind("menu").Columns("id,value").Where("name=$", idata["Name"]) - $menu_id = One($ret_menu, "id") - $menu_value = One($ret_menu, "value") - if ($menu_id != nil){ - idata["Id"] = Int($menu_id) - idata["Value"] = Str($menu_value) + "\n" + Str(idata["Value"]) - CallContract("EditMenu", idata) - } else { - CallContract("NewMenu", idata) - } - } - if(cnt == "parameters"){ - $ret_param = DBFind("parameters").Columns("id").Where("name=$", idata["Name"]) - $param_id = One($ret_param, "id") - if ($param_id != nil){ - idata["Id"] = Int($param_id) - CallContract("EditParameter", idata) - } else { - CallContract("NewParameter", idata) - } - } - if(cnt == "languages"){ - $ret_lang = DBFind("languages").Columns("id").Where("name=$", idata["Name"]) - $lang_id = One($ret_lang, "id") - if ($lang_id != nil){ - CallContract("EditLang", idata) - } else { - CallContract("NewLang", idata) - } - } - if(cnt == "contracts"){ - if IsObject(idata["Name"], $ecosystem_id){ - } else { - CallContract("NewContract", idata) - } - } - if(cnt == "tables"){ - $ret_table = DBFind("tables").Columns("id").Where("name=$", idata["Name"]) - $table_id = One($ret_table, "id") - if ($table_id != nil){ - } else { - CallContract("NewTable", idata) - } - } - i = i + 1 - } - } - func ImportData(row array) { - if !row { - return - } - var i int - while i < Len(row) { - var idata map - var list array - var tblname, columns string - idata = row[i] - i = i + 1 - tblname = idata["Table"] - columns = Join(idata["Columns"], ",") - list = idata["Data"] - if !list { - continue - } - var j int - while j < Len(list) { - var ilist array - ilist = list[j] - DBInsert(tblname, columns, ilist) - j=j+1 - } - } - } - action { - ImportList($list["pages"], "pages") - ImportList($list["blocks"], "blocks") - ImportList($list["menus"], "menus") - ImportList($list["parameters"], "parameters") - ImportList($list["languages"], "languages") - ImportList($list["contracts"], "contracts") - ImportList($list["tables"], "tables") - ImportData($list["data"]) - } - }', 'ContractConditions("MainCondition")'), - ('21', 
'NewCron','contract NewCron { - data { - Cron string - Contract string - Limit int "optional" - Till string "optional date" - Conditions string - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - ValidateCron($Cron) - } - action { - if !$Till { - $Till = "1970-01-01 00:00:00" - } - if !HasPrefix($Contract, "@") { - $Contract = "@" + Str($ecosystem_id) + $Contract - } - $result = DBInsert("cron", "owner,cron,contract,counter,till,conditions", - $key_id, $Cron, $Contract, $Limit, $Till, $Conditions) - UpdateCron($result) - } - }', 'ContractConditions("MainCondition")'), - ('22','EditCron','contract EditCron { - data { - Id int - Contract string - Cron string "optional" - Limit int "optional" - Till string "optional date" - Conditions string - } - conditions { - ConditionById("cron", true) - ValidateCron($Cron) - } - action { - if !$Till { - $Till = "1970-01-01 00:00:00" - } - if !HasPrefix($Contract, "@") { - $Contract = "@" + Str($ecosystem_id) + $Contract - } - DBUpdate("cron", $Id, "cron,contract,counter,till,conditions", - $Cron, $Contract, $Limit, $Till, $Conditions) - UpdateCron($Id) - } - }', 'ContractConditions("MainCondition")'), - ('23', 'UploadBinary', contract UploadBinary { - data { - Name string - Data bytes "file" - AppID int - DataMimeType string "optional" - MemberID int "optional" - } - conditions { - $Id = Int(DBFind("binaries").Columns("id").Where("app_id = ? AND member_id = ? AND name = ?", $AppID, $MemberID, $Name).One("id")) - } - action { - var hash string - hash = MD5($Data) - - if $DataMimeType == "" { - $DataMimeType = "application/octet-stream" - } - - if $Id != 0 { - DBUpdate("binaries", $Id, "data,hash,mime_type", $Data, hash, $DataMimeType) - } else { - $Id = DBInsert("binaries", "app_id,member_id,name,data,hash,mime_type", $AppID, $MemberID, $Name, $Data, hash, $DataMimeType) - } - - $result = $Id - } - }', 'ContractConditions("MainCondition")'); - ` diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index 755e626c7..4e5ca29ab 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -483,113 +483,38 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c PermColumn($TableName, $Name, $Permissions) } }', 'ContractConditions("MainCondition")'), - ('18','NewLang', 'contract NewLang { + ('18','NewLang','contract NewLang { data { - ApplicationId int "optional" - Name string - Trans string "optional" - Value array "optional" - IdLanguage array "optional" + Name string + Trans string + AppID int } - conditions { - if $ApplicationId == 0 { - warning "Application id cannot equal 0" - } - - if DBFind("languages").Columns("id").Where("name = ?", $Name).One("id") { - warning Sprintf( "Language resource %%s already exists", $Name) - } - - var j int - while j < Len($IdLanguage) { - if $IdLanguage[j] == "" { - info("Locale empty") - } - if $Value[j] == "" { - info("Value empty") - } - j = j + 1 - } EvalCondition("parameters", "changing_language", "value") + var row array + row = DBFind("languages").Columns("name").Where("name=? 
AND app_id=?", $Name, $AppID).Limit(1) + if Len(row) > 0 { + error Sprintf("The language resource %%s already exists", $Name) + } } - action { - var i,len,lenshar int - var res,langarr string - len = Len($IdLanguage) - lenshar = Len($Value) - while i < len { - if i + 1 == len { - res = res + Sprintf("%%q: %%q",$IdLanguage[i],$Value[i]) - } else { - res = res + Sprintf("%%q: %%q,",$IdLanguage[i],$Value[i]) - } - i = i + 1 - } - if len > 0 { - langarr = Sprintf("{"+"%%v"+"}", res) - $Trans = langarr - } - $result = CreateLanguage($Name, $Trans, $ApplicationId) + DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) + UpdateLang($AppID, $Name, $Trans) } }', 'ContractConditions("MainCondition")'), ('19','EditLang','contract EditLang { data { - Id int - Name string "optional" - ApplicationId int "optional" - Trans string "optional" - Value array "optional" - IdLanguage array "optional" + Id int + Name string + Trans string + AppID int } - conditions { - var j int - while j < Len($IdLanguage) { - if ($IdLanguage[j] == ""){ - info("Locale empty") - } - if ($Value[j] == ""){ - info("Value empty") - } - j = j + 1 - } EvalCondition("parameters", "changing_language", "value") } - action { - var i,len int - var res,langarr string - len = Len($IdLanguage) - while i < len { - if (i + 1 == len){ - res = res + Sprintf("%%q: %%q", $IdLanguage[i],$Value[i]) - } - else { - res = res + Sprintf("%%q: %%q, ", $IdLanguage[i],$Value[i]) - } - i = i + 1 - } - - $row = DBFind("languages").Columns("name,app_id").WhereId($Id).Row() - if !$row{ - warning "Language not found" - } - - if $ApplicationId == 0 { - $ApplicationId = Int($row["app_id"]) - } - if $Name == "" { - $Name = $row["name"] - } - - if (len > 0){ - langarr = Sprintf("{"+"%%v"+"}", res) - $Trans = langarr - - } - EditLanguage($Id, $Name, $Trans, $ApplicationId) + DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) + UpdateLang($AppID, $Name, $Trans) } }', 'ContractConditions("MainCondition")'), ('20','Import','contract Import { @@ -794,7 +719,6 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c NewPubkey string } conditions { - Println($NewPubkey) $newId = PubToID($NewPubkey) if $newId == 0 { error "Wrong pubkey" @@ -802,48 +726,10 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c if DBFind("keys").Columns("id").WhereId($newId).One("id") != nil { error "User already exists" } - } - action { - DBInsert("keys", "id, pub", $newId, $NewPubKey) - } - }', 'ContractConditions("MainCondition")'), - ('25', 'NewVDE', 'contract NewVDE { - data { - VDEName string - DBUser string - DBPassword string - VDEAPIPort int - } - conditions { + $amount = Money(1000) * Money(1000000000000000000) } - action { - CreateVDE($VDEName, $DBUser, $DBPassword, $VDEAPIPort) - } - }', 'ContractConditions("MainCondition")'), - ('26', 'ListVDE', 'contract ListVDE { - data { - VDEName string - } - - conditions { - - } - - action { - GetVDEList($VDEName) - } - }', 'ContractConditions("MainCondition")'), - ('27', 'RunVDE', 'contract RunVDE { - data { - VDEName string - } - - conditions { - } - - action { - StartVDE($VDEName) + DBInsert("keys", "id, pub", $newId, $NewPubKey) } }', 'ContractConditions("MainCondition")');` From e0364967c977ea2cca9a59caab69b6518bd782bd Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 10 May 2018 22:37:36 +0300 Subject: [PATCH 074/169] fix login --- packages/migration/vde/vde_data_contracts.go | 111 +++++++++++++++---- 1 file changed, 92 insertions(+), 19 
deletions(-) diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index 4e5ca29ab..ea83e591c 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -483,38 +483,113 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c PermColumn($TableName, $Name, $Permissions) } }', 'ContractConditions("MainCondition")'), - ('18','NewLang','contract NewLang { + ('18','NewLang', 'contract NewLang { data { - Name string - Trans string - AppID int + ApplicationId int "optional" + Name string + Trans string "optional" + Value array "optional" + IdLanguage array "optional" } + conditions { - EvalCondition("parameters", "changing_language", "value") - var row array - row = DBFind("languages").Columns("name").Where("name=? AND app_id=?", $Name, $AppID).Limit(1) - if Len(row) > 0 { - error Sprintf("The language resource %%s already exists", $Name) + if $ApplicationId == 0 { + warning "Application id cannot equal 0" + } + + if DBFind("languages").Columns("id").Where("name = ?", $Name).One("id") { + warning Sprintf( "Language resource %%s already exists", $Name) } + + var j int + while j < Len($IdLanguage) { + if $IdLanguage[j] == "" { + info("Locale empty") + } + if $Value[j] == "" { + info("Value empty") + } + j = j + 1 + } + EvalCondition("parameters", "changing_language", "value") } + action { - DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) + var i,len,lenshar int + var res,langarr string + len = Len($IdLanguage) + lenshar = Len($Value) + while i < len { + if i + 1 == len { + res = res + Sprintf("%%q: %%q",$IdLanguage[i],$Value[i]) + } else { + res = res + Sprintf("%%q: %%q,",$IdLanguage[i],$Value[i]) + } + i = i + 1 + } + if len > 0 { + langarr = Sprintf("{"+"%%v"+"}", res) + $Trans = langarr + } + $result = CreateLanguage($Name, $Trans, $ApplicationId) } }', 'ContractConditions("MainCondition")'), ('19','EditLang','contract EditLang { data { - Id int - Name string - Trans string - AppID int + Id int + Name string "optional" + ApplicationId int "optional" + Trans string "optional" + Value array "optional" + IdLanguage array "optional" } + conditions { + var j int + while j < Len($IdLanguage) { + if ($IdLanguage[j] == ""){ + info("Locale empty") + } + if ($Value[j] == ""){ + info("Value empty") + } + j = j + 1 + } EvalCondition("parameters", "changing_language", "value") } + action { - DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) + var i,len int + var res,langarr string + len = Len($IdLanguage) + while i < len { + if (i + 1 == len){ + res = res + Sprintf("%%q: %%q", $IdLanguage[i],$Value[i]) + } + else { + res = res + Sprintf("%%q: %%q, ", $IdLanguage[i],$Value[i]) + } + i = i + 1 + } + + $row = DBFind("languages").Columns("name,app_id").WhereId($Id).Row() + if !$row{ + warning "Language not found" + } + + if $ApplicationId == 0 { + $ApplicationId = Int($row["app_id"]) + } + if $Name == "" { + $Name = $row["name"] + } + + if (len > 0){ + langarr = Sprintf("{"+"%%v"+"}", res) + $Trans = langarr + + } + EditLanguage($Id, $Name, $Trans, $ApplicationId) } }', 'ContractConditions("MainCondition")'), ('20','Import','contract Import { @@ -726,8 +801,6 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c if DBFind("keys").Columns("id").WhereId($newId).One("id") != nil { error "User already exists" } - - $amount = Money(1000) * 
Money(1000000000000000000) } action { DBInsert("keys", "id, pub", $newId, $NewPubKey) From c3a2fd1e77ecbaa11ce5e0e6ce29f0c1fcfcee2d Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 14 May 2018 09:18:14 +0300 Subject: [PATCH 075/169] temporary commit --- packages/api/route.go | 2 +- packages/migration/vde/vde_data_contracts.go | 41 ++++++++++++++++++++ 2 files changed, 42 insertions(+), 1 deletion(-) diff --git a/packages/api/route.go b/packages/api/route.go index ef84e9637..56b547f61 100644 --- a/packages/api/route.go +++ b/packages/api/route.go @@ -93,7 +93,7 @@ func Route(route *hr.Router) { get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) get(`block/:id`, ``, getBlockInfo) get(`maxblockid`, ``, getMaxBlockID) - get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) + get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) get(`systemparams`, `?names:string`, authWallet, systemParams) get(`ecosystems`, ``, authWallet, ecosystems) diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index ea83e591c..755e626c7 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -794,6 +794,7 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c NewPubkey string } conditions { + Println($NewPubkey) $newId = PubToID($NewPubkey) if $newId == 0 { error "Wrong pubkey" @@ -805,4 +806,44 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c action { DBInsert("keys", "id, pub", $newId, $NewPubKey) } + }', 'ContractConditions("MainCondition")'), + ('25', 'NewVDE', 'contract NewVDE { + data { + VDEName string + DBUser string + DBPassword string + VDEAPIPort int + } + + conditions { + } + + action { + CreateVDE($VDEName, $DBUser, $DBPassword, $VDEAPIPort) + } + }', 'ContractConditions("MainCondition")'), + ('26', 'ListVDE', 'contract ListVDE { + data { + VDEName string + } + + conditions { + + } + + action { + GetVDEList($VDEName) + } + }', 'ContractConditions("MainCondition")'), + ('27', 'RunVDE', 'contract RunVDE { + data { + VDEName string + } + + conditions { + } + + action { + StartVDE($VDEName) + } }', 'ContractConditions("MainCondition")');` From e8545b5d135f69210ec394a9d6c5929b044ec373 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Tue, 15 May 2018 12:05:42 +0300 Subject: [PATCH 076/169] temp commit --- packages/api/login.go | 37 +++++++++----------- packages/migration/vde/vde_data_contracts.go | 3 +- packages/migration/vde/vde_data_tables.go | 10 +++++- packages/smart/funcs.go | 3 -- 4 files changed, 28 insertions(+), 25 deletions(-) diff --git a/packages/api/login.go b/packages/api/login.go index 9e0f9a07e..7882de84e 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -114,6 +114,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En } } else { pubkey = data.params[`pubkey`].([]byte) + fmt.Println(string(pubkey)) if len(pubkey) == 0 { logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("public key is empty") return errorAPI(w, `E_EMPTYPUBLIC`, http.StatusBadRequest) @@ -126,21 +127,16 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En return err } + pubkey = data.params[`pubkey`].([]byte) hexPubKey := hex.EncodeToString(pubkey) - params := make([]byte, 0) - params = append(append(params, converter.EncodeLength(int64(len(hexPubKey)))...), hexPubKey...) 
+ params := converter.EncodeLength(int64(len(hexPubKey))) + params = append(params, hexPubKey...) contract := smart.GetContract("NewUser", 1) - info := contract.Block.Info.(*script.ContractInfo) - - // scHeader, err := getHeader("NewUser", data) - if err != nil { - return errorAPI(w, "E_EMPTYOBJECT", http.StatusBadRequest) - } sc := tx.SmartContract{ Header: tx.Header{ - Type: int(info.ID), + Type: int(contract.Block.Info.(*script.ContractInfo).ID), Time: time.Now().Unix(), EcosystemID: 1, KeyID: conf.Config.KeyID, @@ -154,34 +150,34 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En if conf.Config.IsSupportingVDE() { signPrms := []string{sc.ForSign()} - signPrms = append(signPrms, string(hexPubKey)) - signature, err := crypto.Sign( - NodePrivateKey, - strings.Join(signPrms, ","), - ) + signPrms = append(signPrms, hexPubKey) + signData := strings.Join(signPrms, ",") + signature, err := crypto.Sign(NodePrivateKey, signData) if err != nil { log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("signing by node private key") return err } + sc.BinSignatures = converter.EncodeLengthPlusData(signature) + + if sc.PublicKey, err = hex.DecodeString(NodePublicKey); err != nil { + log.WithFields(log.Fields{"type": consts.ConversionError, "error": err}).Error("decoding public key from hex") + return err + } + serializedContract, err := msgpack.Marshal(sc) if err != nil { logger.WithFields(log.Fields{"type": consts.MarshallingError, "error": err}).Error("marshalling smart contract to msgpack") return errorAPI(w, err, http.StatusInternalServerError) } - // signature := data.params[`signature`].([]byte) - // if len(signature) == 0 { - // log.WithFields(log.Fields{"type": consts.EmptyObject, "params": data.params}).Error("signature is empty") - // } - fmt.Println(len(signature)) ret, err := VDEContract(serializedContract, data) if err != nil { return errorAPI(w, err, http.StatusInternalServerError) } data.result = ret } else { - err = tx.BuildTransaction(sc, NodePrivateKey, NodePublicKey, string(hexPubKey)) + err = tx.BuildTransaction(sc, NodePrivateKey, NodePublicKey, hexPubKey) if err != nil { log.WithFields(log.Fields{"type": consts.ContractError}).Error("Executing contract") } @@ -216,6 +212,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En } } + fmt.Println(string(pubkey)) verify, err := crypto.CheckSign(pubkey, nonceSalt+msg, data.params[`signature`].([]byte)) if err != nil { logger.WithFields(log.Fields{"type": consts.CryptoError, "pubkey": pubkey, "msg": msg, "signature": string(data.params["signature"].([]byte))}).Error("checking signature") diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index 755e626c7..c26fa16d6 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -804,7 +804,8 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c } } action { - DBInsert("keys", "id, pub", $newId, $NewPubKey) + DBInsert("keys", "id", $newId) + SetPubKey($newId, StringToBytes($NewPubkey)) } }', 'ContractConditions("MainCondition")'), ('25', 'NewVDE', 'contract NewVDE { diff --git a/packages/migration/vde/vde_data_tables.go b/packages/migration/vde/vde_data_tables.go index 4223e825a..955514d55 100644 --- a/packages/migration/vde/vde_data_tables.go +++ b/packages/migration/vde/vde_data_tables.go @@ -64,5 +64,13 @@ INSERT INTO "%[1]d_tables" ("id", "name", "permissions","columns", 
"conditions") "data": "ContractConditions(\"MainCondition\")", "hash": "ContractConditions(\"MainCondition\")", "mime_type": "ContractConditions(\"MainCondition\")"}', - 'ContractConditions("MainCondition")'); + 'ContractConditions("MainCondition")'), + ('9', 'keys', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"pub": "ContractConditions(\"MainCondition\")", + "multi": "ContractConditions(\"MainCondition\")", + "deleted": "ContractConditions(\"MainCondition\")", + "blocked": "ContractConditions(\"MainCondition\")"}', + 'ContractConditions("MainCondition")'); ` diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 22861a3a7..cedfd6812 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -304,9 +304,6 @@ func GetTableName(sc *SmartContract, tblname string, ecosystem int64) string { return strings.ToLower(tblname[1:]) } prefix := converter.Int64ToStr(ecosystem) - if sc.VDE { - prefix += `_vde` - } return strings.ToLower(fmt.Sprintf(`%s_%s`, prefix, tblname)) } From 3708cdb87fc7601f656d3a3155cc02fc37689443 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Tue, 15 May 2018 21:28:09 +0300 Subject: [PATCH 077/169] remove fmt from login api handlers --- packages/api/login.go | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/api/login.go b/packages/api/login.go index 7882de84e..a6548fcfb 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -17,7 +17,6 @@ package api import ( - "fmt" "net/http" "strings" "time" @@ -114,7 +113,6 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En } } else { pubkey = data.params[`pubkey`].([]byte) - fmt.Println(string(pubkey)) if len(pubkey) == 0 { logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("public key is empty") return errorAPI(w, `E_EMPTYPUBLIC`, http.StatusBadRequest) @@ -212,7 +210,6 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En } } - fmt.Println(string(pubkey)) verify, err := crypto.CheckSign(pubkey, nonceSalt+msg, data.params[`signature`].([]byte)) if err != nil { logger.WithFields(log.Fields{"type": consts.CryptoError, "pubkey": pubkey, "msg": msg, "signature": string(data.params["signature"].([]byte))}).Error("checking signature") @@ -245,7 +242,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En Address: address, IsOwner: founder == wallet, IsNode: conf.Config.KeyID == wallet, - IsVDE: model.IsTable(fmt.Sprintf(`%d_vde_tables`, consts.DefaultVDE)), + IsVDE: conf.Config.IsSupportingVDE(), } data.result = &result From 0eed002b571023c6eb117d355b12dd9d10507bb9 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Wed, 16 May 2018 20:53:47 +0300 Subject: [PATCH 078/169] add drop db function --- packages/model/db.go | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/packages/model/db.go b/packages/model/db.go index 7e32186ea..fdf4d54aa 100644 --- a/packages/model/db.go +++ b/packages/model/db.go @@ -394,3 +394,25 @@ func InitDB(cfg conf.DBConfig) error { return nil } + +// DropDatabase kill all process and drop database +func DropDatabase(name string) error { + query := `SELECT + pg_terminate_backend (pg_stat_activity.pid) + FROM + pg_stat_activity + WHERE + pg_stat_activity.datname = ?` + + if err := DBConn.Exec(query, name).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err, "dbname": 
name}).Error("on kill db process") + return err + } + + if err := DBConn.Exec(fmt.Sprintf("DROP DATABASE IF EXISTS %s", name)).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err, "dbname": name}).Error("on drop db") + return err + } + + return nil +} From b395599cefbc7ce0d96f890c76d95a2dc542237b Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Wed, 16 May 2018 20:54:14 +0300 Subject: [PATCH 079/169] fix manager --- packages/api/vde_test.go | 36 +++++++++++++++++--- packages/migration/vde/vde_data_contracts.go | 25 +++++++++++--- packages/smart/funcs.go | 8 ++--- packages/vdemanager/manager.go | 18 +++++----- 4 files changed, 67 insertions(+), 20 deletions(-) diff --git a/packages/api/vde_test.go b/packages/api/vde_test.go index bd32c97de..990809436 100644 --- a/packages/api/vde_test.go +++ b/packages/api/vde_test.go @@ -37,15 +37,43 @@ func TestVDECreate(t *testing.T) { require.NoError(t, keyLogin(1)) form := url.Values{ - "VDEName": {"testvde"}, - "DBUser": {"vdeuser"}, + "VDEName": {"myvde3"}, + "DBUser": {"myvdeuser3"}, "DBPassword": {"vdepassword"}, - "VDEAPIPort": {"8000"}, + "VDEAPIPort": {"8004"}, } - require.NoError(t, postTx("NewVDE", &form)) + assert.NoError(t, postTx("NewVDE", &form)) +} + +func TestVDEList(t *testing.T) { + require.NoError(t, keyLogin(1)) + fmt.Println(postTx("ListVDE", nil)) } +func TestStopVDE(t *testing.T) { + require.NoError(t, keyLogin(1)) + form := url.Values{ + "VDEName": {"myvde3"}, + } + require.NoError(t, postTx("StopVDE", &form)) +} + +func TestRunVDE(t *testing.T) { + require.NoError(t, keyLogin(1)) + form := url.Values{ + "VDEName": {"myvde3"}, + } + require.NoError(t, postTx("RunVDE", &form)) +} + +func TestRemoveVDE(t *testing.T) { + require.NoError(t, keyLogin(1)) + form := url.Values{ + "VDEName": {"myvde3"}, + } + require.NoError(t, postTx("RemoveVDE", &form)) +} func TestVDEParams(t *testing.T) { assert.NoError(t, keyLogin(1)) diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index c26fa16d6..4297f287a 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -824,19 +824,27 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c } }', 'ContractConditions("MainCondition")'), ('26', 'ListVDE', 'contract ListVDE { + data {} + + conditions {} + + action { + GetVDEList() + } + }', 'ContractConditions("MainCondition")'), + ('27', 'RunVDE', 'contract RunVDE { data { VDEName string } conditions { - } action { - GetVDEList($VDEName) + StartVDE($VDEName) } }', 'ContractConditions("MainCondition")'), - ('27', 'RunVDE', 'contract RunVDE { + ('28', 'StopVDE', 'contract StopVDE { data { VDEName string } @@ -845,6 +853,15 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c } action { - StartVDE($VDEName) + StopVDEProcess($VDEName) + } + }', 'ContractConditions("MainCondition")'), + ('29', 'RemoveVDE', 'contract RemoveVDE { + data { + VDEName string + } + conditions {} + action{ + DeleteVDE($VDEName) } }', 'ContractConditions("MainCondition")');` diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index cedfd6812..002792d05 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -266,7 +266,7 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { f["CreateVDE"] = CreateVDE f["DeleteVDE"] = DeleteVDE f["StartVDE"] = StartVDE - f["StopVDE"] = StopVDE + f["StopVDEProcess"] = StopVDEProcess f["GetVDEList"] = GetVDEList vmExtendCost(vm, 
getCost) vmFuncCallsDB(vm, funcCallsDB) @@ -1714,12 +1714,12 @@ func StartVDE(sc *SmartContract, name string) error { return vdemanager.Manager.StartVDE(name) } -// StopVDE stops VDE process -func StopVDE(sc *SmartContract, name string) error { +// StopVDEProcess stops VDE process +func StopVDEProcess(sc *SmartContract, name string) error { return vdemanager.Manager.StopVDE(name) } // GetVDEList returns list VDE process with statuses -func GetVDEList(sc *SmartContract, name string) (map[string]string, error) { +func GetVDEList(sc *SmartContract) (map[string]string, error) { return vdemanager.Manager.ListProcess() } diff --git a/packages/vdemanager/manager.go b/packages/vdemanager/manager.go index 4cca4ac8e..0e628edca 100644 --- a/packages/vdemanager/manager.go +++ b/packages/vdemanager/manager.go @@ -7,6 +7,7 @@ import ( "os" "path" "path/filepath" + "time" "github.com/GenesisKernel/go-genesis/packages/conf" @@ -22,7 +23,8 @@ const ( createRoleTemplate = `CREATE ROLE %s WITH ENCRYPTED PASSWORD '%s' NOSUPERUSER NOCREATEDB NOCREATEROLE INHERIT LOGIN` createDBTemplate = `CREATE DATABASE %s OWNER %s` - dropDBTemplate = `DROP OWNED BY %s CASCADE` + dropDBTemplate = `DROP DATABASE IF EXISTS %s` + dropOwnedTemplate = `DROP OWNED BY %s CASCADE` dropDBRoleTemplate = `DROP ROLE IF EXISTS %s` commandTemplate = `%s start --config=%s` ) @@ -101,7 +103,8 @@ func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) erro procConfEntry := pConf.NewConfigEntry(config.Directory) procConfEntry.Name = "program:" + name - command := fmt.Sprintf("%s --configPath=%s", config.Executable, config.Directory) + command := fmt.Sprintf("%s start --config=%s", config.Executable, filepath.Join(config.Directory, consts.DefaultConfigFile)) + log.Infoln(command) procConfEntry.AddKeyValue("command", command) proc := process.NewProcess("vdeMaster", procConfEntry) @@ -134,10 +137,7 @@ func (mgr *VDEManager) DeleteVDE(name string) error { return errWrongMode } - p := mgr.processes.Find(name) - if p != nil { - p.Stop(true) - } + mgr.StopVDE(name) vdeDir := path.Join(mgr.childConfigsPath, name) vdeConfigPath := filepath.Join(vdeDir, consts.DefaultConfigFile) @@ -147,8 +147,8 @@ func (mgr *VDEManager) DeleteVDE(name string) error { return err } - dropDBquery := fmt.Sprintf(dropDBTemplate, vdeConfig.DB.User) - if err := model.DBConn.Exec(dropDBquery).Error; err != nil { + time.Sleep(1 * time.Second) + if err := model.DropDatabase(vdeConfig.DB.Name); err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("Deleting vde db") return err } @@ -274,6 +274,7 @@ func InitVDEManager() { if item.IsDir() { procDir := path.Join(Manager.childConfigsPath, item.Name()) commandStr := fmt.Sprintf(commandTemplate, Manager.execPath, filepath.Join(procDir, consts.DefaultConfigFile)) + log.Info(commandStr) confEntry := pConf.NewConfigEntry(procDir) confEntry.Name = "program:" + item.Name() confEntry.AddKeyValue("command", commandStr) @@ -283,6 +284,7 @@ func InitVDEManager() { proc := process.NewProcess("vdeMaster", confEntry) Manager.processes.Add(item.Name(), proc) + proc.Start(true) } } } From cec911a78698ed583b82baca29a41c5322c01271 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 12:40:33 +0300 Subject: [PATCH 080/169] move changes --- packages/migration/vde/vde.go | 958 ++++++++++++++++++++++++++++++++++ packages/smart/funcs.go | 15 - 2 files changed, 958 insertions(+), 15 deletions(-) create mode 100644 packages/migration/vde/vde.go diff --git a/packages/migration/vde/vde.go 
b/packages/migration/vde/vde.go new file mode 100644 index 000000000..b63cf858d --- /dev/null +++ b/packages/migration/vde/vde.go @@ -0,0 +1,958 @@ +package migration + +var SchemaVDE = ` + DROP TABLE IF EXISTS "%[1]d_vde_members"; + CREATE TABLE "%[1]d_vde_members" ( + "id" bigint NOT NULL DEFAULT '0', + "member_name" varchar(255) NOT NULL DEFAULT '', + "image_id" bigint, + "member_info" jsonb + ); + ALTER TABLE ONLY "%[1]d_vde_members" ADD CONSTRAINT "%[1]d_vde_members_pkey" PRIMARY KEY ("id"); + + INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('%[2]d', 'founder'); + INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('4544233900443112470', 'guest'); + + DROP TABLE IF EXISTS "%[1]d_vde_languages"; CREATE TABLE "%[1]d_vde_languages" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(100) NOT NULL DEFAULT '', + "res" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_languages" ADD CONSTRAINT "%[1]d_vde_languages_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_vde_languages_index_name" ON "%[1]d_vde_languages" (name); + + DROP TABLE IF EXISTS "%[1]d_vde_menu"; CREATE TABLE "%[1]d_vde_menu" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "title" character varying(255) NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_menu" ADD CONSTRAINT "%[1]d_vde_menu_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_vde_menu_index_name" ON "%[1]d_vde_menu" (name); + + + INSERT INTO "%[1]d_vde_menu" ("id","name","title","value","conditions") VALUES('2','admin_menu','Admin menu','MenuItem( + Icon: "icon-screen-desktop", + Page: "interface", + Vde: "true", + Title: "Interface" +) +MenuItem( + Icon: "icon-docs", + Page: "tables", + Vde: "true", + Title: "Tables" +) +MenuItem( + Icon: "icon-briefcase", + Page: "contracts", + Vde: "true", + Title: "Smart Contracts" +) +MenuItem( + Icon: "icon-settings", + Page: "parameters", + Vde: "true", + Title: "Ecosystem parameters" +) +MenuItem( + Icon: "icon-globe", + Page: "languages", + Vde: "true", + Title: "Language resources" +) +MenuItem( + Icon: "icon-cloud-upload", + Page: "import", + Vde: "true", + Title: "Import" +) +MenuItem( + Icon: "icon-cloud-download", + Page: "export", + Vde: "true", + Title: "Export" +)','true'); + + DROP TABLE IF EXISTS "%[1]d_vde_pages"; CREATE TABLE "%[1]d_vde_pages" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "menu" character varying(255) NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '', + "validate_count" bigint NOT NULL DEFAULT '1', + "app_id" bigint NOT NULL DEFAULT '0', + "validate_mode" character(1) NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_vde_pages" ADD CONSTRAINT "%[1]d_vde_pages_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_vde_pages_index_name" ON "%[1]d_vde_pages" (name); + + INSERT INTO "%[1]d_vde_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); + + DROP TABLE IF EXISTS "%[1]d_vde_blocks"; CREATE TABLE "%[1]d_vde_blocks" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_blocks" ADD CONSTRAINT "%[1]d_vde_blocks_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_vde_blocks_index_name" ON "%[1]d_vde_blocks" (name); + + DROP TABLE IF EXISTS 
"%[1]d_vde_signatures"; CREATE TABLE "%[1]d_vde_signatures" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(100) NOT NULL DEFAULT '', + "value" jsonb, + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_signatures" ADD CONSTRAINT "%[1]d_vde_signatures_pkey" PRIMARY KEY (name); + + CREATE TABLE "%[1]d_vde_contracts" ( + "id" bigint NOT NULL DEFAULT '0', + "name" text NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_contracts" ADD CONSTRAINT "%[1]d_vde_contracts_pkey" PRIMARY KEY (id); + + DROP TABLE IF EXISTS "%[1]d_vde_parameters"; + CREATE TABLE "%[1]d_vde_parameters" ( + "id" bigint NOT NULL DEFAULT '0', + "name" varchar(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_parameters" ADD CONSTRAINT "%[1]d_vde_parameters_pkey" PRIMARY KEY ("id"); + CREATE INDEX "%[1]d_vde_parameters_index_name" ON "%[1]d_vde_parameters" (name); + + INSERT INTO "%[1]d_vde_parameters" ("id","name", "value", "conditions") VALUES + ('1','founder_account', '%[2]d', 'ContractConditions("MainCondition")'), + ('2','new_table', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('3','new_column', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('4','changing_tables', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('5','changing_language', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('6','changing_signature', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('7','changing_page', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('8','changing_menu', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('9','changing_contracts', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('10','stylesheet', 'body { + /* You can define your custom styles here or create custom CSS rules */ + }', 'ContractConditions("MainCondition")'), + ('11','changing_blocks', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'); + + DROP TABLE IF EXISTS "%[1]d_vde_cron"; + CREATE TABLE "%[1]d_vde_cron" ( + "id" bigint NOT NULL DEFAULT '0', + "owner" bigint NOT NULL DEFAULT '0', + "cron" varchar(255) NOT NULL DEFAULT '', + "contract" varchar(255) NOT NULL DEFAULT '', + "counter" bigint NOT NULL DEFAULT '0', + "till" timestamp NOT NULL DEFAULT timestamp '1970-01-01 00:00:00', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_cron" ADD CONSTRAINT "%[1]d_vde_cron_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_vde_binaries"; + CREATE TABLE "%[1]d_vde_binaries" ( + "id" bigint NOT NULL DEFAULT '0', + "app_id" bigint NOT NULL DEFAULT '1', + "member_id" bigint NOT NULL DEFAULT '0', + "name" varchar(255) NOT NULL DEFAULT '', + "data" bytea NOT NULL DEFAULT '', + "hash" varchar(32) NOT NULL DEFAULT '', + "mime_type" varchar(255) NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_binaries" ADD CONSTRAINT "%[1]d_vde_binaries_pkey" PRIMARY KEY (id); + CREATE UNIQUE INDEX "%[1]d_vde_binaries_index_app_id_member_id_name" ON "%[1]d_vde_binaries" (app_id, member_id, name); + + CREATE TABLE "%[1]d_vde_tables" ( + "id" bigint NOT NULL DEFAULT '0', + "name" varchar(100) UNIQUE NOT NULL DEFAULT '', + "permissions" jsonb, + "columns" 
jsonb, + "conditions" text NOT NULL DEFAULT '', + "app_id" bigint NOT NULL DEFAULT '1' + ); + ALTER TABLE ONLY "%[1]d_vde_tables" ADD CONSTRAINT "%[1]d_vde_tables_pkey" PRIMARY KEY ("id"); + CREATE INDEX "%[1]d_vde_tables_index_name" ON "%[1]d_vde_tables" (name); + + INSERT INTO "%[1]d_vde_tables" ("id", "name", "permissions","columns", "conditions") VALUES ('1', 'contracts', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "false", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), + ('2', 'languages', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{ "name": "ContractConditions(\"MainCondition\")", + "res": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), + ('3', 'menu', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('4', 'pages', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", + "value": "ContractConditions(\"MainCondition\")", + "menu": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")", + "validate_count": "ContractConditions(\"MainCondition\")", + "validate_mode": "ContractConditions(\"MainCondition\")", + "app_id": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('5', 'blocks', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('6', 'signatures', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('7', 'cron', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"owner": "ContractConditions(\"MainCondition\")", + "cron": "ContractConditions(\"MainCondition\")", + "contract": "ContractConditions(\"MainCondition\")", + "counter": "ContractConditions(\"MainCondition\")", + "till": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractConditions("MainCondition")'), + ('8', 'binaries', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": 
"ContractConditions(\"MainCondition\")"}', + '{"app_id": "ContractConditions(\"MainCondition\")", + "member_id": "ContractConditions(\"MainCondition\")", + "name": "ContractConditions(\"MainCondition\")", + "data": "ContractConditions(\"MainCondition\")", + "hash": "ContractConditions(\"MainCondition\")", + "mime_type": "ContractConditions(\"MainCondition\")"}', + 'ContractConditions("MainCondition")'); + + INSERT INTO "%[1]d_vde_contracts" ("id", "name", "value", "conditions") VALUES + ('1','MainCondition','contract MainCondition { + conditions { + if EcosysParam("founder_account")!=$key_id + { + warning "Sorry, you do not have access to this action." + } + } + }', 'ContractConditions("MainCondition")'), + ('2','NewContract','contract NewContract { + data { + Value string + Conditions string + Wallet string "optional" + TokenEcosystem int "optional" + ApplicationId int "optional" + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + $walletContract = $key_id + if $Wallet { + $walletContract = AddressToId($Wallet) + if $walletContract == 0 { + error Sprintf("wrong wallet %%s", $Wallet) + } + } + var list array + list = ContractsList($Value) + + if Len(list) == 0 { + error "must be the name" + } + + var i int + while i < Len(list) { + if IsObject(list[i], $ecosystem_id) { + warning Sprintf("Contract or function %%s exists", list[i] ) + } + i = i + 1 + } + + $contract_name = list[0] + if !$TokenEcosystem { + $TokenEcosystem = 1 + } else { + if !SysFuel($TokenEcosystem) { + warning Sprintf("Ecosystem %%d is not system", $TokenEcosystem ) + } + } + } + action { + var root, id int + root = CompileContract($Value, $ecosystem_id, $walletContract, $TokenEcosystem) + id = DBInsert("contracts", "name,value,conditions, wallet_id, token_id,app_id", + $contract_name, $Value, $Conditions, $walletContract, $TokenEcosystem, $ApplicationId) + FlushContract(root, id, false) + $result = id + } + func rollback() { + var list array + list = ContractsList($Value) + var i int + while i < Len(list) { + RollbackContract(list[i]) + i = i + 1 + } + } + func price() int { + return SysParamInt("contract_price") + } + }', 'ContractConditions("MainCondition")'), + ('3','EditContract','contract EditContract { + data { + Id int + Value string "optional" + Conditions string "optional" + } + + func onlyConditions() bool { + return $Conditions && !$Value + } + conditions { + RowConditions("contracts", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + + var row array + row = DBFind("contracts").Columns("id,value,conditions").WhereId($Id) + if !Len(row) { + error Sprintf("Contract %%d does not exist", $Id) + } + $cur = row[0] + if $Value { + var list, curlist array + list = ContractsList($Value) + curlist = ContractsList($cur["value"]) + if Len(list) != Len(curlist) { + error "Contracts cannot be removed or inserted" + } + var i int + while i < Len(list) { + var j int + var ok bool + while j < Len(curlist) { + if curlist[j] == list[i] { + ok = true + break + } + j = j + 1 + } + if !ok { + error "Contracts or functions names cannot be changed" + } + i = i + 1 + } + } + } + action { + var root int + var pars, vals array + + if $Value { + root = CompileContract($Value, $ecosystem_id, 0, 0) + pars[0] = "value" + vals[0] = $Value + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if Len(vals) > 0 { + DBUpdate("contracts", $Id, Join(pars, ","), vals...) 
+ } + if $Value { + FlushContract(root, $Id, false) + } + } + }', 'ContractConditions("MainCondition")'), + ('4','NewParameter','contract NewParameter { + data { + Name string + Value string + Conditions string + } + conditions { + var ret array + ValidateCondition($Conditions, $ecosystem_id) + ret = DBFind("parameters").Columns("id").Where("name=?", $Name).Limit(1) + if Len(ret) > 0 { + warning Sprintf( "Parameter %%s already exists", $Name) + } + } + action { + $result = DBInsert("parameters", "name,value,conditions", $Name, $Value, $Conditions ) + } + }', 'ContractConditions("MainCondition")'), + ('5','EditParameter','contract EditParameter { + data { + Id int + Value string + Conditions string + } + func onlyConditions() bool { + return $Conditions && !$Value + } + conditions { + RowConditions("parameters", $Id, onlyConditions()) + ValidateCondition($Conditions, $ecosystem_id) + } + action { + DBUpdate("parameters", $Id, "value,conditions", $Value, $Conditions ) + } + }', 'ContractConditions("MainCondition")'), + ('6', 'NewMenu','contract NewMenu { + data { + Name string + Value string + Title string "optional" + Conditions string + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + + var row map + row = DBRow("menu").Columns("id").Where("name = ?", $Name) + + if row { + warning Sprintf( "Menu %%s already exists", $Name) + } + } + action { + DBInsert("menu", "name,value,title,conditions", $Name, $Value, $Title, $Conditions ) + } + func price() int { + return SysParamInt("menu_price") + } + }', 'ContractConditions("MainCondition")'), + ('7','EditMenu','contract EditMenu { + data { + Id int + Value string "optional" + Title string "optional" + Conditions string "optional" + } + + func onlyConditions() bool { + return $Conditions && !$Value && !$Title + } + conditions { + RowConditions("menu", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + } + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Title { + pars[Len(pars)] = "title" + vals[Len(vals)] = $Title + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if Len(vals) > 0 { + DBUpdate("menu", $Id, Join(pars, ","), vals...) 
+ } + } + }', 'ContractConditions("MainCondition")'), + ('8','AppendMenu','contract AppendMenu { + data { + Id int + Value string + } + conditions { + RowConditions("menu", $Id, false) + } + action { + var row map + row = DBRow("menu").Columns("value").WhereId($Id) + DBUpdate("menu", $Id, "value", row["value"] + "\r\n" + $Value) + } + }', 'ContractConditions("MainCondition")'), + ('9','NewPage','contract NewPage { + data { + Name string + Value string + Menu string + Conditions string + ValidateCount int "optional" + ApplicationId int "optional" + ValidateMode int "optional" + } + func preparePageValidateCount(count int) int { + var min, max int + min = Int(EcosysParam("min_page_validate_count")) + max = Int(EcosysParam("max_page_validate_count")) + + if count < min { + count = min + } else { + if count > max { + count = max + } + } + + return count + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + + var row map + row = DBRow("pages").Columns("id").Where("name = ?", $Name) + + if row { + warning Sprintf( "Page %%s already exists", $Name) + } + + $ValidateCount = preparePageValidateCount($ValidateCount) + } + action { + DBInsert("pages", "name,value,menu,validate_count,conditions,app_id,validate_mode", + $Name, $Value, $Menu, $ValidateCount, $Conditions, $ApplicationId, $ValidateMode) + } + func price() int { + return SysParamInt("page_price") + } + }', 'ContractConditions("MainCondition")'), + ('10','EditPage','contract EditPage { + data { + Id int + Value string "optional" + Menu string "optional" + Conditions string "optional" + ValidateCount int "optional" + ValidateMode string "optional" + } + func onlyConditions() bool { + return $Conditions && !$Value && !$Menu + } + func preparePageValidateCount(count int) int { + var min, max int + min = Int(EcosysParam("min_page_validate_count")) + max = Int(EcosysParam("max_page_validate_count")) + + if count < min { + count = min + } else { + if count > max { + count = max + } + } + + return count + } + conditions { + RowConditions("pages", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + $ValidateCount = preparePageValidateCount($ValidateCount) + } + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Menu { + pars[Len(pars)] = "menu" + vals[Len(vals)] = $Menu + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if $ValidateCount { + pars[Len(pars)] = "validate_count" + vals[Len(vals)] = $ValidateCount + } + if $ValidateMode { + if $ValidateMode != "1" { + $ValidateMode = "0" + } + pars[Len(pars)] = "validate_mode" + vals[Len(vals)] = $ValidateMode + } + if Len(vals) > 0 { + DBUpdate("pages", $Id, Join(pars, ","), vals...) 
+ } + } + }', 'ContractConditions("MainCondition")'), + ('11','AppendPage','contract AppendPage { + data { + Id int + Value string + } + conditions { + RowConditions("pages", $Id, false) + } + action { + var row map + row = DBRow("pages").Columns("value").WhereId($Id) + DBUpdate("pages", $Id, "value", row["value"] + "\r\n" + $Value) + } + }', 'ContractConditions("MainCondition")'), + ('12','NewBlock','contract NewBlock { + data { + Name string + Value string + Conditions string + ApplicationId int "optional" + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + + var row map + row = DBRow("blocks").Columns("id").Where("name = ?", $Name) + + if row { + warning Sprintf( "Block %%s already exists", $Name) + } + } + action { + DBInsert("blocks", "name,value,conditions,app_id", $Name, $Value, $Conditions, $ApplicationId ) + } + }', 'ContractConditions("MainCondition")'), + ('13','EditBlock','contract EditBlock { + data { + Id int + Value string "optional" + Conditions string "optional" + } + + func onlyConditions() bool { + return $Conditions && !$Value + } + + conditions { + RowConditions("blocks", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + } + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if Len(vals) > 0 { + DBUpdate("blocks", $Id, Join(pars, ","), vals...) + } + } + }', 'ContractConditions("MainCondition")'), + ('14','NewTable','contract NewTable { + data { + Name string + Columns string + Permissions string + ApplicationId int "optional" + } + conditions { + TableConditions($Name, $Columns, $Permissions) + } + action { + CreateTable($Name, $Columns, $Permissions, $ApplicationId) + } + func rollback() { + RollbackTable($Name) + } + func price() int { + return SysParamInt("table_price") + } + }', 'ContractConditions("MainCondition")'), + ('15','EditTable','contract EditTable { + data { + Name string + Permissions string + } + conditions { + TableConditions($Name, "", $Permissions) + } + action { + PermTable($Name, $Permissions ) + } + }', 'ContractConditions("MainCondition")'), + ('16','NewColumn','contract NewColumn { + data { + TableName string + Name string + Type string + Permissions string + } + conditions { + ColumnCondition($TableName, $Name, $Type, $Permissions) + } + action { + CreateColumn($TableName, $Name, $Type, $Permissions) + } + }', 'ContractConditions("MainCondition")'), + ('17','EditColumn','contract EditColumn { + data { + TableName string + Name string + Permissions string + } + conditions { + ColumnCondition($TableName, $Name, "", $Permissions) + } + action { + PermColumn($TableName, $Name, $Permissions) + } + }', 'ContractConditions("MainCondition")'), + ('18','NewLang','contract NewLang { + data { + Name string + Trans string + AppID int + } + conditions { + EvalCondition("parameters", "changing_language", "value") + var row array + row = DBFind("languages").Columns("name").Where("name=? 
AND app_id=?", $Name, $AppID).Limit(1) + if Len(row) > 0 { + error Sprintf("The language resource %%s already exists", $Name) + } + } + action { + DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) + UpdateLang($AppID, $Name, $Trans) + } + }', 'ContractConditions("MainCondition")'), + ('19','EditLang','contract EditLang { + data { + Id int + Name string + Trans string + AppID int + } + conditions { + EvalCondition("parameters", "changing_language", "value") + } + action { + DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) + UpdateLang($AppID, $Name, $Trans) + } + }', 'ContractConditions("MainCondition")'), + ('20','Import','contract Import { + data { + Data string + } + conditions { + $list = JSONDecode($Data) + } + func ImportList(row array, cnt string) { + if !row { + return + } + var i int + while i < Len(row) { + var idata map + idata = row[i] + if(cnt == "pages"){ + $ret_page = DBFind("pages").Columns("id").Where("name=$", idata["Name"]) + $page_id = One($ret_page, "id") + if ($page_id != nil){ + idata["Id"] = Int($page_id) + CallContract("EditPage", idata) + } else { + CallContract("NewPage", idata) + } + } + if(cnt == "blocks"){ + $ret_block = DBFind("blocks").Columns("id").Where("name=$", idata["Name"]) + $block_id = One($ret_block, "id") + if ($block_id != nil){ + idata["Id"] = Int($block_id) + CallContract("EditBlock", idata) + } else { + CallContract("NewBlock", idata) + } + } + if(cnt == "menus"){ + $ret_menu = DBFind("menu").Columns("id,value").Where("name=$", idata["Name"]) + $menu_id = One($ret_menu, "id") + $menu_value = One($ret_menu, "value") + if ($menu_id != nil){ + idata["Id"] = Int($menu_id) + idata["Value"] = Str($menu_value) + "\n" + Str(idata["Value"]) + CallContract("EditMenu", idata) + } else { + CallContract("NewMenu", idata) + } + } + if(cnt == "parameters"){ + $ret_param = DBFind("parameters").Columns("id").Where("name=$", idata["Name"]) + $param_id = One($ret_param, "id") + if ($param_id != nil){ + idata["Id"] = Int($param_id) + CallContract("EditParameter", idata) + } else { + CallContract("NewParameter", idata) + } + } + if(cnt == "languages"){ + $ret_lang = DBFind("languages").Columns("id").Where("name=$", idata["Name"]) + $lang_id = One($ret_lang, "id") + if ($lang_id != nil){ + CallContract("EditLang", idata) + } else { + CallContract("NewLang", idata) + } + } + if(cnt == "contracts"){ + if IsObject(idata["Name"], $ecosystem_id){ + } else { + CallContract("NewContract", idata) + } + } + if(cnt == "tables"){ + $ret_table = DBFind("tables").Columns("id").Where("name=$", idata["Name"]) + $table_id = One($ret_table, "id") + if ($table_id != nil){ + } else { + CallContract("NewTable", idata) + } + } + i = i + 1 + } + } + func ImportData(row array) { + if !row { + return + } + var i int + while i < Len(row) { + var idata map + var list array + var tblname, columns string + idata = row[i] + i = i + 1 + tblname = idata["Table"] + columns = Join(idata["Columns"], ",") + list = idata["Data"] + if !list { + continue + } + var j int + while j < Len(list) { + var ilist array + ilist = list[j] + DBInsert(tblname, columns, ilist) + j=j+1 + } + } + } + action { + ImportList($list["pages"], "pages") + ImportList($list["blocks"], "blocks") + ImportList($list["menus"], "menus") + ImportList($list["parameters"], "parameters") + ImportList($list["languages"], "languages") + ImportList($list["contracts"], "contracts") + ImportList($list["tables"], "tables") + ImportData($list["data"]) + } + }', 'ContractConditions("MainCondition")'), + ('21', 
'NewCron','contract NewCron { + data { + Cron string + Contract string + Limit int "optional" + Till string "optional date" + Conditions string + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + ValidateCron($Cron) + } + action { + if !$Till { + $Till = "1970-01-01 00:00:00" + } + if !HasPrefix($Contract, "@") { + $Contract = "@" + Str($ecosystem_id) + $Contract + } + $result = DBInsert("cron", "owner,cron,contract,counter,till,conditions", + $key_id, $Cron, $Contract, $Limit, $Till, $Conditions) + UpdateCron($result) + } + }', 'ContractConditions("MainCondition")'), + ('22','EditCron','contract EditCron { + data { + Id int + Contract string + Cron string "optional" + Limit int "optional" + Till string "optional date" + Conditions string + } + conditions { + ConditionById("cron", true) + ValidateCron($Cron) + } + action { + if !$Till { + $Till = "1970-01-01 00:00:00" + } + if !HasPrefix($Contract, "@") { + $Contract = "@" + Str($ecosystem_id) + $Contract + } + DBUpdate("cron", $Id, "cron,contract,counter,till,conditions", + $Cron, $Contract, $Limit, $Till, $Conditions) + UpdateCron($Id) + } + }', 'ContractConditions("MainCondition")'), + ('23', 'UploadBinary', contract UploadBinary { + data { + Name string + Data bytes "file" + AppID int + DataMimeType string "optional" + MemberID int "optional" + } + conditions { + $Id = Int(DBFind("binaries").Columns("id").Where("app_id = ? AND member_id = ? AND name = ?", $AppID, $MemberID, $Name).One("id")) + } + action { + var hash string + hash = MD5($Data) + + if $DataMimeType == "" { + $DataMimeType = "application/octet-stream" + } + + if $Id != 0 { + DBUpdate("binaries", $Id, "data,hash,mime_type", $Data, hash, $DataMimeType) + } else { + $Id = DBInsert("binaries", "app_id,member_id,name,data,hash,mime_type", $AppID, $MemberID, $Name, $Data, hash, $DataMimeType) + } + + $result = $Id + } + }', 'ContractConditions("MainCondition")'); + ` diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 002792d05..6bde1d49e 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -270,21 +270,6 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { f["GetVDEList"] = GetVDEList vmExtendCost(vm, getCost) vmFuncCallsDB(vm, funcCallsDB) - case script.VMTypeVDEMaster: - f["HTTPRequest"] = HTTPRequest - f["GetMapKeys"] = GetMapKeys - f["SortedKeys"] = SortedKeys - f["Date"] = Date - f["HTTPPostJSON"] = HTTPPostJSON - f["ValidateCron"] = ValidateCron - f["UpdateCron"] = UpdateCron - f["CreateVDE"] = CreateVDE - f["DeleteVDE"] = DeleteVDE - f["StartVDE"] = StartVDE - f["StopVDE"] = StopVDE - f["GetVDEList"] = GetVDEList - vmExtendCost(vm, getCost) - vmFuncCallsDB(vm, funcCallsDB) case script.VMTypeSmart: f["GetBlock"] = GetBlock f["UpdateNodesBan"] = UpdateNodesBan From 11c130cd65daccf07a68ec3c4b7ca777b7ad6033 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:37:49 +0300 Subject: [PATCH 081/169] separate routes by vde --- packages/api/route.go | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/api/route.go b/packages/api/route.go index 56b547f61..9e94e3ec6 100644 --- a/packages/api/route.go +++ b/packages/api/route.go @@ -93,7 +93,6 @@ func Route(route *hr.Router) { get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) get(`block/:id`, ``, getBlockInfo) get(`maxblockid`, ``, getMaxBlockID) - get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) get(`systemparams`, `?names:string`, authWallet, systemParams) get(`ecosystems`, ``, authWallet, ecosystems) From 
11c0bcbfacea7438b98cf35e4c3be29a4dabbb36 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:38:36 +0300 Subject: [PATCH 082/169] separate vde migration to own package --- packages/migration/vde/vde.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde.go index b63cf858d..640338e93 100644 --- a/packages/migration/vde/vde.go +++ b/packages/migration/vde/vde.go @@ -1,4 +1,4 @@ -package migration +package vde var SchemaVDE = ` DROP TABLE IF EXISTS "%[1]d_vde_members"; From 2ac5c89ee1ec6811bf303b88ce846cec8d7bdfe1 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Tue, 8 May 2018 09:59:10 +0300 Subject: [PATCH 083/169] temporary commit --- packages/daylight/start.go | 7 ------- 1 file changed, 7 deletions(-) diff --git a/packages/daylight/start.go b/packages/daylight/start.go index aede916a8..98394511e 100644 --- a/packages/daylight/start.go +++ b/packages/daylight/start.go @@ -279,13 +279,6 @@ func Start() { } } - if conf.Config.IsSupportingVDE() { - if err := smart.LoadVDEContracts(nil, converter.Int64ToStr(consts.DefaultVDE)); err != nil { - log.WithFields(log.Fields{"type": consts.VMError, "error": err}).Fatal("on loading vde virtual mashine") - Exit(1) - } - } - if conf.Config.IsVDEMaster() { vdemanager.InitVDEManager() } From 50cb18452247367525b7bdb9796ba6b54dbb8946 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 10 May 2018 17:15:56 +0300 Subject: [PATCH 084/169] temporary commit --- packages/migration/vde/vde.go | 958 ------------------- packages/migration/vde/vde_data_contracts.go | 170 +--- 2 files changed, 19 insertions(+), 1109 deletions(-) delete mode 100644 packages/migration/vde/vde.go diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde.go deleted file mode 100644 index 640338e93..000000000 --- a/packages/migration/vde/vde.go +++ /dev/null @@ -1,958 +0,0 @@ -package vde - -var SchemaVDE = ` - DROP TABLE IF EXISTS "%[1]d_vde_members"; - CREATE TABLE "%[1]d_vde_members" ( - "id" bigint NOT NULL DEFAULT '0', - "member_name" varchar(255) NOT NULL DEFAULT '', - "image_id" bigint, - "member_info" jsonb - ); - ALTER TABLE ONLY "%[1]d_vde_members" ADD CONSTRAINT "%[1]d_vde_members_pkey" PRIMARY KEY ("id"); - - INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('%[2]d', 'founder'); - INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('4544233900443112470', 'guest'); - - DROP TABLE IF EXISTS "%[1]d_vde_languages"; CREATE TABLE "%[1]d_vde_languages" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(100) NOT NULL DEFAULT '', - "res" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_languages" ADD CONSTRAINT "%[1]d_vde_languages_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_languages_index_name" ON "%[1]d_vde_languages" (name); - - DROP TABLE IF EXISTS "%[1]d_vde_menu"; CREATE TABLE "%[1]d_vde_menu" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "title" character varying(255) NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_menu" ADD CONSTRAINT "%[1]d_vde_menu_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_menu_index_name" ON "%[1]d_vde_menu" (name); - - - INSERT INTO "%[1]d_vde_menu" ("id","name","title","value","conditions") VALUES('2','admin_menu','Admin menu','MenuItem( - Icon: "icon-screen-desktop", - Page: "interface", - Vde: "true", - Title: "Interface" -) -MenuItem( - Icon: 
"icon-docs", - Page: "tables", - Vde: "true", - Title: "Tables" -) -MenuItem( - Icon: "icon-briefcase", - Page: "contracts", - Vde: "true", - Title: "Smart Contracts" -) -MenuItem( - Icon: "icon-settings", - Page: "parameters", - Vde: "true", - Title: "Ecosystem parameters" -) -MenuItem( - Icon: "icon-globe", - Page: "languages", - Vde: "true", - Title: "Language resources" -) -MenuItem( - Icon: "icon-cloud-upload", - Page: "import", - Vde: "true", - Title: "Import" -) -MenuItem( - Icon: "icon-cloud-download", - Page: "export", - Vde: "true", - Title: "Export" -)','true'); - - DROP TABLE IF EXISTS "%[1]d_vde_pages"; CREATE TABLE "%[1]d_vde_pages" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "menu" character varying(255) NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '', - "validate_count" bigint NOT NULL DEFAULT '1', - "app_id" bigint NOT NULL DEFAULT '0', - "validate_mode" character(1) NOT NULL DEFAULT '0' - ); - ALTER TABLE ONLY "%[1]d_vde_pages" ADD CONSTRAINT "%[1]d_vde_pages_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_pages_index_name" ON "%[1]d_vde_pages" (name); - - INSERT INTO "%[1]d_vde_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); - - DROP TABLE IF EXISTS "%[1]d_vde_blocks"; CREATE TABLE "%[1]d_vde_blocks" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_blocks" ADD CONSTRAINT "%[1]d_vde_blocks_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_blocks_index_name" ON "%[1]d_vde_blocks" (name); - - DROP TABLE IF EXISTS "%[1]d_vde_signatures"; CREATE TABLE "%[1]d_vde_signatures" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(100) NOT NULL DEFAULT '', - "value" jsonb, - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_signatures" ADD CONSTRAINT "%[1]d_vde_signatures_pkey" PRIMARY KEY (name); - - CREATE TABLE "%[1]d_vde_contracts" ( - "id" bigint NOT NULL DEFAULT '0', - "name" text NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_contracts" ADD CONSTRAINT "%[1]d_vde_contracts_pkey" PRIMARY KEY (id); - - DROP TABLE IF EXISTS "%[1]d_vde_parameters"; - CREATE TABLE "%[1]d_vde_parameters" ( - "id" bigint NOT NULL DEFAULT '0', - "name" varchar(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_parameters" ADD CONSTRAINT "%[1]d_vde_parameters_pkey" PRIMARY KEY ("id"); - CREATE INDEX "%[1]d_vde_parameters_index_name" ON "%[1]d_vde_parameters" (name); - - INSERT INTO "%[1]d_vde_parameters" ("id","name", "value", "conditions") VALUES - ('1','founder_account', '%[2]d', 'ContractConditions("MainCondition")'), - ('2','new_table', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('3','new_column', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('4','changing_tables', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('5','changing_language', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('6','changing_signature', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('7','changing_page', 
'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('8','changing_menu', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('9','changing_contracts', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('10','stylesheet', 'body { - /* You can define your custom styles here or create custom CSS rules */ - }', 'ContractConditions("MainCondition")'), - ('11','changing_blocks', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'); - - DROP TABLE IF EXISTS "%[1]d_vde_cron"; - CREATE TABLE "%[1]d_vde_cron" ( - "id" bigint NOT NULL DEFAULT '0', - "owner" bigint NOT NULL DEFAULT '0', - "cron" varchar(255) NOT NULL DEFAULT '', - "contract" varchar(255) NOT NULL DEFAULT '', - "counter" bigint NOT NULL DEFAULT '0', - "till" timestamp NOT NULL DEFAULT timestamp '1970-01-01 00:00:00', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_cron" ADD CONSTRAINT "%[1]d_vde_cron_pkey" PRIMARY KEY ("id"); - - DROP TABLE IF EXISTS "%[1]d_vde_binaries"; - CREATE TABLE "%[1]d_vde_binaries" ( - "id" bigint NOT NULL DEFAULT '0', - "app_id" bigint NOT NULL DEFAULT '1', - "member_id" bigint NOT NULL DEFAULT '0', - "name" varchar(255) NOT NULL DEFAULT '', - "data" bytea NOT NULL DEFAULT '', - "hash" varchar(32) NOT NULL DEFAULT '', - "mime_type" varchar(255) NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_binaries" ADD CONSTRAINT "%[1]d_vde_binaries_pkey" PRIMARY KEY (id); - CREATE UNIQUE INDEX "%[1]d_vde_binaries_index_app_id_member_id_name" ON "%[1]d_vde_binaries" (app_id, member_id, name); - - CREATE TABLE "%[1]d_vde_tables" ( - "id" bigint NOT NULL DEFAULT '0', - "name" varchar(100) UNIQUE NOT NULL DEFAULT '', - "permissions" jsonb, - "columns" jsonb, - "conditions" text NOT NULL DEFAULT '', - "app_id" bigint NOT NULL DEFAULT '1' - ); - ALTER TABLE ONLY "%[1]d_vde_tables" ADD CONSTRAINT "%[1]d_vde_tables_pkey" PRIMARY KEY ("id"); - CREATE INDEX "%[1]d_vde_tables_index_name" ON "%[1]d_vde_tables" (name); - - INSERT INTO "%[1]d_vde_tables" ("id", "name", "permissions","columns", "conditions") VALUES ('1', 'contracts', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "false", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), - ('2', 'languages', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{ "name": "ContractConditions(\"MainCondition\")", - "res": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), - ('3', 'menu', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('4', 'pages', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - 
"menu": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")", - "validate_count": "ContractConditions(\"MainCondition\")", - "validate_mode": "ContractConditions(\"MainCondition\")", - "app_id": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('5', 'blocks', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('6', 'signatures', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('7', 'cron', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"owner": "ContractConditions(\"MainCondition\")", - "cron": "ContractConditions(\"MainCondition\")", - "contract": "ContractConditions(\"MainCondition\")", - "counter": "ContractConditions(\"MainCondition\")", - "till": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractConditions("MainCondition")'), - ('8', 'binaries', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"app_id": "ContractConditions(\"MainCondition\")", - "member_id": "ContractConditions(\"MainCondition\")", - "name": "ContractConditions(\"MainCondition\")", - "data": "ContractConditions(\"MainCondition\")", - "hash": "ContractConditions(\"MainCondition\")", - "mime_type": "ContractConditions(\"MainCondition\")"}', - 'ContractConditions("MainCondition")'); - - INSERT INTO "%[1]d_vde_contracts" ("id", "name", "value", "conditions") VALUES - ('1','MainCondition','contract MainCondition { - conditions { - if EcosysParam("founder_account")!=$key_id - { - warning "Sorry, you do not have access to this action." 
- } - } - }', 'ContractConditions("MainCondition")'), - ('2','NewContract','contract NewContract { - data { - Value string - Conditions string - Wallet string "optional" - TokenEcosystem int "optional" - ApplicationId int "optional" - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - $walletContract = $key_id - if $Wallet { - $walletContract = AddressToId($Wallet) - if $walletContract == 0 { - error Sprintf("wrong wallet %%s", $Wallet) - } - } - var list array - list = ContractsList($Value) - - if Len(list) == 0 { - error "must be the name" - } - - var i int - while i < Len(list) { - if IsObject(list[i], $ecosystem_id) { - warning Sprintf("Contract or function %%s exists", list[i] ) - } - i = i + 1 - } - - $contract_name = list[0] - if !$TokenEcosystem { - $TokenEcosystem = 1 - } else { - if !SysFuel($TokenEcosystem) { - warning Sprintf("Ecosystem %%d is not system", $TokenEcosystem ) - } - } - } - action { - var root, id int - root = CompileContract($Value, $ecosystem_id, $walletContract, $TokenEcosystem) - id = DBInsert("contracts", "name,value,conditions, wallet_id, token_id,app_id", - $contract_name, $Value, $Conditions, $walletContract, $TokenEcosystem, $ApplicationId) - FlushContract(root, id, false) - $result = id - } - func rollback() { - var list array - list = ContractsList($Value) - var i int - while i < Len(list) { - RollbackContract(list[i]) - i = i + 1 - } - } - func price() int { - return SysParamInt("contract_price") - } - }', 'ContractConditions("MainCondition")'), - ('3','EditContract','contract EditContract { - data { - Id int - Value string "optional" - Conditions string "optional" - } - - func onlyConditions() bool { - return $Conditions && !$Value - } - conditions { - RowConditions("contracts", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - - var row array - row = DBFind("contracts").Columns("id,value,conditions").WhereId($Id) - if !Len(row) { - error Sprintf("Contract %%d does not exist", $Id) - } - $cur = row[0] - if $Value { - var list, curlist array - list = ContractsList($Value) - curlist = ContractsList($cur["value"]) - if Len(list) != Len(curlist) { - error "Contracts cannot be removed or inserted" - } - var i int - while i < Len(list) { - var j int - var ok bool - while j < Len(curlist) { - if curlist[j] == list[i] { - ok = true - break - } - j = j + 1 - } - if !ok { - error "Contracts or functions names cannot be changed" - } - i = i + 1 - } - } - } - action { - var root int - var pars, vals array - - if $Value { - root = CompileContract($Value, $ecosystem_id, 0, 0) - pars[0] = "value" - vals[0] = $Value - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if Len(vals) > 0 { - DBUpdate("contracts", $Id, Join(pars, ","), vals...) 
- } - if $Value { - FlushContract(root, $Id, false) - } - } - }', 'ContractConditions("MainCondition")'), - ('4','NewParameter','contract NewParameter { - data { - Name string - Value string - Conditions string - } - conditions { - var ret array - ValidateCondition($Conditions, $ecosystem_id) - ret = DBFind("parameters").Columns("id").Where("name=?", $Name).Limit(1) - if Len(ret) > 0 { - warning Sprintf( "Parameter %%s already exists", $Name) - } - } - action { - $result = DBInsert("parameters", "name,value,conditions", $Name, $Value, $Conditions ) - } - }', 'ContractConditions("MainCondition")'), - ('5','EditParameter','contract EditParameter { - data { - Id int - Value string - Conditions string - } - func onlyConditions() bool { - return $Conditions && !$Value - } - conditions { - RowConditions("parameters", $Id, onlyConditions()) - ValidateCondition($Conditions, $ecosystem_id) - } - action { - DBUpdate("parameters", $Id, "value,conditions", $Value, $Conditions ) - } - }', 'ContractConditions("MainCondition")'), - ('6', 'NewMenu','contract NewMenu { - data { - Name string - Value string - Title string "optional" - Conditions string - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - - var row map - row = DBRow("menu").Columns("id").Where("name = ?", $Name) - - if row { - warning Sprintf( "Menu %%s already exists", $Name) - } - } - action { - DBInsert("menu", "name,value,title,conditions", $Name, $Value, $Title, $Conditions ) - } - func price() int { - return SysParamInt("menu_price") - } - }', 'ContractConditions("MainCondition")'), - ('7','EditMenu','contract EditMenu { - data { - Id int - Value string "optional" - Title string "optional" - Conditions string "optional" - } - - func onlyConditions() bool { - return $Conditions && !$Value && !$Title - } - conditions { - RowConditions("menu", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - } - action { - var pars, vals array - if $Value { - pars[0] = "value" - vals[0] = $Value - } - if $Title { - pars[Len(pars)] = "title" - vals[Len(vals)] = $Title - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if Len(vals) > 0 { - DBUpdate("menu", $Id, Join(pars, ","), vals...) 
- } - } - }', 'ContractConditions("MainCondition")'), - ('8','AppendMenu','contract AppendMenu { - data { - Id int - Value string - } - conditions { - RowConditions("menu", $Id, false) - } - action { - var row map - row = DBRow("menu").Columns("value").WhereId($Id) - DBUpdate("menu", $Id, "value", row["value"] + "\r\n" + $Value) - } - }', 'ContractConditions("MainCondition")'), - ('9','NewPage','contract NewPage { - data { - Name string - Value string - Menu string - Conditions string - ValidateCount int "optional" - ApplicationId int "optional" - ValidateMode int "optional" - } - func preparePageValidateCount(count int) int { - var min, max int - min = Int(EcosysParam("min_page_validate_count")) - max = Int(EcosysParam("max_page_validate_count")) - - if count < min { - count = min - } else { - if count > max { - count = max - } - } - - return count - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - - var row map - row = DBRow("pages").Columns("id").Where("name = ?", $Name) - - if row { - warning Sprintf( "Page %%s already exists", $Name) - } - - $ValidateCount = preparePageValidateCount($ValidateCount) - } - action { - DBInsert("pages", "name,value,menu,validate_count,conditions,app_id,validate_mode", - $Name, $Value, $Menu, $ValidateCount, $Conditions, $ApplicationId, $ValidateMode) - } - func price() int { - return SysParamInt("page_price") - } - }', 'ContractConditions("MainCondition")'), - ('10','EditPage','contract EditPage { - data { - Id int - Value string "optional" - Menu string "optional" - Conditions string "optional" - ValidateCount int "optional" - ValidateMode string "optional" - } - func onlyConditions() bool { - return $Conditions && !$Value && !$Menu - } - func preparePageValidateCount(count int) int { - var min, max int - min = Int(EcosysParam("min_page_validate_count")) - max = Int(EcosysParam("max_page_validate_count")) - - if count < min { - count = min - } else { - if count > max { - count = max - } - } - - return count - } - conditions { - RowConditions("pages", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - $ValidateCount = preparePageValidateCount($ValidateCount) - } - action { - var pars, vals array - if $Value { - pars[0] = "value" - vals[0] = $Value - } - if $Menu { - pars[Len(pars)] = "menu" - vals[Len(vals)] = $Menu - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if $ValidateCount { - pars[Len(pars)] = "validate_count" - vals[Len(vals)] = $ValidateCount - } - if $ValidateMode { - if $ValidateMode != "1" { - $ValidateMode = "0" - } - pars[Len(pars)] = "validate_mode" - vals[Len(vals)] = $ValidateMode - } - if Len(vals) > 0 { - DBUpdate("pages", $Id, Join(pars, ","), vals...) 
- } - } - }', 'ContractConditions("MainCondition")'), - ('11','AppendPage','contract AppendPage { - data { - Id int - Value string - } - conditions { - RowConditions("pages", $Id, false) - } - action { - var row map - row = DBRow("pages").Columns("value").WhereId($Id) - DBUpdate("pages", $Id, "value", row["value"] + "\r\n" + $Value) - } - }', 'ContractConditions("MainCondition")'), - ('12','NewBlock','contract NewBlock { - data { - Name string - Value string - Conditions string - ApplicationId int "optional" - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - - var row map - row = DBRow("blocks").Columns("id").Where("name = ?", $Name) - - if row { - warning Sprintf( "Block %%s already exists", $Name) - } - } - action { - DBInsert("blocks", "name,value,conditions,app_id", $Name, $Value, $Conditions, $ApplicationId ) - } - }', 'ContractConditions("MainCondition")'), - ('13','EditBlock','contract EditBlock { - data { - Id int - Value string "optional" - Conditions string "optional" - } - - func onlyConditions() bool { - return $Conditions && !$Value - } - - conditions { - RowConditions("blocks", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - } - action { - var pars, vals array - if $Value { - pars[0] = "value" - vals[0] = $Value - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if Len(vals) > 0 { - DBUpdate("blocks", $Id, Join(pars, ","), vals...) - } - } - }', 'ContractConditions("MainCondition")'), - ('14','NewTable','contract NewTable { - data { - Name string - Columns string - Permissions string - ApplicationId int "optional" - } - conditions { - TableConditions($Name, $Columns, $Permissions) - } - action { - CreateTable($Name, $Columns, $Permissions, $ApplicationId) - } - func rollback() { - RollbackTable($Name) - } - func price() int { - return SysParamInt("table_price") - } - }', 'ContractConditions("MainCondition")'), - ('15','EditTable','contract EditTable { - data { - Name string - Permissions string - } - conditions { - TableConditions($Name, "", $Permissions) - } - action { - PermTable($Name, $Permissions ) - } - }', 'ContractConditions("MainCondition")'), - ('16','NewColumn','contract NewColumn { - data { - TableName string - Name string - Type string - Permissions string - } - conditions { - ColumnCondition($TableName, $Name, $Type, $Permissions) - } - action { - CreateColumn($TableName, $Name, $Type, $Permissions) - } - }', 'ContractConditions("MainCondition")'), - ('17','EditColumn','contract EditColumn { - data { - TableName string - Name string - Permissions string - } - conditions { - ColumnCondition($TableName, $Name, "", $Permissions) - } - action { - PermColumn($TableName, $Name, $Permissions) - } - }', 'ContractConditions("MainCondition")'), - ('18','NewLang','contract NewLang { - data { - Name string - Trans string - AppID int - } - conditions { - EvalCondition("parameters", "changing_language", "value") - var row array - row = DBFind("languages").Columns("name").Where("name=? 
AND app_id=?", $Name, $AppID).Limit(1) - if Len(row) > 0 { - error Sprintf("The language resource %%s already exists", $Name) - } - } - action { - DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) - } - }', 'ContractConditions("MainCondition")'), - ('19','EditLang','contract EditLang { - data { - Id int - Name string - Trans string - AppID int - } - conditions { - EvalCondition("parameters", "changing_language", "value") - } - action { - DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) - } - }', 'ContractConditions("MainCondition")'), - ('20','Import','contract Import { - data { - Data string - } - conditions { - $list = JSONDecode($Data) - } - func ImportList(row array, cnt string) { - if !row { - return - } - var i int - while i < Len(row) { - var idata map - idata = row[i] - if(cnt == "pages"){ - $ret_page = DBFind("pages").Columns("id").Where("name=$", idata["Name"]) - $page_id = One($ret_page, "id") - if ($page_id != nil){ - idata["Id"] = Int($page_id) - CallContract("EditPage", idata) - } else { - CallContract("NewPage", idata) - } - } - if(cnt == "blocks"){ - $ret_block = DBFind("blocks").Columns("id").Where("name=$", idata["Name"]) - $block_id = One($ret_block, "id") - if ($block_id != nil){ - idata["Id"] = Int($block_id) - CallContract("EditBlock", idata) - } else { - CallContract("NewBlock", idata) - } - } - if(cnt == "menus"){ - $ret_menu = DBFind("menu").Columns("id,value").Where("name=$", idata["Name"]) - $menu_id = One($ret_menu, "id") - $menu_value = One($ret_menu, "value") - if ($menu_id != nil){ - idata["Id"] = Int($menu_id) - idata["Value"] = Str($menu_value) + "\n" + Str(idata["Value"]) - CallContract("EditMenu", idata) - } else { - CallContract("NewMenu", idata) - } - } - if(cnt == "parameters"){ - $ret_param = DBFind("parameters").Columns("id").Where("name=$", idata["Name"]) - $param_id = One($ret_param, "id") - if ($param_id != nil){ - idata["Id"] = Int($param_id) - CallContract("EditParameter", idata) - } else { - CallContract("NewParameter", idata) - } - } - if(cnt == "languages"){ - $ret_lang = DBFind("languages").Columns("id").Where("name=$", idata["Name"]) - $lang_id = One($ret_lang, "id") - if ($lang_id != nil){ - CallContract("EditLang", idata) - } else { - CallContract("NewLang", idata) - } - } - if(cnt == "contracts"){ - if IsObject(idata["Name"], $ecosystem_id){ - } else { - CallContract("NewContract", idata) - } - } - if(cnt == "tables"){ - $ret_table = DBFind("tables").Columns("id").Where("name=$", idata["Name"]) - $table_id = One($ret_table, "id") - if ($table_id != nil){ - } else { - CallContract("NewTable", idata) - } - } - i = i + 1 - } - } - func ImportData(row array) { - if !row { - return - } - var i int - while i < Len(row) { - var idata map - var list array - var tblname, columns string - idata = row[i] - i = i + 1 - tblname = idata["Table"] - columns = Join(idata["Columns"], ",") - list = idata["Data"] - if !list { - continue - } - var j int - while j < Len(list) { - var ilist array - ilist = list[j] - DBInsert(tblname, columns, ilist) - j=j+1 - } - } - } - action { - ImportList($list["pages"], "pages") - ImportList($list["blocks"], "blocks") - ImportList($list["menus"], "menus") - ImportList($list["parameters"], "parameters") - ImportList($list["languages"], "languages") - ImportList($list["contracts"], "contracts") - ImportList($list["tables"], "tables") - ImportData($list["data"]) - } - }', 'ContractConditions("MainCondition")'), - ('21', 
'NewCron','contract NewCron { - data { - Cron string - Contract string - Limit int "optional" - Till string "optional date" - Conditions string - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - ValidateCron($Cron) - } - action { - if !$Till { - $Till = "1970-01-01 00:00:00" - } - if !HasPrefix($Contract, "@") { - $Contract = "@" + Str($ecosystem_id) + $Contract - } - $result = DBInsert("cron", "owner,cron,contract,counter,till,conditions", - $key_id, $Cron, $Contract, $Limit, $Till, $Conditions) - UpdateCron($result) - } - }', 'ContractConditions("MainCondition")'), - ('22','EditCron','contract EditCron { - data { - Id int - Contract string - Cron string "optional" - Limit int "optional" - Till string "optional date" - Conditions string - } - conditions { - ConditionById("cron", true) - ValidateCron($Cron) - } - action { - if !$Till { - $Till = "1970-01-01 00:00:00" - } - if !HasPrefix($Contract, "@") { - $Contract = "@" + Str($ecosystem_id) + $Contract - } - DBUpdate("cron", $Id, "cron,contract,counter,till,conditions", - $Cron, $Contract, $Limit, $Till, $Conditions) - UpdateCron($Id) - } - }', 'ContractConditions("MainCondition")'), - ('23', 'UploadBinary', contract UploadBinary { - data { - Name string - Data bytes "file" - AppID int - DataMimeType string "optional" - MemberID int "optional" - } - conditions { - $Id = Int(DBFind("binaries").Columns("id").Where("app_id = ? AND member_id = ? AND name = ?", $AppID, $MemberID, $Name).One("id")) - } - action { - var hash string - hash = MD5($Data) - - if $DataMimeType == "" { - $DataMimeType = "application/octet-stream" - } - - if $Id != 0 { - DBUpdate("binaries", $Id, "data,hash,mime_type", $Data, hash, $DataMimeType) - } else { - $Id = DBInsert("binaries", "app_id,member_id,name,data,hash,mime_type", $AppID, $MemberID, $Name, $Data, hash, $DataMimeType) - } - - $result = $Id - } - }', 'ContractConditions("MainCondition")'); - ` diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index 4297f287a..4e5ca29ab 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -483,113 +483,38 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c PermColumn($TableName, $Name, $Permissions) } }', 'ContractConditions("MainCondition")'), - ('18','NewLang', 'contract NewLang { + ('18','NewLang','contract NewLang { data { - ApplicationId int "optional" - Name string - Trans string "optional" - Value array "optional" - IdLanguage array "optional" + Name string + Trans string + AppID int } - conditions { - if $ApplicationId == 0 { - warning "Application id cannot equal 0" - } - - if DBFind("languages").Columns("id").Where("name = ?", $Name).One("id") { - warning Sprintf( "Language resource %%s already exists", $Name) - } - - var j int - while j < Len($IdLanguage) { - if $IdLanguage[j] == "" { - info("Locale empty") - } - if $Value[j] == "" { - info("Value empty") - } - j = j + 1 - } EvalCondition("parameters", "changing_language", "value") + var row array + row = DBFind("languages").Columns("name").Where("name=? 
AND app_id=?", $Name, $AppID).Limit(1) + if Len(row) > 0 { + error Sprintf("The language resource %%s already exists", $Name) + } } - action { - var i,len,lenshar int - var res,langarr string - len = Len($IdLanguage) - lenshar = Len($Value) - while i < len { - if i + 1 == len { - res = res + Sprintf("%%q: %%q",$IdLanguage[i],$Value[i]) - } else { - res = res + Sprintf("%%q: %%q,",$IdLanguage[i],$Value[i]) - } - i = i + 1 - } - if len > 0 { - langarr = Sprintf("{"+"%%v"+"}", res) - $Trans = langarr - } - $result = CreateLanguage($Name, $Trans, $ApplicationId) + DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) + UpdateLang($AppID, $Name, $Trans) } }', 'ContractConditions("MainCondition")'), ('19','EditLang','contract EditLang { data { - Id int - Name string "optional" - ApplicationId int "optional" - Trans string "optional" - Value array "optional" - IdLanguage array "optional" + Id int + Name string + Trans string + AppID int } - conditions { - var j int - while j < Len($IdLanguage) { - if ($IdLanguage[j] == ""){ - info("Locale empty") - } - if ($Value[j] == ""){ - info("Value empty") - } - j = j + 1 - } EvalCondition("parameters", "changing_language", "value") } - action { - var i,len int - var res,langarr string - len = Len($IdLanguage) - while i < len { - if (i + 1 == len){ - res = res + Sprintf("%%q: %%q", $IdLanguage[i],$Value[i]) - } - else { - res = res + Sprintf("%%q: %%q, ", $IdLanguage[i],$Value[i]) - } - i = i + 1 - } - - $row = DBFind("languages").Columns("name,app_id").WhereId($Id).Row() - if !$row{ - warning "Language not found" - } - - if $ApplicationId == 0 { - $ApplicationId = Int($row["app_id"]) - } - if $Name == "" { - $Name = $row["name"] - } - - if (len > 0){ - langarr = Sprintf("{"+"%%v"+"}", res) - $Trans = langarr - - } - EditLanguage($Id, $Name, $Trans, $ApplicationId) + DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) + UpdateLang($AppID, $Name, $Trans) } }', 'ContractConditions("MainCondition")'), ('20','Import','contract Import { @@ -794,7 +719,6 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c NewPubkey string } conditions { - Println($NewPubkey) $newId = PubToID($NewPubkey) if $newId == 0 { error "Wrong pubkey" @@ -802,66 +726,10 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c if DBFind("keys").Columns("id").WhereId($newId).One("id") != nil { error "User already exists" } - } - action { - DBInsert("keys", "id", $newId) - SetPubKey($newId, StringToBytes($NewPubkey)) - } - }', 'ContractConditions("MainCondition")'), - ('25', 'NewVDE', 'contract NewVDE { - data { - VDEName string - DBUser string - DBPassword string - VDEAPIPort int - } - - conditions { - } - - action { - CreateVDE($VDEName, $DBUser, $DBPassword, $VDEAPIPort) - } - }', 'ContractConditions("MainCondition")'), - ('26', 'ListVDE', 'contract ListVDE { - data {} - - conditions {} - - action { - GetVDEList() - } - }', 'ContractConditions("MainCondition")'), - ('27', 'RunVDE', 'contract RunVDE { - data { - VDEName string - } - - conditions { - } - - action { - StartVDE($VDEName) - } - }', 'ContractConditions("MainCondition")'), - ('28', 'StopVDE', 'contract StopVDE { - data { - VDEName string - } - conditions { + $amount = Money(1000) * Money(1000000000000000000) } - action { - StopVDEProcess($VDEName) - } - }', 'ContractConditions("MainCondition")'), - ('29', 'RemoveVDE', 'contract RemoveVDE { - data { - VDEName string - } - conditions {} - action{ - DeleteVDE($VDEName) + DBInsert("keys", "id, pub", 
$newId, $NewPubKey) } }', 'ContractConditions("MainCondition")');` From 46ac90d1959a68fa73d54963ad43292974db719a Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 10 May 2018 22:37:36 +0300 Subject: [PATCH 085/169] fix login --- packages/api/login.go | 5 +- packages/migration/vde/vde_data_contracts.go | 111 +++++++++++++++---- 2 files changed, 95 insertions(+), 21 deletions(-) diff --git a/packages/api/login.go b/packages/api/login.go index a6548fcfb..248802a49 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -127,10 +127,11 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En pubkey = data.params[`pubkey`].([]byte) hexPubKey := hex.EncodeToString(pubkey) - params := converter.EncodeLength(int64(len(hexPubKey))) - params = append(params, hexPubKey...) + params := make([]byte, 0) + params = append(append(params, converter.EncodeLength(int64(len(hexPubKey)))...), hexPubKey...) contract := smart.GetContract("NewUser", 1) + info := contract.Block.Info.(*script.ContractInfo) sc := tx.SmartContract{ Header: tx.Header{ diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index 4e5ca29ab..ea83e591c 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -483,38 +483,113 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c PermColumn($TableName, $Name, $Permissions) } }', 'ContractConditions("MainCondition")'), - ('18','NewLang','contract NewLang { + ('18','NewLang', 'contract NewLang { data { - Name string - Trans string - AppID int + ApplicationId int "optional" + Name string + Trans string "optional" + Value array "optional" + IdLanguage array "optional" } + conditions { - EvalCondition("parameters", "changing_language", "value") - var row array - row = DBFind("languages").Columns("name").Where("name=? 
AND app_id=?", $Name, $AppID).Limit(1) - if Len(row) > 0 { - error Sprintf("The language resource %%s already exists", $Name) + if $ApplicationId == 0 { + warning "Application id cannot equal 0" + } + + if DBFind("languages").Columns("id").Where("name = ?", $Name).One("id") { + warning Sprintf( "Language resource %%s already exists", $Name) } + + var j int + while j < Len($IdLanguage) { + if $IdLanguage[j] == "" { + info("Locale empty") + } + if $Value[j] == "" { + info("Value empty") + } + j = j + 1 + } + EvalCondition("parameters", "changing_language", "value") } + action { - DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) + var i,len,lenshar int + var res,langarr string + len = Len($IdLanguage) + lenshar = Len($Value) + while i < len { + if i + 1 == len { + res = res + Sprintf("%%q: %%q",$IdLanguage[i],$Value[i]) + } else { + res = res + Sprintf("%%q: %%q,",$IdLanguage[i],$Value[i]) + } + i = i + 1 + } + if len > 0 { + langarr = Sprintf("{"+"%%v"+"}", res) + $Trans = langarr + } + $result = CreateLanguage($Name, $Trans, $ApplicationId) } }', 'ContractConditions("MainCondition")'), ('19','EditLang','contract EditLang { data { - Id int - Name string - Trans string - AppID int + Id int + Name string "optional" + ApplicationId int "optional" + Trans string "optional" + Value array "optional" + IdLanguage array "optional" } + conditions { + var j int + while j < Len($IdLanguage) { + if ($IdLanguage[j] == ""){ + info("Locale empty") + } + if ($Value[j] == ""){ + info("Value empty") + } + j = j + 1 + } EvalCondition("parameters", "changing_language", "value") } + action { - DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) + var i,len int + var res,langarr string + len = Len($IdLanguage) + while i < len { + if (i + 1 == len){ + res = res + Sprintf("%%q: %%q", $IdLanguage[i],$Value[i]) + } + else { + res = res + Sprintf("%%q: %%q, ", $IdLanguage[i],$Value[i]) + } + i = i + 1 + } + + $row = DBFind("languages").Columns("name,app_id").WhereId($Id).Row() + if !$row{ + warning "Language not found" + } + + if $ApplicationId == 0 { + $ApplicationId = Int($row["app_id"]) + } + if $Name == "" { + $Name = $row["name"] + } + + if (len > 0){ + langarr = Sprintf("{"+"%%v"+"}", res) + $Trans = langarr + + } + EditLanguage($Id, $Name, $Trans, $ApplicationId) } }', 'ContractConditions("MainCondition")'), ('20','Import','contract Import { @@ -726,8 +801,6 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c if DBFind("keys").Columns("id").WhereId($newId).One("id") != nil { error "User already exists" } - - $amount = Money(1000) * Money(1000000000000000000) } action { DBInsert("keys", "id, pub", $newId, $NewPubKey) From 18d049710ef2befbad1352a15dceb4eb72141e3d Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 14 May 2018 09:18:14 +0300 Subject: [PATCH 086/169] temporary commit --- packages/api/login.go | 1 - packages/migration/vde/vde_data_contracts.go | 41 ++++++++++++++++++++ 2 files changed, 41 insertions(+), 1 deletion(-) diff --git a/packages/api/login.go b/packages/api/login.go index 248802a49..180e1dfe8 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -169,7 +169,6 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En logger.WithFields(log.Fields{"type": consts.MarshallingError, "error": err}).Error("marshalling smart contract to msgpack") return errorAPI(w, err, http.StatusInternalServerError) } - ret, err := 
VDEContract(serializedContract, data) if err != nil { return errorAPI(w, err, http.StatusInternalServerError) diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index ea83e591c..755e626c7 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -794,6 +794,7 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c NewPubkey string } conditions { + Println($NewPubkey) $newId = PubToID($NewPubkey) if $newId == 0 { error "Wrong pubkey" @@ -805,4 +806,44 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c action { DBInsert("keys", "id, pub", $newId, $NewPubKey) } + }', 'ContractConditions("MainCondition")'), + ('25', 'NewVDE', 'contract NewVDE { + data { + VDEName string + DBUser string + DBPassword string + VDEAPIPort int + } + + conditions { + } + + action { + CreateVDE($VDEName, $DBUser, $DBPassword, $VDEAPIPort) + } + }', 'ContractConditions("MainCondition")'), + ('26', 'ListVDE', 'contract ListVDE { + data { + VDEName string + } + + conditions { + + } + + action { + GetVDEList($VDEName) + } + }', 'ContractConditions("MainCondition")'), + ('27', 'RunVDE', 'contract RunVDE { + data { + VDEName string + } + + conditions { + } + + action { + StartVDE($VDEName) + } }', 'ContractConditions("MainCondition")');` From 33cefecbc3f38dd0464726839cf72d4ade0cff04 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 12:40:33 +0300 Subject: [PATCH 087/169] move changes --- packages/conf/conf.go | 10 +- packages/migration/vde/vde.go | 958 ++++++++++++++++++++++++++++++++++ packages/smart/funcs.go | 15 + packages/vdemanager/config.go | 22 +- 4 files changed, 989 insertions(+), 16 deletions(-) create mode 100644 packages/migration/vde/vde.go diff --git a/packages/conf/conf.go b/packages/conf/conf.go index b91be9b38..59887d12e 100644 --- a/packages/conf/conf.go +++ b/packages/conf/conf.go @@ -242,26 +242,26 @@ func GetNodesAddr() []string { } // IsPrivateBlockchain check running mode -func (c GlobalConfig) IsPrivateBlockchain() bool { +func (c *GlobalConfig) IsPrivateBlockchain() bool { return RunMode(c.RunningMode).IsPrivateBlockchain() } // IsPublicBlockchain check running mode -func (c GlobalConfig) IsPublicBlockchain() bool { +func (c *GlobalConfig) IsPublicBlockchain() bool { return RunMode(c.RunningMode).IsPublicBlockchain() } // IsVDE check running mode -func (c GlobalConfig) IsVDE() bool { +func (c *GlobalConfig) IsVDE() bool { return RunMode(c.RunningMode).IsVDE() } // IsVDEMaster check running mode -func (c GlobalConfig) IsVDEMaster() bool { +func (c *GlobalConfig) IsVDEMaster() bool { return RunMode(c.RunningMode).IsVDEMaster() } // IsSupportingVDE check running mode -func (c GlobalConfig) IsSupportingVDE() bool { +func (c *GlobalConfig) IsSupportingVDE() bool { return RunMode(c.RunningMode).IsSupportingVDE() } diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde.go new file mode 100644 index 000000000..b63cf858d --- /dev/null +++ b/packages/migration/vde/vde.go @@ -0,0 +1,958 @@ +package migration + +var SchemaVDE = ` + DROP TABLE IF EXISTS "%[1]d_vde_members"; + CREATE TABLE "%[1]d_vde_members" ( + "id" bigint NOT NULL DEFAULT '0', + "member_name" varchar(255) NOT NULL DEFAULT '', + "image_id" bigint, + "member_info" jsonb + ); + ALTER TABLE ONLY "%[1]d_vde_members" ADD CONSTRAINT "%[1]d_vde_members_pkey" PRIMARY KEY ("id"); + + INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('%[2]d', 
'founder'); + INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('4544233900443112470', 'guest'); + + DROP TABLE IF EXISTS "%[1]d_vde_languages"; CREATE TABLE "%[1]d_vde_languages" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(100) NOT NULL DEFAULT '', + "res" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_languages" ADD CONSTRAINT "%[1]d_vde_languages_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_vde_languages_index_name" ON "%[1]d_vde_languages" (name); + + DROP TABLE IF EXISTS "%[1]d_vde_menu"; CREATE TABLE "%[1]d_vde_menu" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "title" character varying(255) NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_menu" ADD CONSTRAINT "%[1]d_vde_menu_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_vde_menu_index_name" ON "%[1]d_vde_menu" (name); + + + INSERT INTO "%[1]d_vde_menu" ("id","name","title","value","conditions") VALUES('2','admin_menu','Admin menu','MenuItem( + Icon: "icon-screen-desktop", + Page: "interface", + Vde: "true", + Title: "Interface" +) +MenuItem( + Icon: "icon-docs", + Page: "tables", + Vde: "true", + Title: "Tables" +) +MenuItem( + Icon: "icon-briefcase", + Page: "contracts", + Vde: "true", + Title: "Smart Contracts" +) +MenuItem( + Icon: "icon-settings", + Page: "parameters", + Vde: "true", + Title: "Ecosystem parameters" +) +MenuItem( + Icon: "icon-globe", + Page: "languages", + Vde: "true", + Title: "Language resources" +) +MenuItem( + Icon: "icon-cloud-upload", + Page: "import", + Vde: "true", + Title: "Import" +) +MenuItem( + Icon: "icon-cloud-download", + Page: "export", + Vde: "true", + Title: "Export" +)','true'); + + DROP TABLE IF EXISTS "%[1]d_vde_pages"; CREATE TABLE "%[1]d_vde_pages" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "menu" character varying(255) NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '', + "validate_count" bigint NOT NULL DEFAULT '1', + "app_id" bigint NOT NULL DEFAULT '0', + "validate_mode" character(1) NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_vde_pages" ADD CONSTRAINT "%[1]d_vde_pages_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_vde_pages_index_name" ON "%[1]d_vde_pages" (name); + + INSERT INTO "%[1]d_vde_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); + + DROP TABLE IF EXISTS "%[1]d_vde_blocks"; CREATE TABLE "%[1]d_vde_blocks" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_blocks" ADD CONSTRAINT "%[1]d_vde_blocks_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_vde_blocks_index_name" ON "%[1]d_vde_blocks" (name); + + DROP TABLE IF EXISTS "%[1]d_vde_signatures"; CREATE TABLE "%[1]d_vde_signatures" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(100) NOT NULL DEFAULT '', + "value" jsonb, + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_signatures" ADD CONSTRAINT "%[1]d_vde_signatures_pkey" PRIMARY KEY (name); + + CREATE TABLE "%[1]d_vde_contracts" ( + "id" bigint NOT NULL DEFAULT '0', + "name" text NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_contracts" ADD CONSTRAINT 
"%[1]d_vde_contracts_pkey" PRIMARY KEY (id); + + DROP TABLE IF EXISTS "%[1]d_vde_parameters"; + CREATE TABLE "%[1]d_vde_parameters" ( + "id" bigint NOT NULL DEFAULT '0', + "name" varchar(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_parameters" ADD CONSTRAINT "%[1]d_vde_parameters_pkey" PRIMARY KEY ("id"); + CREATE INDEX "%[1]d_vde_parameters_index_name" ON "%[1]d_vde_parameters" (name); + + INSERT INTO "%[1]d_vde_parameters" ("id","name", "value", "conditions") VALUES + ('1','founder_account', '%[2]d', 'ContractConditions("MainCondition")'), + ('2','new_table', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('3','new_column', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('4','changing_tables', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('5','changing_language', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('6','changing_signature', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('7','changing_page', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('8','changing_menu', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('9','changing_contracts', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('10','stylesheet', 'body { + /* You can define your custom styles here or create custom CSS rules */ + }', 'ContractConditions("MainCondition")'), + ('11','changing_blocks', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'); + + DROP TABLE IF EXISTS "%[1]d_vde_cron"; + CREATE TABLE "%[1]d_vde_cron" ( + "id" bigint NOT NULL DEFAULT '0', + "owner" bigint NOT NULL DEFAULT '0', + "cron" varchar(255) NOT NULL DEFAULT '', + "contract" varchar(255) NOT NULL DEFAULT '', + "counter" bigint NOT NULL DEFAULT '0', + "till" timestamp NOT NULL DEFAULT timestamp '1970-01-01 00:00:00', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_cron" ADD CONSTRAINT "%[1]d_vde_cron_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_vde_binaries"; + CREATE TABLE "%[1]d_vde_binaries" ( + "id" bigint NOT NULL DEFAULT '0', + "app_id" bigint NOT NULL DEFAULT '1', + "member_id" bigint NOT NULL DEFAULT '0', + "name" varchar(255) NOT NULL DEFAULT '', + "data" bytea NOT NULL DEFAULT '', + "hash" varchar(32) NOT NULL DEFAULT '', + "mime_type" varchar(255) NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_vde_binaries" ADD CONSTRAINT "%[1]d_vde_binaries_pkey" PRIMARY KEY (id); + CREATE UNIQUE INDEX "%[1]d_vde_binaries_index_app_id_member_id_name" ON "%[1]d_vde_binaries" (app_id, member_id, name); + + CREATE TABLE "%[1]d_vde_tables" ( + "id" bigint NOT NULL DEFAULT '0', + "name" varchar(100) UNIQUE NOT NULL DEFAULT '', + "permissions" jsonb, + "columns" jsonb, + "conditions" text NOT NULL DEFAULT '', + "app_id" bigint NOT NULL DEFAULT '1' + ); + ALTER TABLE ONLY "%[1]d_vde_tables" ADD CONSTRAINT "%[1]d_vde_tables_pkey" PRIMARY KEY ("id"); + CREATE INDEX "%[1]d_vde_tables_index_name" ON "%[1]d_vde_tables" (name); + + INSERT INTO "%[1]d_vde_tables" ("id", "name", "permissions","columns", "conditions") VALUES ('1', 'contracts', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "false", + 
"value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), + ('2', 'languages', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{ "name": "ContractConditions(\"MainCondition\")", + "res": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), + ('3', 'menu', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('4', 'pages', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", + "value": "ContractConditions(\"MainCondition\")", + "menu": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")", + "validate_count": "ContractConditions(\"MainCondition\")", + "validate_mode": "ContractConditions(\"MainCondition\")", + "app_id": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('5', 'blocks', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('6', 'signatures', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('7', 'cron', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"owner": "ContractConditions(\"MainCondition\")", + "cron": "ContractConditions(\"MainCondition\")", + "contract": "ContractConditions(\"MainCondition\")", + "counter": "ContractConditions(\"MainCondition\")", + "till": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractConditions("MainCondition")'), + ('8', 'binaries', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"app_id": "ContractConditions(\"MainCondition\")", + "member_id": "ContractConditions(\"MainCondition\")", + "name": "ContractConditions(\"MainCondition\")", + "data": "ContractConditions(\"MainCondition\")", + "hash": "ContractConditions(\"MainCondition\")", + "mime_type": "ContractConditions(\"MainCondition\")"}', + 'ContractConditions("MainCondition")'); + + INSERT INTO "%[1]d_vde_contracts" ("id", "name", "value", "conditions") VALUES + ('1','MainCondition','contract MainCondition { + conditions { + if 
EcosysParam("founder_account")!=$key_id + { + warning "Sorry, you do not have access to this action." + } + } + }', 'ContractConditions("MainCondition")'), + ('2','NewContract','contract NewContract { + data { + Value string + Conditions string + Wallet string "optional" + TokenEcosystem int "optional" + ApplicationId int "optional" + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + $walletContract = $key_id + if $Wallet { + $walletContract = AddressToId($Wallet) + if $walletContract == 0 { + error Sprintf("wrong wallet %%s", $Wallet) + } + } + var list array + list = ContractsList($Value) + + if Len(list) == 0 { + error "must be the name" + } + + var i int + while i < Len(list) { + if IsObject(list[i], $ecosystem_id) { + warning Sprintf("Contract or function %%s exists", list[i] ) + } + i = i + 1 + } + + $contract_name = list[0] + if !$TokenEcosystem { + $TokenEcosystem = 1 + } else { + if !SysFuel($TokenEcosystem) { + warning Sprintf("Ecosystem %%d is not system", $TokenEcosystem ) + } + } + } + action { + var root, id int + root = CompileContract($Value, $ecosystem_id, $walletContract, $TokenEcosystem) + id = DBInsert("contracts", "name,value,conditions, wallet_id, token_id,app_id", + $contract_name, $Value, $Conditions, $walletContract, $TokenEcosystem, $ApplicationId) + FlushContract(root, id, false) + $result = id + } + func rollback() { + var list array + list = ContractsList($Value) + var i int + while i < Len(list) { + RollbackContract(list[i]) + i = i + 1 + } + } + func price() int { + return SysParamInt("contract_price") + } + }', 'ContractConditions("MainCondition")'), + ('3','EditContract','contract EditContract { + data { + Id int + Value string "optional" + Conditions string "optional" + } + + func onlyConditions() bool { + return $Conditions && !$Value + } + conditions { + RowConditions("contracts", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + + var row array + row = DBFind("contracts").Columns("id,value,conditions").WhereId($Id) + if !Len(row) { + error Sprintf("Contract %%d does not exist", $Id) + } + $cur = row[0] + if $Value { + var list, curlist array + list = ContractsList($Value) + curlist = ContractsList($cur["value"]) + if Len(list) != Len(curlist) { + error "Contracts cannot be removed or inserted" + } + var i int + while i < Len(list) { + var j int + var ok bool + while j < Len(curlist) { + if curlist[j] == list[i] { + ok = true + break + } + j = j + 1 + } + if !ok { + error "Contracts or functions names cannot be changed" + } + i = i + 1 + } + } + } + action { + var root int + var pars, vals array + + if $Value { + root = CompileContract($Value, $ecosystem_id, 0, 0) + pars[0] = "value" + vals[0] = $Value + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if Len(vals) > 0 { + DBUpdate("contracts", $Id, Join(pars, ","), vals...) 
+ } + if $Value { + FlushContract(root, $Id, false) + } + } + }', 'ContractConditions("MainCondition")'), + ('4','NewParameter','contract NewParameter { + data { + Name string + Value string + Conditions string + } + conditions { + var ret array + ValidateCondition($Conditions, $ecosystem_id) + ret = DBFind("parameters").Columns("id").Where("name=?", $Name).Limit(1) + if Len(ret) > 0 { + warning Sprintf( "Parameter %%s already exists", $Name) + } + } + action { + $result = DBInsert("parameters", "name,value,conditions", $Name, $Value, $Conditions ) + } + }', 'ContractConditions("MainCondition")'), + ('5','EditParameter','contract EditParameter { + data { + Id int + Value string + Conditions string + } + func onlyConditions() bool { + return $Conditions && !$Value + } + conditions { + RowConditions("parameters", $Id, onlyConditions()) + ValidateCondition($Conditions, $ecosystem_id) + } + action { + DBUpdate("parameters", $Id, "value,conditions", $Value, $Conditions ) + } + }', 'ContractConditions("MainCondition")'), + ('6', 'NewMenu','contract NewMenu { + data { + Name string + Value string + Title string "optional" + Conditions string + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + + var row map + row = DBRow("menu").Columns("id").Where("name = ?", $Name) + + if row { + warning Sprintf( "Menu %%s already exists", $Name) + } + } + action { + DBInsert("menu", "name,value,title,conditions", $Name, $Value, $Title, $Conditions ) + } + func price() int { + return SysParamInt("menu_price") + } + }', 'ContractConditions("MainCondition")'), + ('7','EditMenu','contract EditMenu { + data { + Id int + Value string "optional" + Title string "optional" + Conditions string "optional" + } + + func onlyConditions() bool { + return $Conditions && !$Value && !$Title + } + conditions { + RowConditions("menu", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + } + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Title { + pars[Len(pars)] = "title" + vals[Len(vals)] = $Title + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if Len(vals) > 0 { + DBUpdate("menu", $Id, Join(pars, ","), vals...) 
+ } + } + }', 'ContractConditions("MainCondition")'), + ('8','AppendMenu','contract AppendMenu { + data { + Id int + Value string + } + conditions { + RowConditions("menu", $Id, false) + } + action { + var row map + row = DBRow("menu").Columns("value").WhereId($Id) + DBUpdate("menu", $Id, "value", row["value"] + "\r\n" + $Value) + } + }', 'ContractConditions("MainCondition")'), + ('9','NewPage','contract NewPage { + data { + Name string + Value string + Menu string + Conditions string + ValidateCount int "optional" + ApplicationId int "optional" + ValidateMode int "optional" + } + func preparePageValidateCount(count int) int { + var min, max int + min = Int(EcosysParam("min_page_validate_count")) + max = Int(EcosysParam("max_page_validate_count")) + + if count < min { + count = min + } else { + if count > max { + count = max + } + } + + return count + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + + var row map + row = DBRow("pages").Columns("id").Where("name = ?", $Name) + + if row { + warning Sprintf( "Page %%s already exists", $Name) + } + + $ValidateCount = preparePageValidateCount($ValidateCount) + } + action { + DBInsert("pages", "name,value,menu,validate_count,conditions,app_id,validate_mode", + $Name, $Value, $Menu, $ValidateCount, $Conditions, $ApplicationId, $ValidateMode) + } + func price() int { + return SysParamInt("page_price") + } + }', 'ContractConditions("MainCondition")'), + ('10','EditPage','contract EditPage { + data { + Id int + Value string "optional" + Menu string "optional" + Conditions string "optional" + ValidateCount int "optional" + ValidateMode string "optional" + } + func onlyConditions() bool { + return $Conditions && !$Value && !$Menu + } + func preparePageValidateCount(count int) int { + var min, max int + min = Int(EcosysParam("min_page_validate_count")) + max = Int(EcosysParam("max_page_validate_count")) + + if count < min { + count = min + } else { + if count > max { + count = max + } + } + + return count + } + conditions { + RowConditions("pages", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + $ValidateCount = preparePageValidateCount($ValidateCount) + } + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Menu { + pars[Len(pars)] = "menu" + vals[Len(vals)] = $Menu + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if $ValidateCount { + pars[Len(pars)] = "validate_count" + vals[Len(vals)] = $ValidateCount + } + if $ValidateMode { + if $ValidateMode != "1" { + $ValidateMode = "0" + } + pars[Len(pars)] = "validate_mode" + vals[Len(vals)] = $ValidateMode + } + if Len(vals) > 0 { + DBUpdate("pages", $Id, Join(pars, ","), vals...) 
+ } + } + }', 'ContractConditions("MainCondition")'), + ('11','AppendPage','contract AppendPage { + data { + Id int + Value string + } + conditions { + RowConditions("pages", $Id, false) + } + action { + var row map + row = DBRow("pages").Columns("value").WhereId($Id) + DBUpdate("pages", $Id, "value", row["value"] + "\r\n" + $Value) + } + }', 'ContractConditions("MainCondition")'), + ('12','NewBlock','contract NewBlock { + data { + Name string + Value string + Conditions string + ApplicationId int "optional" + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + + var row map + row = DBRow("blocks").Columns("id").Where("name = ?", $Name) + + if row { + warning Sprintf( "Block %%s already exists", $Name) + } + } + action { + DBInsert("blocks", "name,value,conditions,app_id", $Name, $Value, $Conditions, $ApplicationId ) + } + }', 'ContractConditions("MainCondition")'), + ('13','EditBlock','contract EditBlock { + data { + Id int + Value string "optional" + Conditions string "optional" + } + + func onlyConditions() bool { + return $Conditions && !$Value + } + + conditions { + RowConditions("blocks", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + } + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if Len(vals) > 0 { + DBUpdate("blocks", $Id, Join(pars, ","), vals...) + } + } + }', 'ContractConditions("MainCondition")'), + ('14','NewTable','contract NewTable { + data { + Name string + Columns string + Permissions string + ApplicationId int "optional" + } + conditions { + TableConditions($Name, $Columns, $Permissions) + } + action { + CreateTable($Name, $Columns, $Permissions, $ApplicationId) + } + func rollback() { + RollbackTable($Name) + } + func price() int { + return SysParamInt("table_price") + } + }', 'ContractConditions("MainCondition")'), + ('15','EditTable','contract EditTable { + data { + Name string + Permissions string + } + conditions { + TableConditions($Name, "", $Permissions) + } + action { + PermTable($Name, $Permissions ) + } + }', 'ContractConditions("MainCondition")'), + ('16','NewColumn','contract NewColumn { + data { + TableName string + Name string + Type string + Permissions string + } + conditions { + ColumnCondition($TableName, $Name, $Type, $Permissions) + } + action { + CreateColumn($TableName, $Name, $Type, $Permissions) + } + }', 'ContractConditions("MainCondition")'), + ('17','EditColumn','contract EditColumn { + data { + TableName string + Name string + Permissions string + } + conditions { + ColumnCondition($TableName, $Name, "", $Permissions) + } + action { + PermColumn($TableName, $Name, $Permissions) + } + }', 'ContractConditions("MainCondition")'), + ('18','NewLang','contract NewLang { + data { + Name string + Trans string + AppID int + } + conditions { + EvalCondition("parameters", "changing_language", "value") + var row array + row = DBFind("languages").Columns("name").Where("name=? 
AND app_id=?", $Name, $AppID).Limit(1) + if Len(row) > 0 { + error Sprintf("The language resource %%s already exists", $Name) + } + } + action { + DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) + UpdateLang($AppID, $Name, $Trans) + } + }', 'ContractConditions("MainCondition")'), + ('19','EditLang','contract EditLang { + data { + Id int + Name string + Trans string + AppID int + } + conditions { + EvalCondition("parameters", "changing_language", "value") + } + action { + DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) + UpdateLang($AppID, $Name, $Trans) + } + }', 'ContractConditions("MainCondition")'), + ('20','Import','contract Import { + data { + Data string + } + conditions { + $list = JSONDecode($Data) + } + func ImportList(row array, cnt string) { + if !row { + return + } + var i int + while i < Len(row) { + var idata map + idata = row[i] + if(cnt == "pages"){ + $ret_page = DBFind("pages").Columns("id").Where("name=$", idata["Name"]) + $page_id = One($ret_page, "id") + if ($page_id != nil){ + idata["Id"] = Int($page_id) + CallContract("EditPage", idata) + } else { + CallContract("NewPage", idata) + } + } + if(cnt == "blocks"){ + $ret_block = DBFind("blocks").Columns("id").Where("name=$", idata["Name"]) + $block_id = One($ret_block, "id") + if ($block_id != nil){ + idata["Id"] = Int($block_id) + CallContract("EditBlock", idata) + } else { + CallContract("NewBlock", idata) + } + } + if(cnt == "menus"){ + $ret_menu = DBFind("menu").Columns("id,value").Where("name=$", idata["Name"]) + $menu_id = One($ret_menu, "id") + $menu_value = One($ret_menu, "value") + if ($menu_id != nil){ + idata["Id"] = Int($menu_id) + idata["Value"] = Str($menu_value) + "\n" + Str(idata["Value"]) + CallContract("EditMenu", idata) + } else { + CallContract("NewMenu", idata) + } + } + if(cnt == "parameters"){ + $ret_param = DBFind("parameters").Columns("id").Where("name=$", idata["Name"]) + $param_id = One($ret_param, "id") + if ($param_id != nil){ + idata["Id"] = Int($param_id) + CallContract("EditParameter", idata) + } else { + CallContract("NewParameter", idata) + } + } + if(cnt == "languages"){ + $ret_lang = DBFind("languages").Columns("id").Where("name=$", idata["Name"]) + $lang_id = One($ret_lang, "id") + if ($lang_id != nil){ + CallContract("EditLang", idata) + } else { + CallContract("NewLang", idata) + } + } + if(cnt == "contracts"){ + if IsObject(idata["Name"], $ecosystem_id){ + } else { + CallContract("NewContract", idata) + } + } + if(cnt == "tables"){ + $ret_table = DBFind("tables").Columns("id").Where("name=$", idata["Name"]) + $table_id = One($ret_table, "id") + if ($table_id != nil){ + } else { + CallContract("NewTable", idata) + } + } + i = i + 1 + } + } + func ImportData(row array) { + if !row { + return + } + var i int + while i < Len(row) { + var idata map + var list array + var tblname, columns string + idata = row[i] + i = i + 1 + tblname = idata["Table"] + columns = Join(idata["Columns"], ",") + list = idata["Data"] + if !list { + continue + } + var j int + while j < Len(list) { + var ilist array + ilist = list[j] + DBInsert(tblname, columns, ilist) + j=j+1 + } + } + } + action { + ImportList($list["pages"], "pages") + ImportList($list["blocks"], "blocks") + ImportList($list["menus"], "menus") + ImportList($list["parameters"], "parameters") + ImportList($list["languages"], "languages") + ImportList($list["contracts"], "contracts") + ImportList($list["tables"], "tables") + ImportData($list["data"]) + } + }', 'ContractConditions("MainCondition")'), + ('21', 
'NewCron','contract NewCron { + data { + Cron string + Contract string + Limit int "optional" + Till string "optional date" + Conditions string + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + ValidateCron($Cron) + } + action { + if !$Till { + $Till = "1970-01-01 00:00:00" + } + if !HasPrefix($Contract, "@") { + $Contract = "@" + Str($ecosystem_id) + $Contract + } + $result = DBInsert("cron", "owner,cron,contract,counter,till,conditions", + $key_id, $Cron, $Contract, $Limit, $Till, $Conditions) + UpdateCron($result) + } + }', 'ContractConditions("MainCondition")'), + ('22','EditCron','contract EditCron { + data { + Id int + Contract string + Cron string "optional" + Limit int "optional" + Till string "optional date" + Conditions string + } + conditions { + ConditionById("cron", true) + ValidateCron($Cron) + } + action { + if !$Till { + $Till = "1970-01-01 00:00:00" + } + if !HasPrefix($Contract, "@") { + $Contract = "@" + Str($ecosystem_id) + $Contract + } + DBUpdate("cron", $Id, "cron,contract,counter,till,conditions", + $Cron, $Contract, $Limit, $Till, $Conditions) + UpdateCron($Id) + } + }', 'ContractConditions("MainCondition")'), + ('23', 'UploadBinary', contract UploadBinary { + data { + Name string + Data bytes "file" + AppID int + DataMimeType string "optional" + MemberID int "optional" + } + conditions { + $Id = Int(DBFind("binaries").Columns("id").Where("app_id = ? AND member_id = ? AND name = ?", $AppID, $MemberID, $Name).One("id")) + } + action { + var hash string + hash = MD5($Data) + + if $DataMimeType == "" { + $DataMimeType = "application/octet-stream" + } + + if $Id != 0 { + DBUpdate("binaries", $Id, "data,hash,mime_type", $Data, hash, $DataMimeType) + } else { + $Id = DBInsert("binaries", "app_id,member_id,name,data,hash,mime_type", $AppID, $MemberID, $Name, $Data, hash, $DataMimeType) + } + + $result = $Id + } + }', 'ContractConditions("MainCondition")'); + ` diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 6bde1d49e..002792d05 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -270,6 +270,21 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { f["GetVDEList"] = GetVDEList vmExtendCost(vm, getCost) vmFuncCallsDB(vm, funcCallsDB) + case script.VMTypeVDEMaster: + f["HTTPRequest"] = HTTPRequest + f["GetMapKeys"] = GetMapKeys + f["SortedKeys"] = SortedKeys + f["Date"] = Date + f["HTTPPostJSON"] = HTTPPostJSON + f["ValidateCron"] = ValidateCron + f["UpdateCron"] = UpdateCron + f["CreateVDE"] = CreateVDE + f["DeleteVDE"] = DeleteVDE + f["StartVDE"] = StartVDE + f["StopVDE"] = StopVDE + f["GetVDEList"] = GetVDEList + vmExtendCost(vm, getCost) + vmFuncCallsDB(vm, funcCallsDB) case script.VMTypeSmart: f["GetBlock"] = GetBlock f["UpdateNodesBan"] = UpdateNodesBan diff --git a/packages/vdemanager/config.go b/packages/vdemanager/config.go index bcafa10ff..c5d06f741 100644 --- a/packages/vdemanager/config.go +++ b/packages/vdemanager/config.go @@ -31,33 +31,33 @@ func (c ChildVDEConfig) configCommand() *exec.Cmd { fmt.Sprintf("--dbUser=%s", c.DBUser), fmt.Sprintf("--dbPassword=%s", c.DBPassword), fmt.Sprintf("--dbName=%s", c.Name), - fmt.Sprintf("--httpPort=%d", c.HTTPPort), + fmt.Sprintf("--httpPort=%d", c.HTTPPort) fmt.Sprintf("--dataDir=%s", c.Directory), fmt.Sprintf("--keysDir=%s", c.Directory), - "--runMode=VDE", + fmt.Sprintf("--runMode=VDE") } return exec.Command(c.Executable, args...) 
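	// Taken together, the child VDE is configured purely through CLI flags; with
	// illustrative values the spawned command looks roughly like
	//   <executable> ... --dbUser=vde1_user --dbPassword=*** --dbName=vde1 \
	//     --httpPort=8101 --dataDir=/var/lib/vde1 --keysDir=/var/lib/vde1 --runMode=VDE
	// (the values here are hypothetical; only the flag names come from the args built above)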
} -func (c ChildVDEConfig) initDBCommand() *exec.Cmd { - return c.getCommand(inidDBCommand) +func (c ChildVDEConfig) initDBCommand() exec.Cmd { + return getCommand(inidDBCommand) } -func (c ChildVDEConfig) generateKeysCommand() *exec.Cmd { - return c.getCommand(genKeysCommand) +func (c ChildVDEConfig) generateKeysCommand() exec.Cmd { + return getCommand(genKeysCommand) } -func (c ChildVDEConfig) startCommand() *exec.Cmd { - return c.getCommand(startCommand) +func (c ChildVDEConfig) startCommand() exec.Cmd { + retturn getCommand(startCommand) } func (c ChildVDEConfig) configPath() string { - return filepath.Join(c.Directory, c.ConfigFileName) + return filepath.Join(c.Directory, ConfigFileName) } -func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { - args := []string{ +func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { + return args := []string{ commandName, fmt.Sprintf("--config=%s", c.configPath()), } From 45e94fe0a5886a4ed1724840ca35b4be69957707 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:37:49 +0300 Subject: [PATCH 088/169] separate routes by vde --- packages/api/route.go | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/api/route.go b/packages/api/route.go index 9e94e3ec6..ef84e9637 100644 --- a/packages/api/route.go +++ b/packages/api/route.go @@ -93,6 +93,7 @@ func Route(route *hr.Router) { get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) get(`block/:id`, ``, getBlockInfo) get(`maxblockid`, ``, getMaxBlockID) + get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) get(`systemparams`, `?names:string`, authWallet, systemParams) get(`ecosystems`, ``, authWallet, ecosystems) From 16fd798d2f1f29b448760396a158efe92292cf59 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 13:38:36 +0300 Subject: [PATCH 089/169] separate vde migration to own package --- packages/migration/vde/vde.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde.go index b63cf858d..640338e93 100644 --- a/packages/migration/vde/vde.go +++ b/packages/migration/vde/vde.go @@ -1,4 +1,4 @@ -package migration +package vde var SchemaVDE = ` DROP TABLE IF EXISTS "%[1]d_vde_members"; From b928afd97c2a6e9e837b5ca69a11c2ee9598972b Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 7 May 2018 15:59:38 +0300 Subject: [PATCH 090/169] temp commit --- packages/vdemanager/config.go | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/packages/vdemanager/config.go b/packages/vdemanager/config.go index c5d06f741..bcafa10ff 100644 --- a/packages/vdemanager/config.go +++ b/packages/vdemanager/config.go @@ -31,33 +31,33 @@ func (c ChildVDEConfig) configCommand() *exec.Cmd { fmt.Sprintf("--dbUser=%s", c.DBUser), fmt.Sprintf("--dbPassword=%s", c.DBPassword), fmt.Sprintf("--dbName=%s", c.Name), - fmt.Sprintf("--httpPort=%d", c.HTTPPort) + fmt.Sprintf("--httpPort=%d", c.HTTPPort), fmt.Sprintf("--dataDir=%s", c.Directory), fmt.Sprintf("--keysDir=%s", c.Directory), - fmt.Sprintf("--runMode=VDE") + "--runMode=VDE", } return exec.Command(c.Executable, args...) 
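	// The per-action helpers below (initDBCommand, generateKeysCommand, startCommand)
	// all delegate to getCommand, so each child-process step reduces to the subcommand
	// name plus --config=<path to the child's config file>, presumably run against the
	// same c.Executable used here.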
} -func (c ChildVDEConfig) initDBCommand() exec.Cmd { - return getCommand(inidDBCommand) +func (c ChildVDEConfig) initDBCommand() *exec.Cmd { + return c.getCommand(inidDBCommand) } -func (c ChildVDEConfig) generateKeysCommand() exec.Cmd { - return getCommand(genKeysCommand) +func (c ChildVDEConfig) generateKeysCommand() *exec.Cmd { + return c.getCommand(genKeysCommand) } -func (c ChildVDEConfig) startCommand() exec.Cmd { - retturn getCommand(startCommand) +func (c ChildVDEConfig) startCommand() *exec.Cmd { + return c.getCommand(startCommand) } func (c ChildVDEConfig) configPath() string { - return filepath.Join(c.Directory, ConfigFileName) + return filepath.Join(c.Directory, c.ConfigFileName) } -func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { - return args := []string{ +func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { + args := []string{ commandName, fmt.Sprintf("--config=%s", c.configPath()), } From ce60d34d53f9ccb093029897e2b4d9a8d72e20cc Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Tue, 8 May 2018 09:59:10 +0300 Subject: [PATCH 091/169] temporary commit --- packages/conf/conf.go | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/packages/conf/conf.go b/packages/conf/conf.go index 59887d12e..b91be9b38 100644 --- a/packages/conf/conf.go +++ b/packages/conf/conf.go @@ -242,26 +242,26 @@ func GetNodesAddr() []string { } // IsPrivateBlockchain check running mode -func (c *GlobalConfig) IsPrivateBlockchain() bool { +func (c GlobalConfig) IsPrivateBlockchain() bool { return RunMode(c.RunningMode).IsPrivateBlockchain() } // IsPublicBlockchain check running mode -func (c *GlobalConfig) IsPublicBlockchain() bool { +func (c GlobalConfig) IsPublicBlockchain() bool { return RunMode(c.RunningMode).IsPublicBlockchain() } // IsVDE check running mode -func (c *GlobalConfig) IsVDE() bool { +func (c GlobalConfig) IsVDE() bool { return RunMode(c.RunningMode).IsVDE() } // IsVDEMaster check running mode -func (c *GlobalConfig) IsVDEMaster() bool { +func (c GlobalConfig) IsVDEMaster() bool { return RunMode(c.RunningMode).IsVDEMaster() } // IsSupportingVDE check running mode -func (c *GlobalConfig) IsSupportingVDE() bool { +func (c GlobalConfig) IsSupportingVDE() bool { return RunMode(c.RunningMode).IsSupportingVDE() } From f4313fabe655c7244d8e779314a7ecd13e3cc228 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 10 May 2018 17:15:56 +0300 Subject: [PATCH 092/169] temporary commit --- packages/daylight/start.go | 7 + packages/migration/vde/vde.go | 958 ------------------- packages/migration/vde/vde_data_contracts.go | 152 +-- 3 files changed, 26 insertions(+), 1091 deletions(-) delete mode 100644 packages/migration/vde/vde.go diff --git a/packages/daylight/start.go b/packages/daylight/start.go index 98394511e..aede916a8 100644 --- a/packages/daylight/start.go +++ b/packages/daylight/start.go @@ -279,6 +279,13 @@ func Start() { } } + if conf.Config.IsSupportingVDE() { + if err := smart.LoadVDEContracts(nil, converter.Int64ToStr(consts.DefaultVDE)); err != nil { + log.WithFields(log.Fields{"type": consts.VMError, "error": err}).Fatal("on loading vde virtual mashine") + Exit(1) + } + } + if conf.Config.IsVDEMaster() { vdemanager.InitVDEManager() } diff --git a/packages/migration/vde/vde.go b/packages/migration/vde/vde.go deleted file mode 100644 index 640338e93..000000000 --- a/packages/migration/vde/vde.go +++ /dev/null @@ -1,958 +0,0 @@ -package vde - -var SchemaVDE = ` - DROP TABLE IF EXISTS "%[1]d_vde_members"; - CREATE TABLE 
"%[1]d_vde_members" ( - "id" bigint NOT NULL DEFAULT '0', - "member_name" varchar(255) NOT NULL DEFAULT '', - "image_id" bigint, - "member_info" jsonb - ); - ALTER TABLE ONLY "%[1]d_vde_members" ADD CONSTRAINT "%[1]d_vde_members_pkey" PRIMARY KEY ("id"); - - INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('%[2]d', 'founder'); - INSERT INTO "%[1]d_vde_members" ("id", "member_name") VALUES('4544233900443112470', 'guest'); - - DROP TABLE IF EXISTS "%[1]d_vde_languages"; CREATE TABLE "%[1]d_vde_languages" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(100) NOT NULL DEFAULT '', - "res" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_languages" ADD CONSTRAINT "%[1]d_vde_languages_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_languages_index_name" ON "%[1]d_vde_languages" (name); - - DROP TABLE IF EXISTS "%[1]d_vde_menu"; CREATE TABLE "%[1]d_vde_menu" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "title" character varying(255) NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_menu" ADD CONSTRAINT "%[1]d_vde_menu_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_menu_index_name" ON "%[1]d_vde_menu" (name); - - - INSERT INTO "%[1]d_vde_menu" ("id","name","title","value","conditions") VALUES('2','admin_menu','Admin menu','MenuItem( - Icon: "icon-screen-desktop", - Page: "interface", - Vde: "true", - Title: "Interface" -) -MenuItem( - Icon: "icon-docs", - Page: "tables", - Vde: "true", - Title: "Tables" -) -MenuItem( - Icon: "icon-briefcase", - Page: "contracts", - Vde: "true", - Title: "Smart Contracts" -) -MenuItem( - Icon: "icon-settings", - Page: "parameters", - Vde: "true", - Title: "Ecosystem parameters" -) -MenuItem( - Icon: "icon-globe", - Page: "languages", - Vde: "true", - Title: "Language resources" -) -MenuItem( - Icon: "icon-cloud-upload", - Page: "import", - Vde: "true", - Title: "Import" -) -MenuItem( - Icon: "icon-cloud-download", - Page: "export", - Vde: "true", - Title: "Export" -)','true'); - - DROP TABLE IF EXISTS "%[1]d_vde_pages"; CREATE TABLE "%[1]d_vde_pages" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "menu" character varying(255) NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '', - "validate_count" bigint NOT NULL DEFAULT '1', - "app_id" bigint NOT NULL DEFAULT '0', - "validate_mode" character(1) NOT NULL DEFAULT '0' - ); - ALTER TABLE ONLY "%[1]d_vde_pages" ADD CONSTRAINT "%[1]d_vde_pages_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_pages_index_name" ON "%[1]d_vde_pages" (name); - - INSERT INTO "%[1]d_vde_pages" ("id","name","value","menu","conditions") VALUES('2','admin_index','','admin_menu','true'); - - DROP TABLE IF EXISTS "%[1]d_vde_blocks"; CREATE TABLE "%[1]d_vde_blocks" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_blocks" ADD CONSTRAINT "%[1]d_vde_blocks_pkey" PRIMARY KEY (id); - CREATE INDEX "%[1]d_vde_blocks_index_name" ON "%[1]d_vde_blocks" (name); - - DROP TABLE IF EXISTS "%[1]d_vde_signatures"; CREATE TABLE "%[1]d_vde_signatures" ( - "id" bigint NOT NULL DEFAULT '0', - "name" character varying(100) NOT NULL DEFAULT '', - "value" jsonb, - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY 
"%[1]d_vde_signatures" ADD CONSTRAINT "%[1]d_vde_signatures_pkey" PRIMARY KEY (name); - - CREATE TABLE "%[1]d_vde_contracts" ( - "id" bigint NOT NULL DEFAULT '0', - "name" text NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_contracts" ADD CONSTRAINT "%[1]d_vde_contracts_pkey" PRIMARY KEY (id); - - DROP TABLE IF EXISTS "%[1]d_vde_parameters"; - CREATE TABLE "%[1]d_vde_parameters" ( - "id" bigint NOT NULL DEFAULT '0', - "name" varchar(255) UNIQUE NOT NULL DEFAULT '', - "value" text NOT NULL DEFAULT '', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_parameters" ADD CONSTRAINT "%[1]d_vde_parameters_pkey" PRIMARY KEY ("id"); - CREATE INDEX "%[1]d_vde_parameters_index_name" ON "%[1]d_vde_parameters" (name); - - INSERT INTO "%[1]d_vde_parameters" ("id","name", "value", "conditions") VALUES - ('1','founder_account', '%[2]d', 'ContractConditions("MainCondition")'), - ('2','new_table', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('3','new_column', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('4','changing_tables', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('5','changing_language', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('6','changing_signature', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('7','changing_page', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('8','changing_menu', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('9','changing_contracts', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), - ('10','stylesheet', 'body { - /* You can define your custom styles here or create custom CSS rules */ - }', 'ContractConditions("MainCondition")'), - ('11','changing_blocks', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'); - - DROP TABLE IF EXISTS "%[1]d_vde_cron"; - CREATE TABLE "%[1]d_vde_cron" ( - "id" bigint NOT NULL DEFAULT '0', - "owner" bigint NOT NULL DEFAULT '0', - "cron" varchar(255) NOT NULL DEFAULT '', - "contract" varchar(255) NOT NULL DEFAULT '', - "counter" bigint NOT NULL DEFAULT '0', - "till" timestamp NOT NULL DEFAULT timestamp '1970-01-01 00:00:00', - "conditions" text NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_cron" ADD CONSTRAINT "%[1]d_vde_cron_pkey" PRIMARY KEY ("id"); - - DROP TABLE IF EXISTS "%[1]d_vde_binaries"; - CREATE TABLE "%[1]d_vde_binaries" ( - "id" bigint NOT NULL DEFAULT '0', - "app_id" bigint NOT NULL DEFAULT '1', - "member_id" bigint NOT NULL DEFAULT '0', - "name" varchar(255) NOT NULL DEFAULT '', - "data" bytea NOT NULL DEFAULT '', - "hash" varchar(32) NOT NULL DEFAULT '', - "mime_type" varchar(255) NOT NULL DEFAULT '' - ); - ALTER TABLE ONLY "%[1]d_vde_binaries" ADD CONSTRAINT "%[1]d_vde_binaries_pkey" PRIMARY KEY (id); - CREATE UNIQUE INDEX "%[1]d_vde_binaries_index_app_id_member_id_name" ON "%[1]d_vde_binaries" (app_id, member_id, name); - - CREATE TABLE "%[1]d_vde_tables" ( - "id" bigint NOT NULL DEFAULT '0', - "name" varchar(100) UNIQUE NOT NULL DEFAULT '', - "permissions" jsonb, - "columns" jsonb, - "conditions" text NOT NULL DEFAULT '', - "app_id" bigint NOT NULL DEFAULT '1' - ); - ALTER TABLE ONLY "%[1]d_vde_tables" ADD CONSTRAINT "%[1]d_vde_tables_pkey" PRIMARY KEY ("id"); - CREATE INDEX "%[1]d_vde_tables_index_name" ON 
"%[1]d_vde_tables" (name); - - INSERT INTO "%[1]d_vde_tables" ("id", "name", "permissions","columns", "conditions") VALUES ('1', 'contracts', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "false", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), - ('2', 'languages', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{ "name": "ContractConditions(\"MainCondition\")", - "res": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), - ('3', 'menu', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('4', 'pages', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "menu": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")", - "validate_count": "ContractConditions(\"MainCondition\")", - "validate_mode": "ContractConditions(\"MainCondition\")", - "app_id": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('5', 'blocks', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('6', 'signatures', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"name": "ContractConditions(\"MainCondition\")", - "value": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractAccess("EditTable")'), - ('7', 'cron', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"owner": "ContractConditions(\"MainCondition\")", - "cron": "ContractConditions(\"MainCondition\")", - "contract": "ContractConditions(\"MainCondition\")", - "counter": "ContractConditions(\"MainCondition\")", - "till": "ContractConditions(\"MainCondition\")", - "conditions": "ContractConditions(\"MainCondition\")" - }', 'ContractConditions("MainCondition")'), - ('8', 'binaries', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", - "new_column": "ContractConditions(\"MainCondition\")"}', - '{"app_id": "ContractConditions(\"MainCondition\")", - "member_id": "ContractConditions(\"MainCondition\")", - "name": "ContractConditions(\"MainCondition\")", - "data": "ContractConditions(\"MainCondition\")", - 
"hash": "ContractConditions(\"MainCondition\")", - "mime_type": "ContractConditions(\"MainCondition\")"}', - 'ContractConditions("MainCondition")'); - - INSERT INTO "%[1]d_vde_contracts" ("id", "name", "value", "conditions") VALUES - ('1','MainCondition','contract MainCondition { - conditions { - if EcosysParam("founder_account")!=$key_id - { - warning "Sorry, you do not have access to this action." - } - } - }', 'ContractConditions("MainCondition")'), - ('2','NewContract','contract NewContract { - data { - Value string - Conditions string - Wallet string "optional" - TokenEcosystem int "optional" - ApplicationId int "optional" - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - $walletContract = $key_id - if $Wallet { - $walletContract = AddressToId($Wallet) - if $walletContract == 0 { - error Sprintf("wrong wallet %%s", $Wallet) - } - } - var list array - list = ContractsList($Value) - - if Len(list) == 0 { - error "must be the name" - } - - var i int - while i < Len(list) { - if IsObject(list[i], $ecosystem_id) { - warning Sprintf("Contract or function %%s exists", list[i] ) - } - i = i + 1 - } - - $contract_name = list[0] - if !$TokenEcosystem { - $TokenEcosystem = 1 - } else { - if !SysFuel($TokenEcosystem) { - warning Sprintf("Ecosystem %%d is not system", $TokenEcosystem ) - } - } - } - action { - var root, id int - root = CompileContract($Value, $ecosystem_id, $walletContract, $TokenEcosystem) - id = DBInsert("contracts", "name,value,conditions, wallet_id, token_id,app_id", - $contract_name, $Value, $Conditions, $walletContract, $TokenEcosystem, $ApplicationId) - FlushContract(root, id, false) - $result = id - } - func rollback() { - var list array - list = ContractsList($Value) - var i int - while i < Len(list) { - RollbackContract(list[i]) - i = i + 1 - } - } - func price() int { - return SysParamInt("contract_price") - } - }', 'ContractConditions("MainCondition")'), - ('3','EditContract','contract EditContract { - data { - Id int - Value string "optional" - Conditions string "optional" - } - - func onlyConditions() bool { - return $Conditions && !$Value - } - conditions { - RowConditions("contracts", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - - var row array - row = DBFind("contracts").Columns("id,value,conditions").WhereId($Id) - if !Len(row) { - error Sprintf("Contract %%d does not exist", $Id) - } - $cur = row[0] - if $Value { - var list, curlist array - list = ContractsList($Value) - curlist = ContractsList($cur["value"]) - if Len(list) != Len(curlist) { - error "Contracts cannot be removed or inserted" - } - var i int - while i < Len(list) { - var j int - var ok bool - while j < Len(curlist) { - if curlist[j] == list[i] { - ok = true - break - } - j = j + 1 - } - if !ok { - error "Contracts or functions names cannot be changed" - } - i = i + 1 - } - } - } - action { - var root int - var pars, vals array - - if $Value { - root = CompileContract($Value, $ecosystem_id, 0, 0) - pars[0] = "value" - vals[0] = $Value - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if Len(vals) > 0 { - DBUpdate("contracts", $Id, Join(pars, ","), vals...) 
- } - if $Value { - FlushContract(root, $Id, false) - } - } - }', 'ContractConditions("MainCondition")'), - ('4','NewParameter','contract NewParameter { - data { - Name string - Value string - Conditions string - } - conditions { - var ret array - ValidateCondition($Conditions, $ecosystem_id) - ret = DBFind("parameters").Columns("id").Where("name=?", $Name).Limit(1) - if Len(ret) > 0 { - warning Sprintf( "Parameter %%s already exists", $Name) - } - } - action { - $result = DBInsert("parameters", "name,value,conditions", $Name, $Value, $Conditions ) - } - }', 'ContractConditions("MainCondition")'), - ('5','EditParameter','contract EditParameter { - data { - Id int - Value string - Conditions string - } - func onlyConditions() bool { - return $Conditions && !$Value - } - conditions { - RowConditions("parameters", $Id, onlyConditions()) - ValidateCondition($Conditions, $ecosystem_id) - } - action { - DBUpdate("parameters", $Id, "value,conditions", $Value, $Conditions ) - } - }', 'ContractConditions("MainCondition")'), - ('6', 'NewMenu','contract NewMenu { - data { - Name string - Value string - Title string "optional" - Conditions string - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - - var row map - row = DBRow("menu").Columns("id").Where("name = ?", $Name) - - if row { - warning Sprintf( "Menu %%s already exists", $Name) - } - } - action { - DBInsert("menu", "name,value,title,conditions", $Name, $Value, $Title, $Conditions ) - } - func price() int { - return SysParamInt("menu_price") - } - }', 'ContractConditions("MainCondition")'), - ('7','EditMenu','contract EditMenu { - data { - Id int - Value string "optional" - Title string "optional" - Conditions string "optional" - } - - func onlyConditions() bool { - return $Conditions && !$Value && !$Title - } - conditions { - RowConditions("menu", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - } - action { - var pars, vals array - if $Value { - pars[0] = "value" - vals[0] = $Value - } - if $Title { - pars[Len(pars)] = "title" - vals[Len(vals)] = $Title - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if Len(vals) > 0 { - DBUpdate("menu", $Id, Join(pars, ","), vals...) 
- } - } - }', 'ContractConditions("MainCondition")'), - ('8','AppendMenu','contract AppendMenu { - data { - Id int - Value string - } - conditions { - RowConditions("menu", $Id, false) - } - action { - var row map - row = DBRow("menu").Columns("value").WhereId($Id) - DBUpdate("menu", $Id, "value", row["value"] + "\r\n" + $Value) - } - }', 'ContractConditions("MainCondition")'), - ('9','NewPage','contract NewPage { - data { - Name string - Value string - Menu string - Conditions string - ValidateCount int "optional" - ApplicationId int "optional" - ValidateMode int "optional" - } - func preparePageValidateCount(count int) int { - var min, max int - min = Int(EcosysParam("min_page_validate_count")) - max = Int(EcosysParam("max_page_validate_count")) - - if count < min { - count = min - } else { - if count > max { - count = max - } - } - - return count - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - - var row map - row = DBRow("pages").Columns("id").Where("name = ?", $Name) - - if row { - warning Sprintf( "Page %%s already exists", $Name) - } - - $ValidateCount = preparePageValidateCount($ValidateCount) - } - action { - DBInsert("pages", "name,value,menu,validate_count,conditions,app_id,validate_mode", - $Name, $Value, $Menu, $ValidateCount, $Conditions, $ApplicationId, $ValidateMode) - } - func price() int { - return SysParamInt("page_price") - } - }', 'ContractConditions("MainCondition")'), - ('10','EditPage','contract EditPage { - data { - Id int - Value string "optional" - Menu string "optional" - Conditions string "optional" - ValidateCount int "optional" - ValidateMode string "optional" - } - func onlyConditions() bool { - return $Conditions && !$Value && !$Menu - } - func preparePageValidateCount(count int) int { - var min, max int - min = Int(EcosysParam("min_page_validate_count")) - max = Int(EcosysParam("max_page_validate_count")) - - if count < min { - count = min - } else { - if count > max { - count = max - } - } - - return count - } - conditions { - RowConditions("pages", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - $ValidateCount = preparePageValidateCount($ValidateCount) - } - action { - var pars, vals array - if $Value { - pars[0] = "value" - vals[0] = $Value - } - if $Menu { - pars[Len(pars)] = "menu" - vals[Len(vals)] = $Menu - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if $ValidateCount { - pars[Len(pars)] = "validate_count" - vals[Len(vals)] = $ValidateCount - } - if $ValidateMode { - if $ValidateMode != "1" { - $ValidateMode = "0" - } - pars[Len(pars)] = "validate_mode" - vals[Len(vals)] = $ValidateMode - } - if Len(vals) > 0 { - DBUpdate("pages", $Id, Join(pars, ","), vals...) 
- } - } - }', 'ContractConditions("MainCondition")'), - ('11','AppendPage','contract AppendPage { - data { - Id int - Value string - } - conditions { - RowConditions("pages", $Id, false) - } - action { - var row map - row = DBRow("pages").Columns("value").WhereId($Id) - DBUpdate("pages", $Id, "value", row["value"] + "\r\n" + $Value) - } - }', 'ContractConditions("MainCondition")'), - ('12','NewBlock','contract NewBlock { - data { - Name string - Value string - Conditions string - ApplicationId int "optional" - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - - var row map - row = DBRow("blocks").Columns("id").Where("name = ?", $Name) - - if row { - warning Sprintf( "Block %%s already exists", $Name) - } - } - action { - DBInsert("blocks", "name,value,conditions,app_id", $Name, $Value, $Conditions, $ApplicationId ) - } - }', 'ContractConditions("MainCondition")'), - ('13','EditBlock','contract EditBlock { - data { - Id int - Value string "optional" - Conditions string "optional" - } - - func onlyConditions() bool { - return $Conditions && !$Value - } - - conditions { - RowConditions("blocks", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - } - action { - var pars, vals array - if $Value { - pars[0] = "value" - vals[0] = $Value - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if Len(vals) > 0 { - DBUpdate("blocks", $Id, Join(pars, ","), vals...) - } - } - }', 'ContractConditions("MainCondition")'), - ('14','NewTable','contract NewTable { - data { - Name string - Columns string - Permissions string - ApplicationId int "optional" - } - conditions { - TableConditions($Name, $Columns, $Permissions) - } - action { - CreateTable($Name, $Columns, $Permissions, $ApplicationId) - } - func rollback() { - RollbackTable($Name) - } - func price() int { - return SysParamInt("table_price") - } - }', 'ContractConditions("MainCondition")'), - ('15','EditTable','contract EditTable { - data { - Name string - Permissions string - } - conditions { - TableConditions($Name, "", $Permissions) - } - action { - PermTable($Name, $Permissions ) - } - }', 'ContractConditions("MainCondition")'), - ('16','NewColumn','contract NewColumn { - data { - TableName string - Name string - Type string - Permissions string - } - conditions { - ColumnCondition($TableName, $Name, $Type, $Permissions) - } - action { - CreateColumn($TableName, $Name, $Type, $Permissions) - } - }', 'ContractConditions("MainCondition")'), - ('17','EditColumn','contract EditColumn { - data { - TableName string - Name string - Permissions string - } - conditions { - ColumnCondition($TableName, $Name, "", $Permissions) - } - action { - PermColumn($TableName, $Name, $Permissions) - } - }', 'ContractConditions("MainCondition")'), - ('18','NewLang','contract NewLang { - data { - Name string - Trans string - AppID int - } - conditions { - EvalCondition("parameters", "changing_language", "value") - var row array - row = DBFind("languages").Columns("name").Where("name=? 
AND app_id=?", $Name, $AppID).Limit(1) - if Len(row) > 0 { - error Sprintf("The language resource %%s already exists", $Name) - } - } - action { - DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) - } - }', 'ContractConditions("MainCondition")'), - ('19','EditLang','contract EditLang { - data { - Id int - Name string - Trans string - AppID int - } - conditions { - EvalCondition("parameters", "changing_language", "value") - } - action { - DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) - } - }', 'ContractConditions("MainCondition")'), - ('20','Import','contract Import { - data { - Data string - } - conditions { - $list = JSONDecode($Data) - } - func ImportList(row array, cnt string) { - if !row { - return - } - var i int - while i < Len(row) { - var idata map - idata = row[i] - if(cnt == "pages"){ - $ret_page = DBFind("pages").Columns("id").Where("name=$", idata["Name"]) - $page_id = One($ret_page, "id") - if ($page_id != nil){ - idata["Id"] = Int($page_id) - CallContract("EditPage", idata) - } else { - CallContract("NewPage", idata) - } - } - if(cnt == "blocks"){ - $ret_block = DBFind("blocks").Columns("id").Where("name=$", idata["Name"]) - $block_id = One($ret_block, "id") - if ($block_id != nil){ - idata["Id"] = Int($block_id) - CallContract("EditBlock", idata) - } else { - CallContract("NewBlock", idata) - } - } - if(cnt == "menus"){ - $ret_menu = DBFind("menu").Columns("id,value").Where("name=$", idata["Name"]) - $menu_id = One($ret_menu, "id") - $menu_value = One($ret_menu, "value") - if ($menu_id != nil){ - idata["Id"] = Int($menu_id) - idata["Value"] = Str($menu_value) + "\n" + Str(idata["Value"]) - CallContract("EditMenu", idata) - } else { - CallContract("NewMenu", idata) - } - } - if(cnt == "parameters"){ - $ret_param = DBFind("parameters").Columns("id").Where("name=$", idata["Name"]) - $param_id = One($ret_param, "id") - if ($param_id != nil){ - idata["Id"] = Int($param_id) - CallContract("EditParameter", idata) - } else { - CallContract("NewParameter", idata) - } - } - if(cnt == "languages"){ - $ret_lang = DBFind("languages").Columns("id").Where("name=$", idata["Name"]) - $lang_id = One($ret_lang, "id") - if ($lang_id != nil){ - CallContract("EditLang", idata) - } else { - CallContract("NewLang", idata) - } - } - if(cnt == "contracts"){ - if IsObject(idata["Name"], $ecosystem_id){ - } else { - CallContract("NewContract", idata) - } - } - if(cnt == "tables"){ - $ret_table = DBFind("tables").Columns("id").Where("name=$", idata["Name"]) - $table_id = One($ret_table, "id") - if ($table_id != nil){ - } else { - CallContract("NewTable", idata) - } - } - i = i + 1 - } - } - func ImportData(row array) { - if !row { - return - } - var i int - while i < Len(row) { - var idata map - var list array - var tblname, columns string - idata = row[i] - i = i + 1 - tblname = idata["Table"] - columns = Join(idata["Columns"], ",") - list = idata["Data"] - if !list { - continue - } - var j int - while j < Len(list) { - var ilist array - ilist = list[j] - DBInsert(tblname, columns, ilist) - j=j+1 - } - } - } - action { - ImportList($list["pages"], "pages") - ImportList($list["blocks"], "blocks") - ImportList($list["menus"], "menus") - ImportList($list["parameters"], "parameters") - ImportList($list["languages"], "languages") - ImportList($list["contracts"], "contracts") - ImportList($list["tables"], "tables") - ImportData($list["data"]) - } - }', 'ContractConditions("MainCondition")'), - ('21', 
'NewCron','contract NewCron { - data { - Cron string - Contract string - Limit int "optional" - Till string "optional date" - Conditions string - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - ValidateCron($Cron) - } - action { - if !$Till { - $Till = "1970-01-01 00:00:00" - } - if !HasPrefix($Contract, "@") { - $Contract = "@" + Str($ecosystem_id) + $Contract - } - $result = DBInsert("cron", "owner,cron,contract,counter,till,conditions", - $key_id, $Cron, $Contract, $Limit, $Till, $Conditions) - UpdateCron($result) - } - }', 'ContractConditions("MainCondition")'), - ('22','EditCron','contract EditCron { - data { - Id int - Contract string - Cron string "optional" - Limit int "optional" - Till string "optional date" - Conditions string - } - conditions { - ConditionById("cron", true) - ValidateCron($Cron) - } - action { - if !$Till { - $Till = "1970-01-01 00:00:00" - } - if !HasPrefix($Contract, "@") { - $Contract = "@" + Str($ecosystem_id) + $Contract - } - DBUpdate("cron", $Id, "cron,contract,counter,till,conditions", - $Cron, $Contract, $Limit, $Till, $Conditions) - UpdateCron($Id) - } - }', 'ContractConditions("MainCondition")'), - ('23', 'UploadBinary', contract UploadBinary { - data { - Name string - Data bytes "file" - AppID int - DataMimeType string "optional" - MemberID int "optional" - } - conditions { - $Id = Int(DBFind("binaries").Columns("id").Where("app_id = ? AND member_id = ? AND name = ?", $AppID, $MemberID, $Name).One("id")) - } - action { - var hash string - hash = MD5($Data) - - if $DataMimeType == "" { - $DataMimeType = "application/octet-stream" - } - - if $Id != 0 { - DBUpdate("binaries", $Id, "data,hash,mime_type", $Data, hash, $DataMimeType) - } else { - $Id = DBInsert("binaries", "app_id,member_id,name,data,hash,mime_type", $AppID, $MemberID, $Name, $Data, hash, $DataMimeType) - } - - $result = $Id - } - }', 'ContractConditions("MainCondition")'); - ` diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index 755e626c7..4e5ca29ab 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -483,113 +483,38 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c PermColumn($TableName, $Name, $Permissions) } }', 'ContractConditions("MainCondition")'), - ('18','NewLang', 'contract NewLang { + ('18','NewLang','contract NewLang { data { - ApplicationId int "optional" - Name string - Trans string "optional" - Value array "optional" - IdLanguage array "optional" + Name string + Trans string + AppID int } - conditions { - if $ApplicationId == 0 { - warning "Application id cannot equal 0" - } - - if DBFind("languages").Columns("id").Where("name = ?", $Name).One("id") { - warning Sprintf( "Language resource %%s already exists", $Name) - } - - var j int - while j < Len($IdLanguage) { - if $IdLanguage[j] == "" { - info("Locale empty") - } - if $Value[j] == "" { - info("Value empty") - } - j = j + 1 - } EvalCondition("parameters", "changing_language", "value") + var row array + row = DBFind("languages").Columns("name").Where("name=? 
AND app_id=?", $Name, $AppID).Limit(1) + if Len(row) > 0 { + error Sprintf("The language resource %%s already exists", $Name) + } } - action { - var i,len,lenshar int - var res,langarr string - len = Len($IdLanguage) - lenshar = Len($Value) - while i < len { - if i + 1 == len { - res = res + Sprintf("%%q: %%q",$IdLanguage[i],$Value[i]) - } else { - res = res + Sprintf("%%q: %%q,",$IdLanguage[i],$Value[i]) - } - i = i + 1 - } - if len > 0 { - langarr = Sprintf("{"+"%%v"+"}", res) - $Trans = langarr - } - $result = CreateLanguage($Name, $Trans, $ApplicationId) + DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) + UpdateLang($AppID, $Name, $Trans) } }', 'ContractConditions("MainCondition")'), ('19','EditLang','contract EditLang { data { - Id int - Name string "optional" - ApplicationId int "optional" - Trans string "optional" - Value array "optional" - IdLanguage array "optional" + Id int + Name string + Trans string + AppID int } - conditions { - var j int - while j < Len($IdLanguage) { - if ($IdLanguage[j] == ""){ - info("Locale empty") - } - if ($Value[j] == ""){ - info("Value empty") - } - j = j + 1 - } EvalCondition("parameters", "changing_language", "value") } - action { - var i,len int - var res,langarr string - len = Len($IdLanguage) - while i < len { - if (i + 1 == len){ - res = res + Sprintf("%%q: %%q", $IdLanguage[i],$Value[i]) - } - else { - res = res + Sprintf("%%q: %%q, ", $IdLanguage[i],$Value[i]) - } - i = i + 1 - } - - $row = DBFind("languages").Columns("name,app_id").WhereId($Id).Row() - if !$row{ - warning "Language not found" - } - - if $ApplicationId == 0 { - $ApplicationId = Int($row["app_id"]) - } - if $Name == "" { - $Name = $row["name"] - } - - if (len > 0){ - langarr = Sprintf("{"+"%%v"+"}", res) - $Trans = langarr - - } - EditLanguage($Id, $Name, $Trans, $ApplicationId) + DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) + UpdateLang($AppID, $Name, $Trans) } }', 'ContractConditions("MainCondition")'), ('20','Import','contract Import { @@ -794,7 +719,6 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c NewPubkey string } conditions { - Println($NewPubkey) $newId = PubToID($NewPubkey) if $newId == 0 { error "Wrong pubkey" @@ -802,48 +726,10 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c if DBFind("keys").Columns("id").WhereId($newId).One("id") != nil { error "User already exists" } - } - action { - DBInsert("keys", "id, pub", $newId, $NewPubKey) - } - }', 'ContractConditions("MainCondition")'), - ('25', 'NewVDE', 'contract NewVDE { - data { - VDEName string - DBUser string - DBPassword string - VDEAPIPort int - } - conditions { + $amount = Money(1000) * Money(1000000000000000000) } - action { - CreateVDE($VDEName, $DBUser, $DBPassword, $VDEAPIPort) - } - }', 'ContractConditions("MainCondition")'), - ('26', 'ListVDE', 'contract ListVDE { - data { - VDEName string - } - - conditions { - - } - - action { - GetVDEList($VDEName) - } - }', 'ContractConditions("MainCondition")'), - ('27', 'RunVDE', 'contract RunVDE { - data { - VDEName string - } - - conditions { - } - - action { - StartVDE($VDEName) + DBInsert("keys", "id, pub", $newId, $NewPubKey) } }', 'ContractConditions("MainCondition")');` From 8b37275d4b55beb2c245d6ce5407c2c0ee60f178 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 10 May 2018 22:37:36 +0300 Subject: [PATCH 093/169] fix login --- packages/migration/vde/vde_data_contracts.go | 111 +++++++++++++++---- 1 file changed, 92 insertions(+), 19 
deletions(-) diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index 4e5ca29ab..ea83e591c 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -483,38 +483,113 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c PermColumn($TableName, $Name, $Permissions) } }', 'ContractConditions("MainCondition")'), - ('18','NewLang','contract NewLang { + ('18','NewLang', 'contract NewLang { data { - Name string - Trans string - AppID int + ApplicationId int "optional" + Name string + Trans string "optional" + Value array "optional" + IdLanguage array "optional" } + conditions { - EvalCondition("parameters", "changing_language", "value") - var row array - row = DBFind("languages").Columns("name").Where("name=? AND app_id=?", $Name, $AppID).Limit(1) - if Len(row) > 0 { - error Sprintf("The language resource %%s already exists", $Name) + if $ApplicationId == 0 { + warning "Application id cannot equal 0" + } + + if DBFind("languages").Columns("id").Where("name = ?", $Name).One("id") { + warning Sprintf( "Language resource %%s already exists", $Name) } + + var j int + while j < Len($IdLanguage) { + if $IdLanguage[j] == "" { + info("Locale empty") + } + if $Value[j] == "" { + info("Value empty") + } + j = j + 1 + } + EvalCondition("parameters", "changing_language", "value") } + action { - DBInsert("languages", "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) + var i,len,lenshar int + var res,langarr string + len = Len($IdLanguage) + lenshar = Len($Value) + while i < len { + if i + 1 == len { + res = res + Sprintf("%%q: %%q",$IdLanguage[i],$Value[i]) + } else { + res = res + Sprintf("%%q: %%q,",$IdLanguage[i],$Value[i]) + } + i = i + 1 + } + if len > 0 { + langarr = Sprintf("{"+"%%v"+"}", res) + $Trans = langarr + } + $result = CreateLanguage($Name, $Trans, $ApplicationId) } }', 'ContractConditions("MainCondition")'), ('19','EditLang','contract EditLang { data { - Id int - Name string - Trans string - AppID int + Id int + Name string "optional" + ApplicationId int "optional" + Trans string "optional" + Value array "optional" + IdLanguage array "optional" } + conditions { + var j int + while j < Len($IdLanguage) { + if ($IdLanguage[j] == ""){ + info("Locale empty") + } + if ($Value[j] == ""){ + info("Value empty") + } + j = j + 1 + } EvalCondition("parameters", "changing_language", "value") } + action { - DBUpdate("languages", $Id, "name,res,app_id", $Name, $Trans, $AppID) - UpdateLang($AppID, $Name, $Trans) + var i,len int + var res,langarr string + len = Len($IdLanguage) + while i < len { + if (i + 1 == len){ + res = res + Sprintf("%%q: %%q", $IdLanguage[i],$Value[i]) + } + else { + res = res + Sprintf("%%q: %%q, ", $IdLanguage[i],$Value[i]) + } + i = i + 1 + } + + $row = DBFind("languages").Columns("name,app_id").WhereId($Id).Row() + if !$row{ + warning "Language not found" + } + + if $ApplicationId == 0 { + $ApplicationId = Int($row["app_id"]) + } + if $Name == "" { + $Name = $row["name"] + } + + if (len > 0){ + langarr = Sprintf("{"+"%%v"+"}", res) + $Trans = langarr + + } + EditLanguage($Id, $Name, $Trans, $ApplicationId) } }', 'ContractConditions("MainCondition")'), ('20','Import','contract Import { @@ -726,8 +801,6 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c if DBFind("keys").Columns("id").WhereId($newId).One("id") != nil { error "User already exists" } - - $amount = Money(1000) * 
Money(1000000000000000000) } action { DBInsert("keys", "id, pub", $newId, $NewPubKey) From d0c08553e67c47df70a5eb99ac7d0ee110c8b013 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 14 May 2018 09:18:14 +0300 Subject: [PATCH 094/169] temporary commit --- packages/api/route.go | 2 +- packages/migration/vde/vde_data_contracts.go | 41 ++++++++++++++++++++ 2 files changed, 42 insertions(+), 1 deletion(-) diff --git a/packages/api/route.go b/packages/api/route.go index ef84e9637..56b547f61 100644 --- a/packages/api/route.go +++ b/packages/api/route.go @@ -93,7 +93,7 @@ func Route(route *hr.Router) { get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) get(`block/:id`, ``, getBlockInfo) get(`maxblockid`, ``, getMaxBlockID) - get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) + get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) get(`systemparams`, `?names:string`, authWallet, systemParams) get(`ecosystems`, ``, authWallet, ecosystems) diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index ea83e591c..755e626c7 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -794,6 +794,7 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c NewPubkey string } conditions { + Println($NewPubkey) $newId = PubToID($NewPubkey) if $newId == 0 { error "Wrong pubkey" @@ -805,4 +806,44 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c action { DBInsert("keys", "id, pub", $newId, $NewPubKey) } + }', 'ContractConditions("MainCondition")'), + ('25', 'NewVDE', 'contract NewVDE { + data { + VDEName string + DBUser string + DBPassword string + VDEAPIPort int + } + + conditions { + } + + action { + CreateVDE($VDEName, $DBUser, $DBPassword, $VDEAPIPort) + } + }', 'ContractConditions("MainCondition")'), + ('26', 'ListVDE', 'contract ListVDE { + data { + VDEName string + } + + conditions { + + } + + action { + GetVDEList($VDEName) + } + }', 'ContractConditions("MainCondition")'), + ('27', 'RunVDE', 'contract RunVDE { + data { + VDEName string + } + + conditions { + } + + action { + StartVDE($VDEName) + } }', 'ContractConditions("MainCondition")');` From 4462a92e72a9530f6912b611efff011a3d066395 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Tue, 15 May 2018 12:05:42 +0300 Subject: [PATCH 095/169] temp commit --- packages/api/login.go | 8 +++++--- packages/migration/vde/vde_data_contracts.go | 3 ++- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/packages/api/login.go b/packages/api/login.go index 180e1dfe8..ba107acd7 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -17,6 +17,7 @@ package api import ( + "fmt" "net/http" "strings" "time" @@ -113,6 +114,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En } } else { pubkey = data.params[`pubkey`].([]byte) + fmt.Println(string(pubkey)) if len(pubkey) == 0 { logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("public key is empty") return errorAPI(w, `E_EMPTYPUBLIC`, http.StatusBadRequest) @@ -127,11 +129,10 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En pubkey = data.params[`pubkey`].([]byte) hexPubKey := hex.EncodeToString(pubkey) - params := make([]byte, 0) - params = append(append(params, converter.EncodeLength(int64(len(hexPubKey)))...), hexPubKey...) 
+ params := converter.EncodeLength(int64(len(hexPubKey))) + params = append(params, hexPubKey...) contract := smart.GetContract("NewUser", 1) - info := contract.Block.Info.(*script.ContractInfo) sc := tx.SmartContract{ Header: tx.Header{ @@ -210,6 +211,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En } } + fmt.Println(string(pubkey)) verify, err := crypto.CheckSign(pubkey, nonceSalt+msg, data.params[`signature`].([]byte)) if err != nil { logger.WithFields(log.Fields{"type": consts.CryptoError, "pubkey": pubkey, "msg": msg, "signature": string(data.params["signature"].([]byte))}).Error("checking signature") diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index 755e626c7..c26fa16d6 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -804,7 +804,8 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c } } action { - DBInsert("keys", "id, pub", $newId, $NewPubKey) + DBInsert("keys", "id", $newId) + SetPubKey($newId, StringToBytes($NewPubkey)) } }', 'ContractConditions("MainCondition")'), ('25', 'NewVDE', 'contract NewVDE { From e5ffad74e31ace164e8e14c2e74a8a2c24325120 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Tue, 15 May 2018 21:28:09 +0300 Subject: [PATCH 096/169] remove fmt from login api handlers --- packages/api/login.go | 3 --- 1 file changed, 3 deletions(-) diff --git a/packages/api/login.go b/packages/api/login.go index ba107acd7..90cba58b4 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -17,7 +17,6 @@ package api import ( - "fmt" "net/http" "strings" "time" @@ -114,7 +113,6 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En } } else { pubkey = data.params[`pubkey`].([]byte) - fmt.Println(string(pubkey)) if len(pubkey) == 0 { logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("public key is empty") return errorAPI(w, `E_EMPTYPUBLIC`, http.StatusBadRequest) @@ -211,7 +209,6 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En } } - fmt.Println(string(pubkey)) verify, err := crypto.CheckSign(pubkey, nonceSalt+msg, data.params[`signature`].([]byte)) if err != nil { logger.WithFields(log.Fields{"type": consts.CryptoError, "pubkey": pubkey, "msg": msg, "signature": string(data.params["signature"].([]byte))}).Error("checking signature") From 8688df4e7c2c2b2b63c56129dc30bf3b0cc8c350 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Wed, 16 May 2018 20:53:47 +0300 Subject: [PATCH 097/169] add drop db function --- packages/model/db.go | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/packages/model/db.go b/packages/model/db.go index fdf4d54aa..084ad61e9 100644 --- a/packages/model/db.go +++ b/packages/model/db.go @@ -416,3 +416,25 @@ func DropDatabase(name string) error { return nil } + +// DropDatabase kill all process and drop database +func DropDatabase(name string) error { + query := `SELECT + pg_terminate_backend (pg_stat_activity.pid) + FROM + pg_stat_activity + WHERE + pg_stat_activity.datname = ?` + + if err := DBConn.Exec(query, name).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err, "dbname": name}).Error("on kill db process") + return err + } + + if err := DBConn.Exec(fmt.Sprintf("DROP DATABASE IF EXISTS %s", name)).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err, "dbname": name}).Error("on drop db") + 
return err + } + + return nil +} From 59ae8815aa0797bb62db029bde475da8c1f965b1 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Wed, 16 May 2018 20:54:14 +0300 Subject: [PATCH 098/169] fix manager --- packages/migration/vde/vde_data_contracts.go | 25 ++++++++++++++++---- 1 file changed, 21 insertions(+), 4 deletions(-) diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go index c26fa16d6..4297f287a 100644 --- a/packages/migration/vde/vde_data_contracts.go +++ b/packages/migration/vde/vde_data_contracts.go @@ -824,19 +824,27 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c } }', 'ContractConditions("MainCondition")'), ('26', 'ListVDE', 'contract ListVDE { + data {} + + conditions {} + + action { + GetVDEList() + } + }', 'ContractConditions("MainCondition")'), + ('27', 'RunVDE', 'contract RunVDE { data { VDEName string } conditions { - } action { - GetVDEList($VDEName) + StartVDE($VDEName) } }', 'ContractConditions("MainCondition")'), - ('27', 'RunVDE', 'contract RunVDE { + ('28', 'StopVDE', 'contract StopVDE { data { VDEName string } @@ -845,6 +853,15 @@ var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "c } action { - StartVDE($VDEName) + StopVDEProcess($VDEName) + } + }', 'ContractConditions("MainCondition")'), + ('29', 'RemoveVDE', 'contract RemoveVDE { + data { + VDEName string + } + conditions {} + action{ + DeleteVDE($VDEName) } }', 'ContractConditions("MainCondition")');` From 7f7741055df2367a25ad5d9edbc7754cf9df764d Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Fri, 15 Jun 2018 14:26:20 +0300 Subject: [PATCH 099/169] fix rebase errors --- packages/service/node_ban.go | 2 +- packages/smart/funcs.go | 15 --------------- 2 files changed, 1 insertion(+), 16 deletions(-) diff --git a/packages/service/node_ban.go b/packages/service/node_ban.go index e69810f77..d8292de5d 100644 --- a/packages/service/node_ban.go +++ b/packages/service/node_ban.go @@ -143,7 +143,7 @@ func (nbs *NodesBanService) newBadBlock(producer syspar.FullNode, blockId, block } params = append(append(params, converter.EncodeLength(int64(len(reason)))...), []byte(reason)...) 
- vm := smart.GetVM(false, 0) + vm := smart.GetVM() contract := smart.VMGetContract(vm, "NewBadBlock", 1) info := contract.Block.Info.(*script.ContractInfo) diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 002792d05..6bde1d49e 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -270,21 +270,6 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { f["GetVDEList"] = GetVDEList vmExtendCost(vm, getCost) vmFuncCallsDB(vm, funcCallsDB) - case script.VMTypeVDEMaster: - f["HTTPRequest"] = HTTPRequest - f["GetMapKeys"] = GetMapKeys - f["SortedKeys"] = SortedKeys - f["Date"] = Date - f["HTTPPostJSON"] = HTTPPostJSON - f["ValidateCron"] = ValidateCron - f["UpdateCron"] = UpdateCron - f["CreateVDE"] = CreateVDE - f["DeleteVDE"] = DeleteVDE - f["StartVDE"] = StartVDE - f["StopVDE"] = StopVDE - f["GetVDEList"] = GetVDEList - vmExtendCost(vm, getCost) - vmFuncCallsDB(vm, funcCallsDB) case script.VMTypeSmart: f["GetBlock"] = GetBlock f["UpdateNodesBan"] = UpdateNodesBan From 5c381205cc25e434c95c00c97a40b9461d5c74f9 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Fri, 15 Jun 2018 16:32:29 +0300 Subject: [PATCH 100/169] vendoring supervisord --- vendor/github.com/gorilla/rpc/LICENSE | 27 + vendor/github.com/gorilla/rpc/README.md | 7 + vendor/github.com/gorilla/rpc/doc.go | 81 ++ vendor/github.com/gorilla/rpc/map.go | 180 +++++ vendor/github.com/gorilla/rpc/server.go | 269 +++++++ vendor/github.com/ochinchina/go-ini/LICENSE | 21 + vendor/github.com/ochinchina/go-ini/README.md | 368 +++++++++ vendor/github.com/ochinchina/go-ini/doc.go | 49 ++ .../ochinchina/go-ini/env_replacer.go | 65 ++ vendor/github.com/ochinchina/go-ini/ini.go | 265 +++++++ vendor/github.com/ochinchina/go-ini/key.go | 282 +++++++ vendor/github.com/ochinchina/go-ini/loader.go | 349 ++++++++ .../ochinchina/go-ini/properties.go | 116 +++ .../github.com/ochinchina/go-ini/section.go | 177 +++++ .../ochinchina/gorilla-xmlrpc/LICENSE | 27 + .../ochinchina/gorilla-xmlrpc/xml/client.go | 26 + .../ochinchina/gorilla-xmlrpc/xml/doc.go | 50 ++ .../ochinchina/gorilla-xmlrpc/xml/fault.go | 51 ++ .../ochinchina/gorilla-xmlrpc/xml/rpc2xml.go | 149 ++++ .../ochinchina/gorilla-xmlrpc/xml/server.go | 118 +++ .../ochinchina/gorilla-xmlrpc/xml/xml2rpc.go | 219 +++++ .../rogpeppe/go-charset/charset/big5.go | 88 +++ .../rogpeppe/go-charset/charset/charset.go | 301 +++++++ .../rogpeppe/go-charset/charset/codepage.go | 133 ++++ .../rogpeppe/go-charset/charset/cp932.go | 195 +++++ .../rogpeppe/go-charset/charset/file.go | 40 + .../rogpeppe/go-charset/charset/local.go | 162 ++++ .../rogpeppe/go-charset/charset/utf16.go | 110 +++ .../rogpeppe/go-charset/charset/utf8.go | 51 ++ .../rogpeppe/go-charset/data/data_big5.dat.go | 18 + .../go-charset/data/data_charsets.json.go | 18 + .../go-charset/data/data_cp932.dat.go | 18 + .../go-charset/data/data_ibm437.cp.go | 18 + .../go-charset/data/data_ibm850.cp.go | 18 + .../go-charset/data/data_ibm866.cp.go | 18 + .../go-charset/data/data_iso-8859-1.cp.go | 18 + .../go-charset/data/data_iso-8859-10.cp.go | 18 + .../go-charset/data/data_iso-8859-15.cp.go | 18 + .../go-charset/data/data_iso-8859-2.cp.go | 18 + .../go-charset/data/data_iso-8859-3.cp.go | 18 + .../go-charset/data/data_iso-8859-4.cp.go | 18 + .../go-charset/data/data_iso-8859-5.cp.go | 18 + .../go-charset/data/data_iso-8859-6.cp.go | 18 + .../go-charset/data/data_iso-8859-7.cp.go | 18 + .../go-charset/data/data_iso-8859-8.cp.go | 18 + .../go-charset/data/data_iso-8859-9.cp.go | 18 + 
.../go-charset/data/data_jisx0201kana.dat.go | 18 + .../go-charset/data/data_koi8-r.cp.go | 18 + .../go-charset/data/data_windows-1250.cp.go | 18 + .../go-charset/data/data_windows-1251.cp.go | 18 + .../go-charset/data/data_windows-1252.cp.go | 18 + .../rogpeppe/go-charset/data/doc.go | 6 + .../rogpeppe/go-charset/data/generate.go | 97 +++ .../rpoletaev/supervisord/Gopkg.lock | 63 ++ .../rpoletaev/supervisord/Gopkg.toml | 46 ++ .../github.com/rpoletaev/supervisord/LICENSE | 21 + .../rpoletaev/supervisord/README.md | 161 ++++ .../rpoletaev/supervisord/circle.yml | 9 + .../rpoletaev/supervisord/config/config.go | 558 +++++++++++++ .../supervisord/config/process_group.go | 114 +++ .../supervisord/config/process_sort.go | 159 ++++ .../supervisord/config/string_expression.go | 88 +++ .../rpoletaev/supervisord/config_template.go | 137 ++++ .../rpoletaev/supervisord/content_checker.go | 149 ++++ .../github.com/rpoletaev/supervisord/ctl.go | 159 ++++ .../rpoletaev/supervisord/daemonize.go | 25 + .../supervisord/daemonize_windows.go | 7 + .../rpoletaev/supervisord/events/events.go | 745 ++++++++++++++++++ .../rpoletaev/supervisord/faults/faults.go | 30 + .../rpoletaev/supervisord/logger/log.go | 485 ++++++++++++ .../rpoletaev/supervisord/logger/log_unix.go | 16 + .../supervisord/logger/log_windows.go | 7 + .../github.com/rpoletaev/supervisord/main.go | 75 ++ .../supervisord/process/command_parser.go | 81 ++ .../rpoletaev/supervisord/process/path.go | 46 ++ .../supervisord/process/pdeathsig_linux.go | 12 + .../supervisord/process/pdeathsig_other.go | 12 + .../supervisord/process/pdeathsig_windows.go | 9 + .../rpoletaev/supervisord/process/process.go | 689 ++++++++++++++++ .../supervisord/process/process_manager.go | 160 ++++ .../supervisord/process/set_user_id.go | 11 + .../process/set_user_id_windows.go | 11 + .../rpoletaev/supervisord/signals/signal.go | 34 + .../supervisord/signals/signal_windows.go | 46 ++ .../rpoletaev/supervisord/supervisor.go | 586 ++++++++++++++ .../rpoletaev/supervisord/util/util.go | 64 ++ .../rpoletaev/supervisord/version.go | 24 + .../rpoletaev/supervisord/xmlrpc.go | 136 ++++ vendor/vendor.json | 78 ++ 89 files changed, 9528 insertions(+) create mode 100644 vendor/github.com/gorilla/rpc/LICENSE create mode 100644 vendor/github.com/gorilla/rpc/README.md create mode 100644 vendor/github.com/gorilla/rpc/doc.go create mode 100644 vendor/github.com/gorilla/rpc/map.go create mode 100644 vendor/github.com/gorilla/rpc/server.go create mode 100644 vendor/github.com/ochinchina/go-ini/LICENSE create mode 100644 vendor/github.com/ochinchina/go-ini/README.md create mode 100644 vendor/github.com/ochinchina/go-ini/doc.go create mode 100644 vendor/github.com/ochinchina/go-ini/env_replacer.go create mode 100644 vendor/github.com/ochinchina/go-ini/ini.go create mode 100644 vendor/github.com/ochinchina/go-ini/key.go create mode 100644 vendor/github.com/ochinchina/go-ini/loader.go create mode 100644 vendor/github.com/ochinchina/go-ini/properties.go create mode 100644 vendor/github.com/ochinchina/go-ini/section.go create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/LICENSE create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/xml/client.go create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/xml/doc.go create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/xml/fault.go create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/xml/rpc2xml.go create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/xml/server.go create mode 100644 
vendor/github.com/ochinchina/gorilla-xmlrpc/xml/xml2rpc.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/big5.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/charset.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/codepage.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/cp932.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/file.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/local.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/utf16.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/utf8.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_big5.dat.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_charsets.json.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_cp932.dat.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_ibm437.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_ibm850.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_ibm866.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-1.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-10.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-15.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-2.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-3.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-4.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-5.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-6.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-7.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-8.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-9.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_jisx0201kana.dat.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_koi8-r.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_windows-1250.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_windows-1251.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_windows-1252.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/doc.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/generate.go create mode 100644 vendor/github.com/rpoletaev/supervisord/Gopkg.lock create mode 100644 vendor/github.com/rpoletaev/supervisord/Gopkg.toml create mode 100644 vendor/github.com/rpoletaev/supervisord/LICENSE create mode 100644 vendor/github.com/rpoletaev/supervisord/README.md create mode 100644 vendor/github.com/rpoletaev/supervisord/circle.yml create mode 100644 vendor/github.com/rpoletaev/supervisord/config/config.go create mode 100644 vendor/github.com/rpoletaev/supervisord/config/process_group.go create mode 100644 vendor/github.com/rpoletaev/supervisord/config/process_sort.go create mode 100644 vendor/github.com/rpoletaev/supervisord/config/string_expression.go create mode 100644 vendor/github.com/rpoletaev/supervisord/config_template.go create mode 100644 vendor/github.com/rpoletaev/supervisord/content_checker.go create mode 100644 vendor/github.com/rpoletaev/supervisord/ctl.go create mode 100644 
vendor/github.com/rpoletaev/supervisord/daemonize.go create mode 100644 vendor/github.com/rpoletaev/supervisord/daemonize_windows.go create mode 100644 vendor/github.com/rpoletaev/supervisord/events/events.go create mode 100644 vendor/github.com/rpoletaev/supervisord/faults/faults.go create mode 100644 vendor/github.com/rpoletaev/supervisord/logger/log.go create mode 100644 vendor/github.com/rpoletaev/supervisord/logger/log_unix.go create mode 100644 vendor/github.com/rpoletaev/supervisord/logger/log_windows.go create mode 100644 vendor/github.com/rpoletaev/supervisord/main.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/command_parser.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/path.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/pdeathsig_linux.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/pdeathsig_other.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/pdeathsig_windows.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/process.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/process_manager.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/set_user_id.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/set_user_id_windows.go create mode 100644 vendor/github.com/rpoletaev/supervisord/signals/signal.go create mode 100644 vendor/github.com/rpoletaev/supervisord/signals/signal_windows.go create mode 100644 vendor/github.com/rpoletaev/supervisord/supervisor.go create mode 100644 vendor/github.com/rpoletaev/supervisord/util/util.go create mode 100644 vendor/github.com/rpoletaev/supervisord/version.go create mode 100644 vendor/github.com/rpoletaev/supervisord/xmlrpc.go diff --git a/vendor/github.com/gorilla/rpc/LICENSE b/vendor/github.com/gorilla/rpc/LICENSE new file mode 100644 index 000000000..0e5fb8728 --- /dev/null +++ b/vendor/github.com/gorilla/rpc/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2012 Rodrigo Moraes. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vendor/github.com/gorilla/rpc/README.md b/vendor/github.com/gorilla/rpc/README.md new file mode 100644 index 000000000..75c26eaa8 --- /dev/null +++ b/vendor/github.com/gorilla/rpc/README.md @@ -0,0 +1,7 @@ +rpc +=== +[![Build Status](https://travis-ci.org/gorilla/rpc.png?branch=master)](https://travis-ci.org/gorilla/rpc) + +gorilla/rpc is a foundation for RPC over HTTP services, providing access to the exported methods of an object through HTTP requests. + +Read the full documentation here: http://www.gorillatoolkit.org/pkg/rpc diff --git a/vendor/github.com/gorilla/rpc/doc.go b/vendor/github.com/gorilla/rpc/doc.go new file mode 100644 index 000000000..bc65b532a --- /dev/null +++ b/vendor/github.com/gorilla/rpc/doc.go @@ -0,0 +1,81 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Copyright 2012 The Gorilla Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package gorilla/rpc is a foundation for RPC over HTTP services, providing +access to the exported methods of an object through HTTP requests. + +This package derives from the standard net/rpc package but uses a single HTTP +request per call instead of persistent connections. Other differences +compared to net/rpc: + + - Multiple codecs can be registered in the same server. + - A codec is chosen based on the "Content-Type" header from the request. + - Service methods also receive http.Request as parameter. + - This package can be used on Google App Engine. + +Let's setup a server and register a codec and service: + + import ( + "http" + "github.com/gorilla/rpc" + "github.com/gorilla/rpc/json" + ) + + func init() { + s := rpc.NewServer() + s.RegisterCodec(json.NewCodec(), "application/json") + s.RegisterService(new(HelloService), "") + http.Handle("/rpc", s) + } + +This server handles requests to the "/rpc" path using a JSON codec. +A codec is tied to a content type. In the example above, the JSON codec is +registered to serve requests with "application/json" as the value for the +"Content-Type" header. If the header includes a charset definition, it is +ignored; only the media-type part is taken into account. + +A service can be registered using a name. If the name is empty, like in the +example above, it will be inferred from the service type. + +That's all about the server setup. Now let's define a simple service: + + type HelloArgs struct { + Who string + } + + type HelloReply struct { + Message string + } + + type HelloService struct {} + + func (h *HelloService) Say(r *http.Request, args *HelloArgs, reply *HelloReply) error { + reply.Message = "Hello, " + args.Who + "!" + return nil + } + +The example above defines a service with a method "HelloService.Say" and +the arguments and reply related to that method. + +The service must be exported (begin with an upper case letter) or local +(defined in the package registering the service). + +When a service is registered, the server inspects the service methods +and make available the ones that follow these rules: + + - The method name is exported. + - The method has three arguments: *http.Request, *args, *reply. + - All three arguments are pointers. + - The second and third arguments are exported or local. + - The method has return type error. + +All other methods are ignored. + +Gorilla has packages with common RPC codecs. 
Check out their documentation: + + JSON: http://gorilla-web.appspot.com/pkg/rpc/json +*/ +package rpc diff --git a/vendor/github.com/gorilla/rpc/map.go b/vendor/github.com/gorilla/rpc/map.go new file mode 100644 index 000000000..433f275b8 --- /dev/null +++ b/vendor/github.com/gorilla/rpc/map.go @@ -0,0 +1,180 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Copyright 2012 The Gorilla Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package rpc + +import ( + "fmt" + "net/http" + "reflect" + "strings" + "sync" + "unicode" + "unicode/utf8" +) + +var ( + // Precompute the reflect.Type of error and http.Request + typeOfError = reflect.TypeOf((*error)(nil)).Elem() + typeOfRequest = reflect.TypeOf((*http.Request)(nil)).Elem() +) + +// ---------------------------------------------------------------------------- +// service +// ---------------------------------------------------------------------------- + +type service struct { + name string // name of service + rcvr reflect.Value // receiver of methods for the service + rcvrType reflect.Type // type of the receiver + methods map[string]*serviceMethod // registered methods + passReq bool +} + +type serviceMethod struct { + method reflect.Method // receiver method + argsType reflect.Type // type of the request argument + replyType reflect.Type // type of the response argument +} + +// ---------------------------------------------------------------------------- +// serviceMap +// ---------------------------------------------------------------------------- + +// serviceMap is a registry for services. +type serviceMap struct { + mutex sync.Mutex + services map[string]*service +} + +// register adds a new service using reflection to extract its methods. +func (m *serviceMap) register(rcvr interface{}, name string, passReq bool) error { + // Setup service. + s := &service{ + name: name, + rcvr: reflect.ValueOf(rcvr), + rcvrType: reflect.TypeOf(rcvr), + methods: make(map[string]*serviceMethod), + passReq: passReq, + } + if name == "" { + s.name = reflect.Indirect(s.rcvr).Type().Name() + if !isExported(s.name) { + return fmt.Errorf("rpc: type %q is not exported", s.name) + } + } + if s.name == "" { + return fmt.Errorf("rpc: no service name for type %q", + s.rcvrType.String()) + } + // Setup methods. + for i := 0; i < s.rcvrType.NumMethod(); i++ { + method := s.rcvrType.Method(i) + mtype := method.Type + + // offset the parameter indexes by one if the + // service methods accept an HTTP request pointer + var paramOffset int + if passReq { + paramOffset = 1 + } else { + paramOffset = 0 + } + + // Method must be exported. + if method.PkgPath != "" { + continue + } + // Method needs four ins: receiver, *http.Request, *args, *reply. + if mtype.NumIn() != 3+paramOffset { + continue + } + + // If the service methods accept an HTTP request pointer + if passReq { + // First argument must be a pointer and must be http.Request. + reqType := mtype.In(1) + if reqType.Kind() != reflect.Ptr || reqType.Elem() != typeOfRequest { + continue + } + } + // Next argument must be a pointer and must be exported. + args := mtype.In(1 + paramOffset) + if args.Kind() != reflect.Ptr || !isExportedOrBuiltin(args) { + continue + } + // Next argument must be a pointer and must be exported. + reply := mtype.In(2 + paramOffset) + if reply.Kind() != reflect.Ptr || !isExportedOrBuiltin(reply) { + continue + } + // Method needs one out: error. 
+ if mtype.NumOut() != 1 { + continue + } + if returnType := mtype.Out(0); returnType != typeOfError { + continue + } + s.methods[method.Name] = &serviceMethod{ + method: method, + argsType: args.Elem(), + replyType: reply.Elem(), + } + } + if len(s.methods) == 0 { + return fmt.Errorf("rpc: %q has no exported methods of suitable type", + s.name) + } + // Add to the map. + m.mutex.Lock() + defer m.mutex.Unlock() + if m.services == nil { + m.services = make(map[string]*service) + } else if _, ok := m.services[s.name]; ok { + return fmt.Errorf("rpc: service already defined: %q", s.name) + } + m.services[s.name] = s + return nil +} + +// get returns a registered service given a method name. +// +// The method name uses a dotted notation as in "Service.Method". +func (m *serviceMap) get(method string) (*service, *serviceMethod, error) { + parts := strings.Split(method, ".") + if len(parts) != 2 { + err := fmt.Errorf("rpc: service/method request ill-formed: %q", method) + return nil, nil, err + } + m.mutex.Lock() + service := m.services[parts[0]] + m.mutex.Unlock() + if service == nil { + err := fmt.Errorf("rpc: can't find service %q", method) + return nil, nil, err + } + serviceMethod := service.methods[parts[1]] + if serviceMethod == nil { + err := fmt.Errorf("rpc: can't find method %q", method) + return nil, nil, err + } + return service, serviceMethod, nil +} + +// isExported returns true of a string is an exported (upper case) name. +func isExported(name string) bool { + rune, _ := utf8.DecodeRuneInString(name) + return unicode.IsUpper(rune) +} + +// isExportedOrBuiltin returns true if a type is exported or a builtin. +func isExportedOrBuiltin(t reflect.Type) bool { + for t.Kind() == reflect.Ptr { + t = t.Elem() + } + // PkgPath will be non-empty even for an exported type, + // so we need to check the type name as well. + return isExported(t.Name()) || t.PkgPath() == "" +} diff --git a/vendor/github.com/gorilla/rpc/server.go b/vendor/github.com/gorilla/rpc/server.go new file mode 100644 index 000000000..d61b5eaa9 --- /dev/null +++ b/vendor/github.com/gorilla/rpc/server.go @@ -0,0 +1,269 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Copyright 2012 The Gorilla Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package rpc + +import ( + "fmt" + "net/http" + "reflect" + "strings" +) + +// ---------------------------------------------------------------------------- +// Codec +// ---------------------------------------------------------------------------- + +// Codec creates a CodecRequest to process each request. +type Codec interface { + NewRequest(*http.Request) CodecRequest +} + +// CodecRequest decodes a request and encodes a response using a specific +// serialization scheme. +type CodecRequest interface { + // Reads request and returns the RPC method name. + Method() (string, error) + // Reads request filling the RPC method args. + ReadRequest(interface{}) error + // Writes response using the RPC method reply. The error parameter is + // the error returned by the method call, if any. + WriteResponse(http.ResponseWriter, interface{}, error) error +} + +// ---------------------------------------------------------------------------- +// Server +// ---------------------------------------------------------------------------- + +// NewServer returns a new RPC server. 
+func NewServer() *Server { + return &Server{ + codecs: make(map[string]Codec), + services: new(serviceMap), + } +} + +// RequestInfo contains all the information we pass to before/after functions +type RequestInfo struct { + Method string + Error error + Request *http.Request + StatusCode int +} + +// Server serves registered RPC services using registered codecs. +type Server struct { + codecs map[string]Codec + services *serviceMap + interceptFunc func(i *RequestInfo) *http.Request + beforeFunc func(i *RequestInfo) + afterFunc func(i *RequestInfo) +} + +// RegisterCodec adds a new codec to the server. +// +// Codecs are defined to process a given serialization scheme, e.g., JSON or +// XML. A codec is chosen based on the "Content-Type" header from the request, +// excluding the charset definition. +func (s *Server) RegisterCodec(codec Codec, contentType string) { + s.codecs[strings.ToLower(contentType)] = codec +} + +// RegisterService adds a new service to the server. +// +// The name parameter is optional: if empty it will be inferred from +// the receiver type name. +// +// Methods from the receiver will be extracted if these rules are satisfied: +// +// - The receiver is exported (begins with an upper case letter) or local +// (defined in the package registering the service). +// - The method name is exported. +// - The method has three arguments: *http.Request, *args, *reply. +// - All three arguments are pointers. +// - The second and third arguments are exported or local. +// - The method has return type error. +// +// All other methods are ignored. +func (s *Server) RegisterService(receiver interface{}, name string) error { + return s.services.register(receiver, name, true) +} + +// RegisterTCPService adds a new TCP service to the server. +// No HTTP request struct will be passed to the service methods. +// +// The name parameter is optional: if empty it will be inferred from +// the receiver type name. +// +// Methods from the receiver will be extracted if these rules are satisfied: +// +// - The receiver is exported (begins with an upper case letter) or local +// (defined in the package registering the service). +// - The method name is exported. +// - The method has two arguments: *args, *reply. +// - Both arguments are pointers. +// - Both arguments are exported or local. +// - The method has return type error. +// +// All other methods are ignored. +func (s *Server) RegisterTCPService(receiver interface{}, name string) error { + return s.services.register(receiver, name, false) +} + +// HasMethod returns true if the given method is registered. +// +// The method uses a dotted notation as in "Service.Method". +func (s *Server) HasMethod(method string) bool { + if _, _, err := s.services.get(method); err == nil { + return true + } + return false +} + +// RegisterInterceptFunc registers the specified function as the function +// that will be called before every request. The function is allowed to intercept +// the request e.g. add values to the context. +// +// Note: Only one function can be registered, subsequent calls to this +// method will overwrite all the previous functions. +func (s *Server) RegisterInterceptFunc(f func(i *RequestInfo) *http.Request) { + s.interceptFunc = f +} + +// RegisterBeforeFunc registers the specified function as the function +// that will be called before every request. +// +// Note: Only one function can be registered, subsequent calls to this +// method will overwrite all the previous functions. 
+func (s *Server) RegisterBeforeFunc(f func(i *RequestInfo)) { + s.beforeFunc = f +} + +// RegisterAfterFunc registers the specified function as the function +// that will be called after every request +// +// Note: Only one function can be registered, subsequent calls to this +// method will overwrite all the previous functions. +func (s *Server) RegisterAfterFunc(f func(i *RequestInfo)) { + s.afterFunc = f +} + +// ServeHTTP +func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) { + if r.Method != "POST" { + s.writeError(w, 405, "rpc: POST method required, received "+r.Method) + return + } + contentType := r.Header.Get("Content-Type") + idx := strings.Index(contentType, ";") + if idx != -1 { + contentType = contentType[:idx] + } + var codec Codec + if contentType == "" && len(s.codecs) == 1 { + // If Content-Type is not set and only one codec has been registered, + // then default to that codec. + for _, c := range s.codecs { + codec = c + } + } else if codec = s.codecs[strings.ToLower(contentType)]; codec == nil { + s.writeError(w, 415, "rpc: unrecognized Content-Type: "+contentType) + return + } + // Create a new codec request. + codecReq := codec.NewRequest(r) + // Get service method to be called. + method, errMethod := codecReq.Method() + if errMethod != nil { + s.writeError(w, 400, errMethod.Error()) + return + } + serviceSpec, methodSpec, errGet := s.services.get(method) + if errGet != nil { + s.writeError(w, 400, errGet.Error()) + return + } + // Decode the args. + args := reflect.New(methodSpec.argsType) + if errRead := codecReq.ReadRequest(args.Interface()); errRead != nil { + s.writeError(w, 400, errRead.Error()) + return + } + + // Call the registered Intercept Function + if s.interceptFunc != nil { + req := s.interceptFunc(&RequestInfo{ + Request: r, + Method: method, + }) + if req != nil { + r = req + } + } + // Call the registered Before Function + if s.beforeFunc != nil { + s.beforeFunc(&RequestInfo{ + Request: r, + Method: method, + }) + } + + // Call the service method. + reply := reflect.New(methodSpec.replyType) + + // omit the HTTP request if the service method doesn't accept it + var errValue []reflect.Value + if serviceSpec.passReq { + errValue = methodSpec.method.Func.Call([]reflect.Value{ + serviceSpec.rcvr, + reflect.ValueOf(r), + args, + reply, + }) + } else { + errValue = methodSpec.method.Func.Call([]reflect.Value{ + serviceSpec.rcvr, + args, + reply, + }) + } + + // Cast the result to error if needed. + var errResult error + errInter := errValue[0].Interface() + if errInter != nil { + errResult = errInter.(error) + } + + // Prevents Internet Explorer from MIME-sniffing a response away + // from the declared content-type + w.Header().Set("x-content-type-options", "nosniff") + // Encode the response. 
+ if errWrite := codecReq.WriteResponse(w, reply.Interface(), errResult); errWrite != nil { + s.writeError(w, 400, errWrite.Error()) + } else { + // Call the registered After Function + if s.afterFunc != nil { + s.afterFunc(&RequestInfo{ + Request: r, + Method: method, + Error: errResult, + StatusCode: 200, + }) + } + } +} + +func (s *Server) writeError(w http.ResponseWriter, status int, msg string) { + w.WriteHeader(status) + w.Header().Set("Content-Type", "text/plain; charset=utf-8") + fmt.Fprint(w, msg) + if s.afterFunc != nil { + s.afterFunc(&RequestInfo{ + Error: fmt.Errorf(msg), + StatusCode: status, + }) + } +} diff --git a/vendor/github.com/ochinchina/go-ini/LICENSE b/vendor/github.com/ochinchina/go-ini/LICENSE new file mode 100644 index 000000000..6713cd967 --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Steven Ou + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/ochinchina/go-ini/README.md b/vendor/github.com/ochinchina/go-ini/README.md new file mode 100644 index 000000000..6c67d5c1e --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/README.md @@ -0,0 +1,368 @@ +# Overview + +This is a golang library for reading/writing the .ini format file. The description on .ini file can be found at https://en.wikipedia.org/wiki/INI_file + +# Supported .ini format + +A .ini file contains one or more sections and each section contains one or more key/value pair. Following is an example of .ini file + +```ini +# this is a comment line +; this is also a comment line + +[section1] + +key1 = value1 + +[section2] + +key2 = value2 +``` + +## Comments +### Comments line + +A comments line is started with char '#' or ';' and it will be ignored when processing the .ini file. + +```ini + +# this is a comment line +; this is also a comment line + +``` + +### inline comments + +A comment can be appended in a tail of line. The inline comments must be started with ';' or '#' and its previous char must be a space. 
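Alongside the .ini example just below, here is a minimal Go sketch of how the inline-comment rule behaves when values are read back, using the Load and GetValueWithDefault calls documented later in this README; the expected results follow from the rule above (a ';' only starts an inline comment when preceded by a space), and are assumptions about this particular library:

```go
package main

import (
	"fmt"

	ini "github.com/ochinchina/go-ini"
)

func main() {
	cfg := ini.Load(`[section1]
key1 = value1 ;this is a inline comment
key2 = value2;this is not a inline comment
`)
	// The inline comment after key1 is stripped, so this should print "value1".
	fmt.Println(cfg.GetValueWithDefault("section1", "key1", ""))
	// The ';' after value2 is not preceded by a space, so it stays part of the value.
	fmt.Println(cfg.GetValueWithDefault("section1", "key2", ""))
}
```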
+ +```ini +[section1] +key1 = value1 ;this is a inline comment +key2 = value2;this is not a inline comment +``` + +## Multiline value + +if a value is multiple line value, the value can be put between """ and """, an example: + +```ini + +[section1] + +multi-line-key = """this is a multi-line example, +multiple line can be put in a value, +this is multiple line is just for test""" + +single-line-key = this is a normal value +``` + +## Continuation line + +If a line is too long, user can devide one line to multiple line and on the end of line the char '\\' should be put: + +```ini +[section1] +key1 = this line is too long, \ +we need to write it to multiple line, \ +but actually it is one line from the point of user + +``` + +## Escape char + +This library supports the escape char, the escape char is started with char \\ + +|Common escape sequences Sequence | Meaning | +|---------------------------------|-----------------------------------------------------| +|\\\\ |\ (a single backslash, escaping the escape character)| +|\0 |Null character | +|\a |Bell/Alert/Audible | +|\b |Backspace, Bell character for some applications | +|\t |Tab character | +|\r |Carriage return | +|\n |Line feed | +|\\; |Semicolon | +|\\# |Number sign | +|\\= |Equals sign | +|\\: |Colon | +|\\x???? |Unicode character with hexadecimal code point | + + +## Environemnt variable support + +Environment variable can be embeded in the value of the key and the environment variable will be replaced. For example: + +```ini +[section1] +key1 = this value has env ${HOME} +key2 = this value has env with default ${SOME_ENV:-test},hihi +``` + +In the above example, the environment variable HOME is in the value of key1. So if the value of environment variable HOME is "/home/test", the value of key1 is "this value has env /home/test". + +For the key2, the environemnt SOME_ENV is included and if the environment variable SOME_ENV does not exist, its value will be "test" otherwise it will be the value of SOME_ENV environment variable. + +# API + +## import the library + +The go-ini library should be imported before using this library: + +```go +import ( + ini "github.com/ochinchina/go-ini" +) +``` +## Load .ini file + +.ini format file or string can be loaded by the method: + +### Load from a file + +```go +//Load the .ini from a file +ini := ini.Load( "fileName" ) + +``` + +### Load from a string or byte array in .ini format + +```go +ini_str := `[section1] +key1 = value1 +key2 = value 2 +` + +ini := ini.Load( ini_str ) +//load from a byte array + +ini = ini.Load( []byte(ini_str) ) + +``` + +### Load from a io.Reader + +```go + +var reader io.Reader = ... + +ini := ini.Load( reader ) + +``` + +### Load .ini from multiple source + +The Load() method can load .ini from multiple mixed sources. + +``` go +//load multiple sources: fileName, string, reader and byte array in one statement + +ini := ini.Load( "fileName", ini_str, reader ) +``` + +### Load the .ini in Ini object + +The Ini class also provide a method named Load(), this method can be called multiple times and the later loaded .ini will be appended to the Ini object. + +```go +//first load the .ini from a file +ini := ini.Load( "fileName" ) + +//append the .ini from string to the ini object +ini_str := `[section1] +key1 = value1 +key2 = value 2 +` +ini.Load( ini_str ) + +//append the .ini from a reader to the ini object +var reader io.Reader = ... 
+ini.Load( reader ) + +``` + +## Access the value of key in the .ini file + +After loading the .ini from a file/string/reader, we can access a keya under a section. This library provides three level API to access the value of a key in a section. + +### Access the value of key in Ini class level + +The value of key can be accessed in Ini class level. + +```go +ini := ini.Load(...) + +value, err := ini.GetValue( "section1", "key1") + +// if err is nil, the value is ok +if err == nil { + //the value exists and DO something according to the value +} +``` + +Sometimes we need to provide a default value if the key in the section does not exist, at this time the user can provide a default value by GetValueWithDefault() method. + +```go +ini := ini.Load(...) + +//if the section1 or key1 does not exist, return a default value(empty string) +value := ini.GetValueWithDefault( "section1", "key1", "" ) +``` +### Access the value of key in Section class level + +Call the GetSection() method by the section name on the Ini object at frist, and then call GetValue() on the section to get the value of key. + +```go +ini := ini.Load(...) + +section, err := ini.GetSection( "section1" ) + +if err == nil { + value, err := section.GetValue( "key1" ) + if err == nil { + //the value of key1 exists + } +} +``` + +The method GetValueWithDefault() ask user provide a default value if the key under section does not exist, the user provided default value will be returned. + +```go +ini := ini.Load(...) + +section, err := ini.GetSection( "section1" ) + +if err == nil { + //get the value of key1 and if the key1 does not exists, return the default empty string + value := section.GetValueWithDefault("key1", "" ) +} +``` + +### Access the value of key in Key class level + +The value of a key can be acccessed in the Key class level also. The method Key() on the section with keyname can be called even if the key does not exist. After getting a Key object, user can call Value() method to get the value of key. +```go +ini := ini.Load(...) + +section, err := ini.GetSection( "section1" ) +if err == nil { + //the Key() method always returns a Key object even if the key does not exist + value, err := section.Key( "key1" ).Value() + if err == nul { + //the value in key1 exists + } +} +``` +User can provide a default value to method ValueWithDefault() on the Key object to get the value of key and if the key does not exist the default value will be returned. + + +```go +ini := ini.Load(...) + +section, err := ini.GetSection( "section1" ) +if err == nil { + //the Key() method always returns a Key object even if the key does not exist + value:= section.Key( "key1" ).ValueWithDefault("") +} +``` + +## Convert the string value to desired types + +Except for getting a string value of a key, you can also ask the library convert the string to one of following types: + +- bool +- int +- int64 +- uint64 +- float32 +- float64 + +For each data type, this library provides two methods GetXXX() and GetXXXWithDefault() on the Ini&Section class level where the XXX stands for the Bool, Int, Int64, Uint64, Float32, Float64. + +An example to ask the library convert the key to a int data type in Ini level: + +```go + +ini := ini.Load(...) 
+ +value, err := ini.GetInt( "section1", "key1" ) + +if err == nil { + //at this time, the value of key1 exists and can be converted to integer +} + +value = ini.GetIntWithDefault( "section1", "key1", 0 ) + +``` + +An example to ask the library convert the key to a int data type in Section level: +```go + +ini := ini.Load(...) + +section, err := ini.GetSection( "section1" ) + +if err == nil { + value, err = section.GetInt( "key1" ) + if err == nil { + //at this time the key1 exists and its value can be converted to int + } + + value = section.GetIntWithDefault("key1", 0 ) +} +``` + +An example to ask the library convert the key to a int data type in Key level: +```go + +ini := ini.Load(...) +section, err := ini.GetSection( "section1" ) +if err == nil { + value, err := section.Key( "key1" ).Int() + if err == nil { + //at this time the key1 exists and its value can be converted to int + } + + //get with default value + value = section.Key( "key1" ).IntWithDefault( 0 ) +} +``` + +## Add the key&value to .ini file + +This library also provides API to add key&value to the .ini file. + +```go + +ini := ini.NewIni() + +section := ini.NewSection( "section1" ) +section.Add( "key1", "value1" ) +``` + +## Save the .ini to the file + +User can call the Write() method on Ini object to write the .ini contents to a io.Writer + +```go + +ini := ini.NewIni() +section := ini.NewSection( "section1" ) +section.Add( "key1", "value1" ) + +buf := bytes.NewBufferString("") +ini.Write( buf ) +``` + +If want to write to the file, there is a convinent API WriteToFile() with filename on the Ini object to write the .ini content to the file. + + +```go + +ini := ini.NewIni() +section := ini.NewSection( "section1" ) +section.Add( "key1", "value1" ) + +ini.WriteToFile( "test.ini" ) + +``` diff --git a/vendor/github.com/ochinchina/go-ini/doc.go b/vendor/github.com/ochinchina/go-ini/doc.go new file mode 100644 index 000000000..105ca3e98 --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/doc.go @@ -0,0 +1,49 @@ +/* +A golang implemented library to read/write .ini format files. + +With this library, you can load the .ini file a string, a byte array, a file and a io.Reader. + + import ( + ini "github.com/ochinchina/go-ini" + ) + + + func main() { + //load from .ini file + ini := ini.Load( "myfile.ini") + //load from .ini format string + str_data := "[section1]\nkey1=value1\n[section2]\nkey2=value2" + ini = ini.Load( str_data ) + + //load .ini format byte array + ini = ini.Load( []byte(str_data) ) + + //load from io.Reader + var reader io.Reader = ... 
+ + ini = ini.Load( reader ) + + //load from multiple source in one Load method + ini = ini.Load( "myfile.ini", reader, str_data, bytes_data ) + } + +The loaded Ini includes sections, you can access section: + + //get all the sections in the .ini + var sections []*Section = ini.Sections() + + //get a section by Name + var section *Section = ini.GetSection( sectionName ) + + +Then the key in a section can be accessed by method GetXXX() and GetXXXWithDefault(defValue): + //get the value of key + value, err := section.GetValue( "key1") + value = section.GetValueWithDefault("key1", "") + + //get value of key as int + i, err := section.GetInt( "key2" ) + i = section.GetIntWithDefault( "key2" ) + +*/ +package ini diff --git a/vendor/github.com/ochinchina/go-ini/env_replacer.go b/vendor/github.com/ochinchina/go-ini/env_replacer.go new file mode 100644 index 000000000..efbd5d92d --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/env_replacer.go @@ -0,0 +1,65 @@ +package ini + +import ( + "bytes" + "os" + "strings" +) + +func get_env_value(env string) (string, bool) { + pos := strings.Index(env, ":") + if pos == -1 { + return os.LookupEnv(env) + } + + real_env := env[0:pos] + def_value := env[pos+1:] + if len(def_value) > 0 && def_value[0] == '-' { + def_value = def_value[1:] + } + if value, ok := os.LookupEnv(real_env); ok { + return value, ok + } else { + return def_value, true + } +} + +func replace_env(s string) string { + n := len(s) + env_start_pos := -1 + result := bytes.NewBuffer(make([]byte, 0)) + + for i := 0; i < n; i++ { + //if env start flag "${" is found but env end flag "}" is not found + if env_start_pos >= 0 && s[i] != '}' { + continue + } + switch s[i] { + case '\\': + result.WriteByte(s[i]) + if i+1 < n { + i++ + result.WriteByte(s[i]) + } + case '$': + if i+1 < n && s[i+1] == '{' { + env_start_pos = i + i++ + } else { + result.WriteByte(s[i]) + } + case '}': + if env_start_pos >= 0 { + if env_value, ok := get_env_value(s[env_start_pos+2 : i]); ok { + result.WriteString(env_value) + } + env_start_pos = -1 + } else { + result.WriteByte(s[i]) + } + default: + result.WriteByte(s[i]) + } + } + return result.String() +} diff --git a/vendor/github.com/ochinchina/go-ini/ini.go b/vendor/github.com/ochinchina/go-ini/ini.go new file mode 100644 index 000000000..f628ae5a6 --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/ini.go @@ -0,0 +1,265 @@ +package ini + +import ( + "bytes" + "fmt" + "io" + "os" +) + +// manage all the sections and their key values defined in the .ini file +// +type Ini struct { + defaultSectionName string + sections map[string]*Section +} + +func NewIni() *Ini { + return &Ini{defaultSectionName: "default", + sections: make(map[string]*Section)} +} + +func (ini *Ini) GetDefaultSectionName() string { + return ini.defaultSectionName +} + +func (ini *Ini) SetDefaultSectionName(defSectionName string) { + ini.defaultSectionName = defSectionName +} + +// create a new section if the section with name does not exist +// or return the exist one if the section with name already exists +// +func (ini *Ini) NewSection(name string) *Section { + if section, ok := ini.sections[name]; ok { + return section + } + section := NewSection(name) + ini.sections[name] = section + return section +} + +// add a section to the .ini file and overwrite the exist section +// with same name +func (ini *Ini) AddSection(section *Section) { + ini.sections[section.Name] = section +} + +// Get all the section name in the ini +// +// return all the section names +func (ini *Ini) Sections() 
[]*Section { + r := make([]*Section, 0) + for _, section := range ini.sections { + r = append(r, section) + } + return r +} + +// check if a key exists or not in the Ini +// +// return true if the key in section exists +func (ini *Ini) HasKey(sectionName, key string) bool { + if section, ok := ini.sections[sectionName]; ok { + return section.HasKey(key) + } + return false +} + +// get section by section name +// +// return: section or nil +func (ini *Ini) GetSection(name string) (*Section, error) { + if section, ok := ini.sections[name]; ok { + return section, nil + } + return nil, noSuchSection(name) +} + +// return true if the section with name exists +// return false if the section with name does not exist +func (ini *Ini) HasSection(name string) bool { + _, err := ini.GetSection(name) + return err == nil +} + +// get the value of key in section +func (ini *Ini) GetValue(sectionName, key string) (string, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetValue(key) + } + return "", noSuchSection(sectionName) +} + +// get the value of the key in section +// if the key does not exist, return the defValue +func (ini *Ini) GetValueWithDefault(sectionName, key string, defValue string) string { + if section, ok := ini.sections[sectionName]; ok { + return section.GetValueWithDefault(key, defValue) + } + return defValue +} + +// get the value of key in section as bool. +// return true if the value of the key is one of following(case insensitive): +// - true +// - yes +// - t +// - y +// - 1 +// return false for all other values +func (ini *Ini) GetBool(sectionName, key string) (bool, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetBool(key) + } + return false, noSuchSection(sectionName) +} + +// get the value of key as bool and return the default value if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetBoolWithDefault(sectionName, key string, defValue bool) bool { + if section, ok := ini.sections[sectionName]; ok { + return section.GetBoolWithDefault(key, defValue) + } + return defValue +} + +// get the value of key in the section as int +func (ini *Ini) GetInt(sectionName, key string) (int, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetInt(key) + } + return 0, noSuchSection(sectionName) +} + +// get the value of key in the section as int and return defValue if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetIntWithDefault(sectionName, key string, defValue int) int { + if section, ok := ini.sections[sectionName]; ok { + return section.GetIntWithDefault(key, defValue) + } + return defValue +} + +// get the value of key in the section as uint +func (ini *Ini) GetUint(sectionName, key string) (uint, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetUint(key) + } + return 0, noSuchSection(sectionName) +} + +// get the value of key in the section as int and return defValue if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetUintWithDefault(sectionName, key string, defValue uint) uint { + if section, ok := ini.sections[sectionName]; ok { + return section.GetUintWithDefault(key, defValue) + } + return defValue +} + +// get the value of key in the section as int64 +func (ini *Ini) GetInt64(sectionName, key string) (int64, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetInt64(key) + } + return 0, 
noSuchSection(sectionName) +} + +// get the value of key in the section as int64 and return defValue if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetInt64WithDefault(sectionName, key string, defValue int64) int64 { + if section, ok := ini.sections[sectionName]; ok { + return section.GetInt64WithDefault(key, defValue) + } + return defValue +} + +// get the value of key in the section as uint64 +func (ini *Ini) GetUint64(sectionName, key string) (uint64, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetUint64(key) + } + return 0, noSuchSection(sectionName) +} + +// get the value of key in the section as uint64 and return defValue if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetUint64WithDefault(sectionName, key string, defValue uint64) uint64 { + if section, ok := ini.sections[sectionName]; ok { + return section.GetUint64WithDefault(key, defValue) + } + return defValue +} + +// get the value of key in the section as float32 +func (ini *Ini) GetFloat32(sectionName, key string) (float32, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetFloat32(key) + } + return 0, noSuchSection(sectionName) +} + +// get the value of key in the section as float32 and return defValue if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetFloat32WithDefault(sectionName, key string, defValue float32) float32 { + if section, ok := ini.sections[sectionName]; ok { + return section.GetFloat32WithDefault(key, defValue) + } + return defValue +} + +// get the value of key in the section as float64 +func (ini *Ini) GetFloat64(sectionName, key string) (float64, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetFloat64(key) + } + return 0, noSuchSection(sectionName) +} + +// get the value of key in the section as float64 and return defValue if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetFloat64WithDefault(sectionName, key string, defValue float64) float64 { + if section, ok := ini.sections[sectionName]; ok { + return section.GetFloat64WithDefault(key, defValue) + } + return defValue +} + +func noSuchSection(sectionName string) error { + return fmt.Errorf("no such section:%s", sectionName) +} + +func (ini *Ini) String() string { + buf := bytes.NewBuffer(make([]byte, 0)) + ini.Write(buf) + return buf.String() +} + +// write the content of the .ini in the .ini file format, e.g. 
in following format: +// +// [section1] +// key1 = value1 +// key2 = value2 +// [section2] +// key3 = value3 +// key4 = value4 +func (ini *Ini) Write(writer io.Writer) error { + for _, section := range ini.sections { + err := section.Write(writer) + if err != nil { + return err + } + } + return nil +} + +// Write the conents of ini to a file +func (ini *Ini) WriteToFile(fileName string) error { + file, err := os.Create(fileName) + if err == nil { + defer file.Close() + return ini.Write(file) + } + return err +} diff --git a/vendor/github.com/ochinchina/go-ini/key.go b/vendor/github.com/ochinchina/go-ini/key.go new file mode 100644 index 000000000..a3dd881a0 --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/key.go @@ -0,0 +1,282 @@ +package ini + +import ( + "fmt" + "strconv" + "strings" +) + +// represents the pair stored in the +// section of the .ini file +// +type Key interface { + // get name of the key + Name() string + + // get value of the key + Value() (string, error) + + //get the value of key and return defValue if + //the value does not exist + ValueWithDefault(defValue string) string + + // get the value as bool + // return true if the value is one of following(case insensitive): + // - true + // - yes + // - T + // - Y + // - 1 + // Any other value will return false + Bool() (bool, error) + + // get the value as bool and return the defValue if the + // value of the key does not exist + BoolWithDefault(defValue bool) bool + // get the value as int + Int() (int, error) + + // get value as int and return defValue if the + // value of the key does not exist + IntWithDefault(defValue int) int + + //get value as uint + Uint() (uint, error) + + //get value as uint and return defValue if the + //key does not exist or it is not uint format + UintWithDefault(defValue uint) uint + + // get the value as int64 + Int64() (int64, error) + + // get the value as int64 and return defValue + // if the value of the key does not exist + Int64WithDefault(defValue int64) int64 + + // get the value as uint64 + Uint64() (uint64, error) + + // get the value as uint64 and return defValue + // if the value of the key does not exist + Uint64WithDefault(defValue uint64) uint64 + + // get the value as float32 + Float32() (float32, error) + + // get the value as float32 and return defValue + // if the value of the key does not exist + Float32WithDefault(defValue float32) float32 + + // get the value as float64 + Float64() (float64, error) + + // get the value as the float64 and return defValue + // if the value of the key does not exist + Float64WithDefault(defValue float64) float64 + + // return a string as "key=value" format + // and if no value return empty string + String() string +} + +type nonExistKey struct { + name string +} + +func newNonExistKey(name string) *nonExistKey { + return &nonExistKey{name: name} +} + +func (nek *nonExistKey) Name() string { + return nek.name +} + +func (nek *nonExistKey) Value() (string, error) { + return "", nek.noSuchKey() +} + +func (nek *nonExistKey) ValueWithDefault(defValue string) string { + return defValue +} + +func (nek *nonExistKey) Bool() (bool, error) { + return false, nek.noSuchKey() +} + +func (nek *nonExistKey) BoolWithDefault(defValue bool) bool { + return defValue +} + +func (nek *nonExistKey) Int() (int, error) { + return 0, nek.noSuchKey() +} + +func (nek *nonExistKey) IntWithDefault(defValue int) int { + return defValue +} + +func (nek *nonExistKey) Uint() (uint, error) { + return 0, nek.noSuchKey() +} + +func (nek *nonExistKey) 
UintWithDefault(defValue uint) uint { + return defValue +} + +func (nek *nonExistKey) Int64() (int64, error) { + return 0, nek.noSuchKey() +} + +func (nek *nonExistKey) Int64WithDefault(defValue int64) int64 { + return defValue +} + +func (nek *nonExistKey) Uint64() (uint64, error) { + return 0, nek.noSuchKey() +} + +func (nek *nonExistKey) Uint64WithDefault(defValue uint64) uint64 { + return defValue +} + +func (nek *nonExistKey) Float32() (float32, error) { + return .0, nek.noSuchKey() +} + +func (nek *nonExistKey) Float32WithDefault(defValue float32) float32 { + return defValue +} + +func (nek *nonExistKey) Float64() (float64, error) { + return .0, nek.noSuchKey() +} + +func (nek *nonExistKey) Float64WithDefault(defValue float64) float64 { + return defValue +} + +func (nek *nonExistKey) String() string { + return "" +} + +func (nek *nonExistKey) noSuchKey() error { + return fmt.Errorf("no such key:%s", nek.name) +} + +type normalKey struct { + name string + value string +} + +var trueBoolValue = map[string]bool{"true": true, "t": true, "yes": true, "y": true, "1": true} + +func newNormalKey(name, value string) *normalKey { + return &normalKey{name: name, value: replace_env(value)} +} + +func (k *normalKey) Name() string { + return k.name +} + +func (k *normalKey) Value() (string, error) { + return k.value, nil +} + +func (k *normalKey) ValueWithDefault(defValue string) string { + return k.value +} + +func (k *normalKey) Bool() (bool, error) { + if _, ok := trueBoolValue[strings.ToLower(k.value)]; ok { + return true, nil + } + return false, nil +} + +func (k *normalKey) BoolWithDefault(defValue bool) bool { + v, err := k.Bool() + if err == nil { + return v + } + return defValue +} + +func (k *normalKey) Int() (int, error) { + return strconv.Atoi(k.value) +} + +func (k *normalKey) IntWithDefault(defValue int) int { + i, err := strconv.Atoi(k.value) + if err == nil { + return i + } + return defValue +} + +func (k *normalKey) Uint() (uint, error) { + v, err := strconv.ParseUint(k.value, 0, 32) + return uint(v), err +} + +func (k *normalKey) UintWithDefault(defValue uint) uint { + i, err := k.Uint() + if err == nil { + return i + } + return defValue + +} + +func (k *normalKey) Int64() (int64, error) { + return strconv.ParseInt(k.value, 0, 64) +} + +func (k *normalKey) Int64WithDefault(defValue int64) int64 { + i, err := strconv.ParseInt(k.value, 0, 64) + if err == nil { + return i + } + return defValue +} + +func (k *normalKey) Uint64() (uint64, error) { + return strconv.ParseUint(k.value, 0, 64) +} + +func (k *normalKey) Uint64WithDefault(defValue uint64) uint64 { + i, err := strconv.ParseUint(k.value, 0, 64) + if err == nil { + return i + } + return defValue +} + +func (k *normalKey) Float32() (float32, error) { + f, err := strconv.ParseFloat(k.value, 32) + return float32(f), err +} + +func (k *normalKey) Float32WithDefault(defValue float32) float32 { + f, err := strconv.ParseFloat(k.value, 32) + if err == nil { + return float32(f) + } + return defValue +} + +func (k *normalKey) Float64() (float64, error) { + return strconv.ParseFloat(k.value, 64) +} + +func (k *normalKey) Float64WithDefault(defValue float64) float64 { + f, err := strconv.ParseFloat(k.value, 64) + if err == nil { + return f + } + return defValue +} + +func (k *normalKey) String() string { + return fmt.Sprintf("%s=%s", k.name, toEscape(k.value)) +} diff --git a/vendor/github.com/ochinchina/go-ini/loader.go b/vendor/github.com/ochinchina/go-ini/loader.go new file mode 100644 index 000000000..059b8c26a --- /dev/null +++ 
b/vendor/github.com/ochinchina/go-ini/loader.go @@ -0,0 +1,349 @@ +package ini + +import ( + "bufio" + "bytes" + "errors" + "fmt" + "io" + "os" + "strconv" + "strings" + "unicode" +) + +// remove inline comments +// +// inline comments must start with ';' or '#' +// and the char before the ';' or '#' must be a space +// +func removeComments(value string) string { + n := len( value ) + i := 0 + for ;i < n; i++ { + if value[i] == '\\' { + i++ + } else if value[i] == ';' || value[i] == '#' { + if i > 0 && unicode.IsSpace( rune( value[i-1] ) ) { + return strings.TrimSpace( value[0:i] ) + } + } + } + return strings.TrimSpace( value ) +} + +// check if it is a oct char,e.g. must be char '0' to '7' +// +func isOctChar(ch byte) bool { + return ch >= '0' && ch <= '7' +} + +// check if the char is a hex char, e.g. the char +// must be '0'..'9' or 'a'..'f' or 'A'..'F' +// +func isHexChar(ch byte) bool { + return ch >= '0' && ch <= '9' || + ch >= 'a' && ch <= 'f' || + ch >= 'A' && ch <= 'F' +} + +func fromEscape(value string) string { + if strings.Index(value, "\\") == -1 { + return value + } + + r := "" + n := len(value) + for i := 0; i < n; i++ { + if value[i] == '\\' { + if i+1 < n { + i++ + //if is it oct + if i+2 < n && isOctChar(value[i]) && isOctChar(value[i+1]) && isOctChar(value[i+2]) { + t, err := strconv.ParseInt(value[i:i+3], 8, 32) + if err == nil { + r = r + string(rune(t)) + } + i += 2 + continue + } + switch value[i] { + case '0': + r = r + string(byte(0)) + case 'a': + r = r + "\a" + case 'b': + r = r + "\b" + case 'f': + r = r + "\f" + case 't': + r = r + "\t" + case 'r': + r = r + "\r" + case 'n': + r = r + "\n" + case 'v': + r = r + "\v" + case 'x': + i++ + if i+3 < n && isHexChar(value[i]) && + isHexChar(value[i+1]) && + isHexChar(value[i+2]) && + isHexChar(value[i+3]) { + + t, err := strconv.ParseInt(value[i:i+4], 16, 32) + if err == nil { + r = r + string(rune(t)) + } + i += 3 + } + default: + r = fmt.Sprintf("%s%c", r, value[i]) + } + } + } else { + r = fmt.Sprintf("%s%c", r, value[i]) + } + } + return r +} + +func toEscape(s string) string { + result := bytes.NewBuffer(make([]byte, 0)) + + n := len(s) + + for i := 0; i < n; i++ { + switch s[i] { + case 0: + result.WriteString("\\0") + case '\\': + result.WriteString("\\\\") + case '\a': + result.WriteString("\\a") + case '\b': + result.WriteString("\\b") + case '\t': + result.WriteString("\\t") + case '\r': + result.WriteString("\\r") + case '\n': + result.WriteString("\\n") + case ';': + result.WriteString("\\;") + case '#': + result.WriteString("\\#") + case '=': + result.WriteString("\\=") + case ':': + result.WriteString("\\:") + default: + result.WriteByte(s[i]) + } + } + return result.String() +} +func removeContinuationSuffix(value string) (string, bool) { + pos := strings.LastIndex(value, "\\") + n := len(value) + if pos == -1 || pos != n-1 { + return "", false + } + for pos >= 0 { + if value[pos] != '\\' { + return "", false + } + pos-- + if pos < 0 || value[pos] != '\\' { + return value[0 : n-1], true + } + pos-- + } + return "", false +} + +type lineReader struct { + reader *bufio.Scanner +} + +func newLineReader(reader io.Reader) *lineReader { + return &lineReader{reader: bufio.NewScanner(reader)} +} + +func (lr *lineReader) readLine() (string, error) { + if lr.reader.Scan() { + return lr.reader.Text(), nil + } + return "", errors.New("No data") + +} + +func readLinesUntilSuffix(lineReader *lineReader, suffix string) string { + r := "" + for { + line, err := lineReader.readLine() + if err != nil { + break + } + t 
:= strings.TrimRightFunc(line, unicode.IsSpace) + if strings.HasSuffix(t, suffix) { + r = r + t[0:len(t)-len(suffix)] + break + } else { + r = r + line + "\n" + } + } + return r +} + +func readContinuationLines(lineReader *lineReader) string { + r := "" + for { + line, err := lineReader.readLine() + if err != nil { + break + } + line = strings.TrimRightFunc(line, unicode.IsSpace) + if t, continuation := removeContinuationSuffix(line); continuation { + r = r + t + } else { + r = r + line + break + } + } + return r +} + +/* +Load from the sources, the source can be one of: + - fileName + - a string includes .ini + - io.Reader the reader to load the .ini contents + - byte array incldues .ini content +*/ +func (ini *Ini) Load(sources ...interface{}) { + for _, source := range sources { + switch source.(type) { + case string: + s, _ := source.(string) + if _, err := os.Stat(s); err == nil { + ini.LoadFile(s) + } else { + ini.LoadString(s) + } + case io.Reader: + reader, _ := source.(io.Reader) + ini.LoadReader(reader) + case []byte: + b, _ := source.([]byte) + ini.LoadBytes(b) + } + } + +} + +// Explicitly loads .ini from a reader +// +func (ini *Ini) LoadReader(reader io.Reader) { + lineReader := newLineReader(reader) + var curSection *Section = nil + for { + line, err := lineReader.readLine() + if err != nil { + break + } + line = strings.TrimSpace(line) + + //empty line or comments line + if len(line) <= 0 || line[0] == ';' || line[0] == '#' { + continue + } + //if it is a section + if strings.HasPrefix(line, "[") && strings.HasSuffix(line, "]") { + sectionName := strings.TrimSpace(line[1 : len(line)-1]) + if len(sectionName) > 0 { + curSection = ini.NewSection(sectionName) + } + continue + } + pos := strings.IndexAny(line, "=;") + if pos != -1 { + key := strings.TrimSpace(line[0:pos]) + value := strings.TrimLeftFunc(line[pos+1:], unicode.IsSpace) + //if it is a multiline indicator + if strings.HasPrefix(value, "\"\"\"") { + t := strings.TrimRightFunc(value, unicode.IsSpace) + //if the end multiline indicator is found + if strings.HasSuffix(t, "\"\"\"") { + value = t[3 : len(t)-3] + } else { //read lines until end multiline indicator is found + value = value[3:] + "\n" + readLinesUntilSuffix(lineReader, "\"\"\"") + } + } else { + value = strings.TrimRightFunc(value, unicode.IsSpace) + //if is it a continuation line + if t, continuation := removeContinuationSuffix(value); continuation { + value = t + readContinuationLines(lineReader) + } + } + + if len(key) > 0 { + if curSection == nil && len(ini.defaultSectionName) > 0 { + curSection = ini.NewSection(ini.defaultSectionName) + } + if curSection != nil { + //remove the comments and convert escape char to real + curSection.Add(key, strings.TrimSpace(fromEscape(removeComments(value)))) + } + } + } + } +} + +// Load ini file from file named fileName +// +func (ini *Ini) LoadFile(fileName string) { + f, err := os.Open(fileName) + if err == nil { + defer f.Close() + ini.Load(f) + } +} + +var defaultSectionName string = "default" + +func SetDefaultSectionName(defSectionName string) { + defaultSectionName = defSectionName +} + +// load ini from the content which contains the .ini formated string +// +func (ini *Ini) LoadString(content string) { + ini.Load(bytes.NewBufferString(content)) +} + +// load .ini from a byte array which contains the .ini formated content +func (ini *Ini) LoadBytes(content []byte) { + ini.Load(bytes.NewBuffer(content)) +} + +/* +Load the .ini from one of following resource: + - file + - string in .ini format + - byte array in 
.ini format + - io.Reader a reader to load .ini content + +One or more source can be provided in this Load method, such as: + var reader1 io.Reader = ... + var reader2 io.Reader = ... + ini.Load( "./my.ini", "[section]\nkey=1", "./my2.ini", reader1, reader2 ) +*/ +func Load(sources ...interface{}) *Ini { + ini := NewIni() + ini.SetDefaultSectionName(defaultSectionName) + for _, source := range sources { + ini.Load(source) + } + return ini +} diff --git a/vendor/github.com/ochinchina/go-ini/properties.go b/vendor/github.com/ochinchina/go-ini/properties.go new file mode 100644 index 000000000..bf02b6e5c --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/properties.go @@ -0,0 +1,116 @@ +package ini + +type Properties struct { + ini *Ini +} + +func NewProperties() *Properties { + return &Properties{ini: NewIni()} +} + +func (p *Properties) Load(sources ...interface{}) { + p.ini.Load(sources) +} + +func (p *Properties) GetProperty(key string) (string, error) { + return p.ini.GetValue(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetPropertyWithDefault(key string, defValue string) string { + v, err := p.GetProperty(key) + if err == nil { + return v + } + return defValue +} + +func (p *Properties) GetBool(key string) (bool, error) { + return p.ini.GetBool(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetBoolWithDefault(key string, defValue bool) bool{ + v, err := p.GetBool(key) + if err == nil { + return v + } else { + return defValue + } +} + +func (p *Properties) GetInt(key string) (int, error) { + return p.ini.GetInt(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetIntWithDefault(key string, defValue int) int { + v, err := p.GetInt(key) + if err == nil { + return v + } else { + return defValue + } +} + +func (p *Properties) GetInt64(key string) (int64, error) { + return p.ini.GetInt64(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetInt64WithDefault(key string, defValue int64) int64 { + v, err := p.GetInt64(key) + if err == nil { + return v + } else { + return defValue + } +} + +func (p *Properties) GetUint64(key string) (uint64, error) { + return p.ini.GetUint64(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetUint64WithDefault(key string, defValue uint64) uint64 { + v, err := p.GetUint64(key) + if err == nil { + return v + } else { + return defValue + } +} + +func (p *Properties) GetUint(key string) (uint, error) { + return p.ini.GetUint(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetUintWithDefault(key string, defValue uint) uint { + v, err := p.GetUint(key) + if err == nil { + return v + } else { + return defValue + } +} + +func (p *Properties) GetFloat32(key string) (float32, error) { + return p.ini.GetFloat32(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetFloat32WithDefault(key string, defValue float32) float32 { + v, err := p.GetFloat32(key) + if err == nil { + return v + } else { + return defValue + } +} + +func (p *Properties) GetFloat64(key string) (float64, error) { + return p.ini.GetFloat64(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetFloat64WithDefault(key string, defValue float64) float64 { + v, err := p.GetFloat64(key) + if err == nil { + return v + } else { + return defValue + } +} diff --git a/vendor/github.com/ochinchina/go-ini/section.go b/vendor/github.com/ochinchina/go-ini/section.go new file mode 100644 index 000000000..e04ba4277 --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/section.go @@ -0,0 +1,177 @@ +package ini 
+ +import ( + "bytes" + "fmt" + "io" +) + +// manages all the key/value defined in the .ini file format +type Section struct { + //Name of the section + Name string + //key values + keyValues map[string]Key +} + +// construct a new section with section name +func NewSection(name string) *Section { + return &Section{Name: name, + keyValues: make(map[string]Key)} +} + +// add key/value to the section and overwrite the old one +func (section *Section) Add(key, value string) { + section.keyValues[key] = newNormalKey(key, value) +} + +// check if the key is in the section +// +// return true if the section contains the key +func (section *Section) HasKey(key string) bool { + _, ok := section.keyValues[key] + return ok +} + +// Get all the keys in the section +// +// return: all keys in the section +func (section *Section) Keys() []Key { + r := make([]Key, 0) + for _, v := range section.keyValues { + r = append(r, v) + } + return r +} + +// Get the key. +// +// This method can be called even if the key is not in the +// section. +func (section *Section) Key(key string) Key { + if v, ok := section.keyValues[key]; ok { + return v + } + return newNonExistKey(key) +} + +// Get value of key as string +func (section *Section) GetValue(key string) (string, error) { + return section.Key(key).Value() +} + +// Get value of key and if the key does not exist, return the defValue +func (section *Section) GetValueWithDefault(key string, defValue string) string { + return section.Key(key).ValueWithDefault(defValue) +} + +// Get the value of key as bool, it will return true if the value of the key is one +// of following( case insensitive): +// - true +// - yes +// - t +// - y +// - 1 +func (section *Section) GetBool(key string) (bool, error) { + return section.Key(key).Bool() +} + +// Get the value of key as bool and if the key does not exist, return the +// default value +func (section *Section) GetBoolWithDefault(key string, defValue bool) bool { + return section.Key(key).BoolWithDefault(defValue) +} + +// Get the value of the key as int +func (section *Section) GetInt(key string) (int, error) { + return section.Key(key).Int() +} + +// Get the value of the key as int and if the key does not exist return +// the default value +func (section *Section) GetIntWithDefault(key string, defValue int) int { + return section.Key(key).IntWithDefault(defValue) +} + +// Get the value of the key as uint +func (section *Section) GetUint(key string) (uint, error) { + return section.Key(key).Uint() +} + +// Get the value of the key as int and if the key does not exist return +// the default value +func (section *Section) GetUintWithDefault(key string, defValue uint) uint { + return section.Key(key).UintWithDefault(defValue) +} + +// Get the value of the key as int64 +func (section *Section) GetInt64(key string) (int64, error) { + return section.Key(key).Int64() +} + +// Get the value of the key as int64 and if the key does not exist return +// the default value +func (section *Section) GetInt64WithDefault(key string, defValue int64) int64 { + return section.Key(key).Int64WithDefault(defValue) +} + +// Get the value of the key as uint64 +func (section *Section) GetUint64(key string) (uint64, error) { + return section.Key(key).Uint64() +} + +// Get the value of the key as uint64 and if the key does not exist return +// the default value +func (section *Section) GetUint64WithDefault(key string, defValue uint64) uint64 { + return section.Key(key).Uint64WithDefault(defValue) +} + +// Get the value of the key as float32 +func 
(section *Section) GetFloat32(key string) (float32, error) { + return section.Key(key).Float32() +} + +// Get the value of the key as float32 and if the key does not exist return +// the default value +func (section *Section) GetFloat32WithDefault(key string, defValue float32) float32 { + return section.Key(key).Float32WithDefault(defValue) +} + +// Get the value of the key as float64 +func (section *Section) GetFloat64(key string) (float64, error) { + return section.Key(key).Float64() +} + +// Get the value of the key as float64 and if the key does not exist return +// the default value +func (section *Section) GetFloat64WithDefault(key string, defValue float64) float64 { + return section.Key(key).Float64WithDefault(defValue) +} + +// convert the section content to the .ini section format, so the section content will +// be converted to following format: +// +// [sectionx] +// key1 = value1 +// key2 = value2 +// +func (section *Section) String() string { + buf := bytes.NewBuffer(make([]byte, 0)) + section.Write(buf) + return buf.String() +} + +// write the section content to the writer with .ini section format. +func (section *Section) Write(writer io.Writer) error { + _, err := fmt.Fprintf(writer, "[%s]\n", section.Name) + if err != nil { + return err + } + for _, v := range section.keyValues { + _, err = fmt.Fprintf(writer, "%s\n", v.String()) + if err != nil { + return err + } + } + return nil +} diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/LICENSE b/vendor/github.com/ochinchina/gorilla-xmlrpc/LICENSE new file mode 100644 index 000000000..2e907e487 --- /dev/null +++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2013, Ivan Daniluk +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. + +* Neither the name of the {organization} nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
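The client.go hunk that follows exposes EncodeClientRequest and DecodeClientResponse for building XML-RPC calls over HTTP POST. Below is a minimal usage sketch, not part of the patch itself; the endpoint URL, the "HelloService.Say" method name, and the Args/Reply structures are illustrative assumptions.

```go
package main

import (
	"bytes"
	"log"
	"net/http"

	"github.com/ochinchina/gorilla-xmlrpc/xml"
)

// Args and Reply are hypothetical request/response structures; the field
// names are assumptions and not defined by the vendored package.
type Args struct{ Who string }
type Reply struct{ Message string }

func main() {
	// Encode the XML-RPC request body for a hypothetical "HelloService.Say" method.
	buf, err := xml.EncodeClientRequest("HelloService.Say", &Args{Who: "world"})
	if err != nil {
		log.Fatal(err)
	}

	// POST the body to a hypothetical XML-RPC endpoint.
	resp, err := http.Post("http://localhost:8000/RPC2", "text/xml", bytes.NewBuffer(buf))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// Decode the XML-RPC response into the reply struct.
	var reply Reply
	if err := xml.DecodeClientResponse(resp.Body, &reply); err != nil {
		log.Fatal(err)
	}
	log.Println(reply.Message)
}
```

Note that EncodeClientRequest is given a pointer to the args struct, since the marshalling code walks the struct fields through reflection on the pointed-to value.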
diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/client.go b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/client.go
new file mode 100644
index 000000000..d8cb0a4cf
--- /dev/null
+++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/client.go
@@ -0,0 +1,26 @@
+// Copyright 2013 Ivan Danyliuk
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xml
+
+import (
+	"io"
+	"io/ioutil"
+)
+
+// EncodeClientRequest encodes parameters for an XML-RPC client request.
+func EncodeClientRequest(method string, args interface{}) ([]byte, error) {
+	xml, err := rpcRequest2XML(method, args)
+	return []byte(xml), err
+}
+
+// DecodeClientResponse decodes the response body of a client request into
+// the interface reply.
+func DecodeClientResponse(r io.Reader, reply interface{}) error {
+	rawxml, err := ioutil.ReadAll(r)
+	if err != nil {
+		return FaultSystemError
+	}
+	return xml2RPC(string(rawxml), reply)
+}
diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/doc.go b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/doc.go
new file mode 100644
index 000000000..eebf2e8ae
--- /dev/null
+++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/doc.go
@@ -0,0 +1,50 @@
+/*
+XML-RPC implementation for the Gorilla/RPC toolkit.
+
+It is built on top of the gorilla/rpc package for Go (Golang) and implements XML-RPC according to its specification. Unlike net/rpc from the Go standard library, gorilla/rpc allows usage of HTTP POST requests for RPC.
+
+XML-RPC spec: http://xmlrpc.scripting.com/spec.html
+
+Installation
+
+Assuming you already imported gorilla/rpc, use the following command:
+
+	go get github.com/divan/gorilla-xmlrpc/xml
+
+Implementation details
+
+The main objective was to use the standard encoding/xml package for XML marshalling/unmarshalling. Unfortunately, in the current implementation there is no graceful way to implement a common structure for the marshal and unmarshal functions - marshalling doesn't handle interface{} types so far (though, it could be changed in the future). So, marshalling is implemented manually.
+
+Unmarshalling code first creates a temporary structure for unmarshalling the XML into, then converts it into the passed variable using the reflect package. If an XML struct member's name is lowercased, its first letter will be uppercased, since in Go/Gorilla a field name must be exported (first letter uppercased).
+
+Marshalling code converts the rpc value directly to its string XML representation.
+
+For better understanding, I use the terms 'rpc2xml' and 'xml2rpc' instead of 'marshal' and 'unmarshal'.
+
+Types
+
+The following types are supported:
+
+	XML-RPC          Golang
+	-------          ------
+	int, i4          int
+	double           float64
+	boolean          bool
+	string           string
+	dateTime.iso8601 time.Time
+	base64           []byte
+	struct           struct
+	array            []interface{}
+	nil              nil
+
+TODO
+
+TODO list:
+	* Add more corner case tests
+
+Examples
+
+Check out the examples in the examples/ directory.
+
+*/
+package xml
diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/fault.go b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/fault.go
new file mode 100644
index 000000000..4a24efb66
--- /dev/null
+++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/fault.go
@@ -0,0 +1,51 @@
+// Copyright 2013 Ivan Danyliuk
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xml
+
+import (
+	"fmt"
+	"io"
+)
+
+// Default Faults
+// NOTE: XMLRPC spec doesn't specify any Fault codes.
+// These codes seem to be widely accepted, and are taken from http://xmlrpc-epi.sourceforge.net/specs/rfc.fault_codes.php
+var (
+	FaultInvalidParams        = Fault{Code: -32602, String: "Invalid Method Parameters"}
+	FaultWrongArgumentsNumber = Fault{Code: -32602, String: "Wrong Arguments Number"}
+	FaultInternalError        = Fault{Code: -32603, String: "Internal Server Error"}
+	FaultApplicationError     = Fault{Code: -32500, String: "Application Error"}
+	FaultSystemError          = Fault{Code: -32400, String: "System Error"}
+	FaultDecode               = Fault{Code: -32700, String: "Parsing error: not well formed"}
+)
+
+// Fault represents XML-RPC Fault.
+type Fault struct {
+	Code   int    `xml:"faultCode"`
+	String string `xml:"faultString"`
+}
+
+// Error satisfies the error interface for Fault.
+func (f Fault) Error() string {
+	return fmt.Sprintf("%d: %s", f.Code, f.String)
+}
+
+// fault2XML is a quick 'marshalling' replacement for the Fault case.
+func fault2XML(fault Fault, buffer io.Writer) {
+	fmt.Fprintf(buffer, "<methodResponse><fault>")
+	rpc2XML(fault, buffer)
+	fmt.Fprintf(buffer, "</fault></methodResponse>")
+}
+
+type faultValue struct {
+	Value value `xml:"value"`
+}
+
+// IsEmpty returns true if the faultValue contains no fault.
+//
+// faultValue should be a struct with 2 members.
+func (f faultValue) IsEmpty() bool {
+	return len(f.Value.Struct) == 0
+}
diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/rpc2xml.go b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/rpc2xml.go
new file mode 100644
index 000000000..6c17e5a2f
--- /dev/null
+++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/rpc2xml.go
@@ -0,0 +1,149 @@
+// Copyright 2013 Ivan Danyliuk
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xml
+
+import (
+	"bytes"
+	"encoding/base64"
+	"fmt"
+	"io"
+	"reflect"
+	"strings"
+	"time"
+)
+
+func rpcRequest2XML(method string, rpc interface{}) (string, error) {
+	buffer := bytes.NewBuffer(make([]byte, 0))
+	fmt.Fprintf(buffer, "<methodCall><methodName>%s</methodName>", method)
+	err := rpcParams2XML(rpc, buffer)
+	fmt.Fprintf(buffer, "</methodCall>")
+	return buffer.String(), err
+}
+
+func rpcResponse2XMLStr(rpc interface{}) (string, error) {
+	buffer := bytes.NewBuffer(make([]byte, 0))
+	err := rpcResponse2XML(rpc, buffer)
+	return buffer.String(), err
+}
+
+func rpcResponse2XML(rpc interface{}, writer io.Writer) error {
+	fmt.Fprintf(writer, "<methodResponse>")
+	err := rpcParams2XML(rpc, writer)
+	fmt.Fprintf(writer, "</methodResponse>")
+	return err
+}
+
+func rpcParams2XML(rpc interface{}, writer io.Writer) error {
+	var err error
+	fmt.Fprintf(writer, "<params>")
+	for i := 0; i < reflect.ValueOf(rpc).Elem().NumField(); i++ {
+		fmt.Fprintf(writer, "<param>")
+		err = rpc2XML(reflect.ValueOf(rpc).Elem().Field(i).Interface(), writer)
+		fmt.Fprintf(writer, "</param>")
+	}
+	fmt.Fprintf(writer, "</params>")
+	return err
+}
+
+func rpc2XML(value interface{}, writer io.Writer) error {
+	fmt.Fprintf(writer, "<value>")
+	switch reflect.ValueOf(value).Kind() {
+	case reflect.Int:
+		fmt.Fprintf(writer, "<int>%d</int>", value.(int))
+	case reflect.Float64:
+		fmt.Fprintf(writer, "<double>%f</double>", value.(float64))
+	case reflect.String:
+		string2XML(value.(string), writer)
+	case reflect.Bool:
+		bool2XML(value.(bool), writer)
+	case reflect.Struct:
+		if reflect.TypeOf(value).String() != "time.Time" {
+			struct2XML(value, writer)
+		} else {
+			time2XML(value.(time.Time), writer)
+		}
+	case reflect.Slice, reflect.Array:
+		// FIXME: is it the best way to recognize '[]byte'?
+		if reflect.TypeOf(value).String() != "[]uint8" {
+			array2XML(value, writer)
+		} else {
+			base642XML(value.([]byte), writer)
+		}
+	case reflect.Ptr:
+		if reflect.ValueOf(value).IsNil() {
+			fmt.Fprintf(writer, "<nil/>")
+		}
+	}
+	fmt.Fprintf(writer, "</value>")
+	return nil
+}
+
+func bool2XML(value bool, writer io.Writer) {
+	var b string
+	if value {
+		b = "1"
+	} else {
+		b = "0"
+	}
+	fmt.Fprintf(writer, "<boolean>%s</boolean>", b)
+}
+
+func string2XML(value string, writer io.Writer) {
+	value = strings.Replace(value, "&", "&amp;", -1)
+	value = strings.Replace(value, "\"", "&quot;", -1)
+	value = strings.Replace(value, "<", "&lt;", -1)
+	value = strings.Replace(value, ">", "&gt;", -1)
+	fmt.Fprintf(writer, "<string>%s</string>", value)
+}
+
+func struct2XML(value interface{}, writer io.Writer) {
+	fmt.Fprintf(writer, "<struct>")
+	for i := 0; i < reflect.TypeOf(value).NumField(); i++ {
+		field := reflect.ValueOf(value).Field(i)
+		field_type := reflect.TypeOf(value).Field(i)
+		var name string
+		if field_type.Tag.Get("xml") != "" {
+			name = field_type.Tag.Get("xml")
+		} else {
+			name = field_type.Name
+		}
+		fmt.Fprintf(writer, "<member>")
+		fmt.Fprintf(writer, "<name>%s</name>", name)
+		rpc2XML(field.Interface(), writer)
+		fmt.Fprintf(writer, "</member>")
+	}
+	fmt.Fprintf(writer, "</struct>")
+	return
+}
+
+func array2XML(value interface{}, writer io.Writer) {
+	fmt.Fprintf(writer, "<array><data>")
+	for i := 0; i < reflect.ValueOf(value).Len(); i++ {
+		rpc2XML(reflect.ValueOf(value).Index(i).Interface(), writer)
+	}
+	fmt.Fprintf(writer, "</data></array>")
+}
+
+func time2XML(t time.Time, writer io.Writer) {
+	/*
+		// TODO: find out whether we need to deal
+		// here with TZ
+		var tz string;
+		zone, offset := t.Zone()
+		if zone == "UTC" {
+			tz = "Z"
+		} else {
+			tz = fmt.Sprintf("%03d00", offset / 3600 )
+		}
+	*/
+	fmt.Fprintf(writer, "<dateTime.iso8601>%04d%02d%02dT%02d:%02d:%02d</dateTime.iso8601>",
+		t.Year(), t.Month(), t.Day(),
+		t.Hour(), t.Minute(), t.Second())
+}
+
+func base642XML(data []byte, writer io.Writer) {
+	str := base64.StdEncoding.EncodeToString(data)
+	fmt.Fprintf(writer, "<base64>%s</base64>", str)
+}
diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/server.go b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/server.go
new file mode 100644
index 000000000..a1eb0a7e0
--- /dev/null
+++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/server.go
@@ -0,0 +1,118 @@
+// Copyright 2013 Ivan Danyliuk
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xml
+
+import (
+	"bytes"
+	"encoding/xml"
+	"fmt"
+	"io/ioutil"
+	"net/http"
+
+	"github.com/gorilla/rpc"
+)
+
+// ----------------------------------------------------------------------------
+// Codec
+// ----------------------------------------------------------------------------
+
+// NewCodec returns a new XML-RPC Codec.
+func NewCodec() *Codec {
+	return &Codec{
+		aliases: make(map[string]string),
+	}
+}
+
+// Codec creates a CodecRequest to process each request.
+type Codec struct {
+	aliases map[string]string
+}
+
+// RegisterAlias creates a method alias
+func (c *Codec) RegisterAlias(alias, method string) {
+	c.aliases[alias] = method
+}
+
+// NewRequest returns a CodecRequest.
+func (c *Codec) NewRequest(r *http.Request) rpc.CodecRequest { + rawxml, err := ioutil.ReadAll(r.Body) + if err != nil { + return &CodecRequest{err: err} + } + defer r.Body.Close() + + var request ServerRequest + if err := xml.Unmarshal(rawxml, &request); err != nil { + return &CodecRequest{err: err} + } + request.rawxml = string(rawxml) + if method, ok := c.aliases[request.Method]; ok { + request.Method = method + } + return &CodecRequest{request: &request} +} + +// ---------------------------------------------------------------------------- +// CodecRequest +// ---------------------------------------------------------------------------- + +type ServerRequest struct { + Name xml.Name `xml:"methodCall"` + Method string `xml:"methodName"` + rawxml string +} + +// CodecRequest decodes and encodes a single request. +type CodecRequest struct { + request *ServerRequest + err error +} + +// Method returns the RPC method for the current request. +// +// The method uses a dotted notation as in "Service.Method". +func (c *CodecRequest) Method() (string, error) { + if c.err == nil { + return c.request.Method, nil + } + return "", c.err +} + +// ReadRequest fills the request object for the RPC method. +// +// args is the pointer to the Service.Args structure +// it gets populated from temporary XML structure +func (c *CodecRequest) ReadRequest(args interface{}) error { + c.err = xml2RPC(c.request.rawxml, args) + return nil +} + +// WriteResponse encodes the response and writes it to the ResponseWriter. +// +// response is the pointer to the Service.Response structure +// it gets encoded into the XML-RPC xml string +func (c *CodecRequest) WriteResponse(w http.ResponseWriter, response interface{}, methodErr error) error { + if c.err == nil { + c.err = methodErr + } + buffer := bytes.NewBuffer(make([]byte, 0)) + if c.err != nil { + var fault Fault + switch c.err.(type) { + case Fault: + fault = c.err.(Fault) + default: + fault = FaultApplicationError + fault.String += fmt.Sprintf(": %v", c.err) + } + fault2XML(fault, buffer) + } else { + rpcResponse2XML(response, buffer) + } + + w.Header().Set("Content-Type", "text/xml; charset=utf-8") + buffer.WriteTo(w) + return nil +} diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/xml2rpc.go b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/xml2rpc.go new file mode 100644 index 000000000..48b08536f --- /dev/null +++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/xml2rpc.go @@ -0,0 +1,219 @@ +// Copyright 2013 Ivan Danyliuk +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package xml + +import ( + "bytes" + "encoding/base64" + "encoding/xml" + "fmt" + "reflect" + "strconv" + "time" + "unicode" + "unicode/utf8" + + "github.com/rogpeppe/go-charset/charset" + _ "github.com/rogpeppe/go-charset/data" +) + +// Types used for unmarshalling +type response struct { + Name xml.Name `xml:"methodResponse"` + Params []param `xml:"params>param"` + Fault faultValue `xml:"fault,omitempty"` +} + +type param struct { + Value value `xml:"value"` +} + +type value struct { + Array []value `xml:"array>data>value"` + Struct []member `xml:"struct>member"` + String string `xml:"string"` + Int string `xml:"int"` + Int4 string `xml:"i4"` + Double string `xml:"double"` + Boolean string `xml:"boolean"` + DateTime string `xml:"dateTime.iso8601"` + Base64 string `xml:"base64"` + Raw string `xml:",innerxml"` // the value can be defualt string +} + +type member struct { + Name string `xml:"name"` + Value value `xml:"value"` +} + +func xml2RPC(xmlraw string, rpc interface{}) error { + // Unmarshal raw XML into the temporal structure + var ret response + decoder := xml.NewDecoder(bytes.NewReader([]byte(xmlraw))) + decoder.CharsetReader = charset.NewReader + err := decoder.Decode(&ret) + if err != nil { + return FaultDecode + } + + if !ret.Fault.IsEmpty() { + return getFaultResponse(ret.Fault) + } + + // Now, convert temporal structure into the + // passed rpc variable, according to it's structure + fieldNum := reflect.TypeOf(rpc).Elem().NumField() + //for i, param := range ret.Params { + for i := 0; i < fieldNum; i += 1 { + field := reflect.ValueOf(rpc).Elem().Field(i) + if len(ret.Params) > i { + err = value2Field(ret.Params[i].Value, &field) + } else if reflect.TypeOf(rpc).Elem().Field(i).Tag.Get("default") != "" { + err = value2Field(createValue(reflect.TypeOf(rpc).Elem().Field(i).Type.Kind(), reflect.TypeOf(rpc).Elem().Field(i).Tag.Get("default")), &field) + } + if err != nil { + return err + } + } + + return nil +} + +func createValue(kind reflect.Kind, val string) value { + v := value{} + if kind == reflect.Bool { + v.Boolean = val + } else if kind == reflect.Int { + v.Int = val + } + return v +} + +// getFaultResponse converts faultValue to Fault. 
+func getFaultResponse(fault faultValue) Fault { + var ( + code int + str string + ) + + for _, field := range fault.Value.Struct { + if field.Name == "faultCode" { + code, _ = strconv.Atoi(field.Value.Int) + } else if field.Name == "faultString" { + str = field.Value.String + if str == "" { + str = field.Value.Raw + } + } + } + + return Fault{Code: code, String: str} +} + +func value2Field(value value, field *reflect.Value) error { + if !field.CanSet() { + return FaultApplicationError + } + + var ( + err error + val interface{} + ) + + switch { + case value.Int != "": + val, _ = strconv.Atoi(value.Int) + case value.Int4 != "": + val, _ = strconv.Atoi(value.Int4) + case value.Double != "": + val, _ = strconv.ParseFloat(value.Double, 64) + case value.String != "": + val = value.String + case value.Boolean != "": + val = xml2Bool(value.Boolean) + case value.DateTime != "": + val, err = xml2DateTime(value.DateTime) + case value.Base64 != "": + val, err = xml2Base64(value.Base64) + case len(value.Struct) != 0: + if field.Kind() != reflect.Struct { + fault := FaultInvalidParams + fault.String += fmt.Sprintf("structure fields mismatch: %s != %s", field.Kind(), reflect.Struct.String()) + return fault + } + s := value.Struct + for i := 0; i < len(s); i++ { + // Uppercase first letter for field name to deal with + // methods in lowercase, which cannot be used + field_name := uppercaseFirst(s[i].Name) + f := field.FieldByName(field_name) + err = value2Field(s[i].Value, &f) + } + case len(value.Array) != 0: + a := value.Array + f := *field + slice := reflect.MakeSlice(reflect.TypeOf(f.Interface()), + len(a), len(a)) + for i := 0; i < len(a); i++ { + item := slice.Index(i) + err = value2Field(a[i], &item) + } + f = reflect.AppendSlice(f, slice) + val = f.Interface() + + default: + // value field is default to string, see http://en.wikipedia.org/wiki/XML-RPC#Data_types + // also can be + if value.Raw != "" { + val = value.Raw + } + } + + if val != nil { + if reflect.TypeOf(val) != reflect.TypeOf(field.Interface()) { + fault := FaultInvalidParams + fault.String += fmt.Sprintf(": fields type mismatch: %s != %s", + reflect.TypeOf(val), + reflect.TypeOf(field.Interface())) + return fault + } + + field.Set(reflect.ValueOf(val)) + } + + return err +} + +func xml2Bool(value string) bool { + var b bool + switch value { + case "1", "true", "TRUE", "True": + b = true + case "0", "false", "FALSE", "False": + b = false + } + return b +} + +func xml2DateTime(value string) (time.Time, error) { + var ( + year, month, day int + hour, minute, second int + ) + _, err := fmt.Sscanf(value, "%04d%02d%02dT%02d:%02d:%02d", + &year, &month, &day, + &hour, &minute, &second) + t := time.Date(year, time.Month(month), day, hour, minute, second, 0, time.Local) + return t, err +} + +func xml2Base64(value string) ([]byte, error) { + return base64.StdEncoding.DecodeString(value) +} + +func uppercaseFirst(in string) (out string) { + r, n := utf8.DecodeRuneInString(in) + return string(unicode.ToUpper(r)) + in[n:] +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/big5.go b/vendor/github.com/rogpeppe/go-charset/charset/big5.go new file mode 100644 index 000000000..e01fa1afd --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/big5.go @@ -0,0 +1,88 @@ +package charset + +import ( + "fmt" + "unicode/utf8" +) + +func init() { + registerClass("big5", fromBig5, nil) +} + +// Big5 consists of 89 fonts of 157 chars each +const ( + big5Max = 13973 + big5Font = 157 + big5Data = "big5.dat" +) + +type translateFromBig5 
struct { + font int + scratch []byte + big5map []rune +} + +func (p *translateFromBig5) Translate(data []byte, eof bool) (int, []byte, error) { + p.scratch = p.scratch[:0] + n := 0 + for len(data) > 0 { + c := int(data[0]) + data = data[1:] + n++ + if p.font == -1 { + // idle state + if c >= 0xa1 { + p.font = c + continue + } + if c == 26 { + c = '\n' + } + continue + } + f := p.font + p.font = -1 + r := utf8.RuneError + switch { + case c >= 64 && c <= 126: + c -= 64 + case c >= 161 && c <= 254: + c = c - 161 + 63 + default: + // bad big5 char + f = 255 + } + if f <= 254 { + f -= 161 + ix := f*big5Font + c + if ix < len(p.big5map) { + r = p.big5map[ix] + } + if r == -1 { + r = utf8.RuneError + } + } + p.scratch = appendRune(p.scratch, r) + } + return n, p.scratch, nil +} + +type big5Key bool + +func fromBig5(arg string) (Translator, error) { + big5map, err := cache(big5Key(false), func() (interface{}, error) { + data, err := readFile(big5Data) + if err != nil { + return nil, fmt.Errorf("charset: cannot open big5 data file: %v", err) + } + big5map := []rune(string(data)) + if len(big5map) != big5Max { + return nil, fmt.Errorf("charset: corrupt big5 data") + } + return big5map, nil + }) + if err != nil { + return nil, err + } + return &translateFromBig5{big5map: big5map.([]rune), font: -1}, nil +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/charset.go b/vendor/github.com/rogpeppe/go-charset/charset/charset.go new file mode 100644 index 000000000..a7af30ee6 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/charset.go @@ -0,0 +1,301 @@ +// The charset package implements translation between character sets. +// It uses Unicode as the intermediate representation. +// Because it can be large, the character set data is separated +// from the charset package. It can be embedded in the Go +// executable by importing the data package: +// +// import _ "code.google.com/p/go-charset/data" +// +// It can also made available in a data directory (by settting CharsetDir). +package charset + +import ( + "io" + "strings" + "unicode/utf8" +) + +// Charset holds information about a given character set. +type Charset struct { + Name string // Canonical name of character set. + Aliases []string // Known aliases. + Desc string // Description. + NoFrom bool // Not possible to translate from this charset. + NoTo bool // Not possible to translate to this charset. +} + +// Translator represents a character set converter. +// The Translate method translates the given data, +// and returns the number of bytes of data consumed, +// a slice containing the converted data (which may be +// overwritten on the next call to Translate), and any +// conversion error. If eof is true, the data represents +// the final bytes of the input. +type Translator interface { + Translate(data []byte, eof bool) (n int, cdata []byte, err error) +} + +// A Factory can be used to make character set translators. +type Factory interface { + // TranslatorFrom creates a translator that will translate from the named character + // set to UTF-8. + TranslatorFrom(name string) (Translator, error) // Create a Translator from this character set to. + + // TranslatorTo creates a translator that will translate from UTF-8 to the named character set. + TranslatorTo(name string) (Translator, error) // Create a Translator To this character set. + + // Names returns all the character set names accessibile through the factory. + Names() []string + + // Info returns information on the named character set. 
It returns nil if the + // factory doesn't recognise the given name. + Info(name string) *Charset +} + +var factories = []Factory{localFactory{}} + +// Register registers a new Factory which will be consulted when NewReader +// or NewWriter needs a character set translator for a given name. +func Register(factory Factory) { + factories = append(factories, factory) +} + +// NewReader returns a new Reader that translates from the named +// character set to UTF-8 as it reads r. +func NewReader(charset string, r io.Reader) (io.Reader, error) { + tr, err := TranslatorFrom(charset) + if err != nil { + return nil, err + } + return NewTranslatingReader(r, tr), nil +} + +// NewWriter returns a new WriteCloser writing to w. It converts writes +// of UTF-8 text into writes on w of text in the named character set. +// The Close is necessary to flush any remaining partially translated +// characters to the output. +func NewWriter(charset string, w io.Writer) (io.WriteCloser, error) { + tr, err := TranslatorTo(charset) + if err != nil { + return nil, err + } + return NewTranslatingWriter(w, tr), nil +} + +// Info returns information about a character set, or nil +// if the character set is not found. +func Info(name string) *Charset { + for _, f := range factories { + if info := f.Info(name); info != nil { + return info + } + } + return nil +} + +// Names returns the canonical names of all supported character sets, in alphabetical order. +func Names() []string { + // TODO eliminate duplicates + var names []string + for _, f := range factories { + names = append(names, f.Names()...) + } + return names +} + +// TranslatorFrom returns a translator that will translate from +// the named character set to UTF-8. +func TranslatorFrom(charset string) (Translator, error) { + var err error + var tr Translator + for _, f := range factories { + tr, err = f.TranslatorFrom(charset) + if err == nil { + break + } + } + if tr == nil { + return nil, err + } + return tr, nil +} + +// TranslatorTo returns a translator that will translate from UTF-8 +// to the named character set. +func TranslatorTo(charset string) (Translator, error) { + var err error + var tr Translator + for _, f := range factories { + tr, err = f.TranslatorTo(charset) + if err == nil { + break + } + } + if tr == nil { + return nil, err + } + return tr, nil +} + +func normalizedChar(c rune) rune { + switch { + case c >= 'A' && c <= 'Z': + c = c - 'A' + 'a' + case c == '_': + c = '-' + } + return c +} + +// NormalisedName returns s with all Roman capitals +// mapped to lower case, and '_' mapped to '-' +func NormalizedName(s string) string { + return strings.Map(normalizedChar, s) +} + +type translatingWriter struct { + w io.Writer + tr Translator + buf []byte // unconsumed data from writer. +} + +// NewTranslatingWriter returns a new WriteCloser writing to w. +// It passes the written bytes through the given Translator. +func NewTranslatingWriter(w io.Writer, tr Translator) io.WriteCloser { + return &translatingWriter{w: w, tr: tr} +} + +func (w *translatingWriter) Write(data []byte) (rn int, rerr error) { + wdata := data + if len(w.buf) > 0 { + w.buf = append(w.buf, data...) + wdata = w.buf + } + n, cdata, err := w.tr.Translate(wdata, false) + if err != nil { + // TODO + } + if n > 0 { + _, err = w.w.Write(cdata) + if err != nil { + return 0, err + } + } + w.buf = w.buf[:0] + if n < len(wdata) { + w.buf = append(w.buf, wdata[n:]...) 
+ } + return len(data), nil +} + +func (p *translatingWriter) Close() error { + for { + n, data, err := p.tr.Translate(p.buf, true) + p.buf = p.buf[n:] + if err != nil { + // TODO + } + // If the Translator produces no data + // at EOF, then assume that it never will. + if len(data) == 0 { + break + } + n, err = p.w.Write(data) + if err != nil { + return err + } + if n < len(data) { + return io.ErrShortWrite + } + if len(p.buf) == 0 { + break + } + } + return nil +} + +type translatingReader struct { + r io.Reader + tr Translator + cdata []byte // unconsumed data from converter. + rdata []byte // unconverted data from reader. + err error // final error from reader. +} + +// NewTranslatingReader returns a new Reader that +// translates data using the given Translator as it reads r. +func NewTranslatingReader(r io.Reader, tr Translator) io.Reader { + return &translatingReader{r: r, tr: tr} +} + +func (r *translatingReader) Read(buf []byte) (int, error) { + for { + if len(r.cdata) > 0 { + n := copy(buf, r.cdata) + r.cdata = r.cdata[n:] + return n, nil + } + if r.err == nil { + r.rdata = ensureCap(r.rdata, len(r.rdata)+len(buf)) + n, err := r.r.Read(r.rdata[len(r.rdata):cap(r.rdata)]) + // Guard against non-compliant Readers. + if n == 0 && err == nil { + err = io.EOF + } + r.rdata = r.rdata[0 : len(r.rdata)+n] + r.err = err + } else if len(r.rdata) == 0 { + break + } + nc, cdata, cvterr := r.tr.Translate(r.rdata, r.err != nil) + if cvterr != nil { + // TODO + } + r.cdata = cdata + + // Ensure that we consume all bytes at eof + // if the converter refuses them. + if nc == 0 && r.err != nil { + nc = len(r.rdata) + } + + // Copy unconsumed data to the start of the rdata buffer. + r.rdata = r.rdata[0:copy(r.rdata, r.rdata[nc:])] + } + return 0, r.err +} + +// ensureCap returns s with a capacity of at least n bytes. +// If cap(s) < n, then it returns a new copy of s with the +// required capacity. +func ensureCap(s []byte, n int) []byte { + if n <= cap(s) { + return s + } + // logic adapted from appendslice1 in runtime + m := cap(s) + if m == 0 { + m = n + } else { + for { + if m < 1024 { + m += m + } else { + m += m / 4 + } + if m >= n { + break + } + } + } + t := make([]byte, len(s), m) + copy(t, s) + return t +} + +func appendRune(buf []byte, r rune) []byte { + n := len(buf) + buf = ensureCap(buf, n+utf8.UTFMax) + nu := utf8.EncodeRune(buf[n:n+utf8.UTFMax], r) + return buf[0 : n+nu] +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/codepage.go b/vendor/github.com/rogpeppe/go-charset/charset/codepage.go new file mode 100644 index 000000000..6864c8753 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/codepage.go @@ -0,0 +1,133 @@ +package charset + +import ( + "fmt" + "unicode/utf8" +) + +func init() { + registerClass("cp", fromCodePage, toCodePage) +} + +type translateFromCodePage struct { + byte2rune *[256]rune + scratch []byte +} + +type cpKeyFrom string +type cpKeyTo string + +func (p *translateFromCodePage) Translate(data []byte, eof bool) (int, []byte, error) { + p.scratch = ensureCap(p.scratch, len(data)*utf8.UTFMax)[:0] + buf := p.scratch + for _, x := range data { + r := p.byte2rune[x] + if r < utf8.RuneSelf { + buf = append(buf, byte(r)) + continue + } + size := utf8.EncodeRune(buf[len(buf):cap(buf)], r) + buf = buf[0 : len(buf)+size] + } + return len(data), buf, nil +} + +type toCodePageInfo struct { + rune2byte map[rune]byte + // same gives the number of runes at start of code page that map exactly to + // unicode. 
+ same rune +} + +type translateToCodePage struct { + toCodePageInfo + scratch []byte +} + +func (p *translateToCodePage) Translate(data []byte, eof bool) (int, []byte, error) { + p.scratch = ensureCap(p.scratch, len(data)) + buf := p.scratch[:0] + + for i := 0; i < len(data); { + r := rune(data[i]) + size := 1 + if r >= utf8.RuneSelf { + r, size = utf8.DecodeRune(data[i:]) + if size == 1 && !eof && !utf8.FullRune(data[i:]) { + return i, buf, nil + } + } + + var b byte + if r < p.same { + b = byte(r) + } else { + var ok bool + b, ok = p.rune2byte[r] + if !ok { + b = '?' + } + } + buf = append(buf, b) + i += size + } + return len(data), buf, nil +} + +func fromCodePage(arg string) (Translator, error) { + runes, err := cache(cpKeyFrom(arg), func() (interface{}, error) { + data, err := readFile(arg) + if err != nil { + return nil, err + } + runes := []rune(string(data)) + if len(runes) != 256 { + return nil, fmt.Errorf("charset: %q has wrong rune count (%d)", arg, len(runes)) + } + r := new([256]rune) + copy(r[:], runes) + return r, nil + }) + if err != nil { + return nil, err + } + return &translateFromCodePage{byte2rune: runes.(*[256]rune)}, nil +} + +func toCodePage(arg string) (Translator, error) { + m, err := cache(cpKeyTo(arg), func() (interface{}, error) { + data, err := readFile(arg) + if err != nil { + return nil, err + } + + info := toCodePageInfo{ + rune2byte: make(map[rune]byte), + same: 256, + } + atStart := true + i := rune(0) + for _, r := range string(data) { + if atStart { + if r == i { + i++ + continue + } + info.same = i + atStart = false + } + info.rune2byte[r] = byte(i) + i++ + } + // TODO fix tables + // fmt.Printf("%s, same = %d\n", arg, info.same) + if i != 256 { + return nil, fmt.Errorf("charset: %q has wrong rune count (%d)", arg, i) + } + return info, nil + }) + if err != nil { + return nil, err + } + return &translateToCodePage{toCodePageInfo: m.(toCodePageInfo)}, nil +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/cp932.go b/vendor/github.com/rogpeppe/go-charset/charset/cp932.go new file mode 100644 index 000000000..9f46262ba --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/cp932.go @@ -0,0 +1,195 @@ +package charset + +import ( + "fmt" + "unicode/utf8" +) + +func init() { + registerClass("cp932", fromCP932, nil) +} + +// encoding details +// (Traditional) Shift-JIS +// +// 00..1f control characters +// 20 space +// 21..7f JIS X 0201:1976/1997 roman (see notes) +// 80 undefined +// 81..9f lead byte of JIS X 0208-1983 or JIS X 0202:1990/1997 +// a0 undefined +// a1..df JIS X 0201:1976/1997 katakana +// e0..ea lead byte of JIS X 0208-1983 or JIS X 0202:1990/1997 +// eb..ff undefined +// +// CP932 (windows-31J) +// +// this encoding scheme extends Shift-JIS in the following way +// +// eb..ec undefined (marked as lead bytes - see notes below) +// ed..ee lead byte of NEC-selected IBM extended characters +// ef undefined (marked as lead byte - see notes below) +// f0..f9 lead byte of User defined GAIJI (see note below) +// fa..fc lead byte of IBM extended characters +// fd..ff undefined +// +// +// Notes +// +// JISX 0201:1976/1997 roman +// this is the same as ASCII but with 0x5c (ASCII code for '\') +// representing the Yen currency symbol '¥' (U+00a5) +// This mapping is contentious, some conversion packages implent it +// others do not. 
+// The mapping files from The Unicode Consortium show cp932 mapping +// plain ascii in the range 00..7f whereas shift-jis maps 0x5c ('\') to the yen +// symbol (¥) and 0x7e ('~') to overline (¯) +// +// CP932 double-byte character codes: +// +// eb-ec, ef, f0-f9: +// Marked as DBCS LEAD BYTEs in the unicode mapping data +// obtained from: +// https://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP932.TXT +// +// but there are no defined mappings for codes in this range. +// It is not clear whether or not an implementation should +// consume one or two bytes before emitting an error char. + +const ( + kanaPages = 1 + kanaPageSize = 63 + kanaChar0 = 0xa1 + + cp932Pages = 45 // 81..84, 87..9f, e0..ea, ed..ee, fa..fc + cp932PageSize = 189 // 40..fc (including 7f) + cp932Char0 = 0x40 +) + +type jisTables struct { + page0 [256]rune + dbcsoff [256]int + cp932 []rune +} + +type translateFromCP932 struct { + tables *jisTables + scratch []byte +} + +func (p *translateFromCP932) Translate(data []byte, eof bool) (int, []byte, error) { + tables := p.tables + p.scratch = p.scratch[:0] + n := 0 + for i := 0; i < len(data); i++ { + b := data[i] + r := tables.page0[b] + if r != -1 { + p.scratch = appendRune(p.scratch, r) + n++ + continue + } + // DBCS + i++ + if i >= len(data) { + break + } + pnum := tables.dbcsoff[b] + ix := int(data[i]) - cp932Char0 + if pnum == -1 || ix < 0 || ix >= cp932PageSize { + r = utf8.RuneError + } else { + r = tables.cp932[pnum*cp932PageSize+ix] + } + p.scratch = appendRune(p.scratch, r) + n += 2 + } + return n, p.scratch, nil +} + +type cp932Key bool + +func fromCP932(arg string) (Translator, error) { + shiftJIS := arg == "shiftjis" + tables, err := cache(cp932Key(shiftJIS), func() (interface{}, error) { + tables := new(jisTables) + kana, err := jisGetMap("jisx0201kana.dat", kanaPageSize, kanaPages) + if err != nil { + return nil, err + } + tables.cp932, err = jisGetMap("cp932.dat", cp932PageSize, cp932Pages) + if err != nil { + return nil, err + } + + // jisx0201kana is mapped into 0xA1..0xDF + for i := 0; i < kanaPageSize; i++ { + tables.page0[i+kanaChar0] = kana[i] + } + + // 00..7f same as ascii in cp932 + for i := rune(0); i < 0x7f; i++ { + tables.page0[i] = i + } + + if shiftJIS { + // shift-jis uses JIS X 0201 for the ASCII range + // this is the same as ASCII apart from + // 0x5c ('\') maps to yen symbol (¥) and 0x7e ('~') maps to overline (¯) + tables.page0['\\'] = '¥' + tables.page0['~'] = '¯' + } + + // pre-calculate DBCS page numbers to mapping file page numbers + // and mark codes in page0 that are DBCS lead bytes + pnum := 0 + for i := 0x81; i <= 0x84; i++ { + tables.page0[i] = -1 + tables.dbcsoff[i] = pnum + pnum++ + } + for i := 0x87; i <= 0x9f; i++ { + tables.page0[i] = -1 + tables.dbcsoff[i] = pnum + pnum++ + } + for i := 0xe0; i <= 0xea; i++ { + tables.page0[i] = -1 + tables.dbcsoff[i] = pnum + pnum++ + } + if shiftJIS { + return tables, nil + } + // add in cp932 extensions + for i := 0xed; i <= 0xee; i++ { + tables.page0[i] = -1 + tables.dbcsoff[i] = pnum + pnum++ + } + for i := 0xfa; i <= 0xfc; i++ { + tables.page0[i] = -1 + tables.dbcsoff[i] = pnum + pnum++ + } + return tables, nil + }) + + if err != nil { + return nil, err + } + + return &translateFromCP932{tables: tables.(*jisTables)}, nil +} + +func jisGetMap(name string, pgsize, npages int) ([]rune, error) { + data, err := readFile(name) + if err != nil { + return nil, err + } + m := []rune(string(data)) + if len(m) != pgsize*npages { + return nil, fmt.Errorf("%q: incorrect length data", 
name) + } + return m, nil +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/file.go b/vendor/github.com/rogpeppe/go-charset/charset/file.go new file mode 100644 index 000000000..a0c26225e --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/file.go @@ -0,0 +1,40 @@ +package charset + +import ( + "io" + "io/ioutil" + "os" + "path/filepath" +) + +var files = make(map[string]func() (io.ReadCloser, error)) + +// RegisterDataFile registers the existence of a given data +// file with the given name that may be used by a character-set converter. +// It is intended to be used by packages that wish to embed +// data in the executable binary, and should not be +// used normally. +func RegisterDataFile(name string, open func() (io.ReadCloser, error)) { + files[name] = open +} + +// CharsetDir gives the location of the default data file directory. +// This directory will be used for files with names that have not +// been registered with RegisterDataFile. +var CharsetDir = "/usr/local/lib/go-charset/datafiles" + +func readFile(name string) (data []byte, err error) { + var r io.ReadCloser + if open := files[name]; open != nil { + r, err = open() + if err != nil { + return + } + } else { + r, err = os.Open(filepath.Join(CharsetDir, name)) + if err != nil { + return + } + } + return ioutil.ReadAll(r) +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/local.go b/vendor/github.com/rogpeppe/go-charset/charset/local.go new file mode 100644 index 000000000..9776b962f --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/local.go @@ -0,0 +1,162 @@ +package charset + +import ( + "encoding/json" + "fmt" + "os" + "sync" +) + +var ( + readLocalCharsetsOnce sync.Once + localCharsets = make(map[string]*localCharset) +) + +type localCharset struct { + Charset + arg string + *class +} + +// A class of character sets. +// Each class can be instantiated with an argument specified in the config file. +// Many character sets can use a single class. +type class struct { + from, to func(arg string) (Translator, error) +} + +// The set of classes, indexed by class name. +var classes = make(map[string]*class) + +func registerClass(charset string, from, to func(arg string) (Translator, error)) { + classes[charset] = &class{from, to} +} + +type localFactory struct{} + +func (f localFactory) TranslatorFrom(name string) (Translator, error) { + f.init() + name = NormalizedName(name) + cs := localCharsets[name] + if cs == nil { + return nil, fmt.Errorf("character set %q not found", name) + } + if cs.from == nil { + return nil, fmt.Errorf("cannot translate from %q", name) + } + return cs.from(cs.arg) +} + +func (f localFactory) TranslatorTo(name string) (Translator, error) { + f.init() + name = NormalizedName(name) + cs := localCharsets[name] + if cs == nil { + return nil, fmt.Errorf("character set %q not found", name) + } + if cs.to == nil { + return nil, fmt.Errorf("cannot translate to %q", name) + } + return cs.to(cs.arg) +} + +func (f localFactory) Names() []string { + f.init() + var names []string + for name, cs := range localCharsets { + // add names only for non-aliases. + if localCharsets[cs.Name] == cs { + names = append(names, name) + } + } + return names +} + +func (f localFactory) Info(name string) *Charset { + f.init() + lcs := localCharsets[NormalizedName(name)] + if lcs == nil { + return nil + } + // copy the charset info so that callers can't mess with it. 
+ cs := lcs.Charset + return &cs +} + +func (f localFactory) init() { + readLocalCharsetsOnce.Do(readLocalCharsets) +} + +// charsetEntry is the data structure for one entry in the JSON config file. +// If Alias is non-empty, it should be the canonical name of another +// character set; otherwise Class should be the name +// of an entry in classes, and Arg is the argument for +// instantiating it. +type charsetEntry struct { + Aliases []string + Desc string + Class string + Arg string +} + +// readCharsets reads the JSON config file. +// It's done once only, when first needed. +func readLocalCharsets() { + csdata, err := readFile("charsets.json") + if err != nil { + fmt.Fprintf(os.Stderr, "charset: cannot open \"charsets.json\": %v\n", err) + return + } + + var entries map[string]charsetEntry + err = json.Unmarshal(csdata, &entries) + if err != nil { + fmt.Fprintf(os.Stderr, "charset: cannot decode config file: %v\n", err) + } + for name, e := range entries { + class := classes[e.Class] + if class == nil { + continue + } + name = NormalizedName(name) + for i, a := range e.Aliases { + e.Aliases[i] = NormalizedName(a) + } + cs := &localCharset{ + Charset: Charset{ + Name: name, + Aliases: e.Aliases, + Desc: e.Desc, + NoFrom: class.from == nil, + NoTo: class.to == nil, + }, + arg: e.Arg, + class: class, + } + localCharsets[cs.Name] = cs + for _, a := range cs.Aliases { + localCharsets[a] = cs + } + } +} + +// A general cache store that local character set translators +// can use for persistent storage of data. +var ( + cacheMutex sync.Mutex + cacheStore = make(map[interface{}]interface{}) +) + +func cache(key interface{}, f func() (interface{}, error)) (interface{}, error) { + cacheMutex.Lock() + defer cacheMutex.Unlock() + if x := cacheStore[key]; x != nil { + return x, nil + } + x, err := f() + if err != nil { + return nil, err + } + cacheStore[key] = x + return x, err +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/utf16.go b/vendor/github.com/rogpeppe/go-charset/charset/utf16.go new file mode 100644 index 000000000..ebde794c9 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/utf16.go @@ -0,0 +1,110 @@ +package charset + +import ( + "encoding/binary" + "errors" + "unicode/utf8" +) + +func init() { + registerClass("utf16", fromUTF16, toUTF16) +} + +type translateFromUTF16 struct { + first bool + endian binary.ByteOrder + scratch []byte +} + +func (p *translateFromUTF16) Translate(data []byte, eof bool) (int, []byte, error) { + data = data[0 : len(data)&^1] // round to even number of bytes. 
+ if len(data) < 2 { + return 0, nil, nil + } + n := 0 + if p.first && p.endian == nil { + switch binary.BigEndian.Uint16(data) { + case 0xfeff: + p.endian = binary.BigEndian + data = data[2:] + n += 2 + case 0xfffe: + p.endian = binary.LittleEndian + data = data[2:] + n += 2 + default: + p.endian = guessEndian(data) + } + p.first = false + } + + p.scratch = p.scratch[:0] + for ; len(data) > 0; data = data[2:] { + p.scratch = appendRune(p.scratch, rune(p.endian.Uint16(data))) + n += 2 + } + return n, p.scratch, nil +} + +func guessEndian(data []byte) binary.ByteOrder { + // XXX TODO + return binary.LittleEndian +} + +type translateToUTF16 struct { + first bool + endian binary.ByteOrder + scratch []byte +} + +func (p *translateToUTF16) Translate(data []byte, eof bool) (int, []byte, error) { + p.scratch = ensureCap(p.scratch[:0], (len(data)+1)*2) + if p.first { + p.scratch = p.scratch[0:2] + p.endian.PutUint16(p.scratch, 0xfeff) + p.first = false + } + n := 0 + for len(data) > 0 { + if !utf8.FullRune(data) && !eof { + break + } + r, size := utf8.DecodeRune(data) + // TODO if r > 65535? + + slen := len(p.scratch) + p.scratch = p.scratch[0 : slen+2] + p.endian.PutUint16(p.scratch[slen:], uint16(r)) + data = data[size:] + n += size + } + return n, p.scratch, nil +} + +func getEndian(arg string) (binary.ByteOrder, error) { + switch arg { + case "le": + return binary.LittleEndian, nil + case "be": + return binary.BigEndian, nil + case "": + return nil, nil + } + return nil, errors.New("charset: unknown utf16 endianness") +} + +func fromUTF16(arg string) (Translator, error) { + endian, err := getEndian(arg) + if err != nil { + return nil, err + } + return &translateFromUTF16{first: true, endian: endian}, nil +} + +func toUTF16(arg string) (Translator, error) { + endian, err := getEndian(arg) + if err != nil { + return nil, err + } + return &translateToUTF16{first: false, endian: endian}, nil +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/utf8.go b/vendor/github.com/rogpeppe/go-charset/charset/utf8.go new file mode 100644 index 000000000..23980b334 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/utf8.go @@ -0,0 +1,51 @@ +package charset + +import ( + "unicode/utf8" +) + +func init() { + registerClass("utf8", toUTF8, toUTF8) +} + +type translateToUTF8 struct { + scratch []byte +} + +var errorBytes = []byte(string(utf8.RuneError)) + +const errorRuneLen = len(string(utf8.RuneError)) + +func (p *translateToUTF8) Translate(data []byte, eof bool) (int, []byte, error) { + p.scratch = ensureCap(p.scratch, (len(data))*errorRuneLen) + buf := p.scratch[:0] + for i := 0; i < len(data); { + // fast path for ASCII + if b := data[i]; b < utf8.RuneSelf { + buf = append(buf, b) + i++ + continue + } + _, size := utf8.DecodeRune(data[i:]) + if size == 1 { + if !eof && !utf8.FullRune(data) { + // When DecodeRune has converted only a single + // byte, we know there must be some kind of error + // because we know the byte's not ASCII. + // If we aren't at EOF, and it's an incomplete + // rune encoding, then we return to process + // the final bytes in a subsequent call. + return i, buf, nil + } + buf = append(buf, errorBytes...) + } else { + buf = append(buf, data[i:i+size]...) 
+ } + i += size + } + return len(data), buf, nil +} + +func toUTF8(arg string) (Translator, error) { + return new(translateToUTF8), nil +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_big5.dat.go b/vendor/github.com/rogpeppe/go-charset/data/data_big5.dat.go new file mode 100644 index 000000000..398ebe339 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_big5.dat.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("big5.dat", func() (io.ReadCloser, error) { + r := strings.NewReader("\u3000,、。.・;:?!︰…‥﹐﹑﹒·﹔﹕﹖﹗︲–︱—︳�︴﹏()︵︶{}︷︸〔〕︹︺【】︻︼《》︽︾〈〉︿﹀「」﹁﹂『』﹃﹄﹙﹚﹛﹜﹝﹞‘’“”〝〞‵′#&*※§〃○●△▲◎☆★◇◆□■▽▼㊣℅‾�_�﹉﹊﹍﹎﹋﹌#&*+-×÷±√<>=≤≥≠∞≒≡﹢﹣﹤﹥﹦∼∩∪⊥∠∟⊿㏒㏑∫∮∵∴♀♂♁☉↑↓←→↖↗↙↘∥∣��/\$¥〒¢£%@℃℉$%@㏕㎜㎝㎞㏎㎡㎎㎏㏄°兙兛兞兝兡兣嗧瓩糎▁▂▃▄▅▆▇█▏▎▍▌▋▊▉┼┴┬┤├▔─│▕┌┐└┘╭╮╰╯═╞╪╡◢◣◥◤╱╲╳0123456789ⅠⅡⅢⅣⅤⅥⅦⅧⅨⅩ〡〢〣〤〥〦〧〨〩�卄�ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩαβγδεζηθικλμνξοπρστυφχψωㄅㄆㄇㄈㄉㄊㄋㄌㄍㄎㄏㄐㄑㄒㄓㄔㄕㄖㄗㄘㄙㄚㄛㄜㄝㄞㄟㄠㄡㄢㄣㄤㄥㄦㄧㄨㄩ˙ˉˊˇˋ���������������������������������������������������������������一乙丁七乃九了二人儿入八几刀刁力匕十卜又三下丈上丫丸凡久么也乞于亡兀刃勺千叉口土士夕大女子孑孓寸小尢尸山川工己已巳巾干廾弋弓才丑丐不中丰丹之尹予云井互五亢仁什仃仆仇仍今介仄元允內六兮公冗凶分切刈勻勾勿化匹午升卅卞厄友及反壬天夫太夭孔少尤尺屯巴幻廿弔引心戈戶手扎支文斗斤方日曰月木欠止歹毋比毛氏水火爪父爻片牙牛犬王丙世丕且丘主乍乏乎以付仔仕他仗代令仙仞充兄冉冊冬凹出凸刊加功包匆北匝仟半卉卡占卯卮去可古右召叮叩叨叼司叵叫另只史叱台句叭叻四囚外央失奴奶孕它尼巨巧左市布平幼弁弘弗必戊打扔扒扑斥旦朮本未末札正母民氐永汁汀氾犯玄玉瓜瓦甘生用甩田由甲申疋白皮皿目矛矢石示禾穴立丞丟乒乓乩亙交亦亥仿伉伙伊伕伍伐休伏仲件任仰仳份企伋光兇兆先全共再冰列刑划刎刖劣匈匡匠印危吉吏同吊吐吁吋各向名合吃后吆吒因回囝圳地在圭圬圯圩夙多夷夸妄奸妃好她如妁字存宇守宅安寺尖屹州帆并年式弛忙忖戎戌戍成扣扛托收早旨旬旭曲曳有朽朴朱朵次此死氖汝汗汙江池汐汕污汛汍汎灰牟牝百竹米糸缶羊羽老考而耒耳聿肉肋肌臣自至臼舌舛舟艮色艾虫血行衣西阡串亨位住佇佗佞伴佛何估佐佑伽伺伸佃佔似但佣作你伯低伶余佝佈佚兌克免兵冶冷別判利刪刨劫助努劬匣即卵吝吭吞吾否呎吧呆呃吳呈呂君吩告吹吻吸吮吵吶吠吼呀吱含吟听囪困囤囫坊坑址坍均坎圾坐坏圻壯夾妝妒妨妞妣妙妖妍妤妓妊妥孝孜孚孛完宋宏尬局屁尿尾岐岑岔岌巫希序庇床廷弄弟彤形彷役忘忌志忍忱快忸忪戒我抄抗抖技扶抉扭把扼找批扳抒扯折扮投抓抑抆改攻攸旱更束李杏材村杜杖杞杉杆杠杓杗步每求汞沙沁沈沉沅沛汪決沐汰沌汨沖沒汽沃汲汾汴沆汶沍沔沘沂灶灼災灸牢牡牠狄狂玖甬甫男甸皂盯矣私秀禿究系罕肖肓肝肘肛肚育良芒芋芍見角言谷豆豕貝赤走足身車辛辰迂迆迅迄巡邑邢邪邦那酉釆里防阮阱阪阬並乖乳事些亞享京佯依侍佳使佬供例來侃佰併侈佩佻侖佾侏侑佺兔兒兕兩具其典冽函刻券刷刺到刮制剁劾劻卒協卓卑卦卷卸卹取叔受味呵咖呸咕咀呻呷咄咒咆呼咐呱呶和咚呢周咋命咎固垃坷坪坩坡坦坤坼夜奉奇奈奄奔妾妻委妹妮姑姆姐姍始姓姊妯妳姒姅孟孤季宗定官宜宙宛尚屈居屆岷岡岸岩岫岱岳帘帚帖帕帛帑幸庚店府底庖延弦弧弩往征彿彼忝忠忽念忿怏怔怯怵怖怪怕怡性怩怫怛或戕房戾所承拉拌拄抿拂抹拒招披拓拔拋拈抨抽押拐拙拇拍抵拚抱拘拖拗拆抬拎放斧於旺昔易昌昆昂明昀昏昕昊昇服朋杭枋枕東果杳杷枇枝林杯杰板枉松析杵枚枓杼杪杲欣武歧歿氓氛泣注泳沱泌泥河沽沾沼波沫法泓沸泄油況沮泗泅泱沿治泡泛泊沬泯泜泖泠炕炎炒炊炙爬爭爸版牧物狀狎狙狗狐玩玨玟玫玥甽疝疙疚的盂盲直知矽社祀祁秉秈空穹竺糾罔羌羋者肺肥肢肱股肫肩肴肪肯臥臾舍芳芝芙芭芽芟芹花芬芥芯芸芣芰芾芷虎虱初表軋迎返近邵邸邱邶采金長門阜陀阿阻附陂隹雨青非亟亭亮信侵侯便俠俑俏保促侶俘俟俊俗侮俐俄係俚俎俞侷兗冒冑冠剎剃削前剌剋則勇勉勃勁匍南卻厚叛咬哀咨哎哉咸咦咳哇哂咽咪品哄哈咯咫咱咻咩咧咿囿垂型垠垣垢城垮垓奕契奏奎奐姜姘姿姣姨娃姥姪姚姦威姻孩宣宦室客宥封屎屏屍屋峙峒巷帝帥帟幽庠度建弈弭彥很待徊律徇後徉怒思怠急怎怨恍恰恨恢恆恃恬恫恪恤扁拜挖按拼拭持拮拽指拱拷拯括拾拴挑挂政故斫施既春昭映昧是星昨昱昤曷柿染柱柔某柬架枯柵柩柯柄柑枴柚查枸柏柞柳枰柙柢柝柒歪殃殆段毒毗氟泉洋洲洪流津洌洱洞洗活洽派洶洛泵洹洧洸洩洮洵洎洫炫為炳炬炯炭炸炮炤爰牲牯牴狩狠狡玷珊玻玲珍珀玳甚甭畏界畎畋疫疤疥疢疣癸皆皇皈盈盆盃盅省盹相眉看盾盼眇矜砂研砌砍祆祉祈祇禹禺科秒秋穿突竿竽籽紂紅紀紉紇約紆缸美羿耄耐耍耑耶胖胥胚胃胄背胡胛胎胞胤胝致舢苧范茅苣苛苦茄若茂茉苒苗英茁苜苔苑苞苓苟苯茆虐虹虻虺衍衫要觔計訂訃貞負赴赳趴軍軌述迦迢迪迥迭迫迤迨郊郎郁郃酋酊重閂限陋陌降面革韋韭音頁風飛食首香乘亳倌倍倣俯倦倥俸倩倖倆值借倚倒們俺倀倔倨俱倡個候倘俳修倭倪俾倫倉兼冤冥冢凍凌准凋剖剜剔剛剝匪卿原厝叟哨唐唁唷哼哥哲唆哺唔哩哭員唉哮哪哦唧唇哽唏圃圄埂埔埋埃堉夏套奘奚娑娘娜娟娛娓姬娠娣娩娥娌娉孫屘宰害家宴宮宵容宸射屑展屐峭峽峻峪峨峰島崁峴差席師庫庭座弱徒徑徐恙恣恥恐恕恭恩息悄悟悚悍悔悌悅悖扇拳挈拿捎挾振捕捂捆捏捉挺捐挽挪挫挨捍捌效敉料旁旅時晉晏晃晒晌晅晁書朔朕朗校核案框桓根桂桔栩梳栗桌桑栽柴桐桀格桃株桅栓栘桁殊殉殷氣氧氨氦氤泰浪涕消涇浦浸海浙涓浬涉浮浚浴浩涌涊浹涅浥涔烊烘烤烙烈烏爹特狼狹狽狸狷玆班琉珮珠珪珞畔畝畜畚留疾病症疲疳疽疼疹痂疸皋皰益盍盎眩真眠眨矩砰砧砸砝破砷砥砭砠砟砲祕祐祠祟祖神祝祗祚秤秣秧租秦秩秘窄窈站笆笑粉紡紗紋紊素索純紐紕級紜納紙紛缺罟羔翅翁耆耘耕耙耗耽耿胱脂胰脅胭胴脆胸胳脈能脊胼胯臭臬舀舐航舫舨般芻茫荒荔荊茸荐草茵茴荏茲茹茶茗荀茱茨荃虔蚊蚪蚓蚤蚩蚌蚣蚜衰衷袁袂衽衹記訐討訌訕訊託訓訖訏訑豈豺豹財貢起躬軒軔軏辱送逆迷退迺迴逃追逅迸邕郡郝郢酒配酌釘針釗釜釙閃院陣陡陛陝除陘陞隻飢馬骨高鬥鬲鬼乾偺偽停假偃偌做偉健偶偎偕偵側偷偏倏偯偭兜冕凰剪副勒務勘動匐匏匙匿區匾參曼商啪啦啄啞啡啃啊唱啖問啕唯啤唸售啜唬啣唳啁啗圈國圉域堅堊堆埠埤基堂堵執培夠奢娶婁婉婦婪婀娼婢婚婆婊孰寇寅寄寂宿密尉專將屠屜屝崇崆崎崛崖崢崑崩崔崙崤崧崗巢常帶帳帷康庸庶庵庾張強彗彬彩彫得徙從徘御徠徜恿患悉悠您惋悴惦悽情悻悵惜悼惘惕惆惟悸惚惇戚戛扈掠控捲掖探接捷捧掘措捱掩掉掃掛捫推掄授掙採掬排掏掀捻捩捨捺敝敖救教敗啟敏敘敕敔斜斛斬族旋旌旎晝晚晤晨晦晞曹勗望梁梯梢梓梵桿桶梱梧梗械梃棄梭梆梅梔條梨梟梡梂欲殺毫毬氫涎涼淳淙液淡淌淤添淺清淇淋涯淑涮淞淹涸混淵淅淒渚涵淚淫淘淪深淮淨淆淄涪淬涿淦烹焉焊烽烯爽牽犁猜猛猖猓猙率琅琊球理現琍瓠瓶瓷甜產略畦畢異疏痔痕疵痊痍皎盔盒盛眷眾眼眶眸眺硫硃硎祥票祭移窒窕笠笨笛第符笙笞笮粒粗粕絆絃統紮紹紼絀細紳組累終紲紱缽羞羚翌翎習耜聊聆脯脖脣脫脩脰脤舂舵舷舶船莎莞莘荸莢莖莽莫莒莊莓
莉莠荷荻荼莆莧處彪蛇蛀蚶蛄蚵蛆蛋蚱蚯蛉術袞袈被袒袖袍袋覓規訪訝訣訥許設訟訛訢豉豚販責貫貨貪貧赧赦趾趺軛軟這逍通逗連速逝逐逕逞造透逢逖逛途部郭都酗野釵釦釣釧釭釩閉陪陵陳陸陰陴陶陷陬雀雪雩章竟頂頃魚鳥鹵鹿麥麻傢傍傅備傑傀傖傘傚最凱割剴創剩勞勝勛博厥啻喀喧啼喊喝喘喂喜喪喔喇喋喃喳單喟唾喲喚喻喬喱啾喉喫喙圍堯堪場堤堰報堡堝堠壹壺奠婷媚婿媒媛媧孳孱寒富寓寐尊尋就嵌嵐崴嵇巽幅帽幀幃幾廊廁廂廄弼彭復循徨惑惡悲悶惠愜愣惺愕惰惻惴慨惱愎惶愉愀愒戟扉掣掌描揀揩揉揆揍插揣提握揖揭揮捶援揪換摒揚揹敞敦敢散斑斐斯普晰晴晶景暑智晾晷曾替期朝棺棕棠棘棗椅棟棵森棧棹棒棲棣棋棍植椒椎棉棚楮棻款欺欽殘殖殼毯氮氯氬港游湔渡渲湧湊渠渥渣減湛湘渤湖湮渭渦湯渴湍渺測湃渝渾滋溉渙湎湣湄湲湩湟焙焚焦焰無然煮焜牌犄犀猶猥猴猩琺琪琳琢琥琵琶琴琯琛琦琨甥甦畫番痢痛痣痙痘痞痠登發皖皓皴盜睏短硝硬硯稍稈程稅稀窘窗窖童竣等策筆筐筒答筍筋筏筑粟粥絞結絨絕紫絮絲絡給絢絰絳善翔翕耋聒肅腕腔腋腑腎脹腆脾腌腓腴舒舜菩萃菸萍菠菅萋菁華菱菴著萊菰萌菌菽菲菊萸萎萄菜萇菔菟虛蛟蛙蛭蛔蛛蛤蛐蛞街裁裂袱覃視註詠評詞証詁詔詛詐詆訴診訶詖象貂貯貼貳貽賁費賀貴買貶貿貸越超趁跎距跋跚跑跌跛跆軻軸軼辜逮逵週逸進逶鄂郵鄉郾酣酥量鈔鈕鈣鈉鈞鈍鈐鈇鈑閔閏開閑間閒閎隊階隋陽隅隆隍陲隄雁雅雄集雇雯雲韌項順須飧飪飯飩飲飭馮馭黃黍黑亂傭債傲傳僅傾催傷傻傯僇剿剷剽募勦勤勢勣匯嗟嗨嗓嗦嗎嗜嗇嗑嗣嗤嗯嗚嗡嗅嗆嗥嗉園圓塞塑塘塗塚塔填塌塭塊塢塒塋奧嫁嫉嫌媾媽媼媳嫂媲嵩嵯幌幹廉廈弒彙徬微愚意慈感想愛惹愁愈慎慌慄慍愾愴愧愍愆愷戡戢搓搾搞搪搭搽搬搏搜搔損搶搖搗搆敬斟新暗暉暇暈暖暄暘暍會榔業楚楷楠楔極椰概楊楨楫楞楓楹榆楝楣楛歇歲毀殿毓毽溢溯滓溶滂源溝滇滅溥溘溼溺溫滑準溜滄滔溪溧溴煎煙煩煤煉照煜煬煦煌煥煞煆煨煖爺牒猷獅猿猾瑯瑚瑕瑟瑞瑁琿瑙瑛瑜當畸瘀痰瘁痲痱痺痿痴痳盞盟睛睫睦睞督睹睪睬睜睥睨睢矮碎碰碗碘碌碉硼碑碓硿祺祿禁萬禽稜稚稠稔稟稞窟窠筷節筠筮筧粱粳粵經絹綑綁綏絛置罩罪署義羨群聖聘肆肄腱腰腸腥腮腳腫腹腺腦舅艇蒂葷落萱葵葦葫葉葬葛萼萵葡董葩葭葆虞虜號蛹蜓蜈蜇蜀蛾蛻蜂蜃蜆蜊衙裟裔裙補裘裝裡裊裕裒覜解詫該詳試詩詰誇詼詣誠話誅詭詢詮詬詹詻訾詨豢貊貉賊資賈賄貲賃賂賅跡跟跨路跳跺跪跤跦躲較載軾輊辟農運遊道遂達逼違遐遇遏過遍遑逾遁鄒鄗酬酪酩釉鈷鉗鈸鈽鉀鈾鉛鉋鉤鉑鈴鉉鉍鉅鈹鈿鉚閘隘隔隕雍雋雉雊雷電雹零靖靴靶預頑頓頊頒頌飼飴飽飾馳馱馴髡鳩麂鼎鼓鼠僧僮僥僖僭僚僕像僑僱僎僩兢凳劃劂匱厭嗾嘀嘛嘗嗽嘔嘆嘉嘍嘎嗷嘖嘟嘈嘐嗶團圖塵塾境墓墊塹墅塽壽夥夢夤奪奩嫡嫦嫩嫗嫖嫘嫣孵寞寧寡寥實寨寢寤察對屢嶄嶇幛幣幕幗幔廓廖弊彆彰徹慇愿態慷慢慣慟慚慘慵截撇摘摔撤摸摟摺摑摧搴摭摻敲斡旗旖暢暨暝榜榨榕槁榮槓構榛榷榻榫榴槐槍榭槌榦槃榣歉歌氳漳演滾漓滴漩漾漠漬漏漂漢滿滯漆漱漸漲漣漕漫漯澈漪滬漁滲滌滷熔熙煽熊熄熒爾犒犖獄獐瑤瑣瑪瑰瑭甄疑瘧瘍瘋瘉瘓盡監瞄睽睿睡磁碟碧碳碩碣禎福禍種稱窪窩竭端管箕箋筵算箝箔箏箸箇箄粹粽精綻綰綜綽綾綠緊綴網綱綺綢綿綵綸維緒緇綬罰翠翡翟聞聚肇腐膀膏膈膊腿膂臧臺與舔舞艋蓉蒿蓆蓄蒙蒞蒲蒜蓋蒸蓀蓓蒐蒼蓑蓊蜿蜜蜻蜢蜥蜴蜘蝕蜷蜩裳褂裴裹裸製裨褚裯誦誌語誣認誡誓誤說誥誨誘誑誚誧豪貍貌賓賑賒赫趙趕跼輔輒輕輓辣遠遘遜遣遙遞遢遝遛鄙鄘鄞酵酸酷酴鉸銀銅銘銖鉻銓銜銨鉼銑閡閨閩閣閥閤隙障際雌雒需靼鞅韶頗領颯颱餃餅餌餉駁骯骰髦魁魂鳴鳶鳳麼鼻齊億儀僻僵價儂儈儉儅凜劇劈劉劍劊勰厲嘮嘻嘹嘲嘿嘴嘩噓噎噗噴嘶嘯嘰墀墟增墳墜墮墩墦奭嬉嫻嬋嫵嬌嬈寮寬審寫層履嶝嶔幢幟幡廢廚廟廝廣廠彈影德徵慶慧慮慝慕憂慼慰慫慾憧憐憫憎憬憚憤憔憮戮摩摯摹撞撲撈撐撰撥撓撕撩撒撮播撫撚撬撙撢撳敵敷數暮暫暴暱樣樟槨樁樞標槽模樓樊槳樂樅槭樑歐歎殤毅毆漿潼澄潑潦潔澆潭潛潸潮澎潺潰潤澗潘滕潯潠潟熟熬熱熨牖犛獎獗瑩璋璃瑾璀畿瘠瘩瘟瘤瘦瘡瘢皚皺盤瞎瞇瞌瞑瞋磋磅確磊碾磕碼磐稿稼穀稽稷稻窯窮箭箱範箴篆篇篁箠篌糊締練緯緻緘緬緝編緣線緞緩綞緙緲緹罵罷羯翩耦膛膜膝膠膚膘蔗蔽蔚蓮蔬蔭蔓蔑蔣蔡蔔蓬蔥蓿蔆螂蝴蝶蝠蝦蝸蝨蝙蝗蝌蝓衛衝褐複褒褓褕褊誼諒談諄誕請諸課諉諂調誰論諍誶誹諛豌豎豬賠賞賦賤賬賭賢賣賜質賡赭趟趣踫踐踝踢踏踩踟踡踞躺輝輛輟輩輦輪輜輞輥適遮遨遭遷鄰鄭鄧鄱醇醉醋醃鋅銻銷鋪銬鋤鋁銳銼鋒鋇鋰銲閭閱霄霆震霉靠鞍鞋鞏頡頫頜颳養餓餒餘駝駐駟駛駑駕駒駙骷髮髯鬧魅魄魷魯鴆鴉鴃麩麾黎墨齒儒儘儔儐儕冀冪凝劑劓勳噙噫噹噩噤噸噪器噥噱噯噬噢噶壁墾壇壅奮嬝嬴學寰導彊憲憑憩憊懍憶憾懊懈戰擅擁擋撻撼據擄擇擂操撿擒擔撾整曆曉暹曄曇暸樽樸樺橙橫橘樹橄橢橡橋橇樵機橈歙歷氅濂澱澡濃澤濁澧澳激澹澶澦澠澴熾燉燐燒燈燕熹燎燙燜燃燄獨璜璣璘璟璞瓢甌甍瘴瘸瘺盧盥瞠瞞瞟瞥磨磚磬磧禦積穎穆穌穋窺篙簑築篤篛篡篩篦糕糖縊縑縈縛縣縞縝縉縐罹羲翰翱翮耨膳膩膨臻興艘艙蕊蕙蕈蕨蕩蕃蕉蕭蕪蕞螃螟螞螢融衡褪褲褥褫褡親覦諦諺諫諱謀諜諧諮諾謁謂諷諭諳諶諼豫豭貓賴蹄踱踴蹂踹踵輻輯輸輳辨辦遵遴選遲遼遺鄴醒錠錶鋸錳錯錢鋼錫錄錚錐錦錡錕錮錙閻隧隨險雕霎霑霖霍霓霏靛靜靦鞘頰頸頻頷頭頹頤餐館餞餛餡餚駭駢駱骸骼髻髭鬨鮑鴕鴣鴦鴨鴒鴛默黔龍龜優償儡儲勵嚎嚀嚐嚅嚇嚏壕壓壑壎嬰嬪嬤孺尷屨嶼嶺嶽嶸幫彌徽應懂懇懦懋戲戴擎擊擘擠擰擦擬擱擢擭斂斃曙曖檀檔檄檢檜櫛檣橾檗檐檠歜殮毚氈濘濱濟濠濛濤濫濯澀濬濡濩濕濮濰燧營燮燦燥燭燬燴燠爵牆獰獲璩環璦璨癆療癌盪瞳瞪瞰瞬瞧瞭矯磷磺磴磯礁禧禪穗窿簇簍篾篷簌篠糠糜糞糢糟糙糝縮績繆縷縲繃縫總縱繅繁縴縹繈縵縿縯罄翳翼聱聲聰聯聳臆臃膺臂臀膿膽臉膾臨舉艱薪薄蕾薜薑薔薯薛薇薨薊虧蟀蟑螳蟒蟆螫螻螺蟈蟋褻褶襄褸褽覬謎謗謙講謊謠謝謄謐豁谿豳賺賽購賸賻趨蹉蹋蹈蹊轄輾轂轅輿避遽還邁邂邀鄹醣醞醜鍍鎂錨鍵鍊鍥鍋錘鍾鍬鍛鍰鍚鍔闊闋闌闈闆隱隸雖霜霞鞠韓顆颶餵騁駿鮮鮫鮪鮭鴻鴿麋黏點黜黝黛鼾齋叢嚕嚮壙壘嬸彝懣戳擴擲擾攆擺擻擷斷曜朦檳檬櫃檻檸櫂檮檯歟歸殯瀉瀋濾瀆濺瀑瀏燻燼燾燸獷獵璧璿甕癖癘癒瞽瞿瞻瞼礎禮穡穢穠竄竅簫簧簪簞簣簡糧織繕繞繚繡繒繙罈翹翻職聶臍臏舊藏薩藍藐藉薰薺薹薦蟯蟬蟲蟠覆覲觴謨謹謬謫豐贅蹙蹣蹦蹤蹟蹕軀轉轍邇邃邈醫醬釐鎔鎊鎖鎢鎳鎮鎬鎰鎘鎚鎗闔闖闐闕離雜雙雛雞霤鞣鞦鞭韹額顏題顎顓颺餾餿餽餮馥騎髁鬃鬆魏魎魍鯊鯉鯽鯈鯀鵑鵝鵠黠鼕鼬儳嚥壞壟壢寵龐廬懲懷懶懵攀攏曠曝櫥櫝櫚櫓瀛瀟瀨瀚瀝瀕瀘爆爍牘犢獸獺璽瓊瓣疇疆癟癡矇礙禱穫穩簾簿簸簽簷籀繫繭繹繩繪羅繳羶羹羸臘藩藝藪藕藤藥藷蟻蠅蠍蟹蟾襠襟襖襞譁譜識證譚譎譏譆譙贈贊蹼蹲躇蹶蹬蹺蹴轔轎辭邊邋醱醮鏡鏑鏟鏃鏈鏜鏝鏖鏢鏍鏘鏤鏗鏨關隴難霪霧靡韜韻類願顛颼饅饉騖騙鬍鯨鯧鯖鯛鶉鵡鵲鵪鵬麒麗麓麴勸嚨嚷嚶嚴嚼壤孀孃孽寶巉懸懺攘攔攙曦朧櫬瀾瀰瀲爐獻瓏癢癥礦礪礬礫竇競籌籃籍糯糰辮繽繼纂罌耀臚艦藻藹蘑藺蘆蘋蘇蘊蠔蠕襤覺觸議譬警譯譟譫贏贍躉躁躅躂醴釋鐘鐃鏽闡霰飄饒饑馨騫騰騷騵鰓鰍鹹麵黨鼯齟齣齡儷儸囁囀囂夔屬巍懼懾攝攜斕曩櫻欄櫺殲灌爛犧瓖瓔癩矓籐纏續羼蘗蘭蘚蠣蠢蠡蠟襪襬覽譴護譽贓躊躍躋轟辯醺鐮鐳鐵鐺鐸鐲鐫闢霸霹露響顧顥饗驅驃驀騾髏魔魑鰭鰥鶯鶴鷂鶸麝黯鼙齜齦齧儼儻囈囊囉孿巔巒彎懿攤權歡灑灘玀瓤疊癮癬禳籠籟聾聽臟襲襯觼讀贖贗躑躓轡酈鑄鑑鑒霽霾韃韁顫饕驕驍髒鬚鱉鰱鰾鰻鷓鷗鼴齬齪龔囌巖戀攣攫攪曬欐瓚竊籤籣籥纓纖纔臢蘸蘿蠱變邐邏鑣鑠鑤靨顯饜驚驛驗髓體髑鱔鱗鱖鷥麟黴囑壩攬灞癱癲矗罐羈蠶蠹衢讓讒讖艷贛釀鑪靂靈靄韆顰驟鬢魘鱟鷹鷺鹼鹽鼇齷齲廳欖灣籬籮蠻觀躡釁鑲鑰顱饞髖鬣黌灤矚讚鑷韉驢驥纜讜躪釅鑽鑾鑼鱷鱸黷豔鑿鸚爨驪鬱鸛鸞籲ヾゝゞ々ぁあぃいぅうぇえぉおかがきぎくぐけげこごさざしじすずせぜそぞただちぢっつづてでとどなにぬねのはばぱひびぴふぶぷへべぺほぼぽまみむめもゃやゅゆょよらりるれろゎわゐゑをんァアィイゥウェエォオカガキギクグケゲコゴサザシジスズセゼソゾタダチヂッツヅテデトドナニヌネノハバパヒビピフブプヘベペホボポマミムメモャヤュユョヨラリルレロヮワヰヱヲンヴヵヶДЕЁЖЗИЙКЛМУФХЦЧШЩЪЫЬЭЮЯабвгдеёжзийклмнопрстуфхцчшщъыьэюя①②③④⑤⑥⑦⑧⑨⑩⑴⑵⑶⑷⑸⑹⑺⑻⑼⑽���������������������������������������������������������������������������������������������������������������������������������������������������������������乂乜凵匚厂万丌乇亍囗兀屮彳丏冇与丮亓仂仉仈冘勼卬厹圠夃夬尐巿旡殳毌气爿丱丼仨仜仩仡仝仚刌匜卌圢圣夗夯宁宄尒尻屴屳帄庀庂忉戉扐氕氶汃氿氻犮犰玊禸肊阞伎优伬仵伔仱伀价伈伝伂伅伢伓伄仴伒冱刓刉刐劦匢
匟卍厊吇囡囟圮圪圴夼妀奼妅奻奾奷奿孖尕尥屼屺屻屾巟幵庄异弚彴忕忔忏扜扞扤扡扦扢扙扠扚扥旯旮朾朹朸朻机朿朼朳氘汆汒汜汏汊汔汋汌灱牞犴犵玎甪癿穵网艸艼芀艽艿虍襾邙邗邘邛邔阢阤阠阣佖伻佢佉体佤伾佧佒佟佁佘伭伳伿佡冏冹刜刞刡劭劮匉卣卲厎厏吰吷吪呔呅吙吜吥吘吽呏呁吨吤呇囮囧囥坁坅坌坉坋坒夆奀妦妘妠妗妎妢妐妏妧妡宎宒尨尪岍岏岈岋岉岒岊岆岓岕巠帊帎庋庉庌庈庍弅弝彸彶忒忑忐忭忨忮忳忡忤忣忺忯忷忻怀忴戺抃抌抎抏抔抇扱扻扺扰抁抈扷扽扲扴攷旰旴旳旲旵杅杇杙杕杌杈杝杍杚杋毐氙氚汸汧汫沄沋沏汱汯汩沚汭沇沕沜汦汳汥汻沎灴灺牣犿犽狃狆狁犺狅玕玗玓玔玒町甹疔疕皁礽耴肕肙肐肒肜芐芏芅芎芑芓芊芃芄豸迉辿邟邡邥邞邧邠阰阨阯阭丳侘佼侅佽侀侇佶佴侉侄佷佌侗佪侚佹侁佸侐侜侔侞侒侂侕佫佮冞冼冾刵刲刳剆刱劼匊匋匼厒厔咇呿咁咑咂咈呫呺呾呥呬呴呦咍呯呡呠咘呣呧呤囷囹坯坲坭坫坱坰坶垀坵坻坳坴坢坨坽夌奅妵妺姏姎妲姌姁妶妼姃姖妱妽姀姈妴姇孢孥宓宕屄屇岮岤岠岵岯岨岬岟岣岭岢岪岧岝岥岶岰岦帗帔帙弨弢弣弤彔徂彾彽忞忥怭怦怙怲怋怴怊怗怳怚怞怬怢怍怐怮怓怑怌怉怜戔戽抭抴拑抾抪抶拊抮抳抯抻抩抰抸攽斨斻昉旼昄昒昈旻昃昋昍昅旽昑昐曶朊枅杬枎枒杶杻枘枆构杴枍枌杺枟枑枙枃杽极杸杹枔欥殀歾毞氝沓泬泫泮泙沶泔沭泧沷泐泂沺泃泆泭泲泒泝沴沊沝沀泞泀洰泍泇沰泹泏泩泑炔炘炅炓炆炄炑炖炂炚炃牪狖狋狘狉狜狒狔狚狌狑玤玡玭玦玢玠玬玝瓝瓨甿畀甾疌疘皯盳盱盰盵矸矼矹矻矺矷祂礿秅穸穻竻籵糽耵肏肮肣肸肵肭舠芠苀芫芚芘芛芵芧芮芼芞芺芴芨芡芩苂芤苃芶芢虰虯虭虮豖迒迋迓迍迖迕迗邲邴邯邳邰阹阽阼阺陃俍俅俓侲俉俋俁俔俜俙侻侳俛俇俖侺俀侹俬剄剉勀勂匽卼厗厖厙厘咺咡咭咥哏哃茍咷咮哖咶哅哆咠呰咼咢咾呲哞咰垵垞垟垤垌垗垝垛垔垘垏垙垥垚垕壴复奓姡姞姮娀姱姝姺姽姼姶姤姲姷姛姩姳姵姠姾姴姭宨屌峐峘峌峗峋峛峞峚峉峇峊峖峓峔峏峈峆峎峟峸巹帡帢帣帠帤庰庤庢庛庣庥弇弮彖徆怷怹恔恲恞恅恓恇恉恛恌恀恂恟怤恄恘恦恮扂扃拏挍挋拵挎挃拫拹挏挌拸拶挀挓挔拺挕拻拰敁敃斪斿昶昡昲昵昜昦昢昳昫昺昝昴昹昮朏朐柁柲柈枺柜枻柸柘柀枷柅柫柤柟枵柍枳柷柶柮柣柂枹柎柧柰枲柼柆柭柌枮柦柛柺柉柊柃柪柋欨殂殄殶毖毘毠氠氡洨洴洭洟洼洿洒洊泚洳洄洙洺洚洑洀洝浂洁洘洷洃洏浀洇洠洬洈洢洉洐炷炟炾炱炰炡炴炵炩牁牉牊牬牰牳牮狊狤狨狫狟狪狦狣玅珌珂珈珅玹玶玵玴珫玿珇玾珃珆玸珋瓬瓮甮畇畈疧疪癹盄眈眃眄眅眊盷盻盺矧矨砆砑砒砅砐砏砎砉砃砓祊祌祋祅祄秕种秏秖秎窀穾竑笀笁籺籸籹籿粀粁紃紈紁罘羑羍羾耇耎耏耔耷胘胇胠胑胈胂胐胅胣胙胜胊胕胉胏胗胦胍臿舡芔苙苾苹茇苨茀苕茺苫苖苴苬苡苲苵茌苻苶苰苪苤苠苺苳苭虷虴虼虳衁衎衧衪衩觓訄訇赲迣迡迮迠郱邽邿郕郅邾郇郋郈釔釓陔陏陑陓陊陎倞倅倇倓倢倰倛俵俴倳倷倬俶俷倗倜倠倧倵倯倱倎党冔冓凊凄凅凈凎剡剚剒剞剟剕剢勍匎厞唦哢唗唒哧哳哤唚哿唄唈哫唑唅哱唊哻哷哸哠唎唃唋圁圂埌堲埕埒垺埆垽垼垸垶垿埇埐垹埁夎奊娙娖娭娮娕娏娗娊娞娳孬宧宭宬尃屖屔峬峿峮峱峷崀峹帩帨庨庮庪庬弳弰彧恝恚恧恁悢悈悀悒悁悝悃悕悛悗悇悜悎戙扆拲挐捖挬捄捅挶捃揤挹捋捊挼挩捁挴捘捔捙挭捇挳捚捑挸捗捀捈敊敆旆旃旄旂晊晟晇晑朒朓栟栚桉栲栳栻桋桏栖栱栜栵栫栭栯桎桄栴栝栒栔栦栨栮桍栺栥栠欬欯欭欱欴歭肂殈毦毤毨毣毢毧氥浺浣浤浶洍浡涒浘浢浭浯涑涍淯浿涆浞浧浠涗浰浼浟涂涘洯浨涋浾涀涄洖涃浻浽浵涐烜烓烑烝烋缹烢烗烒烞烠烔烍烅烆烇烚烎烡牂牸牷牶猀狺狴狾狶狳狻猁珓珙珥珖玼珧珣珩珜珒珛珔珝珚珗珘珨瓞瓟瓴瓵甡畛畟疰痁疻痄痀疿疶疺皊盉眝眛眐眓眒眣眑眕眙眚眢眧砣砬砢砵砯砨砮砫砡砩砳砪砱祔祛祏祜祓祒祑秫秬秠秮秭秪秜秞秝窆窉窅窋窌窊窇竘笐笄笓笅笏笈笊笎笉笒粄粑粊粌粈粍粅紞紝紑紎紘紖紓紟紒紏紌罜罡罞罠罝罛羖羒翃翂翀耖耾耹胺胲胹胵脁胻脀舁舯舥茳茭荄茙荑茥荖茿荁茦茜茢荂荎茛茪茈茼荍茖茤茠茷茯茩荇荅荌荓茞茬荋茧荈虓虒蚢蚨蚖蚍蚑蚞蚇蚗蚆蚋蚚蚅蚥蚙蚡蚧蚕蚘蚎蚝蚐蚔衃衄衭衵衶衲袀衱衿衯袃衾衴衼訒豇豗豻貤貣赶赸趵趷趶軑軓迾迵适迿迻逄迼迶郖郠郙郚郣郟郥郘郛郗郜郤酐酎酏釕釢釚陜陟隼飣髟鬯乿偰偪偡偞偠偓偋偝偲偈偍偁偛偊偢倕偅偟偩偫偣偤偆偀偮偳偗偑凐剫剭剬剮勖勓匭厜啵啶唼啍啐唴唪啑啢唶唵唰啒啅唌唲啥啎唹啈唭唻啀啋圊圇埻堔埢埶埜埴堀埭埽堈埸堋埳埏堇埮埣埲埥埬埡堎埼堐埧堁堌埱埩埰堍堄奜婠婘婕婧婞娸娵婭婐婟婥婬婓婤婗婃婝婒婄婛婈媎娾婍娹婌婰婩婇婑婖婂婜孲孮寁寀屙崞崋崝崚崠崌崨崍崦崥崏崰崒崣崟崮帾帴庱庴庹庲庳弶弸徛徖徟悊悐悆悾悰悺惓惔惏惤惙惝惈悱惛悷惊悿惃惍惀挲捥掊掂捽掽掞掭掝掗掫掎捯掇掐据掯捵掜捭掮捼掤挻掟捸掅掁掑掍捰敓旍晥晡晛晙晜晢朘桹梇梐梜桭桮梮梫楖桯梣梬梩桵桴梲梏桷梒桼桫桲梪梀桱桾梛梖梋梠梉梤桸桻梑梌梊桽欶欳欷欸殑殏殍殎殌氪淀涫涴涳湴涬淩淢涷淶淔渀淈淠淟淖涾淥淜淝淛淴淊涽淭淰涺淕淂淏淉淐淲淓淽淗淍淣涻烺焍烷焗烴焌烰焄烳焐烼烿焆焓焀烸烶焋焂焎牾牻牼牿猝猗猇猑猘猊猈狿猏猞玈珶珸珵琄琁珽琇琀珺珼珿琌琋珴琈畤畣痎痒痏痋痌痑痐皏皉盓眹眯眭眱眲眴眳眽眥眻眵硈硒硉硍硊硌砦硅硐祤祧祩祪祣祫祡离秺秸秶秷窏窔窐笵筇笴笥笰笢笤笳笘笪笝笱笫笭笯笲笸笚笣粔粘粖粣紵紽紸紶紺絅紬紩絁絇紾紿絊紻紨罣羕羜羝羛翊翋翍翐翑翇翏翉耟耞耛聇聃聈脘脥脙脛脭脟脬脞脡脕脧脝脢舑舸舳舺舴舲艴莐莣莨莍荺荳莤荴莏莁莕莙荵莔莩荽莃莌莝莛莪莋荾莥莯莈莗莰荿莦莇莮荶莚虙虖蚿蚷蛂蛁蛅蚺蚰蛈蚹蚳蚸蛌蚴蚻蚼蛃蚽蚾衒袉袕袨袢袪袚袑袡袟袘袧袙袛袗袤袬袌袓袎覂觖觙觕訰訧訬訞谹谻豜豝豽貥赽赻赹趼跂趹趿跁軘軞軝軜軗軠軡逤逋逑逜逌逡郯郪郰郴郲郳郔郫郬郩酖酘酚酓酕釬釴釱釳釸釤釹釪釫釷釨釮镺閆閈陼陭陫陱陯隿靪頄飥馗傛傕傔傞傋傣傃傌傎傝偨傜傒傂傇兟凔匒匑厤厧喑喨喥喭啷噅喢喓喈喏喵喁喣喒喤啽喌喦啿喕喡喎圌堩堷堙堞堧堣堨埵塈堥堜堛堳堿堶堮堹堸堭堬堻奡媯媔媟婺媢媞婸媦婼媥媬媕媮娷媄媊媗媃媋媩婻婽媌媜媏媓媝寪寍寋寔寑寊寎尌尰崷嵃嵫嵁嵋崿崵嵑嵎嵕崳崺嵒崽崱嵙嵂崹嵉崸崼崲崶嵀嵅幄幁彘徦徥徫惉悹惌惢惎惄愔惲愊愖愅惵愓惸惼惾惁愃愘愝愐惿愄愋扊掔掱掰揎揥揨揯揃撝揳揊揠揶揕揲揵摡揟掾揝揜揄揘揓揂揇揌揋揈揰揗揙攲敧敪敤敜敨敥斌斝斞斮旐旒晼晬晻暀晱晹晪晲朁椌棓椄棜椪棬棪棱椏棖棷棫棤棶椓椐棳棡椇棌椈楰梴椑棯棆椔棸棐棽棼棨椋椊椗棎棈棝棞棦棴棑椆棔棩椕椥棇欹欻欿欼殔殗殙殕殽毰毲毳氰淼湆湇渟湉溈渼渽湅湢渫渿湁湝湳渜渳湋湀湑渻渃渮湞湨湜湡渱渨湠湱湫渹渢渰湓湥渧湸湤湷湕湹湒湦渵渶湚焠焞焯烻焮焱焣焥焢焲焟焨焺焛牋牚犈犉犆犅犋猒猋猰猢猱猳猧猲猭猦猣猵猌琮琬琰琫琖琚琡琭琱琤琣琝琩琠琲瓻甯畯畬痧痚痡痦痝痟痤痗皕皒盚睆睇睄睍睅睊睎睋睌矞矬硠硤硥硜硭硱硪确硰硩硨硞硢祴祳祲祰稂稊稃稌稄窙竦竤筊笻筄筈筌筎筀筘筅粢粞粨粡絘絯絣絓絖絧絪絏絭絜絫絒絔絩絑絟絎缾缿罥罦羢羠羡翗聑聏聐胾胔腃腊腒腏腇脽腍脺臦臮臷臸臹舄舼舽舿艵茻菏菹萣菀菨萒菧菤菼菶萐菆菈菫菣莿萁菝菥菘菿菡菋菎菖菵菉萉萏菞萑萆菂菳菕菺菇菑菪萓菃菬菮菄菻菗菢萛菛菾蛘蛢蛦蛓蛣蛚蛪蛝蛫蛜蛬蛩蛗蛨蛑衈衖衕袺裗袹袸裀袾袶袼袷袽袲褁裉覕覘覗觝觚觛詎詍訹詙詀詗詘詄詅詒詈詑詊詌詏豟貁貀貺貾貰貹貵趄趀趉跘跓跍跇跖跜跏跕跙跈跗跅軯軷軺軹軦軮軥軵軧軨軶軫軱軬軴軩逭逴逯鄆鄬鄄郿郼鄈郹郻鄁鄀鄇鄅鄃酡酤酟酢酠鈁鈊鈥鈃鈚鈦鈏鈌鈀鈒釿釽鈆鈄鈧鈂鈜鈤鈙鈗鈅鈖镻閍閌閐隇陾隈隉隃隀雂雈雃雱雰靬靰靮頇颩飫鳦黹亃亄亶傽傿僆傮僄僊傴僈僂傰僁傺傱僋僉傶傸凗剺剸剻剼嗃嗛嗌嗐嗋嗊嗝嗀嗔嗄嗩喿嗒喍嗏嗕嗢嗖嗈嗲嗍嗙嗂圔塓塨塤塏塍塉塯塕塎塝塙塥塛堽塣塱壼嫇嫄嫋媺媸媱媵媰媿嫈媻嫆媷嫀嫊媴媶嫍媹媐寖寘寙尟尳嵱嵣嵊嵥嵲嵬嵞嵨嵧嵢巰幏幎幊幍幋廅廌廆廋廇彀徯徭惷慉慊愫慅愶愲愮慆愯慏愩慀戠酨戣戥戤揅揱揫搐搒搉搠搤搳摃搟搕搘搹搷搢搣搌搦搰搨摁搵搯搊搚摀搥搧搋揧搛搮搡搎敯斒旓暆暌暕暐暋暊暙暔晸朠楦楟椸楎楢楱椿楅楪椹楂楗楙楺楈楉椵楬椳椽楥棰楸椴楩楀楯楄楶楘楁楴楌椻楋椷楜楏楑椲楒椯楻椼歆歅歃歂歈歁殛嗀毻毼毹毷毸溛滖滈溏滀溟溓溔溠溱溹滆滒溽滁溞滉溷溰滍溦滏溲溾滃滜滘溙溒溎溍溤溡溿溳滐滊溗溮溣煇煔煒煣煠煁煝煢煲煸煪煡煂煘煃煋煰煟煐煓煄煍煚牏犍犌犑犐犎猼獂猻猺獀獊獉瑄瑊瑋瑒瑑瑗瑀瑏瑐瑎瑂瑆瑍瑔瓡瓿瓾瓽甝畹畷榃痯瘏瘃痷痾痼痹痸瘐痻痶痭痵痽皙皵盝睕睟睠睒睖睚睩睧睔睙睭矠碇碚碔碏碄碕碅碆碡碃硹碙碀碖硻祼禂祽祹稑稘稙稒稗稕稢稓稛稐窣窢窞竫筦筤筭筴筩筲筥筳筱筰筡筸筶筣粲粴粯綈綆綀綍絿綅絺綎絻綃絼綌綔綄絽綒罭罫罧罨罬羦羥羧翛翜耡腤腠腷腜腩腛腢腲朡腞腶腧腯腄腡舝艉艄艀艂艅蓱萿葖葶葹蒏蒍葥葑葀蒆葧萰葍葽葚葙葴葳葝蔇葞萷萺萴葺葃葸萲葅萩菙葋萯葂萭葟葰萹葎葌葒葯蓅蒎萻葇萶萳葨葾葄萫葠葔葮葐蜋蜄蛷蜌蛺蛖蛵蝍蛸蜎蜉蜁蛶蜍蜅裖裋裍裎裞裛裚裌裐覅覛觟觥觤觡觠觢觜触詶誆詿詡訿詷誂誄詵誃誁詴詺谼豋豊豥豤豦貆貄貅賌赨赩趑趌趎趏趍趓趔趐趒跰
跠跬跱跮跐跩跣跢跧跲跫跴輆軿輁輀輅輇輈輂輋遒逿遄遉逽鄐鄍鄏鄑鄖鄔鄋鄎酮酯鉈鉒鈰鈺鉦鈳鉥鉞銃鈮鉊鉆鉭鉬鉏鉠鉧鉯鈶鉡鉰鈱鉔鉣鉐鉲鉎鉓鉌鉖鈲閟閜閞閛隒隓隑隗雎雺雽雸雵靳靷靸靲頏頍頎颬飶飹馯馲馰馵骭骫魛鳪鳭鳧麀黽僦僔僗僨僳僛僪僝僤僓僬僰僯僣僠凘劀劁勩勫匰厬嘧嘕嘌嘒嗼嘏嘜嘁嘓嘂嗺嘝嘄嗿嗹墉塼墐墘墆墁塿塴墋塺墇墑墎塶墂墈塻墔墏壾奫嫜嫮嫥嫕嫪嫚嫭嫫嫳嫢嫠嫛嫬嫞嫝嫙嫨嫟孷寠寣屣嶂嶀嵽嶆嵺嶁嵷嶊嶉嶈嵾嵼嶍嵹嵿幘幙幓廘廑廗廎廜廕廙廒廔彄彃彯徶愬愨慁慞慱慳慒慓慲慬憀慴慔慺慛慥愻慪慡慖戩戧戫搫摍摛摝摴摶摲摳摽摵摦撦摎撂摞摜摋摓摠摐摿搿摬摫摙摥摷敳斠暡暠暟朅朄朢榱榶槉榠槎榖榰榬榼榑榙榎榧榍榩榾榯榿槄榽榤槔榹槊榚槏榳榓榪榡榞槙榗榐槂榵榥槆歊歍歋殞殟殠毃毄毾滎滵滱漃漥滸漷滻漮漉潎漙漚漧漘漻漒滭漊漶潳滹滮漭潀漰漼漵滫漇漎潃漅滽滶漹漜滼漺漟漍漞漈漡熇熐熉熀熅熂熏煻熆熁熗牄牓犗犕犓獃獍獑獌瑢瑳瑱瑵瑲瑧瑮甀甂甃畽疐瘖瘈瘌瘕瘑瘊瘔皸瞁睼瞅瞂睮瞀睯睾瞃碲碪碴碭碨硾碫碞碥碠碬碢碤禘禊禋禖禕禔禓禗禈禒禐稫穊稰稯稨稦窨窫窬竮箈箜箊箑箐箖箍箌箛箎箅箘劄箙箤箂粻粿粼粺綧綷緂綣綪緁緀緅綝緎緄緆緋緌綯綹綖綼綟綦綮綩綡緉罳翢翣翥翞耤聝聜膉膆膃膇膍膌膋舕蒗蒤蒡蒟蒺蓎蓂蒬蒮蒫蒹蒴蓁蓍蒪蒚蒱蓐蒝蒧蒻蒢蒔蓇蓌蒛蒩蒯蒨蓖蒘蒶蓏蒠蓗蓔蓒蓛蒰蒑虡蜳蜣蜨蝫蝀蜮蜞蜡蜙蜛蝃蜬蝁蜾蝆蜠蜲蜪蜭蜼蜒蜺蜱蜵蝂蜦蜧蜸蜤蜚蜰蜑裷裧裱裲裺裾裮裼裶裻裰裬裫覝覡覟覞觩觫觨誫誙誋誒誏誖谽豨豩賕賏賗趖踉踂跿踍跽踊踃踇踆踅跾踀踄輐輑輎輍鄣鄜鄠鄢鄟鄝鄚鄤鄡鄛酺酲酹酳銥銤鉶銛鉺銠銔銪銍銦銚銫鉹銗鉿銣鋮銎銂銕銢鉽銈銡銊銆銌銙銧鉾銇銩銝銋鈭隞隡雿靘靽靺靾鞃鞀鞂靻鞄鞁靿韎韍頖颭颮餂餀餇馝馜駃馹馻馺駂馽駇骱髣髧鬾鬿魠魡魟鳱鳲鳵麧僿儃儰僸儆儇僶僾儋儌僽儊劋劌勱勯噈噂噌嘵噁噊噉噆噘噚噀嘳嘽嘬嘾嘸嘪嘺圚墫墝墱墠墣墯墬墥墡壿嫿嫴嫽嫷嫶嬃嫸嬂嫹嬁嬇嬅嬏屧嶙嶗嶟嶒嶢嶓嶕嶠嶜嶡嶚嶞幩幝幠幜緳廛廞廡彉徲憋憃慹憱憰憢憉憛憓憯憭憟憒憪憡憍慦憳戭摮摰撖撠撅撗撜撏撋撊撌撣撟摨撱撘敶敺敹敻斲斳暵暰暩暲暷暪暯樀樆樗槥槸樕槱槤樠槿槬槢樛樝槾樧槲槮樔槷槧橀樈槦槻樍槼槫樉樄樘樥樏槶樦樇槴樖歑殥殣殢殦氁氀毿氂潁漦潾澇濆澒澍澉澌潢潏澅潚澖潶潬澂潕潲潒潐潗澔澓潝漀潡潫潽潧澐潓澋潩潿澕潣潷潪潻熲熯熛熰熠熚熩熵熝熥熞熤熡熪熜熧熳犘犚獘獒獞獟獠獝獛獡獚獙獢璇璉璊璆璁瑽璅璈瑼瑹甈甇畾瘥瘞瘙瘝瘜瘣瘚瘨瘛皜皝皞皛瞍瞏瞉瞈磍碻磏磌磑磎磔磈磃磄磉禚禡禠禜禢禛歶稹窲窴窳箷篋箾箬篎箯箹篊箵糅糈糌糋緷緛緪緧緗緡縃緺緦緶緱緰緮緟罶羬羰羭翭翫翪翬翦翨聤聧膣膟膞膕膢膙膗舖艏艓艒艐艎艑蔤蔻蔏蔀蔩蔎蔉蔍蔟蔊蔧蔜蓻蔫蓺蔈蔌蓴蔪蓲蔕蓷蓫蓳蓼蔒蓪蓩蔖蓾蔨蔝蔮蔂蓽蔞蓶蔱蔦蓧蓨蓰蓯蓹蔘蔠蔰蔋蔙蔯虢蝖蝣蝤蝷蟡蝳蝘蝔蝛蝒蝡蝚蝑蝞蝭蝪蝐蝎蝟蝝蝯蝬蝺蝮蝜蝥蝏蝻蝵蝢蝧蝩衚褅褌褔褋褗褘褙褆褖褑褎褉覢覤覣觭觰觬諏諆誸諓諑諔諕誻諗誾諀諅諘諃誺誽諙谾豍貏賥賟賙賨賚賝賧趠趜趡趛踠踣踥踤踮踕踛踖踑踙踦踧踔踒踘踓踜踗踚輬輤輘輚輠輣輖輗遳遰遯遧遫鄯鄫鄩鄪鄲鄦鄮醅醆醊醁醂醄醀鋐鋃鋄鋀鋙銶鋏鋱鋟鋘鋩鋗鋝鋌鋯鋂鋨鋊鋈鋎鋦鋍鋕鋉鋠鋞鋧鋑鋓銵鋡鋆銴镼閬閫閮閰隤隢雓霅霈霂靚鞊鞎鞈韐韏頞頝頦頩頨頠頛頧颲餈飺餑餔餖餗餕駜駍駏駓駔駎駉駖駘駋駗駌骳髬髫髳髲髱魆魃魧魴魱魦魶魵魰魨魤魬鳼鳺鳽鳿鳷鴇鴀鳹鳻鴈鴅鴄麃黓鼏鼐儜儓儗儚儑凞匴叡噰噠噮噳噦噣噭噲噞噷圜圛壈墽壉墿墺壂墼壆嬗嬙嬛嬡嬔嬓嬐嬖嬨嬚嬠嬞寯嶬嶱嶩嶧嶵嶰嶮嶪嶨嶲嶭嶯嶴幧幨幦幯廩廧廦廨廥彋徼憝憨憖懅憴懆懁懌憺憿憸憌擗擖擐擏擉撽撉擃擛擳擙攳敿敼斢曈暾曀曊曋曏暽暻暺曌朣樴橦橉橧樲橨樾橝橭橶橛橑樨橚樻樿橁橪橤橐橏橔橯橩橠樼橞橖橕橍橎橆歕歔歖殧殪殫毈毇氄氃氆澭濋澣濇澼濎濈潞濄澽澞濊澨瀄澥澮澺澬澪濏澿澸澢濉澫濍澯澲澰燅燂熿熸燖燀燁燋燔燊燇燏熽燘熼燆燚燛犝犞獩獦獧獬獥獫獪瑿璚璠璔璒璕璡甋疀瘯瘭瘱瘽瘳瘼瘵瘲瘰皻盦瞚瞝瞡瞜瞛瞢瞣瞕瞙瞗磝磩磥磪磞磣磛磡磢磭磟磠禤穄穈穇窶窸窵窱窷篞篣篧篝篕篥篚篨篹篔篪篢篜篫篘篟糒糔糗糐糑縒縡縗縌縟縠縓縎縜縕縚縢縋縏縖縍縔縥縤罃罻罼罺羱翯耪耩聬膱膦膮膹膵膫膰膬膴膲膷膧臲艕艖艗蕖蕅蕫蕍蕓蕡蕘蕀蕆蕤蕁蕢蕄蕑蕇蕣蔾蕛蕱蕎蕮蕵蕕蕧蕠薌蕦蕝蕔蕥蕬虣虥虤螛螏螗螓螒螈螁螖螘蝹螇螣螅螐螑螝螄螔螜螚螉褞褦褰褭褮褧褱褢褩褣褯褬褟觱諠諢諲諴諵諝謔諤諟諰諈諞諡諨諿諯諻貑貒貐賵賮賱賰賳赬赮趥趧踳踾踸蹀蹅踶踼踽蹁踰踿躽輶輮輵輲輹輷輴遶遹遻邆郺鄳鄵鄶醓醐醑醍醏錧錞錈錟錆錏鍺錸錼錛錣錒錁鍆錭錎錍鋋錝鋺錥錓鋹鋷錴錂錤鋿錩錹錵錪錔錌錋鋾錉錀鋻錖閼闍閾閹閺閶閿閵閽隩雔霋霒霐鞙鞗鞔韰韸頵頯頲餤餟餧餩馞駮駬駥駤駰駣駪駩駧骹骿骴骻髶髺髹髷鬳鮀鮅鮇魼魾魻鮂鮓鮒鮐魺鮕魽鮈鴥鴗鴠鴞鴔鴩鴝鴘鴢鴐鴙鴟麈麆麇麮麭黕黖黺鼒鼽儦儥儢儤儠儩勴嚓嚌嚍嚆嚄嚃噾嚂噿嚁壖壔壏壒嬭嬥嬲嬣嬬嬧嬦嬯嬮孻寱寲嶷幬幪徾徻懃憵憼懧懠懥懤懨懞擯擩擣擫擤擨斁斀斶旚曒檍檖檁檥檉檟檛檡檞檇檓檎檕檃檨檤檑橿檦檚檅檌檒歛殭氉濌澩濴濔濣濜濭濧濦濞濲濝濢濨燡燱燨燲燤燰燢獳獮獯璗璲璫璐璪璭璱璥璯甐甑甒甏疄癃癈癉癇皤盩瞵瞫瞲瞷瞶瞴瞱瞨矰磳磽礂磻磼磲礅磹磾礄禫禨穜穛穖穘穔穚窾竀竁簅簏篲簀篿篻簎篴簋篳簂簉簃簁篸篽簆篰篱簐簊糨縭縼繂縳顈縸縪繉繀繇縩繌縰縻縶繄縺罅罿罾罽翴翲耬膻臄臌臊臅臇膼臩艛艚艜薃薀薏薧薕薠薋薣蕻薤薚薞蕷蕼薉薡蕺蕸蕗薎薖薆薍薙薝薁薢薂薈薅蕹蕶薘薐薟虨螾螪螭蟅螰螬螹螵螼螮蟉蟃蟂蟌螷螯蟄蟊螴螶螿螸螽蟞螲褵褳褼褾襁襒褷襂覭覯覮觲觳謞謘謖謑謅謋謢謏謒謕謇謍謈謆謜謓謚豏豰豲豱豯貕貔賹赯蹎蹍蹓蹐蹌蹇轃轀邅遾鄸醚醢醛醙醟醡醝醠鎡鎃鎯鍤鍖鍇鍼鍘鍜鍶鍉鍐鍑鍠鍭鎏鍌鍪鍹鍗鍕鍒鍏鍱鍷鍻鍡鍞鍣鍧鎀鍎鍙闇闀闉闃闅閷隮隰隬霠霟霘霝霙鞚鞡鞜鞞鞝韕韔韱顁顄顊顉顅顃餥餫餬餪餳餲餯餭餱餰馘馣馡騂駺駴駷駹駸駶駻駽駾駼騃骾髾髽鬁髼魈鮚鮨鮞鮛鮦鮡鮥鮤鮆鮢鮠鮯鴳鵁鵧鴶鴮鴯鴱鴸鴰鵅鵂鵃鴾鴷鵀鴽翵鴭麊麉麍麰黈黚黻黿鼤鼣鼢齔龠儱儭儮嚘嚜嚗嚚嚝嚙奰嬼屩屪巀幭幮懘懟懭懮懱懪懰懫懖懩擿攄擽擸攁攃擼斔旛曚曛曘櫅檹檽櫡櫆檺檶檷櫇檴檭歞毉氋瀇瀌瀍瀁瀅瀔瀎濿瀀濻瀦濼濷瀊爁燿燹爃燽獶璸瓀璵瓁璾璶璻瓂甔甓癜癤癙癐癓癗癚皦皽盬矂瞺磿礌礓礔礉礐礒礑禭禬穟簜簩簙簠簟簭簝簦簨簢簥簰繜繐繖繣繘繢繟繑繠繗繓羵羳翷翸聵臑臒臐艟艞薴藆藀藃藂薳薵薽藇藄薿藋藎藈藅薱薶藒蘤薸薷薾虩蟧蟦蟢蟛蟫蟪蟥蟟蟳蟤蟔蟜蟓蟭蟘蟣螤蟗蟙蠁蟴蟨蟝襓襋襏襌襆襐襑襉謪謧謣謳謰謵譇謯謼謾謱謥謷謦謶謮謤謻謽謺豂豵貙貘貗賾贄贂贀蹜蹢蹠蹗蹖蹞蹥蹧蹛蹚蹡蹝蹩蹔轆轇轈轋鄨鄺鄻鄾醨醥醧醯醪鎵鎌鎒鎷鎛鎝鎉鎧鎎鎪鎞鎦鎕鎈鎙鎟鎍鎱鎑鎲鎤鎨鎴鎣鎥闒闓闑隳雗雚巂雟雘雝霣霢霥鞬鞮鞨鞫鞤鞪鞢鞥韗韙韖韘韺顐顑顒颸饁餼餺騏騋騉騍騄騑騊騅騇騆髀髜鬈鬄鬅鬩鬵魊魌魋鯇鯆鯃鮿鯁鮵鮸鯓鮶鯄鮹鮽鵜鵓鵏鵊鵛鵋鵙鵖鵌鵗鵒鵔鵟鵘鵚麎麌黟鼁鼀鼖鼥鼫鼪鼩鼨齌齕儴儵劖勷厴嚫嚭嚦嚧嚪嚬壚壝壛夒嬽嬾嬿巃幰徿懻攇攐攍攉攌攎斄旞旝曞櫧櫠櫌櫑櫙櫋櫟櫜櫐櫫櫏櫍櫞歠殰氌瀙瀧瀠瀖瀫瀡瀢瀣瀩瀗瀤瀜瀪爌爊爇爂爅犥犦犤犣犡瓋瓅璷瓃甖癠矉矊矄矱礝礛礡礜礗礞禰穧穨簳簼簹簬簻糬糪繶繵繸繰繷繯繺繲繴繨罋罊羃羆羷翽翾聸臗臕艤艡艣藫藱藭藙藡藨藚藗藬藲藸藘藟藣藜藑藰藦藯藞藢蠀蟺蠃蟶蟷蠉蠌蠋蠆蟼蠈蟿蠊蠂襢襚襛襗襡襜襘襝襙覈覷覶觶譐譈譊譀譓譖譔譋譕譑譂譒譗豃豷豶貚贆贇贉趬趪趭趫蹭蹸蹳蹪蹯蹻軂轒轑轏轐轓辴酀鄿醰醭鏞鏇鏏鏂鏚鏐鏹鏬鏌鏙鎩鏦鏊鏔鏮鏣鏕鏄鏎鏀鏒鏧镽闚闛雡霩霫霬霨霦鞳鞷鞶韝韞韟顜顙顝顗颿颽颻颾饈饇饃馦馧騚騕騥騝騤騛騢騠騧騣騞騜騔髂鬋鬊鬎鬌鬷鯪鯫鯠鯞鯤鯦鯢鯰鯔鯗鯬鯜鯙鯥鯕鯡鯚鵷鶁鶊鶄鶈鵱鶀鵸鶆鶋鶌鵽鵫鵴鵵鵰鵩鶅鵳鵻鶂鵯鵹鵿鶇鵨麔麑黀黼鼭齀齁齍齖齗齘匷嚲嚵嚳壣孅巆巇廮廯忀忁懹攗攖攕攓旟曨曣曤櫳櫰櫪櫨櫹櫱櫮櫯瀼瀵瀯瀷瀴瀱灂瀸瀿瀺瀹灀瀻瀳灁爓爔犨獽獼璺皫皪皾盭矌矎矏矍矲礥礣礧礨礤礩禲穮穬穭竷籉籈籊籇籅糮繻繾纁纀羺翿聹臛臙舋艨艩蘢藿蘁藾蘛蘀藶蘄蘉蘅蘌藽蠙蠐蠑蠗蠓蠖襣襦覹觷譠譪譝譨譣譥譧譭趮躆躈躄轙轖轗轕轘轚邍酃酁醷醵醲醳鐋鐓鏻鐠鐏鐔鏾鐕鐐鐨鐙鐍鏵鐀鏷鐇鐎鐖鐒鏺鐉鏸鐊鏿鏼鐌鏶鐑鐆闞闠闟霮霯鞹鞻韽韾顠顢顣顟飁飂饐饎饙饌饋饓騲騴騱騬騪騶騩騮騸騭髇髊髆鬐鬒鬑鰋鰈鯷鰅鰒鯸鱀鰇鰎鰆鰗鰔鰉鶟鶙鶤鶝鶒鶘鶐鶛鶠鶔鶜鶪鶗鶡鶚鶢鶨鶞鶣鶿鶩鶖鶦鶧麙麛麚黥黤黧黦鼰鼮齛齠齞齝齙龑儺儹劘劗囃嚽嚾孈孇巋巏廱懽攛欂櫼欃櫸欀灃灄灊灈灉灅灆爝爚爙獾甗癪矐礭礱礯籔籓糲纊纇纈纋纆纍罍羻耰臝蘘蘪蘦蘟蘣蘜蘙蘧蘮蘡蘠蘩蘞蘥蠩蠝蠛蠠蠤蠜蠫衊襭襩襮襫觺譹譸譅譺譻贐贔趯躎躌轞轛轝酆酄酅醹鐿鐻鐶鐩鐽鐼鐰鐹鐪鐷鐬鑀鐱闥闤闣霵霺鞿韡顤飉飆飀饘饖騹騽驆驄驂驁騺騿髍鬕鬗鬘鬖鬺魒鰫鰝鰜鰬鰣鰨鰩鰤鰡鶷鶶鶼鷁鷇鷊鷏鶾鷅鷃鶻鶵鷎鶹鶺鶬鷈鶱鶭鷌鶳鷍鶲鹺麜黫黮黭鼛鼘鼚鼱齎齥齤龒亹囆囅囋奱孋孌巕巑廲攡攠攦攢欋欈欉氍灕灖灗灒爞爟犩
獿瓘瓕瓙瓗癭皭礵禴穰穱籗籜籙籛籚糴糱纑罏羇臞艫蘴蘵蘳蘬蘲蘶蠬蠨蠦蠪蠥襱覿覾觻譾讄讂讆讅譿贕躕躔躚躒躐躖躗轠轢酇鑌鑐鑊鑋鑏鑇鑅鑈鑉鑆霿韣顪顩飋饔饛驎驓驔驌驏驈驊驉驒驐髐鬙鬫鬻魖魕鱆鱈鰿鱄鰹鰳鱁鰼鰷鰴鰲鰽鰶鷛鷒鷞鷚鷋鷐鷜鷑鷟鷩鷙鷘鷖鷵鷕鷝麶黰鼵鼳鼲齂齫龕龢儽劙壨壧奲孍巘蠯彏戁戃戄攩攥斖曫欑欒欏毊灛灚爢玂玁玃癰矔籧籦纕艬蘺虀蘹蘼蘱蘻蘾蠰蠲蠮蠳襶襴襳觾讌讎讋讈豅贙躘轤轣醼鑢鑕鑝鑗鑞韄韅頀驖驙鬞鬟鬠鱒鱘鱐鱊鱍鱋鱕鱙鱌鱎鷻鷷鷯鷣鷫鷸鷤鷶鷡鷮鷦鷲鷰鷢鷬鷴鷳鷨鷭黂黐黲黳鼆鼜鼸鼷鼶齃齏齱齰齮齯囓囍孎屭攭曭曮欓灟灡灝灠爣瓛瓥矕礸禷禶籪纗羉艭虃蠸蠷蠵衋讔讕躞躟躠躝醾醽釂鑫鑨鑩雥靆靃靇韇韥驞髕魙鱣鱧鱦鱢鱞鱠鸂鷾鸇鸃鸆鸅鸀鸁鸉鷿鷽鸄麠鼞齆齴齵齶囔攮斸欘欙欗欚灢爦犪矘矙礹籩籫糶纚纘纛纙臠臡虆虇虈襹襺襼襻觿讘讙躥躤躣鑮鑭鑯鑱鑳靉顲饟鱨鱮鱭鸋鸍鸐鸏鸒鸑麡黵鼉齇齸齻齺齹圞灦籯蠼趲躦釃鑴鑸鑶鑵驠鱴鱳鱱鱵鸔鸓黶鼊龤灨灥糷虪蠾蠽蠿讞貜躩軉靋顳顴飌饡馫驤驦驧鬤鸕鸗齈戇欞爧虌躨钂钀钁驩驨鬮鸙爩虋讟钃鱹麷癵驫鱺鸝灩灪麤齾齉龘�����������������������������������������") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_charsets.json.go b/vendor/github.com/rogpeppe/go-charset/data/data_charsets.json.go new file mode 100644 index 000000000..a2e578d4a --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_charsets.json.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("charsets.json", func() (io.ReadCloser, error) { + r := strings.NewReader("{\n\"8bit\": {\n\t\"Desc\": \"raw 8-bit data\",\n\t\"Class\": \"8bit\",\n\t\"Comment\": \"special class for raw 8bit data that has been converted to utf-8\"\n},\n\"big5\": {\n\t\"Desc\": \"Big 5 (HKU)\",\n\t\"Class\": \"big5\",\n\t\"Comment\": \"Traditional Chinese\"\n},\n\"euc-jp\": {\n\t\"Aliases\":[\"x-euc-jp\"],\n\t\"Desc\": \"Japanese Extended UNIX Code\",\n\t\"Class\": \"euc-jp\"\n},\n\"gb2312\": {\n\t\"Aliases\":[\"iso-ir-58\", \"chinese\", \"gb_2312-80\"],\n\t\"Desc\": \"Chinese mixed one byte\",\n\t\"Class\": \"gb2312\"\n},\n\"ibm437\": {\n\t\"Aliases\":[\"437\", \"cp437\"],\n\t\"Desc\": \"IBM PC: CP 437\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"ibm437.cp\",\n\t\"Comment\": \"originally from jhelling@cs.ruu.nl (Jeroen Hellingman)\"\n},\n\"ibm850\": {\n\t\"Aliases\":[\"850\", \"cp850\"],\n\t\"Desc\": \"IBM PS/2: CP 850\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"ibm850.cp\",\n\t\"Comment\": \"originally from jhelling@cs.ruu.nl (Jeroen Hellingman)\"\n},\n\"ibm866\": {\n\t\"Aliases\":[\"cp866\", \"866\"],\n\t\"Desc\": \"Russian MS-DOS CP 866\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"ibm866.cp\"\n},\n\"iso-8859-1\": {\n\t\"Aliases\":[\"iso-ir-100\", \"ibm819\", \"l1\", \"iso8859-1\", \"iso-latin-1\", \"iso_8859-1:1987\", \"cp819\", \"iso_8859-1\", \"iso8859_1\", \"latin1\"],\n\t\"Desc\": \"Latin-1\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-1.cp\"\n},\n\"iso-8859-10\": {\n\t\"Aliases\":[\"iso_8859-10:1992\", \"l6\", \"iso-ir-157\", \"latin6\"],\n\t\"Desc\": \"Latin-6\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-10.cp\",\n\t\"Comment\": \"originally from dkuug.dk:i18n/charmaps/ISO_8859-10:1993\"\n},\n\"iso-8859-15\": {\n\t\"Aliases\":[\"l9-iso-8859-15\", \"latin9\"],\n\t\"Desc\": \"Latin-9\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-15.cp\"\n},\n\"iso-8859-2\": {\n\t\"Aliases\":[\"iso-ir-101\", \"iso_8859-2:1987\", \"l2\", \"iso_8859-2\", \"latin2\"],\n\t\"Desc\": \"Latin-2\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-2.cp\"\n},\n\"iso-8859-3\": {\n\t\"Aliases\":[\"iso-ir-109\", \"l3\", \"iso_8859-3:1988\", \"iso_8859-3\", \"latin3\"],\n\t\"Desc\": \"Latin-3\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-3.cp\"\n},\n\"iso-8859-4\": {\n\t\"Aliases\":[\"iso-ir-110\", \"iso_8859-4:1988\", \"l4\", \"iso_8859-4\", \"latin4\"],\n\t\"Desc\": \"Latin-4\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-4.cp\"\n},\n\"iso-8859-5\": {\n\t\"Aliases\":[\"cyrillic\", 
\"iso_8859-5\", \"iso-ir-144\", \"iso_8859-5:1988\"],\n\t\"Desc\": \"Part 5 (Cyrillic)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-5.cp\"\n},\n\"iso-8859-6\": {\n\t\"Aliases\":[\"ecma-114\", \"iso_8859-6:1987\", \"arabic\", \"iso_8859-6\", \"asmo-708\", \"iso-ir-127\"],\n\t\"Desc\": \"Part 6 (Arabic)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-6.cp\"\n},\n\"iso-8859-7\": {\n\t\"Aliases\":[\"greek8\", \"elot_928\", \"ecma-118\", \"greek\", \"iso_8859-7\", \"iso_8859-7:1987\", \"iso-ir-126\"],\n\t\"Desc\": \"Part 7 (Greek)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-7.cp\"\n},\n\"iso-8859-8\": {\n\t\"Aliases\":[\"iso_8859-8:1988\", \"hebrew\", \"iso_8859-8\", \"iso-ir-138\"],\n\t\"Desc\": \"Part 8 (Hebrew)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-8.cp\"\n},\n\"iso-8859-9\": {\n\t\"Aliases\":[\"l5\", \"iso_8859-9:1989\", \"iso_8859-9\", \"iso-ir-148\", \"latin5\"],\n\t\"Desc\": \"Latin-5\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-9.cp\"\n},\n\"koi8-r\": {\n\t\"Desc\": \"KOI8-R (RFC1489)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"koi8-r.cp\"\n},\n\"shift_jis\": {\n\t\"Aliases\":[\"sjis\", \"ms_kanji\", \"x-sjis\"],\n\t\"Desc\": \"Shift-JIS Japanese\",\n\t\"Class\": \"cp932\",\n\t\"Arg\": \"shiftjis\"\n},\n\"utf-16\": {\n\t\"Aliases\":[\"utf16\"],\n\t\"Desc\": \"Unicode UTF-16\",\n\t\"Class\": \"utf16\"\n},\n\"utf-16be\": {\n\t\"Aliases\":[\"utf16be\"],\n\t\"Desc\": \"Unicode UTF-16 big endian\",\n\t\"Class\": \"utf16\",\n\t\"Arg\": \"be\"\n},\n\"utf-16le\": {\n\t\"Aliases\":[\"utf16le\"],\n\t\"Desc\": \"Unicode UTF-16 little endian\",\n\t\"Class\": \"utf16\",\n\t\"Arg\": \"le\"\n},\n\"utf-8\": {\n\t\"Aliases\":[\"utf8\", \"ascii\", \"us-ascii\"],\n\t\"Desc\": \"Unicode UTF-8\",\n\t\"Class\": \"utf8\"\n},\n\"windows-1250\": {\n\t\"Desc\": \"MS Windows CP 1250 (Central Europe)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"windows-1250.cp\"\n},\n\"windows-1251\": {\n\t\"Desc\": \"MS Windows CP 1251 (Cyrillic)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"windows-1251.cp\"\n},\n\"windows-1252\": {\n\t\"Desc\": \"MS Windows CP 1252 (Latin 1)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"windows-1252.cp\"\n},\n\"windows-31j\": {\n\t\"Aliases\":[\"cp932\"],\n\t\"Desc\": \"MS-Windows Japanese (cp932)\",\n\t\"Class\": \"cp932\",\n\t\"Arg\": \"cp932\"\n}\n}\n") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_cp932.dat.go b/vendor/github.com/rogpeppe/go-charset/data/data_cp932.dat.go new file mode 100644 index 000000000..0e53a5c1e --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_cp932.dat.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("cp932.dat", func() (io.ReadCloser, error) { + r := strings.NewReader("\u3000、。,.・:;?!゛゜´`¨^ ̄_ヽヾゝゞ〃仝々〆〇ー―‐/\~∥|…‥‘’“”()〔〕[]{}〈〉《》「」『』【】+-±×�÷=≠<>≦≧∞∴♂♀°′″℃¥$¢£%#&*@§☆★○●◎◇◆□■△▲▽▼※〒→←↑↓〓�����������∈∋⊆⊇⊂⊃∪∩��������∧∨¬⇒⇔∀∃�����������∠⊥⌒∂∇≡≒≪≫√∽∝∵∫∬�������ʼn♯♭♪†‡¶����◯���������������0123456789�������ABCDEFGHIJKLMNOPQRSTUVWXYZ�������abcdefghijklmnopqrstuvwxyz����ぁあぃいぅうぇえぉおかがきぎくぐけげこごさざしじすずせぜそぞただちぢっつづてでとどなにぬねのはばぱひびぴふぶぷへべぺほぼぽまみむめもゃやゅゆょよらりるれろゎわゐゑをん�����������ァアィイゥウェエォオカガキギクグケゲコゴサザシジスズセゼソゾタダチヂッツヅテデトドナニヌネノハバパヒビピフブプヘベペホボポマミ�ムメモャヤュユョヨラリルレロヮワヰヱヲンヴヵヶ��������ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩ��������αβγδεζηθικλμνξοπρστυφχψω��������������������������������������АБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ���������������абвгдеёжзийклмн�опрстуфхцчшщъыьэюя�������������─│┌┐┘└├┬┤┴┼━┃┏┓┛┗┣┳┫┻╋┠┯┨┷┿┝┰┥┸╂��������������������������������������������������������������①②③④⑤⑥⑦⑧⑨⑩⑪⑫⑬⑭⑮⑯⑰⑱⑲⑳ⅠⅡⅢⅣⅤⅥⅦⅧⅨⅩ�㍉㌔㌢㍍㌘㌧㌃㌶㍑㍗㌍㌦㌣㌫㍊㌻㎜㎝㎞㎎㎏㏄㎡��������㍻�〝〟№㏍℡㊤㊥㊦㊧㊨㈱㈲㈹㍾㍽㍼≒≡∫∮∑√⊥∠∟⊿∵∩∪�����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������亜唖娃阿哀愛挨姶逢葵茜穐悪握渥旭葦芦鯵梓圧斡扱宛姐虻飴絢綾鮎或粟袷安庵按暗案闇鞍杏以伊位依偉囲夷委威尉惟意慰易椅為畏異移維緯胃萎衣謂違遺医井亥域育郁磯一壱溢逸稲茨芋鰯允印咽員因姻引飲淫胤蔭院陰隠韻吋右宇烏羽迂雨卯鵜窺丑碓臼渦嘘唄欝蔚鰻姥厩浦瓜閏噂云運雲荏餌叡営嬰影映曳栄永泳洩瑛盈穎頴英衛詠鋭液疫益駅悦謁越閲榎厭円�園堰奄宴延怨掩援沿演炎焔煙燕猿縁艶苑薗遠鉛鴛塩於汚甥凹央奥往応押旺横欧殴王翁襖鴬鴎黄岡沖荻億屋憶臆桶牡乙俺卸恩温穏音下化仮何伽価佳加可嘉夏嫁家寡科暇果架歌河火珂禍禾稼箇花苛茄荷華菓蝦課嘩貨迦過霞蚊俄峨我牙画臥芽蛾賀雅餓駕介会解回塊壊廻快怪悔恢懐戒拐改魁晦械海灰界皆絵芥蟹開階貝凱劾外咳害崖慨概涯碍蓋街該鎧骸浬馨蛙垣柿蛎鈎劃嚇各廓拡撹格核殻獲確穫覚角赫較郭閣隔革学岳楽額顎掛笠樫�橿梶鰍潟割喝恰括活渇滑葛褐轄且鰹叶椛樺鞄株兜竃蒲釜鎌噛鴨栢茅萱粥刈苅瓦乾侃冠寒刊勘勧巻喚堪姦完官寛干幹患感慣憾換敢柑桓棺款歓汗漢澗潅環甘監看竿管簡緩缶翰肝艦莞観諌貫還鑑間閑関陥韓館舘丸含岸巌玩癌眼岩翫贋雁頑顔願企伎危喜器基奇嬉寄岐希幾忌揮机旗既期棋棄機帰毅気汽畿祈季稀紀徽規記貴起軌輝飢騎鬼亀偽儀妓宜戯技擬欺犠疑祇義蟻誼議掬菊鞠吉吃喫桔橘詰砧杵黍却客脚虐逆丘久仇休及吸宮弓急救�朽求汲泣灸球究窮笈級糾給旧牛去居巨拒拠挙渠虚許距鋸漁禦魚亨享京供侠僑兇競共凶協匡卿叫喬境峡強彊怯恐恭挟教橋況狂狭矯胸脅興蕎郷鏡響饗驚仰凝尭暁業局曲極玉桐粁僅勤均巾錦斤欣欽琴禁禽筋緊芹菌衿襟謹近金吟銀九倶句区狗玖矩苦躯駆駈駒具愚虞喰空偶寓遇隅串櫛釧屑屈掘窟沓靴轡窪熊隈粂栗繰桑鍬勲君薫訓群軍郡卦袈祁係傾刑兄啓圭珪型契形径恵慶慧憩掲携敬景桂渓畦稽系経継繋罫茎荊蛍計詣警軽頚鶏芸迎鯨�劇戟撃激隙桁傑欠決潔穴結血訣月件倹倦健兼券剣喧圏堅嫌建憲懸拳捲検権牽犬献研硯絹県肩見謙賢軒遣鍵険顕験鹸元原厳幻弦減源玄現絃舷言諺限乎個古呼固姑孤己庫弧戸故枯湖狐糊袴股胡菰虎誇跨鈷雇顧鼓五互伍午呉吾娯後御悟梧檎瑚碁語誤護醐乞鯉交佼侯候倖光公功効勾厚口向后喉坑垢好孔孝宏工巧巷幸広庚康弘恒慌抗拘控攻昂晃更杭校梗構江洪浩港溝甲皇硬稿糠紅紘絞綱耕考肯肱腔膏航荒行衡講貢購郊酵鉱砿鋼閤降�項香高鴻剛劫号合壕拷濠豪轟麹克刻告国穀酷鵠黒獄漉腰甑忽惚骨狛込此頃今困坤墾婚恨懇昏昆根梱混痕紺艮魂些佐叉唆嵯左差査沙瑳砂詐鎖裟坐座挫債催再最哉塞妻宰彩才採栽歳済災采犀砕砦祭斎細菜裁載際剤在材罪財冴坂阪堺榊肴咲崎埼碕鷺作削咋搾昨朔柵窄策索錯桜鮭笹匙冊刷察拶撮擦札殺薩雑皐鯖捌錆鮫皿晒三傘参山惨撒散桟燦珊産算纂蚕讃賛酸餐斬暫残仕仔伺使刺司史嗣四士始姉姿子屍市師志思指支孜斯施旨枝止�死氏獅祉私糸紙紫肢脂至視詞詩試誌諮資賜雌飼歯事似侍児字寺慈持時次滋治爾璽痔磁示而耳自蒔辞汐鹿式識鴫竺軸宍雫七叱執失嫉室悉湿漆疾質実蔀篠偲柴芝屡蕊縞舎写射捨赦斜煮社紗者謝車遮蛇邪借勺尺杓灼爵酌釈錫若寂弱惹主取守手朱殊狩珠種腫趣酒首儒受呪寿授樹綬需囚収周宗就州修愁拾洲秀秋終繍習臭舟蒐衆襲讐蹴輯週酋酬集醜什住充十従戎柔汁渋獣縦重銃叔夙宿淑祝縮粛塾熟出術述俊峻春瞬竣舜駿准循旬楯殉淳�準潤盾純巡遵醇順処初所暑曙渚庶緒署書薯藷諸助叙女序徐恕鋤除傷償勝匠升召哨商唱嘗奨妾娼宵将小少尚庄床廠彰承抄招掌捷昇昌昭晶松梢樟樵沼消渉湘焼焦照症省硝礁祥称章笑粧紹肖菖蒋蕉衝裳訟証詔詳象賞醤鉦鍾鐘障鞘上丈丞乗冗剰城場壌嬢常情擾条杖浄状畳穣蒸譲醸錠嘱埴飾拭植殖燭織職色触食蝕辱尻伸信侵唇娠寝審心慎振新晋森榛浸深申疹真神秦紳臣芯薪親診身辛進針震人仁刃塵壬尋甚尽腎訊迅陣靭笥諏須酢図厨�逗吹垂帥推水炊睡粋翠衰遂酔錐錘随瑞髄崇嵩数枢趨雛据杉椙菅頗雀裾澄摺寸世瀬畝是凄制勢姓征性成政整星晴棲栖正清牲生盛精聖声製西誠誓請逝醒青静斉税脆隻席惜戚斥昔析石積籍績脊責赤跡蹟碩切拙接摂折設窃節説雪絶舌蝉仙先千占宣専尖川戦扇撰栓栴泉浅洗染潜煎煽旋穿箭線繊羨腺舛船薦詮賎践選遷銭銑閃鮮前善漸然全禅繕膳糎噌塑岨措曾曽楚狙疏疎礎祖租粗素組蘇訴阻遡鼠僧創双叢倉喪壮奏爽宋層匝惣想捜掃挿掻�操早曹巣槍槽漕燥争痩相窓糟総綜聡草荘葬蒼藻装走送遭鎗霜騒像増憎臓蔵贈造促側則即息捉束測足速俗属賊族続卒袖其揃存孫尊損村遜他多太汰詑唾堕妥惰打柁舵楕陀駄騨体堆対耐岱帯待怠態戴替泰滞胎腿苔袋貸退逮隊黛鯛代台大第醍題鷹滝瀧卓啄宅托択拓沢濯琢託鐸濁諾茸凧蛸只叩但達辰奪脱巽竪辿棚谷狸鱈樽誰丹単嘆坦担探旦歎淡湛炭短端箪綻耽胆蛋誕鍛団壇弾断暖檀段男談値知地弛恥智池痴稚置致蜘遅馳築畜竹筑蓄�逐秩窒茶嫡着中仲宙忠抽昼柱注虫衷註酎鋳駐樗瀦猪苧著貯丁兆凋喋寵帖帳庁弔張彫徴懲挑暢朝潮牒町眺聴脹腸蝶調諜超跳銚長頂鳥勅捗直朕沈珍賃鎮陳津墜椎槌追鎚痛通塚栂掴槻佃漬柘辻蔦綴鍔椿潰坪壷嬬紬爪吊釣鶴亭低停偵剃貞呈堤定帝底庭廷弟悌抵挺提梯汀碇禎程締艇訂諦蹄逓邸鄭釘鼎泥摘擢敵滴的笛適鏑溺哲徹撤轍迭鉄典填天展店添纏甜貼転顛点伝殿澱田電兎吐堵塗妬屠徒斗杜渡登菟賭途都鍍砥砺努度土奴怒倒党冬�凍刀唐塔塘套宕島嶋悼投搭東桃梼棟盗淘湯涛灯燈当痘祷等答筒糖統到董蕩藤討謄豆踏逃透鐙陶頭騰闘働動同堂導憧撞洞瞳童胴萄道銅峠鴇匿得徳涜特督禿篤毒独読栃橡凸突椴届鳶苫寅酉瀞噸屯惇敦沌豚遁頓呑曇鈍奈那内乍凪薙謎灘捺鍋楢馴縄畷南楠軟難汝二尼弐迩匂賑肉虹廿日乳入如尿韮任妊忍認濡禰祢寧葱猫熱年念捻撚燃粘乃廼之埜嚢悩濃納能脳膿農覗蚤巴把播覇杷波派琶破婆罵芭馬俳廃拝排敗杯盃牌背肺輩配倍培媒梅�楳煤狽買売賠陪這蝿秤矧萩伯剥博拍柏泊白箔粕舶薄迫曝漠爆縛莫駁麦函箱硲箸肇筈櫨幡肌畑畠八鉢溌発醗髪伐罰
抜筏閥鳩噺塙蛤隼伴判半反叛帆搬斑板氾汎版犯班畔繁般藩販範釆煩頒飯挽晩番盤磐蕃蛮匪卑否妃庇彼悲扉批披斐比泌疲皮碑秘緋罷肥被誹費避非飛樋簸備尾微枇毘琵眉美鼻柊稗匹疋髭彦膝菱肘弼必畢筆逼桧姫媛紐百謬俵彪標氷漂瓢票表評豹廟描病秒苗錨鋲蒜蛭鰭品彬斌浜瀕貧賓頻敏瓶不付埠夫婦富冨布府怖扶敷�斧普浮父符腐膚芙譜負賦赴阜附侮撫武舞葡蕪部封楓風葺蕗伏副復幅服福腹複覆淵弗払沸仏物鮒分吻噴墳憤扮焚奮粉糞紛雰文聞丙併兵塀幣平弊柄並蔽閉陛米頁僻壁癖碧別瞥蔑箆偏変片篇編辺返遍便勉娩弁鞭保舗鋪圃捕歩甫補輔穂募墓慕戊暮母簿菩倣俸包呆報奉宝峰峯崩庖抱捧放方朋法泡烹砲縫胞芳萌蓬蜂褒訪豊邦鋒飽鳳鵬乏亡傍剖坊妨帽忘忙房暴望某棒冒紡肪膨謀貌貿鉾防吠頬北僕卜墨撲朴牧睦穆釦勃没殆堀幌奔本翻凡盆�摩磨魔麻埋妹昧枚毎哩槙幕膜枕鮪柾鱒桝亦俣又抹末沫迄侭繭麿万慢満漫蔓味未魅巳箕岬密蜜湊蓑稔脈妙粍民眠務夢無牟矛霧鵡椋婿娘冥名命明盟迷銘鳴姪牝滅免棉綿緬面麺摸模茂妄孟毛猛盲網耗蒙儲木黙目杢勿餅尤戻籾貰問悶紋門匁也冶夜爺耶野弥矢厄役約薬訳躍靖柳薮鑓愉愈油癒諭輸唯佑優勇友宥幽悠憂揖有柚湧涌猶猷由祐裕誘遊邑郵雄融夕予余与誉輿預傭幼妖容庸揚揺擁曜楊様洋溶熔用窯羊耀葉蓉要謡踊遥陽養慾抑欲�沃浴翌翼淀羅螺裸来莱頼雷洛絡落酪乱卵嵐欄濫藍蘭覧利吏履李梨理璃痢裏裡里離陸律率立葎掠略劉流溜琉留硫粒隆竜龍侶慮旅虜了亮僚両凌寮料梁涼猟療瞭稜糧良諒遼量陵領力緑倫厘林淋燐琳臨輪隣鱗麟瑠塁涙累類令伶例冷励嶺怜玲礼苓鈴隷零霊麗齢暦歴列劣烈裂廉恋憐漣煉簾練聯蓮連錬呂魯櫓炉賂路露労婁廊弄朗楼榔浪漏牢狼篭老聾蝋郎六麓禄肋録論倭和話歪賄脇惑枠鷲亙亘鰐詫藁蕨椀湾碗腕��������������������������������������������弌丐丕个丱丶丼丿乂乖乘亂亅豫亊舒弍于亞亟亠亢亰亳亶从仍仄仆仂仗仞仭仟价伉佚估佛佝佗佇佶侈侏侘佻佩佰侑佯來侖儘俔俟俎俘俛俑俚俐俤俥倚倨倔倪倥倅伜俶倡倩倬俾俯們倆偃假會偕偐偈做偖偬偸傀傚傅傴傲僉僊傳僂僖僞僥僭僣僮價僵儉儁儂儖儕儔儚儡儺儷儼儻儿兀兒兌兔兢竸兩兪兮冀冂囘册冉冏冑冓冕冖冤冦冢冩冪冫决冱冲冰况冽凅凉凛几處凩凭�凰凵凾刄刋刔刎刧刪刮刳刹剏剄剋剌剞剔剪剴剩剳剿剽劍劔劒剱劈劑辨辧劬劭劼劵勁勍勗勞勣勦飭勠勳勵勸勹匆匈甸匍匐匏匕匚匣匯匱匳匸區卆卅丗卉卍凖卞卩卮夘卻卷厂厖厠厦厥厮厰厶參簒雙叟曼燮叮叨叭叺吁吽呀听吭吼吮吶吩吝呎咏呵咎呟呱呷呰咒呻咀呶咄咐咆哇咢咸咥咬哄哈咨咫哂咤咾咼哘哥哦唏唔哽哮哭哺哢唹啀啣啌售啜啅啖啗唸唳啝喙喀咯喊喟啻啾喘喞單啼喃喩喇喨嗚嗅嗟嗄嗜嗤嗔嘔嗷嘖嗾嗽嘛嗹噎噐營嘴嘶嘲嘸�噫噤嘯噬噪嚆嚀嚊嚠嚔嚏嚥嚮嚶嚴囂嚼囁囃囀囈囎囑囓囗囮囹圀囿圄圉圈國圍圓團圖嗇圜圦圷圸坎圻址坏坩埀垈坡坿垉垓垠垳垤垪垰埃埆埔埒埓堊埖埣堋堙堝塲堡塢塋塰毀塒堽塹墅墹墟墫墺壞墻墸墮壅壓壑壗壙壘壥壜壤壟壯壺壹壻壼壽夂夊夐夛梦夥夬夭夲夸夾竒奕奐奎奚奘奢奠奧奬奩奸妁妝佞侫妣妲姆姨姜妍姙姚娥娟娑娜娉娚婀婬婉娵娶婢婪媚媼媾嫋嫂媽嫣嫗嫦嫩嫖嫺嫻嬌嬋嬖嬲嫐嬪嬶嬾孃孅孀孑孕孚孛孥孩孰孳孵學斈孺宀�它宦宸寃寇寉寔寐寤實寢寞寥寫寰寶寳尅將專對尓尠尢尨尸尹屁屆屎屓屐屏孱屬屮乢屶屹岌岑岔妛岫岻岶岼岷峅岾峇峙峩峽峺峭嶌峪崋崕崗嵜崟崛崑崔崢崚崙崘嵌嵒嵎嵋嵬嵳嵶嶇嶄嶂嶢嶝嶬嶮嶽嶐嶷嶼巉巍巓巒巖巛巫已巵帋帚帙帑帛帶帷幄幃幀幎幗幔幟幢幤幇幵并幺麼广庠廁廂廈廐廏廖廣廝廚廛廢廡廨廩廬廱廳廰廴廸廾弃弉彝彜弋弑弖弩弭弸彁彈彌彎弯彑彖彗彙彡彭彳彷徃徂彿徊很徑徇從徙徘徠徨徭徼忖忻忤忸忱忝悳忿怡恠�怙怐怩怎怱怛怕怫怦怏怺恚恁恪恷恟恊恆恍恣恃恤恂恬恫恙悁悍惧悃悚悄悛悖悗悒悧悋惡悸惠惓悴忰悽惆悵惘慍愕愆惶惷愀惴惺愃愡惻惱愍愎慇愾愨愧慊愿愼愬愴愽慂慄慳慷慘慙慚慫慴慯慥慱慟慝慓慵憙憖憇憬憔憚憊憑憫憮懌懊應懷懈懃懆憺懋罹懍懦懣懶懺懴懿懽懼懾戀戈戉戍戌戔戛戞戡截戮戰戲戳扁扎扞扣扛扠扨扼抂抉找抒抓抖拔抃抔拗拑抻拏拿拆擔拈拜拌拊拂拇抛拉挌拮拱挧挂挈拯拵捐挾捍搜捏掖掎掀掫捶掣掏掉掟掵捫�捩掾揩揀揆揣揉插揶揄搖搴搆搓搦搶攝搗搨搏摧摯摶摎攪撕撓撥撩撈撼據擒擅擇撻擘擂擱擧舉擠擡抬擣擯攬擶擴擲擺攀擽攘攜攅攤攣攫攴攵攷收攸畋效敖敕敍敘敞敝敲數斂斃變斛斟斫斷旃旆旁旄旌旒旛旙无旡旱杲昊昃旻杳昵昶昴昜晏晄晉晁晞晝晤晧晨晟晢晰暃暈暎暉暄暘暝曁暹曉暾暼曄暸曖曚曠昿曦曩曰曵曷朏朖朞朦朧霸朮朿朶杁朸朷杆杞杠杙杣杤枉杰枩杼杪枌枋枦枡枅枷柯枴柬枳柩枸柤柞柝柢柮枹柎柆柧檜栞框栩桀桍栲桎�梳栫桙档桷桿梟梏梭梔條梛梃檮梹桴梵梠梺椏梍桾椁棊椈棘椢椦棡椌棍棔棧棕椶椒椄棗棣椥棹棠棯椨椪椚椣椡棆楹楷楜楸楫楔楾楮椹楴椽楙椰楡楞楝榁楪榲榮槐榿槁槓榾槎寨槊槝榻槃榧樮榑榠榜榕榴槞槨樂樛槿權槹槲槧樅榱樞槭樔槫樊樒櫁樣樓橄樌橲樶橸橇橢橙橦橈樸樢檐檍檠檄檢檣檗蘗檻櫃櫂檸檳檬櫞櫑櫟檪櫚櫪櫻欅蘖櫺欒欖鬱欟欸欷盜欹飮歇歃歉歐歙歔歛歟歡歸歹歿殀殄殃殍殘殕殞殤殪殫殯殲殱殳殷殼毆毋毓毟毬毫毳毯�麾氈氓气氛氤氣汞汕汢汪沂沍沚沁沛汾汨汳沒沐泄泱泓沽泗泅泝沮沱沾沺泛泯泙泪洟衍洶洫洽洸洙洵洳洒洌浣涓浤浚浹浙涎涕濤涅淹渕渊涵淇淦涸淆淬淞淌淨淒淅淺淙淤淕淪淮渭湮渮渙湲湟渾渣湫渫湶湍渟湃渺湎渤滿渝游溂溪溘滉溷滓溽溯滄溲滔滕溏溥滂溟潁漑灌滬滸滾漿滲漱滯漲滌漾漓滷澆潺潸澁澀潯潛濳潭澂潼潘澎澑濂潦澳澣澡澤澹濆澪濟濕濬濔濘濱濮濛瀉瀋濺瀑瀁瀏濾瀛瀚潴瀝瀘瀟瀰瀾瀲灑灣炙炒炯烱炬炸炳炮烟烋烝�烙焉烽焜焙煥煕熈煦煢煌煖煬熏燻熄熕熨熬燗熹熾燒燉燔燎燠燬燧燵燼燹燿爍爐爛爨爭爬爰爲爻爼爿牀牆牋牘牴牾犂犁犇犒犖犢犧犹犲狃狆狄狎狒狢狠狡狹狷倏猗猊猜猖猝猴猯猩猥猾獎獏默獗獪獨獰獸獵獻獺珈玳珎玻珀珥珮珞璢琅瑯琥珸琲琺瑕琿瑟瑙瑁瑜瑩瑰瑣瑪瑶瑾璋璞璧瓊瓏瓔珱瓠瓣瓧瓩瓮瓲瓰瓱瓸瓷甄甃甅甌甎甍甕甓甞甦甬甼畄畍畊畉畛畆畚畩畤畧畫畭畸當疆疇畴疊疉疂疔疚疝疥疣痂疳痃疵疽疸疼疱痍痊痒痙痣痞痾痿�痼瘁痰痺痲痳瘋瘍瘉瘟瘧瘠瘡瘢瘤瘴瘰瘻癇癈癆癜癘癡癢癨癩癪癧癬癰癲癶癸發皀皃皈皋皎皖皓皙皚皰皴皸皹皺盂盍盖盒盞盡盥盧盪蘯盻眈眇眄眩眤眞眥眦眛眷眸睇睚睨睫睛睥睿睾睹瞎瞋瞑瞠瞞瞰瞶瞹瞿瞼瞽瞻矇矍矗矚矜矣矮矼砌砒礦砠礪硅碎硴碆硼碚碌碣碵碪碯磑磆磋磔碾碼磅磊磬磧磚磽磴礇礒礑礙礬礫祀祠祗祟祚祕祓祺祿禊禝禧齋禪禮禳禹禺秉秕秧秬秡秣稈稍稘稙稠稟禀稱稻稾稷穃穗穉穡穢穩龝穰穹穽窈窗窕窘窖窩竈窰�窶竅竄窿邃竇竊竍竏竕竓站竚竝竡竢竦竭竰笂笏笊笆笳笘笙笞笵笨笶筐筺笄筍笋筌筅筵筥筴筧筰筱筬筮箝箘箟箍箜箚箋箒箏筝箙篋篁篌篏箴篆篝篩簑簔篦篥籠簀簇簓篳篷簗簍篶簣簧簪簟簷簫簽籌籃籔籏籀籐籘籟籤籖籥籬籵粃粐粤粭粢粫粡粨粳粲粱粮粹粽糀糅糂糘糒糜糢鬻糯糲糴糶糺紆紂紜紕紊絅絋紮紲紿紵絆絳絖絎絲絨絮絏絣經綉絛綏絽綛綺綮綣綵緇綽綫總綢綯緜綸綟綰緘緝緤緞緻緲緡縅縊縣縡縒縱縟縉縋縢繆繦縻縵縹繃縷�縲縺繧繝繖繞繙繚繹繪繩繼繻纃緕繽辮繿纈纉續纒纐纓纔纖纎纛纜缸缺罅罌罍罎罐网罕罔罘罟罠罨罩罧罸羂羆羃羈羇羌羔羞羝羚羣羯羲羹羮羶羸譱翅翆翊翕翔翡翦翩翳翹飜耆耄耋耒耘耙耜耡耨耿耻聊聆聒聘聚聟聢聨聳聲聰聶聹聽聿肄肆肅肛肓肚肭冐肬胛胥胙胝胄胚胖脉胯胱脛脩脣脯腋隋腆脾腓腑胼腱腮腥腦腴膃膈膊膀膂膠膕膤膣腟膓膩膰膵膾膸膽臀臂膺臉臍臑臙臘臈臚臟臠臧臺臻臾舁舂舅與舊舍舐舖舩舫舸舳艀艙艘艝艚艟艤�艢艨艪艫舮艱艷艸艾芍芒芫芟芻芬苡苣苟苒苴苳苺莓范苻苹苞茆苜茉苙茵茴茖茲茱荀茹荐荅茯茫茗茘莅莚莪莟莢莖茣莎莇莊荼莵荳荵莠莉莨菴萓菫菎菽萃菘萋菁菷萇菠菲萍萢萠莽萸蔆菻葭萪萼蕚蒄葷葫蒭葮蒂葩葆萬葯葹萵蓊葢蒹蒿蒟蓙蓍蒻蓚蓐蓁蓆蓖蒡蔡蓿蓴蔗蔘蔬蔟蔕蔔蓼蕀蕣蕘蕈蕁蘂蕋蕕薀薤薈薑薊薨蕭薔薛藪薇薜蕷蕾薐藉薺藏薹藐藕藝藥藜藹蘊蘓蘋藾藺蘆蘢蘚蘰蘿虍乕虔號虧虱蚓蚣蚩蚪蚋蚌蚶蚯蛄蛆蚰蛉蠣蚫蛔蛞蛩蛬�蛟蛛蛯蜒蜆蜈蜀蜃蛻蜑蜉蜍蛹蜊蜴蜿蜷蜻蜥蜩蜚蝠蝟蝸蝌蝎蝴蝗蝨蝮蝙蝓蝣蝪蠅螢螟螂螯蟋螽蟀蟐雖螫蟄螳蟇蟆螻蟯蟲蟠蠏蠍蟾蟶蟷蠎蟒蠑蠖蠕蠢蠡蠱蠶蠹蠧蠻衄衂衒衙衞衢衫袁衾袞衵衽袵衲袂袗袒袮袙袢袍袤袰袿袱裃裄裔裘裙裝裹褂裼裴裨裲褄褌褊褓襃褞褥褪褫襁襄褻褶褸襌褝襠襞襦襤襭襪襯襴襷襾覃覈覊覓覘覡覩覦覬覯覲覺覽覿觀觚觜觝觧觴觸訃訖訐訌訛訝訥訶詁詛詒詆詈詼詭詬詢誅誂誄誨誡誑誥誦誚誣諄諍諂諚諫諳諧�諤諱謔諠諢諷諞諛謌謇謚諡謖謐謗謠謳鞫謦謫謾謨譁譌譏譎證譖譛譚譫譟譬譯譴譽讀讌讎讒讓讖讙讚谺豁谿豈豌豎豐豕豢豬豸豺貂貉貅貊貍貎貔豼貘戝貭貪貽貲貳貮貶賈賁賤賣賚賽賺賻贄贅贊贇贏贍贐齎贓賍贔贖赧赭赱赳趁趙跂趾趺跏跚跖跌跛跋跪跫跟跣跼踈踉跿踝踞踐踟蹂踵踰踴蹊蹇蹉蹌蹐蹈蹙蹤蹠踪蹣蹕蹶蹲蹼躁躇躅躄躋躊躓躑躔躙躪躡躬躰軆躱躾軅軈軋軛軣軼軻軫軾輊輅輕輒輙輓輜輟輛輌輦輳輻輹轅轂輾轌轉轆轎轗轜�轢轣轤辜辟辣辭辯辷迚迥迢迪迯
邇迴逅迹迺逑逕逡逍逞逖逋逧逶逵逹迸遏遐遑遒逎遉逾遖遘遞遨遯遶隨遲邂遽邁邀邊邉邏邨邯邱邵郢郤扈郛鄂鄒鄙鄲鄰酊酖酘酣酥酩酳酲醋醉醂醢醫醯醪醵醴醺釀釁釉釋釐釖釟釡釛釼釵釶鈞釿鈔鈬鈕鈑鉞鉗鉅鉉鉤鉈銕鈿鉋鉐銜銖銓銛鉚鋏銹銷鋩錏鋺鍄錮錙錢錚錣錺錵錻鍜鍠鍼鍮鍖鎰鎬鎭鎔鎹鏖鏗鏨鏥鏘鏃鏝鏐鏈鏤鐚鐔鐓鐃鐇鐐鐶鐫鐵鐡鐺鑁鑒鑄鑛鑠鑢鑞鑪鈩鑰鑵鑷鑽鑚鑼鑾钁鑿閂閇閊閔閖閘閙�閠閨閧閭閼閻閹閾闊濶闃闍闌闕闔闖關闡闥闢阡阨阮阯陂陌陏陋陷陜陞陝陟陦陲陬隍隘隕隗險隧隱隲隰隴隶隸隹雎雋雉雍襍雜霍雕雹霄霆霈霓霎霑霏霖霙霤霪霰霹霽霾靄靆靈靂靉靜靠靤靦靨勒靫靱靹鞅靼鞁靺鞆鞋鞏鞐鞜鞨鞦鞣鞳鞴韃韆韈韋韜韭齏韲竟韶韵頏頌頸頤頡頷頽顆顏顋顫顯顰顱顴顳颪颯颱颶飄飃飆飩飫餃餉餒餔餘餡餝餞餤餠餬餮餽餾饂饉饅饐饋饑饒饌饕馗馘馥馭馮馼駟駛駝駘駑駭駮駱駲駻駸騁騏騅駢騙騫騷驅驂驀驃�騾驕驍驛驗驟驢驥驤驩驫驪骭骰骼髀髏髑髓體髞髟髢髣髦髯髫髮髴髱髷髻鬆鬘鬚鬟鬢鬣鬥鬧鬨鬩鬪鬮鬯鬲魄魃魏魍魎魑魘魴鮓鮃鮑鮖鮗鮟鮠鮨鮴鯀鯊鮹鯆鯏鯑鯒鯣鯢鯤鯔鯡鰺鯲鯱鯰鰕鰔鰉鰓鰌鰆鰈鰒鰊鰄鰮鰛鰥鰤鰡鰰鱇鰲鱆鰾鱚鱠鱧鱶鱸鳧鳬鳰鴉鴈鳫鴃鴆鴪鴦鶯鴣鴟鵄鴕鴒鵁鴿鴾鵆鵈鵝鵞鵤鵑鵐鵙鵲鶉鶇鶫鵯鵺鶚鶤鶩鶲鷄鷁鶻鶸鶺鷆鷏鷂鷙鷓鷸鷦鷭鷯鷽鸚鸛鸞鹵鹹鹽麁麈麋麌麒麕麑麝麥麩麸麪麭靡黌黎黏黐黔黜點黝黠黥黨黯�黴黶黷黹黻黼黽鼇鼈皷鼕鼡鼬鼾齊齒齔齣齟齠齡齦齧齬齪齷齲齶龕龜龠堯槇遙瑤凜熙����������������������������������������������������������������������������������������纊褜鍈銈蓜俉炻昱棈鋹曻彅丨仡仼伀伃伹佖侒侊侚侔俍偀倢俿倞偆偰偂傔僴僘兊兤冝冾凬刕劜劦勀勛匀匇匤卲厓厲叝﨎咜咊咩哿喆坙坥垬埈埇﨏�塚增墲夋奓奛奝奣妤妺孖寀甯寘寬尞岦岺峵崧嵓﨑嵂嵭嶸嶹巐弡弴彧德忞恝悅悊惞惕愠惲愑愷愰憘戓抦揵摠撝擎敎昀昕昻昉昮昞昤晥晗晙晴晳暙暠暲暿曺朎朗杦枻桒柀栁桄棏﨓楨﨔榘槢樰橫橆橳橾櫢櫤毖氿汜沆汯泚洄涇浯涖涬淏淸淲淼渹湜渧渼溿澈澵濵瀅瀇瀨炅炫焏焄煜煆煇凞燁燾犱犾猤猪獷玽珉珖珣珒琇珵琦琪琩琮瑢璉璟甁畯皂皜皞皛皦益睆劯砡硎硤硺礰礼神祥禔福禛竑竧靖竫箞精絈絜綷綠緖繒罇羡羽茁荢荿菇菶葈蒴蕓蕙�蕫﨟薰蘒﨡蠇裵訒訷詹誧誾諟諸諶譓譿賰賴贒赶﨣軏﨤逸遧郞都鄕鄧釚釗釞釭釮釤釥鈆鈐鈊鈺鉀鈼鉎鉙鉑鈹鉧銧鉷鉸鋧鋗鋙鋐﨧鋕鋠鋓錥錡鋻﨨錞鋿錝錂鍰鍗鎤鏆鏞鏸鐱鑅鑈閒隆﨩隝隯霳霻靃靍靏靑靕顗顥飯飼餧館馞驎髙髜魵魲鮏鮱鮻鰀鵰鵫鶴鸙黑��ⅰⅱⅲⅳⅴⅵⅶⅷⅸⅹ¬¦'"ⅰⅱⅲⅳⅴⅵⅶⅷⅸⅹⅠⅡⅢⅣⅤⅥⅦⅧⅨⅩ¬¦'"㈱№℡∵纊褜鍈銈蓜俉炻昱棈鋹曻彅丨仡仼伀伃伹佖侒侊侚侔俍偀倢俿倞偆偰偂傔僴僘兊�兤冝冾凬刕劜劦勀勛匀匇匤卲厓厲叝﨎咜咊咩哿喆坙坥垬埈埇﨏塚增墲夋奓奛奝奣妤妺孖寀甯寘寬尞岦岺峵崧嵓﨑嵂嵭嶸嶹巐弡弴彧德忞恝悅悊惞惕愠惲愑愷愰憘戓抦揵摠撝擎敎昀昕昻昉昮昞昤晥晗晙晴晳暙暠暲暿曺朎朗杦枻桒柀栁桄棏﨓楨﨔榘槢樰橫橆橳橾櫢櫤毖氿汜沆汯泚洄涇浯涖涬淏淸淲淼渹湜渧渼溿澈澵濵瀅瀇瀨炅炫焏焄煜煆煇凞燁燾犱犾猤猪獷玽珉珖珣珒琇珵琦琪琩琮瑢璉璟甁畯皂皜皞皛皦益睆劯砡硎硤硺礰礼神�祥禔福禛竑竧靖竫箞精絈絜綷綠緖繒罇羡羽茁荢荿菇菶葈蒴蕓蕙蕫﨟薰蘒﨡蠇裵訒訷詹誧誾諟諸諶譓譿賰賴贒赶﨣軏﨤逸遧郞都鄕鄧釚釗釞釭釮釤釥鈆鈐鈊鈺鉀鈼鉎鉙鉑鈹鉧銧鉷鉸鋧鋗鋙鋐﨧鋕鋠鋓錥錡鋻﨨錞鋿錝錂鍰鍗鎤鏆鏞鏸鐱鑅鑈閒隆﨩隝隯霳霻靃靍靏靑靕顗顥飯飼餧館馞驎髙髜魵魲鮏鮱鮻鰀鵰鵫鶴鸙黑���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_ibm437.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_ibm437.cp.go new file mode 100644 index 000000000..ecd0631ef --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_ibm437.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("ibm437.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007fÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞∅∈∩≡±≥≤⌠⌡÷≈°•·√ⁿ²∎\u00a0") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_ibm850.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_ibm850.cp.go new file mode 100644 index 000000000..ea833fa4c --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_ibm850.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("ibm850.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007fÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø׃áíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈıÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´\u00ad±‗¾¶§÷¸°¨·¹³²∎\u00a0") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_ibm866.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_ibm866.cp.go new file mode 100644 index 000000000..338c99c93 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_ibm866.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("ibm866.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007fАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп������������������������������������������������рстуфхцчшщъыьэюяЁё��������������") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-1.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-1.cp.go new file mode 100644 index 000000000..dab316145 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-1.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-1.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0¡¢£¤¥¦§¨©ª«¬\u00ad®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-10.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-10.cp.go new file mode 100644 index 000000000..252aef1e1 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-10.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-10.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0ĄĒĢĪĨĶ§ĻĐŠŦŽ\u00adŪŊ°ąēģīĩķ·ļĐšŧž—ūŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎÏÐŅŌÓÔÕÖŨØŲÚÛÜÝÞßāáâãäåæįčéęëėíîïðņōóôõöũøųúûüýþĸ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-15.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-15.cp.go new file mode 100644 index 000000000..26e0764ac --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-15.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-15.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0¡¢£€¥Š§š©ª«¬\u00ad®¯°±²³Žµ¶·ž¹º»ŒœŸ¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-2.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-2.cp.go new file mode 100644 index 000000000..d8a5f95ed --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-2.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-2.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0Ą˘Ł¤ĽŚ§¨ŠŞŤŹ\u00adŽŻ°ą˛ł´ľśˇ¸šşťź˝žżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-3.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-3.cp.go new file mode 100644 index 000000000..d632e8707 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-3.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-3.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0Ħ˘£¤�Ĥ§¨İŞĞĴ\u00ad�Ż°ħ²³´µĥ·¸ışğĵ½�żÀÁÂ�ÄĊĈÇÈÉÊËÌÍÎÏ�ÑÒÓÔĠÖ×ĜÙÚÛÜŬŜßàáâ�äċĉçèéêëìíîï�ñòóôġö÷ĝùúûüŭŝ˙") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-4.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-4.cp.go new file mode 100644 index 000000000..f9874c17f --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-4.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-4.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0ĄĸŖ¤ĨĻ§¨ŠĒĢŦ\u00adŽ¯°ą˛ŗ´ĩļˇ¸šēģŧŊžŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎĪĐŅŌĶÔÕÖ×ØŲÚÛÜŨŪßāáâãäåæįčéęëėíîīđņōķôõö÷øųúûüũū˙") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-5.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-5.cp.go new file mode 100644 index 000000000..0ab027a0a --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-5.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-5.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0ЁЂЃЄЅІЇЈЉЊЋЌ\u00adЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђѓєѕіїјљњћќ§ўџ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-6.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-6.cp.go new file mode 100644 index 000000000..b1eaf9677 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-6.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-6.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0���¤�������،\u00ad�������������؛���؟�ءآأؤإئابةتثجحخدذرزسشصضطظعغ�����ـفقكلمنهوىيًٌٍَُِّْ�������������") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-7.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-7.cp.go new file mode 100644 index 000000000..1057692b8 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-7.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-7.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0‘’£��¦§¨©�«¬\u00ad�―°±²³΄΅Ά·ΈΉΊ»Ό½ΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡ�ΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύώ�") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-8.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-8.cp.go new file mode 100644 index 000000000..e0e035aaa --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-8.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-8.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0�¢£¤¥¦§¨©×«¬\u00ad®‾°±²³´µ¶·¸¹÷»¼½¾��������������������������������‗אבגדהוזחטיךכלםמןנסעףפץצקרשת�����") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-9.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-9.cp.go new file mode 100644 index 000000000..1c1a5bc68 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-9.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-9.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0¡¢£¤¥¦§¨©ª«¬\u00ad®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖ×ØÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüışÿ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_jisx0201kana.dat.go b/vendor/github.com/rogpeppe/go-charset/data/data_jisx0201kana.dat.go new file mode 100644 index 000000000..a26c174db --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_jisx0201kana.dat.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("jisx0201kana.dat", func() (io.ReadCloser, error) { + r := strings.NewReader("。「」、・ヲァィゥェォャュョッーアイウエオカキクケコサシスセソタチツテトナニヌネノハヒフヘホマミムメモヤユヨラリルレロワン゙゚") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_koi8-r.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_koi8-r.cp.go new file mode 100644 index 000000000..831fae5c4 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_koi8-r.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("koi8-r.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥\u00a0⌡°²·÷═║╒ё╓╔╕╖╗╘╙╚╛╜╝╞╟╠╡Ё╢╣╤╥╦╧╨╩╪╫╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_windows-1250.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_windows-1250.cp.go new file mode 100644 index 000000000..5147af073 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_windows-1250.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("windows-1250.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f€�‚�„…†‡�‰Š‹ŚŤŽŹ�‘’“”•–—�™š›śťžź\u00a0ˇ˘Ł¤Ą¦§¨©Ş«¬\u00ad®Ż°±˛ł´µ¶·¸ąş»Ľ˝ľżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_windows-1251.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_windows-1251.cp.go new file mode 100644 index 000000000..2722b19b8 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_windows-1251.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("windows-1251.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007fЂЃ‚ѓ„…†‡�‰Љ‹ЊЌЋЏђ‘’“”•–—�™љ›њќћџ\u00a0ЎўЈ¤Ґ¦§Ё©Є«¬\u00ad®Ї°±Ііґµ¶·ё№є»јЅѕїАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_windows-1252.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_windows-1252.cp.go new file mode 100644 index 000000000..bf3b67e02 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_windows-1252.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("windows-1252.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f€�‚ƒ„…†‡ˆ‰Š‹Œ�Ž��‘’“”•–—˜™š›œ�žŸ\u00a0¡¢£¤¥¦§¨©ª«¬\u00ad®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/doc.go b/vendor/github.com/rogpeppe/go-charset/data/doc.go new file mode 100644 index 000000000..630a83d53 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/doc.go @@ -0,0 +1,6 @@ +// The data package embeds all the charset +// data files as Go data. It registers the data with the charset +// package as a side effect of its import. 
To use: +// +// import _ "code.google.com/p/go-charset" +package data diff --git a/vendor/github.com/rogpeppe/go-charset/data/generate.go b/vendor/github.com/rogpeppe/go-charset/data/generate.go new file mode 100644 index 000000000..156ee2c63 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/generate.go @@ -0,0 +1,97 @@ +// +build ignore + +// go run generate.go && go fmt + +// The generate-charset-data command generates the Go source code +// for code.google.com/p/go-charset/data from the data files +// found in code.google.com/p/go-charset/datafiles. +// It should be run in the go-charset root directory. +// The resulting Go files will need gofmt'ing. +package main + +import ( + "fmt" + "io/ioutil" + "os" + "path/filepath" + "text/template" +) + +type info struct { + Path string +} + +var tfuncs = template.FuncMap{ + "basename": func(s string) string { + return filepath.Base(s) + }, + "read": func(path string) ([]byte, error) { + return ioutil.ReadFile(path) + }, +} + +var tmpl = template.Must(template.New("").Funcs(tfuncs).Parse(` + // This file is automatically generated by generate-charset-data. + // Do not hand-edit. + + package data + import ( + "code.google.com/p/go-charset/charset" + "io" + "io/ioutil" + "strings" + ) + + func init() { + charset.RegisterDataFile({{basename .Path | printf "%q"}}, func() (io.ReadCloser, error) { + r := strings.NewReader({{read .Path | printf "%q"}}) + return ioutil.NopCloser(r), nil + }) + } +`)) + +var docTmpl = template.Must(template.New("").Funcs(tfuncs).Parse(` + // This file is automatically generated by generate-charset-data. + // Do not hand-edit. + + // The {{basename .Package}} package embeds all the charset + // data files as Go data. It registers the data with the charset + // package as a side effect of its import. To use: + // + // import _ "code.google.com/p/go-charset" + package {{basename .Package}} +`)) + +func main() { + dataDir := filepath.Join("..", "datafiles") + d, err := os.Open(dataDir) + if err != nil { + fatalf("%v", err) + } + names, err := d.Readdirnames(0) + if err != nil { + fatalf("cannot read datafiles dir: %v", err) + } + for _, name := range names { + writeFile("data_"+name+".go", tmpl, info{ + Path: filepath.Join(dataDir, name), + }) + } +} + +func writeFile(name string, t *template.Template, data interface{}) { + w, err := os.Create(name) + if err != nil { + fatalf("cannot create output file: %v", err) + } + defer w.Close() + err = t.Execute(w, data) + if err != nil { + fatalf("template execute %q: %v", name, err) + } +} + +func fatalf(f string, a ...interface{}) { + fmt.Fprintf(os.Stderr, "%s\n", fmt.Sprintf(f, a...)) + os.Exit(2) +} diff --git a/vendor/github.com/rpoletaev/supervisord/Gopkg.lock b/vendor/github.com/rpoletaev/supervisord/Gopkg.lock new file mode 100644 index 000000000..0bbb9ad99 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/Gopkg.lock @@ -0,0 +1,63 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
+ + +[[projects]] + name = "github.com/sirupsen/logrus" + packages = ["."] + revision = "a3f95b5c423586578a4e099b11a46c2479628cac" + version = "1.0.2" + +[[projects]] + name = "github.com/go-ini/ini" + packages = ["."] + revision = "20b96f641a5ea98f2f8619ff4f3e061cff4833bd" + version = "v1.28.2" + +[[projects]] + name = "github.com/gorilla/rpc" + packages = ["."] + revision = "22c016f3df3febe0c1f6727598b6389507e03a18" + version = "v1.1.0" + +[[projects]] + name = "github.com/jessevdk/go-flags" + packages = ["."] + revision = "96dc06278ce32a0e9d957d590bb987c81ee66407" + version = "v1.3.0" + +[[projects]] + branch = "master" + name = "github.com/kardianos/osext" + packages = ["."] + revision = "ae77be60afb1dcacde03767a8c37337fad28ac14" + +[[projects]] + branch = "master" + name = "github.com/rpoletaev/gorilla-xmlrpc" + packages = ["xml"] + revision = "d37a0d21ebabd0d9b59cd868499d6af4673ee9fe" + +[[projects]] + branch = "master" + name = "github.com/rogpeppe/go-charset" + packages = ["charset","data"] + revision = "e9ff06f347d3f5d0013d59ed83754f0e88de10d4" + +[[projects]] + name = "github.com/sevlyar/go-daemon" + packages = ["."] + revision = "1ae26ef5036ad04968706917222a23c535673d8c" + version = "v0.1.1" + +[[projects]] + branch = "master" + name = "golang.org/x/sys" + packages = ["unix"] + revision = "c84c1ab9fd18cdd4c23dd021c10f5f46dea95e46" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + inputs-digest = "d765dd3cd60e45504031ba3cbd8f4b516e314572e0047939f23118993a985b67" + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/vendor/github.com/rpoletaev/supervisord/Gopkg.toml b/vendor/github.com/rpoletaev/supervisord/Gopkg.toml new file mode 100644 index 000000000..b9b3db7f3 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/Gopkg.toml @@ -0,0 +1,46 @@ + +# Gopkg.toml example +# +# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md +# for detailed Gopkg.toml documentation. 
+
+#
+# required = ["github.com/user/thing/cmd/thing"]
+# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
+#
+# [[constraint]]
+#   name = "github.com/user/project"
+#   version = "1.0.0"
+#
+# [[constraint]]
+#   name = "github.com/user/project2"
+#   branch = "dev"
+#   source = "github.com/myfork/project2"
+#
+# [[override]]
+#  name = "github.com/x/y"
+#  version = "2.4.0"
+
+
+[[constraint]]
+  name = "github.com/sirupsen/logrus"
+  version = "1.0.2"
+
+[[constraint]]
+  name = "github.com/go-ini/ini"
+  version = "1.28.2"
+
+[[constraint]]
+  name = "github.com/gorilla/rpc"
+  version = "1.1.0"
+
+[[constraint]]
+  name = "github.com/jessevdk/go-flags"
+  version = "1.3.0"
+
+[[constraint]]
+  branch = "master"
+  name = "github.com/rpoletaev/gorilla-xmlrpc"
+
+[[constraint]]
+  name = "github.com/sevlyar/go-daemon"
+  version = "0.1.1"
diff --git a/vendor/github.com/rpoletaev/supervisord/LICENSE b/vendor/github.com/rpoletaev/supervisord/LICENSE
new file mode 100644
index 000000000..6713cd967
--- /dev/null
+++ b/vendor/github.com/rpoletaev/supervisord/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2017 Steven Ou
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/rpoletaev/supervisord/README.md b/vendor/github.com/rpoletaev/supervisord/README.md
new file mode 100644
index 000000000..2d4384a5e
--- /dev/null
+++ b/vendor/github.com/rpoletaev/supervisord/README.md
@@ -0,0 +1,161 @@
+# Why this project?
+
+The python script supervisord is a powerful tool used by many people to manage processes. I like the tool supervisord too.
+
+But this tool requires the full python environment to be installed. In some situations, for example in a docker environment, python is too big for us.
+
+In this project, supervisord is re-implemented in go-lang. The compiled supervisord is well suited to environments where python is not installed.
+
+# Compile the supervisord
+
+Before compiling supervisord, make sure go-lang is installed in your environment. 
+
+To compile the go-lang version of supervisord, run the following commands:
+
+```shell
+$ mkdir ~/go-supervisor
+$ export GOPATH=~/go-supervisor
+$ go get -u github.com/ochinchina/supervisord
+```
+
+# Run the supervisord
+
+After the supervisord binary is generated, create a supervisord configuration file and start supervisord like below:
+
+```shell
+$ cat supervisor.conf
+[program:test]
+command = /your/program args
+$ supervisord -c supervisor.conf
+```
+# Run as daemon
+Add the inet interface to your configuration:
+```ini
+[inet_http_server]
+port=127.0.0.1:9001
+```
+then run
+```shell
+$ supervisord -c supervisor.conf -d
+```
+In order to control the daemon, you can use the `$ supervisord ctl` subcommand; the available commands are: `status`, `start`, `stop`, `shutdown`, `reload`.
+
+```shell
+$ supervisord ctl status
+$ supervisord ctl stop
+$ supervisord ctl stop all
+$ supervisord ctl start
+$ supervisord ctl start all
+$ supervisord ctl shutdown
+$ supervisord ctl reload
+$ supervisord ctl signal ...
+$ supervisord ctl signal all
+```
+
+The URL of supervisord used by the "supervisord ctl" subcommand is detected in the following order:
+
+- check if the -s or --serverurl option is present; if so, use this URL
+- check if the -c option is present and "serverurl" is set in the "supervisorctl" section; if so, use that "serverurl"
+- otherwise, return http://localhost:9001
+
+# Check the version
+
+The "version" command shows the current supervisord version.
+
+```shell
+$ supervisord version
+```
+
+# Supported features
+
+## http server
+
+Both the unix socket and the TCP http server are supported. Basic auth is supported.
+
+The unix socket setting is in the "unix_http_server" section.
+The TCP http server setting is in the "inet_http_server" section.
+
+If neither "inet_http_server" nor "unix_http_server" is configured in the configuration file, no http server will be started.
+
+## supervisord information
+
+The log and pid file of the supervisord process are configured in the "supervisord" section.
+
+## program
+
+The following settings are supported in the "program:x" section:
+
+- program command
+- process name
+- numprocs
+- numprocs_start
+- autostart
+- startsecs
+- startretries
+- autorestart
+- exitcodes
+- stopsignal
+- stopwaitsecs
+- stdout_logfile
+- stdout_logfile_maxbytes
+- stdout_logfile_backups
+- redirect_stderr
+- stderr_logfile
+- stderr_logfile_maxbytes
+- stderr_logfile_backups
+- environment
+- priority
+- user
+- directory
+
+### program extends
+
+The following new keys are supported in the [program:xxx] section:
+
+- depends_on: defines program dependencies. If program A depends on programs B and C, then B and C will be started before program A. Example:
+
+```ini
+[program:A]
+depends_on = B, C
+
+[program:B]
+...
+[program:C]
+...
+```
+
+- user: the "user" setting in the "program:xxx" section is extended to support a group in the format "user[:group]". So "user" can be configured as:
+
+```ini
+[program:xxx]
+user = user_name
+...
+```
+or
+```ini
+[program:xxx]
+user = user_name:group_name
+...
+```
+## Group
+The "group" section is supported and you can set the "programs" item.
+
+## Events
+
+The supervisor 3.x defined events are supported partially. 
Now it supports following events: + +- all process state related events +- process communication event +- remote communication event +- tick related events +- process log related events + +# The MIT License (MIT) + +Copyright (c) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/rpoletaev/supervisord/circle.yml b/vendor/github.com/rpoletaev/supervisord/circle.yml new file mode 100644 index 000000000..41481db33 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/circle.yml @@ -0,0 +1,9 @@ +deployment: + master: + branch: [master] + commands: + - go version + - go get github.com/mitchellh/gox + - go get github.com/tcnksm/ghr + - gox -output "dist/supervisord_{{.OS}}_{{.Arch}}" -osarch="linux/amd64 linux/386 darwin/amd64" + - ghr -t $GITHUB_TOKEN -u $CIRCLE_PROJECT_USERNAME -r $CIRCLE_PROJECT_REPONAME --replace v1.0.0 dist/ \ No newline at end of file diff --git a/vendor/github.com/rpoletaev/supervisord/config/config.go b/vendor/github.com/rpoletaev/supervisord/config/config.go new file mode 100644 index 000000000..b1e9a6076 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/config/config.go @@ -0,0 +1,558 @@ +package config + +import ( + "bytes" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "regexp" + "strconv" + "strings" + + ini "github.com/ochinchina/go-ini" + log "github.com/sirupsen/logrus" +) + +type ConfigEntry struct { + ConfigDir string + Group string + Name string + keyValues map[string]string +} + +func (c *ConfigEntry) IsProgram() bool { + return strings.HasPrefix(c.Name, "program:") +} + +func (c *ConfigEntry) GetProgramName() string { + if strings.HasPrefix(c.Name, "program:") { + return c.Name[len("program:"):] + } + return "" +} + +func (c *ConfigEntry) IsEventListener() bool { + return strings.HasPrefix(c.Name, "eventlistener:") +} + +func (c *ConfigEntry) GetEventListenerName() string { + if strings.HasPrefix(c.Name, "eventlistener:") { + return c.Name[len("eventlistener:"):] + } + return "" +} + +func (c *ConfigEntry) IsGroup() bool { + return strings.HasPrefix(c.Name, "group:") +} + +// get the group name if this entry is group +func (c *ConfigEntry) GetGroupName() string { + if strings.HasPrefix(c.Name, "group:") { + return c.Name[len("group:"):] + } + return "" +} + +// get the programs from the group +func (c *ConfigEntry) GetPrograms() []string { + if c.IsGroup() { + r := c.GetStringArray("programs", ",") + for i, p := range r { + r[i] = strings.TrimSpace(p) + } + return r + } + return make([]string, 0) +} + +// add key value entry +func (c *ConfigEntry) 
AddKeyValue(key, value string) { + c.keyValues[key] = value +} + +func (c *ConfigEntry) setGroup(group string) { + c.Group = group +} + +// dump the configuration as string +func (c *ConfigEntry) String() string { + buf := bytes.NewBuffer(make([]byte, 0)) + fmt.Fprintf(buf, "configDir=%s\n", c.ConfigDir) + fmt.Fprintf(buf, "group=%s\n", c.Group) + for k, v := range c.keyValues { + fmt.Fprintf(buf, "%s=%s\n", k, v) + } + return buf.String() + +} + +type Config struct { + configFile string + //mapping between the section name and the configure + entries map[string]*ConfigEntry + + ProgramGroup *ProcessGroup +} + +func NewConfigEntry(configDir string) *ConfigEntry { + return &ConfigEntry{configDir, "", "", make(map[string]string)} +} + +func NewConfig(configFile string) *Config { + return &Config{configFile, make(map[string]*ConfigEntry), NewProcessGroup()} +} + +//create a new entry or return the already-exist entry +func (c *Config) createEntry(name string, configDir string) *ConfigEntry { + entry, ok := c.entries[name] + + if !ok { + entry = NewConfigEntry(configDir) + c.entries[name] = entry + } + return entry +} + +// +// return the loaded programs +func (c *Config) Load() ([]string, error) { + ini := ini.NewIni() + c.ProgramGroup = NewProcessGroup() + ini.LoadFile(c.configFile) + + includeFiles := c.getIncludeFiles(ini) + for _, f := range includeFiles { + ini.LoadFile(f) + } + return c.parse(ini), nil +} + +func (c *Config) getIncludeFiles(cfg *ini.Ini) []string { + result := make([]string, 0) + if includeSection, err := cfg.GetSection("include"); err == nil { + key, err := includeSection.GetValue("files") + if err == nil { + env := NewStringExpression("here", c.GetConfigFileDir()) + files := strings.Fields(key) + for _, f_raw := range files { + dir := c.GetConfigFileDir() + f, err := env.Eval(f_raw) + if err != nil { + continue + } + if filepath.IsAbs(f) { + dir = filepath.Dir(f) + } + fileInfos, err := ioutil.ReadDir(dir) + if err == nil { + goPattern := toRegexp(filepath.Base(f)) + for _, fileInfo := range fileInfos { + if matched, err := regexp.MatchString(goPattern, fileInfo.Name()); matched && err == nil { + result = append(result, filepath.Join(dir, fileInfo.Name())) + } + } + } + + } + } + } + return result + +} + +func (c *Config) parse(cfg *ini.Ini) []string { + c.parseGroup(cfg) + loaded_programs := c.parseProgram(cfg) + + //parse non-group,non-program and non-eventlistener sections + for _, section := range cfg.Sections() { + if !strings.HasPrefix(section.Name, "group:") && !strings.HasPrefix(section.Name, "program:") && !strings.HasPrefix(section.Name, "eventlistener:") { + entry := c.createEntry(section.Name, c.GetConfigFileDir()) + c.entries[section.Name] = entry + entry.parse(section) + } + } + return loaded_programs +} + +func (c *Config) GetConfigFileDir() string { + return filepath.Dir(c.configFile) +} + +//convert supervisor file pattern to the go regrexp +func toRegexp(pattern string) string { + tmp := strings.Split(pattern, ".") + for i, t := range tmp { + s := strings.Replace(t, "*", ".*", -1) + tmp[i] = strings.Replace(s, "?", ".", -1) + } + return strings.Join(tmp, "\\.") +} + +//get the unix_http_server section +func (c *Config) GetUnixHttpServer() (*ConfigEntry, bool) { + entry, ok := c.entries["unix_http_server"] + + return entry, ok +} + +//get the supervisord section +func (c *Config) GetSupervisord() (*ConfigEntry, bool) { + entry, ok := c.entries["supervisord"] + return entry, ok +} + +// Get the inet_http_server configuration section +func (c *Config) 
GetInetHttpServer() (*ConfigEntry, bool) { + entry, ok := c.entries["inet_http_server"] + return entry, ok +} + +func (c *Config) GetSupervisorctl() (*ConfigEntry, bool) { + entry, ok := c.entries["supervisorctl"] + return entry, ok +} +func (c *Config) GetEntries(filterFunc func(entry *ConfigEntry) bool) []*ConfigEntry { + result := make([]*ConfigEntry, 0) + for _, entry := range c.entries { + if filterFunc(entry) { + result = append(result, entry) + } + } + return result +} +func (c *Config) GetGroups() []*ConfigEntry { + return c.GetEntries(func(entry *ConfigEntry) bool { + return entry.IsGroup() + }) +} + +func (c *Config) GetPrograms() []*ConfigEntry { + programs := c.GetEntries(func(entry *ConfigEntry) bool { + return entry.IsProgram() + }) + + return sortProgram(programs) +} + +func (c *Config) GetEventListeners() []*ConfigEntry { + eventListeners := c.GetEntries(func(entry *ConfigEntry) bool { + return entry.IsEventListener() + }) + + return eventListeners +} + +func (c *Config) GetProgramNames() []string { + result := make([]string, 0) + programs := c.GetPrograms() + + programs = sortProgram(programs) + for _, entry := range programs { + result = append(result, entry.GetProgramName()) + } + return result +} + +//return the proram configure entry or nil +func (c *Config) GetProgram(name string) *ConfigEntry { + for _, entry := range c.entries { + if entry.IsProgram() && entry.GetProgramName() == name { + return entry + } + } + return nil +} + +// get value of key as bool +func (c *ConfigEntry) GetBool(key string, defValue bool) bool { + value, ok := c.keyValues[key] + + if ok { + b, err := strconv.ParseBool(value) + if err == nil { + return b + } + } + return defValue +} + +// check if has parameter +func (c *ConfigEntry) HasParameter(key string) bool { + _, ok := c.keyValues[key] + return ok +} + +func toInt(s string, factor int, defValue int) int { + i, err := strconv.Atoi(s) + if err == nil { + return i * factor + } + return defValue +} + +// get the value of the key as int +func (c *ConfigEntry) GetInt(key string, defValue int) int { + value, ok := c.keyValues[key] + + if ok { + return toInt(value, 1, defValue) + } + return defValue +} + +// get the value of key as environment setting. 
An enviroment string example: +// environment = A="env 1",B="this is a test" +func (c *ConfigEntry) GetEnv(key string) []string { + value, ok := c.keyValues[key] + env := make([]string, 0) + + if ok { + start := 0 + n := len(value) + var i int + for { + for i = start; i < n && value[i] != '='; { + i++ + } + key := value[start:i] + start = i + 1 + if value[start] == '"' { + for i = start + 1; i < n && value[i] != '"'; { + i++ + } + if i < n { + env = append(env, fmt.Sprintf("%s=%s", key, value[start+1:i])) + } + if i+1 < n && value[i+1] == ',' { + start = i + 2 + } else { + break + } + } else { + for i = start; i < n && value[i] != ','; { + i++ + } + if i < n { + env = append(env, fmt.Sprintf("%s=%s", key, value[start:i])) + start = i + 1 + } else { + env = append(env, fmt.Sprintf("%s=%s", key, value[start:])) + break + } + } + } + } + + result := make([]string, 0) + for i := 0; i < len(env); i++ { + tmp, err := NewStringExpression("program_name", c.GetProgramName(), + "process_num", c.GetString("process_num", "0"), + "group_name", c.GetGroupName(), + "here", c.ConfigDir).Eval(env[i]) + if err == nil { + result = append(result, tmp) + } + } + return result +} + +//get the value of key as string +func (c *ConfigEntry) GetString(key string, defValue string) string { + s, ok := c.keyValues[key] + + if ok { + env := NewStringExpression("here", c.ConfigDir) + rep_s, err := env.Eval(s) + if err == nil { + return rep_s + } else { + log.WithFields(log.Fields{ + log.ErrorKey: err, + "program": c.GetProgramName(), + "key": key, + }).Warn("Unable to parse expression") + } + } + return defValue +} + +//get the value of key as string and attempt to parse it with StringExpression +func (c *ConfigEntry) GetStringExpression(key string, defValue string) string { + s, ok := c.keyValues[key] + if !ok || s == "" { + return "" + } + + host_name, err := os.Hostname() + if err != nil { + host_name = "Unknown" + } + result, err := NewStringExpression("program_name", c.GetProgramName(), + "process_num", c.GetString("process_num", "0"), + "group_name", c.GetGroupName(), + "here", c.ConfigDir, + "host_node_name", host_name).Eval(s) + + if err != nil { + log.WithFields(log.Fields{ + log.ErrorKey: err, + "program": c.GetProgramName(), + "key": key, + }).Warn("unable to parse expression") + return s + } + + return result +} + +func (c *ConfigEntry) GetStringArray(key string, sep string) []string { + s, ok := c.keyValues[key] + + if ok { + return strings.Split(s, sep) + } + return make([]string, 0) +} + +// get the value of key as the bytes setting. 
+// +// logSize=1MB +// logSize=1GB +// logSize=1KB +// logSize=1024 +// +func (c *ConfigEntry) GetBytes(key string, defValue int) int { + v, ok := c.keyValues[key] + + if ok { + if len(v) > 2 { + lastTwoBytes := v[len(v)-2:] + if lastTwoBytes == "MB" { + return toInt(v[:len(v)-2], 1024*1024, defValue) + } else if lastTwoBytes == "GB" { + return toInt(v[:len(v)-2], 1024*1024*1024, defValue) + } else if lastTwoBytes == "KB" { + return toInt(v[:len(v)-2], 1024, defValue) + } + } + return toInt(v, 1, defValue) + } + return defValue +} + +func (c *ConfigEntry) parse(section *ini.Section) { + c.Name = section.Name + for _, key := range section.Keys() { + c.keyValues[key.Name()] = key.ValueWithDefault("") + } +} + +func (c *Config) parseGroup(cfg *ini.Ini) { + + //parse the group at first + for _, section := range cfg.Sections() { + if strings.HasPrefix(section.Name, "group:") { + entry := c.createEntry(section.Name, c.GetConfigFileDir()) + entry.parse(section) + groupName := entry.GetGroupName() + programs := entry.GetPrograms() + for _, program := range programs { + c.ProgramGroup.Add(groupName, program) + } + } + } +} + +func (c *Config) isProgramOrEventListener(section *ini.Section) (bool, string) { + //check if it is a program or event listener section + is_program := strings.HasPrefix(section.Name, "program:") + is_event_listener := strings.HasPrefix(section.Name, "eventlistener:") + prefix := "" + if is_program { + prefix = "program:" + } else if is_event_listener { + prefix = "eventlistener:" + } + return is_program || is_event_listener, prefix +} + +// parse the sections starts with "program:" prefix. +// +// Return all the parsed program names in the ini +func (c *Config) parseProgram(cfg *ini.Ini) []string { + loaded_programs := make([]string, 0) + for _, section := range cfg.Sections() { + + program_or_event_listener, prefix := c.isProgramOrEventListener(section) + + //if it is program or event listener + if program_or_event_listener { + //get the number of processes + numProcs, err := section.GetInt("numprocs") + programName := section.Name[len(prefix):] + if err != nil { + numProcs = 1 + } + procName, err := section.GetValue("process_name") + if numProcs > 1 { + if err != nil || strings.Index(procName, "%(process_num)") == -1 { + log.WithFields(log.Fields{ + "numprocs": numProcs, + "process_name": procName, + }).Error("no process_num in process name") + } + } + originalProcName := programName + if err == nil { + originalProcName = procName + } + + for i := 1; i <= numProcs; i++ { + envs := NewStringExpression("program_name", programName, + "process_num", fmt.Sprintf("%d", i), + "group_name", c.ProgramGroup.GetGroup(programName, programName), + "here", c.GetConfigFileDir()) + cmd, err := envs.Eval(section.GetValueWithDefault("command", "")) + if err != nil { + continue + } + section.Add("command", cmd) + + procName, err := envs.Eval(originalProcName) + if err != nil { + continue + } + + section.Add("process_name", procName) + section.Add("numprocs_start", fmt.Sprintf("%d", (i-1))) + section.Add("process_num", fmt.Sprintf("%d", i)) + entry := c.createEntry(procName, c.GetConfigFileDir()) + entry.parse(section) + entry.Name = prefix + procName + group := c.ProgramGroup.GetGroup(programName, programName) + entry.Group = group + loaded_programs = append(loaded_programs, procName) + } + } + } + return loaded_programs + +} + +func (c *Config) String() string { + buf := bytes.NewBuffer(make([]byte, 0)) + fmt.Fprintf(buf, "configFile:%s\n", c.configFile) + for k, v := range c.entries { + 
fmt.Fprintf(buf, "[program:%s]\n", k) + fmt.Fprintf(buf, "%s\n", v.String()) + } + return buf.String() +} + +func (c *Config) RemoveProgram(programName string) { + delete(c.entries, fmt.Sprintf("program:%s", programName)) + c.ProgramGroup.Remove(programName) +} diff --git a/vendor/github.com/rpoletaev/supervisord/config/process_group.go b/vendor/github.com/rpoletaev/supervisord/config/process_group.go new file mode 100644 index 000000000..264995bdf --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/config/process_group.go @@ -0,0 +1,114 @@ +package config + +import ( + "bytes" + "strings" + + "github.com/rpoletaev/supervisord/util" +) + +type ProcessGroup struct { + //mapping between the program and its group + processGroup map[string]string +} + +func NewProcessGroup() *ProcessGroup { + return &ProcessGroup{processGroup: make(map[string]string)} +} + +// clone the process group +func (pg *ProcessGroup) Clone() *ProcessGroup { + new_pg := NewProcessGroup() + for k, v := range pg.processGroup { + new_pg.processGroup[k] = v + } + return new_pg +} + +func (pg *ProcessGroup) Sub(other *ProcessGroup) (added []string, changed []string, removed []string) { + thisGroup := pg.GetAllGroup() + otherGroup := other.GetAllGroup() + added = util.Sub(thisGroup, otherGroup) + changed = make([]string, 0) + removed = util.Sub(otherGroup, thisGroup) + + for _, group := range thisGroup { + proc_1 := pg.GetAllProcess(group) + proc_2 := other.GetAllProcess(group) + if len(proc_2) > 0 && !util.IsSameStringArray(proc_1, proc_2) { + changed = append(changed, group) + } + } + return +} + +//add a process to a group +func (pg *ProcessGroup) Add(group string, procName string) { + pg.processGroup[procName] = group +} + +//remove a process +func (pg *ProcessGroup) Remove(procName string) { + delete(pg.processGroup, procName) +} + +//get all the groups +func (pg *ProcessGroup) GetAllGroup() []string { + groups := make(map[string]bool) + for _, group := range pg.processGroup { + groups[group] = true + } + + result := make([]string, 0) + for group, _ := range groups { + result = append(result, group) + } + return result +} + +// get all the processes in a group +func (pg *ProcessGroup) GetAllProcess(group string) []string { + result := make([]string, 0) + for procName, groupName := range pg.processGroup { + if group == groupName { + result = append(result, procName) + } + } + return result +} + +// check if a process belongs to a group or not +func (pg *ProcessGroup) InGroup(procName string, group string) bool { + groupName, ok := pg.processGroup[procName] + if ok && group == groupName { + return true + } + return false +} + +func (pg *ProcessGroup) ForEachProcess(procFunc func(group string, procName string)) { + for procName, groupName := range pg.processGroup { + procFunc(groupName, procName) + } +} + +func (pg *ProcessGroup) GetGroup(procName string, defGroup string) string { + group, ok := pg.processGroup[procName] + + if ok { + return group + } + pg.processGroup[procName] = defGroup + return defGroup +} + +func (pg *ProcessGroup) String() string { + buf := bytes.NewBuffer(make([]byte, 0)) + for _, group := range pg.GetAllGroup() { + buf.WriteString(group) + buf.WriteString(":") + buf.WriteString(strings.Join(pg.GetAllProcess(group), ",")) + buf.WriteString(";") + } + return buf.String() +} diff --git a/vendor/github.com/rpoletaev/supervisord/config/process_sort.go b/vendor/github.com/rpoletaev/supervisord/config/process_sort.go new file mode 100644 index 000000000..bbd902f49 --- /dev/null +++ 
b/vendor/github.com/rpoletaev/supervisord/config/process_sort.go @@ -0,0 +1,159 @@ +package config + +import ( + "sort" + "strings" +) + +type ProgramByPriority []*ConfigEntry + +func (p ProgramByPriority) Len() int { + return len(p) +} + +func (p ProgramByPriority) Swap(i, j int) { + p[i], p[j] = p[j], p[i] +} + +func (p ProgramByPriority) Less(i, j int) bool { + return p[i].GetInt("priority", 999) < p[j].GetInt("priority", 999) +} + +type ProcessSorter struct { + depends_on_gragh map[string][]string + procs_without_depends []*ConfigEntry +} + +func NewProcessSorter() *ProcessSorter { + return &ProcessSorter{depends_on_gragh: make(map[string][]string), + procs_without_depends: make([]*ConfigEntry, 0)} +} + +func (p *ProcessSorter) initDepends(program_configs []*ConfigEntry) { + //sort by depends_on + for _, config := range program_configs { + if config.IsProgram() && config.HasParameter("depends_on") { + depends_on := config.GetString("depends_on", "") + prog_name := config.GetProgramName() + for _, depends_on_prog := range strings.Split(depends_on, ",") { + depends_on_prog = strings.TrimSpace(depends_on_prog) + if depends_on_prog != "" { + if _, ok := p.depends_on_gragh[prog_name]; !ok { + p.depends_on_gragh[prog_name] = make([]string, 0) + } + p.depends_on_gragh[prog_name] = append(p.depends_on_gragh[prog_name], depends_on_prog) + + } + } + } + } + +} + +func (p *ProcessSorter) initProgramWithoutDepends(program_configs []*ConfigEntry) { + depends_on_programs := p.getDependsOnInfo() + for _, config := range program_configs { + if config.IsProgram() { + if _, ok := depends_on_programs[config.GetProgramName()]; !ok { + p.procs_without_depends = append(p.procs_without_depends, config) + } + } + } +} + +func (p *ProcessSorter) getDependsOnInfo() map[string]string { + depends_on_programs := make(map[string]string) + + for k, v := range p.depends_on_gragh { + depends_on_programs[k] = k + for _, t := range v { + depends_on_programs[t] = t + } + } + + return depends_on_programs +} + +func (p *ProcessSorter) sortDepends() []string { + finished_programs := make(map[string]string) + progs_with_depends_info := p.getDependsOnInfo() + progs_start_order := make([]string, 0) + + //get all process without depends + for prog_name, _ := range progs_with_depends_info { + if _, ok := p.depends_on_gragh[prog_name]; !ok { + finished_programs[prog_name] = prog_name + progs_start_order = append(progs_start_order, prog_name) + } + } + + for len(finished_programs) < len(progs_with_depends_info) { + for prog_name, _ := range p.depends_on_gragh { + if _, ok := finished_programs[prog_name]; !ok && p.inFinishedPrograms(prog_name, finished_programs) { + finished_programs[prog_name] = prog_name + progs_start_order = append(progs_start_order, prog_name) + } + } + } + + return progs_start_order +} + +func (p *ProcessSorter) inFinishedPrograms(program_name string, finished_programs map[string]string) bool { + if depends_on, ok := p.depends_on_gragh[program_name]; ok { + for _, depend_program := range depends_on { + if _, finished := finished_programs[depend_program]; !finished { + return false + } + } + } + return true +} + +/*func (p *ProcessSorter) SortProcess(procs []*Process) []*Process { + prog_configs := make([]*ConfigEntry, 0) + for _, proc := range procs { + if proc.config.IsProgram() { + prog_configs = append(prog_configs, proc.config) + } + } + + result := make([]*Process, 0) + for _, config := range p.SortProgram(prog_configs) { + for _, proc := range procs { + if proc.config == config { + result = 
append(result, proc) + } + } + } + + return result +}*/ + +func (p *ProcessSorter) SortProgram(program_configs []*ConfigEntry) []*ConfigEntry { + p.initDepends(program_configs) + p.initProgramWithoutDepends(program_configs) + result := make([]*ConfigEntry, 0) + + for _, prog := range p.sortDepends() { + for _, config := range program_configs { + if config.IsProgram() && config.GetProgramName() == prog { + result = append(result, config) + } + } + } + + sort.Sort(ProgramByPriority(p.procs_without_depends)) + for _, p := range p.procs_without_depends { + result = append(result, p) + } + return result +} + +/*func sortProcess(procs []*Process) []*Process { + return NewProcessSorter().SortProcess(procs) +}*/ + +func sortProgram(configs []*ConfigEntry) []*ConfigEntry { + return NewProcessSorter().SortProgram(configs) +} diff --git a/vendor/github.com/rpoletaev/supervisord/config/string_expression.go b/vendor/github.com/rpoletaev/supervisord/config/string_expression.go new file mode 100644 index 000000000..30933e44a --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/config/string_expression.go @@ -0,0 +1,88 @@ +package config + +import ( + "fmt" + "os" + "strconv" + "strings" +) + +type StringExpression struct { + env map[string]string +} + +func NewStringExpression(envs ...string) *StringExpression { + se := &StringExpression{env: make(map[string]string)} + + for _, env := range os.Environ() { + t := strings.Split(env, "=") + se.env["ENV_"+t[0]] = t[1] + } + n := len(envs) + for i := 0; i+1 < n; i += 2 { + se.env[envs[i]] = envs[i+1] + } + + hostname, err := os.Hostname() + if err == nil { + se.env["host_node_name"] = hostname + } + + return se + +} + +func (se *StringExpression) Add(key string, value string) *StringExpression { + se.env[key] = value + return se +} + +func (se *StringExpression) Eval(s string) (string, error) { + for { + //find variable start indicator + start := strings.Index(s, "%(") + + if start == -1 { + return s, nil + } + + end := start + 1 + n := len(s) + + //find variable end indicator + for end < n && s[end] != ')' { + end++ + } + + //find the type of the variable + typ := end + 1 + for typ < n && !((s[typ] >= 'a' && s[typ] <= 'z') || (s[typ] >= 'A' && s[typ] <= 'Z')) { + typ++ + } + + //evaluate the variable + if typ < n { + varName := s[start+2 : end] + + varValue, ok := se.env[varName] + + if !ok { + return "", fmt.Errorf("fail to find the environment variable %s", varName) + } + if s[typ] == 'd' { + i, err := strconv.Atoi(varValue) + if err != nil { + return "", fmt.Errorf("can't convert %s to integer", varValue) + } + s = s[0:start] + fmt.Sprintf("%"+s[end+1:typ+1], i) + s[typ+1:] + } else if s[typ] == 's' { + s = s[0:start] + varValue + s[typ+1:] + } else { + return "", fmt.Errorf("not implement type:%v", s[typ]) + } + } else { + return "", fmt.Errorf("invalid string expression format") + } + } + +} diff --git a/vendor/github.com/rpoletaev/supervisord/config_template.go b/vendor/github.com/rpoletaev/supervisord/config_template.go new file mode 100644 index 000000000..760cc4174 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/config_template.go @@ -0,0 +1,137 @@ +package main + +import ( + "io" + "os" +) + +var config_template = `[unix_http_server] +file=/tmp/supervisord.sock +#chmod=not support +#chown=not support +username=test1 +password={SHA}82ab876d1387bfafe46cc1c8a2ef074eae50cb1d + +[inet_http_server] +port=127.0.0.1:9001 +username=test1 +password=thepassword + +[supervisord] +logfile=%(here)s/supervisord.log +logfile_maxbytes=50MB 
+logfile_backups=10 +loglevel=info +pidfile=%(here)s/supervisord.pid +#umask=not support +#nodaemon=not support +#minfds=not support +#minprocs=not support +#nocleanup=not support +#childlogdir=not support +#user=not support +#directory=not support +#strip_ansi=not support +#environment=not support +identifier=supervisor + +[program:x] +command=/bin/cat +process_name=%(program_name)s +numprocs=1 +#numprocs_start=not support +autostart=true +startsecs=3 +startretries=3 +autorestart=true +exitcodes=0,2 +stopsignal=TERM +stopwaitsecs=10 +#stopasgroup=not support +#killasgroup=not support +user=user1 +redirect_stderr=false +stdout_logfile=AUTO +stdout_logfile_maxbytes=50MB +stdout_logfile_backups=10 +stdout_capture_maxbytes=0 +stdout_events_enabled=true +stderr_logfile=AUTO +stderr_logfile_maxbytes=50MB +stderr_logfile_backups=10 +stderr_capture_maxbytes=0 +stderr_events_enabled=false +environment=KEY="val",KEY2="val2" +directory=/tmp +#umask=not support +serverurl=AUTO + +[include] +files=/an/absolute/filename.conf /an/absolute/*.conf foo.conf config??.conf + +[group:x] +programs=bar,baz +priority=999 + +[eventlistener:x] +command=/bin/eventlistener +process_name=%(program_name)s +numprocs=1 +#numprocs_start=not support +autostart=true +startsecs=3 +startretries=3 +autorestart=true +exitcodes=0,2 +stopsignal=TERM +stopwaitsecs=10 +#stopasgroup=not support +#killasgroup=not support +user=user1 +redirect_stderr=false +stdout_logfile=AUTO +stdout_logfile_maxbytes=50MB +stdout_logfile_backups=10 +stdout_capture_maxbytes=0 +stdout_events_enabled=true +stderr_logfile=AUTO +stderr_logfile_maxbytes=50MB +stderr_logfile_backups=10 +stderr_capture_maxbytes=0 +stderr_events_enabled=false +environment=KEY="val",KEY2="val2" +directory=/tmp +#umask=not support +serverurl=AUTO +buffer_size=10240 +events=PROCESS_STATE +#result_handler=not support +` + +type InitTemplateCommand struct { + OutFile string `short:"o" long:"output" description:"the output file name" required:"true"` +} + +var initTemplateCommand InitTemplateCommand + +func (x *InitTemplateCommand) Execute(args []string) error { + f, err := os.Create(x.OutFile) + if err != nil { + return err + } + defer f.Close() + return GenTemplate(f) +} + +func GenTemplate(writer io.Writer) error { + _, err := writer.Write([]byte(config_template)) + return err +} + +func init() { + parser.AddCommand("init", + "initialize a template", + "The init subcommand writes the supported configurations to specified file", + &initTemplateCommand) + +} diff --git a/vendor/github.com/rpoletaev/supervisord/content_checker.go b/vendor/github.com/rpoletaev/supervisord/content_checker.go new file mode 100644 index 000000000..6c0b7d3e3 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/content_checker.go @@ -0,0 +1,149 @@ +package main + +import ( + "fmt" + "net" + "net/http" + "os/exec" + "strings" + "time" +) + +type ContentChecker interface { + Check() bool +} + +type BaseChecker struct { + data string + includes []string + //timeout in second + timeoutTime time.Time + notifyChannel chan string +} + +func NewBaseChecker(includes []string, timeout int) *BaseChecker { + return &BaseChecker{data: "", + includes: includes, + timeoutTime: time.Now().Add(time.Duration(timeout) * time.Second), + notifyChannel: make(chan string, 1)} +} + +func (bc *BaseChecker) Write(b []byte) (int, error) { + bc.notifyChannel <- string(b) + return len(b), nil +} + +func (bc *BaseChecker) isReady() bool { + find_all := true + for _, include := range bc.includes { + if strings.Index(bc.data, 
include) == -1 { + find_all = false + break + } + } + return find_all +} +func (bc *BaseChecker) Check() bool { + d := bc.timeoutTime.Sub(time.Now()) + if d < 0 { + return false + } + timeoutSignal := time.After(d) + + for { + select { + case data := <-bc.notifyChannel: + bc.data = bc.data + data + if bc.isReady() { + return true + } + case <-timeoutSignal: + return false + } + } +} + +type ScriptChecker struct { + args []string +} + +func NewScriptChecker(args []string) *ScriptChecker { + return &ScriptChecker{args: args} +} + +func (sc *ScriptChecker) Check() bool { + cmd := exec.Command(sc.args[0]) + if len(sc.args) > 1 { + cmd.Args = sc.args + } + err := cmd.Run() + return err == nil && cmd.ProcessState != nil && cmd.ProcessState.Success() +} + +type TcpChecker struct { + host string + port int + conn net.Conn + baseChecker *BaseChecker +} + +func NewTcpChecker(host string, port int, includes []string, timeout int) *TcpChecker { + checker := &TcpChecker{host: host, + port: port, + baseChecker: NewBaseChecker(includes, timeout)} + checker.start() + return checker +} + +func (tc *TcpChecker) start() { + go func() { + b := make([]byte, 1024) + var err error = nil + for { + tc.conn, err = net.Dial("tcp", fmt.Sprintf("%s:%d", tc.host, tc.port)) + if err == nil || tc.baseChecker.timeoutTime.Before(time.Now()) { + break + } + } + + if err == nil { + for { + n, err := tc.conn.Read(b) + if err != nil { + break + } + tc.baseChecker.Write(b[0:n]) + } + } + }() +} + +func (tc *TcpChecker) Check() bool { + ret := tc.baseChecker.Check() + if tc.conn != nil { + tc.conn.Close() + } + return ret +} + +type HttpChecker struct { + url string + timeoutTime time.Time +} + +func NewHttpChecker(url string, timeout int) *HttpChecker { + return &HttpChecker{url: url, + timeoutTime: time.Now().Add(time.Duration(timeout) * time.Second)} +} + +func (hc *HttpChecker) Check() bool { + for { + if hc.timeoutTime.After(time.Now()) { + resp, err := http.Get(hc.url) + if err == nil { + return resp.StatusCode >= 200 && resp.StatusCode < 300 + } + } + } + return false +} diff --git a/vendor/github.com/rpoletaev/supervisord/ctl.go b/vendor/github.com/rpoletaev/supervisord/ctl.go new file mode 100644 index 000000000..977dc3ba9 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/ctl.go @@ -0,0 +1,159 @@ +package main + +import ( + "fmt" + "os" + "strings" + + "github.com/rpoletaev/supervisord/config" + "github.com/rpoletaev/supervisord/xmlrpcclient" +) + +type CtlCommand struct { + ServerUrl string `short:"s" long:"serverurl" description:"URL on which supervisord server is listening"` +} + +var ctlCommand CtlCommand + +func (x *CtlCommand) getServerUrl() string { + fmt.Printf("%v\n", options) + if x.ServerUrl != "" { + return x.ServerUrl + } else if _, err := os.Stat(options.Configuration); err == nil { + config := config.NewConfig(options.Configuration) + config.Load() + if entry, ok := config.GetSupervisorctl(); ok { + serverurl := entry.GetString("serverurl", "") + if serverurl != "" { + return serverurl + } + } + } + return "http://localhost:9001" +} +func (x *CtlCommand) Execute(args []string) error { + if len(args) == 0 { + return nil + } + + rpcc := xmlrpcclient.NewXmlRPCClient(x.getServerUrl()) + verb := args[0] + + switch verb { + + //////////////////////////////////////////////////////////////////////////////// + // STATUS + //////////////////////////////////////////////////////////////////////////////// + case "status": + processes := args[1:] + processesMap := make(map[string]bool) + for _, process := 
range processes { + processesMap[strings.ToLower(process)] = true + } + if reply, err := rpcc.GetAllProcessInfo(); err == nil { + x.showProcessInfo(&reply, processesMap) + } + + //////////////////////////////////////////////////////////////////////////////// + // START or STOP + //////////////////////////////////////////////////////////////////////////////// + case "start", "stop": + state := map[string]string{ + "start": "started", + "stop": "stopped", + } + processes := args[1:] + if len(processes) <= 0 { + fmt.Printf("Please specify process for %s\n", verb) + } + for _, pname := range processes { + if pname == "all" { + reply, err := rpcc.ChangeAllProcessState(verb) + if err == nil { + x.showProcessInfo(&reply, make(map[string]bool)) + } else { + fmt.Printf("Fail to change all process state to %s", state) + } + } else { + if reply, err := rpcc.ChangeProcessState(verb, pname); err == nil { + fmt.Printf("%s: ", pname) + if !reply.Value { + fmt.Printf("not ") + } + fmt.Printf("%s\n", state[verb]) + } else { + fmt.Printf("%s: failed [%v]\n", pname, err) + } + } + } + + //////////////////////////////////////////////////////////////////////////////// + // SHUTDOWN + //////////////////////////////////////////////////////////////////////////////// + case "shutdown": + if reply, err := rpcc.Shutdown(); err == nil { + if reply.Value { + fmt.Printf("Shut Down\n") + } else { + fmt.Printf("Hmmm! Something gone wrong?!\n") + } + } + case "reload": + if reply, err := rpcc.ReloadConfig(); err == nil { + + if len(reply.AddedGroup) > 0 { + fmt.Printf("Added Groups: %s\n", strings.Join(reply.AddedGroup, ",")) + } + if len(reply.ChangedGroup) > 0 { + fmt.Printf("Changed Groups: %s\n", strings.Join(reply.ChangedGroup, ",")) + } + if len(reply.RemovedGroup) > 0 { + fmt.Printf("Removed Groups: %s\n", strings.Join(reply.RemovedGroup, ",")) + } + } + case "signal": + sig_name, processes := args[1], args[2:] + for _, process := range processes { + if process == "all" { + reply, err := rpcc.SignalAll(process) + if err == nil { + x.showProcessInfo(&reply, make(map[string]bool)) + } else { + fmt.Printf("Fail to send signal %s to all process", sig_name) + } + } else { + reply, err := rpcc.SignalProcess(sig_name, process) + if err == nil && reply.Success { + fmt.Printf("Succeed to send signal %s to process %s\n", sig_name, process) + } else { + fmt.Printf("Fail to send signal %s to process %s\n", sig_name, process) + } + } + } + + default: + fmt.Println("unknown command") + } + + return nil +} + +func (x *CtlCommand) showProcessInfo(reply *xmlrpcclient.AllProcessInfoReply, processesMap map[string]bool) { + for _, pinfo := range reply.Value { + name := strings.ToLower(pinfo.Name) + description := pinfo.Description + if strings.ToLower(description) == "" { + description = "" + } + if len(processesMap) <= 0 || processesMap[name] { + fmt.Printf("%-33s%-10s%s\n", name, pinfo.Statename, description) + } + } +} + +func init() { + parser.AddCommand("ctl", + "Control a running daemon", + "The ctl subcommand resembles supervisorctl command of original daemon.", + &ctlCommand) +} diff --git a/vendor/github.com/rpoletaev/supervisord/daemonize.go b/vendor/github.com/rpoletaev/supervisord/daemonize.go new file mode 100644 index 000000000..682b1e88c --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/daemonize.go @@ -0,0 +1,25 @@ +// +build !windows + +package main + +import ( + log "github.com/sirupsen/logrus" + "github.com/sevlyar/go-daemon" +) + +func Deamonize(proc func()) { + context := new(daemon.Context) + + child, 
err := context.Reborn() + if err != nil { + log.WithFields(log.Fields{"err": err}).Fatal("Unable to run") + } + if child != nil { + return + } + defer context.Release() + + log.Info("daemon started") + + proc() +} diff --git a/vendor/github.com/rpoletaev/supervisord/daemonize_windows.go b/vendor/github.com/rpoletaev/supervisord/daemonize_windows.go new file mode 100644 index 000000000..9fe9d204d --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/daemonize_windows.go @@ -0,0 +1,7 @@ +package main + +// +build windows + +func Deamonize(proc func()) { + proc() +} diff --git a/vendor/github.com/rpoletaev/supervisord/events/events.go b/vendor/github.com/rpoletaev/supervisord/events/events.go new file mode 100644 index 000000000..ad3c55137 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/events/events.go @@ -0,0 +1,745 @@ +package events + +import ( + "bufio" + "bytes" + "container/list" + "fmt" + log "github.com/sirupsen/logrus" + "io" + "strconv" + "strings" + "sync" + "sync/atomic" + "time" +) + +const ( + EVENT_SYS_VERSION = "3.0" + PROC_COMMON_BEGIN_STR = "" + PROC_COMMON_END_STR = "" +) + +type Event interface { + GetSerial() uint64 + GetType() string + GetBody() string +} + +type BaseEvent struct { + serial uint64 + eventType string +} + +func (be *BaseEvent) GetSerial() uint64 { + return be.serial +} + +func (be *BaseEvent) GetType() string { + return be.eventType +} + +type EventListenerManager struct { + //mapping between the event listener name and the listener + namedListeners map[string]*EventListener + //mapping between the event name and the event listeners + eventListeners map[string]map[*EventListener]bool +} + +type EventPoolSerial struct { + sync.Mutex + poolserial map[string]uint64 +} + +func NewEventPoolSerial() *EventPoolSerial { + return &EventPoolSerial{poolserial: make(map[string]uint64)} +} + +func (eps *EventPoolSerial) nextSerial(pool string) uint64 { + eps.Lock() + defer eps.Unlock() + + r, ok := eps.poolserial[pool] + if !ok { + r = 1 + } + eps.poolserial[pool] = r + 1 + return r +} + +type EventListener struct { + pool string + server string + mutex sync.Mutex + events *list.List + stdin *bufio.Reader + stdout io.Writer + buffer_size int +} + +func NewEventListener(pool string, + server string, + stdin io.Reader, + stdout io.Writer, + buffer_size int) *EventListener { + evtListener := &EventListener{pool: pool, + server: server, + events: list.New(), + stdin: bufio.NewReader(stdin), + stdout: stdout, + buffer_size: buffer_size} + evtListener.start() + return evtListener +} + +func (el *EventListener) getFirstEvent() ([]byte, bool) { + el.mutex.Lock() + defer el.mutex.Unlock() + + if el.events.Len() > 0 { + elem := el.events.Front() + value := elem.Value + b, ok := value.([]byte) + return b, ok + } + return nil, false +} + +func (el *EventListener) removeFirstEvent() { + el.mutex.Lock() + defer el.mutex.Unlock() + if el.events.Len() > 0 { + el.events.Remove(el.events.Front()) + } +} + +func (el *EventListener) start() { + go func() { + for { + //read if it is ready + err := el.waitForReady() + if err != nil { + log.WithFields(log.Fields{"eventListener": el.pool}).Warn("fail to read from event listener, the event listener may exit") + break + } + for { + if b, ok := el.getFirstEvent(); ok { + _, err := el.stdout.Write(b) + if err != nil { + log.WithFields(log.Fields{"eventListener": el.pool}).Warn("fail to send event") + break + } + result, err := el.readResult() + if err != nil { + log.WithFields(log.Fields{"eventListener": el.pool}).Warn("fail 
to read result") + break + } + if result == "OK" { //remove the event if succeed + log.WithFields(log.Fields{"eventListener": el.pool}).Info("succeed to send the event") + el.removeFirstEvent() + break + } else if result == "FAIL" { + log.WithFields(log.Fields{"eventListener": el.pool}).Warn("fail to send the event") + break + } else { + log.WithFields(log.Fields{"eventListener": el.pool, "result": result}).Warn("unknown result from listener") + } + } + } + } + }() +} + +func (el *EventListener) waitForReady() error { + log.Debug("start to check if event listener program is ready") + for { + line, err := el.stdin.ReadString('\n') + if err != nil { + return err + } + if line == "READY\n" { + log.WithFields(log.Fields{"eventListener": el.pool}).Debug("the event listener is ready") + return nil + } + } +} + +func (el *EventListener) readResult() (string, error) { + s, err := el.stdin.ReadString('\n') + if err != nil { + return s, err + } + fields := strings.Fields(s) + if len(fields) == 2 && fields[0] == "RESULT" { + //try to get the length of result + n, err := strconv.Atoi(fields[1]) + if err != nil { + //return if fail to get the length + return "", err + } + if n < 0 { + return "", fmt.Errorf("Fail to read the result because the result bytes is less than 0") + } + //read n bytes + b := make([]byte, n) + for i := 0; i < n; i++ { + b[i], err = el.stdin.ReadByte() + if err != nil { + return "", err + } + } + //ok, get the n bytes + return string(b), nil + } else { + return "", fmt.Errorf("Fail to read the result") + } + +} + +func (el *EventListener) HandleEvent(event Event) { + encodedEvent := el.encodeEvent(event) + el.mutex.Lock() + defer el.mutex.Unlock() + if el.events.Len() <= el.buffer_size { + el.events.PushBack(encodedEvent) + } else { + log.WithFields(log.Fields{"eventListener": el.pool}).Error("events reaches the buffer_size, discard the events") + } +} + +func (el *EventListener) encodeEvent(event Event) []byte { + body := []byte(event.GetBody()) + + //header + s := fmt.Sprintf("ver:%s server:%s serial:%d pool:%s poolserial:%d eventname:%s len:%d\n", + EVENT_SYS_VERSION, + el.server, + event.GetSerial(), + el.pool, + eventPoolSerial.nextSerial(el.pool), + event.GetType(), + len(body)) + //write the header & body to buffer + r := bytes.NewBuffer([]byte(s)) + r.Write(body) + + return r.Bytes() +} + +var eventTypeDerives = map[string][]string{ + "PROCESS_STATE_STARTING": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_RUNNING": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_BACKOFF": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_STOPPING": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_EXITED": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_STOPPED": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_FATAL": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_UNKNOWN": []string{"EVENT", "PROCESS_STATE"}, + "REMOTE_COMMUNICATION": []string{"EVENT"}, + "PROCESS_LOG_STDOUT": []string{"EVENT", "PROCESS_LOG"}, + "PROCESS_LOG_STDERR": []string{"EVENT", "PROCESS_LOG"}, + "PROCESS_COMMUNICATION_STDOUT": []string{"EVENT", "PROCESS_COMMUNICATION"}, + "PROCESS_COMMUNICATION_STDERR": []string{"EVENT", "PROCESS_COMMUNICATION"}, + "SUPERVISOR_STATE_CHANGE_RUNNING": []string{"EVENT", "SUPERVISOR_STATE_CHANGE"}, + "SUPERVISOR_STATE_CHANGE_STOPPING": []string{"EVENT", "SUPERVISOR_STATE_CHANGE"}, + "TICK_5": []string{"EVENT", "TICK"}, + "TICK_60": []string{"EVENT", "TICK"}, + "TICK_3600": []string{"EVENT", "TICK"}, + "PROCESS_GROUP_ADDED": []string{"EVENT", 
"PROCESS_GROUP"}, + "PROCESS_GROUP_REMOVED": []string{"EVENT", "PROCESS_GROUP"}} +var eventSerial uint64 = 0 +var eventListenerManager = NewEventListenerManager() +var eventPoolSerial = NewEventPoolSerial() + +func init() { + startTickTimer() +} + +func startTickTimer() { + tickConfigs := map[string]int64{"TICK_5": 5, + "TICK_60": 60, + "TICK_3600": 3600} + + //start a Tick timer + go func() { + lastTickSlice := make(map[string]int64) + + c := time.Tick(1 * time.Second) + for now := range c { + for tickType, period := range tickConfigs { + time_slice := now.Unix() / period + last_time_slice, ok := lastTickSlice[tickType] + if !ok { + lastTickSlice[tickType] = time_slice + } else if last_time_slice != time_slice { + lastTickSlice[tickType] = time_slice + EmitEvent(NewTickEvent(tickType, now.Unix())) + } + } + } + }() +} + +func nextEventSerial() uint64 { + return atomic.AddUint64(&eventSerial, 1) +} + +func NewEventListenerManager() *EventListenerManager { + return &EventListenerManager{namedListeners: make(map[string]*EventListener), + eventListeners: make(map[string]map[*EventListener]bool)} +} + +func (em *EventListenerManager) registerEventListener(eventListenerName string, + events []string, + listener *EventListener) { + + em.namedListeners[eventListenerName] = listener + all_events := make(map[string]bool) + for _, event := range events { + for k, values := range eventTypeDerives { + if event == k { //if it is a final event + all_events[k] = true + } else { //if it is an abstract event, add all its derived events + for _, val := range values { + if val == event { + all_events[k] = true + } + } + } + } + } + for event, _ := range all_events { + log.WithFields(log.Fields{"eventListener": eventListenerName, "event": event}).Info("register event listener") + if _, ok := em.eventListeners[event]; !ok { + em.eventListeners[event] = make(map[*EventListener]bool) + } + em.eventListeners[event][listener] = true + } +} + +func RegisterEventListener(eventListenerName string, + events []string, + listener *EventListener) { + eventListenerManager.registerEventListener( eventListenerName, events, listener ) +} + +func (em *EventListenerManager) unregisterEventListener(eventListenerName string) *EventListener { + listener, ok := em.namedListeners[eventListenerName] + if ok { + delete(em.namedListeners, eventListenerName) + for event, listeners := range em.eventListeners { + if _, ok = listeners[listener]; ok { + log.WithFields(log.Fields{"eventListener": eventListenerName, "event": event}).Info("unregister event listener") + } + + delete(listeners, listener) + } + return listener + } + return nil +} + +func UnregisterEventListener(eventListenerName string) *EventListener { + return eventListenerManager.unregisterEventListener( eventListenerName ) +} + +func (em *EventListenerManager) EmitEvent(event Event) { + listeners, ok := em.eventListeners[event.GetType()] + if ok { + log.WithFields(log.Fields{"event": event.GetType()}).Info("process event") + for listener, _ := range listeners { + log.WithFields(log.Fields{"eventListener": listener.pool, "event": event.GetType()}).Info("receive event on listener") + listener.HandleEvent(event) + } + } +} + +type RemoteCommunicationEvent struct { + BaseEvent + typ string + data string +} + +func NewRemoteCommunicationEvent(typ string, data string) *RemoteCommunicationEvent { + r := &RemoteCommunicationEvent{typ: typ, data: data} + r.eventType = "REMOTE_COMMUNICATION" + r.serial = nextEventSerial() + return r +} + +func (r *RemoteCommunicationEvent) GetBody() 
string { + return fmt.Sprintf("type:%s\n%s", r.typ, r.data) +} + +type ProcCommEvent struct { + BaseEvent + processName string + groupName string + pid int + data string +} + +func NewProcCommEvent(eventType string, + procName string, + groupName string, + pid int, + data string) *ProcCommEvent { + return &ProcCommEvent{BaseEvent: BaseEvent{eventType: eventType, serial: nextEventSerial()}, + processName: procName, + groupName: groupName, + pid: pid, + data: data} +} + +func (p *ProcCommEvent) GetBody() string { + return fmt.Sprintf("processname:%s groupname:%s pid:%d\n%s", p.processName, p.groupName, p.pid, p.data) +} + +func EmitEvent(event Event) { + eventListenerManager.EmitEvent(event) +} + +type TickEvent struct { + BaseEvent + when int64 +} + +func NewTickEvent(tickType string, when int64) *TickEvent { + r := &TickEvent{when: when} + r.eventType = tickType + r.serial = nextEventSerial() + return r +} + +func (te *TickEvent) GetBody() string { + return fmt.Sprintf("when:%d", te.when) +} + +type ProcCommEventCapture struct { + reader io.Reader + captureMaxBytes int + stdType string + procName string + groupName string + pid int + eventBuffer string + eventBeginPos int +} + +func NewProcCommEventCapture(reader io.Reader, + captureMaxBytes int, + stdType string, + procName string, + groupName string) *ProcCommEventCapture { + pec := &ProcCommEventCapture{reader: reader, + captureMaxBytes: captureMaxBytes, + stdType: stdType, + procName: procName, + groupName: groupName, + pid: -1, + eventBuffer: "", + eventBeginPos: -1} + pec.startCapture() + return pec +} + +func (pec *ProcCommEventCapture) SetPid(pid int) { + pec.pid = pid +} +func (pec *ProcCommEventCapture) startCapture() { + go func() { + buf := make([]byte, 10240) + for { + n, err := pec.reader.Read(buf) + if err != nil { + break + } + pec.eventBuffer += string(buf[0:n]) + for { + event := pec.captureEvent() + if event == nil { + break + } + EmitEvent(event) + } + } + }() +} + +func (pec *ProcCommEventCapture) captureEvent() Event { + pec.findBeginStr() + end_pos := pec.findEndStr() + if end_pos == -1 { + return nil + } + data := pec.eventBuffer[pec.eventBeginPos+len(PROC_COMMON_BEGIN_STR) : end_pos] + pec.eventBuffer = pec.eventBuffer[end_pos+len(PROC_COMMON_END_STR):] + pec.eventBeginPos = -1 + return NewProcCommEvent(pec.stdType, + pec.procName, + pec.groupName, + pec.pid, + data) +} + +func (pec *ProcCommEventCapture) findBeginStr() { + if pec.eventBeginPos == -1 { + pec.eventBeginPos = strings.Index(pec.eventBuffer, PROC_COMMON_BEGIN_STR) + if pec.eventBeginPos == -1 { + //remove some string + n := len(pec.eventBuffer) + if n > len(PROC_COMMON_BEGIN_STR) { + pec.eventBuffer = pec.eventBuffer[n-len(PROC_COMMON_BEGIN_STR):] + } + } + } +} + +func (pec *ProcCommEventCapture) findEndStr() int { + if pec.eventBeginPos == -1 { + return -1 + } + end_pos := strings.Index(pec.eventBuffer, PROC_COMMON_END_STR) + if end_pos == -1 { + if len(pec.eventBuffer) > pec.captureMaxBytes { + log.WithFields(log.Fields{"program": pec.procName}).Warn("The capture buffer is overflow, discard the content") + pec.eventBeginPos = -1 + pec.eventBuffer = "" + } + } + return end_pos +} + +type ProcessStateEvent struct { + BaseEvent + process_name string + group_name string + from_state string + tries int + expected int + pid int +} + +func CreateProcessStartingEvent(process string, + group string, + from_state string, + tries int) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: 
tries, + expected: -1, + pid: 0} + r.eventType = "PROCESS_STATE_STARTING" + r.serial = nextEventSerial() + return r +} + +func CreateProcessRunningEvent(process string, + group string, + from_state string, + pid int) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: -1, + expected: -1, + pid: pid} + r.eventType = "PROCESS_STATE_RUNNING" + r.serial = nextEventSerial() + return r +} + +func CreateProcessBackoffEvent(process string, + group string, + from_state string, + tries int) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: tries, + expected: -1, + pid: 0} + r.eventType = "PROCESS_STATE_BACKOFF" + r.serial = nextEventSerial() + return r +} + +func CreateProcessStoppingEvent(process string, + group string, + from_state string, + pid int) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: -1, + expected: -1, + pid: pid} + r.eventType = "PROCESS_STATE_STOPPING" + r.serial = nextEventSerial() + return r +} + +func CreateProcessExitedEvent(process string, + group string, + from_state string, + expected int, + pid int) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: -1, + expected: expected, + pid: pid} + r.eventType = "PROCESS_STATE_EXITED" + r.serial = nextEventSerial() + return r +} + +func CreateProcessStoppedEvent(process string, + group string, + from_state string, + pid int) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: -1, + expected: -1, + pid: pid} + r.eventType = "PROCESS_STATE_STOPPED" + r.serial = nextEventSerial() + return r +} + +func CreateProcessFatalEvent(process string, + group string, + from_state string) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: -1, + expected: -1, + pid: 0} + r.eventType = "PROCESS_STATE_FATAL" + r.serial = nextEventSerial() + return r +} + +func CreateProcessUnknownEvent(process string, + group string, + from_state string) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: -1, + expected: -1, + pid: 0} + r.eventType = "PROCESS_STATE_UNKNOWN" + r.serial = nextEventSerial() + return r +} + +func (pse *ProcessStateEvent) GetBody() string { + body := fmt.Sprintf("processname:%s groupname:%s from_state:%s", pse.process_name, pse.group_name, pse.from_state) + if pse.tries >= 0 { + body = fmt.Sprintf("%s tries:%d", body, pse.tries) + } + + if pse.expected != -1 { + body = fmt.Sprintf("%s expected:%d", body, pse.expected) + } + + if pse.pid != 0 { + body = fmt.Sprintf("%s pid:%d", body, pse.pid) + } + return body +} + +type SupervisorStateChangeEvent struct { + BaseEvent +} + +func (s *SupervisorStateChangeEvent) GetBody() string { + return "" +} + +func CreateSupervisorStateChangeRunning() *SupervisorStateChangeEvent { + r := &SupervisorStateChangeEvent{} + r.eventType = "SUPERVISOR_STATE_CHANGE_RUNNING" + r.serial = nextEventSerial() + return r +} + +func createSupervisorStateChangeStopping() *SupervisorStateChangeEvent { + r := &SupervisorStateChangeEvent{} + r.eventType = "SUPERVISOR_STATE_CHANGE_STOPPING" + r.serial = nextEventSerial() + return r +} + +type ProcessLogEvent struct { + BaseEvent + process_name string + 
group_name string + pid int + data string +} + +func (pe *ProcessLogEvent) GetBody() string { + return fmt.Sprintf("processname:%s groupname:%s pid:%d\n%s", + pe.process_name, + pe.group_name, + pe.pid, + pe.data) +} + +func CreateProcessLogStdoutEvent(process_name string, + group_name string, + pid int, + data string) *ProcessLogEvent { + r := &ProcessLogEvent{process_name: process_name, + group_name: group_name, + pid: pid, + data: data} + r.eventType = "PROCESS_LOG_STDOUT" + r.serial = nextEventSerial() + return r +} + +func CreateProcessLogStderrEvent(process_name string, + group_name string, + pid int, + data string) *ProcessLogEvent { + r := &ProcessLogEvent{process_name: process_name, + group_name: group_name, + pid: pid, + data: data} + r.eventType = "PROCESS_LOG_STDERR" + r.serial = nextEventSerial() + return r +} + +type ProcessGroupEvent struct { + BaseEvent + group_name string +} + +func (pe *ProcessGroupEvent) GetBody() string { + return fmt.Sprintf("groupname:%s", pe.group_name) +} + +func CreateProcessGroupAddedEvent(group_name string) *ProcessGroupEvent { + r := &ProcessGroupEvent{group_name: group_name} + + r.eventType = "PROCESS_GROUP_ADDED" + r.serial = nextEventSerial() + return r +} + +func CreateProcessGroupRemovedEvent(group_name string) *ProcessGroupEvent { + r := &ProcessGroupEvent{group_name: group_name} + + r.eventType = "PROCESS_GROUP_REMOVED" + r.serial = nextEventSerial() + return r +} diff --git a/vendor/github.com/rpoletaev/supervisord/faults/faults.go b/vendor/github.com/rpoletaev/supervisord/faults/faults.go new file mode 100644 index 000000000..2bc730ee2 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/faults/faults.go @@ -0,0 +1,30 @@ +package faults + +import ( + xmlrpc "github.com/ochinchina/gorilla-xmlrpc/xml" +) + +const ( + UNKNOWN_METHOD = 1 + INCORRECT_PARAMETERS = 2 + BAD_ARGUMENTS = 3 + SIGNATURE_UNSUPPORTED = 4 + SHUTDOWN_STATE = 6 + BAD_NAME = 10 + BAD_SIGNAL = 11 + NO_FILE = 20 + NOT_EXECUTABLE = 21 + FAILED = 30 + ABNORMAL_TERMINATION = 40 + SPAWN_ERROR = 50 + ALREADY_STARTED = 60 + NOT_RUNNING = 70 + SUCCESS = 80 + ALREADY_ADDED = 90 + STILL_RUNNING = 91 + CANT_REREAD = 92 +) + +func NewFault(code int, desc string) error { + return &xmlrpc.Fault{Code: code, String: desc} +} diff --git a/vendor/github.com/rpoletaev/supervisord/logger/log.go b/vendor/github.com/rpoletaev/supervisord/logger/log.go new file mode 100644 index 000000000..189879015 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/logger/log.go @@ -0,0 +1,485 @@ +package logger + +import ( + "errors" + "fmt" + "io" + "io/ioutil" + "os" + "path" + "strconv" + "strings" + "sync" + + "github.com/rpoletaev/supervisord/events" + "github.com/rpoletaev/supervisord/faults" +) + +//implements io.Writer interface + +type Logger interface { + io.WriteCloser + SetPid(pid int) + ReadLog(offset int64, length int64) (string, error) + ReadTailLog(offset int64, length int64) (string, int64, bool, error) + ClearCurLogFile() error + ClearAllLogFile() error +} + +type LogEventEmitter interface { + emitLogEvent(data string) +} + +type FileLogger struct { + name string + maxSize int64 + backups int + curRotate int + fileSize int64 + file *os.File + logEventEmitter LogEventEmitter + locker sync.Locker +} + +type SysLogger struct { + NullLogger + logWriter io.WriteCloser + logEventEmitter LogEventEmitter +} + +type NullLogger struct { + logEventEmitter LogEventEmitter +} + +type NullLocker struct { +} + +func NewFileLogger(name string, maxSize int64, backups int, logEventEmitter 
LogEventEmitter, locker sync.Locker) *FileLogger { + logger := &FileLogger{name: name, + maxSize: maxSize, + backups: backups, + curRotate: -1, + fileSize: 0, + file: nil, + logEventEmitter: logEventEmitter, + locker: locker} + logger.updateLatestLog() + return logger +} + +func (l *FileLogger) SetPid(pid int) { + //NOTHING TO DO +} + +// return the next log file name +func (l *FileLogger) nextLogFile() { + l.curRotate++ + if l.curRotate >= l.backups { + l.curRotate = 0 + } +} + +func (l *FileLogger) updateLatestLog() { + dir := path.Dir(l.name) + files, err := ioutil.ReadDir(dir) + baseName := path.Base(l.name) + + if err != nil { + l.curRotate = 0 + } else { + //find all the rotate files + var latestFile os.FileInfo + latestNum := -1 + for _, fileInfo := range files { + if !fileInfo.IsDir() && strings.HasPrefix(fileInfo.Name(), baseName+".") { + n, err := strconv.Atoi(fileInfo.Name()[len(baseName)+1:]) + if err == nil && n >= 0 && n < l.backups { + if latestFile == nil || latestFile.ModTime().Before(fileInfo.ModTime()) { + latestFile = fileInfo + latestNum = n + } + } + } + } + l.curRotate = latestNum + if latestFile != nil { + l.fileSize = latestFile.Size() + } else { + l.fileSize = int64(0) + } + if l.fileSize >= l.maxSize || latestFile == nil { + l.nextLogFile() + l.openFile(true) + } else { + l.openFile(false) + } + } +} + +// open the file and truncate the file if trunc is true +func (l *FileLogger) openFile(trunc bool) error { + if l.file != nil { + l.file.Close() + } + var err error + fileName := l.GetCurrentLogFile() + if trunc { + l.file, err = os.Create(fileName) + } else { + l.file, err = os.OpenFile(fileName, os.O_RDWR|os.O_APPEND, 0666) + } + return err +} + +// get the name of current log file +func (l *FileLogger) GetCurrentLogFile() string { + return l.getLogFileName(l.curRotate) +} + +// get the name of previous log file +func (l *FileLogger) GetPrevLogFile() string { + i := (l.curRotate - 1 + l.backups) % l.backups + + return l.getLogFileName(i) +} + +func (l *FileLogger) getLogFileName(index int) string { + return fmt.Sprintf("%s.%d", l.name, index) +} + +// clear the current log file contents +func (l *FileLogger) ClearCurLogFile() error { + l.locker.Lock() + defer l.locker.Unlock() + + return l.openFile(true) +} + +func (l *FileLogger) ClearAllLogFile() error { + l.locker.Lock() + defer l.locker.Unlock() + + for i := 0; i < l.backups && i <= l.curRotate; i++ { + logFile := l.getLogFileName(i) + err := os.Remove(logFile) + if err != nil { + return faults.NewFault(faults.FAILED, err.Error()) + } + } + l.curRotate = 0 + err := l.openFile(true) + if err != nil { + return faults.NewFault(faults.FAILED, err.Error()) + } + return nil +} + +func (l *FileLogger) ReadLog(offset int64, length int64) (string, error) { + if offset < 0 && length != 0 { + return "", faults.NewFault(faults.BAD_ARGUMENTS, "BAD_ARGUMENTS") + } + if offset >= 0 && length < 0 { + return "", faults.NewFault(faults.BAD_ARGUMENTS, "BAD_ARGUMENTS") + } + + l.locker.Lock() + defer l.locker.Unlock() + f, err := os.Open(l.GetCurrentLogFile()) + + if err != nil { + return "", faults.NewFault(faults.FAILED, "FAILED") + } + defer f.Close() + + //check the length of file + statInfo, err := f.Stat() + if err != nil { + return "", faults.NewFault(faults.FAILED, "FAILED") + } + + fileLen := statInfo.Size() + + if offset < 0 { //offset < 0 && length == 0 + offset = fileLen + offset + if offset < 0 { + offset = 0 + } + length = fileLen - offset + } else if length == 0 { //offset >= 0 && length == 0 + if offset > 
fileLen { + return "", nil + } + length = fileLen - offset + } else { //offset >= 0 && length > 0 + + //if the offset exceeds the length of file + if offset >= fileLen { + return "", nil + } + + //compute actual bytes should be read + + if offset+length > fileLen { + length = fileLen - offset + } + } + + b := make([]byte, length) + n, err := f.ReadAt(b, offset) + if err != nil { + return "", faults.NewFault(faults.FAILED, "FAILED") + } + return string(b[:n]), nil +} + +func (l *FileLogger) ReadTailLog(offset int64, length int64) (string, int64, bool, error) { + if offset < 0 { + return "", offset, false, fmt.Errorf("offset should not be less than 0") + } + if length < 0 { + return "", offset, false, fmt.Errorf("length should be not be less than 0") + } + l.locker.Lock() + defer l.locker.Unlock() + + //open the file + f, err := os.Open(l.GetCurrentLogFile()) + if err != nil { + return "", 0, false, err + } + + defer f.Close() + + //get the length of file + statInfo, err := f.Stat() + if err != nil { + return "", 0, false, err + } + + fileLen := statInfo.Size() + + //check if offset exceeds the length of file + if offset >= fileLen { + return "", fileLen, true, nil + } + + //get the length + if offset+length > fileLen { + length = fileLen - offset + } + + b := make([]byte, length) + n, err := f.ReadAt(b, offset) + if err != nil { + return "", offset, false, err + } + return string(b[:n]), offset + int64(n), false, nil + +} + +// Override the function in io.Writer +func (l *FileLogger) Write(p []byte) (int, error) { + l.locker.Lock() + defer l.locker.Unlock() + + n, err := l.file.Write(p) + + if err != nil { + return n, err + } + l.logEventEmitter.emitLogEvent(string(p)) + l.fileSize += int64(n) + if l.fileSize >= l.maxSize { + fileInfo, errStat := os.Stat(fmt.Sprintf("%s.%d", l.name, l.curRotate)) + if errStat == nil { + l.fileSize = fileInfo.Size() + } else { + return n, errStat + } + } + if l.fileSize >= l.maxSize { + l.nextLogFile() + l.openFile(true) + } + return n, err +} + +func (l *FileLogger) Close() error { + if l.file != nil { + return l.file.Close() + } + return nil +} + +func (sl *SysLogger) Write(b []byte) (int, error) { + sl.logEventEmitter.emitLogEvent(string(b)) + if sl.logWriter != nil { + return sl.logWriter.Write(b) + } else { + return 0, errors.New("not connect to syslog server") + } +} + +func (sl *SysLogger) Close() error { + if sl.logWriter != nil { + return sl.logWriter.Close() + } else { + return errors.New("not connect to syslog server") + } +} +func NewNullLogger(logEventEmitter LogEventEmitter) *NullLogger { + return &NullLogger{logEventEmitter: logEventEmitter} +} + +func (l *NullLogger) SetPid(pid int) { + //NOTHING TO DO +} + +func (l *NullLogger) Write(p []byte) (int, error) { + l.logEventEmitter.emitLogEvent(string(p)) + return len(p), nil +} + +func (l *NullLogger) Close() error { + return nil +} + +func (l *NullLogger) ReadLog(offset int64, length int64) (string, error) { + return "", faults.NewFault(faults.NO_FILE, "NO_FILE") +} + +func (l *NullLogger) ReadTailLog(offset int64, length int64) (string, int64, bool, error) { + return "", 0, false, faults.NewFault(faults.NO_FILE, "NO_FILE") +} + +func (l *NullLogger) ClearCurLogFile() error { + return fmt.Errorf("No log") +} + +func (l *NullLogger) ClearAllLogFile() error { + return faults.NewFault(faults.NO_FILE, "NO_FILE") +} + +func NewNullLocker() *NullLocker { + return &NullLocker{} +} + +func (l *NullLocker) Lock() { +} + +func (l *NullLocker) Unlock() { +} + +type StdLogger struct { + NullLogger + 
logEventEmitter LogEventEmitter + writer io.Writer +} + +func NewStdoutLogger(logEventEmitter LogEventEmitter) *StdLogger { + return &StdLogger{logEventEmitter: logEventEmitter, + writer: os.Stdout} +} + +func (l *StdLogger) Write(p []byte) (int, error) { + n, err := l.writer.Write(p) + if err != nil { + l.logEventEmitter.emitLogEvent(string(p)) + } + return n, err +} + +func NewStderrLogger(logEventEmitter LogEventEmitter) *StdLogger { + return &StdLogger{logEventEmitter: logEventEmitter, + writer: os.Stdout} +} + +type LogCaptureLogger struct { + underlineLogger Logger + procCommEventCapWriter io.Writer + procCommEventCapture *events.ProcCommEventCapture +} + +func NewLogCaptureLogger(underlineLogger Logger, + captureMaxBytes int, + stdType string, + procName string, + groupName string) *LogCaptureLogger { + r, w := io.Pipe() + eventCapture := events.NewProcCommEventCapture(r, + captureMaxBytes, + stdType, + procName, + groupName) + return &LogCaptureLogger{underlineLogger: underlineLogger, + procCommEventCapWriter: w, + procCommEventCapture: eventCapture} +} + +func (l *LogCaptureLogger) SetPid(pid int) { + l.procCommEventCapture.SetPid(pid) +} + +func (l *LogCaptureLogger) Write(p []byte) (int, error) { + l.procCommEventCapWriter.Write(p) + return l.underlineLogger.Write(p) +} + +func (l *LogCaptureLogger) Close() error { + return l.underlineLogger.Close() +} + +func (l *LogCaptureLogger) ReadLog(offset int64, length int64) (string, error) { + return l.underlineLogger.ReadLog(offset, length) +} + +func (l *LogCaptureLogger) ReadTailLog(offset int64, length int64) (string, int64, bool, error) { + return l.underlineLogger.ReadTailLog(offset, length) +} + +func (l *LogCaptureLogger) ClearCurLogFile() error { + return l.underlineLogger.ClearCurLogFile() +} + +func (l *LogCaptureLogger) ClearAllLogFile() error { + return l.underlineLogger.ClearAllLogFile() +} + +type NullLogEventEmitter struct { +} + +func NewNullLogEventEmitter() *NullLogEventEmitter { + return &NullLogEventEmitter{} +} + +func (ne *NullLogEventEmitter) emitLogEvent(data string) { +} + +type StdLogEventEmitter struct { + Type string + process_name string + group_name string + pidFunc func() int +} + +func NewStdoutLogEventEmitter(process_name string, group_name string, procPidFunc func() int) *StdLogEventEmitter { + return &StdLogEventEmitter{Type: "stdout", + process_name: process_name, + group_name: group_name, + pidFunc: procPidFunc} +} + +func NewStderrLogEventEmitter(process_name string, group_name string, procPidFunc func() int) *StdLogEventEmitter { + return &StdLogEventEmitter{Type: "stderr", + process_name: process_name, + group_name: group_name, + pidFunc: procPidFunc} +} + +func (se *StdLogEventEmitter) emitLogEvent(data string) { + if se.Type == "stdout" { + events.EmitEvent(events.CreateProcessLogStdoutEvent(se.process_name, se.group_name, se.pidFunc(), data)) + } else { + events.EmitEvent(events.CreateProcessLogStderrEvent(se.process_name, se.group_name, se.pidFunc(), data)) + } +} diff --git a/vendor/github.com/rpoletaev/supervisord/logger/log_unix.go b/vendor/github.com/rpoletaev/supervisord/logger/log_unix.go new file mode 100644 index 000000000..da683520c --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/logger/log_unix.go @@ -0,0 +1,16 @@ +// +build !windows,!nacl,!plan9 + +package logger + +import ( + "log/syslog" +) + +func NewSysLogger(name string, logEventEmitter LogEventEmitter) *SysLogger { + writer, err := syslog.New(syslog.LOG_DEBUG, name) + logger := &SysLogger{logEventEmitter: 
logEventEmitter} + if err == nil { + logger.logWriter = writer + } + return logger +} diff --git a/vendor/github.com/rpoletaev/supervisord/logger/log_windows.go b/vendor/github.com/rpoletaev/supervisord/logger/log_windows.go new file mode 100644 index 000000000..cf4609f57 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/logger/log_windows.go @@ -0,0 +1,7 @@ +// +build windows plan9 nacl + +package logger + +func NewSysLogger(name string, logEventEmitter LogEventEmitter) *SysLogger { + return &SysLogger{logEventEmitter: logEventEmitter, logWriter: nil} +} diff --git a/vendor/github.com/rpoletaev/supervisord/main.go b/vendor/github.com/rpoletaev/supervisord/main.go new file mode 100644 index 000000000..dfc491df6 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/main.go @@ -0,0 +1,75 @@ +package main + +import ( + "fmt" + "os" + "os/signal" + "runtime" + "syscall" + + log "github.com/sirupsen/logrus" + "github.com/jessevdk/go-flags" +) + +type Options struct { + Configuration string `short:"c" long:"configuration" description:"the configuration file" default:"supervisord.conf"` + Daemon bool `short:"d" long:"daemon" description:"run as daemon"` +} + +func init() { + log.SetOutput(os.Stdout) + if runtime.GOOS == "windows" { + log.SetFormatter(&log.TextFormatter{DisableColors: true, FullTimestamp: true}) + } else { + log.SetFormatter(&log.TextFormatter{DisableColors: false, FullTimestamp: true}) + } + log.SetLevel(log.DebugLevel) +} + +func initSignals(s *Supervisor) { + sigs := make(chan os.Signal, 1) + signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) + go func() { + sig := <-sigs + log.WithFields(log.Fields{"signal": sig}).Info("receive a signal to stop all process & exit") + s.procMgr.StopAllProcesses() + os.Exit(-1) + }() + +} + +var options Options +var parser = flags.NewParser(&options, flags.Default & ^flags.PrintErrors) + +func RunServer() { + // infinite loop for handling Restart ('reload' command) + for true { + s := NewSupervisor(options.Configuration) + initSignals(s) + if sErr, _, _, _ := s.Reload(); sErr != nil { + panic(sErr) + } + s.WaitForExit() + } +} + +func main() { + if _, err := parser.Parse(); err != nil { + flagsErr, ok := err.(*flags.Error) + if ok { + switch flagsErr.Type { + case flags.ErrHelp: + fmt.Fprintln(os.Stdout, err) + os.Exit(0) + case flags.ErrCommandRequired: + if options.Daemon { + Deamonize(RunServer) + } else { + RunServer() + } + default: + panic(err) + } + } + } +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/command_parser.go b/vendor/github.com/rpoletaev/supervisord/process/command_parser.go new file mode 100644 index 000000000..b711bafdb --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/command_parser.go @@ -0,0 +1,81 @@ +package process + +import ( + "fmt" + "unicode" +) + +// find the position of byte ch in the string s start from offset +// +// return: -1 if byte ch is not found, >= offset if the ch is found +// in the string s from offset +func findChar(s string, offset int, ch byte) int { + for i := offset; i < len(s); i++ { + if s[i] == '\\' { + i++ + } else if s[i] == ch { + return i + } + } + return -1 +} + +// skip all the white space and return the first position of non-space char +// +// return: the first position of non-space char or -1 if all the char +// from offset are space +func skipSpace(s string, offset int) int { + for i := offset; i < len(s); i++ { + if !unicode.IsSpace(rune(s[i])) { + return i + } + } + return -1 +} + +func appendArgument(arg string, args []string) 
[]string { + if arg[0] == '"' || arg[0] == '\'' { + return append(args, arg[1:len(arg)-1]) + } + return append(args, arg) +} + +func parseCommand(command string) ([]string, error) { + args := make([]string, 0) + cmdLen := len(command) + for i := 0; i < cmdLen; { + //find the first non-space char + j := skipSpace(command, i) + if j == -1 { + break + } + i = j + for ; j < cmdLen; j++ { + if unicode.IsSpace(rune(command[j])) { + args = appendArgument(command[i:j], args) + i = j + 1 + break + } else if command[j] == '\\' { + j++ + } else if command[j] == '"' || command[j] == '\'' { + k := findChar(command, j+1, command[j]) + if k == -1 { + args = appendArgument(command[i:], args) + i = cmdLen + } else { + args = appendArgument(command[i:k+1], args) + i = k + 1 + } + break + } + } + if j >= cmdLen { + args = appendArgument(command[i:], args) + i = cmdLen + } + } + if len(args) <= 0 { + return nil, fmt.Errorf("no command from empty string") + } + return args, nil +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/path.go b/vendor/github.com/rpoletaev/supervisord/process/path.go new file mode 100644 index 000000000..82b2fbd1d --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/path.go @@ -0,0 +1,46 @@ +package process + +import ( + "os/user" + "path/filepath" +) + +func path_split(path string) []string { + r := make([]string, 0) + cur_path := path + for { + dir, file := filepath.Split(cur_path) + if len(file) > 0 { + r = append(r, file) + } + if len(dir) <= 0 { + break + } + cur_path = dir[0 : len(dir)-1] + } + for i, j := 0, len(r)-1; i < j; i, j = i+1, j-1 { + r[i], r[j] = r[j], r[i] + } + return r +} +func Path_expand(path string) (string, error) { + pathList := path_split(path) + + if len(pathList) > 0 && len(pathList[0]) > 0 && pathList[0][0] == '~' { + var usr *user.User = nil + var err error = nil + + if pathList[0] == "~" { + usr, err = user.Current() + } else { + usr, err = user.Lookup(pathList[0][1:]) + } + + if err != nil { + return "", err + } + pathList[0] = usr.HomeDir + return filepath.Join(pathList...), nil + } + return path, nil +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_linux.go b/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_linux.go new file mode 100644 index 000000000..90e23f40b --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_linux.go @@ -0,0 +1,12 @@ +// +build linux + +package process + +import ( + "syscall" +) + +func set_deathsig(sysProcAttr *syscall.SysProcAttr) { + sysProcAttr.Setpgid = true + sysProcAttr.Pdeathsig = syscall.SIGKILL +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_other.go b/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_other.go new file mode 100644 index 000000000..da0042ecc --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_other.go @@ -0,0 +1,12 @@ +// +build !linux +// +build !windows + +package process + +import ( + "syscall" +) + +func set_deathsig(sysProcAttr *syscall.SysProcAttr) { + sysProcAttr.Setpgid = true +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_windows.go b/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_windows.go new file mode 100644 index 000000000..e19e3d78c --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_windows.go @@ -0,0 +1,9 @@ +// +build windows +package process + +import ( + "syscall" +) + +func set_deathsig(_ *syscall.SysProcAttr) { +} diff --git 
a/vendor/github.com/rpoletaev/supervisord/process/process.go b/vendor/github.com/rpoletaev/supervisord/process/process.go new file mode 100644 index 000000000..f016dc3a6 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/process.go @@ -0,0 +1,689 @@ +package process + +import ( + "fmt" + "io" + "os" + "os/exec" + "os/user" + "strconv" + "strings" + "sync" + "syscall" + "time" + + "github.com/rpoletaev/supervisord/config" + "github.com/rpoletaev/supervisord/events" + "github.com/rpoletaev/supervisord/logger" + "github.com/rpoletaev/supervisord/signals" + log "github.com/sirupsen/logrus" +) + +type ProcessState int + +const ( + STOPPED ProcessState = iota + STARTING = 10 + RUNNING = 20 + BACKOFF = 30 + STOPPING = 40 + EXITED = 100 + FATAL = 200 + UNKNOWN = 1000 +) + +func (p ProcessState) String() string { + switch p { + case STOPPED: + return "STOPPED" + case STARTING: + return "STARTING" + case RUNNING: + return "RUNNING" + case BACKOFF: + return "BACKOFF" + case STOPPING: + return "STOPPING" + case EXITED: + return "EXITED" + case FATAL: + return "FATAL" + default: + return "UNKNOWN" + } +} + +type Process struct { + supervisor_id string + config *config.ConfigEntry + cmd *exec.Cmd + startTime time.Time + stopTime time.Time + state ProcessState + //true if process is starting + inStart bool + //true if the process is stopped by user + stopByUser bool + retryTimes int + lock sync.RWMutex + stdin io.WriteCloser + StdoutLog logger.Logger + StderrLog logger.Logger +} + +func NewProcess(supervisor_id string, config *config.ConfigEntry) *Process { + proc := &Process{supervisor_id: supervisor_id, + config: config, + cmd: nil, + startTime: time.Unix(0, 0), + stopTime: time.Unix(0, 0), + state: STOPPED, + inStart: false, + stopByUser: false, + retryTimes: 0} + proc.config = config + proc.cmd = nil + + //start the process if autostart is set to true + //if proc.isAutoStart() { + // proc.Start(false) + //} + + return proc +} + +func (p *Process) Start(wait bool) { + log.WithFields(log.Fields{"program": p.GetName()}).Info("try to start program") + p.lock.Lock() + if p.inStart { + log.WithFields(log.Fields{"program": p.GetName()}).Info("Don't start program again, program is already started") + p.lock.Unlock() + return + } + + p.inStart = true + p.stopByUser = false + p.lock.Unlock() + + var runCond *sync.Cond = nil + finished := false + if wait { + runCond = sync.NewCond(&sync.Mutex{}) + runCond.L.Lock() + } + + go func() { + p.retryTimes = 0 + + for { + if wait { + runCond.L.Lock() + } + p.run(func() { + finished = true + if wait { + runCond.L.Unlock() + runCond.Signal() + } + }) + if (p.stopTime.Unix() - p.startTime.Unix()) < int64(p.getStartSeconds()) { + p.retryTimes++ + } else { + p.retryTimes = 0 + } + if p.stopByUser { + log.WithFields(log.Fields{"program": p.GetName()}).Info("Stopped by user, don't start it again") + break + } + if !p.isAutoRestart() { + log.WithFields(log.Fields{"program": p.GetName()}).Info("Don't start the stopped program because its autorestart flag is false") + break + } + if p.retryTimes >= p.getStartRetries() { + log.WithFields(log.Fields{"program": p.GetName()}).Info("Don't start the stopped program because its retry times ", p.retryTimes, " is greater than start retries ", p.getStartRetries()) + break + } + } + p.lock.Lock() + p.inStart = false + p.lock.Unlock() + }() + if wait && !finished { + runCond.Wait() + runCond.L.Unlock() + } +} + +func (p *Process) GetName() string { + if p.config.IsProgram() { + return p.config.GetProgramName() + } else if 
p.config.IsEventListener() { + return p.config.GetEventListenerName() + } else { + return "" + } +} + +func (p *Process) GetGroup() string { + return p.config.Group +} + +func (p *Process) GetDescription() string { + p.lock.Lock() + defer p.lock.Unlock() + if p.state == RUNNING { + seconds := int(time.Now().Sub(p.startTime).Seconds()) + minutes := seconds / 60 + hours := minutes / 60 + days := hours / 24 + if days > 0 { + return fmt.Sprintf("pid %d, uptime %d days, %d:%02d:%02d", p.cmd.Process.Pid, days, hours%24, minutes%60, seconds%60) + } else { + return fmt.Sprintf("pid %d, uptime %d:%02d:%02d", p.cmd.Process.Pid, hours%24, minutes%60, seconds%60) + } + } else if p.state != STOPPED { + return p.stopTime.String() + } + return "" +} + +func (p *Process) GetExitstatus() int { + p.lock.Lock() + defer p.lock.Unlock() + + if p.state == EXITED || p.state == BACKOFF { + if p.cmd.ProcessState == nil { + return 0 + } + status, ok := p.cmd.ProcessState.Sys().(syscall.WaitStatus) + if ok { + return status.ExitStatus() + } + } + return 0 +} + +func (p *Process) GetPid() int { + p.lock.Lock() + defer p.lock.Unlock() + + if p.state == STOPPED || p.state == FATAL || p.state == UNKNOWN || p.state == EXITED || p.state == BACKOFF { + return 0 + } + return p.cmd.Process.Pid +} + +// Get the process state +func (p *Process) GetState() ProcessState { + return p.state +} + +func (p *Process) GetStartTime() time.Time { + return p.startTime +} + +func (p *Process) GetStopTime() time.Time { + switch p.state { + case STARTING: + fallthrough + case RUNNING: + fallthrough + case STOPPING: + return time.Unix(0, 0) + default: + return p.stopTime + } +} + +func (p *Process) GetStdoutLogfile() string { + file_name := p.config.GetStringExpression("stdout_logfile", "/dev/null") + expand_file, err := Path_expand(file_name) + if err == nil { + return expand_file + } else { + return file_name + } +} + +func (p *Process) GetStderrLogfile() string { + file_name := p.config.GetStringExpression("stderr_logfile", "/dev/null") + expand_file, err := Path_expand(file_name) + if err == nil { + return expand_file + } else { + return file_name + } +} + +func (p *Process) getStartSeconds() int { + return p.config.GetInt("startsecs", 1) +} + +func (p *Process) getStartRetries() int { + return p.config.GetInt("startretries", 3) +} + +func (p *Process) isAutoStart() bool { + return p.config.GetString("autostart", "true") == "true" +} + +func (p *Process) GetPriority() int { + return p.config.GetInt("priority", 999) +} + +func (p *Process) getNumberProcs() int { + return p.config.GetInt("numprocs", 1) +} + +func (p *Process) SendProcessStdin(chars string) error { + if p.stdin != nil { + _, err := p.stdin.Write([]byte(chars)) + return err + } + return fmt.Errorf("NO_FILE") +} + +// check if the process should be +func (p *Process) isAutoRestart() bool { + autoRestart := p.config.GetString("autorestart", "unexpected") + + if autoRestart == "false" { + return false + } else if autoRestart == "true" { + return true + } else { + p.lock.Lock() + defer p.lock.Unlock() + if p.cmd != nil && p.cmd.ProcessState != nil { + exitCode, err := p.getExitCode() + return err == nil && p.inExitCodes(exitCode) + } + } + return false + +} + +func (p *Process) inExitCodes(exitCode int) bool { + for _, code := range p.getExitCodes() { + if code == exitCode { + return true + } + } + return false +} + +func (p *Process) getExitCode() (int, error) { + if p.cmd.ProcessState == nil { + return -1, fmt.Errorf("no exit code") + } + if status, ok := 
p.cmd.ProcessState.Sys().(syscall.WaitStatus); ok { + return status.ExitStatus(), nil + } + + return -1, fmt.Errorf("no exit code") + +} + +func (p *Process) getExitCodes() []int { + strExitCodes := strings.Split(p.config.GetString("exitcodes", "0,2"), ",") + result := make([]int, 0) + for _, val := range strExitCodes { + i, err := strconv.Atoi(val) + if err == nil { + result = append(result, i) + } + } + return result +} + +func (p *Process) run(finishCb func()) { + args, err := parseCommand(p.config.GetStringExpression("command", "")) + + if err != nil { + log.Error("the command is empty string") + finishCb() + return + } + p.lock.Lock() + if p.cmd != nil && p.cmd.ProcessState != nil { + status := p.cmd.ProcessState.Sys().(syscall.WaitStatus) + if status.Continued() { + log.WithFields(log.Fields{"program": p.GetName()}).Info("Don't start program because it is running") + p.lock.Unlock() + finishCb() + return + } + } + p.cmd = exec.Command(args[0]) + if len(args) > 1 { + p.cmd.Args = args + } + p.cmd.SysProcAttr = &syscall.SysProcAttr{} + if p.setUser() != nil { + log.WithFields(log.Fields{"user": p.config.GetString("user", "")}).Error("fail to run as user") + p.lock.Unlock() + finishCb() + return + } + set_deathsig(p.cmd.SysProcAttr) + p.setEnv() + p.setDir() + p.setLog() + + p.stdin, _ = p.cmd.StdinPipe() + p.startTime = time.Now() + p.changeStateTo(STARTING) + err = p.cmd.Start() + if err != nil { + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Errorf("fail to start program with error:%v", err) + p.changeStateTo(FATAL) + p.stopTime = time.Now() + p.lock.Unlock() + finishCb() + } else { + if p.StdoutLog != nil { + p.StdoutLog.SetPid(p.cmd.Process.Pid) + } + if p.StderrLog != nil { + p.StderrLog.SetPid(p.cmd.Process.Pid) + } + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Info("success to start program") + startSecs := p.config.GetInt("startsecs", 1) + //Set startsec to 0 to indicate that the program needn't stay + //running for any particular amount of time. 
+ if startSecs <= 0 { + p.changeStateTo(RUNNING) + + } else { + time.Sleep(time.Duration(startSecs) * time.Second) + if tmpProc, err := os.FindProcess(p.cmd.Process.Pid); err == nil && tmpProc != nil { + p.changeStateTo(RUNNING) + } + } + p.lock.Unlock() + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Debug("wait program exit") + finishCb() + err = p.cmd.Wait() + if err == nil { + if p.cmd.ProcessState != nil { + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Infof("program stopped with status:%v", p.cmd.ProcessState) + } else { + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Info("program stopped") + } + } else { + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Errorf("program stopped with error:%v", err) + } + + p.lock.Lock() + p.stopTime = time.Now() + if p.stopTime.Unix()-p.startTime.Unix() < int64(startSecs) { + p.changeStateTo(BACKOFF) + } else { + p.changeStateTo(EXITED) + } + p.lock.Unlock() + } + +} + +func (p *Process) changeStateTo(procState ProcessState) { + if p.config.IsProgram() { + progName := p.config.GetProgramName() + groupName := p.config.GetGroupName() + if procState == STARTING { + events.EmitEvent(events.CreateProcessStartingEvent(progName, groupName, p.state.String(), p.retryTimes)) + } else if procState == RUNNING { + events.EmitEvent(events.CreateProcessRunningEvent(progName, groupName, p.state.String(), p.cmd.Process.Pid)) + } else if procState == BACKOFF { + events.EmitEvent(events.CreateProcessBackoffEvent(progName, groupName, p.state.String(), p.retryTimes)) + } else if procState == STOPPING { + events.EmitEvent(events.CreateProcessStoppingEvent(progName, groupName, p.state.String(), p.cmd.Process.Pid)) + } else if procState == EXITED { + exitCode, err := p.getExitCode() + expected := 0 + if err == nil && p.inExitCodes(exitCode) { + expected = 1 + } + events.EmitEvent(events.CreateProcessExitedEvent(progName, groupName, p.state.String(), expected, p.cmd.Process.Pid)) + } else if procState == FATAL { + events.EmitEvent(events.CreateProcessFatalEvent(progName, groupName, p.state.String())) + } else if procState == STOPPED { + events.EmitEvent(events.CreateProcessStoppedEvent(progName, groupName, p.state.String(), p.cmd.Process.Pid)) + } else if procState == UNKNOWN { + events.EmitEvent(events.CreateProcessUnknownEvent(progName, groupName, p.state.String())) + } + } + p.state = procState +} + +func (p *Process) Signal(sig os.Signal) error { + p.lock.Lock() + defer p.lock.Unlock() + + return p.sendSignal(sig) +} + +func (p *Process) sendSignal(sig os.Signal) error { + if p.cmd != nil && p.cmd.Process != nil { + err := signals.Kill(p.cmd.Process, sig) + return err + } + return fmt.Errorf("process is not started") +} + +func (p *Process) setEnv() { + env := p.config.GetEnv("environment") + if len(env) != 0 { + p.cmd.Env = append(os.Environ(), env...) 
+ } else { + p.cmd.Env = os.Environ() + } +} + +func (p *Process) setDir() { + dir := p.config.GetStringExpression("directory", "") + if dir != "" { + p.cmd.Dir = dir + } +} + +func (p *Process) setLog() { + if p.config.IsProgram() { + p.StdoutLog = p.createLogger(p.GetStdoutLogfile(), + int64(p.config.GetBytes("stdout_logfile_maxbytes", 50*1024*1024)), + p.config.GetInt("stdout_logfile_backups", 10), + p.createStdoutLogEventEmitter()) + capture_bytes := p.config.GetBytes("stdout_capture_maxbytes", 0) + if capture_bytes > 0 { + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Info("capture stdout process communication") + p.StdoutLog = logger.NewLogCaptureLogger(p.StdoutLog, + capture_bytes, + "PROCESS_COMMUNICATION_STDOUT", + p.GetName(), + p.GetGroup()) + } + + p.cmd.Stdout = p.StdoutLog + + if p.config.GetBool("redirect_stderr", false) { + p.StderrLog = p.StdoutLog + } else { + p.StderrLog = p.createLogger(p.GetStderrLogfile(), + int64(p.config.GetBytes("stderr_logfile_maxbytes", 50*1024*1024)), + p.config.GetInt("stderr_logfile_backups", 10), + p.createStderrLogEventEmitter()) + } + + capture_bytes = p.config.GetBytes("stderr_capture_maxbytes", 0) + + if capture_bytes > 0 { + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Info("capture stderr process communication") + p.StderrLog = logger.NewLogCaptureLogger(p.StdoutLog, + capture_bytes, + "PROCESS_COMMUNICATION_STDERR", + p.GetName(), + p.GetGroup()) + } + + p.cmd.Stderr = p.StderrLog + + } else if p.config.IsEventListener() { + in, err := p.cmd.StdoutPipe() + if err != nil { + log.WithFields(log.Fields{"eventListener": p.config.GetEventListenerName()}).Error("fail to get stdin") + return + } + out, err := p.cmd.StdinPipe() + if err != nil { + log.WithFields(log.Fields{"eventListener": p.config.GetEventListenerName()}).Error("fail to get stdout") + return + } + events := strings.Split(p.config.GetString("events", ""), ",") + for i, event := range events { + events[i] = strings.TrimSpace(event) + } + + p.registerEventListener(p.config.GetEventListenerName(), + events, + in, + out) + } +} + +func (p *Process) createStdoutLogEventEmitter() logger.LogEventEmitter { + if p.config.GetBytes("stdout_capture_maxbytes", 0) <= 0 && p.config.GetBool("stdout_events_enabled", false) { + return logger.NewStdoutLogEventEmitter(p.config.GetProgramName(), p.config.GetGroupName(), func() int { + return p.GetPid() + }) + } else { + return logger.NewNullLogEventEmitter() + } +} + +func (p *Process) createStderrLogEventEmitter() logger.LogEventEmitter { + if p.config.GetBytes("stderr_capture_maxbytes", 0) <= 0 && p.config.GetBool("stderr_events_enabled", false) { + return logger.NewStdoutLogEventEmitter(p.config.GetProgramName(), p.config.GetGroupName(), func() int { + return p.GetPid() + }) + } else { + return logger.NewNullLogEventEmitter() + } +} + +func (p *Process) registerEventListener(eventListenerName string, + _events []string, + stdin io.Reader, + stdout io.Writer) { + eventListener := events.NewEventListener(eventListenerName, + p.supervisor_id, + stdin, + stdout, + p.config.GetInt("buffer_size", 100)) + events.RegisterEventListener(eventListenerName, _events, eventListener) +} + +func (p *Process) unregisterEventListener(eventListenerName string) { + events.UnregisterEventListener(eventListenerName) +} + +func (p *Process) createLogger(logFile string, maxBytes int64, backups int, logEventEmitter logger.LogEventEmitter) logger.Logger { + var mylogger logger.Logger + mylogger = 
logger.NewNullLogger(logEventEmitter) + + if logFile == "/dev/stdout" { + mylogger = logger.NewStdoutLogger(logEventEmitter) + } else if logFile == "/dev/stderr" { + mylogger = logger.NewStderrLogger(logEventEmitter) + } else if logFile == "syslog" { + mylogger = logger.NewSysLogger(p.GetName(), logEventEmitter) + } else if len(logFile) > 0 { + mylogger = logger.NewFileLogger(logFile, maxBytes, backups, logEventEmitter, logger.NewNullLocker()) + } + return mylogger +} + +func (p *Process) setUser() error { + userName := p.config.GetString("user", "") + if len(userName) == 0 { + return nil + } + + //check if group is provided + pos := strings.Index(userName, ":") + groupName := "" + if pos != -1 { + groupName = userName[pos+1:] + userName = userName[0:pos] + } + u, err := user.Lookup(userName) + if err != nil { + return err + } + uid, err := strconv.ParseUint(u.Uid, 10, 32) + if err != nil { + return err + } + gid, err := strconv.ParseUint(u.Gid, 10, 32) + if err != nil && groupName == "" { + return err + } + if groupName != "" { + g, err := user.LookupGroup(groupName) + if err != nil { + return err + } + gid, err = strconv.ParseUint(g.Gid, 10, 32) + if err != nil { + return err + } + } + set_user_id(p.cmd.SysProcAttr, uint32(uid), uint32(gid)) + return nil +} + +//send signal to process to stop it +func (p *Process) Stop(wait bool) { + p.lock.RLock() + p.stopByUser = true + p.lock.RUnlock() + log.WithFields(log.Fields{"program": p.GetName()}).Info("stop the program") + sig, err := signals.ToSignal(p.config.GetString("stopsignal", "")) + if err == nil { + p.Signal(sig) + } + waitsecs := time.Duration(p.config.GetInt("stopwaitsecs", 10)) * time.Second + endTime := time.Now().Add(waitsecs) + go func() { + //wait at most "stopwaitsecs" seconds + for { + //if it already exits + if p.state != STARTING && p.state != RUNNING && p.state != STOPPING { + break + } + //if endTime reaches, raise signal syscall.SIGKILL + if endTime.Before(time.Now()) { + log.WithFields(log.Fields{"program": p.GetName()}).Info("force to kill the program") + p.Signal(syscall.SIGKILL) + break + } else { + time.Sleep(1 * time.Second) + } + } + }() + if wait { + for { + // if the program exits + if p.state != STARTING && p.state != RUNNING && p.state != STOPPING { + break + } + time.Sleep(1 * time.Second) + } + } +} + +func (p *Process) GetStatus() string { + if p.cmd.ProcessState.Exited() { + return p.cmd.ProcessState.String() + } + return "running" +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/process_manager.go b/vendor/github.com/rpoletaev/supervisord/process/process_manager.go new file mode 100644 index 000000000..c99e72481 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/process_manager.go @@ -0,0 +1,160 @@ +package process + +import ( + "strings" + "sync" + + "github.com/rpoletaev/supervisord/config" + log "github.com/sirupsen/logrus" +) + +type ProcessManager struct { + procs map[string]*Process + eventListeners map[string]*Process + lock sync.Mutex +} + +func NewProcessManager() *ProcessManager { + return &ProcessManager{procs: make(map[string]*Process), + eventListeners: make(map[string]*Process), + } +} + +func (pm *ProcessManager) CreateProcess(supervisor_id string, config *config.ConfigEntry) *Process { + pm.lock.Lock() + defer pm.lock.Unlock() + if config.IsProgram() { + return pm.createProgram(supervisor_id, config) + } else if config.IsEventListener() { + return pm.createEventListener(supervisor_id, config) + } else { + return nil + } +} + +func (pm *ProcessManager) 
StartAutoStartPrograms() { + pm.ForEachProcess(func(proc *Process) { + if proc.isAutoStart() { + proc.Start(false) + } + }) +} + +func (pm *ProcessManager) createProgram(supervisor_id string, config *config.ConfigEntry) *Process { + procName := config.GetProgramName() + + proc, ok := pm.procs[procName] + + if !ok { + proc = NewProcess(supervisor_id, config) + pm.procs[procName] = proc + } + log.Info("create process:", procName) + return proc +} + +func (pm *ProcessManager) createEventListener(supervisor_id string, config *config.ConfigEntry) *Process { + eventListenerName := config.GetEventListenerName() + + evtListener, ok := pm.eventListeners[eventListenerName] + + if !ok { + evtListener = NewProcess(supervisor_id, config) + pm.eventListeners[eventListenerName] = evtListener + } + log.Info("create event listener:", eventListenerName) + return evtListener +} + +func (pm *ProcessManager) Add(name string, proc *Process) { + pm.lock.Lock() + defer pm.lock.Unlock() + pm.procs[name] = proc + log.Info("add process:", name) +} + +// remove the process from the manager +// +// Arguments: +// name - the name of program +// +// Return the process or nil +func (pm *ProcessManager) Remove(name string) *Process { + pm.lock.Lock() + defer pm.lock.Unlock() + proc, _ := pm.procs[name] + delete(pm.procs, name) + log.Info("remove process:", name) + return proc +} + +// return process if found or nil if not found +func (pm *ProcessManager) Find(name string) *Process { + pm.lock.Lock() + defer pm.lock.Unlock() + proc, ok := pm.procs[name] + if ok { + log.Debug("succeed to find process:", name) + } else { + //remove group field if it is included + if pos := strings.Index(name, ":"); pos != -1 { + proc, ok = pm.procs[name[pos+1:]] + } + if !ok { + log.Info("fail to find process:", name) + } + } + return proc +} + +// clear all the processes +func (pm *ProcessManager) Clear() { + pm.lock.Lock() + defer pm.lock.Unlock() + pm.procs = make(map[string]*Process) +} + +func (pm *ProcessManager) ForEachProcess(procFunc func(p *Process)) { + pm.lock.Lock() + defer pm.lock.Unlock() + + procs := pm.getAllProcess() + for _, proc := range procs { + procFunc(proc) + } +} + +func (pm *ProcessManager) getAllProcess() []*Process { + tmpProcs := make([]*Process, 0) + for _, proc := range pm.procs { + tmpProcs = append(tmpProcs, proc) + } + return sortProcess(tmpProcs) +} + +func (pm *ProcessManager) StopAllProcesses() { + pm.ForEachProcess(func(proc *Process) { + proc.Stop(true) + }) +} + +func sortProcess(procs []*Process) []*Process { + prog_configs := make([]*config.ConfigEntry, 0) + for _, proc := range procs { + if proc.config.IsProgram() { + prog_configs = append(prog_configs, proc.config) + } + } + + result := make([]*Process, 0) + p := config.NewProcessSorter() + for _, config := range p.SortProgram(prog_configs) { + for _, proc := range procs { + if proc.config == config { + result = append(result, proc) + } + } + } + + return result +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/set_user_id.go b/vendor/github.com/rpoletaev/supervisord/process/set_user_id.go new file mode 100644 index 000000000..ec28fb5c4 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/set_user_id.go @@ -0,0 +1,11 @@ +// +build !windows + +package process + +import ( + "syscall" +) + +func set_user_id(procAttr *syscall.SysProcAttr, uid uint32, gid uint32) { + procAttr.Credential = &syscall.Credential{Uid: uid, Gid: gid, NoSetGroups: true} +} diff --git 
a/vendor/github.com/rpoletaev/supervisord/process/set_user_id_windows.go b/vendor/github.com/rpoletaev/supervisord/process/set_user_id_windows.go new file mode 100644 index 000000000..d114e4068 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/set_user_id_windows.go @@ -0,0 +1,11 @@ +// +build windows + +package process + +import ( + "syscall" +) + +func set_user_id(_ *syscall.SysProcAttr, _ uint32, _ uint32) { + +} diff --git a/vendor/github.com/rpoletaev/supervisord/signals/signal.go b/vendor/github.com/rpoletaev/supervisord/signals/signal.go new file mode 100644 index 000000000..16483a1d6 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/signals/signal.go @@ -0,0 +1,34 @@ +// +build !windows + +package signals + +import ( + "os" + "syscall" +) + +//convert a signal name to signal +func ToSignal(signalName string) (os.Signal, error) { + if signalName == "HUP" { + return syscall.SIGHUP, nil + } else if signalName == "INT" { + return syscall.SIGINT, nil + } else if signalName == "QUIT" { + return syscall.SIGQUIT, nil + } else if signalName == "KILL" { + return syscall.SIGKILL, nil + } else if signalName == "USR1" { + return syscall.SIGUSR1, nil + } else if signalName == "USR2" { + return syscall.SIGUSR2, nil + } else { + return syscall.SIGTERM, nil + + } + +} + +func Kill(process *os.Process, sig os.Signal) error { + localSig := sig.(syscall.Signal) + return syscall.Kill(-process.Pid, localSig) +} diff --git a/vendor/github.com/rpoletaev/supervisord/signals/signal_windows.go b/vendor/github.com/rpoletaev/supervisord/signals/signal_windows.go new file mode 100644 index 000000000..5c3e539ac --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/signals/signal_windows.go @@ -0,0 +1,46 @@ +// +build windows + +package signals + +import ( + "errors" + "fmt" + log "github.com/sirupsen/logrus" + "os" + "os/exec" + "syscall" +) + +//convert a signal name to signal +func ToSignal(signalName string) (os.Signal, error) { + if signalName == "HUP" { + return syscall.SIGHUP, nil + } else if signalName == "INT" { + return syscall.SIGINT, nil + } else if signalName == "QUIT" { + return syscall.SIGQUIT, nil + } else if signalName == "KILL" { + return syscall.SIGKILL, nil + } else if signalName == "USR1" { + log.Warn("signal USR1 is not supported in windows") + return nil, errors.New("signal USR1 is not supported in windows") + } else if signalName == "USR2" { + log.Warn("signal USR2 is not supported in windows") + return nil, errors.New("signal USR2 is not supported in windows") + } else { + return syscall.SIGTERM, nil + + } + +} + +func Kill(process *os.Process, sig os.Signal) error { + //Signal command can't kill children processes, call taskkill command to kill them + cmd := exec.Command("taskkill", "/F", "/T", "/PID", fmt.Sprintf("%d", process.Pid)) + err := cmd.Start() + if err == nil { + return cmd.Wait() + } + //if fail to find taskkill, fallback to normal signal + return process.Signal(sig) +} diff --git a/vendor/github.com/rpoletaev/supervisord/supervisor.go b/vendor/github.com/rpoletaev/supervisord/supervisor.go new file mode 100644 index 000000000..cc47db0f0 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/supervisor.go @@ -0,0 +1,586 @@ +package main + +import ( + "fmt" + "net/http" + "os" + "strings" + "sync" + "time" + + "github.com/rpoletaev/supervisord/config" + "github.com/rpoletaev/supervisord/events" + "github.com/rpoletaev/supervisord/faults" + "github.com/rpoletaev/supervisord/logger" + "github.com/rpoletaev/supervisord/process" + 
"github.com/rpoletaev/supervisord/signals" + "github.com/rpoletaev/supervisord/types" + "github.com/rpoletaev/supervisord/util" + + log "github.com/sirupsen/logrus" +) + +const ( + SUPERVISOR_VERSION = "3.0" +) + +type Supervisor struct { + config *config.Config + procMgr *process.ProcessManager + xmlRPC *XmlRPC + logger logger.Logger + restarting bool +} + +type StartProcessArgs struct { + Name string + Wait bool `default:"true"` +} + +type ProcessStdin struct { + Name string + Chars string +} + +type RemoteCommEvent struct { + Type string + Data string +} + +type StateInfo struct { + Statecode int `xml:"statecode"` + Statename string `xml:"statename"` +} + +type RpcTaskResult struct { + Name string `xml:"name"` + Group string `xml:"group"` + Status int `xml:"status"` + Description string `xml:"description"` +} + +type LogReadInfo struct { + Offset int + Length int +} + +type ProcessLogReadInfo struct { + Name string + Offset int + Length int +} + +type ProcessTailLog struct { + LogData string + Offset int64 + Overflow bool +} + +func NewSupervisor(configFile string) *Supervisor { + return &Supervisor{config: config.NewConfig(configFile), + procMgr: process.NewProcessManager(), + xmlRPC: NewXmlRPC(), + restarting: false} +} + +func (s *Supervisor) GetConfig() *config.Config { + return s.config +} + +func (s *Supervisor) GetVersion(r *http.Request, args *struct{}, reply *struct{ Version string }) error { + reply.Version = SUPERVISOR_VERSION + return nil +} + +func (s *Supervisor) GetSupervisorVersion(r *http.Request, args *struct{}, reply *struct{ Version string }) error { + reply.Version = SUPERVISOR_VERSION + return nil +} + +func (s *Supervisor) GetIdentification(r *http.Request, args *struct{}, reply *struct{ Id string }) error { + reply.Id = s.GetSupervisorId() + return nil +} + +func (s *Supervisor) GetSupervisorId() string { + entry, ok := s.config.GetSupervisord() + if ok { + return entry.GetString("identifier", "supervisor") + } else { + return "supervisor" + } +} + +func (s *Supervisor) GetState(r *http.Request, args *struct{}, reply *struct{ StateInfo StateInfo }) error { + //statecode statename + //======================= + // 2 FATAL + // 1 RUNNING + // 0 RESTARTING + // -1 SHUTDOWN + log.Debug("Get state") + reply.StateInfo.Statecode = 1 + reply.StateInfo.Statename = "RUNNING" + return nil +} + +func (s *Supervisor) GetPID(r *http.Request, args *struct{}, reply *struct{ Pid int }) error { + reply.Pid = os.Getpid() + return nil +} + +func (s *Supervisor) ReadLog(r *http.Request, args *LogReadInfo, reply *struct{ Log string }) error { + data, err := s.logger.ReadLog(int64(args.Offset), int64(args.Length)) + reply.Log = data + return err +} + +func (s *Supervisor) ClearLog(r *http.Request, args *struct{}, reply *struct{ Ret bool }) error { + err := s.logger.ClearAllLogFile() + reply.Ret = err == nil + return err +} + +func (s *Supervisor) Shutdown(r *http.Request, args *struct{}, reply *struct{ Ret bool }) error { + reply.Ret = true + log.Info("received rpc request to stop all processes & exit") + s.procMgr.StopAllProcesses() + go func() { + time.Sleep(1 * time.Second) + os.Exit(0) + }() + return nil +} + +func (s *Supervisor) Restart(r *http.Request, args *struct{}, reply *struct{ Ret bool }) error { + log.Info("Receive instruction to restart") + s.restarting = true + reply.Ret = true + return nil +} + +func (s *Supervisor) IsRestarting() bool { + return s.restarting +} + +func getProcessInfo(proc *process.Process) *types.ProcessInfo { + return &types.ProcessInfo{Name: 
proc.GetName(), + Group: proc.GetGroup(), + Description: proc.GetDescription(), + Start: int(proc.GetStartTime().Unix()), + Stop: int(proc.GetStopTime().Unix()), + Now: int(time.Now().Unix()), + State: int(proc.GetState()), + Statename: proc.GetState().String(), + Spawnerr: "", + Exitstatus: proc.GetExitstatus(), + Logfile: proc.GetStdoutLogfile(), + Stdout_logfile: proc.GetStdoutLogfile(), + Stderr_logfile: proc.GetStderrLogfile(), + Pid: proc.GetPid()} + +} + +func (s *Supervisor) GetAllProcessInfo(r *http.Request, args *struct{}, reply *struct{ AllProcessInfo []types.ProcessInfo }) error { + reply.AllProcessInfo = make([]types.ProcessInfo, 0) + s.procMgr.ForEachProcess(func(proc *process.Process) { + procInfo := getProcessInfo(proc) + reply.AllProcessInfo = append(reply.AllProcessInfo, *procInfo) + }) + + return nil +} + +func (s *Supervisor) GetProcessInfo(r *http.Request, args *struct{ Name string }, reply *struct{ ProcInfo types.ProcessInfo }) error { + log.Debug("Get process info of: ", args.Name) + proc := s.procMgr.Find(args.Name) + if proc == nil { + return fmt.Errorf("no process named %s", args.Name) + } + + reply.ProcInfo = *getProcessInfo(proc) + return nil +} + +func (s *Supervisor) StartProcess(r *http.Request, args *StartProcessArgs, reply *struct{ Success bool }) error { + proc := s.procMgr.Find(args.Name) + + if proc == nil { + return fmt.Errorf("fail to find process %s", args.Name) + } + proc.Start(args.Wait) + reply.Success = true + return nil +} + +func (s *Supervisor) StartAllProcesses(r *http.Request, args *struct { + Wait bool `default:"true"` +}, reply *struct{ RpcTaskResults []RpcTaskResult }) error { + s.procMgr.ForEachProcess(func(proc *process.Process) { + proc.Start(args.Wait) + processInfo := *getProcessInfo(proc) + reply.RpcTaskResults = append(reply.RpcTaskResults, RpcTaskResult{ + Name: processInfo.Name, + Group: processInfo.Group, + Status: faults.SUCCESS, + Description: "OK", + }) + }) + return nil +} + +func (s *Supervisor) StartProcessGroup(r *http.Request, args *StartProcessArgs, reply *struct{ AllProcessInfo []types.ProcessInfo }) error { + log.WithFields(log.Fields{"group": args.Name}).Info("start process group") + s.procMgr.ForEachProcess(func(proc *process.Process) { + if proc.GetGroup() == args.Name { + proc.Start(args.Wait) + reply.AllProcessInfo = append(reply.AllProcessInfo, *getProcessInfo(proc)) + } + }) + + return nil +} + +func (s *Supervisor) StopProcess(r *http.Request, args *StartProcessArgs, reply *struct{ Success bool }) error { + log.WithFields(log.Fields{"program": args.Name}).Info("stop process") + proc := s.procMgr.Find(args.Name) + if proc == nil { + return fmt.Errorf("fail to find process %s", args.Name) + } + proc.Stop(args.Wait) + reply.Success = true + return nil +} + +func (s *Supervisor) StopProcessGroup(r *http.Request, args *StartProcessArgs, reply *struct{ AllProcessInfo []types.ProcessInfo }) error { + log.WithFields(log.Fields{"group": args.Name}).Info("stop process group") + s.procMgr.ForEachProcess(func(proc *process.Process) { + if proc.GetGroup() == args.Name { + proc.Stop(args.Wait) + reply.AllProcessInfo = append(reply.AllProcessInfo, *getProcessInfo(proc)) + } + }) + return nil +} + +func (s *Supervisor) StopAllProcesses(r *http.Request, args *struct { + Wait bool `default:"true"` +}, reply *struct{ RpcTaskResults []RpcTaskResult }) error { + s.procMgr.ForEachProcess(func(proc *process.Process) { + proc.Stop(args.Wait) + processInfo := *getProcessInfo(proc) + reply.RpcTaskResults = append(reply.RpcTaskResults, 
RpcTaskResult{ + Name: processInfo.Name, + Group: processInfo.Group, + Status: faults.SUCCESS, + Description: "OK", + }) + }) + return nil +} + +func (s *Supervisor) SignalProcess(r *http.Request, args *types.ProcessSignal, reply *struct{ Success bool }) error { + proc := s.procMgr.Find(args.Name) + if proc == nil { + reply.Success = false + return fmt.Errorf("No process named %s", args.Name) + } + sig, err := signals.ToSignal(args.Signal) + if err == nil { + proc.Signal(sig) + } + reply.Success = true + return nil +} + +func (s *Supervisor) SignalProcessGroup(r *http.Request, args *types.ProcessSignal, reply *struct{ AllProcessInfo []types.ProcessInfo }) error { + s.procMgr.ForEachProcess(func(proc *process.Process) { + if proc.GetGroup() == args.Name { + sig, err := signals.ToSignal(args.Signal) + if err == nil { + proc.Signal(sig) + } + } + }) + + s.procMgr.ForEachProcess(func(proc *process.Process) { + if proc.GetGroup() == args.Name { + reply.AllProcessInfo = append(reply.AllProcessInfo, *getProcessInfo(proc)) + } + }) + return nil +} + +func (s *Supervisor) SignalAllProcesses(r *http.Request, args *types.ProcessSignal, reply *struct{ AllProcessInfo []types.ProcessInfo }) error { + s.procMgr.ForEachProcess(func(proc *process.Process) { + sig, err := signals.ToSignal(args.Signal) + if err == nil { + proc.Signal(sig) + } + }) + s.procMgr.ForEachProcess(func(proc *process.Process) { + reply.AllProcessInfo = append(reply.AllProcessInfo, *getProcessInfo(proc)) + }) + return nil +} + +func (s *Supervisor) SendProcessStdin(r *http.Request, args *ProcessStdin, reply *struct{ Success bool }) error { + proc := s.procMgr.Find(args.Name) + if proc == nil { + log.WithFields(log.Fields{"program": args.Name}).Error("program does not exist") + return fmt.Errorf("NOT_RUNNING") + } + if proc.GetState() != process.RUNNING { + log.WithFields(log.Fields{"program": args.Name}).Error("program does not run") + return fmt.Errorf("NOT_RUNNING") + } + err := proc.SendProcessStdin(args.Chars) + if err == nil { + reply.Success = true + } else { + reply.Success = false + } + return err +} + +func (s *Supervisor) SendRemoteCommEvent(r *http.Request, args *RemoteCommEvent, reply *struct{ Success bool }) error { + events.EmitEvent(events.NewRemoteCommunicationEvent(args.Type, args.Data)) + reply.Success = true + return nil +} + +func (s *Supervisor) Reload() (error, []string, []string, []string) { + //get the previous loaded programs + prevPrograms := s.config.GetProgramNames() + prevProgGroup := s.config.ProgramGroup.Clone() + + loaded_programs, err := s.config.Load() + + if err == nil { + s.setSupervisordInfo() + s.startEventListeners() + s.createPrograms(prevPrograms) + s.startHttpServer() + s.startAutoStartPrograms() + } + removedPrograms := util.Sub(prevPrograms, loaded_programs) + for _, removedProg := range removedPrograms { + log.WithFields(log.Fields{"program": removedProg}).Info("the program is removed and will be stopped") + s.config.RemoveProgram(removedProg) + proc := s.procMgr.Remove(removedProg) + if proc != nil { + proc.Stop(false) + } + + } + addedGroup, changedGroup, removedGroup := s.config.ProgramGroup.Sub(prevProgGroup) + return err, addedGroup, changedGroup, removedGroup + +} + +func (s *Supervisor) WaitForExit() { + for { + if s.IsRestarting() { + s.procMgr.StopAllProcesses() + break + } + time.Sleep(10 * time.Second) + } +} + +func (s *Supervisor) createPrograms(prevPrograms []string) { + + programs := s.config.GetProgramNames() + for _, entry := range s.config.GetPrograms() { + 
s.procMgr.CreateProcess(s.GetSupervisorId(), entry) + } + removedPrograms := util.Sub(prevPrograms, programs) + for _, p := range removedPrograms { + s.procMgr.Remove(p) + } +} + +func (s *Supervisor) startAutoStartPrograms() { + s.procMgr.StartAutoStartPrograms() +} + +func (s *Supervisor) startEventListeners() { + eventListeners := s.config.GetEventListeners() + for _, entry := range eventListeners { + s.procMgr.CreateProcess(s.GetSupervisorId(), entry) + } + + if len(eventListeners) > 0 { + time.Sleep(1 * time.Second) + } +} + +func (s *Supervisor) startHttpServer() { + httpServerConfig, ok := s.config.GetInetHttpServer() + if ok { + addr := httpServerConfig.GetString("port", "") + if addr != "" { + go s.xmlRPC.StartInetHttpServer(httpServerConfig.GetString("username", ""), httpServerConfig.GetString("password", ""), addr, s) + } + } + + httpServerConfig, ok = s.config.GetUnixHttpServer() + if ok { + env := config.NewStringExpression("here", s.config.GetConfigFileDir()) + sockFile, err := env.Eval(httpServerConfig.GetString("file", "/tmp/supervisord.sock")) + if err == nil { + go s.xmlRPC.StartUnixHttpServer(httpServerConfig.GetString("username", ""), httpServerConfig.GetString("password", ""), sockFile, s) + } + } + +} + +func (s *Supervisor) setSupervisordInfo() { + supervisordConf, ok := s.config.GetSupervisord() + if ok { + //set supervisord log + + env := config.NewStringExpression("here", s.config.GetConfigFileDir()) + logFile, err := env.Eval(supervisordConf.GetString("logfile", "supervisord.log")) + logFile, err = process.Path_expand(logFile) + logEventEmitter := logger.NewNullLogEventEmitter() + s.logger = logger.NewNullLogger(logEventEmitter) + if err == nil { + logfile_maxbytes := int64(supervisordConf.GetBytes("logfile_maxbytes", 50*1024*1024)) + logfile_backups := supervisordConf.GetInt("logfile_backups", 10) + loglevel := supervisordConf.GetString("loglevel", "info") + switch logFile { + case "/dev/null": + s.logger = logger.NewNullLogger(logEventEmitter) + case "syslog": + s.logger = logger.NewSysLogger("supervisord", logEventEmitter) + case "/dev/stdout": + s.logger = logger.NewStdoutLogger(logEventEmitter) + case "/dev/stderr": + s.logger = logger.NewStderrLogger(logEventEmitter) + case "": + s.logger = logger.NewNullLogger(logEventEmitter) + default: + s.logger = logger.NewFileLogger(logFile, logfile_maxbytes, logfile_backups, logEventEmitter, &sync.Mutex{}) + } + log.SetOutput(s.logger) + log.SetLevel(toLogLevel(loglevel)) + log.SetFormatter(&log.TextFormatter{DisableColors: true}) + } + //set the pid + pidfile, err := env.Eval(supervisordConf.GetString("pidfile", "supervisord.pid")) + if err == nil { + f, err := os.Create(pidfile) + if err == nil { + fmt.Fprintf(f, "%d", os.Getpid()) + f.Close() + } + } + } +} + +func toLogLevel(level string) log.Level { + switch strings.ToLower(level) { + case "critical": + return log.FatalLevel + case "error": + return log.ErrorLevel + case "warn": + return log.WarnLevel + case "info": + return log.InfoLevel + default: + return log.DebugLevel + } +} + +func (s *Supervisor) ReloadConfig(r *http.Request, args *struct{}, reply *types.ReloadConfigResult) error { + log.Info("start to reload config") + err, addedGroup, changedGroup, removedGroup := s.Reload() + if len(addedGroup) > 0 { + log.WithFields(log.Fields{"groups": strings.Join(addedGroup, ",")}).Info("added groups") + } + + if len(changedGroup) > 0 { + log.WithFields(log.Fields{"groups": strings.Join(changedGroup, ",")}).Info("changed groups") + } + + if len(removedGroup) > 0 { 
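+ //this block only logs the names of the groups that disappeared from the
+ //configuration; stopping their programs is handled inside Reload()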
+ log.WithFields(log.Fields{"groups": strings.Join(removedGroup, ",")}).Info("removed groups") + } + reply.AddedGroup = addedGroup + reply.ChangedGroup = changedGroup + reply.RemovedGroup = removedGroup + return err +} + +func (s *Supervisor) AddProcessGroup(r *http.Request, args *struct{ Name string }, reply *struct{ Success bool }) error { + reply.Success = false + return nil +} + +func (s *Supervisor) RemoveProcessGroup(r *http.Request, args *struct{ Name string }, reply *struct{ Success bool }) error { + reply.Success = false + return nil +} + +func (s *Supervisor) ReadProcessStdoutLog(r *http.Request, args *ProcessLogReadInfo, reply *struct{ LogData string }) error { + proc := s.procMgr.Find(args.Name) + if proc == nil { + return fmt.Errorf("No such process %s", args.Name) + } + var err error + reply.LogData, err = proc.StdoutLog.ReadLog(int64(args.Offset), int64(args.Length)) + return err +} + +func (s *Supervisor) ReadProcessStderrLog(r *http.Request, args *ProcessLogReadInfo, reply *struct{ LogData string }) error { + proc := s.procMgr.Find(args.Name) + if proc == nil { + return fmt.Errorf("No such process %s", args.Name) + } + var err error + reply.LogData, err = proc.StderrLog.ReadLog(int64(args.Offset), int64(args.Length)) + return err +} + +func (s *Supervisor) TailProcessStdoutLog(r *http.Request, args *ProcessLogReadInfo, reply *ProcessTailLog) error { + proc := s.procMgr.Find(args.Name) + if proc == nil { + return fmt.Errorf("No such process %s", args.Name) + } + var err error + reply.LogData, reply.Offset, reply.Overflow, err = proc.StdoutLog.ReadTailLog(int64(args.Offset), int64(args.Length)) + return err +} + +func (s *Supervisor) ClearProcessLogs(r *http.Request, args *struct{ Name string }, reply *struct{ Success bool }) error { + proc := s.procMgr.Find(args.Name) + if proc == nil { + return fmt.Errorf("No such process %s", args.Name) + } + err1 := proc.StdoutLog.ClearAllLogFile() + err2 := proc.StderrLog.ClearAllLogFile() + reply.Success = err1 == nil && err2 == nil + if err1 != nil { + return err1 + } + return err2 +} + +func (s *Supervisor) ClearAllProcessLogs(r *http.Request, args *struct{}, reply *struct{ RpcTaskResults []RpcTaskResult }) error { + + s.procMgr.ForEachProcess(func(proc *process.Process) { + proc.StdoutLog.ClearAllLogFile() + proc.StderrLog.ClearAllLogFile() + procInfo := getProcessInfo(proc) + reply.RpcTaskResults = append(reply.RpcTaskResults, RpcTaskResult{ + Name: procInfo.Name, + Group: procInfo.Group, + Status: faults.SUCCESS, + Description: "OK", + }) + }) + + return nil +} diff --git a/vendor/github.com/rpoletaev/supervisord/util/util.go b/vendor/github.com/rpoletaev/supervisord/util/util.go new file mode 100644 index 000000000..e103b0ecc --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/util/util.go @@ -0,0 +1,64 @@ +package util + +// return true if the elem is in the array arr +func InArray(elem interface{}, arr []interface{}) bool { + for _, e := range arr { + if e == elem { + return true + } + } + return false +} + +//return true if the array arr1 contains all elements of array arr2 +func HasAllElements(arr1 []interface{}, arr2 []interface{}) bool { + for _, e2 := range arr2 { + if !InArray(e2, arr1) { + return false + } + } + return true +} + +func StringArrayToInterfacArray(arr []string) []interface{} { + result := make([]interface{}, 0) + for _, s := range arr { + result = append(result, s) + } + return result +} + +func Sub(arr_1 []string, arr_2 []string) []string { + result := make([]string, 0) + for _, s := range arr_1 { 
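+ //include s in the result only when it does not occur anywhere in arr_2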
+ exist := false + for _, s2 := range arr_2 { + if s == s2 { + exist = true + } + } + if !exist { + result = append(result, s) + } + } + return result +} + +func IsSameStringArray(arr_1 []string, arr_2 []string) bool { + if len(arr_1) != len(arr_2) { + return false + } + for _, s := range arr_1 { + exist := false + for _, s2 := range arr_2 { + if s2 == s { + exist = true + break + } + } + if !exist { + return false + } + } + return true +} diff --git a/vendor/github.com/rpoletaev/supervisord/version.go b/vendor/github.com/rpoletaev/supervisord/version.go new file mode 100644 index 000000000..12dc42aee --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/version.go @@ -0,0 +1,24 @@ +package main + +import ( + "fmt" +) + +const VERSION = "1.0.008" + +type VersionCommand struct { +} + +var versionCommand VersionCommand + +func (v VersionCommand) Execute(args []string) error { + fmt.Println(VERSION) + return nil +} + +func init() { + parser.AddCommand("version", + "show the version of supervisor", + "display the supervisor version", + &versionCommand) +} diff --git a/vendor/github.com/rpoletaev/supervisord/xmlrpc.go b/vendor/github.com/rpoletaev/supervisord/xmlrpc.go new file mode 100644 index 000000000..e4381000f --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/xmlrpc.go @@ -0,0 +1,136 @@ +package main + +import ( + "crypto/sha1" + "encoding/hex" + "io" + "net" + "net/http" + "os" + "strings" + + "github.com/gorilla/rpc" + "github.com/ochinchina/gorilla-xmlrpc/xml" + log "github.com/sirupsen/logrus" +) + +type XmlRPC struct { + listeners map[string]net.Listener + // true if RPC is started + started bool +} + +type httpBasicAuth struct { + user string + password string + handler http.Handler +} + +func NewHttpBasicAuth(user string, password string, handler http.Handler) *httpBasicAuth { + if user != "" && password != "" { + log.Debug("require authentication") + } + return &httpBasicAuth{user: user, password: password, handler: handler} +} + +func (h *httpBasicAuth) ServeHTTP(w http.ResponseWriter, r *http.Request) { + if h.user == "" || h.password == "" { + log.Debug("no auth required") + h.handler.ServeHTTP(w, r) + return + } + username, password, ok := r.BasicAuth() + if ok && username == h.user { + if strings.HasPrefix(h.password, "{SHA}") { + log.Debug("auth with SHA") + hash := sha1.New() + io.WriteString(hash, password) + if hex.EncodeToString(hash.Sum(nil)) == h.password[5:] { + h.handler.ServeHTTP(w, r) + return + } + } else if password == h.password { + log.Debug("Auth with normal password") + h.handler.ServeHTTP(w, r) + return + } + } + w.Header().Set("WWW-Authenticate", "Basic realm=\"supervisor\"") + w.WriteHeader(401) +} + +func NewXmlRPC() *XmlRPC { + return &XmlRPC{listeners: make(map[string]net.Listener), started: false} +} + +func (p *XmlRPC) Stop() { + for _, listener := range p.listeners { + listener.Close() + } +} + +func (p *XmlRPC) StartUnixHttpServer(user string, password string, listenAddr string, s *Supervisor) { + os.Remove(listenAddr) + p.startHttpServer(user, password, "unix", listenAddr, s) +} + +func (p *XmlRPC) StartInetHttpServer(user string, password string, listenAddr string, s *Supervisor) { + p.startHttpServer(user, password, "tcp", listenAddr, s) +} + +func (p *XmlRPC) startHttpServer(user string, password string, protocol string, listenAddr string, s *Supervisor) { + if p.started { + return + } + p.started = true + mux := http.NewServeMux() + mux.Handle("/RPC2", NewHttpBasicAuth(user, password, p.createRPCServer(s))) + listener, err := 
net.Listen(protocol, listenAddr) + if err == nil { + p.listeners[protocol] = listener + http.Serve(listener, mux) + } else { + log.WithFields(log.Fields{"addr": listenAddr, "protocol": protocol}).Error("fail to listen on address") + } + +} +func (p *XmlRPC) createRPCServer(s *Supervisor) *rpc.Server { + RPC := rpc.NewServer() + xmlrpcCodec := xml.NewCodec() + RPC.RegisterCodec(xmlrpcCodec, "text/xml") + RPC.RegisterService(s, "") + + xmlrpcCodec.RegisterAlias("supervisor.getVersion", "Supervisor.GetVersion") + xmlrpcCodec.RegisterAlias("supervisor.getAPIVersion", "Supervisor.GetVersion") + xmlrpcCodec.RegisterAlias("supervisor.getIdentification", "Supervisor.GetIdentification") + xmlrpcCodec.RegisterAlias("supervisor.getState", "Supervisor.GetState") + xmlrpcCodec.RegisterAlias("supervisor.getPID", "Supervisor.GetPID") + xmlrpcCodec.RegisterAlias("supervisor.readLog", "Supervisor.ReadLog") + xmlrpcCodec.RegisterAlias("supervisor.clearLog", "Supervisor.ClearLog") + xmlrpcCodec.RegisterAlias("supervisor.shutdown", "Supervisor.Shutdown") + xmlrpcCodec.RegisterAlias("supervisor.restart", "Supervisor.Restart") + xmlrpcCodec.RegisterAlias("supervisor.getProcessInfo", "Supervisor.GetProcessInfo") + xmlrpcCodec.RegisterAlias("supervisor.getSupervisorVersion", "Supervisor.GetVersion") + xmlrpcCodec.RegisterAlias("supervisor.getAllProcessInfo", "Supervisor.GetAllProcessInfo") + xmlrpcCodec.RegisterAlias("supervisor.startProcess", "Supervisor.StartProcess") + xmlrpcCodec.RegisterAlias("supervisor.startAllProcesses", "Supervisor.StartAllProcesses") + xmlrpcCodec.RegisterAlias("supervisor.startProcessGroup", "Supervisor.StartProcessGroup") + xmlrpcCodec.RegisterAlias("supervisor.stopProcess", "Supervisor.StopProcess") + xmlrpcCodec.RegisterAlias("supervisor.stopProcessGroup", "Supervisor.StopProcessGroup") + xmlrpcCodec.RegisterAlias("supervisor.stopAllProcesses", "Supervisor.StopAllProcesses") + xmlrpcCodec.RegisterAlias("supervisor.signalProcess", "Supervisor.SignalProcess") + xmlrpcCodec.RegisterAlias("supervisor.signalProcessGroup", "Supervisor.SignalProcessGroup") + xmlrpcCodec.RegisterAlias("supervisor.signalAllProcesses", "Supervisor.SignalAllProcesses") + xmlrpcCodec.RegisterAlias("supervisor.sendProcessStdin", "Supervisor.SendProcessStdin") + xmlrpcCodec.RegisterAlias("supervisor.sendRemoteCommEvent", "Supervisor.SendRemoteCommEvent") + xmlrpcCodec.RegisterAlias("supervisor.reloadConfig", "Supervisor.ReloadConfig") + xmlrpcCodec.RegisterAlias("supervisor.addProcessGroup", "Supervisor.AddProcessGroup") + xmlrpcCodec.RegisterAlias("supervisor.removeProcessGroup", "Supervisor.RemoveProcessGroup") + xmlrpcCodec.RegisterAlias("supervisor.readProcessStdoutLog", "Supervisor.ReadProcessStdoutLog") + xmlrpcCodec.RegisterAlias("supervisor.readProcessStderrLog", "Supervisor.ReadProcessStderrLog") + xmlrpcCodec.RegisterAlias("supervisor.tailProcessStdoutLog", "Supervisor.TailProcessStdoutLog") + xmlrpcCodec.RegisterAlias("supervisor.tailProcessStderrLog", "Supervisor.TailProcessStderrLog") + xmlrpcCodec.RegisterAlias("supervisor.clearProcessLogs", "Supervisor.ClearProcessLogs") + xmlrpcCodec.RegisterAlias("supervisor.clearAllProcessLogs", "Supervisor.ClearAllProcessLogs") + return RPC +} diff --git a/vendor/vendor.json b/vendor/vendor.json index a8bbc84e1..1565ffc5c 100644 --- a/vendor/vendor.json +++ b/vendor/vendor.json @@ -75,6 +75,12 @@ "revision": "1e59b77b52bf8e4b449a57e6f79f21226d571845", "revisionTime": "2017-11-13T18:07:20Z" }, + { + "checksumSHA1": "UcxIsr0IzcSKDqGVnK1HsxnSSVU=", + "path": 
"github.com/gorilla/rpc", + "revision": "22c016f3df3febe0c1f6727598b6389507e03a18", + "revisionTime": "2016-09-23T22:06:01Z" + }, { "checksumSHA1": "SGSXlSU1TFtg5aTlVA9v4Ka86lU=", "origin": "github.com/centrifugal/centrifugo/vendor/github.com/gorilla/securecookie", @@ -220,6 +226,18 @@ "revision": "179d4d0c4d8d407a32af483c2354df1d2c91e6c3", "revisionTime": "2013-12-21T20:05:32Z" }, + { + "checksumSHA1": "8p5uEwUdi9/xh/XpF1ULlZ00k2w=", + "path": "github.com/ochinchina/go-ini", + "revision": "4dcbd5514a9220bb68c8d45eabf79b25479ea2d1", + "revisionTime": "2018-03-10T02:35:15Z" + }, + { + "checksumSHA1": "E3jV3ILrMbnBTbR4GXkRf7rXc5Q=", + "path": "github.com/ochinchina/gorilla-xmlrpc/xml", + "revision": "ecf2fe693a2ca10ce68d2c7d4c559f1a57d2c845", + "revisionTime": "2017-10-12T05:53:24Z" + }, { "checksumSHA1": "BoXdUBWB8UnSlFlbnuTQaPqfCGk=", "path": "github.com/op/go-logging", @@ -257,6 +275,66 @@ "revision": "2315d5715e36303a941d907f038da7f7c44c773b", "revisionTime": "2017-11-01T20:10:47Z" }, + { + "checksumSHA1": "sESN0ZfX2JIOB6pcxXuw5yXo4+E=", + "path": "github.com/rogpeppe/go-charset/charset", + "revision": "e9ff06f347d3f5d0013d59ed83754f0e88de10d4", + "revisionTime": "2015-06-15T17:25:32Z" + }, + { + "checksumSHA1": "MyUzunzysfhOlm/yJfV89oC+mO4=", + "path": "github.com/rogpeppe/go-charset/data", + "revision": "e9ff06f347d3f5d0013d59ed83754f0e88de10d4", + "revisionTime": "2015-06-15T17:25:32Z" + }, + { + "checksumSHA1": "4yHilxHn118WAI/J+/uQd+lVky0=", + "path": "github.com/rpoletaev/supervisord", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "UHnNA1Cx5MtPY68fBrM/ank3bUY=", + "path": "github.com/rpoletaev/supervisord/config", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "7079G1HzEBpr9xCFA7S7OYtw3F4=", + "path": "github.com/rpoletaev/supervisord/events", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "A+d9lhIE1xvY1fdypT5GdO9C3wY=", + "path": "github.com/rpoletaev/supervisord/faults", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "YLXHdj1snMGqXXiFNQciKtUqTgM=", + "path": "github.com/rpoletaev/supervisord/logger", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "HHkvC6JQjF9hZ4RblCEEKV6k3Is=", + "path": "github.com/rpoletaev/supervisord/process", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "TKy4P7QqrqVfiKwGbXBkP5XNPY4=", + "path": "github.com/rpoletaev/supervisord/signals", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "F4x0/vDYzuOYgOMp3NlFbbTX1Vg=", + "path": "github.com/rpoletaev/supervisord/util", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, { "checksumSHA1": "eDQ6f1EsNf+frcRO/9XukSEchm8=", "path": "github.com/satori/go.uuid", From c13fd44ec482ca2a789983b78c42df83500a1ddc Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Wed, 20 Jun 2018 09:56:03 +0300 Subject: [PATCH 101/169] change founder account to user account on adding role_participant --- packages/migration/roles_data.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/migration/roles_data.go 
b/packages/migration/roles_data.go index 496b078cb..45093d690 100644 --- a/packages/migration/roles_data.go +++ b/packages/migration/roles_data.go @@ -7,9 +7,9 @@ INSERT INTO "%[1]d_roles" ("id", "default_page", "role_name", "deleted", "role_t ('2','', 'Developer', '0', '3', NOW(), '{}', '{}'); INSERT INTO "%[1]d_roles_participants" ("id","role" ,"member", "date_created") - VALUES ('1', '{"id": "1", "type": "3", "name": "Admin", "image_id":"0"}', '{"member_id": "%[4]d", "member_name": "founder", "image_id": "0"}', NOW()), - ('2', '{"id": "2", "type": "3", "name": "Developer", "image_id":"0"}', '{"member_id": "%[4]d", "member_name": "founder", "image_id": "0"}', NOW()); + VALUES ('1', '{"id": "1", "type": "3", "name": "Admin", "image_id":"0"}', '{"member_id": "%[2]d", "member_name": "founder", "image_id": "0"}', NOW()), + ('2', '{"id": "2", "type": "3", "name": "Developer", "image_id":"0"}', '{"member_id": "%[2]d", "member_name": "founder", "image_id": "0"}', NOW()); - INSERT INTO "%[1]d_members" ("id", "member_name") VALUES('%[4]d', 'founder'); + INSERT INTO "%[1]d_members" ("id", "member_name") VALUES('%[2]d', 'founder'); ` From 3e46ecf10f01a250a90f4112756349f5bf973cc4 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Wed, 20 Jun 2018 10:04:51 +0300 Subject: [PATCH 102/169] Revert "change founder account to user account on adding role_participant" This reverts commit c13fd44ec482ca2a789983b78c42df83500a1ddc. --- packages/migration/roles_data.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/migration/roles_data.go b/packages/migration/roles_data.go index 45093d690..496b078cb 100644 --- a/packages/migration/roles_data.go +++ b/packages/migration/roles_data.go @@ -7,9 +7,9 @@ INSERT INTO "%[1]d_roles" ("id", "default_page", "role_name", "deleted", "role_t ('2','', 'Developer', '0', '3', NOW(), '{}', '{}'); INSERT INTO "%[1]d_roles_participants" ("id","role" ,"member", "date_created") - VALUES ('1', '{"id": "1", "type": "3", "name": "Admin", "image_id":"0"}', '{"member_id": "%[2]d", "member_name": "founder", "image_id": "0"}', NOW()), - ('2', '{"id": "2", "type": "3", "name": "Developer", "image_id":"0"}', '{"member_id": "%[2]d", "member_name": "founder", "image_id": "0"}', NOW()); + VALUES ('1', '{"id": "1", "type": "3", "name": "Admin", "image_id":"0"}', '{"member_id": "%[4]d", "member_name": "founder", "image_id": "0"}', NOW()), + ('2', '{"id": "2", "type": "3", "name": "Developer", "image_id":"0"}', '{"member_id": "%[4]d", "member_name": "founder", "image_id": "0"}', NOW()); - INSERT INTO "%[1]d_members" ("id", "member_name") VALUES('%[2]d', 'founder'); + INSERT INTO "%[1]d_members" ("id", "member_name") VALUES('%[4]d', 'founder'); ` From 97b6ed99be319ac426ab5e5bfb145c9add8b733c Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Wed, 20 Jun 2018 10:09:55 +0300 Subject: [PATCH 103/169] change platform founder to user wallet --- packages/migration/roles_data.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/migration/roles_data.go b/packages/migration/roles_data.go index 496b078cb..45093d690 100644 --- a/packages/migration/roles_data.go +++ b/packages/migration/roles_data.go @@ -7,9 +7,9 @@ INSERT INTO "%[1]d_roles" ("id", "default_page", "role_name", "deleted", "role_t ('2','', 'Developer', '0', '3', NOW(), '{}', '{}'); INSERT INTO "%[1]d_roles_participants" ("id","role" ,"member", "date_created") - VALUES ('1', '{"id": "1", "type": "3", "name": "Admin", "image_id":"0"}', '{"member_id": "%[4]d", "member_name": 
"founder", "image_id": "0"}', NOW()), - ('2', '{"id": "2", "type": "3", "name": "Developer", "image_id":"0"}', '{"member_id": "%[4]d", "member_name": "founder", "image_id": "0"}', NOW()); + VALUES ('1', '{"id": "1", "type": "3", "name": "Admin", "image_id":"0"}', '{"member_id": "%[2]d", "member_name": "founder", "image_id": "0"}', NOW()), + ('2', '{"id": "2", "type": "3", "name": "Developer", "image_id":"0"}', '{"member_id": "%[2]d", "member_name": "founder", "image_id": "0"}', NOW()); - INSERT INTO "%[1]d_members" ("id", "member_name") VALUES('%[4]d', 'founder'); + INSERT INTO "%[1]d_members" ("id", "member_name") VALUES('%[2]d', 'founder'); ` From ba5c484701b3151f195a99e29c55e87a1d660b2b Mon Sep 17 00:00:00 2001 From: Dmitriy Chertkov Date: Wed, 20 Jun 2018 14:59:37 +0500 Subject: [PATCH 104/169] Fixed cleaning cache of requests buffer --- packages/api/smart_test.go | 3 ++- packages/utils/tx/multi_request_buffer.go | 10 ++++------ packages/utils/tx/request_buffer.go | 9 +++------ 3 files changed, 9 insertions(+), 13 deletions(-) diff --git a/packages/api/smart_test.go b/packages/api/smart_test.go index bb338ca18..8b5597267 100644 --- a/packages/api/smart_test.go +++ b/packages/api/smart_test.go @@ -731,7 +731,8 @@ func TestBytesToString(t *testing.T) { $result = BytesToString($File) } }`}, - "Conditions": {"true"}, + "Conditions": {"true"}, + "ApplicationId": {"1"}, })) content := crypto.RandSeq(100) diff --git a/packages/utils/tx/multi_request_buffer.go b/packages/utils/tx/multi_request_buffer.go index 0cbc5c45a..c27509e84 100644 --- a/packages/utils/tx/multi_request_buffer.go +++ b/packages/utils/tx/multi_request_buffer.go @@ -28,10 +28,8 @@ type MultiRequestContract struct { type MultiRequestBuffer struct { mutex sync.Mutex - timer *time.Timer requestExpire time.Duration - - requests map[string]*MultiRequest + requests map[string]*MultiRequest } func (mrb *MultiRequestBuffer) NewMultiRequest() *MultiRequest { @@ -49,7 +47,6 @@ func (mrb *MultiRequestBuffer) AddRequest(mr *MultiRequest) { defer mrb.mutex.Unlock() mrb.requests[mr.ID] = mr - mrb.timer.Reset(mrb.requestExpire) } func (mrb *MultiRequestBuffer) GetRequest(id string) (*MultiRequest, bool) { @@ -65,7 +62,9 @@ func (mrb *MultiRequestBuffer) GetRequest(id string) (*MultiRequest, bool) { } func (mrb *MultiRequestBuffer) waitForCleaning() { - for t := range mrb.timer.C { + ticker := time.NewTicker(mrb.requestExpire) + + for t := range ticker.C { mrb.clean(t) } } @@ -84,7 +83,6 @@ func (mrb *MultiRequestBuffer) clean(t time.Time) { func NewMultiRequestBuffer(requestExpire time.Duration) *MultiRequestBuffer { mrb := &MultiRequestBuffer{ requests: make(map[string]*MultiRequest), - timer: time.NewTimer(-1), requestExpire: requestExpire, } diff --git a/packages/utils/tx/request_buffer.go b/packages/utils/tx/request_buffer.go index e57c3f5d9..05e8bc5d5 100644 --- a/packages/utils/tx/request_buffer.go +++ b/packages/utils/tx/request_buffer.go @@ -96,10 +96,8 @@ type File struct { type RequestBuffer struct { mutex sync.Mutex - timer *time.Timer requestExpire time.Duration - - requests map[string]*Request + requests map[string]*Request } func (rb *RequestBuffer) ExpireDuration() time.Duration { @@ -125,7 +123,6 @@ func (rb *RequestBuffer) AddRequest(r *Request) { defer rb.mutex.Unlock() rb.requests[r.ID] = r - rb.timer.Reset(rb.requestExpire) } func (rb *RequestBuffer) GetRequest(id string) (*Request, bool) { @@ -141,7 +138,8 @@ func (rb *RequestBuffer) GetRequest(id string) (*Request, bool) { } func (rb *RequestBuffer) waitForCleaning() 
{ - for t := range rb.timer.C { + ticker := time.NewTicker(rb.requestExpire) + for t := range ticker.C { rb.clean(t) } } @@ -163,7 +161,6 @@ func (rb *RequestBuffer) clean(t time.Time) { func NewRequestBuffer(requestExpire time.Duration) *RequestBuffer { rb := &RequestBuffer{ requests: make(map[string]*Request), - timer: time.NewTimer(-1), requestExpire: requestExpire, } From aedc6e6439d2199f041f184ffdaf4497a0ce5c86 Mon Sep 17 00:00:00 2001 From: Dmitriy Chertkov Date: Wed, 20 Jun 2018 16:15:11 +0500 Subject: [PATCH 105/169] Fixed building desync_monitor tool --- tools/desync_monitor/query/query.go | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/tools/desync_monitor/query/query.go b/tools/desync_monitor/query/query.go index a47a7b3e1..9cafc3ff8 100644 --- a/tools/desync_monitor/query/query.go +++ b/tools/desync_monitor/query/query.go @@ -3,8 +3,6 @@ package query import ( "fmt" "sync" - - "github.com/GenesisKernel/go-genesis/packages/api" ) const maxBlockIDEndpoint = "/api/v2/maxblockid" @@ -14,6 +12,15 @@ type MaxBlockID struct { MaxBlockID int64 `json:"max_block_id"` } +type blockInfoResult struct { + Hash []byte `json:"hash"` + EcosystemID int64 `json:"ecosystem_id"` + KeyID int64 `json:"key_id"` + Time int64 `json:"time"` + Tx int32 `json:"tx_count"` + RollbacksHash []byte `json:"rollbacks_hash"` +} + func MaxBlockIDs(nodesList []string) ([]int64, error) { wg := sync.WaitGroup{} workResults := ConcurrentMap{m: map[string]interface{}{}} @@ -42,14 +49,14 @@ func MaxBlockIDs(nodesList []string) ([]int64, error) { return maxBlockIds, nil } -func BlockInfo(nodesList []string, blockID int64) (map[string]*api.GetBlockInfoResult, error) { +func BlockInfo(nodesList []string, blockID int64) (map[string]*blockInfoResult, error) { wg := sync.WaitGroup{} workResults := ConcurrentMap{m: map[string]interface{}{}} for _, nodeUrl := range nodesList { wg.Add(1) go func(url string) { defer wg.Done() - blockInfo := &api.GetBlockInfoResult{} + blockInfo := &blockInfoResult{} if err := sendGetRequest(url+fmt.Sprintf(blockInfoEndpoint, blockID), blockInfo); err != nil { workResults.Set(url, err) return @@ -58,12 +65,12 @@ func BlockInfo(nodesList []string, blockID int64) (map[string]*api.GetBlockInfoR }(nodeUrl) } wg.Wait() - result := map[string]*api.GetBlockInfoResult{} + result := map[string]*blockInfoResult{} for nodeUrl, blockInfoOrError := range workResults.m { switch res := blockInfoOrError.(type) { case error: return nil, res - case *api.GetBlockInfoResult: + case *blockInfoResult: result[nodeUrl] = res } } From 22996faa03b4ae4702f96906fd47d6b07314871a Mon Sep 17 00:00:00 2001 From: Roman Potekhin Date: Wed, 20 Jun 2018 21:48:32 +0300 Subject: [PATCH 106/169] eliminate common_ stuff in parsers, move all functions to block.go and transaction.go, move all to object-oriented style, rename parser to transaction --- packages/daemons/block_generator.go | 2 +- packages/daemons/queue_parser_tx.go | 2 +- packages/parser/block.go | 257 ++++++++-- packages/parser/common.go | 204 -------- packages/parser/common_parse_data_full.go | 556 ---------------------- packages/parser/db.go | 2 +- packages/parser/first_block.go | 47 +- packages/parser/limits.go | 30 +- packages/parser/parser_cache.go | 28 +- packages/parser/stop_network.go | 28 +- packages/parser/transaction.go | 519 ++++++++++++++++++++ packages/rollback/block.go | 31 +- 12 files changed, 826 insertions(+), 880 deletions(-) delete mode 100644 packages/parser/common.go delete mode 100644 
packages/parser/common_parse_data_full.go create mode 100644 packages/parser/transaction.go diff --git a/packages/daemons/block_generator.go b/packages/daemons/block_generator.go index def213110..f55909d9b 100644 --- a/packages/daemons/block_generator.go +++ b/packages/daemons/block_generator.go @@ -154,7 +154,7 @@ func generateNextBlock(blockHeader *utils.BlockData, trs []*model.Transaction, k } func processTransactions(logger *log.Entry) ([]*model.Transaction, error) { - p := new(parser.Parser) + p := new(parser.Transaction) // verify transactions err := parser.ProcessTransactionsQueue(p.DbTransaction) diff --git a/packages/daemons/queue_parser_tx.go b/packages/daemons/queue_parser_tx.go index 4585dc1df..6172b5862 100644 --- a/packages/daemons/queue_parser_tx.go +++ b/packages/daemons/queue_parser_tx.go @@ -49,7 +49,7 @@ func QueueParserTx(ctx context.Context, d *daemon) error { return err } - p := new(parser.Parser) + p := new(parser.Transaction) err = parser.ProcessTransactionsQueue(p.DbTransaction) if err != nil { d.logger.WithFields(log.Fields{"error": err}).Error("parsing transactions") diff --git a/packages/parser/block.go b/packages/parser/block.go index df6a83ea5..971edf39a 100644 --- a/packages/parser/block.go +++ b/packages/parser/block.go @@ -8,6 +8,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/conf/syspar" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/converter" + "github.com/GenesisKernel/go-genesis/packages/crypto" "github.com/GenesisKernel/go-genesis/packages/model" "github.com/GenesisKernel/go-genesis/packages/utils" @@ -16,14 +17,14 @@ import ( // Block is storing block data type Block struct { - Header utils.BlockData - PrevHeader *utils.BlockData - MrklRoot []byte - BinData []byte - Parsers []*Parser - SysUpdate bool - GenBlock bool // it equals true when we are generating a new block - StopCount int // The count of good tx in the block + Header utils.BlockData + PrevHeader *utils.BlockData + MrklRoot []byte + BinData []byte + Transactions []*Transaction + SysUpdate bool + GenBlock bool // it equals true when we are generating a new block + StopCount int // The count of good tx in the block } func (b Block) String() string { @@ -47,7 +48,7 @@ func (b *Block) PlayBlockSafe() error { err = b.PlayBlock(dbTransaction) if b.GenBlock && b.StopCount > 0 { - doneTx := b.Parsers[:b.StopCount] + doneTx := b.Transactions[:b.StopCount] trData := make([][]byte, 0, b.StopCount) for _, tr := range doneTx { trData = append(trData, tr.TxFullData) @@ -71,7 +72,7 @@ func (b *Block) PlayBlockSafe() error { return err } b.BinData = newBlockData - b.Parsers = nb.Parsers + b.Transactions = nb.Transactions b.MrklRoot = nb.MrklRoot b.SysUpdate = nb.SysUpdate err = nil @@ -126,21 +127,21 @@ func (b *Block) PlayBlock(dbTransaction *model.DbTransaction) error { return err } limits := NewLimits(b) - for curTx, p := range b.Parsers { + for curTx, t := range b.Transactions { var ( msg string err error ) - p.DbTransaction = dbTransaction + t.DbTransaction = dbTransaction err = dbTransaction.Savepoint(curTx) if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("using savepoint") + logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": t.TxHash}).Error("using savepoint") return err } - msg, err = playTransaction(p) - if err == nil && p.TxSmart != nil { - err = limits.CheckLimit(p) + msg, err = t.play() + if err == nil && t.TxSmart != nil { + err = 
limits.CheckLimit(t) } if err != nil { if err == errNetworkStopping { @@ -149,48 +150,48 @@ func (b *Block) PlayBlock(dbTransaction *model.DbTransaction) error { if b.GenBlock && err == ErrLimitStop { b.StopCount = curTx - model.IncrementTxAttemptCount(p.DbTransaction, p.TxHash) + model.IncrementTxAttemptCount(t.DbTransaction, t.TxHash) } errRoll := dbTransaction.RollbackSavepoint(curTx) if errRoll != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("rolling back to previous savepoint") + logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": t.TxHash}).Error("rolling back to previous savepoint") return errRoll } if b.GenBlock && err == ErrLimitStop { break } // skip this transaction - model.MarkTransactionUsed(p.DbTransaction, p.TxHash) - MarkTransactionBad(p.DbTransaction, p.TxHash, err.Error()) - if p.SysUpdate { - if err = syspar.SysUpdate(p.DbTransaction); err != nil { + model.MarkTransactionUsed(t.DbTransaction, t.TxHash) + MarkTransactionBad(t.DbTransaction, t.TxHash, err.Error()) + if t.SysUpdate { + if err = syspar.SysUpdate(t.DbTransaction); err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") } - p.SysUpdate = false + t.SysUpdate = false } continue } err = dbTransaction.ReleaseSavepoint(curTx) if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("releasing savepoint") + logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": t.TxHash}).Error("releasing savepoint") } - if p.SysUpdate { + if t.SysUpdate { b.SysUpdate = true - p.SysUpdate = false + t.SysUpdate = false } - if _, err := model.MarkTransactionUsed(p.DbTransaction, p.TxHash); err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("marking transaction used") + if _, err := model.MarkTransactionUsed(t.DbTransaction, t.TxHash); err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": t.TxHash}).Error("marking transaction used") return err } // update status ts := &model.TransactionStatus{} - if err := ts.UpdateBlockMsg(p.DbTransaction, b.Header.BlockID, msg, p.TxHash); err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("updating transaction status block id") + if err := ts.UpdateBlockMsg(t.DbTransaction, b.Header.BlockID, msg, t.TxHash); err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": t.TxHash}).Error("updating transaction status block id") return err } - if err := InsertInLogTx(p.DbTransaction, p.TxFullData, p.TxTime); err != nil { + if err := InsertInLogTx(t.DbTransaction, t.TxFullData, t.TxTime); err != nil { return utils.ErrInfo(err) } } @@ -248,8 +249,8 @@ func (b *Block) CheckBlock() error { // check each transaction txCounter := make(map[int64]int) txHashes := make(map[string]struct{}) - for _, p := range b.Parsers { - hexHash := string(converter.BinToHex(p.TxHash)) + for _, t := range b.Transactions { + hexHash := string(converter.BinToHex(t.TxHash)) // check for duplicate transactions if _, ok := txHashes[hexHash]; ok { logger.WithFields(log.Fields{"tx_hash": hexHash, "type": consts.DuplicateObject}).Error("duplicate transaction") @@ -258,12 +259,12 @@ func (b *Block) CheckBlock() error { txHashes[hexHash] = struct{}{} // check for max transaction per user in one block - txCounter[p.TxKeyID]++ - if 
txCounter[p.TxKeyID] > syspar.GetMaxBlockUserTx() { + txCounter[t.TxKeyID]++ + if txCounter[t.TxKeyID] > syspar.GetMaxBlockUserTx() { return utils.ErrInfo(fmt.Errorf("max_block_user_transactions")) } - if err := checkTransaction(p, b.Header.Time, false); err != nil { + if err := t.check(b.Header.Time, false); err != nil { return utils.ErrInfo(err) } @@ -311,3 +312,187 @@ func (b *Block) CheckHash() (bool, error) { return true, nil } + +// InsertBlockWOForks is inserting blocks +func InsertBlockWOForks(data []byte, genBlock, firstBlock bool) error { + block, err := ProcessBlockWherePrevFromBlockchainTable(data, !firstBlock) + if err != nil { + return err + } + block.GenBlock = genBlock + if err := block.CheckBlock(); err != nil { + return err + } + + err = block.PlayBlockSafe() + if err != nil { + return err + } + + log.WithFields(log.Fields{"block_id": block.Header.BlockID}).Debug("block was inserted successfully") + return nil +} + +// ProcessBlockWherePrevFromMemory is processing block with in memory previous block +func ProcessBlockWherePrevFromMemory(data []byte) (*Block, error) { + if int64(len(data)) > syspar.GetMaxBlockSize() { + log.WithFields(log.Fields{"size": len(data), "max_size": syspar.GetMaxBlockSize(), "type": consts.ParameterExceeded}).Error("binary block size exceeds max block size") + return nil, utils.ErrInfo(fmt.Errorf(`len(binaryBlock) > variables.Int64["max_block_size"]`)) + } + + buf := bytes.NewBuffer(data) + if buf.Len() == 0 { + log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("block data is empty") + return nil, fmt.Errorf("empty buffer") + } + + block, err := ParseBlock(buf, false) + if err != nil { + return nil, err + } + block.BinData = data + + if err := block.readPreviousBlockFromMemory(); err != nil { + return nil, err + } + return block, nil +} + +// ProcessBlockWherePrevFromBlockchainTable is processing block with in table previous block +func ProcessBlockWherePrevFromBlockchainTable(data []byte, checkSize bool) (*Block, error) { + if checkSize && int64(len(data)) > syspar.GetMaxBlockSize() { + log.WithFields(log.Fields{"check_size": checkSize, "size": len(data), "max_size": syspar.GetMaxBlockSize(), "type": consts.ParameterExceeded}).Error("binary block size exceeds max block size") + return nil, utils.ErrInfo(fmt.Errorf(`len(binaryBlock) > variables.Int64["max_block_size"]`)) + } + + buf := bytes.NewBuffer(data) + if buf.Len() == 0 { + log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("buffer is empty") + return nil, fmt.Errorf("empty buffer") + } + + block, err := ParseBlock(buf, !checkSize) + if err != nil { + return nil, err + } + block.BinData = data + + if err := block.readPreviousBlockFromBlockchainTable(); err != nil { + return nil, err + } + + return block, nil +} + +func ParseBlock(blockBuffer *bytes.Buffer, firstBlock bool) (*Block, error) { + header, err := utils.ParseBlockHeader(blockBuffer, !firstBlock) + if err != nil { + return nil, err + } + + logger := log.WithFields(log.Fields{"block_id": header.BlockID, "block_time": header.Time, "block_wallet_id": header.KeyID, + "block_state_id": header.EcosystemID, "block_hash": header.Hash, "block_version": header.Version}) + transactions := make([]*Transaction, 0) + + var mrklSlice [][]byte + + // parse transactions + for blockBuffer.Len() > 0 { + transactionSize, err := converter.DecodeLengthBuf(blockBuffer) + if err != nil { + logger.WithFields(log.Fields{"type": consts.UnmarshallingError, "error": err}).Error("transaction size is 0") + return nil, fmt.Errorf("bad block 
format (%s)", err) + } + if blockBuffer.Len() < int(transactionSize) { + logger.WithFields(log.Fields{"size": blockBuffer.Len(), "match_size": int(transactionSize), "type": consts.SizeDoesNotMatch}).Error("transaction size does not matches encoded length") + return nil, fmt.Errorf("bad block format (transaction len is too big: %d)", transactionSize) + } + + if transactionSize == 0 { + logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("transaction size is 0") + return nil, fmt.Errorf("transaction size is 0") + } + + bufTransaction := bytes.NewBuffer(blockBuffer.Next(int(transactionSize))) + t, err := ParseTransaction(bufTransaction) + if err != nil { + if t != nil && t.TxHash != nil { + MarkTransactionBad(t.DbTransaction, t.TxHash, err.Error()) + } + return nil, fmt.Errorf("parse transaction error(%s)", err) + } + t.BlockData = &header + + transactions = append(transactions, t) + + // build merkle tree + if len(t.TxFullData) > 0 { + dSha256Hash, err := crypto.DoubleHash(t.TxFullData) + if err != nil { + logger.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("double hashing tx full data") + return nil, err + } + dSha256Hash = converter.BinToHex(dSha256Hash) + mrklSlice = append(mrklSlice, dSha256Hash) + } + } + + if len(mrklSlice) == 0 { + mrklSlice = append(mrklSlice, []byte("0")) + } + + return &Block{ + Header: header, + Transactions: transactions, + MrklRoot: utils.MerkleTreeRoot(mrklSlice), + }, nil +} + +// MarshallBlock is marshalling block +func MarshallBlock(header *utils.BlockData, trData [][]byte, prevHash []byte, key string) ([]byte, error) { + var mrklArray [][]byte + var blockDataTx []byte + var signed []byte + logger := log.WithFields(log.Fields{"block_id": header.BlockID, "block_hash": header.Hash, "block_time": header.Time, "block_version": header.Version, "block_wallet_id": header.KeyID, "block_state_id": header.EcosystemID}) + + for _, tr := range trData { + doubleHash, err := crypto.DoubleHash(tr) + if err != nil { + logger.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("double hashing transaction") + return nil, err + } + mrklArray = append(mrklArray, converter.BinToHex(doubleHash)) + blockDataTx = append(blockDataTx, converter.EncodeLengthPlusData(tr)...) + } + + if key != "" { + if len(mrklArray) == 0 { + mrklArray = append(mrklArray, []byte("0")) + } + mrklRoot := utils.MerkleTreeRoot(mrklArray) + + forSign := fmt.Sprintf("0,%d,%x,%d,%d,%d,%d,%s", + header.BlockID, prevHash, header.Time, header.EcosystemID, header.KeyID, header.NodePosition, mrklRoot) + + var err error + signed, err = crypto.Sign(key, forSign) + if err != nil { + logger.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("signing blocko") + return nil, err + } + } + + var buf bytes.Buffer + // fill header + buf.Write(converter.DecToBin(header.Version, 2)) + buf.Write(converter.DecToBin(header.BlockID, 4)) + buf.Write(converter.DecToBin(header.Time, 4)) + buf.Write(converter.DecToBin(header.EcosystemID, 4)) + buf.Write(converter.EncodeLenInt64InPlace(header.KeyID)) + buf.Write(converter.DecToBin(header.NodePosition, 1)) + buf.Write(converter.EncodeLengthPlusData(signed)) + // data + buf.Write(blockDataTx) + + return buf.Bytes(), nil +} diff --git a/packages/parser/common.go b/packages/parser/common.go deleted file mode 100644 index b2061e13f..000000000 --- a/packages/parser/common.go +++ /dev/null @@ -1,204 +0,0 @@ -// Copyright 2016 The go-daylight Authors -// This file is part of the go-daylight library. 
-// -// The go-daylight library is free software: you can redistribute it and/or modify -// it under the terms of the GNU Lesser General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// The go-daylight library is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Lesser General Public License for more details. -// -// You should have received a copy of the GNU Lesser General Public License -// along with the go-daylight library. If not, see . - -package parser - -import ( - "fmt" - "reflect" - - "github.com/GenesisKernel/go-genesis/packages/consts" - "github.com/GenesisKernel/go-genesis/packages/converter" - "github.com/GenesisKernel/go-genesis/packages/model" - "github.com/GenesisKernel/go-genesis/packages/smart" - "github.com/GenesisKernel/go-genesis/packages/utils" - "github.com/GenesisKernel/go-genesis/packages/utils/tx" - - "github.com/shopspring/decimal" - log "github.com/sirupsen/logrus" -) - -// GetTxTypeAndUserID returns tx type, wallet and citizen id from the block data -func GetTxTypeAndUserID(binaryBlock []byte) (txType int64, keyID int64) { - tmp := binaryBlock[:] - txType = converter.BinToDecBytesShift(&binaryBlock, 1) - if consts.IsStruct(int(txType)) { - var txHead consts.TxHeader - converter.BinUnmarshal(&tmp, &txHead) - keyID = txHead.KeyID - } - return -} - -// ParserInterface is parsing transactions -type ParserInterface interface { - Init() error - Validate() error - Action() error - Rollback() error - Header() *tx.Header -} - -// GetParser returns ParserInterface -func GetParser(p *Parser, txType string) (ParserInterface, error) { - switch txType { - case consts.TxTypeParserFirstBlock: - return &FirstBlockParser{p}, nil - case consts.TxTypeParserStopNetwork: - return &StopNetworkParser{p, nil}, nil - } - log.WithFields(log.Fields{"tx_type": txType, "type": consts.UnknownObject}).Error("unknown txType") - return nil, fmt.Errorf("Unknown txType: %s", txType) -} - -// Parser is a structure for parsing transactions -type Parser struct { - BlockData *utils.BlockData - PrevBlock *utils.BlockData - dataType int - blockData []byte - CurrentVersion string - PublicKeys [][]byte - - TxBinaryData []byte // transaction binary data - TxFullData []byte // full transaction, with type and data - TxHash []byte - TxKeyID int64 - TxEcosystemID int64 - TxNodePosition uint32 - TxTime int64 - TxType int64 - TxCost int64 // Maximum cost of executing contract - TxFuel int64 // The fuel cost of executed contract - TxUsedCost decimal.Decimal // Used cost of CPU resources - TxPtr interface{} // Pointer to the corresponding struct in consts/struct.go - TxData map[string]interface{} - TxSmart *tx.SmartContract - TxContract *smart.Contract - TxHeader *tx.Header - txParser ParserInterface - DbTransaction *model.DbTransaction - SysUpdate bool - - SmartContract smart.SmartContract -} - -// GetLogger returns logger -func (p Parser) GetLogger() *log.Entry { - if p.BlockData != nil && p.PrevBlock != nil { - logger := log.WithFields(log.Fields{"block_id": p.BlockData.BlockID, "block_time": p.BlockData.Time, "block_wallet_id": p.BlockData.KeyID, "block_state_id": p.BlockData.EcosystemID, "block_hash": p.BlockData.Hash, "block_version": p.BlockData.Version, "prev_block_id": p.PrevBlock.BlockID, "prev_block_time": p.PrevBlock.Time, "prev_block_wallet_id": p.PrevBlock.KeyID, 
"prev_block_state_id": p.PrevBlock.EcosystemID, "prev_block_hash": p.PrevBlock.Hash, "prev_block_version": p.PrevBlock.Version, "tx_type": p.TxType, "tx_time": p.TxTime, "tx_state_id": p.TxEcosystemID, "tx_wallet_id": p.TxKeyID}) - return logger - } - if p.BlockData != nil { - logger := log.WithFields(log.Fields{"block_id": p.BlockData.BlockID, "block_time": p.BlockData.Time, "block_wallet_id": p.BlockData.KeyID, "block_state_id": p.BlockData.EcosystemID, "block_hash": p.BlockData.Hash, "block_version": p.BlockData.Version, "tx_type": p.TxType, "tx_time": p.TxTime, "tx_state_id": p.TxEcosystemID, "tx_wallet_id": p.TxKeyID}) - return logger - } - if p.PrevBlock != nil { - logger := log.WithFields(log.Fields{"prev_block_id": p.PrevBlock.BlockID, "prev_block_time": p.PrevBlock.Time, "prev_block_wallet_id": p.PrevBlock.KeyID, "prev_block_state_id": p.PrevBlock.EcosystemID, "prev_block_hash": p.PrevBlock.Hash, "prev_block_version": p.PrevBlock.Version, "tx_type": p.TxType, "tx_time": p.TxTime, "tx_state_id": p.TxEcosystemID, "tx_wallet_id": p.TxKeyID}) - return logger - } - logger := log.WithFields(log.Fields{"tx_type": p.TxType, "tx_time": p.TxTime, "tx_state_id": p.TxEcosystemID, "tx_wallet_id": p.TxKeyID}) - return logger -} - -// FormatBlockData returns formated block data -func (p *Parser) FormatBlockData() string { - result := "" - if p.BlockData != nil { - v := reflect.ValueOf(*p.BlockData) - typeOfT := v.Type() - if typeOfT.Kind() == reflect.Ptr { - typeOfT = typeOfT.Elem() - } - for i := 0; i < v.NumField(); i++ { - name := typeOfT.Field(i).Name - switch name { - case "BlockId", "Time", "UserId", "Level": - result += "[" + name + "] = " + fmt.Sprintf("%d\n", v.Field(i).Interface()) - case "Sign", "Hash", "HeadHash": - result += "[" + name + "] = " + fmt.Sprintf("%x\n", v.Field(i).Interface()) - default: - result += "[" + name + "] = " + fmt.Sprintf("%s\n", v.Field(i).Interface()) - } - } - } - return result -} - -// ErrInfo returns the more detailed error -func (p *Parser) ErrInfo(verr interface{}) error { - var err error - switch verr.(type) { - case error: - err = verr.(error) - case string: - err = fmt.Errorf(verr.(string)) - } - return fmt.Errorf("[ERROR] %s (%s)\n%s", err, utils.Caller(1), p.FormatBlockData()) -} - -// AccessRights checks the access right by executing the condition value -func (p *Parser) AccessRights(condition string, iscondition bool) error { - logger := p.GetLogger() - sp := &model.StateParameter{} - sp.SetTablePrefix(converter.Int64ToStr(p.TxSmart.EcosystemID)) - _, err := sp.Get(p.DbTransaction, condition) - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting state parameter by name transaction") - return err - } - conditions := sp.Value - if iscondition { - conditions = sp.Conditions - } - if len(conditions) > 0 { - ret, err := p.SmartContract.EvalIf(conditions) - if err != nil { - logger.WithFields(log.Fields{"type": consts.EvalError, "error": err, "conditions": conditions}).Error("evaluating conditions") - return err - } - if !ret { - logger.WithFields(log.Fields{"type": consts.AccessDenied}).Error("Access denied") - return fmt.Errorf(`Access denied`) - } - } else { - logger.WithFields(log.Fields{"type": consts.EmptyObject, "conditions": condition}).Error("No condition in state_parameters") - return fmt.Errorf(`There is not %s in state_parameters`, condition) - } - return nil -} - -// CallContract calls the contract functions according to the specified flags -func (p *Parser) CallContract(flags int) 
(resultContract string, err error) { - sc := smart.SmartContract{ - VDE: false, - Rollback: true, - SysUpdate: false, - VM: smart.GetVM(false, 0), - TxSmart: *p.TxSmart, - TxData: p.TxData, - TxContract: p.TxContract, - TxCost: p.TxCost, - TxUsedCost: p.TxUsedCost, - BlockData: p.BlockData, - TxHash: p.TxHash, - PublicKeys: p.PublicKeys, - DbTransaction: p.DbTransaction, - } - resultContract, err = sc.CallContract(flags) - p.SysUpdate = sc.SysUpdate - return -} diff --git a/packages/parser/common_parse_data_full.go b/packages/parser/common_parse_data_full.go deleted file mode 100644 index 9076ed3cb..000000000 --- a/packages/parser/common_parse_data_full.go +++ /dev/null @@ -1,556 +0,0 @@ -// Copyright 2016 The go-daylight Authors -// This file is part of the go-daylight library. -// -// The go-daylight library is free software: you can redistribute it and/or modify -// it under the terms of the GNU Lesser General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// The go-daylight library is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Lesser General Public License for more details. -// -// You should have received a copy of the GNU Lesser General Public License -// along with the go-daylight library. If not, see . - -package parser - -import ( - "bytes" - "encoding/hex" - "fmt" - "strings" - "time" - - "github.com/GenesisKernel/go-genesis/packages/conf/syspar" - "github.com/GenesisKernel/go-genesis/packages/consts" - "github.com/GenesisKernel/go-genesis/packages/converter" - "github.com/GenesisKernel/go-genesis/packages/crypto" - "github.com/GenesisKernel/go-genesis/packages/script" - "github.com/GenesisKernel/go-genesis/packages/smart" - "github.com/GenesisKernel/go-genesis/packages/utils" - "github.com/GenesisKernel/go-genesis/packages/utils/tx" - - "github.com/shopspring/decimal" - log "github.com/sirupsen/logrus" - "gopkg.in/vmihailenco/msgpack.v2" -) - -var txParserCache = &parserCache{cache: make(map[string]*Parser)} - -// InsertBlockWOForks is inserting blocks -func InsertBlockWOForks(data []byte, genBlock, firstBlock bool) error { - block, err := ProcessBlockWherePrevFromBlockchainTable(data, !firstBlock) - if err != nil { - return err - } - block.GenBlock = genBlock - if err := block.CheckBlock(); err != nil { - return err - } - - err = block.PlayBlockSafe() - if err != nil { - return err - } - - log.WithFields(log.Fields{"block_id": block.Header.BlockID}).Debug("block was inserted successfully") - return nil -} - -// ProcessBlockWherePrevFromMemory is processing block with in memory previous block -func ProcessBlockWherePrevFromMemory(data []byte) (*Block, error) { - if int64(len(data)) > syspar.GetMaxBlockSize() { - log.WithFields(log.Fields{"size": len(data), "max_size": syspar.GetMaxBlockSize(), "type": consts.ParameterExceeded}).Error("binary block size exceeds max block size") - return nil, utils.ErrInfo(fmt.Errorf(`len(binaryBlock) > variables.Int64["max_block_size"]`)) - } - - buf := bytes.NewBuffer(data) - if buf.Len() == 0 { - log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("block data is empty") - return nil, fmt.Errorf("empty buffer") - } - - block, err := ParseBlock(buf, false) - if err != nil { - return nil, err - } - block.BinData = data - - if err := block.readPreviousBlockFromMemory(); err != nil { - return nil, err 
- } - return block, nil -} - -// ProcessBlockWherePrevFromBlockchainTable is processing block with in table previous block -func ProcessBlockWherePrevFromBlockchainTable(data []byte, checkSize bool) (*Block, error) { - if checkSize && int64(len(data)) > syspar.GetMaxBlockSize() { - log.WithFields(log.Fields{"check_size": checkSize, "size": len(data), "max_size": syspar.GetMaxBlockSize(), "type": consts.ParameterExceeded}).Error("binary block size exceeds max block size") - return nil, utils.ErrInfo(fmt.Errorf(`len(binaryBlock) > variables.Int64["max_block_size"]`)) - } - - buf := bytes.NewBuffer(data) - if buf.Len() == 0 { - log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("buffer is empty") - return nil, fmt.Errorf("empty buffer") - } - - block, err := ParseBlock(buf, !checkSize) - if err != nil { - return nil, err - } - block.BinData = data - - if err := block.readPreviousBlockFromBlockchainTable(); err != nil { - return nil, err - } - - return block, nil -} - -func ParseBlock(blockBuffer *bytes.Buffer, firstBlock bool) (*Block, error) { - header, err := utils.ParseBlockHeader(blockBuffer, !firstBlock) - if err != nil { - return nil, err - } - - logger := log.WithFields(log.Fields{"block_id": header.BlockID, "block_time": header.Time, "block_wallet_id": header.KeyID, - "block_state_id": header.EcosystemID, "block_hash": header.Hash, "block_version": header.Version}) - parsers := make([]*Parser, 0) - - var mrklSlice [][]byte - - // parse transactions - for blockBuffer.Len() > 0 { - transactionSize, err := converter.DecodeLengthBuf(blockBuffer) - if err != nil { - logger.WithFields(log.Fields{"type": consts.UnmarshallingError, "error": err}).Error("transaction size is 0") - return nil, fmt.Errorf("bad block format (%s)", err) - } - if blockBuffer.Len() < int(transactionSize) { - logger.WithFields(log.Fields{"size": blockBuffer.Len(), "match_size": int(transactionSize), "type": consts.SizeDoesNotMatch}).Error("transaction size does not matches encoded length") - return nil, fmt.Errorf("bad block format (transaction len is too big: %d)", transactionSize) - } - - if transactionSize == 0 { - logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("transaction size is 0") - return nil, fmt.Errorf("transaction size is 0") - } - - bufTransaction := bytes.NewBuffer(blockBuffer.Next(int(transactionSize))) - p, err := ParseTransaction(bufTransaction) - if err != nil { - if p != nil && p.TxHash != nil { - MarkTransactionBad(p.DbTransaction, p.TxHash, err.Error()) - } - return nil, fmt.Errorf("parse transaction error(%s)", err) - } - p.BlockData = &header - - parsers = append(parsers, p) - - // build merkle tree - if len(p.TxFullData) > 0 { - dSha256Hash, err := crypto.DoubleHash(p.TxFullData) - if err != nil { - logger.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("double hashing tx full data") - return nil, err - } - dSha256Hash = converter.BinToHex(dSha256Hash) - mrklSlice = append(mrklSlice, dSha256Hash) - } - } - - if len(mrklSlice) == 0 { - mrklSlice = append(mrklSlice, []byte("0")) - } - - return &Block{ - Header: header, - Parsers: parsers, - MrklRoot: utils.MerkleTreeRoot(mrklSlice), - }, nil -} - -// ParseTransaction is parsing transaction -func ParseTransaction(buffer *bytes.Buffer) (*Parser, error) { - if buffer.Len() == 0 { - log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("empty transaction buffer") - return nil, fmt.Errorf("empty transaction buffer") - } - - hash, err := crypto.Hash(buffer.Bytes()) - // or DoubleHash ? 
- if err != nil { - log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("hashing transaction") - return nil, err - } - - if p, ok := txParserCache.Get(string(hash)); ok { - return p, nil - } - - p := new(Parser) - p.TxHash = hash - p.TxUsedCost = decimal.New(0, 0) - p.TxFullData = buffer.Bytes() - - txType := int64(buffer.Bytes()[0]) - p.dataType = int(txType) - - // smart contract transaction - if IsContractTransaction(int(txType)) { - // skip byte with transaction type - buffer.Next(1) - p.TxBinaryData = buffer.Bytes() - if err := parseContractTransaction(p, buffer); err != nil { - return nil, err - } - - // TODO: check for what it was here: - /*if err := p.CallContract(smart.CallInit | smart.CallCondition); err != nil { - return nil, err - }*/ - - // struct transaction (only first block transaction for now) - } else if consts.IsStruct(int(txType)) { - p.TxBinaryData = buffer.Bytes() - if err := parseStructTransaction(p, buffer, txType); err != nil { - return p, err - } - - // all other transactions - } else { - // skip byte with transaction type - buffer.Next(1) - p.TxBinaryData = buffer.Bytes() - if err := parseRegularTransaction(p, buffer, txType); err != nil { - return p, err - } - } - - txParserCache.Set(p) - - return p, nil -} - -// IsContractTransaction checks txType -func IsContractTransaction(txType int) bool { - return txType > 127 -} - -func parseContractTransaction(p *Parser, buf *bytes.Buffer) error { - smartTx := tx.SmartContract{} - if err := msgpack.Unmarshal(buf.Bytes(), &smartTx); err != nil { - log.WithFields(log.Fields{"tx_type": p.dataType, "tx_hash": p.TxHash, "error": err, "type": consts.UnmarshallingError}).Error("unmarshalling smart tx msgpack") - return err - } - p.TxPtr = nil - p.TxSmart = &smartTx - p.TxTime = smartTx.Time - p.TxEcosystemID = (smartTx.EcosystemID) - p.TxKeyID = smartTx.KeyID - - contract := smart.GetContractByID(int32(smartTx.Type)) - if contract == nil { - log.WithFields(log.Fields{"contract_type": smartTx.Type, "type": consts.NotFound}).Error("unknown contract") - return fmt.Errorf(`unknown contract %d`, smartTx.Type) - } - forsign := []string{smartTx.ForSign()} - - p.TxContract = contract - p.TxHeader = &smartTx.Header - - input := smartTx.Data - p.TxData = make(map[string]interface{}) - - if contract.Block.Info.(*script.ContractInfo).Tx != nil { - for _, fitem := range *contract.Block.Info.(*script.ContractInfo).Tx { - var err error - var v interface{} - var forv string - var isforv bool - - if fitem.ContainsTag(script.TagFile) { - var ( - data []byte - file *tx.File - ) - if err := converter.BinUnmarshal(&input, &data); err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling file") - return err - } - if err := msgpack.Unmarshal(data, &file); err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("unmarshalling file msgpack") - return err - } - - p.TxData[fitem.Name] = file.Data - p.TxData[fitem.Name+"MimeType"] = file.MimeType - - forsign = append(forsign, file.MimeType, file.Hash) - continue - } - - switch fitem.Type.String() { - case `uint64`: - var val uint64 - converter.BinUnmarshal(&input, &val) - v = val - case `float64`: - var val float64 - converter.BinUnmarshal(&input, &val) - v = val - case `int64`: - v, err = converter.DecodeLenInt64(&input) - case script.Decimal: - var s string - if err := converter.BinUnmarshal(&input, &s); err != nil { - log.WithFields(log.Fields{"error": err, "type": 
consts.UnmarshallingError}).Error("bin unmarshalling script.Decimal") - return err - } - v, err = decimal.NewFromString(s) - case `string`: - var s string - if err := converter.BinUnmarshal(&input, &s); err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling string") - return err - } - v = s - case `[]uint8`: - var b []byte - if err := converter.BinUnmarshal(&input, &b); err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling string") - return err - } - v = hex.EncodeToString(b) - case `[]interface {}`: - count, err := converter.DecodeLength(&input) - if err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling []interface{}") - return err - } - isforv = true - list := make([]interface{}, 0) - for count > 0 { - length, err := converter.DecodeLength(&input) - if err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling tx length") - return err - } - if len(input) < int(length) { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError, "length": int(length), "slice length": len(input)}).Error("incorrect tx size") - return fmt.Errorf(`input slice is short`) - } - list = append(list, string(input[:length])) - input = input[length:] - count-- - } - if len(list) > 0 { - slist := make([]string, len(list)) - for j, lval := range list { - slist[j] = lval.(string) - } - forv = strings.Join(slist, `,`) - } - v = list - } - if p.TxData[fitem.Name] == nil { - p.TxData[fitem.Name] = v - } - if err != nil { - return err - } - if strings.Index(fitem.Tags, `image`) >= 0 { - continue - } - if isforv { - v = forv - } - forsign = append(forsign, fmt.Sprintf("%v", v)) - } - } - p.TxData[`forsign`] = strings.Join(forsign, ",") - - return nil -} - -func parseStructTransaction(p *Parser, buf *bytes.Buffer, txType int64) error { - trParser, err := GetParser(p, consts.TxTypes[int(txType)]) - if err != nil { - return err - } - p.txParser = trParser - - p.TxPtr = consts.MakeStruct(consts.TxTypes[int(txType)]) - input := buf.Bytes() - if err := converter.BinUnmarshal(&input, p.TxPtr); err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError, "tx_type": int(txType)}).Error("getting parser for tx type") - return err - } - - head := consts.Header(p.TxPtr) - p.TxKeyID = head.KeyID - p.TxTime = int64(head.Time) - p.TxType = txType - - err = trParser.Validate() - if err != nil { - return utils.ErrInfo(err) - } - - return nil -} - -func parseRegularTransaction(p *Parser, buf *bytes.Buffer, txType int64) error { - trParser, err := GetParser(p, consts.TxTypes[int(txType)]) - if err != nil { - return err - } - p.txParser = trParser - - err = trParser.Init() - if err != nil { - log.WithFields(log.Fields{"error": err, "tx_type": int(txType)}).Error("parser init") - return err - } - header := trParser.Header() - if header == nil { - log.WithFields(log.Fields{"error": err, "tx_type": int(txType)}).Error("parser get header") - return fmt.Errorf("tx header is nil") - } - - p.TxHeader = header - p.TxTime = header.Time - p.TxType = txType - p.TxEcosystemID = (header.EcosystemID) - p.TxKeyID = header.KeyID - - err = trParser.Validate() - if _, ok := err.(error); ok { - return utils.ErrInfo(err.(error)) - } - - return nil -} - -func checkTransaction(p *Parser, checkTime int64, checkForDupTr bool) error { - err := CheckLogTx(p.TxFullData, checkForDupTr, 
false) - if err != nil { - return utils.ErrInfo(err) - } - logger := log.WithFields(log.Fields{"tx_type": p.dataType, "tx_time": p.TxTime, "tx_state_id": p.TxEcosystemID}) - // time in the transaction cannot be more than MAX_TX_FORW seconds of block time - if p.TxTime-consts.MAX_TX_FORW > checkTime { - logger.WithFields(log.Fields{"tx_max_forw": consts.MAX_TX_FORW, "type": consts.ParameterExceeded}).Error("time in the tx cannot be more than MAX_TX_FORW seconds of block time ") - return utils.ErrInfo(fmt.Errorf("transaction time is too big")) - } - - // time in transaction cannot be less than -24 of block time - if p.TxTime < checkTime-consts.MAX_TX_BACK { - logger.WithFields(log.Fields{"tx_max_back": consts.MAX_TX_BACK, "type": consts.ParameterExceeded}).Error("time in the tx cannot be less then -24 of block time") - return utils.ErrInfo(fmt.Errorf("incorrect transaction time")) - } - - if p.TxContract == nil { - if p.BlockData != nil && p.BlockData.BlockID != 1 { - if p.TxKeyID == 0 { - logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("Empty user id") - return utils.ErrInfo(fmt.Errorf("empty user id")) - } - } - } - - return nil -} - -// CheckTransaction is checking transaction -func CheckTransaction(data []byte) (*tx.Header, error) { - trBuff := bytes.NewBuffer(data) - p, err := ParseTransaction(trBuff) - if err != nil { - return nil, err - } - - err = checkTransaction(p, time.Now().Unix(), true) - if err != nil { - return nil, err - } - - return p.TxHeader, nil -} - -func playTransaction(p *Parser) (string, error) { - // smart-contract - if p.TxContract != nil { - // check that there are enough money in CallContract - return p.CallContract(smart.CallInit | smart.CallCondition | smart.CallAction) - } - - if p.txParser == nil { - return "", utils.ErrInfo(fmt.Errorf("can't find parser for %d", p.TxType)) - } - - err := p.txParser.Action() - if err != nil { - return "", err - } - - return "", nil -} - -// MarshallBlock is marshalling block -func MarshallBlock(header *utils.BlockData, trData [][]byte, prevHash []byte, key string) ([]byte, error) { - var mrklArray [][]byte - var blockDataTx []byte - var signed []byte - logger := log.WithFields(log.Fields{"block_id": header.BlockID, "block_hash": header.Hash, "block_time": header.Time, "block_version": header.Version, "block_wallet_id": header.KeyID, "block_state_id": header.EcosystemID}) - - for _, tr := range trData { - doubleHash, err := crypto.DoubleHash(tr) - if err != nil { - logger.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("double hashing transaction") - return nil, err - } - mrklArray = append(mrklArray, converter.BinToHex(doubleHash)) - blockDataTx = append(blockDataTx, converter.EncodeLengthPlusData(tr)...) 
- } - - if key != "" { - if len(mrklArray) == 0 { - mrklArray = append(mrklArray, []byte("0")) - } - mrklRoot := utils.MerkleTreeRoot(mrklArray) - - forSign := fmt.Sprintf("0,%d,%x,%d,%d,%d,%d,%s", - header.BlockID, prevHash, header.Time, header.EcosystemID, header.KeyID, header.NodePosition, mrklRoot) - - var err error - signed, err = crypto.Sign(key, forSign) - if err != nil { - logger.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("signing blocko") - return nil, err - } - } - - var buf bytes.Buffer - // fill header - buf.Write(converter.DecToBin(header.Version, 2)) - buf.Write(converter.DecToBin(header.BlockID, 4)) - buf.Write(converter.DecToBin(header.Time, 4)) - buf.Write(converter.DecToBin(header.EcosystemID, 4)) - buf.Write(converter.EncodeLenInt64InPlace(header.KeyID)) - buf.Write(converter.DecToBin(header.NodePosition, 1)) - buf.Write(converter.EncodeLengthPlusData(signed)) - // data - buf.Write(blockDataTx) - - return buf.Bytes(), nil -} - -// CleanCache cleans cache of transaction parsers -func CleanCache() { - txParserCache.Clean() -} diff --git a/packages/parser/db.go b/packages/parser/db.go index d0e4893a2..9f421f2f5 100644 --- a/packages/parser/db.go +++ b/packages/parser/db.go @@ -105,7 +105,7 @@ func InsertIntoBlockchain(transaction *model.DbTransaction, block *Block) error NodePosition: block.Header.NodePosition, Time: block.Header.Time, RollbacksHash: rollbackTxsHash, - Tx: int32(len(block.Parsers)), + Tx: int32(len(block.Transactions)), } blockTimeCalculator, err := utils.BuildBlockTimeCalculator() if err != nil { diff --git a/packages/parser/first_block.go b/packages/parser/first_block.go index d92be788b..6e9a2ba57 100644 --- a/packages/parser/first_block.go +++ b/packages/parser/first_block.go @@ -30,6 +30,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/crypto" "github.com/GenesisKernel/go-genesis/packages/model" "github.com/GenesisKernel/go-genesis/packages/smart" + "github.com/GenesisKernel/go-genesis/packages/utils" "github.com/GenesisKernel/go-genesis/packages/utils/tx" "github.com/shopspring/decimal" @@ -39,32 +40,32 @@ import ( const firstEcosystemID = 1 // FirstBlockParser is parser wrapper -type FirstBlockParser struct { - *Parser +type FirstBlockTransaction struct { + *Transaction } // ErrFirstBlockHostIsEmpty host for first block is not specified var ErrFirstBlockHostIsEmpty = errors.New("FirstBlockHost is empty") // Init first block -func (p *FirstBlockParser) Init() error { +func (t *FirstBlockTransaction) Init() error { return nil } // Validate first block -func (p *FirstBlockParser) Validate() error { +func (t *FirstBlockTransaction) Validate() error { return nil } // Action is fires first block -func (p *FirstBlockParser) Action() error { - logger := p.GetLogger() - data := p.TxPtr.(*consts.FirstBlock) +func (t *FirstBlockTransaction) Action() error { + logger := t.GetLogger() + data := t.TxPtr.(*consts.FirstBlock) keyID := crypto.Address(data.PublicKey) err := model.ExecSchemaEcosystem(nil, firstEcosystemID, keyID, ``, keyID) if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("executing ecosystem schema") - return p.ErrInfo(err) + return utils.ErrInfo(err) } sp := &model.StateParameter{} @@ -76,48 +77,48 @@ func (p *FirstBlockParser) Action() error { } amount := decimal.New(consts.FounderAmount, int32(converter.StrToInt64(sp.Value))).String() - err = model.GetDB(p.DbTransaction).Exec(`insert into "1_keys" (id,pub,amount) values(?, ?,?)`, + err = 
model.GetDB(t.DbTransaction).Exec(`insert into "1_keys" (id,pub,amount) values(?, ?,?)`, keyID, data.PublicKey, amount).Error if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("inserting default page") - return p.ErrInfo(err) + return utils.ErrInfo(err) } - err = model.GetDB(p.DbTransaction).Exec(`insert into "1_pages" (id,name,menu,value,conditions) values('1', 'default_page', + err = model.GetDB(t.DbTransaction).Exec(`insert into "1_pages" (id,name,menu,value,conditions) values('1', 'default_page', 'default_menu', ?, 'ContractAccess("@1EditPage")')`, syspar.SysString(`default_ecosystem_page`)).Error if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("inserting default page") - return p.ErrInfo(err) + return utils.ErrInfo(err) } - err = model.GetDB(p.DbTransaction).Exec(`insert into "1_menu" (id,name,value,title,conditions) values('1', 'default_menu', ?, ?, 'ContractAccess("@1EditMenu")')`, + err = model.GetDB(t.DbTransaction).Exec(`insert into "1_menu" (id,name,value,title,conditions) values('1', 'default_menu', ?, ?, 'ContractAccess("@1EditMenu")')`, syspar.SysString(`default_ecosystem_menu`), `default`).Error if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("inserting default menu") - return p.ErrInfo(err) + return utils.ErrInfo(err) } - err = smart.LoadContract(p.DbTransaction, `1`) + err = smart.LoadContract(t.DbTransaction, `1`) if err != nil { - return p.ErrInfo(err) + return utils.ErrInfo(err) } commission := &model.SystemParameter{Name: `commission_wallet`} if err = commission.SaveArray([][]string{{"1", converter.Int64ToStr(keyID)}}); err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("saving commission_wallet array") - return p.ErrInfo(err) + return utils.ErrInfo(err) } if err = syspar.SysUpdate(nil); err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") - return p.ErrInfo(err) + return utils.ErrInfo(err) } syspar.SetFirstBlockData(data) return nil } // Rollback first block -func (p *FirstBlockParser) Rollback() error { +func (t *FirstBlockTransaction) Rollback() error { return nil } // Header is returns first block header -func (p FirstBlockParser) Header() *tx.Header { +func (t FirstBlockTransaction) Header() *tx.Header { return nil } @@ -162,13 +163,13 @@ func GetDataFromFirstBlock() (data *consts.FirstBlock, ok bool) { return } - if len(pb.Parsers) == 0 { + if len(pb.Transactions) == 0 { log.WithFields(log.Fields{"type": consts.ParserError}).Error("list of parsers is empty") return } - p := pb.Parsers[0] - data, ok = p.TxPtr.(*consts.FirstBlock) + t := pb.Transactions[0] + data, ok = t.TxPtr.(*consts.FirstBlock) if !ok { log.WithFields(log.Fields{"type": consts.ParserError}).Error("getting data of first block") return diff --git a/packages/parser/limits.go b/packages/parser/limits.go index e013979f8..e269ad873 100644 --- a/packages/parser/limits.go +++ b/packages/parser/limits.go @@ -46,7 +46,7 @@ type Limits struct { // Limiter describes interface functions for limits type Limiter interface { init(*Block) - check(*Parser, int) error + check(*Transaction, int) error } type limiterModes struct { @@ -90,9 +90,9 @@ func NewLimits(b *Block) (limits *Limits) { } // CheckLimit calls each limiter -func (limits *Limits) CheckLimit(p *Parser) error { +func (limits *Limits) CheckLimit(t *Transaction) error { for _, limiter := range limits.Limiters { - if err := limiter.check(p, 
limits.Mode); err != nil { + if err := limiter.check(t, limits.Mode); err != nil { return err } } @@ -115,7 +115,7 @@ func (bl *txMaxLimit) init(b *Block) { bl.Limit = syspar.GetMaxTxCount() } -func (bl *txMaxLimit) check(p *Parser, mode int) error { +func (bl *txMaxLimit) check(t *Transaction, mode int) error { bl.Count++ if bl.Count > bl.Limit { if mode == letPreprocess { @@ -137,7 +137,7 @@ func (bl *timeBlockLimit) init(b *Block) { bl.Limit = time.Millisecond * time.Duration(syspar.GetMaxBlockGenerationTime()) } -func (bl *timeBlockLimit) check(p *Parser, mode int) error { +func (bl *timeBlockLimit) check(t *Transaction, mode int) error { if time.Since(bl.Start) < bl.Limit { return nil } @@ -160,12 +160,12 @@ func (bl *txUserLimit) init(b *Block) { bl.Limit = syspar.GetMaxBlockUserTx() } -func (bl *txUserLimit) check(p *Parser, mode int) error { +func (bl *txUserLimit) check(t *Transaction, mode int) error { var ( count int ok bool ) - keyID := p.TxSmart.KeyID + keyID := t.TxSmart.KeyID if count, ok = bl.TxUsers[keyID]; ok { if count+1 > bl.Limit { if mode == letPreprocess { @@ -192,9 +192,9 @@ func (bl *txUserEcosysLimit) init(b *Block) { bl.TxEcosys = make(map[int64]ecosysLimit) } -func (bl *txUserEcosysLimit) check(p *Parser, mode int) error { - keyID := p.TxSmart.KeyID - ecosystemID := p.TxSmart.EcosystemID +func (bl *txUserEcosysLimit) check(t *Transaction, mode int) error { + keyID := t.TxSmart.KeyID + ecosystemID := t.TxSmart.EcosystemID if val, ok := bl.TxEcosys[ecosystemID]; ok { if user, ok := val.TxUsers[keyID]; ok { if user+1 > val.Limit { @@ -212,7 +212,7 @@ func (bl *txUserEcosysLimit) check(p *Parser, mode int) error { limit := syspar.GetMaxBlockUserTx() sp := &model.StateParameter{} sp.SetTablePrefix(converter.Int64ToStr(ecosystemID)) - found, err := sp.Get(p.DbTransaction, `max_block_user_tx`) + found, err := sp.Get(t.DbTransaction, `max_block_user_tx`) if err != nil { return limitError(`txUserEcosysLimit`, err.Error()) } @@ -237,8 +237,8 @@ func (bl *txMaxSize) init(b *Block) { bl.LimitTx = syspar.GetMaxTxSize() } -func (bl *txMaxSize) check(p *Parser, mode int) error { - size := int64(len(p.TxFullData)) +func (bl *txMaxSize) check(t *Transaction, mode int) error { + size := int64(len(t.TxFullData)) if size > bl.LimitTx { return limitError(`txMaxSize`, `Max size of tx`) } @@ -264,8 +264,8 @@ func (bl *txMaxFuel) init(b *Block) { bl.LimitTx = syspar.GetMaxTxFuel() } -func (bl *txMaxFuel) check(p *Parser, mode int) error { - fuel := p.TxFuel +func (bl *txMaxFuel) check(t *Transaction, mode int) error { + fuel := t.TxFuel if fuel > bl.LimitTx { return limitError(`txMaxFuel`, `Max fuel of tx %d > %d`, fuel, bl.LimitTx) } diff --git a/packages/parser/parser_cache.go b/packages/parser/parser_cache.go index adbd54009..21fd435b1 100644 --- a/packages/parser/parser_cache.go +++ b/packages/parser/parser_cache.go @@ -2,29 +2,29 @@ package parser import "sync" -type parserCache struct { +type transactionCache struct { mutex sync.RWMutex - cache map[string]*Parser + cache map[string]*Transaction } -func (pc *parserCache) Get(hash string) (p *Parser, ok bool) { - pc.mutex.RLock() - defer pc.mutex.RUnlock() +func (tc *transactionCache) Get(hash string) (t *Transaction, ok bool) { + tc.mutex.RLock() + defer tc.mutex.RUnlock() - p, ok = pc.cache[hash] + t, ok = tc.cache[hash] return } -func (pc *parserCache) Set(p *Parser) { - pc.mutex.Lock() - defer pc.mutex.Unlock() +func (tc *transactionCache) Set(t *Transaction) { + tc.mutex.Lock() + defer tc.mutex.Unlock() - 
pc.cache[string(p.TxHash)] = p + tc.cache[string(t.TxHash)] = t } -func (pc *parserCache) Clean() { - pc.mutex.Lock() - defer pc.mutex.Unlock() +func (tc *transactionCache) Clean() { + tc.mutex.Lock() + defer tc.mutex.Unlock() - pc.cache = make(map[string]*Parser) + tc.cache = make(map[string]*Transaction) } diff --git a/packages/parser/stop_network.go b/packages/parser/stop_network.go index b36896120..cef829a1c 100644 --- a/packages/parser/stop_network.go +++ b/packages/parser/stop_network.go @@ -16,27 +16,27 @@ var ( errNetworkStopping = errors.New("Network is stopping") ) -type StopNetworkParser struct { - *Parser +type StopNetworkTransaction struct { + *Transaction cert *utils.Cert } -func (p *StopNetworkParser) Init() error { +func (t *StopNetworkTransaction) Init() error { return nil } -func (p *StopNetworkParser) Validate() error { - if err := p.validate(); err != nil { - p.GetLogger().WithError(err).Error("validating tx") +func (t *StopNetworkTransaction) Validate() error { + if err := t.validate(); err != nil { + t.GetLogger().WithError(err).Error("validating tx") return err } return nil } -func (p *StopNetworkParser) validate() error { - data := p.TxPtr.(*consts.StopNetwork) +func (t *StopNetworkTransaction) validate() error { + data := t.TxPtr.(*consts.StopNetwork) cert, err := utils.ParseCert(data.StopNetworkCert) if err != nil { @@ -52,27 +52,27 @@ func (p *StopNetworkParser) validate() error { return err } - p.cert = cert + t.cert = cert return nil } -func (p *StopNetworkParser) Action() error { +func (t *StopNetworkTransaction) Action() error { // Allow execute transaction, if the certificate was used - if p.cert.EqualBytes(consts.UsedStopNetworkCerts...) { + if t.cert.EqualBytes(consts.UsedStopNetworkCerts...) { return nil } // Set the node in a pause state service.PauseNodeActivity(service.PauseTypeStopingNetwork) - p.GetLogger().Warn(messageNetworkStopping) + t.GetLogger().Warn(messageNetworkStopping) return errNetworkStopping } -func (p *StopNetworkParser) Rollback() error { +func (t *StopNetworkTransaction) Rollback() error { return nil } -func (p StopNetworkParser) Header() *tx.Header { +func (t StopNetworkTransaction) Header() *tx.Header { return nil } diff --git a/packages/parser/transaction.go b/packages/parser/transaction.go new file mode 100644 index 000000000..7fcda3540 --- /dev/null +++ b/packages/parser/transaction.go @@ -0,0 +1,519 @@ +package parser + +import ( + "bytes" + "encoding/hex" + "fmt" + "strings" + "time" + + "github.com/GenesisKernel/go-genesis/packages/consts" + "github.com/GenesisKernel/go-genesis/packages/converter" + "github.com/GenesisKernel/go-genesis/packages/crypto" + "github.com/GenesisKernel/go-genesis/packages/model" + "github.com/GenesisKernel/go-genesis/packages/script" + "github.com/GenesisKernel/go-genesis/packages/smart" + "github.com/GenesisKernel/go-genesis/packages/utils" + "github.com/GenesisKernel/go-genesis/packages/utils/tx" + + "github.com/shopspring/decimal" + log "github.com/sirupsen/logrus" + "gopkg.in/vmihailenco/msgpack.v2" +) + +// Transaction is a structure for parsing transactions +type Transaction struct { + BlockData *utils.BlockData + PrevBlock *utils.BlockData + dataType int + blockData []byte + CurrentVersion string + PublicKeys [][]byte + + TxBinaryData []byte // transaction binary data + TxFullData []byte // full transaction, with type and data + TxHash []byte + TxKeyID int64 + TxEcosystemID int64 + TxNodePosition uint32 + TxTime int64 + TxType int64 + TxCost int64 // Maximum cost of executing contract + 
TxFuel int64 // The fuel cost of executed contract + TxUsedCost decimal.Decimal // Used cost of CPU resources + TxPtr interface{} // Pointer to the corresponding struct in consts/struct.go + TxData map[string]interface{} + TxSmart *tx.SmartContract + TxContract *smart.Contract + TxHeader *tx.Header + tx TransactionInterface + DbTransaction *model.DbTransaction + SysUpdate bool + + SmartContract smart.SmartContract +} + +// GetLogger returns logger +func (t Transaction) GetLogger() *log.Entry { + if t.BlockData != nil && t.PrevBlock != nil { + logger := log.WithFields(log.Fields{"block_id": t.BlockData.BlockID, "block_time": t.BlockData.Time, "block_wallet_id": t.BlockData.KeyID, "block_state_id": t.BlockData.EcosystemID, "block_hash": t.BlockData.Hash, "block_version": t.BlockData.Version, "prev_block_id": t.PrevBlock.BlockID, "prev_block_time": t.PrevBlock.Time, "prev_block_wallet_id": t.PrevBlock.KeyID, "prev_block_state_id": t.PrevBlock.EcosystemID, "prev_block_hash": t.PrevBlock.Hash, "prev_block_version": t.PrevBlock.Version, "tx_type": t.TxType, "tx_time": t.TxTime, "tx_state_id": t.TxEcosystemID, "tx_wallet_id": t.TxKeyID}) + return logger + } + if t.BlockData != nil { + logger := log.WithFields(log.Fields{"block_id": t.BlockData.BlockID, "block_time": t.BlockData.Time, "block_wallet_id": t.BlockData.KeyID, "block_state_id": t.BlockData.EcosystemID, "block_hash": t.BlockData.Hash, "block_version": t.BlockData.Version, "tx_type": t.TxType, "tx_time": t.TxTime, "tx_state_id": t.TxEcosystemID, "tx_wallet_id": t.TxKeyID}) + return logger + } + if t.PrevBlock != nil { + logger := log.WithFields(log.Fields{"prev_block_id": t.PrevBlock.BlockID, "prev_block_time": t.PrevBlock.Time, "prev_block_wallet_id": t.PrevBlock.KeyID, "prev_block_state_id": t.PrevBlock.EcosystemID, "prev_block_hash": t.PrevBlock.Hash, "prev_block_version": t.PrevBlock.Version, "tx_type": t.TxType, "tx_time": t.TxTime, "tx_state_id": t.TxEcosystemID, "tx_wallet_id": t.TxKeyID}) + return logger + } + logger := log.WithFields(log.Fields{"tx_type": t.TxType, "tx_time": t.TxTime, "tx_state_id": t.TxEcosystemID, "tx_wallet_id": t.TxKeyID}) + return logger +} + +// TransactionInterface is parsing transactions +type TransactionInterface interface { + Init() error + Validate() error + Action() error + Rollback() error + Header() *tx.Header +} + +// GetTransaction returns TransactionInterface +func GetTransaction(t *Transaction, txType string) (TransactionInterface, error) { + switch txType { + case consts.TxTypeParserFirstBlock: + return &FirstBlockTransaction{t}, nil + case consts.TxTypeParserStopNetwork: + return &StopNetworkTransaction{t, nil}, nil + } + log.WithFields(log.Fields{"tx_type": txType, "type": consts.UnknownObject}).Error("unknown txType") + return nil, fmt.Errorf("Unknown txType: %s", txType) +} + +var txParserCache = &transactionCache{cache: make(map[string]*Transaction)} + +// ParseTransaction is parsing transaction +func ParseTransaction(buffer *bytes.Buffer) (*Transaction, error) { + if buffer.Len() == 0 { + log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("empty transaction buffer") + return nil, fmt.Errorf("empty transaction buffer") + } + + hash, err := crypto.Hash(buffer.Bytes()) + // or DoubleHash ? 
+ if err != nil { + log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("hashing transaction") + return nil, err + } + + if t, ok := txParserCache.Get(string(hash)); ok { + return t, nil + } + + t := new(Transaction) + t.TxHash = hash + t.TxUsedCost = decimal.New(0, 0) + t.TxFullData = buffer.Bytes() + + txType := int64(buffer.Bytes()[0]) + t.dataType = int(txType) + + // smart contract transaction + if IsContractTransaction(int(txType)) { + // skip byte with transaction type + buffer.Next(1) + t.TxBinaryData = buffer.Bytes() + if err := t.parseFromContract(buffer); err != nil { + return nil, err + } + + // struct transaction (only first block transaction for now) + } else if consts.IsStruct(int(txType)) { + t.TxBinaryData = buffer.Bytes() + if err := t.parseFromStruct(buffer, txType); err != nil { + return t, err + } + + // all other transactions + } else { + // skip byte with transaction type + buffer.Next(1) + t.TxBinaryData = buffer.Bytes() + if err := t.parseFromRegular(buffer, txType); err != nil { + return t, err + } + } + + txParserCache.Set(t) + + return t, nil +} + +// IsContractTransaction checks txType +func IsContractTransaction(txType int) bool { + return txType > 127 +} + +func (t *Transaction) parseFromStruct(buf *bytes.Buffer, txType int64) error { + trParser, err := GetTransaction(t, consts.TxTypes[int(txType)]) + if err != nil { + return err + } + t.tx = trParser + + t.TxPtr = consts.MakeStruct(consts.TxTypes[int(txType)]) + input := buf.Bytes() + if err := converter.BinUnmarshal(&input, t.TxPtr); err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError, "tx_type": int(txType)}).Error("getting parser for tx type") + return err + } + + head := consts.Header(t.TxPtr) + t.TxKeyID = head.KeyID + t.TxTime = int64(head.Time) + t.TxType = txType + + err = trParser.Validate() + if err != nil { + return utils.ErrInfo(err) + } + + return nil +} + +func (t *Transaction) parseFromContract(buf *bytes.Buffer) error { + smartTx := tx.SmartContract{} + if err := msgpack.Unmarshal(buf.Bytes(), &smartTx); err != nil { + log.WithFields(log.Fields{"tx_type": t.dataType, "tx_hash": t.TxHash, "error": err, "type": consts.UnmarshallingError}).Error("unmarshalling smart tx msgpack") + return err + } + t.TxPtr = nil + t.TxSmart = &smartTx + t.TxTime = smartTx.Time + t.TxEcosystemID = (smartTx.EcosystemID) + t.TxKeyID = smartTx.KeyID + + contract := smart.GetContractByID(int32(smartTx.Type)) + if contract == nil { + log.WithFields(log.Fields{"contract_type": smartTx.Type, "type": consts.NotFound}).Error("unknown contract") + return fmt.Errorf(`unknown contract %d`, smartTx.Type) + } + forsign := []string{smartTx.ForSign()} + + t.TxContract = contract + t.TxHeader = &smartTx.Header + + input := smartTx.Data + t.TxData = make(map[string]interface{}) + + if contract.Block.Info.(*script.ContractInfo).Tx != nil { + for _, fitem := range *contract.Block.Info.(*script.ContractInfo).Tx { + var err error + var v interface{} + var forv string + var isforv bool + + if fitem.ContainsTag(script.TagFile) { + var ( + data []byte + file *tx.File + ) + if err := converter.BinUnmarshal(&input, &data); err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling file") + return err + } + if err := msgpack.Unmarshal(data, &file); err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("unmarshalling file msgpack") + return err + } + + t.TxData[fitem.Name] = file.Data 
+ t.TxData[fitem.Name+"MimeType"] = file.MimeType + + forsign = append(forsign, file.MimeType, file.Hash) + continue + } + + switch fitem.Type.String() { + case `uint64`: + var val uint64 + converter.BinUnmarshal(&input, &val) + v = val + case `float64`: + var val float64 + converter.BinUnmarshal(&input, &val) + v = val + case `int64`: + v, err = converter.DecodeLenInt64(&input) + case script.Decimal: + var s string + if err := converter.BinUnmarshal(&input, &s); err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling script.Decimal") + return err + } + v, err = decimal.NewFromString(s) + case `string`: + var s string + if err := converter.BinUnmarshal(&input, &s); err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling string") + return err + } + v = s + case `[]uint8`: + var b []byte + if err := converter.BinUnmarshal(&input, &b); err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling string") + return err + } + v = hex.EncodeToString(b) + case `[]interface {}`: + count, err := converter.DecodeLength(&input) + if err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling []interface{}") + return err + } + isforv = true + list := make([]interface{}, 0) + for count > 0 { + length, err := converter.DecodeLength(&input) + if err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling tx length") + return err + } + if len(input) < int(length) { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError, "length": int(length), "slice length": len(input)}).Error("incorrect tx size") + return fmt.Errorf(`input slice is short`) + } + list = append(list, string(input[:length])) + input = input[length:] + count-- + } + if len(list) > 0 { + slist := make([]string, len(list)) + for j, lval := range list { + slist[j] = lval.(string) + } + forv = strings.Join(slist, `,`) + } + v = list + } + if t.TxData[fitem.Name] == nil { + t.TxData[fitem.Name] = v + } + if err != nil { + return err + } + if strings.Index(fitem.Tags, `image`) >= 0 { + continue + } + if isforv { + v = forv + } + forsign = append(forsign, fmt.Sprintf("%v", v)) + } + } + t.TxData[`forsign`] = strings.Join(forsign, ",") + + return nil +} + +func parseRegularTransaction(t *Transaction, buf *bytes.Buffer, txType int64) error { + trParser, err := GetTransaction(t, consts.TxTypes[int(txType)]) + if err != nil { + return err + } + t.tx = trParser + + err = trParser.Init() + if err != nil { + log.WithFields(log.Fields{"error": err, "tx_type": int(txType)}).Error("parser init") + return err + } + header := trParser.Header() + if header == nil { + log.WithFields(log.Fields{"error": err, "tx_type": int(txType)}).Error("parser get header") + return fmt.Errorf("tx header is nil") + } + + t.TxHeader = header + t.TxTime = header.Time + t.TxType = txType + t.TxEcosystemID = (header.EcosystemID) + t.TxKeyID = header.KeyID + + err = trParser.Validate() + if _, ok := err.(error); ok { + return utils.ErrInfo(err.(error)) + } + + return nil +} + +func (t *Transaction) parseFromRegular(buf *bytes.Buffer, txType int64) error { + trParser, err := GetTransaction(t, consts.TxTypes[int(txType)]) + if err != nil { + return err + } + t.tx = trParser + + err = trParser.Init() + if err != nil { + log.WithFields(log.Fields{"error": err, "tx_type": 
int(txType)}).Error("parser init") + return err + } + header := trParser.Header() + if header == nil { + log.WithFields(log.Fields{"error": err, "tx_type": int(txType)}).Error("parser get header") + return fmt.Errorf("tx header is nil") + } + + t.TxHeader = header + t.TxTime = header.Time + t.TxType = txType + t.TxEcosystemID = (header.EcosystemID) + t.TxKeyID = header.KeyID + + err = trParser.Validate() + if _, ok := err.(error); ok { + return utils.ErrInfo(err.(error)) + } + + return nil +} + +// CheckTransaction is checking transaction +func CheckTransaction(data []byte) (*tx.Header, error) { + trBuff := bytes.NewBuffer(data) + t, err := ParseTransaction(trBuff) + if err != nil { + return nil, err + } + + err = t.check(time.Now().Unix(), true) + if err != nil { + return nil, err + } + + return t.TxHeader, nil +} + +func (t *Transaction) check(checkTime int64, checkForDupTr bool) error { + err := CheckLogTx(t.TxFullData, checkForDupTr, false) + if err != nil { + return utils.ErrInfo(err) + } + logger := log.WithFields(log.Fields{"tx_type": t.dataType, "tx_time": t.TxTime, "tx_state_id": t.TxEcosystemID}) + // time in the transaction cannot be more than MAX_TX_FORW seconds of block time + if t.TxTime-consts.MAX_TX_FORW > checkTime { + logger.WithFields(log.Fields{"tx_max_forw": consts.MAX_TX_FORW, "type": consts.ParameterExceeded}).Error("time in the tx cannot be more than MAX_TX_FORW seconds of block time ") + return utils.ErrInfo(fmt.Errorf("transaction time is too big")) + } + + // time in transaction cannot be less than -24 of block time + if t.TxTime < checkTime-consts.MAX_TX_BACK { + logger.WithFields(log.Fields{"tx_max_back": consts.MAX_TX_BACK, "type": consts.ParameterExceeded}).Error("time in the tx cannot be less then -24 of block time") + return utils.ErrInfo(fmt.Errorf("incorrect transaction time")) + } + + if t.TxContract == nil { + if t.BlockData != nil && t.BlockData.BlockID != 1 { + if t.TxKeyID == 0 { + logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("Empty user id") + return utils.ErrInfo(fmt.Errorf("empty user id")) + } + } + } + + return nil +} + +func (t *Transaction) play() (string, error) { + // smart-contract + if t.TxContract != nil { + // check that there are enough money in CallContract + return t.CallContract(smart.CallInit | smart.CallCondition | smart.CallAction) + } + + if t.tx == nil { + return "", utils.ErrInfo(fmt.Errorf("can't find parser for %d", t.TxType)) + } + + err := t.tx.Action() + if err != nil { + return "", err + } + + return "", nil +} + +// AccessRights checks the access right by executing the condition value +func (t *Transaction) AccessRights(condition string, iscondition bool) error { + logger := t.GetLogger() + sp := &model.StateParameter{} + sp.SetTablePrefix(converter.Int64ToStr(t.TxSmart.EcosystemID)) + _, err := sp.Get(t.DbTransaction, condition) + if err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting state parameter by name transaction") + return err + } + conditions := sp.Value + if iscondition { + conditions = sp.Conditions + } + if len(conditions) > 0 { + ret, err := t.SmartContract.EvalIf(conditions) + if err != nil { + logger.WithFields(log.Fields{"type": consts.EvalError, "error": err, "conditions": conditions}).Error("evaluating conditions") + return err + } + if !ret { + logger.WithFields(log.Fields{"type": consts.AccessDenied}).Error("Access denied") + return fmt.Errorf(`Access denied`) + } + } else { + logger.WithFields(log.Fields{"type": consts.EmptyObject, 
"conditions": condition}).Error("No condition in state_parameters") + return fmt.Errorf(`There is not %s in state_parameters`, condition) + } + return nil +} + +// CallContract calls the contract functions according to the specified flags +func (t *Transaction) CallContract(flags int) (resultContract string, err error) { + sc := smart.SmartContract{ + VDE: false, + Rollback: true, + SysUpdate: false, + VM: smart.GetVM(false, 0), + TxSmart: *t.TxSmart, + TxData: t.TxData, + TxContract: t.TxContract, + TxCost: t.TxCost, + TxUsedCost: t.TxUsedCost, + BlockData: t.BlockData, + TxHash: t.TxHash, + PublicKeys: t.PublicKeys, + DbTransaction: t.DbTransaction, + } + resultContract, err = sc.CallContract(flags) + t.SysUpdate = sc.SysUpdate + return +} + +// CleanCache cleans cache of transaction parsers +func CleanCache() { + txParserCache.Clean() +} + +// GetTxTypeAndUserID returns tx type, wallet and citizen id from the block data +func GetTxTypeAndUserID(binaryBlock []byte) (txType int64, keyID int64) { + tmp := binaryBlock[:] + txType = converter.BinToDecBytesShift(&binaryBlock, 1) + if consts.IsStruct(int(txType)) { + var txHead consts.TxHeader + converter.BinUnmarshal(&tmp, &txHead) + keyID = txHead.KeyID + } + return +} diff --git a/packages/rollback/block.go b/packages/rollback/block.go index d574542a4..802344c76 100644 --- a/packages/rollback/block.go +++ b/packages/rollback/block.go @@ -24,6 +24,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/model" "github.com/GenesisKernel/go-genesis/packages/parser" "github.com/GenesisKernel/go-genesis/packages/smart" + "github.com/GenesisKernel/go-genesis/packages/utils" log "github.com/sirupsen/logrus" ) @@ -71,54 +72,54 @@ func RollbackBlock(data []byte, deleteBlock bool) error { func rollbackBlock(transaction *model.DbTransaction, block *parser.Block) error { // rollback transactions in reverse order logger := block.GetLogger() - for i := len(block.Parsers) - 1; i >= 0; i-- { - p := block.Parsers[i] - p.DbTransaction = transaction + for i := len(block.Transactions) - 1; i >= 0; i-- { + t := block.Transactions[i] + t.DbTransaction = transaction - _, err := model.MarkTransactionUnusedAndUnverified(transaction, p.TxHash) + _, err := model.MarkTransactionUnusedAndUnverified(transaction, t.TxHash) if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("starting transaction") return err } - _, err = model.DeleteLogTransactionsByHash(transaction, p.TxHash) + _, err = model.DeleteLogTransactionsByHash(transaction, t.TxHash) if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting log transactions by hash") return err } ts := &model.TransactionStatus{} - err = ts.UpdateBlockID(transaction, 0, p.TxHash) + err = ts.UpdateBlockID(transaction, 0, t.TxHash) if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating block id in transaction status") return err } - _, err = model.DeleteQueueTxByHash(transaction, p.TxHash) + _, err = model.DeleteQueueTxByHash(transaction, t.TxHash) if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting transacion from queue by hash") return err } - if p.TxContract != nil { - if _, err := p.CallContract(smart.CallInit | smart.CallRollback); err != nil { + if t.TxContract != nil { + if _, err := t.CallContract(smart.CallInit | smart.CallRollback); err != nil { return err } - if err = rollbackTransaction(p.TxHash, p.DbTransaction, logger); err != nil { + if err = 
rollbackTransaction(t.TxHash, t.DbTransaction, logger); err != nil { return err } } else { - MethodName := consts.TxTypes[int(p.TxType)] - txParser, err := parser.GetParser(p, MethodName) + MethodName := consts.TxTypes[int(t.TxType)] + txParser, err := parser.GetTransaction(t, MethodName) if err != nil { - return p.ErrInfo(err) + return utils.ErrInfo(err) } result := txParser.Init() if _, ok := result.(error); ok { - return p.ErrInfo(result.(error)) + return utils.ErrInfo(result.(error)) } result = txParser.Rollback() if _, ok := result.(error); ok { - return p.ErrInfo(result.(error)) + return utils.ErrInfo(result.(error)) } } } From eeea67318d30bc4fd0578d6121b8c02e11db4724 Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Thu, 21 Jun 2018 19:44:36 +0500 Subject: [PATCH 107/169] Added BOM checking (#406) --- packages/smart/funcs.go | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 6bde1d49e..c317605dc 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -35,6 +35,7 @@ import ( "strconv" "strings" "time" + "unicode/utf8" "github.com/GenesisKernel/go-genesis/packages/conf/syspar" "github.com/GenesisKernel/go-genesis/packages/consts" @@ -55,6 +56,8 @@ import ( const nodeBanNotificationHeader = "Your node was banned" +var BOM = []byte{0xEF, 0xBB, 0xBF} + type permTable struct { Insert string `json:"insert"` Update string `json:"update"` @@ -1681,6 +1684,9 @@ func StringToBytes(src string) []byte { // BytesToString converts bytes to string func BytesToString(src []byte) string { + if bytes.HasPrefix(src, BOM) && utf8.Valid(src[len(BOM):]) { + return string(src[len(BOM):]) + } return string(src) } From bf1abd0442663d2878a3415a816b642d51a330b8 Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Thu, 21 Jun 2018 19:45:22 +0500 Subject: [PATCH 108/169] feature/887-doublecontract (#407) * Fixed redefining contracts * change update permissions for notifications table * Fixed changing schema of system_parameters table * add reles_access for 'Apla Consensus asbl' * change founder account to user account on adding role_participant * Revert "change founder account to user account on adding role_participant" This reverts commit c13fd44ec482ca2a789983b78c42df83500a1ddc. 
* Fixed redefining contracts --- packages/api/contract_test.go | 8 ++++++++ packages/migration/ecosystem.go | 2 +- packages/smart/errors.go | 1 + packages/smart/funcs.go | 4 ++++ 4 files changed, 14 insertions(+), 1 deletion(-) diff --git a/packages/api/contract_test.go b/packages/api/contract_test.go index 1eebd9b2d..195ca81fb 100644 --- a/packages/api/contract_test.go +++ b/packages/api/contract_test.go @@ -30,6 +30,14 @@ import ( "github.com/GenesisKernel/go-genesis/packages/crypto" ) +func TestExistContract(t *testing.T) { + assert.NoError(t, keyLogin(1)) + form := url.Values{"Name": {`EditPage`}, "Value": {`contract EditPage {action {}}`}, + "ApplicationId": {`1`}, "Conditions": {`true`}} + err := postTx(`NewContract`, &form) + assert.EqualError(t, err, `{"type":"panic","error":"Contract EditPage already exists"}`) +} + func TestNewContracts(t *testing.T) { wanted := func(name, want string) bool { diff --git a/packages/migration/ecosystem.go b/packages/migration/ecosystem.go index 289f34e96..9f7afffb4 100644 --- a/packages/migration/ecosystem.go +++ b/packages/migration/ecosystem.go @@ -129,7 +129,7 @@ var schemaEcosystem = `DROP TABLE IF EXISTS "%[1]d_keys"; CREATE TABLE "%[1]d_ke CREATE TABLE "%[1]d_contracts" ( "id" bigint NOT NULL DEFAULT '0', - "name" text NOT NULL DEFAULT '', + "name" text NOT NULL UNIQUE DEFAULT '', "value" text NOT NULL DEFAULT '', "wallet_id" bigint NOT NULL DEFAULT '0', "token_id" bigint NOT NULL DEFAULT '1', diff --git a/packages/smart/errors.go b/packages/smart/errors.go index de42178d8..04676d096 100644 --- a/packages/smart/errors.go +++ b/packages/smart/errors.go @@ -20,6 +20,7 @@ import "errors" const ( eTableNotFound = `Table %s has not been found` + eContractExist = `Contract %s already exists` ) var ( diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index c317605dc..4023f0f7c 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -463,6 +463,10 @@ func CreateContract(sc *SmartContract, name, value, conditions string, walletID, } var id int64 var err error + + if GetContractByName(sc, name) != 0 { + return 0, fmt.Errorf(eContractExist, name) + } root, err := CompileContract(sc, value, sc.TxSmart.EcosystemID, walletID, tokenEcosystem) if err != nil { return 0, err From ddb49b63f6d73f71d26d89b8254a637f6059b383 Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Fri, 22 Jun 2018 19:25:15 +0500 Subject: [PATCH 109/169] Fixed CreateEcosystem (#419) --- packages/smart/funcs.go | 1 + packages/smart/smart.go | 6 ++++++ packages/smart/smart_p.go | 2 ++ 3 files changed, 9 insertions(+) diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 4023f0f7c..d7801efcb 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -75,6 +75,7 @@ type permColumn struct { type SmartContract struct { VDE bool Rollback bool + FullAccess bool SysUpdate bool VM *script.VM TxSmart tx.SmartContract diff --git a/packages/smart/smart.go b/packages/smart/smart.go index 8da0cc30b..f529a7779 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -626,6 +626,9 @@ func (sc *SmartContract) AccessTablePerm(table, action string) (map[string]strin } func (sc *SmartContract) AccessTable(table, action string) error { + if sc.FullAccess { + return nil + } _, err := sc.AccessTablePerm(table, action) return err } @@ -649,6 +652,9 @@ type colAccess struct { // AccessColumns checks access rights to the columns func (sc *SmartContract) AccessColumns(table string, columns *[]string, update bool) error { logger := sc.GetLogger() + 
if sc.FullAccess { + return nil + } if table == getDefTableName(sc, `parameters`) || table == getDefTableName(sc, `app_params`) { if update { if sc.TxSmart.KeyID == converter.StrToInt64(EcosysParam(sc, `founder_account`)) { diff --git a/packages/smart/smart_p.go b/packages/smart/smart_p.go index 084833b6b..0a3cf8117 100644 --- a/packages/smart/smart_p.go +++ b/packages/smart/smart_p.go @@ -453,6 +453,7 @@ func CreateEcosystem(sc *SmartContract, wallet int64, name string) (int64, error } sc.Rollback = false + sc.FullAccess = true if _, _, err = DBInsert(sc, `@`+idStr+"_pages", "id,name,value,menu,conditions", "1", "default_page", SysParamString("default_ecosystem_page"), "default_menu", `ContractConditions("MainCondition")`); err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("inserting default page") @@ -482,6 +483,7 @@ func CreateEcosystem(sc *SmartContract, wallet int64, name string) (int64, error return 0, err } + sc.FullAccess = false // because of we need to know which ecosystem to rollback. // All tables will be deleted so it's no need to rollback data from tables sc.Rollback = true From a73695bcc27cadd77f73017a11d88e340c7f194d Mon Sep 17 00:00:00 2001 From: Dmitriy Chertkov Date: Mon, 25 Jun 2018 15:17:37 +0500 Subject: [PATCH 110/169] Added execution of delayed contracts in next blocks --- packages/migration/first_ecosys_contracts_data.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/migration/first_ecosys_contracts_data.go b/packages/migration/first_ecosys_contracts_data.go index cafe7cb85..46f813f56 100644 --- a/packages/migration/first_ecosys_contracts_data.go +++ b/packages/migration/first_ecosys_contracts_data.go @@ -1385,7 +1385,7 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { error "Access denied" } - if $block != Int($cur["block_id"]) { + if $block < Int($cur["block_id"]) { error Sprintf("Delayed contract %%d must run on block %%s, current block %%d", $Id, $cur["block_id"], $block) } } From 8eacb53552b2126312b594d304a23b642764cd5f Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Fri, 1 Jun 2018 17:23:02 +0300 Subject: [PATCH 111/169] add batch insert --- packages/model/batch.go | 85 ++++++++++++++++++++++++++++++++++++ packages/model/batch_test.go | 44 +++++++++++++++++++ packages/model/queue_tx.go | 18 ++++++++ packages/tcpserver/type1.go | 14 +++--- 4 files changed, 156 insertions(+), 5 deletions(-) create mode 100644 packages/model/batch.go create mode 100644 packages/model/batch_test.go diff --git a/packages/model/batch.go b/packages/model/batch.go new file mode 100644 index 000000000..3d2719f82 --- /dev/null +++ b/packages/model/batch.go @@ -0,0 +1,85 @@ +package model + +import ( + "fmt" + "strings" +) + +const maxBatchRows = 1000 + +// BatchModel allows bulk insert on BatchModel slice +type BatchModel interface { + TableName() string + FieldValue(fieldName string) (interface{}, error) +} + +// BatchInsert create and execute batch queries from rows splitted by maxBatchRows and fields +func BatchInsert(rows []BatchModel, fields []string) error { + queries, values, err := batchQueue(rows, fields) + if err != nil { + return err + } + + for i := 0; i < len(queries); i++ { + if err := DBConn.Exec(queries[i], values[i]...).Error; err != nil { + return err + } + } + + return nil +} + +func batchQueue(rows []BatchModel, fields []string) (queries []string, values [][]interface{}, err error) { + for len(rows) > 0 { + if len(rows) > maxBatchRows { + q, vals, err := prepareQuery(rows[:maxBatchRows], 
fields) + if err != nil { + return queries, values, err + } + + queries = append(queries, q) + values = append(values, vals) + rows = rows[maxBatchRows:] + continue + } + + q, vals, err := prepareQuery(rows, fields) + if err != nil { + return queries, values, err + } + + queries = append(queries, q) + values = append(values, vals) + rows = nil + } + + return +} + +func prepareQuery(rows []BatchModel, fields []string) (query string, values []interface{}, err error) { + valueTemplates := make([]string, 0, len(rows)) + valueArgs := make([]interface{}, 0, len(rows)*len(fields)) + query = fmt.Sprintf(`INSERT INTO "%s" (%s) VALUES `, rows[0].TableName(), strings.Join(fields, ",")) + + rowQSlice := make([]string, 0, len(fields)) + for range fields { + rowQSlice = append(rowQSlice, "?") + } + + valueTemplate := fmt.Sprintf("(%s)", strings.Join(rowQSlice, ",")) + + for _, row := range rows { + valueTemplates = append(valueTemplates, valueTemplate) + for _, field := range fields { + val, err := row.FieldValue(field) + if err != nil { + return query, values, err + } + + valueArgs = append(valueArgs, val) + } + } + + query += strings.Join(valueTemplates, ",") + return +} diff --git a/packages/model/batch_test.go b/packages/model/batch_test.go new file mode 100644 index 000000000..ff35bc946 --- /dev/null +++ b/packages/model/batch_test.go @@ -0,0 +1,44 @@ +package model + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" +) + +type TestBatchModel struct { + ID int64 + Name string +} + +func (m TestBatchModel) TableName() string { + return "test_batch" +} + +func (m TestBatchModel) FieldValue(fieldName string) (interface{}, error) { + switch fieldName { + case "id": + return m.ID, nil + case "name": + return m.Name, nil + default: + return nil, fmt.Errorf("Unknown field %s of TestBatchModel", fieldName) + } +} + +func TestPrepareQuery(t *testing.T) { + slice := []BatchModel{ + TestBatchModel{ID: 1, Name: "first"}, + TestBatchModel{ID: 2, Name: "second"}, + } + + query, args, err := prepareQuery(slice, []string{"id", "name"}) + require.NoError(t, err) + + checkQuery := `INSERT INTO "test_batch" (id,name) VALUES (?,?),(?,?)` + checkArgs := []interface{}{1, "first", 2, "second"} + + require.Equal(t, checkQuery, query) + require.Equal(t, checkArgs, args) +} diff --git a/packages/model/queue_tx.go b/packages/model/queue_tx.go index b19c24224..18d57a0a5 100644 --- a/packages/model/queue_tx.go +++ b/packages/model/queue_tx.go @@ -1,5 +1,9 @@ package model +import ( + "fmt" +) + // QueueTx is model type QueueTx struct { Hash []byte `gorm:"primary_key;not null"` @@ -76,3 +80,17 @@ func GetAllUnverifiedAndUnusedTransactions() ([]*QueueTx, error) { } return result, nil } + +// FieldValue implementing BatchModel interface +func (qt QueueTx) FieldValue(fieldName string) (interface{}, error) { + switch fieldName { + case "hash": + return qt.Hash, nil + case "data": + return qt.Data, nil + case "from_gate": + return qt.FromGate, nil + default: + return nil, fmt.Errorf("Unknown field '%s' for QueueTx", fieldName) + } +} diff --git a/packages/tcpserver/type1.go b/packages/tcpserver/type1.go index 1265f58f2..615cfcedb 100644 --- a/packages/tcpserver/type1.go +++ b/packages/tcpserver/type1.go @@ -194,7 +194,9 @@ func getUnknownTransactions(buf *bytes.Buffer) ([]byte, error) { func saveNewTransactions(r *DisRequest) error { binaryTxs := r.Data + queue := []model.QueueTx{} log.WithFields(log.Fields{"binaryTxs": binaryTxs}).Debug("trying to save binary txs") + for len(binaryTxs) > 0 { txSize, err := 
converter.DecodeLength(&binaryTxs) if err != nil { @@ -222,12 +224,14 @@ func saveNewTransactions(r *DisRequest) error { log.WithFields(log.Fields{"type": consts.CryptoError, "error": err, "value": txBinData}).Fatal("cannot hash bindata") } - queueTx := &model.QueueTx{Hash: hash, Data: txBinData, FromGate: 1} + queue = append(queue, &model.QueueTx{Hash: hash, Data: txBinData, FromGate: 1}) err = queueTx.Create() - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("error creating QueueTx") - return err - } } + + if err := model.BatchInsert(queue, []string{"hash", "data", "from_gate"}); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("error creating QueueTx") + return err + } + return nil } From 117ad7daae5d6f13e32c525e412c417d30acbc11 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 4 Jun 2018 12:34:36 +0300 Subject: [PATCH 112/169] test and small fixes --- packages/api/smart_test.go | 49 +++++++++++++++++++++++++++++++++--- packages/model/batch.go | 4 +-- packages/model/batch_test.go | 2 +- packages/tcpserver/type1.go | 4 +-- 4 files changed, 51 insertions(+), 8 deletions(-) diff --git a/packages/api/smart_test.go b/packages/api/smart_test.go index bb338ca18..63e02fcf7 100644 --- a/packages/api/smart_test.go +++ b/packages/api/smart_test.go @@ -434,14 +434,57 @@ func TestUpdateSysParam(t *testing.T) { func TestUpdateFullNodesWithEmptyArray(t *testing.T) { require.NoErrorf(t, keyLogin(1), "on login") - byteNodes := `[]` - // byteNodes += `{"tcp_address":"127.0.0.1:7080", "api_address":"https://127.0.0.1:7081", "key_id":"5462687003324713865", "public_key":"4ea2433951ca21e6817426675874b2a6d98e5051c1100eddefa1847b0388e4834facf9abf427c46e2bc6cd5e3277fba533d03db553e499eb368194b3f1e514d4"}]` + byteNodes := `[` + byteNodes += `{"tcp_address":"127.0.0.1:7078", "api_address":"https://127.0.0.1:7079", "key_id":"-4466900793776865315", "public_key":"ca901a97e84d76f8d46e2053028f709074b3e60d3e2e33495840586567a0c961820d789592666b67b05c6ae120d5bd83d4388b2f1218638d8226d40ced0bb208"},` + byteNodes += `{"tcp_address":"127.0.0.1:7080", "api_address":"https://127.0.0.1:7081", "key_id":"542353610328569127", "public_key":"a8ada71764fd2f0c9fa1d2986455288f11f0f3931492d27dc62862fdff9c97c38923ef46679488ad1cd525342d4d974621db58f809be6f8d1c19fdab50abc06b"},` + byteNodes += `{"tcp_address":"127.0.0.1:7082", "api_address":"https://127.0.0.1:7083", "key_id":"5972241339967729614", "public_key":"de1b74d36ae39422f2478cba591f4d14eb017306f6ffdc3b577cc52ee50edb8fe7c7b2eb191a24c8ddfc567cef32152bab17de698ed7b3f2ab75f3bcc8b9b372"}` + byteNodes += `]` form := &url.Values{ "Name": {"full_nodes"}, "Value": {string(byteNodes)}, } - require.EqualError(t, postTx(`UpdateSysParam`, form), `{"type":"panic","error":"Invalid value"}`) + require.NoError(t, postTx(`UpdateSysParam`, form)) +} + +func TestHelper_InsertNodeKey(t *testing.T) { + + if err := keyLogin(1); err != nil { + t.Error(err) + return + } + + form := url.Values{ + `Value`: {`contract InsertNodeKey { + data { + KeyID string + PubKey string + } + conditions {} + action { + DBInsert("keys", "id,pub,amount", $KeyID, $PubKey, "100000000000000000000") + } + }`}, + `ApplicationId`: {`1`}, + `Conditions`: {`true`}, + } + + require.NoError(t, postTx(`NewContract`, &form)) + + forms := []url.Values{ + url.Values{ + `KeyID`: {"542353610328569127"}, + `PubKey`: {"be78f54bcf6bb7b49b7ea00790b18b40dd3f5e231ffc764f1c32d3f5a82ab322aee157931bbfca733bac83255002f5ded418f911b959b77a937f0d5d07de74f8"}, + }, + 
url.Values{ + `KeyID`: {"5972241339967729614"}, + `PubKey`: {"7b11a9ee4f509903118d5b965a819b778c83a21a52a033e5768d697a70a61a1bad270465f25d7f70683e977be93a9252e762488fc53808a90220d363d0a38eb6"}, + }, + } + + for _, frm := range forms { + require.NoError(t, postTx(`InsertNodeKey`, &frm)) + } } func TestValidateConditions(t *testing.T) { diff --git a/packages/model/batch.go b/packages/model/batch.go index 3d2719f82..f36c4d68d 100644 --- a/packages/model/batch.go +++ b/packages/model/batch.go @@ -58,7 +58,7 @@ func batchQueue(rows []BatchModel, fields []string) (queries []string, values [] func prepareQuery(rows []BatchModel, fields []string) (query string, values []interface{}, err error) { valueTemplates := make([]string, 0, len(rows)) - valueArgs := make([]interface{}, 0, len(rows)*len(fields)) + values = make([]interface{}, 0, len(rows)*len(fields)) query = fmt.Sprintf(`INSERT INTO "%s" (%s) VALUES `, rows[0].TableName(), strings.Join(fields, ",")) rowQSlice := make([]string, 0, len(fields)) @@ -76,7 +76,7 @@ func prepareQuery(rows []BatchModel, fields []string) (query string, values []in return query, values, err } - valueArgs = append(valueArgs, val) + values = append(values, val) } } diff --git a/packages/model/batch_test.go b/packages/model/batch_test.go index ff35bc946..2b260c2f5 100644 --- a/packages/model/batch_test.go +++ b/packages/model/batch_test.go @@ -37,7 +37,7 @@ func TestPrepareQuery(t *testing.T) { require.NoError(t, err) checkQuery := `INSERT INTO "test_batch" (id,name) VALUES (?,?),(?,?)` - checkArgs := []interface{}{1, "first", 2, "second"} + checkArgs := []interface{}{int64(1), "first", int64(2), "second"} require.Equal(t, checkQuery, query) require.Equal(t, checkArgs, args) diff --git a/packages/tcpserver/type1.go b/packages/tcpserver/type1.go index 615cfcedb..5cbb48c17 100644 --- a/packages/tcpserver/type1.go +++ b/packages/tcpserver/type1.go @@ -194,7 +194,7 @@ func getUnknownTransactions(buf *bytes.Buffer) ([]byte, error) { func saveNewTransactions(r *DisRequest) error { binaryTxs := r.Data - queue := []model.QueueTx{} + queue := []model.BatchModel{} log.WithFields(log.Fields{"binaryTxs": binaryTxs}).Debug("trying to save binary txs") for len(binaryTxs) > 0 { @@ -225,7 +225,7 @@ func saveNewTransactions(r *DisRequest) error { } queue = append(queue, &model.QueueTx{Hash: hash, Data: txBinData, FromGate: 1}) - err = queueTx.Create() + // err = queueTx.Create() } if err := model.BatchInsert(queue, []string{"hash", "data", "from_gate"}); err != nil { From 1f312c65ba5cd571b1656c94f79e9cac2248a19f Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Fri, 15 Jun 2018 17:02:51 +0300 Subject: [PATCH 113/169] requested changes --- packages/tcpserver/type1.go | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/tcpserver/type1.go b/packages/tcpserver/type1.go index 5cbb48c17..12d179b30 100644 --- a/packages/tcpserver/type1.go +++ b/packages/tcpserver/type1.go @@ -225,7 +225,6 @@ func saveNewTransactions(r *DisRequest) error { } queue = append(queue, &model.QueueTx{Hash: hash, Data: txBinData, FromGate: 1}) - // err = queueTx.Create() } if err := model.BatchInsert(queue, []string{"hash", "data", "from_gate"}); err != nil { From 9a32d9acda82d6a79c37e895735eddc57e23e306 Mon Sep 17 00:00:00 2001 From: Roman Potekhin Date: Tue, 26 Jun 2018 09:57:27 +0300 Subject: [PATCH 114/169] divide block and transaction to separate packages --- cmd/generateFirstBlock.go | 4 +- packages/{parser => block}/block.go | 176 ++-------------- packages/block/db.go | 189 ++++++++++++++++++ 
packages/{parser => block}/limits.go | 19 +- packages/block/serialization.go | 126 ++++++++++++ packages/daemons/block_generator.go | 23 ++- packages/daemons/blocks_collection.go | 80 ++++---- packages/daemons/locking.go | 4 +- packages/daemons/queue_parser_tx.go | 6 +- packages/daylight/daemonsctl/daemonsctl.go | 4 +- packages/rollback/block.go | 19 +- packages/transaction/custom/custom.go | 14 ++ .../custom}/first_block.go | 71 +------ .../custom}/stop_network.go | 24 ++- packages/{parser => transaction}/db.go | 145 +------------- .../{parser => transaction}/transaction.go | 43 ++-- .../transaction_cache.go} | 2 +- 17 files changed, 465 insertions(+), 484 deletions(-) rename packages/{parser => block}/block.go (65%) create mode 100644 packages/block/db.go rename packages/{parser => block}/limits.go (91%) create mode 100644 packages/block/serialization.go create mode 100644 packages/transaction/custom/custom.go rename packages/{parser => transaction/custom}/first_block.go (70%) rename packages/{parser => transaction/custom}/stop_network.go (76%) rename packages/{parser => transaction}/db.go (58%) rename packages/{parser => transaction}/transaction.go (96%) rename packages/{parser/parser_cache.go => transaction/transaction_cache.go} (96%) diff --git a/cmd/generateFirstBlock.go b/cmd/generateFirstBlock.go index 16d435278..ee54a8e13 100644 --- a/cmd/generateFirstBlock.go +++ b/cmd/generateFirstBlock.go @@ -9,10 +9,10 @@ import ( "path/filepath" + "github.com/GenesisKernel/go-genesis/packages/block" "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/converter" - "github.com/GenesisKernel/go-genesis/packages/parser" "github.com/GenesisKernel/go-genesis/packages/utils" log "github.com/sirupsen/logrus" ) @@ -83,7 +83,7 @@ var generateFirstBlockCmd = &cobra.Command{ return } - block, err := parser.MarshallBlock(header, [][]byte{tx}, []byte("0"), "") + block, err := block.MarshallBlock(header, [][]byte{tx}, []byte("0"), "") if err != nil { log.WithFields(log.Fields{"type": consts.MarshallingError, "error": err}).Fatal("first block marshalling") return diff --git a/packages/parser/block.go b/packages/block/block.go similarity index 65% rename from packages/parser/block.go rename to packages/block/block.go index 971edf39a..5793e255c 100644 --- a/packages/parser/block.go +++ b/packages/block/block.go @@ -1,4 +1,4 @@ -package parser +package block import ( "bytes" @@ -8,8 +8,9 @@ import ( "github.com/GenesisKernel/go-genesis/packages/conf/syspar" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/converter" - "github.com/GenesisKernel/go-genesis/packages/crypto" "github.com/GenesisKernel/go-genesis/packages/model" + "github.com/GenesisKernel/go-genesis/packages/transaction" + "github.com/GenesisKernel/go-genesis/packages/transaction/custom" "github.com/GenesisKernel/go-genesis/packages/utils" log "github.com/sirupsen/logrus" @@ -21,7 +22,7 @@ type Block struct { PrevHeader *utils.BlockData MrklRoot []byte BinData []byte - Transactions []*Transaction + Transactions []*transaction.Transaction SysUpdate bool GenBlock bool // it equals true when we are generating a new block StopCount int // The count of good tx in the block @@ -38,7 +39,7 @@ func (b Block) GetLogger() *log.Entry { } // PlayBlockSafe is inserting block safely -func (b *Block) PlayBlockSafe() error { +func (b *Block) PlaySafe() error { logger := b.GetLogger() dbTransaction, err := 
model.StartTransaction() if err != nil { @@ -46,7 +47,7 @@ func (b *Block) PlayBlockSafe() error { return err } - err = b.PlayBlock(dbTransaction) + err = b.Play(dbTransaction) if b.GenBlock && b.StopCount > 0 { doneTx := b.Transactions[:b.StopCount] trData := make([][]byte, 0, b.StopCount) @@ -66,7 +67,7 @@ func (b *Block) PlayBlockSafe() error { } isFirstBlock := b.Header.BlockID == 1 - nb, err := ParseBlock(bytes.NewBuffer(newBlockData), isFirstBlock) + nb, err := UnmarshallBlock(bytes.NewBuffer(newBlockData), isFirstBlock) if err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("parsing new block") return err @@ -102,10 +103,6 @@ func (b *Block) PlayBlockSafe() error { return nil } -func (b *Block) readPreviousBlockFromMemory() error { - return nil -} - func (b *Block) readPreviousBlockFromBlockchainTable() error { if b.Header.BlockID == 1 { b.PrevHeader = &utils.BlockData{} @@ -120,7 +117,7 @@ func (b *Block) readPreviousBlockFromBlockchainTable() error { return nil } -func (b *Block) PlayBlock(dbTransaction *model.DbTransaction) error { +func (b *Block) Play(dbTransaction *model.DbTransaction) error { logger := b.GetLogger() if _, err := model.DeleteUsedTransactions(dbTransaction); err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("delete used transactions") @@ -139,12 +136,12 @@ func (b *Block) PlayBlock(dbTransaction *model.DbTransaction) error { logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": t.TxHash}).Error("using savepoint") return err } - msg, err = t.play() + msg, err = t.Play() if err == nil && t.TxSmart != nil { err = limits.CheckLimit(t) } if err != nil { - if err == errNetworkStopping { + if err == custom.ErrNetworkStopping { return err } @@ -162,7 +159,7 @@ func (b *Block) PlayBlock(dbTransaction *model.DbTransaction) error { } // skip this transaction model.MarkTransactionUsed(t.DbTransaction, t.TxHash) - MarkTransactionBad(t.DbTransaction, t.TxHash, err.Error()) + transaction.MarkTransactionBad(t.DbTransaction, t.TxHash, err.Error()) if t.SysUpdate { if err = syspar.SysUpdate(t.DbTransaction); err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") @@ -191,7 +188,7 @@ func (b *Block) PlayBlock(dbTransaction *model.DbTransaction) error { logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": t.TxHash}).Error("updating transaction status block id") return err } - if err := InsertInLogTx(t.DbTransaction, t.TxFullData, t.TxTime); err != nil { + if err := transaction.InsertInLogTx(t.DbTransaction, t.TxFullData, t.TxTime); err != nil { return utils.ErrInfo(err) } } @@ -199,8 +196,7 @@ func (b *Block) PlayBlock(dbTransaction *model.DbTransaction) error { } // CheckBlock is checking block -func (b *Block) CheckBlock() error { - +func (b *Block) Check() error { logger := b.GetLogger() // exclude blocks from future if b.Header.Time > time.Now().Unix() { @@ -264,7 +260,7 @@ func (b *Block) CheckBlock() error { return utils.ErrInfo(fmt.Errorf("max_block_user_transactions")) } - if err := t.check(b.Header.Time, false); err != nil { + if err := t.Check(b.Header.Time, false); err != nil { return utils.ErrInfo(err) } @@ -320,11 +316,11 @@ func InsertBlockWOForks(data []byte, genBlock, firstBlock bool) error { return err } block.GenBlock = genBlock - if err := block.CheckBlock(); err != nil { + if err := block.Check(); err != nil { return err } - err = block.PlayBlockSafe() + err = block.PlaySafe() if err != nil { return 
err } @@ -333,31 +329,6 @@ func InsertBlockWOForks(data []byte, genBlock, firstBlock bool) error { return nil } -// ProcessBlockWherePrevFromMemory is processing block with in memory previous block -func ProcessBlockWherePrevFromMemory(data []byte) (*Block, error) { - if int64(len(data)) > syspar.GetMaxBlockSize() { - log.WithFields(log.Fields{"size": len(data), "max_size": syspar.GetMaxBlockSize(), "type": consts.ParameterExceeded}).Error("binary block size exceeds max block size") - return nil, utils.ErrInfo(fmt.Errorf(`len(binaryBlock) > variables.Int64["max_block_size"]`)) - } - - buf := bytes.NewBuffer(data) - if buf.Len() == 0 { - log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("block data is empty") - return nil, fmt.Errorf("empty buffer") - } - - block, err := ParseBlock(buf, false) - if err != nil { - return nil, err - } - block.BinData = data - - if err := block.readPreviousBlockFromMemory(); err != nil { - return nil, err - } - return block, nil -} - // ProcessBlockWherePrevFromBlockchainTable is processing block with in table previous block func ProcessBlockWherePrevFromBlockchainTable(data []byte, checkSize bool) (*Block, error) { if checkSize && int64(len(data)) > syspar.GetMaxBlockSize() { @@ -371,7 +342,7 @@ func ProcessBlockWherePrevFromBlockchainTable(data []byte, checkSize bool) (*Blo return nil, fmt.Errorf("empty buffer") } - block, err := ParseBlock(buf, !checkSize) + block, err := UnmarshallBlock(buf, !checkSize) if err != nil { return nil, err } @@ -383,116 +354,3 @@ func ProcessBlockWherePrevFromBlockchainTable(data []byte, checkSize bool) (*Blo return block, nil } - -func ParseBlock(blockBuffer *bytes.Buffer, firstBlock bool) (*Block, error) { - header, err := utils.ParseBlockHeader(blockBuffer, !firstBlock) - if err != nil { - return nil, err - } - - logger := log.WithFields(log.Fields{"block_id": header.BlockID, "block_time": header.Time, "block_wallet_id": header.KeyID, - "block_state_id": header.EcosystemID, "block_hash": header.Hash, "block_version": header.Version}) - transactions := make([]*Transaction, 0) - - var mrklSlice [][]byte - - // parse transactions - for blockBuffer.Len() > 0 { - transactionSize, err := converter.DecodeLengthBuf(blockBuffer) - if err != nil { - logger.WithFields(log.Fields{"type": consts.UnmarshallingError, "error": err}).Error("transaction size is 0") - return nil, fmt.Errorf("bad block format (%s)", err) - } - if blockBuffer.Len() < int(transactionSize) { - logger.WithFields(log.Fields{"size": blockBuffer.Len(), "match_size": int(transactionSize), "type": consts.SizeDoesNotMatch}).Error("transaction size does not matches encoded length") - return nil, fmt.Errorf("bad block format (transaction len is too big: %d)", transactionSize) - } - - if transactionSize == 0 { - logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("transaction size is 0") - return nil, fmt.Errorf("transaction size is 0") - } - - bufTransaction := bytes.NewBuffer(blockBuffer.Next(int(transactionSize))) - t, err := ParseTransaction(bufTransaction) - if err != nil { - if t != nil && t.TxHash != nil { - MarkTransactionBad(t.DbTransaction, t.TxHash, err.Error()) - } - return nil, fmt.Errorf("parse transaction error(%s)", err) - } - t.BlockData = &header - - transactions = append(transactions, t) - - // build merkle tree - if len(t.TxFullData) > 0 { - dSha256Hash, err := crypto.DoubleHash(t.TxFullData) - if err != nil { - logger.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("double hashing tx full data") - return 
nil, err - } - dSha256Hash = converter.BinToHex(dSha256Hash) - mrklSlice = append(mrklSlice, dSha256Hash) - } - } - - if len(mrklSlice) == 0 { - mrklSlice = append(mrklSlice, []byte("0")) - } - - return &Block{ - Header: header, - Transactions: transactions, - MrklRoot: utils.MerkleTreeRoot(mrklSlice), - }, nil -} - -// MarshallBlock is marshalling block -func MarshallBlock(header *utils.BlockData, trData [][]byte, prevHash []byte, key string) ([]byte, error) { - var mrklArray [][]byte - var blockDataTx []byte - var signed []byte - logger := log.WithFields(log.Fields{"block_id": header.BlockID, "block_hash": header.Hash, "block_time": header.Time, "block_version": header.Version, "block_wallet_id": header.KeyID, "block_state_id": header.EcosystemID}) - - for _, tr := range trData { - doubleHash, err := crypto.DoubleHash(tr) - if err != nil { - logger.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("double hashing transaction") - return nil, err - } - mrklArray = append(mrklArray, converter.BinToHex(doubleHash)) - blockDataTx = append(blockDataTx, converter.EncodeLengthPlusData(tr)...) - } - - if key != "" { - if len(mrklArray) == 0 { - mrklArray = append(mrklArray, []byte("0")) - } - mrklRoot := utils.MerkleTreeRoot(mrklArray) - - forSign := fmt.Sprintf("0,%d,%x,%d,%d,%d,%d,%s", - header.BlockID, prevHash, header.Time, header.EcosystemID, header.KeyID, header.NodePosition, mrklRoot) - - var err error - signed, err = crypto.Sign(key, forSign) - if err != nil { - logger.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("signing blocko") - return nil, err - } - } - - var buf bytes.Buffer - // fill header - buf.Write(converter.DecToBin(header.Version, 2)) - buf.Write(converter.DecToBin(header.BlockID, 4)) - buf.Write(converter.DecToBin(header.Time, 4)) - buf.Write(converter.DecToBin(header.EcosystemID, 4)) - buf.Write(converter.EncodeLenInt64InPlace(header.KeyID)) - buf.Write(converter.DecToBin(header.NodePosition, 1)) - buf.Write(converter.EncodeLengthPlusData(signed)) - // data - buf.Write(blockDataTx) - - return buf.Bytes(), nil -} diff --git a/packages/block/db.go b/packages/block/db.go new file mode 100644 index 000000000..84fe63a6c --- /dev/null +++ b/packages/block/db.go @@ -0,0 +1,189 @@ +package block + +import ( + "bytes" + "encoding/json" + "fmt" + "time" + + "github.com/GenesisKernel/go-genesis/packages/consts" + "github.com/GenesisKernel/go-genesis/packages/converter" + "github.com/GenesisKernel/go-genesis/packages/crypto" + "github.com/GenesisKernel/go-genesis/packages/model" + "github.com/GenesisKernel/go-genesis/packages/utils" + + log "github.com/sirupsen/logrus" +) + +// UpdBlockInfo updates info_block table +func UpdBlockInfo(dbTransaction *model.DbTransaction, block *Block) error { + blockID := block.Header.BlockID + // for the local tests + forSha := fmt.Sprintf("%d,%x,%s,%d,%d,%d,%d", blockID, block.PrevHeader.Hash, block.MrklRoot, + block.Header.Time, block.Header.EcosystemID, block.Header.KeyID, block.Header.NodePosition) + + hash, err := crypto.DoubleHash([]byte(forSha)) + if err != nil { + log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Fatal("double hashing block") + } + + block.Header.Hash = hash + if block.Header.BlockID == 1 { + ib := &model.InfoBlock{ + Hash: hash, + BlockID: blockID, + Time: block.Header.Time, + EcosystemID: block.Header.EcosystemID, + KeyID: block.Header.KeyID, + NodePosition: converter.Int64ToStr(block.Header.NodePosition), + CurrentVersion: fmt.Sprintf("%d", 
block.Header.Version), + } + err := ib.Create(dbTransaction) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating info block") + return fmt.Errorf("error insert into info_block %s", err) + } + } else { + ibUpdate := &model.InfoBlock{ + Hash: hash, + BlockID: blockID, + Time: block.Header.Time, + EcosystemID: block.Header.EcosystemID, + KeyID: block.Header.KeyID, + NodePosition: converter.Int64ToStr(block.Header.NodePosition), + Sent: 0, + } + if err := ibUpdate.Update(dbTransaction); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating info block") + return fmt.Errorf("error while updating info_block: %s", err) + } + } + + return nil +} + +// InsertIntoBlockchain inserts a block into the blockchain +func InsertIntoBlockchain(transaction *model.DbTransaction, block *Block) error { + // for local tests + blockID := block.Header.BlockID + + // record into the block chain + bl := &model.Block{} + err := bl.DeleteById(transaction, blockID) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting block by id") + return err + } + rollbackTx := &model.RollbackTx{} + blockRollbackTxs, err := rollbackTx.GetBlockRollbackTransactions(transaction, blockID) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting block rollback txs") + return err + } + buffer := bytes.Buffer{} + for _, rollbackTx := range blockRollbackTxs { + rollbackTxBytes, err := json.Marshal(rollbackTx) + if err != nil { + log.WithFields(log.Fields{"type": consts.JSONMarshallError, "error": err}).Error("marshalling rollback_tx to json") + return err + } + + buffer.Write(rollbackTxBytes) + } + rollbackTxsHash, err := crypto.Hash(buffer.Bytes()) + if err != nil { + log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("hashing block rollback_txs") + return err + } + b := &model.Block{ + ID: blockID, + Hash: block.Header.Hash, + Data: block.BinData, + EcosystemID: block.Header.EcosystemID, + KeyID: block.Header.KeyID, + NodePosition: block.Header.NodePosition, + Time: block.Header.Time, + RollbacksHash: rollbackTxsHash, + Tx: int32(len(block.Transactions)), + } + blockTimeCalculator, err := utils.BuildBlockTimeCalculator() + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating block") + return err + } + validBlockTime := true + if blockID > 1 { + validBlockTime, err = blockTimeCalculator.ValidateBlock(b.NodePosition, time.Unix(b.Time, 0)) + if err != nil { + log.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("block validation") + return err + } + } + if validBlockTime { + err = b.Create(transaction) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating block") + return err + } + } else { + err := fmt.Errorf("Invalid block time: %d", block.Header.Time) + log.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("invalid block time") + return err + } + + return nil +} + +// GetBlockDataFromBlockChain is retrieving block data from blockchain +func GetBlockDataFromBlockChain(blockID int64) (*utils.BlockData, error) { + BlockData := new(utils.BlockData) + block := &model.Block{} + _, err := block.Get(blockID) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("Getting block by ID") + return BlockData, utils.ErrInfo(err) + } + + header, err := 
utils.ParseBlockHeader(bytes.NewBuffer(block.Data), false) + if err != nil { + return nil, utils.ErrInfo(err) + } + + BlockData = &header + BlockData.Hash = block.Hash + return BlockData, nil +} + +// GetDataFromFirstBlock returns data of first block +func GetDataFromFirstBlock() (data *consts.FirstBlock, ok bool) { + block := &model.Block{} + isFound, err := block.Get(1) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting record of first block") + return + } + + if !isFound { + return + } + + pb, err := UnmarshallBlock(bytes.NewBuffer(block.Data), true) + if err != nil { + log.WithFields(log.Fields{"type": consts.ParserError, "error": err}).Error("parsing data of first block") + return + } + + if len(pb.Transactions) == 0 { + log.WithFields(log.Fields{"type": consts.ParserError}).Error("list of parsers is empty") + return + } + + t := pb.Transactions[0] + data, ok = t.TxPtr.(*consts.FirstBlock) + if !ok { + log.WithFields(log.Fields{"type": consts.ParserError}).Error("getting data of first block") + return + } + + return +} diff --git a/packages/parser/limits.go b/packages/block/limits.go similarity index 91% rename from packages/parser/limits.go rename to packages/block/limits.go index e269ad873..bf66df5fe 100644 --- a/packages/parser/limits.go +++ b/packages/block/limits.go @@ -14,7 +14,7 @@ // You should have received a copy of the GNU Lesser General Public License // along with the go-daylight library. If not, see . -package parser +package block import ( "errors" @@ -26,6 +26,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/model" "github.com/GenesisKernel/go-genesis/packages/script" + "github.com/GenesisKernel/go-genesis/packages/transaction" log "github.com/sirupsen/logrus" ) @@ -46,7 +47,7 @@ type Limits struct { // Limiter describes interface functions for limits type Limiter interface { init(*Block) - check(*Transaction, int) error + check(*transaction.Transaction, int) error } type limiterModes struct { @@ -90,7 +91,7 @@ func NewLimits(b *Block) (limits *Limits) { } // CheckLimit calls each limiter -func (limits *Limits) CheckLimit(t *Transaction) error { +func (limits *Limits) CheckLimit(t *transaction.Transaction) error { for _, limiter := range limits.Limiters { if err := limiter.check(t, limits.Mode); err != nil { return err @@ -115,7 +116,7 @@ func (bl *txMaxLimit) init(b *Block) { bl.Limit = syspar.GetMaxTxCount() } -func (bl *txMaxLimit) check(t *Transaction, mode int) error { +func (bl *txMaxLimit) check(t *transaction.Transaction, mode int) error { bl.Count++ if bl.Count > bl.Limit { if mode == letPreprocess { @@ -137,7 +138,7 @@ func (bl *timeBlockLimit) init(b *Block) { bl.Limit = time.Millisecond * time.Duration(syspar.GetMaxBlockGenerationTime()) } -func (bl *timeBlockLimit) check(t *Transaction, mode int) error { +func (bl *timeBlockLimit) check(t *transaction.Transaction, mode int) error { if time.Since(bl.Start) < bl.Limit { return nil } @@ -160,7 +161,7 @@ func (bl *txUserLimit) init(b *Block) { bl.Limit = syspar.GetMaxBlockUserTx() } -func (bl *txUserLimit) check(t *Transaction, mode int) error { +func (bl *txUserLimit) check(t *transaction.Transaction, mode int) error { var ( count int ok bool @@ -192,7 +193,7 @@ func (bl *txUserEcosysLimit) init(b *Block) { bl.TxEcosys = make(map[int64]ecosysLimit) } -func (bl *txUserEcosysLimit) check(t *Transaction, mode int) error { +func (bl *txUserEcosysLimit) check(t *transaction.Transaction, mode int) 
error { keyID := t.TxSmart.KeyID ecosystemID := t.TxSmart.EcosystemID if val, ok := bl.TxEcosys[ecosystemID]; ok { @@ -237,7 +238,7 @@ func (bl *txMaxSize) init(b *Block) { bl.LimitTx = syspar.GetMaxTxSize() } -func (bl *txMaxSize) check(t *Transaction, mode int) error { +func (bl *txMaxSize) check(t *transaction.Transaction, mode int) error { size := int64(len(t.TxFullData)) if size > bl.LimitTx { return limitError(`txMaxSize`, `Max size of tx`) @@ -264,7 +265,7 @@ func (bl *txMaxFuel) init(b *Block) { bl.LimitTx = syspar.GetMaxTxFuel() } -func (bl *txMaxFuel) check(t *Transaction, mode int) error { +func (bl *txMaxFuel) check(t *transaction.Transaction, mode int) error { fuel := t.TxFuel if fuel > bl.LimitTx { return limitError(`txMaxFuel`, `Max fuel of tx %d > %d`, fuel, bl.LimitTx) diff --git a/packages/block/serialization.go b/packages/block/serialization.go new file mode 100644 index 000000000..22f30fe79 --- /dev/null +++ b/packages/block/serialization.go @@ -0,0 +1,126 @@ +package block + +import ( + "bytes" + "fmt" + + "github.com/GenesisKernel/go-genesis/packages/consts" + "github.com/GenesisKernel/go-genesis/packages/converter" + "github.com/GenesisKernel/go-genesis/packages/crypto" + "github.com/GenesisKernel/go-genesis/packages/transaction" + "github.com/GenesisKernel/go-genesis/packages/utils" + log "github.com/sirupsen/logrus" +) + +// MarshallBlock is marshalling block +func MarshallBlock(header *utils.BlockData, trData [][]byte, prevHash []byte, key string) ([]byte, error) { + var mrklArray [][]byte + var blockDataTx []byte + var signed []byte + logger := log.WithFields(log.Fields{"block_id": header.BlockID, "block_hash": header.Hash, "block_time": header.Time, "block_version": header.Version, "block_wallet_id": header.KeyID, "block_state_id": header.EcosystemID}) + + for _, tr := range trData { + doubleHash, err := crypto.DoubleHash(tr) + if err != nil { + logger.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("double hashing transaction") + return nil, err + } + mrklArray = append(mrklArray, converter.BinToHex(doubleHash)) + blockDataTx = append(blockDataTx, converter.EncodeLengthPlusData(tr)...) 
+ } + + if key != "" { + if len(mrklArray) == 0 { + mrklArray = append(mrklArray, []byte("0")) + } + mrklRoot := utils.MerkleTreeRoot(mrklArray) + + forSign := fmt.Sprintf("0,%d,%x,%d,%d,%d,%d,%s", + header.BlockID, prevHash, header.Time, header.EcosystemID, header.KeyID, header.NodePosition, mrklRoot) + + var err error + signed, err = crypto.Sign(key, forSign) + if err != nil { + logger.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("signing blocko") + return nil, err + } + } + + var buf bytes.Buffer + // fill header + buf.Write(converter.DecToBin(header.Version, 2)) + buf.Write(converter.DecToBin(header.BlockID, 4)) + buf.Write(converter.DecToBin(header.Time, 4)) + buf.Write(converter.DecToBin(header.EcosystemID, 4)) + buf.Write(converter.EncodeLenInt64InPlace(header.KeyID)) + buf.Write(converter.DecToBin(header.NodePosition, 1)) + buf.Write(converter.EncodeLengthPlusData(signed)) + // data + buf.Write(blockDataTx) + + return buf.Bytes(), nil +} + +func UnmarshallBlock(blockBuffer *bytes.Buffer, firstBlock bool) (*Block, error) { + header, err := utils.ParseBlockHeader(blockBuffer, !firstBlock) + if err != nil { + return nil, err + } + + logger := log.WithFields(log.Fields{"block_id": header.BlockID, "block_time": header.Time, "block_wallet_id": header.KeyID, + "block_state_id": header.EcosystemID, "block_hash": header.Hash, "block_version": header.Version}) + transactions := make([]*transaction.Transaction, 0) + + var mrklSlice [][]byte + + // parse transactions + for blockBuffer.Len() > 0 { + transactionSize, err := converter.DecodeLengthBuf(blockBuffer) + if err != nil { + logger.WithFields(log.Fields{"type": consts.UnmarshallingError, "error": err}).Error("transaction size is 0") + return nil, fmt.Errorf("bad block format (%s)", err) + } + if blockBuffer.Len() < int(transactionSize) { + logger.WithFields(log.Fields{"size": blockBuffer.Len(), "match_size": int(transactionSize), "type": consts.SizeDoesNotMatch}).Error("transaction size does not matches encoded length") + return nil, fmt.Errorf("bad block format (transaction len is too big: %d)", transactionSize) + } + + if transactionSize == 0 { + logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("transaction size is 0") + return nil, fmt.Errorf("transaction size is 0") + } + + bufTransaction := bytes.NewBuffer(blockBuffer.Next(int(transactionSize))) + t, err := transaction.ParseTransaction(bufTransaction) + if err != nil { + if t != nil && t.TxHash != nil { + transaction.MarkTransactionBad(t.DbTransaction, t.TxHash, err.Error()) + } + return nil, fmt.Errorf("parse transaction error(%s)", err) + } + t.BlockData = &header + + transactions = append(transactions, t) + + // build merkle tree + if len(t.TxFullData) > 0 { + dSha256Hash, err := crypto.DoubleHash(t.TxFullData) + if err != nil { + logger.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("double hashing tx full data") + return nil, err + } + dSha256Hash = converter.BinToHex(dSha256Hash) + mrklSlice = append(mrklSlice, dSha256Hash) + } + } + + if len(mrklSlice) == 0 { + mrklSlice = append(mrklSlice, []byte("0")) + } + + return &Block{ + Header: header, + Transactions: transactions, + MrklRoot: utils.MerkleTreeRoot(mrklSlice), + }, nil +} diff --git a/packages/daemons/block_generator.go b/packages/daemons/block_generator.go index f55909d9b..bc5950af1 100644 --- a/packages/daemons/block_generator.go +++ b/packages/daemons/block_generator.go @@ -21,13 +21,14 @@ import ( "context" "time" + 
"github.com/GenesisKernel/go-genesis/packages/block" "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/conf/syspar" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/model" "github.com/GenesisKernel/go-genesis/packages/notificator" - "github.com/GenesisKernel/go-genesis/packages/parser" "github.com/GenesisKernel/go-genesis/packages/service" + "github.com/GenesisKernel/go-genesis/packages/transaction" "github.com/GenesisKernel/go-genesis/packages/utils" log "github.com/sirupsen/logrus" @@ -134,7 +135,7 @@ func BlockGenerator(ctx context.Context, d *daemon) error { return err } - err = parser.InsertBlockWOForks(blockBin, true, false) + err = block.InsertBlockWOForks(blockBin, true, false) if err != nil { return err } @@ -150,14 +151,14 @@ func generateNextBlock(blockHeader *utils.BlockData, trs []*model.Transaction, k trData = append(trData, tr.Data) } - return parser.MarshallBlock(blockHeader, trData, prevBlockHash, key) + return block.MarshallBlock(blockHeader, trData, prevBlockHash, key) } func processTransactions(logger *log.Entry) ([]*model.Transaction, error) { - p := new(parser.Transaction) + p := new(transaction.Transaction) // verify transactions - err := parser.ProcessTransactionsQueue(p.DbTransaction) + err := transaction.ProcessTransactionsQueue(p.DbTransaction) if err != nil { return nil, err } @@ -168,28 +169,28 @@ func processTransactions(logger *log.Entry) ([]*model.Transaction, error) { return nil, err } - limits := parser.NewLimits(nil) + limits := block.NewLimits(nil) // Checks preprocessing count limits txList := make([]*model.Transaction, 0, len(trs)) for i, txItem := range trs { bufTransaction := bytes.NewBuffer(txItem.Data) - p, err := parser.ParseTransaction(bufTransaction) + p, err := transaction.ParseTransaction(bufTransaction) if err != nil { if p != nil { - parser.MarkTransactionBad(p.DbTransaction, p.TxHash, err.Error()) + transaction.MarkTransactionBad(p.DbTransaction, p.TxHash, err.Error()) } continue } if p.TxSmart != nil { err = limits.CheckLimit(p) - if err == parser.ErrLimitStop && i > 0 { + if err == block.ErrLimitStop && i > 0 { model.IncrementTxAttemptCount(nil, p.TxHash) break } else if err != nil { - if err == parser.ErrLimitSkip { + if err == block.ErrLimitSkip { model.IncrementTxAttemptCount(nil, p.TxHash) } else { - parser.MarkTransactionBad(p.DbTransaction, p.TxHash, err.Error()) + transaction.MarkTransactionBad(p.DbTransaction, p.TxHash, err.Error()) } continue } diff --git a/packages/daemons/blocks_collection.go b/packages/daemons/blocks_collection.go index 2f27862fb..92bfd0acb 100644 --- a/packages/daemons/blocks_collection.go +++ b/packages/daemons/blocks_collection.go @@ -23,13 +23,13 @@ import ( "io/ioutil" "time" + "github.com/GenesisKernel/go-genesis/packages/block" "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/conf/syspar" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/crypto" "github.com/GenesisKernel/go-genesis/packages/model" - "github.com/GenesisKernel/go-genesis/packages/parser" "github.com/GenesisKernel/go-genesis/packages/rollback" "github.com/GenesisKernel/go-genesis/packages/service" "github.com/GenesisKernel/go-genesis/packages/tcpserver" @@ -149,41 +149,41 @@ func UpdateChain(ctx context.Context, d *daemon, host string, maxBlockID int64) playRawBlock := 
func(rawBlocksQueueCh chan []byte) error { for rb := range rawBlocksQueueCh { - block, err := parser.ProcessBlockWherePrevFromBlockchainTable(rb, true) + b, err := block.ProcessBlockWherePrevFromBlockchainTable(rb, true) if err != nil { // we got bad block and should ban this host - banNode(host, block, err) + banNode(host, b, err) d.logger.WithFields(log.Fields{"error": err, "type": consts.BlockError}).Error("processing block") return err } // hash compare could be failed in the case of fork - hashMatched, thisErrIsOk := block.CheckHash() + hashMatched, thisErrIsOk := b.CheckHash() if thisErrIsOk != nil { d.logger.WithFields(log.Fields{"error": err, "type": consts.BlockError}).Error("checking block hash") } if !hashMatched { //it should be fork, replace our previous blocks to ones from the host - err := GetBlocks(block.Header.BlockID-1, host) + err := GetBlocks(b.Header.BlockID-1, host) if err != nil { d.logger.WithFields(log.Fields{"error": err, "type": consts.ParserError}).Error("processing block") - banNode(host, block, err) + banNode(host, b, err) return err } } - block.PrevHeader, err = parser.GetBlockDataFromBlockChain(block.Header.BlockID - 1) + b.PrevHeader, err = block.GetBlockDataFromBlockChain(b.Header.BlockID - 1) if err != nil { - banNode(host, block, err) - return utils.ErrInfo(fmt.Errorf("can't get block %d", block.Header.BlockID-1)) + banNode(host, b, err) + return utils.ErrInfo(fmt.Errorf("can't get block %d", b.Header.BlockID-1)) } - if err = block.CheckBlock(); err != nil { - banNode(host, block, err) + if err = b.Check(); err != nil { + banNode(host, b, err) return err } - if err = block.PlayBlockSafe(); err != nil { - banNode(host, block, err) + if err = b.PlaySafe(); err != nil { + banNode(host, b, err) return err } } @@ -228,7 +228,7 @@ func loadFirstBlock(logger *log.Entry) error { }).Error("reading first block from file") } - if err = parser.InsertBlockWOForks(newBlock, false, true); err != nil { + if err = block.InsertBlockWOForks(newBlock, false, true); err != nil { logger.WithFields(log.Fields{"type": consts.ParserError, "error": err}).Error("inserting new block") return err } @@ -258,7 +258,7 @@ func needLoad(logger *log.Entry) (bool, error) { return false, nil } -func banNode(host string, block *parser.Block, err error) { +func banNode(host string, block *block.Block, err error) { var ( reason string blockId, blockTime int64 @@ -343,12 +343,12 @@ func GetBlocks(blockID int64, host string) error { return processBlocks(blocks) } -func getBlocks(blockID int64, host string) ([]*parser.Block, error) { +func getBlocks(blockID int64, host string) ([]*block.Block, error) { rollback := syspar.GetRbBlocks1() badBlocks := make(map[int64]string) - blocks := make([]*parser.Block, 0) + blocks := make([]*block.Block, 0) var count int64 // load the block bodies from the host @@ -367,7 +367,7 @@ func getBlocks(blockID int64, host string) ([]*parser.Block, error) { break } - block, err := parser.ProcessBlockWherePrevFromBlockchainTable(binaryBlock, true) + block, err := block.ProcessBlockWherePrevFromBlockchainTable(binaryBlock, true) if err != nil { return nil, utils.ErrInfo(err) } @@ -412,7 +412,7 @@ func getBlocks(blockID int64, host string) ([]*parser.Block, error) { return blocks, nil } -func processBlocks(blocks []*parser.Block) error { +func processBlocks(blocks []*block.Block) error { dbTransaction, err := model.StartTransaction() if err != nil { log.WithFields(log.Fields{"error": err, "type": consts.DBError}).Error("starting transaction") @@ -420,47 +420,47 @@ func 
processBlocks(blocks []*parser.Block) error { } // go through new blocks from the smallest block_id to the largest block_id - prevBlocks := make(map[int64]*parser.Block, 0) + prevBlocks := make(map[int64]*block.Block, 0) for i := len(blocks) - 1; i >= 0; i-- { - block := blocks[i] - - if prevBlocks[block.Header.BlockID-1] != nil { - block.PrevHeader.Hash = prevBlocks[block.Header.BlockID-1].Header.Hash - block.PrevHeader.Time = prevBlocks[block.Header.BlockID-1].Header.Time - block.PrevHeader.BlockID = prevBlocks[block.Header.BlockID-1].Header.BlockID - block.PrevHeader.EcosystemID = prevBlocks[block.Header.BlockID-1].Header.EcosystemID - block.PrevHeader.KeyID = prevBlocks[block.Header.BlockID-1].Header.KeyID - block.PrevHeader.NodePosition = prevBlocks[block.Header.BlockID-1].Header.NodePosition + b := blocks[i] + + if prevBlocks[b.Header.BlockID-1] != nil { + b.PrevHeader.Hash = prevBlocks[b.Header.BlockID-1].Header.Hash + b.PrevHeader.Time = prevBlocks[b.Header.BlockID-1].Header.Time + b.PrevHeader.BlockID = prevBlocks[b.Header.BlockID-1].Header.BlockID + b.PrevHeader.EcosystemID = prevBlocks[b.Header.BlockID-1].Header.EcosystemID + b.PrevHeader.KeyID = prevBlocks[b.Header.BlockID-1].Header.KeyID + b.PrevHeader.NodePosition = prevBlocks[b.Header.BlockID-1].Header.NodePosition } - forSha := fmt.Sprintf("%d,%x,%s,%d,%d,%d,%d", block.Header.BlockID, block.PrevHeader.Hash, block.MrklRoot, block.Header.Time, block.Header.EcosystemID, block.Header.KeyID, block.Header.NodePosition) + forSha := fmt.Sprintf("%d,%x,%s,%d,%d,%d,%d", b.Header.BlockID, b.PrevHeader.Hash, b.MrklRoot, b.Header.Time, b.Header.EcosystemID, b.Header.KeyID, b.Header.NodePosition) hash, err := crypto.DoubleHash([]byte(forSha)) if err != nil { log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Fatal("double hashing block") } - block.Header.Hash = hash + b.Header.Hash = hash - if err := block.CheckBlock(); err != nil { + if err := b.Check(); err != nil { dbTransaction.Rollback() return utils.ErrInfo(err) } - if err := block.PlayBlock(dbTransaction); err != nil { + if err := b.Play(dbTransaction); err != nil { dbTransaction.Rollback() return utils.ErrInfo(err) } - prevBlocks[block.Header.BlockID] = block + prevBlocks[b.Header.BlockID] = b // for last block we should update block info if i == 0 { - err := parser.UpdBlockInfo(dbTransaction, block) + err := block.UpdBlockInfo(dbTransaction, b) if err != nil { dbTransaction.Rollback() return utils.ErrInfo(err) } } - if block.SysUpdate { + if b.SysUpdate { if err := syspar.SysUpdate(dbTransaction); err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") return utils.ErrInfo(err) @@ -470,16 +470,16 @@ func processBlocks(blocks []*parser.Block) error { // If all right we can delete old blockchain and write new for i := len(blocks) - 1; i >= 0; i-- { - block := blocks[i] + b := blocks[i] // Delete old blocks from blockchain - b := &model.Block{} - err = b.DeleteById(dbTransaction, block.Header.BlockID) + bl := &model.Block{} + err = bl.DeleteById(dbTransaction, b.Header.BlockID) if err != nil { dbTransaction.Rollback() return err } // insert new blocks into blockchain - if err := parser.InsertIntoBlockchain(dbTransaction, block); err != nil { + if err := block.InsertIntoBlockchain(dbTransaction, b); err != nil { dbTransaction.Rollback() return err } diff --git a/packages/daemons/locking.go b/packages/daemons/locking.go index b7383e6b9..5b4cc6efe 100644 --- a/packages/daemons/locking.go +++ b/packages/daemons/locking.go 
@@ -7,7 +7,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/model" - "github.com/GenesisKernel/go-genesis/packages/parser" + "github.com/GenesisKernel/go-genesis/packages/transaction" log "github.com/sirupsen/logrus" ) @@ -60,6 +60,6 @@ func DBLock() { // DBUnlock unlocks database func DBUnlock() { - parser.CleanCache() + transaction.CleanCache() mutex.Unlock() } diff --git a/packages/daemons/queue_parser_tx.go b/packages/daemons/queue_parser_tx.go index 6172b5862..ab522dda1 100644 --- a/packages/daemons/queue_parser_tx.go +++ b/packages/daemons/queue_parser_tx.go @@ -21,7 +21,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/model" - "github.com/GenesisKernel/go-genesis/packages/parser" + "github.com/GenesisKernel/go-genesis/packages/transaction" log "github.com/sirupsen/logrus" ) @@ -49,8 +49,8 @@ func QueueParserTx(ctx context.Context, d *daemon) error { return err } - p := new(parser.Transaction) - err = parser.ProcessTransactionsQueue(p.DbTransaction) + p := new(transaction.Transaction) + err = transaction.ProcessTransactionsQueue(p.DbTransaction) if err != nil { d.logger.WithFields(log.Fields{"error": err}).Error("parsing transactions") return err diff --git a/packages/daylight/daemonsctl/daemonsctl.go b/packages/daylight/daemonsctl/daemonsctl.go index cdddac4d8..eb8049c75 100644 --- a/packages/daylight/daemonsctl/daemonsctl.go +++ b/packages/daylight/daemonsctl/daemonsctl.go @@ -1,10 +1,10 @@ package daemonsctl import ( + "github.com/GenesisKernel/go-genesis/packages/block" conf "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/conf/syspar" "github.com/GenesisKernel/go-genesis/packages/daemons" - "github.com/GenesisKernel/go-genesis/packages/parser" "github.com/GenesisKernel/go-genesis/packages/smart" "github.com/GenesisKernel/go-genesis/packages/tcpserver" "github.com/GenesisKernel/go-genesis/packages/utils" @@ -23,7 +23,7 @@ func RunAllDaemons() error { return err } - if data, ok := parser.GetDataFromFirstBlock(); ok { + if data, ok := block.GetDataFromFirstBlock(); ok { syspar.SetFirstBlockData(data) } diff --git a/packages/rollback/block.go b/packages/rollback/block.go index 802344c76..e2d6b05cc 100644 --- a/packages/rollback/block.go +++ b/packages/rollback/block.go @@ -20,10 +20,11 @@ import ( "bytes" "fmt" + "github.com/GenesisKernel/go-genesis/packages/block" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/model" - "github.com/GenesisKernel/go-genesis/packages/parser" "github.com/GenesisKernel/go-genesis/packages/smart" + "github.com/GenesisKernel/go-genesis/packages/transaction" "github.com/GenesisKernel/go-genesis/packages/utils" log "github.com/sirupsen/logrus" @@ -37,7 +38,7 @@ func RollbackBlock(data []byte, deleteBlock bool) error { return fmt.Errorf("empty buffer") } - block, err := parser.ParseBlock(buf, false) + block, err := block.UnmarshallBlock(buf, false) if err != nil { return err } @@ -69,32 +70,32 @@ func RollbackBlock(data []byte, deleteBlock bool) error { return err } -func rollbackBlock(transaction *model.DbTransaction, block *parser.Block) error { +func rollbackBlock(dbTransaction *model.DbTransaction, block *block.Block) error { // rollback transactions in reverse order logger := block.GetLogger() for i := len(block.Transactions) - 1; i >= 0; i-- { t := block.Transactions[i] - t.DbTransaction = transaction + t.DbTransaction = 
dbTransaction - _, err := model.MarkTransactionUnusedAndUnverified(transaction, t.TxHash) + _, err := model.MarkTransactionUnusedAndUnverified(dbTransaction, t.TxHash) if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("starting transaction") return err } - _, err = model.DeleteLogTransactionsByHash(transaction, t.TxHash) + _, err = model.DeleteLogTransactionsByHash(dbTransaction, t.TxHash) if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting log transactions by hash") return err } ts := &model.TransactionStatus{} - err = ts.UpdateBlockID(transaction, 0, t.TxHash) + err = ts.UpdateBlockID(dbTransaction, 0, t.TxHash) if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating block id in transaction status") return err } - _, err = model.DeleteQueueTxByHash(transaction, t.TxHash) + _, err = model.DeleteQueueTxByHash(dbTransaction, t.TxHash) if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting transacion from queue by hash") return err @@ -109,7 +110,7 @@ func rollbackBlock(transaction *model.DbTransaction, block *parser.Block) error } } else { MethodName := consts.TxTypes[int(t.TxType)] - txParser, err := parser.GetTransaction(t, MethodName) + txParser, err := transaction.GetTransaction(t, MethodName) if err != nil { return utils.ErrInfo(err) } diff --git a/packages/transaction/custom/custom.go b/packages/transaction/custom/custom.go new file mode 100644 index 000000000..3d2d7830c --- /dev/null +++ b/packages/transaction/custom/custom.go @@ -0,0 +1,14 @@ +package custom + +import ( + "github.com/GenesisKernel/go-genesis/packages/utils/tx" +) + +// TransactionInterface is parsing transactions +type TransactionInterface interface { + Init() error + Validate() error + Action() error + Rollback() error + Header() *tx.Header +} diff --git a/packages/parser/first_block.go b/packages/transaction/custom/first_block.go similarity index 70% rename from packages/parser/first_block.go rename to packages/transaction/custom/first_block.go index 6e9a2ba57..7d086d7ae 100644 --- a/packages/parser/first_block.go +++ b/packages/transaction/custom/first_block.go @@ -14,16 +14,11 @@ // You should have received a copy of the GNU Lesser General Public License // along with the go-daylight library. If not, see . 
-package parser +package custom import ( - "bytes" - "encoding/hex" "errors" - "io/ioutil" - "path/filepath" - "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/conf/syspar" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/converter" @@ -41,7 +36,9 @@ const firstEcosystemID = 1 // FirstBlockParser is parser wrapper type FirstBlockTransaction struct { - *Transaction + Logger *log.Entry + DbTransaction *model.DbTransaction + Data interface{} } // ErrFirstBlockHostIsEmpty host for first block is not specified @@ -59,8 +56,8 @@ func (t *FirstBlockTransaction) Validate() error { // Action is fires first block func (t *FirstBlockTransaction) Action() error { - logger := t.GetLogger() - data := t.TxPtr.(*consts.FirstBlock) + logger := t.Logger + data := t.Data.(*consts.FirstBlock) keyID := crypto.Address(data.PublicKey) err := model.ExecSchemaEcosystem(nil, firstEcosystemID, keyID, ``, keyID) if err != nil { @@ -121,59 +118,3 @@ func (t *FirstBlockTransaction) Rollback() error { func (t FirstBlockTransaction) Header() *tx.Header { return nil } - -// GetKeyIDFromPrivateKey load KeyID fron PrivateKey file -func GetKeyIDFromPrivateKey() (int64, error) { - - key, err := ioutil.ReadFile(filepath.Join(conf.Config.KeysDir, consts.PrivateKeyFilename)) - if err != nil { - log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("reading private key file") - return 0, err - } - key, err = hex.DecodeString(string(key)) - if err != nil { - log.WithFields(log.Fields{"type": consts.ConversionError, "error": err}).Error("decoding private key from hex") - return 0, err - } - key, err = crypto.PrivateToPublic(key) - if err != nil { - log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("converting private key to public") - return 0, err - } - - return crypto.Address(key), nil -} - -// GetDataFromFirstBlock returns data of first block -func GetDataFromFirstBlock() (data *consts.FirstBlock, ok bool) { - block := &model.Block{} - isFound, err := block.Get(1) - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting record of first block") - return - } - - if !isFound { - return - } - - pb, err := ParseBlock(bytes.NewBuffer(block.Data), true) - if err != nil { - log.WithFields(log.Fields{"type": consts.ParserError, "error": err}).Error("parsing data of first block") - return - } - - if len(pb.Transactions) == 0 { - log.WithFields(log.Fields{"type": consts.ParserError}).Error("list of parsers is empty") - return - } - - t := pb.Transactions[0] - data, ok = t.TxPtr.(*consts.FirstBlock) - if !ok { - log.WithFields(log.Fields{"type": consts.ParserError}).Error("getting data of first block") - return - } - - return -} diff --git a/packages/parser/stop_network.go b/packages/transaction/custom/stop_network.go similarity index 76% rename from packages/parser/stop_network.go rename to packages/transaction/custom/stop_network.go index cef829a1c..0e30f4b9e 100644 --- a/packages/parser/stop_network.go +++ b/packages/transaction/custom/stop_network.go @@ -1,4 +1,4 @@ -package parser +package custom import ( "errors" @@ -8,18 +8,21 @@ import ( "github.com/GenesisKernel/go-genesis/packages/service" "github.com/GenesisKernel/go-genesis/packages/utils" "github.com/GenesisKernel/go-genesis/packages/utils/tx" + + log "github.com/sirupsen/logrus" ) var ( messageNetworkStopping = "Attention! The network is stopped!" 
- errNetworkStopping = errors.New("Network is stopping") + ErrNetworkStopping = errors.New("Network is stopping") ) type StopNetworkTransaction struct { - *Transaction + Logger *log.Entry + Data interface{} - cert *utils.Cert + Cert *utils.Cert } func (t *StopNetworkTransaction) Init() error { @@ -28,7 +31,7 @@ func (t *StopNetworkTransaction) Init() error { func (t *StopNetworkTransaction) Validate() error { if err := t.validate(); err != nil { - t.GetLogger().WithError(err).Error("validating tx") + t.Logger.WithError(err).Error("validating tx") return err } @@ -36,8 +39,7 @@ func (t *StopNetworkTransaction) Validate() error { } func (t *StopNetworkTransaction) validate() error { - data := t.TxPtr.(*consts.StopNetwork) - + data := t.Data.(*consts.StopNetwork) cert, err := utils.ParseCert(data.StopNetworkCert) if err != nil { return err @@ -52,21 +54,21 @@ func (t *StopNetworkTransaction) validate() error { return err } - t.cert = cert + t.Cert = cert return nil } func (t *StopNetworkTransaction) Action() error { // Allow execute transaction, if the certificate was used - if t.cert.EqualBytes(consts.UsedStopNetworkCerts...) { + if t.Cert.EqualBytes(consts.UsedStopNetworkCerts...) { return nil } // Set the node in a pause state service.PauseNodeActivity(service.PauseTypeStopingNetwork) - t.GetLogger().Warn(messageNetworkStopping) - return errNetworkStopping + t.Logger.Warn(messageNetworkStopping) + return ErrNetworkStopping } func (t *StopNetworkTransaction) Rollback() error { diff --git a/packages/parser/db.go b/packages/transaction/db.go similarity index 58% rename from packages/parser/db.go rename to packages/transaction/db.go index 9f421f2f5..24f2a8c60 100644 --- a/packages/parser/db.go +++ b/packages/transaction/db.go @@ -1,14 +1,10 @@ -package parser +package transaction import ( - "bytes" - "encoding/json" "errors" "fmt" - "time" "github.com/GenesisKernel/go-genesis/packages/consts" - "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/crypto" "github.com/GenesisKernel/go-genesis/packages/model" "github.com/GenesisKernel/go-genesis/packages/utils" @@ -16,125 +12,6 @@ import ( log "github.com/sirupsen/logrus" ) -// UpdBlockInfo updates info_block table -func UpdBlockInfo(dbTransaction *model.DbTransaction, block *Block) error { - blockID := block.Header.BlockID - // for the local tests - forSha := fmt.Sprintf("%d,%x,%s,%d,%d,%d,%d", blockID, block.PrevHeader.Hash, block.MrklRoot, - block.Header.Time, block.Header.EcosystemID, block.Header.KeyID, block.Header.NodePosition) - - hash, err := crypto.DoubleHash([]byte(forSha)) - if err != nil { - log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Fatal("double hashing block") - } - - block.Header.Hash = hash - if block.Header.BlockID == 1 { - ib := &model.InfoBlock{ - Hash: hash, - BlockID: blockID, - Time: block.Header.Time, - EcosystemID: block.Header.EcosystemID, - KeyID: block.Header.KeyID, - NodePosition: converter.Int64ToStr(block.Header.NodePosition), - CurrentVersion: fmt.Sprintf("%d", block.Header.Version), - } - err := ib.Create(dbTransaction) - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating info block") - return fmt.Errorf("error insert into info_block %s", err) - } - } else { - ibUpdate := &model.InfoBlock{ - Hash: hash, - BlockID: blockID, - Time: block.Header.Time, - EcosystemID: block.Header.EcosystemID, - KeyID: block.Header.KeyID, - NodePosition: converter.Int64ToStr(block.Header.NodePosition), - Sent: 
0, - } - if err := ibUpdate.Update(dbTransaction); err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating info block") - return fmt.Errorf("error while updating info_block: %s", err) - } - } - - return nil -} - -// InsertIntoBlockchain inserts a block into the blockchain -func InsertIntoBlockchain(transaction *model.DbTransaction, block *Block) error { - // for local tests - blockID := block.Header.BlockID - - // record into the block chain - bl := &model.Block{} - err := bl.DeleteById(transaction, blockID) - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("deleting block by id") - return err - } - rollbackTx := &model.RollbackTx{} - blockRollbackTxs, err := rollbackTx.GetBlockRollbackTransactions(transaction, blockID) - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting block rollback txs") - return err - } - buffer := bytes.Buffer{} - for _, rollbackTx := range blockRollbackTxs { - rollbackTxBytes, err := json.Marshal(rollbackTx) - if err != nil { - log.WithFields(log.Fields{"type": consts.JSONMarshallError, "error": err}).Error("marshalling rollback_tx to json") - return err - } - - buffer.Write(rollbackTxBytes) - } - rollbackTxsHash, err := crypto.Hash(buffer.Bytes()) - if err != nil { - log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("hashing block rollback_txs") - return err - } - b := &model.Block{ - ID: blockID, - Hash: block.Header.Hash, - Data: block.BinData, - EcosystemID: block.Header.EcosystemID, - KeyID: block.Header.KeyID, - NodePosition: block.Header.NodePosition, - Time: block.Header.Time, - RollbacksHash: rollbackTxsHash, - Tx: int32(len(block.Transactions)), - } - blockTimeCalculator, err := utils.BuildBlockTimeCalculator() - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating block") - return err - } - validBlockTime := true - if blockID > 1 { - validBlockTime, err = blockTimeCalculator.ValidateBlock(b.NodePosition, time.Unix(b.Time, 0)) - if err != nil { - log.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("block validation") - return err - } - } - if validBlockTime { - err = b.Create(transaction) - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating block") - return err - } - } else { - err := fmt.Errorf("Invalid block time: %d", block.Header.Time) - log.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("invalid block time") - return err - } - - return nil -} - // InsertInLogTx is inserting tx in log func InsertInLogTx(transaction *model.DbTransaction, binaryTx []byte, time int64) error { txHash, err := crypto.Hash(binaryTx) @@ -199,26 +76,6 @@ func CheckLogTx(txBinary []byte, transactions, txQueue bool) error { return nil } -// GetBlockDataFromBlockChain is retrieving block data from blockchain -func GetBlockDataFromBlockChain(blockID int64) (*utils.BlockData, error) { - BlockData := new(utils.BlockData) - block := &model.Block{} - _, err := block.Get(blockID) - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("Getting block by ID") - return BlockData, utils.ErrInfo(err) - } - - header, err := utils.ParseBlockHeader(bytes.NewBuffer(block.Data), false) - if err != nil { - return nil, utils.ErrInfo(err) - } - - BlockData = &header - BlockData.Hash = block.Hash - return BlockData, nil -} - // DeleteQueueTx deletes a transaction from the queue 
func DeleteQueueTx(dbTransaction *model.DbTransaction, hash []byte) error { delQueueTx := &model.QueueTx{Hash: hash} diff --git a/packages/parser/transaction.go b/packages/transaction/transaction.go similarity index 96% rename from packages/parser/transaction.go rename to packages/transaction/transaction.go index 7fcda3540..b3e82e192 100644 --- a/packages/parser/transaction.go +++ b/packages/transaction/transaction.go @@ -1,4 +1,4 @@ -package parser +package transaction import ( "bytes" @@ -13,6 +13,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/model" "github.com/GenesisKernel/go-genesis/packages/script" "github.com/GenesisKernel/go-genesis/packages/smart" + "github.com/GenesisKernel/go-genesis/packages/transaction/custom" "github.com/GenesisKernel/go-genesis/packages/utils" "github.com/GenesisKernel/go-genesis/packages/utils/tx" @@ -46,7 +47,7 @@ type Transaction struct { TxSmart *tx.SmartContract TxContract *smart.Contract TxHeader *tx.Header - tx TransactionInterface + tx custom.TransactionInterface DbTransaction *model.DbTransaction SysUpdate bool @@ -71,27 +72,6 @@ func (t Transaction) GetLogger() *log.Entry { return logger } -// TransactionInterface is parsing transactions -type TransactionInterface interface { - Init() error - Validate() error - Action() error - Rollback() error - Header() *tx.Header -} - -// GetTransaction returns TransactionInterface -func GetTransaction(t *Transaction, txType string) (TransactionInterface, error) { - switch txType { - case consts.TxTypeParserFirstBlock: - return &FirstBlockTransaction{t}, nil - case consts.TxTypeParserStopNetwork: - return &StopNetworkTransaction{t, nil}, nil - } - log.WithFields(log.Fields{"tx_type": txType, "type": consts.UnknownObject}).Error("unknown txType") - return nil, fmt.Errorf("Unknown txType: %s", txType) -} - var txParserCache = &transactionCache{cache: make(map[string]*Transaction)} // ParseTransaction is parsing transaction @@ -391,7 +371,7 @@ func CheckTransaction(data []byte) (*tx.Header, error) { return nil, err } - err = t.check(time.Now().Unix(), true) + err = t.Check(time.Now().Unix(), true) if err != nil { return nil, err } @@ -399,7 +379,7 @@ func CheckTransaction(data []byte) (*tx.Header, error) { return t.TxHeader, nil } -func (t *Transaction) check(checkTime int64, checkForDupTr bool) error { +func (t *Transaction) Check(checkTime int64, checkForDupTr bool) error { err := CheckLogTx(t.TxFullData, checkForDupTr, false) if err != nil { return utils.ErrInfo(err) @@ -429,7 +409,7 @@ func (t *Transaction) check(checkTime int64, checkForDupTr bool) error { return nil } -func (t *Transaction) play() (string, error) { +func (t *Transaction) Play() (string, error) { // smart-contract if t.TxContract != nil { // check that there are enough money in CallContract @@ -517,3 +497,14 @@ func GetTxTypeAndUserID(binaryBlock []byte) (txType int64, keyID int64) { } return } + +func GetTransaction(t *Transaction, txType string) (custom.TransactionInterface, error) { + switch txType { + case consts.TxTypeParserFirstBlock: + return &custom.FirstBlockTransaction{t.GetLogger(), t.DbTransaction, t.TxPtr}, nil + case consts.TxTypeParserStopNetwork: + return &custom.StopNetworkTransaction{t.GetLogger(), t.TxPtr, nil}, nil + } + log.WithFields(log.Fields{"tx_type": txType, "type": consts.UnknownObject}).Error("unknown txType") + return nil, fmt.Errorf("Unknown txType: %s", txType) +} diff --git a/packages/parser/parser_cache.go b/packages/transaction/transaction_cache.go similarity index 96% rename from 
packages/parser/parser_cache.go rename to packages/transaction/transaction_cache.go index 21fd435b1..117142939 100644 --- a/packages/parser/parser_cache.go +++ b/packages/transaction/transaction_cache.go @@ -1,4 +1,4 @@ -package parser +package transaction import "sync" From 11c24c55faf7f35db525e7b96bd5b6df613882e8 Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Tue, 26 Jun 2018 12:07:28 +0500 Subject: [PATCH 115/169] feature/940 history (#412) * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * move changes * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * fix rebase errors * vendoring supervisord * change update permissions for notifications table * Fixed changing schema of system_parameters table * Added GetPageHistory * add reles_access for 'Apla Consensus asbl' * Added GetMenuHistory * Added GetContractHistory * Added history template * Added block history * Added Source to template funcs * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * move changes * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * move changes * separate routes by vde * separate vde migration to own package * temporary commit * temporary commit * fix login * move changes * temporary commit * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * fix rebase errors * vendoring supervisord * change founder account to user account on adding role_participant * Revert "change founder account to user account on adding role_participant" This reverts commit c13fd44ec482ca2a789983b78c42df83500a1ddc. * Added BOM checking (#406) * feature/887-doublecontract (#407) * Fixed redefining contracts * change update permissions for notifications table * Fixed changing schema of system_parameters table * add reles_access for 'Apla Consensus asbl' * change founder account to user account on adding role_participant * Revert "change founder account to user account on adding role_participant" This reverts commit c13fd44ec482ca2a789983b78c42df83500a1ddc. 
* Fixed redefining contracts * Fixed CreateEcosystem (#419) * Merge develop * Merge develop * Merge develop * Added GetContractHistory * Merge develop * Added block history * Added Source to template funcs --- packages/api/smart_test.go | 76 +++++++++++++++++++++++++++++ packages/migration/data.go | 4 +- packages/model/rollback_tx.go | 3 +- packages/smart/errors.go | 1 + packages/smart/funcs.go | 91 ++++++++++++++++++++++++++++++++++- packages/template/funcs.go | 59 +++++++++++++++++++++++ 6 files changed, 231 insertions(+), 3 deletions(-) diff --git a/packages/api/smart_test.go b/packages/api/smart_test.go index bb338ca18..e65172a69 100644 --- a/packages/api/smart_test.go +++ b/packages/api/smart_test.go @@ -826,3 +826,79 @@ func TestStack(t *testing.T) { assert.NoError(t, err) assert.Equal(t, fmt.Sprintf("[[@1%s] [@1%[1]s @1%s]]", parent, child), res) } + +func TestPageHistory(t *testing.T) { + assert.NoError(t, keyLogin(1)) + + name := randName(`page`) + value := `P(test,test paragraph)` + + form := url.Values{"Name": {name}, "Value": {value}, "ApplicationId": {`1`}, + "Menu": {"default_menu"}, "Conditions": {"ContractConditions(`MainCondition`)"}} + assert.NoError(t, postTx(`NewPage`, &form)) + + var ret listResult + assert.NoError(t, sendGet(`list/pages`, nil, &ret)) + id := ret.Count + assert.NoError(t, postTx(`EditPage`, &url.Values{"Id": {id}, "Value": {"Div(style){ok}"}})) + assert.NoError(t, postTx(`EditPage`, &url.Values{"Id": {id}, "Conditions": {"true"}})) + + form = url.Values{"Name": {randName(`menu`)}, "Value": {`MenuItem(First)MenuItem(Second)`}, + "ApplicationId": {`1`}, "Conditions": {"ContractConditions(`MainCondition`)"}} + assert.NoError(t, postTx(`NewMenu`, &form)) + + assert.NoError(t, sendGet(`list/menu`, nil, &ret)) + idmenu := ret.Count + assert.NoError(t, postTx(`EditMenu`, &url.Values{"Id": {idmenu}, "Conditions": {"true"}})) + assert.NoError(t, postTx(`EditMenu`, &url.Values{"Id": {idmenu}, "Value": {"MenuItem(Third)"}})) + assert.NoError(t, postTx(`EditMenu`, &url.Values{"Id": {idmenu}, + "Value": {"MenuItem(Third)"}, "Conditions": {"false"}})) + + form = url.Values{"Value": {`contract C` + name + `{ action {}}`}, + "ApplicationId": {`1`}, "Conditions": {"ContractConditions(`MainCondition`)"}} + _, idCont, err := postTxResult(`NewContract`, &form) + assert.NoError(t, err) + assert.NoError(t, postTx(`EditContract`, &url.Values{"Id": {idCont}, + "Value": {`contract C` + name + `{ action {Println("OK")}}`}, "Conditions": {"true"}})) + + form = url.Values{`Value`: {`contract Get` + name + ` { + data { + IdPage int + IdMenu int + IdCont int + } + action { + var ret array + ret = GetPageHistory($IdPage) + $result = Str(Len(ret)) + ret = GetMenuHistory($IdMenu) + $result = $result + Str(Len(ret)) + ret = GetContractHistory($IdCont) + $result = $result + Str(Len(ret)) + } + }`}, "ApplicationId": {`1`}, `Conditions`: {`true`}} + assert.NoError(t, postTx(`NewContract`, &form)) + + _, msg, err := postTxResult(`Get`+name, &url.Values{"IdPage": {id}, "IdMenu": {idmenu}, + "IdCont": {idCont}}) + assert.NoError(t, err) + assert.Equal(t, `231`, msg) + + form = url.Values{"Name": {name + `1`}, "Value": {value}, "ApplicationId": {`1`}, + "Menu": {"default_menu"}, "Conditions": {"ContractConditions(`MainCondition`)"}} + assert.NoError(t, postTx(`NewPage`, &form)) + + assert.NoError(t, postTx(`Get`+name, &url.Values{"IdPage": {converter.Int64ToStr( + converter.StrToInt64(id) + 1)}, "IdMenu": {idmenu}, "IdCont": {idCont}})) + + assert.EqualError(t, postTx(`Get`+name, 
&url.Values{"IdPage": {`1000000`}, "IdMenu": {idmenu}, + "IdCont": {idCont}}), `{"type":"panic","error":"Record has not been found"}`) + + var retTemp contentResult + assert.NoError(t, sendPost(`content`, &url.Values{`template`: {fmt.Sprintf(`GetPageHistory(MySrc,%s)`, + id)}}, &retTemp)) + + if len(RawToString(retTemp.Tree)) < 400 { + t.Error(fmt.Errorf(`wrong tree %s`, RawToString(retTemp.Tree))) + } +} diff --git a/packages/migration/data.go b/packages/migration/data.go index 53bca4f3c..02e512f57 100644 --- a/packages/migration/data.go +++ b/packages/migration/data.go @@ -121,7 +121,9 @@ var ( ); ALTER SEQUENCE rollback_tx_id_seq owned by rollback_tx.id; ALTER TABLE ONLY "rollback_tx" ADD CONSTRAINT rollback_tx_pkey PRIMARY KEY (id); - + CREATE INDEX "rollback_tx_table" ON "rollback_tx" (table_name, table_id); + + DROP TABLE IF EXISTS "install"; CREATE TABLE "install" ( "progress" varchar(10) NOT NULL DEFAULT '' ); diff --git a/packages/model/rollback_tx.go b/packages/model/rollback_tx.go index dbf4dce09..68583a120 100644 --- a/packages/model/rollback_tx.go +++ b/packages/model/rollback_tx.go @@ -30,7 +30,8 @@ func (rt *RollbackTx) GetBlockRollbackTransactions(dbTransaction *DbTransaction, // GetRollbackTxsByTableIDAndTableName returns records of rollback by table name and id func (rt *RollbackTx) GetRollbackTxsByTableIDAndTableName(tableID, tableName string, limit int) (*[]RollbackTx, error) { rollbackTx := new([]RollbackTx) - if err := DBConn.Where("table_id = ? AND table_name = ?", tableID, tableName).Limit(limit).Find(rollbackTx).Error; err != nil { + if err := DBConn.Where("table_id = ? AND table_name = ?", tableID, tableName). + Order("id desc").Limit(limit).Find(rollbackTx).Error; err != nil { return nil, err } return rollbackTx, nil diff --git a/packages/smart/errors.go b/packages/smart/errors.go index 04676d096..a2d338a90 100644 --- a/packages/smart/errors.go +++ b/packages/smart/errors.go @@ -29,4 +29,5 @@ var ( errContractNotFound = errors.New(`Contract has not been found`) errAccessRollbackContract = errors.New(`RollbackContract can be only called from Import or NewContract`) errCommission = errors.New("There is not enough money to pay the commission fee") + errNotFound = errors.New(`Record has not been found`) ) diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index d7801efcb..2083094b5 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -54,7 +54,10 @@ import ( log "github.com/sirupsen/logrus" ) -const nodeBanNotificationHeader = "Your node was banned" +const ( + nodeBanNotificationHeader = "Your node was banned" + historyLimit = 250 +) var BOM = []byte{0xEF, 0xBB, 0xBF} @@ -248,6 +251,10 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { "GetMapKeys": GetMapKeys, "SortedKeys": SortedKeys, "Append": Append, + "GetPageHistory": GetPageHistory, + "GetBlockHistory": GetBlockHistory, + "GetMenuHistory": GetMenuHistory, + "GetContractHistory": GetContractHistory, } switch vt { @@ -1719,3 +1726,85 @@ func StopVDEProcess(sc *SmartContract, name string) error { func GetVDEList(sc *SmartContract) (map[string]string, error) { return vdemanager.Manager.ListProcess() } + +func GetHistory(transaction *model.DbTransaction, ecosystem int64, tableName string, id int64) ([]interface{}, error) { + table := fmt.Sprintf(`%d_%s`, ecosystem, tableName) + rows, err := model.GetDB(transaction).Table(table).Where("id=?", id).Rows() + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("get current values") + return nil, err + 
} + if !rows.Next() { + return nil, errNotFound + } + defer rows.Close() + // Get column names + columns, err := rows.Columns() + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("get columns") + return nil, err + } + values := make([][]byte, len(columns)) + scanArgs := make([]interface{}, len(values)) + for i := range values { + scanArgs[i] = &values[i] + } + err = rows.Scan(scanArgs...) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("scan values") + return nil, err + } + var value string + curVal := make(map[string]string) + for i, col := range values { + if col == nil { + value = "NULL" + } else { + value = string(col) + } + curVal[columns[i]] = value + } + rollbackList := []interface{}{} + rollbackTx := &model.RollbackTx{} + txs, err := rollbackTx.GetRollbackTxsByTableIDAndTableName(converter.Int64ToStr(id), + table, historyLimit) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("rollback history") + return nil, err + } + for _, tx := range *txs { + if len(rollbackList) > 0 { + rollbackList[len(rollbackList)-1].(map[string]string)[`block_id`] = converter.Int64ToStr(tx.BlockID) + } + if tx.Data == "" { + continue + } + rollback := make(map[string]string) + for k, v := range curVal { + rollback[k] = v + } + if err := json.Unmarshal([]byte(tx.Data), &rollback); err != nil { + log.WithFields(log.Fields{"type": consts.JSONUnmarshallError, "error": err}).Error("unmarshalling rollbackTx.Data from JSON") + return nil, err + } + rollbackList = append(rollbackList, rollback) + curVal = rollback + } + return rollbackList, nil +} + +func GetBlockHistory(sc *SmartContract, id int64) ([]interface{}, error) { + return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `blocks`, id) +} + +func GetPageHistory(sc *SmartContract, id int64) ([]interface{}, error) { + return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `pages`, id) +} + +func GetMenuHistory(sc *SmartContract, id int64) ([]interface{}, error) { + return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `menu`, id) +} + +func GetContractHistory(sc *SmartContract, id int64) ([]interface{}, error) { + return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `contracts`, id) +} diff --git a/packages/template/funcs.go b/packages/template/funcs.go index bbedc6455..3afd08dec 100644 --- a/packages/template/funcs.go +++ b/packages/template/funcs.go @@ -63,6 +63,10 @@ func init() { funcs[`EcosysParam`] = tplFunc{ecosysparTag, defaultTag, `ecosyspar`, `Name,Index,Source`} funcs[`Em`] = tplFunc{defaultTag, defaultTag, `em`, `Body,Class`} funcs[`GetVar`] = tplFunc{getvarTag, defaultTag, `getvar`, `Name`} + funcs[`GetContractHistory`] = tplFunc{getContractHistoryTag, defaultTag, `getcontracthistory`, `Source,Id`} + funcs[`GetMenuHistory`] = tplFunc{getMenuHistoryTag, defaultTag, `getmenuhistory`, `Source,Id`} + funcs[`GetBlockHistory`] = tplFunc{getBlockHistoryTag, defaultTag, `getblockhistory`, `Source,Id`} + funcs[`GetPageHistory`] = tplFunc{getPageHistoryTag, defaultTag, `getpagehistory`, `Source,Id`} funcs[`ImageInput`] = tplFunc{defaultTag, defaultTag, `imageinput`, `Name,Width,Ratio,Format`} funcs[`InputErr`] = tplFunc{defaultTag, defaultTag, `inputerr`, `*`} funcs[`JsonToSource`] = tplFunc{jsontosourceTag, defaultTag, `jsontosource`, `Source,Data`} @@ -1219,3 +1223,58 @@ func columntypeTag(par parFunc) string { } return `` } + +func getHistoryTag(par parFunc, table string) string { + setAllAttr(par) + + 
list, err := smart.GetHistory(nil, converter.StrToInt64((*par.Workspace.Vars)[`ecosystem_id`]), + table, converter.StrToInt64(macro((*par.Pars)[`Id`], par.Workspace.Vars))) + if err != nil { + return err.Error() + } + data := make([][]string, 0) + cols := make([]string, 0, 8) + types := make([]string, 0, 8) + if len(list) > 0 { + for i := range list { + item := list[i].(map[string]string) + if i == 0 { + for key := range item { + cols = append(cols, key) + types = append(types, `text`) + } + } + items := make([]string, len(cols)) + for ind, key := range cols { + val := item[key] + if val == `NULL` { + val = `` + } + items[ind] = val + } + data = append(data, items) + } + } + par.Node.Attr[`columns`] = &cols + par.Node.Attr[`types`] = &types + par.Node.Attr[`data`] = &data + newSource(par) + par.Owner.Children = append(par.Owner.Children, par.Node) + return `` +} + +func getContractHistoryTag(par parFunc) string { + return getHistoryTag(par, `contracts`) +} + +func getBlockHistoryTag(par parFunc) string { + return getHistoryTag(par, `blocks`) +} + +func getMenuHistoryTag(par parFunc) string { + return getHistoryTag(par, `menu`) +} + +func getPageHistoryTag(par parFunc) string { + return getHistoryTag(par, `pages`) +} From 5c5a6519bece33655b98479fc7797a3fd4e3e81a Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Wed, 20 Jun 2018 10:09:55 +0300 Subject: [PATCH 116/169] change platform founder to user wallet --- packages/migration/roles_data.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/migration/roles_data.go b/packages/migration/roles_data.go index 496b078cb..45093d690 100644 --- a/packages/migration/roles_data.go +++ b/packages/migration/roles_data.go @@ -7,9 +7,9 @@ INSERT INTO "%[1]d_roles" ("id", "default_page", "role_name", "deleted", "role_t ('2','', 'Developer', '0', '3', NOW(), '{}', '{}'); INSERT INTO "%[1]d_roles_participants" ("id","role" ,"member", "date_created") - VALUES ('1', '{"id": "1", "type": "3", "name": "Admin", "image_id":"0"}', '{"member_id": "%[4]d", "member_name": "founder", "image_id": "0"}', NOW()), - ('2', '{"id": "2", "type": "3", "name": "Developer", "image_id":"0"}', '{"member_id": "%[4]d", "member_name": "founder", "image_id": "0"}', NOW()); + VALUES ('1', '{"id": "1", "type": "3", "name": "Admin", "image_id":"0"}', '{"member_id": "%[2]d", "member_name": "founder", "image_id": "0"}', NOW()), + ('2', '{"id": "2", "type": "3", "name": "Developer", "image_id":"0"}', '{"member_id": "%[2]d", "member_name": "founder", "image_id": "0"}', NOW()); - INSERT INTO "%[1]d_members" ("id", "member_name") VALUES('%[4]d', 'founder'); + INSERT INTO "%[1]d_members" ("id", "member_name") VALUES('%[2]d', 'founder'); ` From a307052e71cd26c0f33e0049326a429327670b78 Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Tue, 26 Jun 2018 12:58:43 +0500 Subject: [PATCH 117/169] feature/919 include (#405) * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * Added macro to include * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp 
commit * temporary commit * temporary commit * fix login * temporary commit * move changes * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * fix rebase errors * vendoring supervisord * Fixed query * change update permissions for notifications table * Fixed changing schema of system_parameters table * add reles_access for 'Apla Consensus asbl' * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * move changes * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * move changes * separate routes by vde * separate vde migration to own package * temporary commit * temporary commit * fix login * move changes * temporary commit * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * fix rebase errors * vendoring supervisord * change founder account to user account on adding role_participant * Revert "change founder account to user account on adding role_participant" This reverts commit c13fd44ec482ca2a789983b78c42df83500a1ddc. * Added macro to include * Fixed query * Added BOM checking (#406) * feature/887-doublecontract (#407) * Fixed redefining contracts * change update permissions for notifications table * Fixed changing schema of system_parameters table * add reles_access for 'Apla Consensus asbl' * change founder account to user account on adding role_participant * Revert "change founder account to user account on adding role_participant" This reverts commit c13fd44ec482ca2a789983b78c42df83500a1ddc. 
* Fixed redefining contracts * Fixed CreateEcosystem (#419) * Added macro to include * Fixed query * Fixed db query in include * Fixed dbquery in include --- packages/api/content_test.go | 2 +- packages/template/funcs.go | 12 +++++++++--- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/packages/api/content_test.go b/packages/api/content_test.go index 2b5177e55..004906908 100644 --- a/packages/api/content_test.go +++ b/packages/api/content_test.go @@ -101,7 +101,7 @@ func TestContent(t *testing.T) { )`}, "source": {"true"}, }, - `[{"tag":"data","attr":{"columns":"id,name","data":"1,Test message 1\n\t\t\t\t\t2,"Test message 2"\n\t\t\t\t\t3,"Test message 3"","source":"myforlist"}}]`, + `[{"tag":"data","attr":{"columns":"id,name","data":"1,Test message 1\n\t\t\t\t\t2,\"Test message 2\"\n\t\t\t\t\t3,\"Test message 3\"","source":"myforlist"}}]`, }, { "content", diff --git a/packages/template/funcs.go b/packages/template/funcs.go index 3afd08dec..c5bb6f44d 100644 --- a/packages/template/funcs.go +++ b/packages/template/funcs.go @@ -820,15 +820,21 @@ func tailTag(par parFunc) string { func includeTag(par parFunc) string { if len((*par.Pars)[`Name`]) >= 0 && len((*par.Workspace.Vars)[`_include`]) < 5 { - pattern, err := model.Single(`select value from "`+(*par.Workspace.Vars)[`ecosystem_id`]+`_blocks" where name=?`, (*par.Pars)[`Name`]).String() + bi := &model.BlockInterface{} + bi.SetTablePrefix((*par.Workspace.Vars)[`ecosystem_id`]) + found, err := bi.Get(macro((*par.Pars)[`Name`], par.Workspace.Vars)) if err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting block by name") return err.Error() } - if len(pattern) > 0 { + if !found { + log.WithFields(log.Fields{"type": consts.NotFound, "name": (*par.Pars)[`Name`]}).Error("include block not found") + return fmt.Sprintf("Include %s has not been found", (*par.Pars)[`Name`]) + } + if len(bi.Value) > 0 { root := node{} (*par.Workspace.Vars)[`_include`] += `1` - process(pattern, &root, par.Workspace) + process(bi.Value, &root, par.Workspace) (*par.Workspace.Vars)[`_include`] = (*par.Workspace.Vars)[`_include`][:len((*par.Workspace.Vars)[`_include`])-1] for _, item := range root.Children { par.Owner.Children = append(par.Owner.Children, item) From 250aa35bc27cf6cb6160337e158a1e003f55ad7c Mon Sep 17 00:00:00 2001 From: Roman Potekhin Date: Tue, 26 Jun 2018 19:47:05 +0300 Subject: [PATCH 118/169] Fix bug with passing nil pointer while parsing StructTransaction; regularTransactions are not needed anymore --- packages/transaction/transaction.go | 85 ++--------------------------- 1 file changed, 6 insertions(+), 79 deletions(-) diff --git a/packages/transaction/transaction.go b/packages/transaction/transaction.go index b3e82e192..904aa9172 100644 --- a/packages/transaction/transaction.go +++ b/packages/transaction/transaction.go @@ -117,15 +117,7 @@ func ParseTransaction(buffer *bytes.Buffer) (*Transaction, error) { } // all other transactions - } else { - // skip byte with transaction type - buffer.Next(1) - t.TxBinaryData = buffer.Bytes() - if err := t.parseFromRegular(buffer, txType); err != nil { - return t, err - } } - txParserCache.Set(t) return t, nil @@ -137,24 +129,23 @@ func IsContractTransaction(txType int) bool { } func (t *Transaction) parseFromStruct(buf *bytes.Buffer, txType int64) error { - trParser, err := GetTransaction(t, consts.TxTypes[int(txType)]) - if err != nil { - return err - } - t.tx = trParser - t.TxPtr = consts.MakeStruct(consts.TxTypes[int(txType)]) input := buf.Bytes() if
err := converter.BinUnmarshal(&input, t.TxPtr); err != nil { log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError, "tx_type": int(txType)}).Error("getting parser for tx type") return err } - head := consts.Header(t.TxPtr) t.TxKeyID = head.KeyID t.TxTime = int64(head.Time) t.TxType = txType + trParser, err := GetTransaction(t, consts.TxTypes[int(txType)]) + if err != nil { + return err + } + t.tx = trParser + err = trParser.Validate() if err != nil { return utils.ErrInfo(err) @@ -299,70 +290,6 @@ func (t *Transaction) parseFromContract(buf *bytes.Buffer) error { return nil } -func parseRegularTransaction(t *Transaction, buf *bytes.Buffer, txType int64) error { - trParser, err := GetTransaction(t, consts.TxTypes[int(txType)]) - if err != nil { - return err - } - t.tx = trParser - - err = trParser.Init() - if err != nil { - log.WithFields(log.Fields{"error": err, "tx_type": int(txType)}).Error("parser init") - return err - } - header := trParser.Header() - if header == nil { - log.WithFields(log.Fields{"error": err, "tx_type": int(txType)}).Error("parser get header") - return fmt.Errorf("tx header is nil") - } - - t.TxHeader = header - t.TxTime = header.Time - t.TxType = txType - t.TxEcosystemID = (header.EcosystemID) - t.TxKeyID = header.KeyID - - err = trParser.Validate() - if _, ok := err.(error); ok { - return utils.ErrInfo(err.(error)) - } - - return nil -} - -func (t *Transaction) parseFromRegular(buf *bytes.Buffer, txType int64) error { - trParser, err := GetTransaction(t, consts.TxTypes[int(txType)]) - if err != nil { - return err - } - t.tx = trParser - - err = trParser.Init() - if err != nil { - log.WithFields(log.Fields{"error": err, "tx_type": int(txType)}).Error("parser init") - return err - } - header := trParser.Header() - if header == nil { - log.WithFields(log.Fields{"error": err, "tx_type": int(txType)}).Error("parser get header") - return fmt.Errorf("tx header is nil") - } - - t.TxHeader = header - t.TxTime = header.Time - t.TxType = txType - t.TxEcosystemID = (header.EcosystemID) - t.TxKeyID = header.KeyID - - err = trParser.Validate() - if _, ok := err.(error); ok { - return utils.ErrInfo(err.(error)) - } - - return nil -} - // CheckTransaction is checking transaction func CheckTransaction(data []byte) (*tx.Header, error) { trBuff := bytes.NewBuffer(data) From c79c4ee9434ff818f87cfdcdb844c40da9e16ccc Mon Sep 17 00:00:00 2001 From: Roman Potekhin Date: Tue, 26 Jun 2018 20:28:17 +0300 Subject: [PATCH 119/169] Delete unecessary fields in tx struct --- packages/transaction/transaction.go | 61 +++++++++++++---------------- 1 file changed, 27 insertions(+), 34 deletions(-) diff --git a/packages/transaction/transaction.go b/packages/transaction/transaction.go index 904aa9172..35757a7ad 100644 --- a/packages/transaction/transaction.go +++ b/packages/transaction/transaction.go @@ -24,32 +24,27 @@ import ( // Transaction is a structure for parsing transactions type Transaction struct { - BlockData *utils.BlockData - PrevBlock *utils.BlockData - dataType int - blockData []byte - CurrentVersion string - PublicKeys [][]byte - - TxBinaryData []byte // transaction binary data - TxFullData []byte // full transaction, with type and data - TxHash []byte - TxKeyID int64 - TxEcosystemID int64 - TxNodePosition uint32 - TxTime int64 - TxType int64 - TxCost int64 // Maximum cost of executing contract - TxFuel int64 // The fuel cost of executed contract - TxUsedCost decimal.Decimal // Used cost of CPU resources - TxPtr interface{} // Pointer to the corresponding struct in 
consts/struct.go - TxData map[string]interface{} - TxSmart *tx.SmartContract - TxContract *smart.Contract - TxHeader *tx.Header - tx custom.TransactionInterface - DbTransaction *model.DbTransaction - SysUpdate bool + BlockData *utils.BlockData + PrevBlock *utils.BlockData + PublicKeys [][]byte + + TxBinaryData []byte // transaction binary data + TxFullData []byte // full transaction, with type and data + TxHash []byte + TxKeyID int64 + TxTime int64 + TxType int64 + TxCost int64 // Maximum cost of executing contract + TxFuel int64 + TxUsedCost decimal.Decimal // Used cost of CPU resources + TxPtr interface{} // Pointer to the corresponding struct in consts/struct.go + TxData map[string]interface{} + TxSmart *tx.SmartContract + TxContract *smart.Contract + TxHeader *tx.Header + tx custom.TransactionInterface + DbTransaction *model.DbTransaction + SysUpdate bool SmartContract smart.SmartContract } @@ -57,18 +52,18 @@ type Transaction struct { // GetLogger returns logger func (t Transaction) GetLogger() *log.Entry { if t.BlockData != nil && t.PrevBlock != nil { - logger := log.WithFields(log.Fields{"block_id": t.BlockData.BlockID, "block_time": t.BlockData.Time, "block_wallet_id": t.BlockData.KeyID, "block_state_id": t.BlockData.EcosystemID, "block_hash": t.BlockData.Hash, "block_version": t.BlockData.Version, "prev_block_id": t.PrevBlock.BlockID, "prev_block_time": t.PrevBlock.Time, "prev_block_wallet_id": t.PrevBlock.KeyID, "prev_block_state_id": t.PrevBlock.EcosystemID, "prev_block_hash": t.PrevBlock.Hash, "prev_block_version": t.PrevBlock.Version, "tx_type": t.TxType, "tx_time": t.TxTime, "tx_state_id": t.TxEcosystemID, "tx_wallet_id": t.TxKeyID}) + logger := log.WithFields(log.Fields{"block_id": t.BlockData.BlockID, "block_time": t.BlockData.Time, "block_wallet_id": t.BlockData.KeyID, "block_state_id": t.BlockData.EcosystemID, "block_hash": t.BlockData.Hash, "block_version": t.BlockData.Version, "prev_block_id": t.PrevBlock.BlockID, "prev_block_time": t.PrevBlock.Time, "prev_block_wallet_id": t.PrevBlock.KeyID, "prev_block_state_id": t.PrevBlock.EcosystemID, "prev_block_hash": t.PrevBlock.Hash, "prev_block_version": t.PrevBlock.Version, "tx_type": t.TxType, "tx_time": t.TxTime, "tx_wallet_id": t.TxKeyID}) return logger } if t.BlockData != nil { - logger := log.WithFields(log.Fields{"block_id": t.BlockData.BlockID, "block_time": t.BlockData.Time, "block_wallet_id": t.BlockData.KeyID, "block_state_id": t.BlockData.EcosystemID, "block_hash": t.BlockData.Hash, "block_version": t.BlockData.Version, "tx_type": t.TxType, "tx_time": t.TxTime, "tx_state_id": t.TxEcosystemID, "tx_wallet_id": t.TxKeyID}) + logger := log.WithFields(log.Fields{"block_id": t.BlockData.BlockID, "block_time": t.BlockData.Time, "block_wallet_id": t.BlockData.KeyID, "block_state_id": t.BlockData.EcosystemID, "block_hash": t.BlockData.Hash, "block_version": t.BlockData.Version, "tx_type": t.TxType, "tx_time": t.TxTime, "tx_wallet_id": t.TxKeyID}) return logger } if t.PrevBlock != nil { - logger := log.WithFields(log.Fields{"prev_block_id": t.PrevBlock.BlockID, "prev_block_time": t.PrevBlock.Time, "prev_block_wallet_id": t.PrevBlock.KeyID, "prev_block_state_id": t.PrevBlock.EcosystemID, "prev_block_hash": t.PrevBlock.Hash, "prev_block_version": t.PrevBlock.Version, "tx_type": t.TxType, "tx_time": t.TxTime, "tx_state_id": t.TxEcosystemID, "tx_wallet_id": t.TxKeyID}) + logger := log.WithFields(log.Fields{"prev_block_id": t.PrevBlock.BlockID, "prev_block_time": t.PrevBlock.Time, "prev_block_wallet_id": t.PrevBlock.KeyID, 
"prev_block_state_id": t.PrevBlock.EcosystemID, "prev_block_hash": t.PrevBlock.Hash, "prev_block_version": t.PrevBlock.Version, "tx_type": t.TxType, "tx_time": t.TxTime, "tx_wallet_id": t.TxKeyID}) return logger } - logger := log.WithFields(log.Fields{"tx_type": t.TxType, "tx_time": t.TxTime, "tx_state_id": t.TxEcosystemID, "tx_wallet_id": t.TxKeyID}) + logger := log.WithFields(log.Fields{"tx_type": t.TxType, "tx_time": t.TxTime, "tx_wallet_id": t.TxKeyID}) return logger } @@ -98,7 +93,6 @@ func ParseTransaction(buffer *bytes.Buffer) (*Transaction, error) { t.TxFullData = buffer.Bytes() txType := int64(buffer.Bytes()[0]) - t.dataType = int(txType) // smart contract transaction if IsContractTransaction(int(txType)) { @@ -157,13 +151,12 @@ func (t *Transaction) parseFromStruct(buf *bytes.Buffer, txType int64) error { func (t *Transaction) parseFromContract(buf *bytes.Buffer) error { smartTx := tx.SmartContract{} if err := msgpack.Unmarshal(buf.Bytes(), &smartTx); err != nil { - log.WithFields(log.Fields{"tx_type": t.dataType, "tx_hash": t.TxHash, "error": err, "type": consts.UnmarshallingError}).Error("unmarshalling smart tx msgpack") + log.WithFields(log.Fields{"tx_hash": t.TxHash, "error": err, "type": consts.UnmarshallingError}).Error("unmarshalling smart tx msgpack") return err } t.TxPtr = nil t.TxSmart = &smartTx t.TxTime = smartTx.Time - t.TxEcosystemID = (smartTx.EcosystemID) t.TxKeyID = smartTx.KeyID contract := smart.GetContractByID(int32(smartTx.Type)) @@ -311,7 +304,7 @@ func (t *Transaction) Check(checkTime int64, checkForDupTr bool) error { if err != nil { return utils.ErrInfo(err) } - logger := log.WithFields(log.Fields{"tx_type": t.dataType, "tx_time": t.TxTime, "tx_state_id": t.TxEcosystemID}) + logger := log.WithFields(log.Fields{"tx_time": t.TxTime}) // time in the transaction cannot be more than MAX_TX_FORW seconds of block time if t.TxTime-consts.MAX_TX_FORW > checkTime { logger.WithFields(log.Fields{"tx_max_forw": consts.MAX_TX_FORW, "type": consts.ParameterExceeded}).Error("time in the tx cannot be more than MAX_TX_FORW seconds of block time ") From 4ff32791854b96de05640c28c6d4e698df605c88 Mon Sep 17 00:00:00 2001 From: Roman Potekhin Date: Tue, 26 Jun 2018 20:43:20 +0300 Subject: [PATCH 120/169] Fix transaction GetLogger method --- packages/transaction/transaction.go | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/packages/transaction/transaction.go b/packages/transaction/transaction.go index 35757a7ad..26df17713 100644 --- a/packages/transaction/transaction.go +++ b/packages/transaction/transaction.go @@ -51,19 +51,13 @@ type Transaction struct { // GetLogger returns logger func (t Transaction) GetLogger() *log.Entry { - if t.BlockData != nil && t.PrevBlock != nil { - logger := log.WithFields(log.Fields{"block_id": t.BlockData.BlockID, "block_time": t.BlockData.Time, "block_wallet_id": t.BlockData.KeyID, "block_state_id": t.BlockData.EcosystemID, "block_hash": t.BlockData.Hash, "block_version": t.BlockData.Version, "prev_block_id": t.PrevBlock.BlockID, "prev_block_time": t.PrevBlock.Time, "prev_block_wallet_id": t.PrevBlock.KeyID, "prev_block_state_id": t.PrevBlock.EcosystemID, "prev_block_hash": t.PrevBlock.Hash, "prev_block_version": t.PrevBlock.Version, "tx_type": t.TxType, "tx_time": t.TxTime, "tx_wallet_id": t.TxKeyID}) - return logger - } + logger := log.WithFields(log.Fields{"tx_type": t.TxType, "tx_time": t.TxTime, "tx_wallet_id": t.TxKeyID}) if t.BlockData != nil { - logger := log.WithFields(log.Fields{"block_id": 
t.BlockData.BlockID, "block_time": t.BlockData.Time, "block_wallet_id": t.BlockData.KeyID, "block_state_id": t.BlockData.EcosystemID, "block_hash": t.BlockData.Hash, "block_version": t.BlockData.Version, "tx_type": t.TxType, "tx_time": t.TxTime, "tx_wallet_id": t.TxKeyID}) - return logger + logger = logger.WithFields(log.Fields{"block_id": t.BlockData.BlockID, "block_time": t.BlockData.Time, "block_wallet_id": t.BlockData.KeyID, "block_state_id": t.BlockData.EcosystemID, "block_hash": t.BlockData.Hash, "block_version": t.BlockData.Version}) } if t.PrevBlock != nil { - logger := log.WithFields(log.Fields{"prev_block_id": t.PrevBlock.BlockID, "prev_block_time": t.PrevBlock.Time, "prev_block_wallet_id": t.PrevBlock.KeyID, "prev_block_state_id": t.PrevBlock.EcosystemID, "prev_block_hash": t.PrevBlock.Hash, "prev_block_version": t.PrevBlock.Version, "tx_type": t.TxType, "tx_time": t.TxTime, "tx_wallet_id": t.TxKeyID}) - return logger + logger = logger.WithFields(log.Fields{"block_id": t.BlockData.BlockID, "block_time": t.BlockData.Time, "block_wallet_id": t.BlockData.KeyID, "block_state_id": t.BlockData.EcosystemID, "block_hash": t.BlockData.Hash, "block_version": t.BlockData.Version}) } - logger := log.WithFields(log.Fields{"tx_type": t.TxType, "tx_time": t.TxTime, "tx_wallet_id": t.TxKeyID}) return logger } From baab8b796d80ba8ffa74c8098446f360ecf4190f Mon Sep 17 00:00:00 2001 From: Roman Potekhin Date: Tue, 26 Jun 2018 21:26:11 +0300 Subject: [PATCH 121/169] some renaming and move filling txData to separate method --- packages/block/serialization.go | 2 +- packages/daemons/block_generator.go | 2 +- packages/transaction/transaction.go | 239 ++++++++++++++-------------- 3 files changed, 123 insertions(+), 120 deletions(-) diff --git a/packages/block/serialization.go b/packages/block/serialization.go index 22f30fe79..ac6977491 100644 --- a/packages/block/serialization.go +++ b/packages/block/serialization.go @@ -91,7 +91,7 @@ func UnmarshallBlock(blockBuffer *bytes.Buffer, firstBlock bool) (*Block, error) } bufTransaction := bytes.NewBuffer(blockBuffer.Next(int(transactionSize))) - t, err := transaction.ParseTransaction(bufTransaction) + t, err := transaction.UnmarshallTransaction(bufTransaction) if err != nil { if t != nil && t.TxHash != nil { transaction.MarkTransactionBad(t.DbTransaction, t.TxHash, err.Error()) diff --git a/packages/daemons/block_generator.go b/packages/daemons/block_generator.go index bc5950af1..2db69d872 100644 --- a/packages/daemons/block_generator.go +++ b/packages/daemons/block_generator.go @@ -174,7 +174,7 @@ func processTransactions(logger *log.Entry) ([]*model.Transaction, error) { txList := make([]*model.Transaction, 0, len(trs)) for i, txItem := range trs { bufTransaction := bytes.NewBuffer(txItem.Data) - p, err := transaction.ParseTransaction(bufTransaction) + p, err := transaction.UnmarshallTransaction(bufTransaction) if err != nil { if p != nil { transaction.MarkTransactionBad(p.DbTransaction, p.TxHash, err.Error()) diff --git a/packages/transaction/transaction.go b/packages/transaction/transaction.go index 26df17713..8620e5bed 100644 --- a/packages/transaction/transaction.go +++ b/packages/transaction/transaction.go @@ -61,10 +61,10 @@ func (t Transaction) GetLogger() *log.Entry { return logger } -var txParserCache = &transactionCache{cache: make(map[string]*Transaction)} +var txCache = &transactionCache{cache: make(map[string]*Transaction)} -// ParseTransaction is parsing transaction -func ParseTransaction(buffer *bytes.Buffer) (*Transaction, error) { +// 
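// Illustrative sketch, not from the diffs above: the field-composition
// pattern that patch 120 moves GetLogger to -- start from the tx-level
// fields and attach block fields only when they are present. The types are
// trimmed stand-ins. Note that the hunk above attaches BlockData fields in
// the PrevBlock branch as well; this sketch assumes the PrevBlock branch is
// meant to log the PrevBlock values.
package main

import log "github.com/sirupsen/logrus"

type blockData struct {
	BlockID int64
	Time    int64
	KeyID   int64
}

type txEntry struct {
	TxType    int64
	TxTime    int64
	TxKeyID   int64
	BlockData *blockData
	PrevBlock *blockData
}

func (t txEntry) logger() *log.Entry {
	l := log.WithFields(log.Fields{"tx_type": t.TxType, "tx_time": t.TxTime, "tx_wallet_id": t.TxKeyID})
	if t.BlockData != nil {
		l = l.WithFields(log.Fields{"block_id": t.BlockData.BlockID, "block_time": t.BlockData.Time, "block_wallet_id": t.BlockData.KeyID})
	}
	if t.PrevBlock != nil {
		l = l.WithFields(log.Fields{"prev_block_id": t.PrevBlock.BlockID, "prev_block_time": t.PrevBlock.Time, "prev_block_wallet_id": t.PrevBlock.KeyID})
	}
	return l
}

func main() {
	txEntry{TxType: 3, TxTime: 1530000000, TxKeyID: 1}.logger().Info("transaction parsed")
}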
UnmarshallTransaction is unmarshalling transaction +func UnmarshallTransaction(buffer *bytes.Buffer) (*Transaction, error) { if buffer.Len() == 0 { log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("empty transaction buffer") return nil, fmt.Errorf("empty transaction buffer") @@ -77,7 +77,7 @@ func ParseTransaction(buffer *bytes.Buffer) (*Transaction, error) { return nil, err } - if t, ok := txParserCache.Get(string(hash)); ok { + if t, ok := txCache.Get(string(hash)); ok { return t, nil } @@ -106,7 +106,7 @@ func ParseTransaction(buffer *bytes.Buffer) (*Transaction, error) { // all other transactions } - txParserCache.Set(t) + txCache.Set(t) return t, nil } @@ -142,6 +142,115 @@ func (t *Transaction) parseFromStruct(buf *bytes.Buffer, txType int64) error { return nil } +func (t *Transaction) fillTxData(fieldInfos []*script.FieldInfo, input []byte, forsign []string) error { + for _, fitem := range fieldInfos { + var err error + var v interface{} + var forv string + var isforv bool + + if fitem.ContainsTag(script.TagFile) { + var ( + data []byte + file *tx.File + ) + if err := converter.BinUnmarshal(&input, &data); err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling file") + return err + } + if err := msgpack.Unmarshal(data, &file); err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("unmarshalling file msgpack") + return err + } + + t.TxData[fitem.Name] = file.Data + t.TxData[fitem.Name+"MimeType"] = file.MimeType + + forsign = append(forsign, file.MimeType, file.Hash) + continue + } + + switch fitem.Type.String() { + case `uint64`: + var val uint64 + converter.BinUnmarshal(&input, &val) + v = val + case `float64`: + var val float64 + converter.BinUnmarshal(&input, &val) + v = val + case `int64`: + v, err = converter.DecodeLenInt64(&input) + case script.Decimal: + var s string + if err := converter.BinUnmarshal(&input, &s); err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling script.Decimal") + return err + } + v, err = decimal.NewFromString(s) + case `string`: + var s string + if err := converter.BinUnmarshal(&input, &s); err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling string") + return err + } + v = s + case `[]uint8`: + var b []byte + if err := converter.BinUnmarshal(&input, &b); err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling string") + return err + } + v = hex.EncodeToString(b) + case `[]interface {}`: + count, err := converter.DecodeLength(&input) + if err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling []interface{}") + return err + } + isforv = true + list := make([]interface{}, 0) + for count > 0 { + length, err := converter.DecodeLength(&input) + if err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling tx length") + return err + } + if len(input) < int(length) { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError, "length": int(length), "slice length": len(input)}).Error("incorrect tx size") + return fmt.Errorf(`input slice is short`) + } + list = append(list, string(input[:length])) + input = input[length:] + count-- + } + if len(list) > 0 { + slist := make([]string, len(list)) + for j, lval := range list { + slist[j] 
= lval.(string) + } + forv = strings.Join(slist, `,`) + } + v = list + } + if t.TxData[fitem.Name] == nil { + t.TxData[fitem.Name] = v + } + if err != nil { + return err + } + if strings.Index(fitem.Tags, `image`) >= 0 { + continue + } + if isforv { + v = forv + } + forsign = append(forsign, fmt.Sprintf("%v", v)) + } + t.TxData[`forsign`] = strings.Join(forsign, ",") + return nil +} + func (t *Transaction) parseFromContract(buf *bytes.Buffer) error { smartTx := tx.SmartContract{} if err := msgpack.Unmarshal(buf.Bytes(), &smartTx); err != nil { @@ -165,114 +274,13 @@ func (t *Transaction) parseFromContract(buf *bytes.Buffer) error { input := smartTx.Data t.TxData = make(map[string]interface{}) + txInfo := contract.Block.Info.(*script.ContractInfo).Tx - if contract.Block.Info.(*script.ContractInfo).Tx != nil { - for _, fitem := range *contract.Block.Info.(*script.ContractInfo).Tx { - var err error - var v interface{} - var forv string - var isforv bool - - if fitem.ContainsTag(script.TagFile) { - var ( - data []byte - file *tx.File - ) - if err := converter.BinUnmarshal(&input, &data); err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling file") - return err - } - if err := msgpack.Unmarshal(data, &file); err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("unmarshalling file msgpack") - return err - } - - t.TxData[fitem.Name] = file.Data - t.TxData[fitem.Name+"MimeType"] = file.MimeType - - forsign = append(forsign, file.MimeType, file.Hash) - continue - } - - switch fitem.Type.String() { - case `uint64`: - var val uint64 - converter.BinUnmarshal(&input, &val) - v = val - case `float64`: - var val float64 - converter.BinUnmarshal(&input, &val) - v = val - case `int64`: - v, err = converter.DecodeLenInt64(&input) - case script.Decimal: - var s string - if err := converter.BinUnmarshal(&input, &s); err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling script.Decimal") - return err - } - v, err = decimal.NewFromString(s) - case `string`: - var s string - if err := converter.BinUnmarshal(&input, &s); err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling string") - return err - } - v = s - case `[]uint8`: - var b []byte - if err := converter.BinUnmarshal(&input, &b); err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling string") - return err - } - v = hex.EncodeToString(b) - case `[]interface {}`: - count, err := converter.DecodeLength(&input) - if err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling []interface{}") - return err - } - isforv = true - list := make([]interface{}, 0) - for count > 0 { - length, err := converter.DecodeLength(&input) - if err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling tx length") - return err - } - if len(input) < int(length) { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError, "length": int(length), "slice length": len(input)}).Error("incorrect tx size") - return fmt.Errorf(`input slice is short`) - } - list = append(list, string(input[:length])) - input = input[length:] - count-- - } - if len(list) > 0 { - slist := make([]string, len(list)) - for j, lval := range list { - slist[j] = lval.(string) - } - forv = 
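// Illustrative sketch, not from the diffs above: what the extracted
// fillTxData ultimately produces for signature checking -- every decoded
// field value is appended, in order, to the forsign list (passed in as an
// argument above, so it may already hold earlier entries) and the joined
// string is stored under TxData["forsign"]. The values below are made up.
package main

import (
	"fmt"
	"strings"
)

func main() {
	forsign := []string{"1530000000", "-1234567890"} // entries already present on call
	decoded := []interface{}{int64(42), "hello", "0a0b0c"}
	txData := map[string]interface{}{}
	for _, v := range decoded {
		forsign = append(forsign, fmt.Sprintf("%v", v))
	}
	txData["forsign"] = strings.Join(forsign, ",")
	fmt.Println(txData["forsign"]) // 1530000000,-1234567890,42,hello,0a0b0c
}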
strings.Join(slist, `,`) - } - v = list - } - if t.TxData[fitem.Name] == nil { - t.TxData[fitem.Name] = v - } - if err != nil { - return err - } - if strings.Index(fitem.Tags, `image`) >= 0 { - continue - } - if isforv { - v = forv - } - forsign = append(forsign, fmt.Sprintf("%v", v)) + if txInfo != nil { + if err := t.fillTxData(*txInfo, input, forsign); err != nil { + return err } } - t.TxData[`forsign`] = strings.Join(forsign, ",") return nil } @@ -280,7 +288,7 @@ func (t *Transaction) parseFromContract(buf *bytes.Buffer) error { // CheckTransaction is checking transaction func CheckTransaction(data []byte) (*tx.Header, error) { trBuff := bytes.NewBuffer(data) - t, err := ParseTransaction(trBuff) + t, err := UnmarshallTransaction(trBuff) if err != nil { return nil, err } @@ -334,12 +342,7 @@ func (t *Transaction) Play() (string, error) { return "", utils.ErrInfo(fmt.Errorf("can't find parser for %d", t.TxType)) } - err := t.tx.Action() - if err != nil { - return "", err - } - - return "", nil + return "", t.tx.Action() } // AccessRights checks the access right by executing the condition value @@ -397,7 +400,7 @@ func (t *Transaction) CallContract(flags int) (resultContract string, err error) // CleanCache cleans cache of transaction parsers func CleanCache() { - txParserCache.Clean() + txCache.Clean() } // GetTxTypeAndUserID returns tx type, wallet and citizen id from the block data From 59acb10e54b642f0ab05b4244f4b83b8ef7d699d Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Thu, 28 Jun 2018 10:13:32 +0500 Subject: [PATCH 122/169] feature/958-column (#414) * Fixed checking column name * Fixed checkColumnName --- packages/api/contract_test.go | 9 +++++++++ packages/smart/errors.go | 2 ++ packages/smart/funcs.go | 36 +++++++++++++++++++++++++++-------- 3 files changed, 39 insertions(+), 8 deletions(-) diff --git a/packages/api/contract_test.go b/packages/api/contract_test.go index 195ca81fb..5ffd7a683 100644 --- a/packages/api/contract_test.go +++ b/packages/api/contract_test.go @@ -448,6 +448,15 @@ func TestNewTableWithEmptyName(t *testing.T) { `{"type":"error","error":"Table name cannot be empty"}` { t.Error(`wrong error`, err) } + + form = url.Values{ + "Name": {"Digit" + name}, + "Columns": {"[{\"name\":\"1\",\"type\":\"varchar\", \"index\": \"0\", \"conditions\":{\"update\":\"true\", \"read\":\"true\"}}]"}, + "ApplicationId": {"1"}, + "Permissions": {"{\"insert\": \"true\", \"update\" : \"true\", \"new_column\": \"true\"}"}, + } + + assert.EqualError(t, postTx("NewTable", &form), `{"type":"panic","error":"Column name cannot begin with digit"}`) } func TestActivateContracts(t *testing.T) { diff --git a/packages/smart/errors.go b/packages/smart/errors.go index a2d338a90..907c43859 100644 --- a/packages/smart/errors.go +++ b/packages/smart/errors.go @@ -29,5 +29,7 @@ var ( errContractNotFound = errors.New(`Contract has not been found`) errAccessRollbackContract = errors.New(`RollbackContract can be only called from Import or NewContract`) errCommission = errors.New("There is not enough money to pay the commission fee") + errEmptyColumn = errors.New(`Column name is empty`) + errWrongColumn = errors.New(`Column name cannot begin with digit`) errNotFound = errors.New(`Record has not been found`) ) diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 2083094b5..88c73e8ff 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -546,6 +546,9 @@ func CreateTable(sc *SmartContract, name, columns, permissions string, applicati data = 
v.(map[string]interface{}) } colname := converter.EscapeSQL(strings.ToLower(data[`name`].(string))) + if err := checkColumnName(colname); err != nil { + return err + } if colList[colname] { return fmt.Errorf(`There are the same columns`) } @@ -1206,25 +1209,42 @@ func RowConditions(sc *SmartContract, tblname string, id int64, conditionOnly bo return nil } +func checkColumnName(name string) error { + if len(name) == 0 { + return errEmptyColumn + } else if name[0] >= '0' && name[0] <= '9' { + return errWrongColumn + } + return nil +} + // CreateColumn is creating column -func CreateColumn(sc *SmartContract, tableName, name, colType, permissions string) error { +func CreateColumn(sc *SmartContract, tableName, name, colType, permissions string) (err error) { + var ( + sqlColType string + permout []byte + ) if !accessContracts(sc, `NewColumn`) { log.WithFields(log.Fields{"type": consts.InvalidObject}).Error("CreateColumn can be only called from @1NewColumn") return fmt.Errorf(`CreateColumn can be only called from NewColumn`) } name = converter.EscapeSQL(strings.ToLower(name)) + if err = checkColumnName(name); err != nil { + return + } + tableName = strings.ToLower(tableName) tblname := getDefTableName(sc, tableName) - sqlColType, err := columnType(colType) + sqlColType, err = columnType(colType) if err != nil { - return err + return } err = model.AlterTableAddColumn(sc.DbTransaction, tblname, name, sqlColType) if err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("adding column to the table") - return err + return } tables := getDefTableName(sc, `tables`) @@ -1234,16 +1254,16 @@ func CreateColumn(sc *SmartContract, tableName, name, colType, permissions strin temp := &cols{} err = model.DBConn.Table(tables).Where("name = ?", tableName).Select("columns").Find(temp).Error if err != nil { - return err + return } var perm map[string]string err = json.Unmarshal([]byte(temp.Columns), &perm) if err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("selecting columns from the table") - return err + return } perm[name] = permissions - permout, err := json.Marshal(perm) + permout, err = json.Marshal(perm) if err != nil { log.WithFields(log.Fields{"type": consts.JSONUnmarshallError, "error": err}).Error("unmarshalling columns to json") return err @@ -1251,7 +1271,7 @@ func CreateColumn(sc *SmartContract, tableName, name, colType, permissions strin _, _, err = sc.selectiveLoggingAndUpd([]string{`columns`}, []interface{}{string(permout)}, tables, []string{`name`}, []string{tableName}, !sc.VDE && sc.Rollback, false) if err != nil { - return err + return } return nil From b50b7a0e0fe408e0616a571158a1610a050efba0 Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Mon, 2 Jul 2018 14:29:29 +0500 Subject: [PATCH 123/169] Fixed ContactConditions loop (#429) --- packages/api/contract_test.go | 51 +++++++++++++++++++++++++++++++++++ packages/smart/errors.go | 1 + packages/smart/funcs.go | 16 +++++++++-- 3 files changed, 66 insertions(+), 2 deletions(-) diff --git a/packages/api/contract_test.go b/packages/api/contract_test.go index 5ffd7a683..6a9bef2f3 100644 --- a/packages/api/contract_test.go +++ b/packages/api/contract_test.go @@ -1049,3 +1049,54 @@ func TestContractChain(t *testing.T) { t.Error(fmt.Errorf(`wrong result %s`, msg)) } } + +func TestLoopCond(t *testing.T) { + if err := keyLogin(1); err != nil { + t.Error(err) + return + } + rnd := `rnd` + crypto.RandSeq(4) + + form := url.Values{`Value`: {`contract ` + rnd + `1 { + conditions { + + } 
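// Illustrative sketch, not from the diffs above: the new column-name rule,
// exercised on a few inputs. The function body mirrors checkColumnName as
// added in this patch; the error texts come from smart/errors.go above.
package main

import (
	"errors"
	"fmt"
)

var (
	errEmptyColumn = errors.New("Column name is empty")
	errWrongColumn = errors.New("Column name cannot begin with digit")
)

func checkColumnName(name string) error {
	if len(name) == 0 {
		return errEmptyColumn
	} else if name[0] >= '0' && name[0] <= '9' {
		return errWrongColumn
	}
	return nil
}

func main() {
	for _, name := range []string{"amount", "", "1column", "new_column"} {
		fmt.Printf("%q -> %v\n", name, checkColumnName(name))
	}
}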
+ }`}, `Conditions`: {`true`}, `ApplicationId`: {`1`}} + err := postTx(`NewContract`, &form) + if err != nil { + t.Error(err) + return + } + form = url.Values{`Value`: {`contract ` + rnd + `2 { + conditions { + ContractConditions("` + rnd + `1") + } + }`}, `Conditions`: {`true`}, `ApplicationId`: {`1`}} + err = postTx(`NewContract`, &form) + if err != nil { + t.Error(err) + return + } + var ret getContractResult + err = sendGet(`contract/`+rnd+`1`, nil, &ret) + if err != nil { + t.Error(err) + return + } + sid := ret.TableID + form = url.Values{`Value`: {`contract ` + rnd + `1 { + conditions { + ContractConditions("` + rnd + `2") + } + }`}, `Id`: {sid}, `Conditions`: {`true`}, `ApplicationId`: {`1`}} + err = postTx(`EditContract`, &form) + if err != nil { + t.Error(err) + return + } + err = postTx(rnd+`2`, &url.Values{}) + if err != nil { + t.Error(err) + return + } +} diff --git a/packages/smart/errors.go b/packages/smart/errors.go index 907c43859..60bc7c0b1 100644 --- a/packages/smart/errors.go +++ b/packages/smart/errors.go @@ -20,6 +20,7 @@ import "errors" const ( eTableNotFound = `Table %s has not been found` + eContractLoop = `There is loop in %s contract` eContractExist = `Contract %s already exists` ) diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index ee93304e0..b2fc24f96 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -88,6 +88,7 @@ type SmartContract struct { TxCost int64 // Maximum cost of executing contract TxUsedCost decimal.Decimal // Used cost of CPU resources BlockData *utils.BlockData + Loop map[string]bool TxHash []byte PublicKeys [][]byte DbTransaction *model.DbTransaction @@ -367,11 +368,22 @@ func ContractConditions(sc *SmartContract, names ...interface{}) (bool, error) { log.WithFields(log.Fields{"contract_name": name, "type": consts.EmptyObject}).Error("There is not conditions in contract") return false, fmt.Errorf(`There is not conditions in contract %s`, name) } - _, err := VMRun(sc.VM, block, []interface{}{}, &map[string]interface{}{`ecosystem_id`: int64(sc.TxSmart.EcosystemID), - `key_id`: sc.TxSmart.KeyID, `sc`: sc, `original_contract`: ``, `this_contract`: ``, `role_id`: sc.TxSmart.RoleID}) + vars := map[string]interface{}{`ecosystem_id`: int64(sc.TxSmart.EcosystemID), + `key_id`: sc.TxSmart.KeyID, `sc`: sc, `original_contract`: ``, `this_contract`: ``, `role_id`: sc.TxSmart.RoleID} + + if sc.Loop == nil { + sc.Loop = make(map[string]bool) + } + if _, ok := sc.Loop[`loop_`+name]; ok { + log.WithFields(log.Fields{"type": consts.ContractError, "contract_name": name}).Error("there is loop in contract") + return false, fmt.Errorf(eContractLoop, name) + } + sc.Loop[`loop_`+name] = true + _, err := VMRun(sc.VM, block, []interface{}{}, &vars) if err != nil { return false, err } + delete(sc.Loop, `loop_`+name) } else { log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("empty contract name in ContractConditions") return false, fmt.Errorf(`empty contract name in ContractConditions`) From 58c51ad11c1e226fb53002a1e7a1dd9fbf24bd25 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 2 Jul 2018 12:53:50 +0300 Subject: [PATCH 124/169] add content of default page to system_parameters --- .../migration/first_system_parameters_data.go | 27 ++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/packages/migration/first_system_parameters_data.go b/packages/migration/first_system_parameters_data.go index fb2319ba7..cde0ad8fc 100644 --- a/packages/migration/first_system_parameters_data.go +++ 
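// Illustrative sketch, not from the diffs above: the re-entrancy guard that
// patch 123 adds to ContractConditions, reduced to its core. A contract name
// is marked in a map before its conditions run and unmarked afterwards, so a
// chain like A -> B -> A fails with the loop error instead of recursing.
package main

import "fmt"

type conditionRunner struct {
	loop map[string]bool
}

func (r *conditionRunner) run(name string, conditions func() error) error {
	if r.loop == nil {
		r.loop = make(map[string]bool)
	}
	if r.loop["loop_"+name] {
		return fmt.Errorf("There is loop in %s contract", name)
	}
	r.loop["loop_"+name] = true
	defer delete(r.loop, "loop_"+name)
	return conditions()
}

func main() {
	r := &conditionRunner{}
	var condA, condB func() error
	condA = func() error { return r.run("B", condB) }
	condB = func() error { return r.run("A", condA) }
	fmt.Println(r.run("A", condA)) // There is loop in A contract
}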
b/packages/migration/first_system_parameters_data.go @@ -2,7 +2,32 @@ package migration var firstSystemParametersDataSQL = ` INSERT INTO "1_system_parameters" ("id","name", "value", "conditions") VALUES - ('1','default_ecosystem_page', '', 'true'), + ('1','default_ecosystem_page', 'Div(content-wrapper){ + Div(panel panel-primary){ + Div(list-group-item text-center){ + P(Class: h3 m0 text-bold, Body: Congratulations! You created your own ecosystem.) + } + Div(list-group-item){ + Span(Class: h3, Body: "You as Founder hold a complete set of rights for controlling the ecosystem – creating and editing applications, modifying ecosystem parameters, etc. ") + Span(Class: h3, Body: "To get started, you can download the finished applications from the") + Span(Class: h3 text-primary, Body: " https://github.com/GenesisKernel/apps ") + Span(Class: h3, Body: "and install them using the Import service. ") + Span(Class: h3, Body: "The Strong(basic.json) contains applications for managing roles, creating notifications and votings. ") + Span(Class: h3, Body: "Or you can create your own apps using the tools in the Admin tab. ") + Span(Class: h3, Body: "Documentation ") + Span(Class: h3 text-primary, Body: "https://genesiskernel.readthedocs.io") + } + Div(panel-footer text-right clearfix){ + Div(pull-left){ + Button(Body: Ecosystem parameters, Class: btn btn-default, Page: params_list) + }.Style(margin-right: 20px;) + Div(pull-left){ + Button(Body: Dashboard, Class: btn btn-default, Page: admin_dashboard) + } + Button(Body: Import, Class: btn btn-primary, Page: import_upload) + } + } + }', 'true'), ('2','default_ecosystem_menu', '', 'true'), ('3','default_ecosystem_contract', '', 'true'), ('4','gap_between_blocks', '2', 'true'), From c53e963ed3817c6635960c793b832d29c8982c17 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 2 Jul 2018 13:04:30 +0300 Subject: [PATCH 125/169] remove default_page from roles --- packages/migration/roles_data.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/migration/roles_data.go b/packages/migration/roles_data.go index 45093d690..d1bc7e915 100644 --- a/packages/migration/roles_data.go +++ b/packages/migration/roles_data.go @@ -3,7 +3,7 @@ package migration var rolesDataSQL = ` INSERT INTO "%[1]d_roles" ("id", "default_page", "role_name", "deleted", "role_type", "date_created","creator","roles_access") VALUES - ('1','default_ecosystem_page', 'Admin', '0', '3', NOW(), '{}', '{}'), + ('1','', 'Admin', '0', '3', NOW(), '{}', '{}'), ('2','', 'Developer', '0', '3', NOW(), '{}', '{}'); INSERT INTO "%[1]d_roles_participants" ("id","role" ,"member", "date_created") From 3d718fe4301d347dc3fed37a011f22983822124b Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 2 Jul 2018 15:09:38 +0300 Subject: [PATCH 126/169] move updating system parameters before creating default_page --- packages/parser/first_block.go | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/packages/parser/first_block.go b/packages/parser/first_block.go index 099d40113..5f74458a6 100644 --- a/packages/parser/first_block.go +++ b/packages/parser/first_block.go @@ -76,6 +76,16 @@ func (p *FirstBlockParser) Action() error { } amount := decimal.New(consts.FounderAmount, int32(converter.StrToInt64(sp.Value))).String() + commission := &model.SystemParameter{Name: `commission_wallet`} + if err = commission.SaveArray([][]string{{"1", converter.Int64ToStr(keyID)}}); err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": 
err}).Error("saving commission_wallet array") + return p.ErrInfo(err) + } + if err = syspar.SysUpdate(nil); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") + return p.ErrInfo(err) + } + err = model.GetDB(p.DbTransaction).Exec(`insert into "1_keys" (id,pub,amount) values(?, ?,?)`, keyID, data.PublicKey, amount).Error if err != nil { @@ -98,15 +108,7 @@ func (p *FirstBlockParser) Action() error { if err != nil { return p.ErrInfo(err) } - commission := &model.SystemParameter{Name: `commission_wallet`} - if err = commission.SaveArray([][]string{{"1", converter.Int64ToStr(keyID)}}); err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("saving commission_wallet array") - return p.ErrInfo(err) - } - if err = syspar.SysUpdate(nil); err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") - return p.ErrInfo(err) - } + syspar.SetFirstBlockData(data) return nil } From 803a5fe9c6182c392d92073dcc43424c4a8ea59d Mon Sep 17 00:00:00 2001 From: Roman Potekhin Date: Wed, 4 Jul 2018 09:27:19 +0300 Subject: [PATCH 127/169] Merged develop --- cmd/config.go | 4 +- packages/api/api.go | 51 +- packages/api/content_test.go | 2 +- packages/api/contract.go | 4 +- packages/api/contract_test.go | 90 +- packages/api/login.go | 58 +- packages/api/route.go | 31 +- packages/api/smart_test.go | 174 +- packages/api/table.go | 3 +- packages/api/template_test.go | 8 +- packages/api/vde.go | 11 +- packages/api/vde_test.go | 140 +- packages/block/block.go | 2 +- packages/block/db.go | 2 +- packages/conf/conf.go | 51 +- packages/conf/runmode.go | 40 + packages/conf/syspar/syspar.go | 15 + packages/consts/consts.go | 3 + packages/consts/log_types.go | 2 + packages/converter/converter.go | 5 + packages/daemons/block_generator.go | 2 +- packages/daemons/block_generator_tx.go | 2 +- packages/daemons/blocks_collection.go | 5 + packages/daemons/common.go | 13 +- packages/daylight/daemonsctl/daemonsctl.go | 27 +- packages/daylight/start.go | 52 +- packages/migration/applications_data.go | 3 + packages/migration/blocks_data.go | 112 +- packages/migration/data.go | 4 +- packages/migration/ecosystem.go | 5 +- packages/migration/first_applications_data.go | 3 - .../migration/first_ecosys_contracts_data.go | 2825 ++++++++++------- packages/migration/first_ecosystems_data.go | 11 + packages/migration/menu_data.go | 18 +- packages/migration/pages_data.go | 2181 +++++++------ packages/migration/roles_data.go | 16 +- packages/migration/tables_data.go | 21 +- packages/migration/vde/vde_data_contracts.go | 867 +++++ packages/migration/vde/vde_data_keys.go | 6 + packages/migration/vde/vde_data_members.go | 7 + packages/migration/vde/vde_data_menu.go | 45 + packages/migration/vde/vde_data_pages.go | 5 + packages/migration/vde/vde_data_parameters.go | 18 + packages/migration/vde/vde_data_tables.go | 76 + packages/migration/vde/vde_schema.go | 172 + packages/model/batch.go | 85 + packages/model/batch_test.go | 44 + packages/model/db.go | 46 +- packages/model/queue_tx.go | 18 + packages/model/rollback_tx.go | 3 +- packages/model/system_parameters.go | 5 + packages/script/vm.go | 6 +- packages/script/vminit.go | 19 +- packages/service/node_ban.go | 2 +- packages/service/node_relevance.go | 3 + packages/smart/errors.go | 5 + packages/smart/funcs.go | 584 +++- packages/smart/selective.go | 30 +- packages/smart/smart.go | 58 +- packages/smart/smart_p.go | 34 +- packages/smart/smart_test.go | 28 + 
packages/tcpserver/tcpserver.go | 6 + packages/tcpserver/type1.go | 15 +- packages/template/funcs.go | 114 +- packages/template/template.go | 2 +- packages/template/template_test.go | 20 +- packages/transaction/db.go | 9 +- packages/transaction/transaction.go | 2 +- packages/utils/utils.go | 4 +- packages/vdemanager/config.go | 66 + packages/vdemanager/manager.go | 290 ++ tools/desync_monitor/query/query.go | 19 +- vendor/github.com/gorilla/rpc/LICENSE | 27 + vendor/github.com/gorilla/rpc/README.md | 7 + vendor/github.com/gorilla/rpc/doc.go | 81 + vendor/github.com/gorilla/rpc/map.go | 180 ++ vendor/github.com/gorilla/rpc/server.go | 269 ++ vendor/github.com/ochinchina/go-ini/LICENSE | 21 + vendor/github.com/ochinchina/go-ini/README.md | 368 +++ vendor/github.com/ochinchina/go-ini/doc.go | 49 + .../ochinchina/go-ini/env_replacer.go | 65 + vendor/github.com/ochinchina/go-ini/ini.go | 265 ++ vendor/github.com/ochinchina/go-ini/key.go | 282 ++ vendor/github.com/ochinchina/go-ini/loader.go | 349 ++ .../ochinchina/go-ini/properties.go | 116 + .../github.com/ochinchina/go-ini/section.go | 177 ++ .../ochinchina/gorilla-xmlrpc/LICENSE | 27 + .../ochinchina/gorilla-xmlrpc/xml/client.go | 26 + .../ochinchina/gorilla-xmlrpc/xml/doc.go | 50 + .../ochinchina/gorilla-xmlrpc/xml/fault.go | 51 + .../ochinchina/gorilla-xmlrpc/xml/rpc2xml.go | 149 + .../ochinchina/gorilla-xmlrpc/xml/server.go | 118 + .../ochinchina/gorilla-xmlrpc/xml/xml2rpc.go | 219 ++ .../rogpeppe/go-charset/charset/big5.go | 88 + .../rogpeppe/go-charset/charset/charset.go | 301 ++ .../rogpeppe/go-charset/charset/codepage.go | 133 + .../rogpeppe/go-charset/charset/cp932.go | 195 ++ .../rogpeppe/go-charset/charset/file.go | 40 + .../rogpeppe/go-charset/charset/local.go | 162 + .../rogpeppe/go-charset/charset/utf16.go | 110 + .../rogpeppe/go-charset/charset/utf8.go | 51 + .../rogpeppe/go-charset/data/data_big5.dat.go | 18 + .../go-charset/data/data_charsets.json.go | 18 + .../go-charset/data/data_cp932.dat.go | 18 + .../go-charset/data/data_ibm437.cp.go | 18 + .../go-charset/data/data_ibm850.cp.go | 18 + .../go-charset/data/data_ibm866.cp.go | 18 + .../go-charset/data/data_iso-8859-1.cp.go | 18 + .../go-charset/data/data_iso-8859-10.cp.go | 18 + .../go-charset/data/data_iso-8859-15.cp.go | 18 + .../go-charset/data/data_iso-8859-2.cp.go | 18 + .../go-charset/data/data_iso-8859-3.cp.go | 18 + .../go-charset/data/data_iso-8859-4.cp.go | 18 + .../go-charset/data/data_iso-8859-5.cp.go | 18 + .../go-charset/data/data_iso-8859-6.cp.go | 18 + .../go-charset/data/data_iso-8859-7.cp.go | 18 + .../go-charset/data/data_iso-8859-8.cp.go | 18 + .../go-charset/data/data_iso-8859-9.cp.go | 18 + .../go-charset/data/data_jisx0201kana.dat.go | 18 + .../go-charset/data/data_koi8-r.cp.go | 18 + .../go-charset/data/data_windows-1250.cp.go | 18 + .../go-charset/data/data_windows-1251.cp.go | 18 + .../go-charset/data/data_windows-1252.cp.go | 18 + .../rogpeppe/go-charset/data/doc.go | 6 + .../rogpeppe/go-charset/data/generate.go | 97 + .../rpoletaev/supervisord/Gopkg.lock | 63 + .../rpoletaev/supervisord/Gopkg.toml | 46 + .../github.com/rpoletaev/supervisord/LICENSE | 21 + .../rpoletaev/supervisord/README.md | 161 + .../rpoletaev/supervisord/circle.yml | 9 + .../rpoletaev/supervisord/config/config.go | 558 ++++ .../supervisord/config/process_group.go | 114 + .../supervisord/config/process_sort.go | 159 + .../supervisord/config/string_expression.go | 88 + .../rpoletaev/supervisord/config_template.go | 137 + .../rpoletaev/supervisord/content_checker.go | 149 + 
.../github.com/rpoletaev/supervisord/ctl.go | 159 + .../rpoletaev/supervisord/daemonize.go | 25 + .../supervisord/daemonize_windows.go | 7 + .../rpoletaev/supervisord/events/events.go | 745 +++++ .../rpoletaev/supervisord/faults/faults.go | 30 + .../rpoletaev/supervisord/logger/log.go | 485 +++ .../rpoletaev/supervisord/logger/log_unix.go | 16 + .../supervisord/logger/log_windows.go | 7 + .../github.com/rpoletaev/supervisord/main.go | 75 + .../supervisord/process/command_parser.go | 81 + .../rpoletaev/supervisord/process/path.go | 46 + .../supervisord/process/pdeathsig_linux.go | 12 + .../supervisord/process/pdeathsig_other.go | 12 + .../supervisord/process/pdeathsig_windows.go | 9 + .../rpoletaev/supervisord/process/process.go | 689 ++++ .../supervisord/process/process_manager.go | 160 + .../supervisord/process/set_user_id.go | 11 + .../process/set_user_id_windows.go | 11 + .../rpoletaev/supervisord/signals/signal.go | 34 + .../supervisord/signals/signal_windows.go | 46 + .../rpoletaev/supervisord/supervisor.go | 586 ++++ .../rpoletaev/supervisord/util/util.go | 64 + .../rpoletaev/supervisord/version.go | 24 + .../rpoletaev/supervisord/xmlrpc.go | 136 + vendor/vendor.json | 78 + 161 files changed, 15691 insertions(+), 2546 deletions(-) create mode 100644 packages/conf/runmode.go create mode 100644 packages/migration/applications_data.go delete mode 100644 packages/migration/first_applications_data.go create mode 100644 packages/migration/vde/vde_data_contracts.go create mode 100644 packages/migration/vde/vde_data_keys.go create mode 100644 packages/migration/vde/vde_data_members.go create mode 100644 packages/migration/vde/vde_data_menu.go create mode 100644 packages/migration/vde/vde_data_pages.go create mode 100644 packages/migration/vde/vde_data_parameters.go create mode 100644 packages/migration/vde/vde_data_tables.go create mode 100644 packages/migration/vde/vde_schema.go create mode 100644 packages/model/batch.go create mode 100644 packages/model/batch_test.go create mode 100644 packages/vdemanager/config.go create mode 100644 packages/vdemanager/manager.go create mode 100644 vendor/github.com/gorilla/rpc/LICENSE create mode 100644 vendor/github.com/gorilla/rpc/README.md create mode 100644 vendor/github.com/gorilla/rpc/doc.go create mode 100644 vendor/github.com/gorilla/rpc/map.go create mode 100644 vendor/github.com/gorilla/rpc/server.go create mode 100644 vendor/github.com/ochinchina/go-ini/LICENSE create mode 100644 vendor/github.com/ochinchina/go-ini/README.md create mode 100644 vendor/github.com/ochinchina/go-ini/doc.go create mode 100644 vendor/github.com/ochinchina/go-ini/env_replacer.go create mode 100644 vendor/github.com/ochinchina/go-ini/ini.go create mode 100644 vendor/github.com/ochinchina/go-ini/key.go create mode 100644 vendor/github.com/ochinchina/go-ini/loader.go create mode 100644 vendor/github.com/ochinchina/go-ini/properties.go create mode 100644 vendor/github.com/ochinchina/go-ini/section.go create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/LICENSE create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/xml/client.go create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/xml/doc.go create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/xml/fault.go create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/xml/rpc2xml.go create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/xml/server.go create mode 100644 vendor/github.com/ochinchina/gorilla-xmlrpc/xml/xml2rpc.go create mode 100644 
vendor/github.com/rogpeppe/go-charset/charset/big5.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/charset.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/codepage.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/cp932.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/file.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/local.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/utf16.go create mode 100644 vendor/github.com/rogpeppe/go-charset/charset/utf8.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_big5.dat.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_charsets.json.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_cp932.dat.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_ibm437.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_ibm850.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_ibm866.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-1.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-10.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-15.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-2.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-3.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-4.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-5.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-6.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-7.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-8.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-9.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_jisx0201kana.dat.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_koi8-r.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_windows-1250.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_windows-1251.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/data_windows-1252.cp.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/doc.go create mode 100644 vendor/github.com/rogpeppe/go-charset/data/generate.go create mode 100644 vendor/github.com/rpoletaev/supervisord/Gopkg.lock create mode 100644 vendor/github.com/rpoletaev/supervisord/Gopkg.toml create mode 100644 vendor/github.com/rpoletaev/supervisord/LICENSE create mode 100644 vendor/github.com/rpoletaev/supervisord/README.md create mode 100644 vendor/github.com/rpoletaev/supervisord/circle.yml create mode 100644 vendor/github.com/rpoletaev/supervisord/config/config.go create mode 100644 vendor/github.com/rpoletaev/supervisord/config/process_group.go create mode 100644 vendor/github.com/rpoletaev/supervisord/config/process_sort.go create mode 100644 vendor/github.com/rpoletaev/supervisord/config/string_expression.go create mode 100644 vendor/github.com/rpoletaev/supervisord/config_template.go create mode 100644 vendor/github.com/rpoletaev/supervisord/content_checker.go create mode 100644 vendor/github.com/rpoletaev/supervisord/ctl.go create mode 100644 vendor/github.com/rpoletaev/supervisord/daemonize.go create mode 100644 
vendor/github.com/rpoletaev/supervisord/daemonize_windows.go create mode 100644 vendor/github.com/rpoletaev/supervisord/events/events.go create mode 100644 vendor/github.com/rpoletaev/supervisord/faults/faults.go create mode 100644 vendor/github.com/rpoletaev/supervisord/logger/log.go create mode 100644 vendor/github.com/rpoletaev/supervisord/logger/log_unix.go create mode 100644 vendor/github.com/rpoletaev/supervisord/logger/log_windows.go create mode 100644 vendor/github.com/rpoletaev/supervisord/main.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/command_parser.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/path.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/pdeathsig_linux.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/pdeathsig_other.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/pdeathsig_windows.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/process.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/process_manager.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/set_user_id.go create mode 100644 vendor/github.com/rpoletaev/supervisord/process/set_user_id_windows.go create mode 100644 vendor/github.com/rpoletaev/supervisord/signals/signal.go create mode 100644 vendor/github.com/rpoletaev/supervisord/signals/signal_windows.go create mode 100644 vendor/github.com/rpoletaev/supervisord/supervisor.go create mode 100644 vendor/github.com/rpoletaev/supervisord/util/util.go create mode 100644 vendor/github.com/rpoletaev/supervisord/version.go create mode 100644 vendor/github.com/rpoletaev/supervisord/xmlrpc.go diff --git a/cmd/config.go b/cmd/config.go index c6779160a..127f7c827 100644 --- a/cmd/config.go +++ b/cmd/config.go @@ -136,7 +136,7 @@ func init() { configCmd.Flags().StringVar(&conf.Config.TLSKey, "tls-key", "", "Filepath to the private key") configCmd.Flags().Int64Var(&conf.Config.MaxPageGenerationTime, "mpgt", 1000, "Max page generation time in ms") configCmd.Flags().StringSliceVar(&conf.Config.NodesAddr, "nodesAddr", []string{}, "List of addresses for downloading blockchain") - configCmd.Flags().BoolVar(&conf.Config.PrivateBlockchain, "privateBlockchain", false, "Is blockchain private") + configCmd.Flags().StringVar(&conf.Config.RunningMode, "runMode", "PublicBlockchain", "Node running mode") viper.BindPFlag("PidFilePath", configCmd.Flags().Lookup("pid")) viper.BindPFlag("LockFilePath", configCmd.Flags().Lookup("lock")) @@ -147,7 +147,7 @@ func init() { viper.BindPFlag("TLSCert", configCmd.Flags().Lookup("tls-cert")) viper.BindPFlag("TLSKey", configCmd.Flags().Lookup("tls-key")) viper.BindPFlag("MaxPageGenerationTime", configCmd.Flags().Lookup("mpgt")) - viper.BindPFlag("PrivateBlockchain", configCmd.Flags().Lookup("privateBlockchain")) viper.BindPFlag("TempDir", configCmd.Flags().Lookup("tempDir")) viper.BindPFlag("NodesAddr", configCmd.Flags().Lookup("nodesAddr")) + viper.BindPFlag("RunningMode", configCmd.Flags().Lookup("runMode")) } diff --git a/packages/api/api.go b/packages/api/api.go index 1bdeb76d2..9e55102aa 100644 --- a/packages/api/api.go +++ b/packages/api/api.go @@ -30,6 +30,7 @@ import ( hr "github.com/julienschmidt/httprouter" log "github.com/sirupsen/logrus" + "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/model" @@ -132,9 
+133,6 @@ func errorAPI(w http.ResponseWriter, err interface{}, code int, params ...interf func getPrefix(data *apiData) (prefix string) { prefix = converter.Int64ToStr(data.ecosystemId) - if data.vde { - prefix += `_vde` - } return } @@ -241,17 +239,12 @@ func fillToken(w http.ResponseWriter, r *http.Request, data *apiData, logger *lo func fillParams(params map[string]int) apiHandle { return func(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.Entry) error { - // Getting and validating request parameters - vde := r.FormValue(`vde`) - if vde == `1` || vde == `true` { - data.vm = smart.GetVM(true, data.ecosystemId) - if data.vm == nil { - return errorAPI(w, `E_VDE`, http.StatusBadRequest, data.ecosystemId) - } + if conf.Config.IsSupportingVDE() { data.vde = true - } else { - data.vm = smart.GetVM(false, 0) } + + data.vm = smart.GetVM() + for key, par := range params { val := r.FormValue(key) if par&pOptional == 0 && len(val) == 0 { @@ -278,6 +271,10 @@ func fillParams(params map[string]int) apiHandle { } func checkEcosystem(w http.ResponseWriter, data *apiData, logger *log.Entry) (int64, string, error) { + if conf.Config.IsSupportingVDE() { + return consts.DefaultVDE, "1", nil + } + ecosystemID := data.ecosystemId if data.params[`ecosystem`].(int64) > 0 { ecosystemID = data.params[`ecosystem`].(int64) @@ -292,9 +289,9 @@ func checkEcosystem(w http.ResponseWriter, data *apiData, logger *log.Entry) (in } } prefix := converter.Int64ToStr(ecosystemID) - if data.vde { - prefix += `_vde` - } + // if data.vde { + // prefix += `_vde` + // } return ecosystemID, prefix, nil } @@ -303,18 +300,20 @@ func fillTokenData(data *apiData, claims *JWTClaims, logger *log.Entry) error { data.keyId = converter.StrToInt64(claims.KeyID) data.isMobile = claims.IsMobile data.roleId = converter.StrToInt64(claims.RoleID) - ecosystem := &model.Ecosystem{} - found, err := ecosystem.Get(data.ecosystemId) - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on getting ecosystem from db") - return err - } + if !conf.Config.IsSupportingVDE() { + ecosystem := &model.Ecosystem{} + found, err := ecosystem.Get(data.ecosystemId) + if err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on getting ecosystem from db") + return err + } - if !found { - err := fmt.Errorf("ecosystem not found") - logger.WithFields(log.Fields{"type": consts.NotFound, "id": data.ecosystemId, "error": err}).Error("ecosystem not found") - } + if !found { + err := fmt.Errorf("ecosystem not found") + logger.WithFields(log.Fields{"type": consts.NotFound, "id": data.ecosystemId, "error": err}).Error("ecosystem not found") + } - data.ecosystemName = ecosystem.Name + data.ecosystemName = ecosystem.Name + } return nil } diff --git a/packages/api/content_test.go b/packages/api/content_test.go index 2b5177e55..004906908 100644 --- a/packages/api/content_test.go +++ b/packages/api/content_test.go @@ -101,7 +101,7 @@ func TestContent(t *testing.T) { )`}, "source": {"true"}, }, - `[{"tag":"data","attr":{"columns":"id,name","data":"1,Test message 1\n\t\t\t\t\t2,"Test message 2"\n\t\t\t\t\t3,"Test message 3"","source":"myforlist"}}]`, + `[{"tag":"data","attr":{"columns":"id,name","data":"1,Test message 1\n\t\t\t\t\t2,\"Test message 2\"\n\t\t\t\t\t3,\"Test message 3\"","source":"myforlist"}}]`, }, { "content", diff --git a/packages/api/contract.go b/packages/api/contract.go index 713ce4646..df38c6de8 100644 --- a/packages/api/contract.go +++ b/packages/api/contract.go @@ 
-317,8 +317,8 @@ func (c *contractHandlers) contract(w http.ResponseWriter, r *http.Request, data } pubkey := []byte{} - if _, ok := data.params["public_key"]; ok { - pubkey = data.params["public_key"].([]byte) + if _, ok := data.params["pubkey"]; ok { + pubkey = data.params["pubkey"].([]byte) } var err error publicKey, err = getPublicKey(signID, data.ecosystemId, pubkey, w, logger) diff --git a/packages/api/contract_test.go b/packages/api/contract_test.go index fadc0ce1b..6a9bef2f3 100644 --- a/packages/api/contract_test.go +++ b/packages/api/contract_test.go @@ -30,6 +30,14 @@ import ( "github.com/GenesisKernel/go-genesis/packages/crypto" ) +func TestExistContract(t *testing.T) { + assert.NoError(t, keyLogin(1)) + form := url.Values{"Name": {`EditPage`}, "Value": {`contract EditPage {action {}}`}, + "ApplicationId": {`1`}, "Conditions": {`true`}} + err := postTx(`NewContract`, &form) + assert.EqualError(t, err, `{"type":"panic","error":"Contract EditPage already exists"}`) +} + func TestNewContracts(t *testing.T) { wanted := func(name, want string) bool { @@ -413,15 +421,42 @@ func TestEditContracts(t *testing.T) { func TestNewTableWithEmptyName(t *testing.T) { require.NoError(t, keyLogin(1)) - + sql1 := `new_column varchar(10); update block_chain set key_id='1234' where id='1' --` + sql2 := `new_column varchar(10); update block_chain set key_id='12' where id='1' --` + name := randName(`tbl`) form := url.Values{ + "Name": {name}, + "Columns": {"[{\"name\":\"" + sql1 + "\",\"type\":\"varchar\", \"index\": \"0\", \"conditions\":{\"update\":\"true\", \"read\":\"true\"}}]"}, + "ApplicationId": {"1"}, + "Permissions": {"{\"insert\": \"true\", \"update\" : \"true\", \"new_column\": \"true\"}"}, + } + + require.NoError(t, postTx("NewTable", &form)) + + form = url.Values{"TableName": {name}, "Name": {sql2}, + "Type": {"varchar"}, "Index": {"0"}, "Permissions": {"true"}} + assert.NoError(t, postTx(`NewColumn`, &form)) + + form = url.Values{ "Name": {""}, "Columns": {"[{\"name\":\"MyName\",\"type\":\"varchar\", \"index\": \"0\", \"conditions\":{\"update\":\"true\", \"read\":\"true\"}}]"}, "ApplicationId": {"1"}, "Permissions": {"{\"insert\": \"true\", \"update\" : \"true\", \"new_column\": \"true\"}"}, } - require.NoError(t, postTx("NewTable", &form)) + if err := postTx("NewTable", &form); err == nil || err.Error() != + `{"type":"error","error":"Table name cannot be empty"}` { + t.Error(`wrong error`, err) + } + + form = url.Values{ + "Name": {"Digit" + name}, + "Columns": {"[{\"name\":\"1\",\"type\":\"varchar\", \"index\": \"0\", \"conditions\":{\"update\":\"true\", \"read\":\"true\"}}]"}, + "ApplicationId": {"1"}, + "Permissions": {"{\"insert\": \"true\", \"update\" : \"true\", \"new_column\": \"true\"}"}, + } + + assert.EqualError(t, postTx("NewTable", &form), `{"type":"panic","error":"Column name cannot begin with digit"}`) } func TestActivateContracts(t *testing.T) { @@ -1014,3 +1049,54 @@ func TestContractChain(t *testing.T) { t.Error(fmt.Errorf(`wrong result %s`, msg)) } } + +func TestLoopCond(t *testing.T) { + if err := keyLogin(1); err != nil { + t.Error(err) + return + } + rnd := `rnd` + crypto.RandSeq(4) + + form := url.Values{`Value`: {`contract ` + rnd + `1 { + conditions { + + } + }`}, `Conditions`: {`true`}, `ApplicationId`: {`1`}} + err := postTx(`NewContract`, &form) + if err != nil { + t.Error(err) + return + } + form = url.Values{`Value`: {`contract ` + rnd + `2 { + conditions { + ContractConditions("` + rnd + `1") + } + }`}, `Conditions`: {`true`}, `ApplicationId`: {`1`}} + err 
= postTx(`NewContract`, &form) + if err != nil { + t.Error(err) + return + } + var ret getContractResult + err = sendGet(`contract/`+rnd+`1`, nil, &ret) + if err != nil { + t.Error(err) + return + } + sid := ret.TableID + form = url.Values{`Value`: {`contract ` + rnd + `1 { + conditions { + ContractConditions("` + rnd + `2") + } + }`}, `Id`: {sid}, `Conditions`: {`true`}, `ApplicationId`: {`1`}} + err = postTx(`EditContract`, &form) + if err != nil { + t.Error(err) + return + } + err = postTx(rnd+`2`, &url.Values{}) + if err != nil { + t.Error(err) + return + } +} diff --git a/packages/api/login.go b/packages/api/login.go index b55fe85c3..90cba58b4 100644 --- a/packages/api/login.go +++ b/packages/api/login.go @@ -17,14 +17,15 @@ package api import ( - "fmt" "net/http" + "strings" "time" "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/notificator" "github.com/GenesisKernel/go-genesis/packages/publisher" + msgpack "gopkg.in/vmihailenco/msgpack.v2" "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/crypto" @@ -124,28 +125,61 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En return err } + pubkey = data.params[`pubkey`].([]byte) hexPubKey := hex.EncodeToString(pubkey) - params := make([]byte, 0) - params = append(append(params, converter.EncodeLength(int64(len(hexPubKey)))...), hexPubKey...) + params := converter.EncodeLength(int64(len(hexPubKey))) + params = append(params, hexPubKey...) - vm := smart.GetVM(false, 0) - contract := smart.VMGetContract(vm, "NewUser", 1) - info := contract.Block.Info.(*script.ContractInfo) + contract := smart.GetContract("NewUser", 1) - err = tx.BuildTransaction(tx.SmartContract{ + sc := tx.SmartContract{ Header: tx.Header{ - Type: int(info.ID), + Type: int(contract.Block.Info.(*script.ContractInfo).ID), Time: time.Now().Unix(), EcosystemID: 1, KeyID: conf.Config.KeyID, NetworkID: consts.NETWORK_ID, + PublicKey: pubkey, }, SignedBy: smart.PubToID(NodePublicKey), Data: params, - }, NodePrivateKey, NodePublicKey, string(hexPubKey)) - if err != nil { - log.WithFields(log.Fields{"type": consts.ContractError}).Error("Executing contract") } + + if conf.Config.IsSupportingVDE() { + + signPrms := []string{sc.ForSign()} + signPrms = append(signPrms, hexPubKey) + signData := strings.Join(signPrms, ",") + signature, err := crypto.Sign(NodePrivateKey, signData) + if err != nil { + log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("signing by node private key") + return err + } + + sc.BinSignatures = converter.EncodeLengthPlusData(signature) + + if sc.PublicKey, err = hex.DecodeString(NodePublicKey); err != nil { + log.WithFields(log.Fields{"type": consts.ConversionError, "error": err}).Error("decoding public key from hex") + return err + } + + serializedContract, err := msgpack.Marshal(sc) + if err != nil { + logger.WithFields(log.Fields{"type": consts.MarshallingError, "error": err}).Error("marshalling smart contract to msgpack") + return errorAPI(w, err, http.StatusInternalServerError) + } + ret, err := VDEContract(serializedContract, data) + if err != nil { + return errorAPI(w, err, http.StatusInternalServerError) + } + data.result = ret + } else { + err = tx.BuildTransaction(sc, NodePrivateKey, NodePublicKey, hexPubKey) + if err != nil { + log.WithFields(log.Fields{"type": consts.ContractError}).Error("Executing contract") + } + } + } if ecosystemID > 1 && 
len(pubkey) == 0 { @@ -207,7 +241,7 @@ func login(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En Address: address, IsOwner: founder == wallet, IsNode: conf.Config.KeyID == wallet, - IsVDE: model.IsTable(fmt.Sprintf(`%d_vde_tables`, ecosystemID)), + IsVDE: conf.Config.IsSupportingVDE(), } data.result = &result diff --git a/packages/api/route.go b/packages/api/route.go index 1fb9e45b0..56b547f61 100644 --- a/packages/api/route.go +++ b/packages/api/route.go @@ -19,6 +19,7 @@ package api import ( "strings" + "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/utils/tx" @@ -50,28 +51,18 @@ func Route(route *hr.Router) { route.Handle(`OPTIONS`, consts.ApiPath+`*name`, optionsHandler()) route.Handle(`GET`, consts.ApiPath+`data/:table/:id/:column/:hash`, dataHandler()) - get(`appparam/:appid/:name`, `?ecosystem:int64`, authWallet, appParam) - get(`appparams/:appid`, `?ecosystem:int64,?names:string`, authWallet, appParams) - get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) get(`contract/:name`, ``, authWallet, getContract) get(`contracts`, `?limit ?offset:int64`, authWallet, getContracts) - get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) - get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) - get(`ecosystems`, ``, authWallet, ecosystems) get(`getuid`, ``, getUID) get(`list/:name`, `?limit ?offset:int64,?columns:string`, authWallet, list) get(`row/:name/:id`, `?columns:string`, authWallet, row) get(`interface/page/:name`, ``, authWallet, getPageRow) get(`interface/menu/:name`, ``, authWallet, getMenuRow) get(`interface/block/:name`, ``, authWallet, getBlockInterfaceRow) - get(`systemparams`, `?names:string`, authWallet, systemParams) + // get(`systemparams`, `?names:string`, authWallet, systemParams) get(`table/:name`, ``, authWallet, table) get(`tables`, `?limit ?offset:int64`, authWallet, tables) - get(`txstatus/:hash`, ``, authWallet, txstatus) get(`test/:name`, ``, getTest) - get(`history/:table/:id`, ``, authWallet, getHistory) - get(`block/:id`, ``, getBlockInfo) - get(`maxblockid`, ``, getMaxBlockID) get(`version`, ``, getVersion) get(`avatar/:ecosystem/:member`, ``, getAvatar) get(`config/:option`, ``, getConfigOption) @@ -80,7 +71,6 @@ func Route(route *hr.Router) { post(`content/page/:name`, `?lang:string`, authWallet, getPage) post(`content/menu/:name`, `?lang:string`, authWallet, getMenu) post(`content/hash/:name`, ``, getPageHash) - post(`vde/create`, ``, authWallet, vdeCreate) post(`login`, `?pubkey signature:hex,?key_id ?mobile:string,?ecosystem ?expire ?role_id:int64`, login) post(`prepare/:name`, `?token_ecosystem:int64,?max_sum ?payover:string`, authWallet, contractHandlers.prepareContract) post(`prepareMultiple`, `data:string`, authWallet, contractHandlers.prepareMultipleContract) @@ -91,8 +81,23 @@ func Route(route *hr.Router) { post(`test/:name`, ``, getTest) post(`content`, `template ?source:string`, jsonContent) post(`updnotificator`, `ids:string`, updateNotificator) - + get(`ecosystemparam/:name`, `?ecosystem:int64`, authWallet, ecosystemParam) methodRoute(route, `POST`, `node/:name`, `?token_ecosystem:int64,?max_sum ?payover:string`, contractHandlers.nodeContract) + + if !conf.Config.IsSupportingVDE() { + get(`txstatus/:hash`, ``, authWallet, txstatus) + get(`txstatusMultiple`, `data:string`, authWallet, txstatusMulti) + get(`appparam/:appid/:name`, `?ecosystem:int64`, 
authWallet, appParam) + get(`appparams/:appid`, `?ecosystem:int64,?names:string`, authWallet, appParams) + get(`history/:table/:id`, ``, authWallet, getHistory) + get(`balance/:wallet`, `?ecosystem:int64`, authWallet, balance) + get(`block/:id`, ``, getBlockInfo) + get(`maxblockid`, ``, getMaxBlockID) + + get(`ecosystemparams`, `?ecosystem:int64,?names:string`, authWallet, ecosystemParams) + get(`systemparams`, `?names:string`, authWallet, systemParams) + get(`ecosystems`, ``, authWallet, ecosystems) + } } func processParams(input string) (params map[string]int) { diff --git a/packages/api/smart_test.go b/packages/api/smart_test.go index 045c6fc79..eb615b202 100644 --- a/packages/api/smart_test.go +++ b/packages/api/smart_test.go @@ -260,6 +260,21 @@ func TestPage(t *testing.T) { assert.NoError(t, postTx(`AppendPage`, &form)) } +func TestNewTableOnly(t *testing.T) { + assert.NoError(t, keyLogin(1)) + + name := "MMy_s_test_table" + form := url.Values{"Name": {name}, "ApplicationId": {"1"}, "Columns": {`[{"name":"MyName","type":"varchar", + "conditions":"true"}, + {"name":"Name", "type":"varchar","index": "0", "conditions":"{\"read\":\"true\",\"update\":\"true\"}"}]`}, + "Permissions": {`{"insert": "true", "update" : "true", "new_column": "true"}`}} + require.NoError(t, postTx(`NewTable`, &form)) + + var ret tableResult + require.NoError(t, sendGet(`table/`+name, nil, &ret)) + fmt.Printf("%+v\n", ret) +} + func TestNewTable(t *testing.T) { assert.NoError(t, keyLogin(1)) @@ -419,14 +434,57 @@ func TestUpdateSysParam(t *testing.T) { func TestUpdateFullNodesWithEmptyArray(t *testing.T) { require.NoErrorf(t, keyLogin(1), "on login") - byteNodes := `[]` - // byteNodes += `{"tcp_address":"127.0.0.1:7080", "api_address":"https://127.0.0.1:7081", "key_id":"5462687003324713865", "public_key":"4ea2433951ca21e6817426675874b2a6d98e5051c1100eddefa1847b0388e4834facf9abf427c46e2bc6cd5e3277fba533d03db553e499eb368194b3f1e514d4"}]` + byteNodes := `[` + byteNodes += `{"tcp_address":"127.0.0.1:7078", "api_address":"https://127.0.0.1:7079", "key_id":"-4466900793776865315", "public_key":"ca901a97e84d76f8d46e2053028f709074b3e60d3e2e33495840586567a0c961820d789592666b67b05c6ae120d5bd83d4388b2f1218638d8226d40ced0bb208"},` + byteNodes += `{"tcp_address":"127.0.0.1:7080", "api_address":"https://127.0.0.1:7081", "key_id":"542353610328569127", "public_key":"a8ada71764fd2f0c9fa1d2986455288f11f0f3931492d27dc62862fdff9c97c38923ef46679488ad1cd525342d4d974621db58f809be6f8d1c19fdab50abc06b"},` + byteNodes += `{"tcp_address":"127.0.0.1:7082", "api_address":"https://127.0.0.1:7083", "key_id":"5972241339967729614", "public_key":"de1b74d36ae39422f2478cba591f4d14eb017306f6ffdc3b577cc52ee50edb8fe7c7b2eb191a24c8ddfc567cef32152bab17de698ed7b3f2ab75f3bcc8b9b372"}` + byteNodes += `]` form := &url.Values{ "Name": {"full_nodes"}, "Value": {string(byteNodes)}, } - require.EqualError(t, postTx(`UpdateSysParam`, form), `{"type":"panic","error":"Invalid value"}`) + require.NoError(t, postTx(`UpdateSysParam`, form)) +} + +func TestHelper_InsertNodeKey(t *testing.T) { + + if err := keyLogin(1); err != nil { + t.Error(err) + return + } + + form := url.Values{ + `Value`: {`contract InsertNodeKey { + data { + KeyID string + PubKey string + } + conditions {} + action { + DBInsert("keys", "id,pub,amount", $KeyID, $PubKey, "100000000000000000000") + } + }`}, + `ApplicationId`: {`1`}, + `Conditions`: {`true`}, + } + + require.NoError(t, postTx(`NewContract`, &form)) + + forms := []url.Values{ + url.Values{ + `KeyID`: {"542353610328569127"}, + 
`PubKey`: {"be78f54bcf6bb7b49b7ea00790b18b40dd3f5e231ffc764f1c32d3f5a82ab322aee157931bbfca733bac83255002f5ded418f911b959b77a937f0d5d07de74f8"}, + }, + url.Values{ + `KeyID`: {"5972241339967729614"}, + `PubKey`: {"7b11a9ee4f509903118d5b965a819b778c83a21a52a033e5768d697a70a61a1bad270465f25d7f70683e977be93a9252e762488fc53808a90220d363d0a38eb6"}, + }, + } + + for _, frm := range forms { + require.NoError(t, postTx(`InsertNodeKey`, &frm)) + } } func TestValidateConditions(t *testing.T) { @@ -777,3 +835,113 @@ func TestMemoryLimit(t *testing.T) { assert.EqualError(t, postTx(contract, &url.Values{}), `{"type":"panic","error":"Memory limit exceeded"}`) } + +func TestStack(t *testing.T) { + assert.NoError(t, keyLogin(1)) + + parent := randName("Parent") + child := randName("Child") + + assert.NoError(t, postTx("NewContract", &url.Values{ + "Value": {`contract ` + child + ` { + action { + $result = $stack + } + }`}, + "ApplicationId": {"1"}, + "Conditions": {"true"}, + })) + + assert.NoError(t, postTx("NewContract", &url.Values{ + "Value": {`contract ` + parent + ` { + action { + var arr array + arr[0] = $stack + arr[1] = ` + child + `() + $result = arr + } + }`}, + "ApplicationId": {"1"}, + "Conditions": {"true"}, + })) + + _, res, err := postTxResult(parent, &url.Values{}) + assert.NoError(t, err) + assert.Equal(t, fmt.Sprintf("[[@1%s] [@1%[1]s @1%s]]", parent, child), res) +} + +func TestPageHistory(t *testing.T) { + assert.NoError(t, keyLogin(1)) + + name := randName(`page`) + value := `P(test,test paragraph)` + + form := url.Values{"Name": {name}, "Value": {value}, "ApplicationId": {`1`}, + "Menu": {"default_menu"}, "Conditions": {"ContractConditions(`MainCondition`)"}} + assert.NoError(t, postTx(`NewPage`, &form)) + + var ret listResult + assert.NoError(t, sendGet(`list/pages`, nil, &ret)) + id := ret.Count + assert.NoError(t, postTx(`EditPage`, &url.Values{"Id": {id}, "Value": {"Div(style){ok}"}})) + assert.NoError(t, postTx(`EditPage`, &url.Values{"Id": {id}, "Conditions": {"true"}})) + + form = url.Values{"Name": {randName(`menu`)}, "Value": {`MenuItem(First)MenuItem(Second)`}, + "ApplicationId": {`1`}, "Conditions": {"ContractConditions(`MainCondition`)"}} + assert.NoError(t, postTx(`NewMenu`, &form)) + + assert.NoError(t, sendGet(`list/menu`, nil, &ret)) + idmenu := ret.Count + assert.NoError(t, postTx(`EditMenu`, &url.Values{"Id": {idmenu}, "Conditions": {"true"}})) + assert.NoError(t, postTx(`EditMenu`, &url.Values{"Id": {idmenu}, "Value": {"MenuItem(Third)"}})) + assert.NoError(t, postTx(`EditMenu`, &url.Values{"Id": {idmenu}, + "Value": {"MenuItem(Third)"}, "Conditions": {"false"}})) + + form = url.Values{"Value": {`contract C` + name + `{ action {}}`}, + "ApplicationId": {`1`}, "Conditions": {"ContractConditions(`MainCondition`)"}} + _, idCont, err := postTxResult(`NewContract`, &form) + assert.NoError(t, err) + assert.NoError(t, postTx(`EditContract`, &url.Values{"Id": {idCont}, + "Value": {`contract C` + name + `{ action {Println("OK")}}`}, "Conditions": {"true"}})) + + form = url.Values{`Value`: {`contract Get` + name + ` { + data { + IdPage int + IdMenu int + IdCont int + } + action { + var ret array + ret = GetPageHistory($IdPage) + $result = Str(Len(ret)) + ret = GetMenuHistory($IdMenu) + $result = $result + Str(Len(ret)) + ret = GetContractHistory($IdCont) + $result = $result + Str(Len(ret)) + } + }`}, "ApplicationId": {`1`}, `Conditions`: {`true`}} + assert.NoError(t, postTx(`NewContract`, &form)) + + _, msg, err := postTxResult(`Get`+name, &url.Values{"IdPage": {id}, 
"IdMenu": {idmenu}, + "IdCont": {idCont}}) + assert.NoError(t, err) + assert.Equal(t, `231`, msg) + + form = url.Values{"Name": {name + `1`}, "Value": {value}, "ApplicationId": {`1`}, + "Menu": {"default_menu"}, "Conditions": {"ContractConditions(`MainCondition`)"}} + assert.NoError(t, postTx(`NewPage`, &form)) + + assert.NoError(t, postTx(`Get`+name, &url.Values{"IdPage": {converter.Int64ToStr( + converter.StrToInt64(id) + 1)}, "IdMenu": {idmenu}, "IdCont": {idCont}})) + + assert.EqualError(t, postTx(`Get`+name, &url.Values{"IdPage": {`1000000`}, "IdMenu": {idmenu}, + "IdCont": {idCont}}), `{"type":"panic","error":"Record has not been found"}`) + + var retTemp contentResult + assert.NoError(t, sendPost(`content`, &url.Values{`template`: {fmt.Sprintf(`GetPageHistory(MySrc,%s)`, + id)}}, &retTemp)) + + if len(RawToString(retTemp.Tree)) < 400 { + t.Error(fmt.Errorf(`wrong tree %s`, RawToString(retTemp.Tree))) + } +} diff --git a/packages/api/table.go b/packages/api/table.go index 4440ef471..554c2173d 100644 --- a/packages/api/table.go +++ b/packages/api/table.go @@ -19,6 +19,7 @@ package api import ( "encoding/json" "net/http" + "strings" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/converter" @@ -51,7 +52,7 @@ func table(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.En prefix := getPrefix(data) table := &model.Table{} table.SetTablePrefix(prefix) - _, err = table.Get(nil, data.params[`name`].(string)) + _, err = table.Get(nil, strings.ToLower(data.params[`name`].(string))) if err != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("Getting table") return errorAPI(w, err.Error(), http.StatusInternalServerError) diff --git a/packages/api/template_test.go b/packages/api/template_test.go index 9c4d6805d..ea73bf31e 100644 --- a/packages/api/template_test.go +++ b/packages/api/template_test.go @@ -129,6 +129,12 @@ var forTest = tplList{ Page:members_list,).Alert(Text: $want_save_changesx$, ConfirmButton: $yesx$, CancelButton: $nox$, Icon: question)`, `[{"tag":"button","attr":{"alert":{"cancelbutton":"$nox$","confirmbutton":"$yesx$","icon":"question","text":"$want_save_changesx$"},"class":"btn btn-primary","contract":"EditProfile","page":"members_list"},"children":[{"tag":"text","text":"savex"}]}]`}, + {`Button(Body: button).Popup(Width: 100)`, + `[{"tag":"button","attr":{"popup":{"width":"100"}},"children":[{"tag":"text","text":"button"}]}]`}, + {`Button(Body: button).Popup(Width: 100, Header: header)`, + `[{"tag":"button","attr":{"popup":{"header":"header","width":"100"}},"children":[{"tag":"text","text":"button"}]}]`}, + {`Button(Body: button).Popup(Header: header)`, + `[{"tag":"button","children":[{"tag":"text","text":"button"}]}]`}, {`Simple Strong(bold text)`, `[{"tag":"text","text":"Simple "},{"tag":"strong","children":[{"tag":"text","text":"bold text"}]}]`}, {`EcosysParam(gender, Source: mygender)`, @@ -141,8 +147,6 @@ var forTest = tplList{ SetVar(varNotZero, 1) If(#varNotZero#>0) { the varNotZero should be visible } If(#varUndefined#>0) { the varUndefined should be hidden }`, `[{"tag":"text","text":"the varNotZero should be visible"}]`}, - {`Address(EcosysParam(founder_account))+EcosysParam(founder_account)`, - `[{"tag":"text","text":"1651-3553-1389-2023-2108"},{"tag":"text","text":"+-1933190934789319508"}]`}, } func TestMobile(t *testing.T) { diff --git a/packages/api/vde.go b/packages/api/vde.go index d494dba3e..9891ffddb 100644 --- a/packages/api/vde.go +++ b/packages/api/vde.go 
@@ -67,8 +67,8 @@ func InitSmartContract(sc *smart.SmartContract, data []byte) error { if err := msgpack.Unmarshal(data, &sc.TxSmart); err != nil { return err } - sc.TxContract = smart.VMGetContractByID(smart.GetVM(sc.VDE, sc.TxSmart.EcosystemID), - int32(sc.TxSmart.Type)) + + sc.TxContract = smart.VMGetContractByID(smart.GetVM(), int32(sc.TxSmart.Type)) if sc.TxContract == nil { return fmt.Errorf(`unknown contract %d`, sc.TxSmart.Type) } @@ -173,17 +173,22 @@ func VDEContract(contractData []byte, data *apiData) (result *contractResult, er result.Message = &txstatusError{Type: "panic", Error: err.Error()} return } + if data.token != nil && data.token.Valid { if auth, err := data.token.SignedString([]byte(jwtSecret)); err == nil { sc.TxData[`auth_token`] = auth } } + if ret, err = sc.CallContract(smart.CallInit | smart.CallCondition | smart.CallAction); err == nil { result.Result = ret } else { if errResult := json.Unmarshal([]byte(err.Error()), &result.Message); errResult != nil { - log.WithFields(log.Fields{"type": consts.JSONUnmarshallError, "text": err.Error(), + log.WithFields(log.Fields{ + "type": consts.JSONUnmarshallError, + "text": err.Error(), "error": errResult}).Error("unmarshalling contract error") + result.Message = &txstatusError{Type: "panic", Error: errResult.Error()} } } diff --git a/packages/api/vde_test.go b/packages/api/vde_test.go index c0d6b7d68..990809436 100644 --- a/packages/api/vde_test.go +++ b/packages/api/vde_test.go @@ -24,6 +24,7 @@ import ( "time" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" "github.com/GenesisKernel/go-genesis/packages/conf" "github.com/GenesisKernel/go-genesis/packages/consts" @@ -33,123 +34,46 @@ import ( ) func TestVDECreate(t *testing.T) { - var ( - err error - retid int64 - ret vdeCreateResult - ) - - assert.NoError(t, keyLogin(1)) + require.NoError(t, keyLogin(1)) - if err = sendPost(`vde/create`, nil, &ret); err != nil && - err.Error() != `400 {"error": "E_VDECREATED", "msg": "Virtual Dedicated Ecosystem is already created" }` { - t.Error(err) - return + form := url.Values{ + "VDEName": {"myvde3"}, + "DBUser": {"myvdeuser3"}, + "DBPassword": {"vdepassword"}, + "VDEAPIPort": {"8004"}, } + assert.NoError(t, postTx("NewVDE", &form)) +} - rnd := `rnd` + crypto.RandSeq(6) - form := url.Values{`Value`: {`contract ` + rnd + ` { - data { - Par string - } - action { Test("active", $Par)}}`}, `Conditions`: {`ContractConditions("MainCondition")`}, `vde`: {`true`}} - - retid, _, err = postTxResult(`NewContract`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`contract ` + rnd + ` { - data { - Par string - } - action { Test("active 5", $Par)}}`}, `Conditions`: {`ContractConditions("MainCondition")`}, `vde`: {`true`}} - assert.NoError(t, postTx(`EditContract`, &form)) - - form = url.Values{`Name`: {rnd}, `Value`: {`Test value`}, `Conditions`: {`ContractConditions("MainCondition")`}, - `vde`: {`1`}} +func TestVDEList(t *testing.T) { + require.NoError(t, keyLogin(1)) - retid, _, err = postTxResult(`NewParameter`, &form) - assert.NoError(t, err) + fmt.Println(postTx("ListVDE", nil)) +} - form = url.Values{`Name`: {`new_table`}, `Value`: {`Test value`}, `Conditions`: {`ContractConditions("MainCondition")`}, - `vde`: {`1`}} - if err = postTx(`NewParameter`, &form); err != nil && err.Error() != - `500 {"error": "E_SERVER", "msg": "{\"type\":\"warning\",\"error\":\"Parameter new_table already exists\"}" }` { - t.Error(err) - return +func TestStopVDE(t *testing.T) { + 
require.NoError(t, keyLogin(1)) + form := url.Values{ + "VDEName": {"myvde3"}, } - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit value`}, `Conditions`: {`true`}, - `vde`: {`1`}} - - assert.NoError(t, postTx(`EditParameter`, &form)) - - form = url.Values{"Name": {`menu` + rnd}, "Value": {`first - second - third`}, "Title": {`My Menu`}, - "Conditions": {`true`}, `vde`: {`1`}} - retid, _, err = postTxResult(`NewMenu`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit value`}, - `Conditions`: {`true`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`EditMenu`, &form)) - - form = url.Values{"Id": {converter.Int64ToStr(retid)}, "Value": {`Span(Append)`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`AppendMenu`, &form)) - - form = url.Values{"Name": {`page` + rnd}, "Value": {`Page`}, "Menu": {`government`}, - "Conditions": {`true`}, `vde`: {`1`}} - retid, _, err = postTxResult(`NewPage`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit page value`}, - `Conditions`: {`true`}, "Menu": {`government`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`EditPage`, &form)) - - form = url.Values{"Id": {converter.Int64ToStr(retid)}, "Value": {`Span(Test Page)`}, - `vde`: {`1`}} - assert.NoError(t, postTx(`AppendPage`, &form)) - - form = url.Values{"Name": {`block` + rnd}, "Value": {`Page block`}, "Conditions": {`true`}, `vde`: {`1`}} - retid, _, err = postTxResult(`NewBlock`, &form) - assert.NoError(t, err) - - form = url.Values{`Id`: {converter.Int64ToStr(retid)}, `Value`: {`Test edit block value`}, - `Conditions`: {`true`}, `vde`: {`1`}} - assert.NoError(t, postTx(`EditBlock`, &form)) - - name := randName(`tbl`) - form = url.Values{"Name": {name}, `vde`: {`true`}, "Columns": {`[{"name":"MyName","type":"varchar", "index": "1", - "conditions":"true"}, - {"name":"Amount", "type":"number","index": "0", "conditions":"true"}, - {"name":"Active", "type":"character","index": "0", "conditions":"true"}]`}, - "Permissions": {`{"insert": "true", "update" : "true", "new_column": "true"}`}} - assert.NoError(t, postTx(`NewTable`, &form)) - - form = url.Values{"Name": {name}, `vde`: {`true`}, - "Permissions": {`{"insert": "ContractConditions(\"MainCondition\")", - "update" : "true", "new_column": "ContractConditions(\"MainCondition\")"}`}} - assert.NoError(t, postTx(`EditTable`, &form)) - - form = url.Values{"TableName": {name}, "Name": {`newCol`}, `vde`: {`1`}, - "Type": {"varchar"}, "Index": {"0"}, "Permissions": {"true"}} - assert.NoError(t, postTx(`NewColumn`, &form)) - - form = url.Values{"TableName": {name}, "Name": {`newColRead`}, `vde`: {`1`}, - "Type": {"varchar"}, "Index": {"0"}, "Permissions": {`{"update":"true", "read":"false"}`}} - assert.NoError(t, postTx(`NewColumn`, &form)) - - form = url.Values{"TableName": {name}, "Name": {`newCol`}, `vde`: {`1`}, - "Permissions": {"ContractConditions(\"MainCondition\")"}} - assert.NoError(t, postTx(`EditColumn`, &form)) + require.NoError(t, postTx("StopVDE", &form)) +} - form = url.Values{"TableName": {name}, "Name": {`newCol`}, `vde`: {`1`}, - "Permissions": {`{"update":"true", "read":"false"}`}} - assert.NoError(t, postTx(`EditColumn`, &form)) +func TestRunVDE(t *testing.T) { + require.NoError(t, keyLogin(1)) + form := url.Values{ + "VDEName": {"myvde3"}, + } + require.NoError(t, postTx("RunVDE", &form)) } +func TestRemoveVDE(t *testing.T) { + require.NoError(t, keyLogin(1)) + form := url.Values{ + "VDEName": {"myvde3"}, + 
}
+ require.NoError(t, postTx("RemoveVDE", &form))
+}
func TestVDEParams(t *testing.T) {
assert.NoError(t, keyLogin(1))
diff --git a/packages/block/block.go b/packages/block/block.go
index 5793e255c..4a4ca4494 100644
--- a/packages/block/block.go
+++ b/packages/block/block.go
@@ -223,7 +223,7 @@ func (b *Block) Check() error {
// skip time validation for first block
if b.Header.BlockID > 1 {
- blockTimeCalculator, err := utils.BuildBlockTimeCalculator()
+ blockTimeCalculator, err := utils.BuildBlockTimeCalculator(nil)
if err != nil {
logger.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("building block time calculator")
return err
diff --git a/packages/block/db.go b/packages/block/db.go
index 84fe63a6c..36b5da750 100644
--- a/packages/block/db.go
+++ b/packages/block/db.go
@@ -106,7 +106,7 @@ func InsertIntoBlockchain(transaction *model.DbTransaction, block *Block) error
RollbacksHash: rollbackTxsHash,
Tx: int32(len(block.Transactions)),
}
- blockTimeCalculator, err := utils.BuildBlockTimeCalculator()
+ blockTimeCalculator, err := utils.BuildBlockTimeCalculator(nil)
if err != nil {
log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating block")
return err
diff --git a/packages/conf/conf.go b/packages/conf/conf.go
index 7ac76b6e8..b91be9b38 100644
--- a/packages/conf/conf.go
+++ b/packages/conf/conf.go
@@ -89,6 +89,7 @@ type GlobalConfig struct {
TLS bool // TLS is on/off. It is required for https
TLSCert string // TLSCert is a filepath of the fullchain of certificate.
TLSKey string // TLSKey is a filepath of the private key.
+ RunningMode string
MaxPageGenerationTime int64 // in milliseconds
@@ -132,10 +133,33 @@ func LoadConfig(path string) error {
if err != nil {
return errors.Wrapf(err, "marshalling config to global struct variable")
}
-
return nil
}
+// GetConfigFromPath read config from path and returns GlobalConfig struct
+func GetConfigFromPath(path string) (*GlobalConfig, error) {
+ log.WithFields(log.Fields{"path": path}).Info("Loading config")
+
+ _, err := os.Stat(path)
+ if os.IsNotExist(err) {
+ return nil, errors.Errorf("Unable to load config file %s", path)
+ }
+
+ viper.SetConfigFile(path)
+ err = viper.ReadInConfig()
+ if err != nil {
+ return nil, errors.Wrapf(err, "reading config")
+ }
+
+ c := &GlobalConfig{}
+ err = viper.Unmarshal(c)
+ if err != nil {
+ return c, errors.Wrapf(err, "marshalling config to global struct variable")
+ }
+
+ return c, nil
+}
+
// SaveConfig save global parameters to configFile
func SaveConfig(path string) error {
dir := filepath.Dir(path)
@@ -216,3 +240,28 @@ func FillRuntimeKey() error {
func GetNodesAddr() []string {
return Config.NodesAddr[:]
}
+
+// IsPrivateBlockchain check running mode
+func (c GlobalConfig) IsPrivateBlockchain() bool {
+ return RunMode(c.RunningMode).IsPrivateBlockchain()
+}
+
+// IsPublicBlockchain check running mode
+func (c GlobalConfig) IsPublicBlockchain() bool {
+ return RunMode(c.RunningMode).IsPublicBlockchain()
+}
+
+// IsVDE check running mode
+func (c GlobalConfig) IsVDE() bool {
+ return RunMode(c.RunningMode).IsVDE()
+}
+
+// IsVDEMaster check running mode
+func (c GlobalConfig) IsVDEMaster() bool {
+ return RunMode(c.RunningMode).IsVDEMaster()
+}
+
+// IsSupportingVDE check running mode
+func (c GlobalConfig) IsSupportingVDE() bool {
+ return RunMode(c.RunningMode).IsSupportingVDE()
+}
diff --git a/packages/conf/runmode.go b/packages/conf/runmode.go
new file mode 100644
index 000000000..a03f2aeb0
--- /dev/null
+++ b/packages/conf/runmode.go
@@ -0,0 +1,40 @@
+package conf
+
+// PrivateBlockchain const label for running mode
+const privateBlockchain RunMode = "PrivateBlockchain"
+
+// PublicBlockchain const label for running mode
+const publicBlockchain RunMode = "PublicBlockchain"
+
+// VDEManager const label for running mode
+const vdeMaster RunMode = "VDEMaster"
+
+// VDE const label for running mode
+const vde RunMode = "VDE"
+
+type RunMode string
+
+// IsPublicBlockchain returns true if mode equal PublicBlockchain
+func (rm RunMode) IsPublicBlockchain() bool {
+ return rm == publicBlockchain
+}
+
+// IsPrivateBlockchain returns true if mode equal PrivateBlockchain
+func (rm RunMode) IsPrivateBlockchain() bool {
+ return rm == privateBlockchain
+}
+
+// IsVDEMaster returns true if mode equal vdeMaster
+func (rm RunMode) IsVDEMaster() bool {
+ return rm == vdeMaster
+}
+
+// IsVDE returns true if mode equal vde
+func (rm RunMode) IsVDE() bool {
+ return rm == vde
+}
+
+// IsSupportingVDE returns true if mode support vde
+func (rm RunMode) IsSupportingVDE() bool {
+ return rm.IsVDE() || rm.IsVDEMaster()
+}
diff --git a/packages/conf/syspar/syspar.go b/packages/conf/syspar/syspar.go
index a4087493a..96b7706ca 100644
--- a/packages/conf/syspar/syspar.go
+++ b/packages/conf/syspar/syspar.go
@@ -202,6 +202,21 @@ func GetNumberOfNodes() int64 {
return int64(len(nodesByPosition))
}
+func GetNumberOfNodesFromDB(transaction *model.DbTransaction) int64 {
+ sp := &model.SystemParameter{}
+ sp.GetTransaction(transaction, FullNodes)
+ var fullNodes []map[string]interface{}
+ if len(sp.Value) > 0 {
+ if err := json.Unmarshal([]byte(sp.Value), &fullNodes); err != nil {
+ log.WithFields(log.Fields{"type": consts.JSONUnmarshallError, "error": err, "value": sp.Value}).Error("unmarshalling fullnodes from JSON")
+ }
+ }
+ if len(fullNodes) == 0 {
+ return 1
+ }
+ return int64(len(fullNodes))
+}
+
// GetNodeByPosition is retrieving node by position
func GetNodeByPosition(position int64) (*FullNode, error) {
mutex.RLock()
diff --git a/packages/consts/consts.go b/packages/consts/consts.go
index 9684221d7..45b07c9b6 100644
--- a/packages/consts/consts.go
+++ b/packages/consts/consts.go
@@ -157,3 +157,6 @@ const TxRequestExpire = 1 * time.Minute
// DefaultTempDirName is default name of temporary directory
const DefaultTempDirName = "genesis-temp"
+
+// DefaultVDE allways is 1
+const DefaultVDE = 1
diff --git a/packages/consts/log_types.go b/packages/consts/log_types.go
index d44b81bad..5f421a00b 100644
--- a/packages/consts/log_types.go
+++ b/packages/consts/log_types.go
@@ -54,4 +54,6 @@ const (
BCActualizationError = "BCActualizationError"
SchedulerError = "SchedulerError"
SyncProcess = "SyncProcess"
+ WrongModeError = "WrongModeError"
+ VDEManagerError = "VDEManagerError"
)
diff --git a/packages/converter/converter.go b/packages/converter/converter.go
index 2823c29d1..bea0c00ba 100644
--- a/packages/converter/converter.go
+++ b/packages/converter/converter.go
@@ -426,6 +426,11 @@ func SanitizeNumber(input string) string {
return Sanitize(input, `+.- `)
}
+func EscapeSQL(name string) string {
+ return strings.Replace(strings.Replace(strings.Replace(name, `"`, `""`, -1),
+ `;`, ``, -1), `'`, `''`, -1)
+}
+
// EscapeName deletes unaccessable characters for input name(s)
func EscapeName(name string) string {
out := make([]byte, 1, len(name)+2)
diff --git a/packages/daemons/block_generator.go b/packages/daemons/block_generator.go
index 2db69d872..f09f81413 100644
--- a/packages/daemons/block_generator.go
+++ b/packages/daemons/block_generator.go
@@ -61,7 +61,7 @@
func BlockGenerator(ctx context.Context, d *daemon) error { return err } - blockTimeCalculator, err := utils.BuildBlockTimeCalculator() + blockTimeCalculator, err := utils.BuildBlockTimeCalculator(nil) if err != nil { d.logger.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("building block time calculator") return err diff --git a/packages/daemons/block_generator_tx.go b/packages/daemons/block_generator_tx.go index 9b5ddb977..d96e58f8c 100644 --- a/packages/daemons/block_generator_tx.go +++ b/packages/daemons/block_generator_tx.go @@ -45,7 +45,7 @@ func (dtx *DelayedTx) RunForBlockID(blockID int64) { } func (dtx *DelayedTx) createTx(delayedContactID, keyID int64) error { - vm := smart.GetVM(false, 0) + vm := smart.GetVM() contract := smart.VMGetContract(vm, callDelayedContract, uint32(firstEcosystemID)) info := contract.Block.Info.(*script.ContractInfo) diff --git a/packages/daemons/blocks_collection.go b/packages/daemons/blocks_collection.go index 92bfd0acb..e9692ab9c 100644 --- a/packages/daemons/blocks_collection.go +++ b/packages/daemons/blocks_collection.go @@ -33,6 +33,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/rollback" "github.com/GenesisKernel/go-genesis/packages/service" "github.com/GenesisKernel/go-genesis/packages/tcpserver" + "github.com/GenesisKernel/go-genesis/packages/transaction" "github.com/GenesisKernel/go-genesis/packages/utils" log "github.com/sirupsen/logrus" @@ -164,6 +165,7 @@ func UpdateChain(ctx context.Context, d *daemon, host string, maxBlockID int64) } if !hashMatched { + transaction.CleanCache() //it should be fork, replace our previous blocks to ones from the host err := GetBlocks(b.Header.BlockID-1, host) if err != nil { @@ -264,6 +266,9 @@ func banNode(host string, block *block.Block, err error) { blockId, blockTime int64 ) if err != nil { + if err == transaction.ErrDuplicatedTx { + return + } reason = err.Error() } diff --git a/packages/daemons/common.go b/packages/daemons/common.go index 861c03983..8f1bb4d6b 100644 --- a/packages/daemons/common.go +++ b/packages/daemons/common.go @@ -130,7 +130,7 @@ func StartDaemons() { utils.CancelFunc = cancel utils.ReturnCh = make(chan string) - daemonsToStart := serverList + daemonsToStart := getDaemonsToStart() if conf.Config.TestRollBack { daemonsToStart = rollbackList } @@ -156,3 +156,14 @@ func getHostPort(h string) string { } return fmt.Sprintf("%s:%d", h, consts.DEFAULT_TCP_PORT) } + +func getDaemonsToStart() []string { + if conf.Config.IsSupportingVDE() { + return []string{ + "Notificator", + "Scheduler", + } + } + + return serverList +} diff --git a/packages/daylight/daemonsctl/daemonsctl.go b/packages/daylight/daemonsctl/daemonsctl.go index eb8049c75..ce8134e0a 100644 --- a/packages/daylight/daemonsctl/daemonsctl.go +++ b/packages/daylight/daemonsctl/daemonsctl.go @@ -14,17 +14,19 @@ import ( // RunAllDaemons start daemons, load contracts and tcpserver func RunAllDaemons() error { - logEntry := log.WithFields(log.Fields{"daemon_name": "block_collection"}) - - daemons.InitialLoad(logEntry) - err := syspar.SysUpdate(nil) - if err != nil { - log.Errorf("can't read system parameters: %s", utils.ErrInfo(err)) - return err - } - - if data, ok := block.GetDataFromFirstBlock(); ok { - syspar.SetFirstBlockData(data) + if !conf.Config.IsSupportingVDE() { + logEntry := log.WithFields(log.Fields{"daemon_name": "block_collection"}) + + daemons.InitialLoad(logEntry) + err := syspar.SysUpdate(nil) + if err != nil { + log.Errorf("can't read system parameters: %s", utils.ErrInfo(err)) + 
return err + } + + if data, ok := block.GetDataFromFirstBlock(); ok { + syspar.SetFirstBlockData(data) + } } log.Info("load contracts") @@ -36,8 +38,7 @@ func RunAllDaemons() error { log.Info("start daemons") daemons.StartDaemons() - err = tcpserver.TcpListener(conf.Config.TCPServer.Str()) - if err != nil { + if err := tcpserver.TcpListener(conf.Config.TCPServer.Str()); err != nil { log.Errorf("can't start tcp servers, stop") return err } diff --git a/packages/daylight/start.go b/packages/daylight/start.go index c2017942c..98394511e 100644 --- a/packages/daylight/start.go +++ b/packages/daylight/start.go @@ -37,8 +37,10 @@ import ( "github.com/GenesisKernel/go-genesis/packages/model" "github.com/GenesisKernel/go-genesis/packages/publisher" "github.com/GenesisKernel/go-genesis/packages/service" + "github.com/GenesisKernel/go-genesis/packages/smart" "github.com/GenesisKernel/go-genesis/packages/statsd" "github.com/GenesisKernel/go-genesis/packages/utils" + "github.com/GenesisKernel/go-genesis/packages/vdemanager" "github.com/julienschmidt/httprouter" log "github.com/sirupsen/logrus" @@ -181,15 +183,6 @@ func initRoutes(listenHost string) { httpListener(listenHost, route) } -func logBlockchainMode() { - mode := "private" - if !conf.Config.PrivateBlockchain { - mode = "non private" - } - - log.WithFields(log.Fields{"mode": mode}).Error("Node running mode") -} - // Start starts the main code of the program func Start() { var err error @@ -218,7 +211,7 @@ func Start() { } } - logBlockchainMode() + log.WithFields(log.Fields{"mode": conf.Config.RunningMode}).Info("Node running mode") f := utils.LockOrDie(conf.Config.LockFilePath) defer f.Unlock() @@ -259,22 +252,35 @@ func Start() { os.Exit(1) } - var availableBCGap int64 = consts.AvailableBCGap - if syspar.GetRbBlocks1() > consts.AvailableBCGap { - availableBCGap = syspar.GetRbBlocks1() - consts.AvailableBCGap - } + if !conf.Config.IsSupportingVDE() { + var availableBCGap int64 = consts.AvailableBCGap + if syspar.GetRbBlocks1() > consts.AvailableBCGap { + availableBCGap = syspar.GetRbBlocks1() - consts.AvailableBCGap + } - blockGenerationDuration := time.Millisecond * time.Duration(syspar.GetMaxBlockGenerationTime()) - blocksGapDuration := time.Second * time.Duration(syspar.GetGapsBetweenBlocks()) - blockGenerationTime := blockGenerationDuration + blocksGapDuration + blockGenerationDuration := time.Millisecond * time.Duration(syspar.GetMaxBlockGenerationTime()) + blocksGapDuration := time.Second * time.Duration(syspar.GetGapsBetweenBlocks()) + blockGenerationTime := blockGenerationDuration + blocksGapDuration - checkingInterval := blockGenerationTime * time.Duration(syspar.GetRbBlocks1()-consts.DefaultNodesConnectDelay) - na := service.NewNodeRelevanceService(availableBCGap, checkingInterval) - na.Run() + checkingInterval := blockGenerationTime * time.Duration(syspar.GetRbBlocks1()-consts.DefaultNodesConnectDelay) + na := service.NewNodeRelevanceService(availableBCGap, checkingInterval) + na.Run() - err = service.InitNodesBanService() - if err != nil { - log.WithError(err).Fatal("Can't init ban service") + err = service.InitNodesBanService() + if err != nil { + log.WithError(err).Fatal("Can't init ban service") + } + } + + if conf.Config.IsSupportingVDE() { + if err := smart.LoadVDEContracts(nil, converter.Int64ToStr(consts.DefaultVDE)); err != nil { + log.WithFields(log.Fields{"type": consts.VMError, "error": err}).Fatal("on loading vde virtual mashine") + Exit(1) + } + } + + if conf.Config.IsVDEMaster() { + vdemanager.InitVDEManager() } } diff 
--git a/packages/migration/applications_data.go b/packages/migration/applications_data.go new file mode 100644 index 000000000..aae793344 --- /dev/null +++ b/packages/migration/applications_data.go @@ -0,0 +1,3 @@ +package migration + +var applicationsDataSQL = `INSERT INTO "%[1]d_applications" (id, name, conditions) VALUES (1, 'System', 'ContractConditions("MainCondition")');` diff --git a/packages/migration/blocks_data.go b/packages/migration/blocks_data.go index 7a2525dd2..fd4414167 100644 --- a/packages/migration/blocks_data.go +++ b/packages/migration/blocks_data.go @@ -1,63 +1,69 @@ package migration -var blocksDataSQL = `INSERT INTO "%[1]d_blocks" (id, name, value, conditions) -VALUES -('1', 'admin_link', - 'If(#sort#==1){ - SetVar(sort_name, "id asc") - }.ElseIf(#sort#==2){ - SetVar(sort_name, "id desc") - }.ElseIf(#sort#==3){ - SetVar(sort_name, "name asc") - }.ElseIf(#sort#==4){ - SetVar(sort_name, "name desc") - }.Else{ - SetVar(sort, "1") - SetVar(sort_name, "id asc") - } - - If(Or(#width#==12,#width#==6,#width#==4)){ - }.Else{ - SetVar(width, "12") - } - +var blocksDataSQL = `INSERT INTO "%[1]d_blocks" (id, name, value, conditions) VALUES + (1, 'admin_link', 'If(#sort#==1){ + SetVar(sort_name, "id asc") +}.ElseIf(#sort#==2){ + SetVar(sort_name, "id desc") +}.ElseIf(#sort#==3){ + SetVar(sort_name, "name asc") +}.ElseIf(#sort#==4){ + SetVar(sort_name, "name desc") +}.Else{ + SetVar(sort, "1") + SetVar(sort_name, "id asc") +} + +If(Or(#width#==12,#width#==6,#width#==4)){ +}.Else{ + SetVar(width, "12") +} + +Form(){ Div(clearfix){ Div(pull-left){ - If(#width#==12){ - Span(Button(Body: Em(Class: fa fa-bars), Class: btn bg-gray-lighter, Page: #admin_page#, PageParams: "sort=#sort#,width=12,current_page=#current_page#")).Style(margin-left:10px;) - }.Else{ - Span(Button(Body: Em(Class: fa fa-bars), Class: btn bg-gray, Page: #admin_page#, PageParams: "sort=#sort#,width=12,current_page=#current_page#")).Style(margin-left:10px;) - } - If(#width#==6){ - Span(Button(Body: Em(Class: fa fa-th-large), Class: btn bg-gray-lighter, Page: #admin_page#, PageParams: "sort=#sort#,width=6,current_page=#current_page#")).Style(margin-left:5px;) - }.Else{ - Span(Button(Body: Em(Class: fa fa-th-large), Class: btn bg-gray, Page: #admin_page#, PageParams: "sort=#sort#,width=6,current_page=#current_page#")).Style(margin-left:5px;) - } - If(#width#==4){ - Span(Button(Body: Em(Class: fa fa-th), Class: btn bg-gray-lighter, Page: #admin_page#, PageParams: "sort=#sort#,width=4,current_page=#current_page#")).Style(margin-left:5px;) - }.Else{ - Span(Button(Body: Em(Class: fa fa-th), Class: btn bg-gray, Page: #admin_page#, PageParams: "sort=#sort#,width=4,current_page=#current_page#")).Style(margin-left:5px;) - } + DBFind(applications,apps) + Select(Name:AppId, Source:apps, NameColumn: name, ValueColumn: id, Value: #buffer_value_app_id#, Class: bg-gray) + } + Div(pull-left){ + Span(Button(Body: Em(Class: fa fa-play), Class: btn bg-gray, Page: #admin_page#, PageParams: "sort=#sort#,width=#width#,current_page=#current_page#", Contract: @1ExportNewApp, Params: "ApplicationId=Val(AppId)")).Style(margin-left:3px;) } Div(pull-right){ If(#sort#==1){ - Span(Button(Body: Em(Class: fa fa-long-arrow-down) Sort by ID, Class: btn bg-gray-lighter, Page: #admin_page#, PageParams: "sort=2,width=#width#,current_page=#current_page#")).Style(margin-right:5px;) + Span(Button(Body: Em(Class: fa fa-long-arrow-down) Sort by ID, Class: btn bg-gray-lighter, Page: #admin_page#, PageParams: 
"sort=2,width=#width#,current_page=#current_page#")).Style(margin-left:5px;) }.ElseIf(#sort#==2){ - Span(Button(Body: Em(Class: fa fa-long-arrow-up) Sort by ID, Class: btn bg-gray-lighter, Page: #admin_page#, PageParams: "sort=1,width=#width#,current_page=#current_page#")).Style(margin-right:5px;) + Span(Button(Body: Em(Class: fa fa-long-arrow-up) Sort by ID, Class: btn bg-gray-lighter, Page: #admin_page#, PageParams: "sort=1,width=#width#,current_page=#current_page#")).Style(margin-left:5px;) }.Else{ - Span(Button(Body: Sort by ID, Class: btn bg-gray, Page: #admin_page#, PageParams: "sort=1,width=#width#,current_page=#current_page#")).Style(margin-right:5px;) + Span(Button(Body: Sort by ID, Class: btn bg-gray, Page: #admin_page#, PageParams: "sort=1,width=#width#,current_page=#current_page#")).Style(margin-left:5px;) } - If(#sort#==3){ - Span(Button(Body: Em(Class: fa fa-long-arrow-down) Sort by NAME, Class: btn bg-gray-lighter, Page: #admin_page#, PageParams: "sort=4,width=#width#,current_page=#current_page#")).Style(margin-right:10px;) + Span(Button(Body: Em(Class: fa fa-long-arrow-down) Sort by NAME, Class: btn bg-gray-lighter, Page: #admin_page#, PageParams: "sort=4,width=#width#,current_page=#current_page#")).Style(margin-left:5px;) }.ElseIf(#sort#==4){ - Span(Button(Body: Em(Class: fa fa-long-arrow-up) Sort by NAME, Class: btn bg-gray-lighter, Page: #admin_page#, PageParams: "sort=3,width=#width#,current_page=#current_page#")).Style(margin-right:10px;) + Span(Button(Body: Em(Class: fa fa-long-arrow-up) Sort by NAME, Class: btn bg-gray-lighter, Page: #admin_page#, PageParams: "sort=3,width=#width#,current_page=#current_page#")).Style(margin-left:5px;) }.Else{ - Span(Button(Body: Sort by NAME, Class: btn bg-gray, Page: #admin_page#, PageParams: "sort=3,width=#width#,current_page=#current_page#")).Style(margin-right:10px;) + Span(Button(Body: Sort by NAME, Class: btn bg-gray, Page: #admin_page#, PageParams: "sort=3,width=#width#,current_page=#current_page#")).Style(margin-left:5px;) } } - }', 'ContractConditions("MainCondition")'), -('2', 'export_info', 'DBFind(Name: buffer_data, Source: src_buffer).Columns("value->app_id,value->app_name,value->menu_name,value->menu_id,value->count_menu").Where("key=''export'' and member_id=#key_id#").Vars(buffer) + Div(pull-right){ + If(#width#==12){ + Span(Button(Body: Em(Class: fa fa-bars), Class: btn bg-gray-lighter, Page: #admin_page#, PageParams: "sort=#sort#,width=12,current_page=#current_page#")).Style(margin-right:5px;) + }.Else{ + Span(Button(Body: Em(Class: fa fa-bars), Class: btn bg-gray, Page: #admin_page#, PageParams: "sort=#sort#,width=12,current_page=#current_page#")).Style(margin-right:5px;) + } + If(#width#==6){ + Span(Button(Body: Em(Class: fa fa-th-large), Class: btn bg-gray-lighter, Page: #admin_page#, PageParams: "sort=#sort#,width=6,current_page=#current_page#")).Style(margin-right:5px;) + }.Else{ + Span(Button(Body: Em(Class: fa fa-th-large), Class: btn bg-gray, Page: #admin_page#, PageParams: "sort=#sort#,width=6,current_page=#current_page#")).Style(margin-right:5px;) + } + If(#width#==4){ + Span(Button(Body: Em(Class: fa fa-th), Class: btn bg-gray-lighter, Page: #admin_page#, PageParams: "sort=#sort#,width=4,current_page=#current_page#")).Style(margin-right:5px;) + }.Else{ + Span(Button(Body: Em(Class: fa fa-th), Class: btn bg-gray, Page: #admin_page#, PageParams: "sort=#sort#,width=4,current_page=#current_page#")).Style(margin-right:5px;) + } + } + } +}', 'ContractConditions("MainCondition")'), + (2, 'export_info', 
'DBFind(Name: buffer_data, Source: src_buffer).Columns("value->app_id,value->app_name,value->menu_name,value->menu_id,value->count_menu").Where("key=''export'' and member_id=#key_id#").Vars(buffer) If(#buffer_value_app_id# > 0){ DBFind(pages, src_pages).Where("app_id=#buffer_value_app_id#").Limit(250).Order("name").Count(count_pages) @@ -251,16 +257,16 @@ Div(panel panel-primary){ If(#buffer_value_app_id# > 0){ Div(panel-footer clearfix){ Div(pull-left){ - Button(Body: Em(Class: fa fa-refresh), Class: btn btn-default, Contract: Export_NewApp, Params: "app_id=#buffer_value_app_id#", Page: export_resources) + Button(Body: Em(Class: fa fa-refresh), Class: btn btn-default, Contract: @1ExportNewApp, Params: "ApplicationId=#buffer_value_app_id#", Page: export_resources) } Div(pull-right){ - Button(Body: Export, Class: btn btn-primary, Page: export_download, Contract: Export) + Button(Body: Export, Class: btn btn-primary, Page: export_download, Contract: @1Export) } } } } }', 'ContractConditions("MainCondition")'), -('3', 'export_link', 'If(And(#res_type#!="pages",#res_type#!="blocks",#res_type#!="menu",#res_type#!="parameters",#res_type#!="languages",#res_type#!="contracts",#res_type#!="tables")){ + (3, 'export_link', 'If(And(#res_type#!="pages",#res_type#!="blocks",#res_type#!="menu",#res_type#!="parameters",#res_type#!="languages",#res_type#!="contracts",#res_type#!="tables")){ SetVar(res_type, "pages") } @@ -307,8 +313,8 @@ Div(breadcrumb){ LinkPage(Body: "Tables", Page: export_resources,, "res_type=tables") } }', 'ContractConditions("MainCondition")'), -('4', 'pager', 'DBFind(#pager_table#, src_records).Where(#pager_where#).Count(records_count) - + (4, 'pager', 'DBFind(#pager_table#, src_records).Where(#pager_where#).Count(records_count) + SetVar(previous_page, Calculate(Exp: #current_page# - 1, Type: int)) SetVar(next_page, Calculate(Exp: #current_page# + 1, Type: int)) SetVar(count_div_limit_int, Calculate(Exp: (#records_count# / #pager_limit#), Type: int)) @@ -361,9 +367,9 @@ Div(){ ForList(src_pages){ Span(){ If(#id# == #current_page#){ - Button(Class: btn btn-primary float-left, Page: #pager_page#, PageParams: "current_page=#id#,sort=#sort#,width=#width#", Body: #id#) + Button(Body: #id#, Class: btn btn-primary float-left, Page: #pager_page#, PageParams: "current_page=#id#,sort=#sort#,width=#width#") }.Else{ - Button(Class: btn btn-default float-left, Page: #pager_page#, PageParams: "current_page=#id#,sort=#sort#,width=#width#", Body: #id#) + Button(Body: #id#, Class: btn btn-default float-left, Page: #pager_page#, PageParams: "current_page=#id#,sort=#sort#,width=#width#") } } } @@ -382,7 +388,7 @@ Div(){ } } }.Style("div {display:inline-block;}")', 'ContractConditions("MainCondition")'), -('5', 'pager_header', 'If(#current_page# > 0){}.Else{ + (5, 'pager_header', 'If(#current_page# > 0){}.Else{ SetVar(current_page, 1) } SetVar(pager_offset, Calculate(Exp: (#current_page# - 1) * #pager_limit#, Type: int)) diff --git a/packages/migration/data.go b/packages/migration/data.go index 53bca4f3c..02e512f57 100644 --- a/packages/migration/data.go +++ b/packages/migration/data.go @@ -121,7 +121,9 @@ var ( ); ALTER SEQUENCE rollback_tx_id_seq owned by rollback_tx.id; ALTER TABLE ONLY "rollback_tx" ADD CONSTRAINT rollback_tx_pkey PRIMARY KEY (id); - + CREATE INDEX "rollback_tx_table" ON "rollback_tx" (table_name, table_id); + + DROP TABLE IF EXISTS "install"; CREATE TABLE "install" ( "progress" varchar(10) NOT NULL DEFAULT '' ); diff --git a/packages/migration/ecosystem.go 
b/packages/migration/ecosystem.go index fdb0d45ec..9f7afffb4 100644 --- a/packages/migration/ecosystem.go +++ b/packages/migration/ecosystem.go @@ -16,6 +16,7 @@ func GetEcosystemScript() string { rolesDataSQL, sectionsDataSQL, tablesDataSQL, + applicationsDataSQL, } return strings.Join(scripts, "\r\n") @@ -30,7 +31,6 @@ func GetFirstEcosystemScript() string { firstEcosystemDataSQL, firstSystemParametersDataSQL, firstTablesDataSQL, - applicationsDataSQL, } return strings.Join(scripts, "\r\n") @@ -129,7 +129,7 @@ var schemaEcosystem = `DROP TABLE IF EXISTS "%[1]d_keys"; CREATE TABLE "%[1]d_ke CREATE TABLE "%[1]d_contracts" ( "id" bigint NOT NULL DEFAULT '0', - "name" text NOT NULL DEFAULT '', + "name" text NOT NULL UNIQUE DEFAULT '', "value" text NOT NULL DEFAULT '', "wallet_id" bigint NOT NULL DEFAULT '0', "token_id" bigint NOT NULL DEFAULT '1', @@ -202,6 +202,7 @@ var schemaEcosystem = `DROP TABLE IF EXISTS "%[1]d_keys"; CREATE TABLE "%[1]d_ke "date_created" timestamp, "date_deleted" timestamp, "company_id" bigint NOT NULL DEFAULT '0', + "roles_access" jsonb, "image_id" bigint NOT NULL DEFAULT '0' ); ALTER TABLE ONLY "%[1]d_roles" ADD CONSTRAINT "%[1]d_roles_pkey" PRIMARY KEY ("id"); diff --git a/packages/migration/first_applications_data.go b/packages/migration/first_applications_data.go deleted file mode 100644 index e41b11aac..000000000 --- a/packages/migration/first_applications_data.go +++ /dev/null @@ -1,3 +0,0 @@ -package migration - -var applicationsDataSQL = `INSERT INTO "1_applications" (id, name, conditions) VALUES (1, 'System', 'ContractConditions("MainCondition")');` diff --git a/packages/migration/first_ecosys_contracts_data.go b/packages/migration/first_ecosys_contracts_data.go index c5da1d4f3..cafe7cb85 100644 --- a/packages/migration/first_ecosys_contracts_data.go +++ b/packages/migration/first_ecosys_contracts_data.go @@ -3,242 +3,159 @@ package migration var firstEcosystemContractsSQL = ` INSERT INTO "1_contracts" (id, name, value, wallet_id, conditions, app_id) VALUES ('2', 'DelApplication', 'contract DelApplication { + data { + ApplicationId int + Value int "optional" + } + + conditions { + if $Value < 0 || $Value > 1 { + error "Incorrect value" + } + RowConditions("applications", $ApplicationId, false) + } + + action { + DBUpdate("applications", $ApplicationId, "deleted", $Value) + } + }', %[1]d, 'ContractConditions("MainCondition")', 1), +('3', 'EditAppParam', 'contract EditAppParam { data { - ApplicationId int - Value int "optional" + Id int + Value string "optional" + Conditions string "optional" + } + func onlyConditions() bool { + return $Conditions && !$Value } conditions { - RowConditions("applications", $ApplicationId, false) + RowConditions("app_params", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } } action { - if $Value == 1 { - DBUpdate("applications", $ApplicationId, "deleted", 1) + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Conditions { + pars = Append(pars, "conditions") + vals = Append(vals, $Conditions) + } + if Len(vals) > 0 { + DBUpdate("app_params", $Id, Join(pars, ","), vals...) 
} - else { - DBUpdate("applications", $ApplicationId, "deleted", 0) - } } }', %[1]d, 'ContractConditions("MainCondition")', 1), -('3', 'EditAppParam', 'contract EditAppParam { - data { - Id int - Value string "optional" - Conditions string "optional" - } - func onlyConditions() bool { - return $Conditions && !$Value - } - - conditions { - RowConditions("app_params", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - } - - action { - var pars, vals array - if $Value { - pars[0] = "value" - vals[0] = $Value - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if Len(vals) > 0 { - DBUpdate("app_params", $Id, Join(pars, ","), vals...) - } - } -}', %[1]d, 'ContractConditions("MainCondition")', 1), ('4', 'EditApplication', 'contract EditApplication { data { ApplicationId int Conditions string "optional" } - func onlyConditions() bool { - return $Conditions && false - } + func onlyConditions() bool { + return $Conditions && false + } conditions { - RowConditions("applications", $ApplicationId, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } + RowConditions("applications", $ApplicationId, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } } action { - var pars, vals array - if $Conditions { - pars[0] = "conditions" - vals[0] = $Conditions - } - if Len(vals) > 0 { - DBUpdate("applications", $ApplicationId, Join(pars, ","), vals...) - } + var pars, vals array + if $Conditions { + pars[0] = "conditions" + vals[0] = $Conditions + } + if Len(vals) > 0 { + DBUpdate("applications", $ApplicationId, Join(pars, ","), vals...) + } } }', %[1]d, 'ContractConditions("MainCondition")', 1), ('5', 'EditColumn', 'contract EditColumn { - data { - TableName string - Name string - Permissions string - } - - conditions { - ColumnCondition($TableName, $Name, "", $Permissions) - } - - action { - PermColumn($TableName, $Name, $Permissions) - } + data { + TableName string + Name string + Permissions string + } + + conditions { + ColumnCondition($TableName, $Name, "", $Permissions) + } + + action { + PermColumn($TableName, $Name, $Permissions) + } }', %[1]d, 'ContractConditions("MainCondition")', 1), ('6', 'EditLang', 'contract EditLang { - data { - Id int - Name string "optional" - ApplicationId int "optional" - Trans string "optional" - Value array "optional" - IdLanguage array "optional" - } - - conditions { - var j int - while j < Len($IdLanguage) { - if ($IdLanguage[j] == ""){ - info("Locale empty") - } - if ($Value[j] == ""){ - info("Value empty") - } - j = j + 1 - } - EvalCondition("parameters", "changing_language", "value") - } - - action { - var i,len int - var res,langarr string - len = Len($IdLanguage) - while i < len { - if (i + 1 == len){ - res = res + Sprintf("%%q: %%q", $IdLanguage[i],$Value[i]) - } - else { - res = res + Sprintf("%%q: %%q, ", $IdLanguage[i],$Value[i]) - } - i = i + 1 - } - - $row = DBFind("languages").Columns("name,app_id").WhereId($Id).Row() - if !$row{ - warning "Language not found" - } + data { + Id int + Trans string + } - if $ApplicationId == 0 { - $ApplicationId = Int($row["app_id"]) - } - if $Name == "" { - $Name = $row["name"] - } + conditions { + EvalCondition("parameters", "changing_language", "value") + $lang = DBFind("languages").Where("id=?", $Id).Row() + } - if (len > 0){ - langarr = Sprintf("{"+"%%v"+"}", res) - $Trans = langarr - - } - EditLanguage($Id, $Name, $Trans, $ApplicationId) - } + action { + 
EditLanguage($Id, $lang["name"], $Trans, Int($lang["app_id"])) + } }', %[1]d, 'ContractConditions("MainCondition")', 1), ('7', 'EditParameter', 'contract EditParameter { - data { - Id int - Value string "optional" - Conditions string "optional" - } - func onlyConditions() bool { - return $Conditions && !$Value - } + data { + Id int + Value string "optional" + Conditions string "optional" + } + func onlyConditions() bool { + return $Conditions && !$Value + } - conditions { - RowConditions("parameters", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - } - - action { - var pars, vals array - if $Value { - pars[0] = "value" - vals[0] = $Value - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if Len(vals) > 0 { - DBUpdate("parameters", $Id, Join(pars, ","), vals...) - } - } + conditions { + RowConditions("parameters", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + } + + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Conditions { + pars = Append(pars, "conditions") + vals = Append(vals, $Conditions) + } + if Len(vals) > 0 { + DBUpdate("parameters", $Id, Join(pars, ","), vals...) + } + } }', %[1]d, 'ContractConditions("MainCondition")', 1), ('8', 'NewTable', 'contract NewTable { data { - ApplicationId int "optional" - Name string "optional" - Columns string "optional" - Permissions string "optional" - TableName string "optional" - Id array "optional" - Shareholding array "optional" - Insert_con string "optional" - Update_con string "optional" - New_column_con string "optional" + ApplicationId int + Name string + Columns string + Permissions string } conditions { - if Size($Name) == 0 { - error "Table name cannot be empty" - } - if $ApplicationId == 0 { warning "Application id cannot equal 0" } - } + TableConditions($Name, $Columns, $Permissions) + } action { - if Size($Name) > 0 && Size($Columns) > 0 && Size($Permissions) > 0{ - TableConditions($Name, $Columns, $Permissions) - CreateTable($Name, $Columns, $Permissions, $ApplicationId) - } else { - var i,len int - var res string - len = Len($Id) - - if Size($TableName) == 0 { - error "Table name cannot be empty" - } - - while i < len { - if i + 1 == len { - res = res + Sprintf("{\"name\":%%q,\"type\":%%q,\"conditions\":\"true\"}", $Id[i],$Shareholding[i]) - } - else { - res = res + Sprintf("{\"name\":%%q,\"type\":%%q,\"conditions\":\"true\"},", $Id[i],$Shareholding[i]) - } - i = i + 1 - } - - $Name = $TableName - $Columns = Sprintf("["+"%%v"+"]", res) - if !$Permissions { - $Permissions = Sprintf("{\"insert\":%%q,\"update\":%%q,\"new_column\":%%q}",$Insert_con,$Update_con,$New_column_con) - } - TableConditions($Name, $Columns, $Permissions) - CreateTable($Name, $Columns, $Permissions, $ApplicationId) - } + CreateTable($Name, $Columns, $Permissions, $ApplicationId) } func rollback() { RollbackTable($Name) @@ -249,7 +166,7 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { }', %[1]d, 'ContractConditions("MainCondition")', 1), ('9', 'UploadBinary', 'contract UploadBinary { data { - ApplicationId int "optional" + ApplicationId int Name string Data bytes "file" DataMimeType string "optional" @@ -257,12 +174,12 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { conditions { $Id = Int(DBFind("binaries").Columns("id").Where("app_id = ? AND member_id = ? 
AND name = ?", $ApplicationId, $key_id, $Name).One("id")) - - if $Id == 0 { - if $ApplicationId == 0 { - warning "Application id cannot equal 0" - } - } + + if $Id == 0 { + if $ApplicationId == 0 { + warning "Application id cannot equal 0" + } + } } action { var hash string @@ -279,195 +196,114 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { } $result = $Id - } +} }', %[1]d, 'ContractConditions("MainCondition")', 1), ('10', 'Export', 'contract Export { + data {} - func ReplaceValue(s string) string { - s = Replace(s, ` + "`" + `\` + "`" + `, ` + "`" + `\\` + "`" + `) + func escapeSpecials(s string) string { + s = Replace(s, ` + "`" + `\` + "`" + `, ` + "`" + `\\` + "`" + `) s = Replace(s, ` + "`" + ` ` + "`" + `, ` + "`" + `\t` + "`" + `) s = Replace(s, "\n", ` + "`" + `\n` + "`" + `) s = Replace(s, "\r", ` + "`" + `\r` + "`" + `) s = Replace(s, ` + "`" + `"` + "`" + `, ` + "`" + `\"` + "`" + `) - return s - } - - func AssignAll(app_name string, all_blocks string, all_contracts string, all_data string, all_languages string, all_menus string, all_pages string, all_parameters string, all_tables string) string { - - var res_str string - res_str = res_str + all_blocks - - if Size(res_str)>0 && Size(all_contracts)>0 { - res_str = res_str + "," - } - res_str = res_str + all_contracts - - if Size(res_str)>0 && Size(all_data)>0 { - res_str = res_str + "," - } - res_str = res_str + all_data - - if Size(res_str)>0 && Size(all_languages)>0 { - res_str = res_str + "," - } - res_str = res_str + all_languages - - if Size(res_str)>0 && Size(all_menus)>0 { - res_str = res_str + "," - } - res_str = res_str + all_menus - - if Size(res_str)>0 && Size(all_pages)>0 { - res_str = res_str + "," - } - res_str = res_str + all_pages - - if Size(res_str)>0 && Size(all_parameters)>0 { - res_str = res_str + "," - } - res_str = res_str + all_parameters - - if Size(res_str)>0 && Size(all_tables)>0 { - res_str = res_str + "," + if s == "0"{ + s = "" } - res_str = res_str + all_tables - - res_str = Sprintf(` + "`" + `{ - "name": "%%v", - "data": [%%v - ] -}` + "`" + `, app_name, res_str) - - return res_str - } - - func AddPage(page_name string, page_value string, page_conditions string, page_menu string) string { - var s string - s = Sprintf(` + "`" + ` { - "Type": "pages", - "Name": "%%v", - "Value": "%%v", - "Conditions": "%%v", - "Menu": "%%v" - }` + "`" + `, page_name, page_value, page_conditions, page_menu) - return s - } - - func AddMenu(menu_name string, menu_value string, menu_title string, menu_conditions string) string { - var s string - s = Sprintf(` + "`" + ` { - "Type": "menu", - "Name": "%%v", - "Value": "%%v", - "Title": "%%v", - "Conditions": "%%v" - }` + "`" + `, menu_name, menu_value, menu_title, menu_conditions) - return s - } - - func AddContract(contract_name string, contract_value string, contract_conditions string) string { - var s string - s = Sprintf(` + "`" + ` { - "Type": "contracts", - "Name": "%%v", - "Value": "%%v", - "Conditions": "%%v" - }` + "`" + `, contract_name, contract_value, contract_conditions) return s } - func AddBlock(block_name string, block_value string, block_conditions string) string { - var s string - s = Sprintf(` + "`" + ` { - "Type": "blocks", - "Name": "%%v", - "Value": "%%v", - "Conditions": "%%v" - }` + "`" + `, block_name, block_value, block_conditions) - return s - } - - func AddLanguage(language_name string, language_conditions string, language_trans string) string { - var s string - s = Sprintf(` + "`" + ` { - "Type": "languages", - "Name": "%%v", - 
"Conditions": "%%v", - "Trans": "%%v" - }` + "`" + `, language_name, language_conditions, language_trans) - return s + func AssignAll(app_name string, resources string) string { + return Sprintf(` + "`" + `{ + "name": "%%v", + "data": [ + %%v + ] + }` + "`" + `, app_name, resources) } - func AddParameter(parameter_name string, parameter_value string, parameter_conditions string) string { + func serializeItem(item map, type string) string { var s string - s = Sprintf(` + "`" + ` { - "Type": "app_params", - "Name": "%%v", - "Value": "%%v", - "Conditions": "%%v" - }` + "`" + `, parameter_name, parameter_value, parameter_conditions) + s = Sprintf( + ` + "`" + `{ + "Type": "%%v", + "Name": "%%v", + "Value": "%%v", + "Conditions": "%%v", + "Menu": "%%v", + "Title": "%%v", + "Trans": "%%v", + "Columns": "%%v", + "Permissions": "%%v" + }` + "`" + `, type, escapeSpecials(Str(item["name"])), escapeSpecials(Str(item["value"])), escapeSpecials(Str(item["conditions"])), escapeSpecials(Str(item["menu"])), escapeSpecials(Str(item["title"])), escapeSpecials(Str(item["res"])), escapeSpecials(Str(item["columns"])), escapeSpecials(Str(item["permissions"])) + ) return s } - func AddTable(table_name string, table_columns string, table_permissions string) string { - var s string - s = Sprintf(` + "`" + ` { - "Type": "tables", - "Name": "%%v", - "Columns": "%%v", - "Permissions": "%%v" - }` + "`" + `, table_name, table_columns, table_permissions) - return s + func getTypeForColumns(table_name string, columnsJSON string) string { + var colsMap map, result columns array + colsMap = JSONDecode(columnsJSON) + columns = GetMapKeys(colsMap) + var i int + while i < Len(columns){ + if Size(columns[i]) > 0 { + var col map + col["name"] = columns[i] + col["conditions"] = colsMap[col["name"]] + col["type"] = GetColumnType(table_name, col["name"]) + result = Append(result, col) + } + i = i + 1 + } + return JSONEncode(result) } - func AddTypeForColumns(table_name string, table_columns string) string { - var result string - - table_columns = Replace(table_columns, "{", "") - table_columns = Replace(table_columns, "}", "") - table_columns = Replace(table_columns, " ", "") - - var columns_arr array - columns_arr = Split(table_columns, ",") - - var i int - while (i < Len(columns_arr)){ - var s_split string - s_split = Str(columns_arr[i]) - - if Size(s_split) > 0 { - var clm array - clm = Split(s_split, ":") - - var s string - - if Len(clm) == 2 { - var col_name string - var col_cond string - var col_type string - - col_name = Replace(Str(clm[0]), ` + "`" + `"` + "`" + `, "") - col_cond = Str(clm[1]) - col_type = GetColumnType(table_name, col_name) - - s = Sprintf(` + "`" + `{"name":"%%v","type":"%%v","conditions":%%v}` + "`" + `, col_name, col_type, col_cond) - } - - if Size(result) > 0 { - result = result + "," - } - result = result + s - } - i = i + 1 - } - - result = Sprintf("[%%v]", result) - return result + func exportTable(type string, result array) array { + var items array, limit offset int + limit = 250 + while true{ + var rows array, where string + if type == "menu" { + if Len($menus_names) > 0 { + where = Sprintf("name in (%%v)", Join($menus_names, ",")) + } + }else{ + where = Sprintf("app_id=%%v", $ApplicationID) + } + if where { + rows = DBFind(type).Limit(limit).Offset(offset).Where(where) + } + if Len(rows) > 0{ + var i int + while iapp_id,value->app_name").Where("member_id=$ and key=$", $key_id, "export").Row() @@ -476,338 +312,135 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { } $ApplicationID = 
Int(buffer_map["value.app_id"]) $ApplicationName = Str(buffer_map["value.app_name"]) + + var menus_names array + $menus_names = menus_names } action { - //warning $ApplicationID - - var full_result string - var i int + var exportJSON string, items array + items = exportTable("pages", items) + items = exportTable("contracts", items) + items = exportTable("blocks", items) + items = exportTable("languages", items) + items = exportTable("app_params", items) + items = exportTable("tables", items) + items = exportTable("menu", items) + + exportJSON = AssignAll($ApplicationName, Join(items, ",\r\n")) + UploadBinary("Name,Data,ApplicationId,DataMimeType", "export", exportJSON, 1, "application/json") + } +}', %[1]d, 'ContractConditions("MainCondition")', 1), +('11', 'EditTable', 'contract EditTable { + data { + Name string + InsertPerm string + UpdatePerm string + NewColumnPerm string + } - var all_blocks string - var all_contracts string - var all_data string - var all_languages string - var all_menus string - var all_pages string - var all_parameters string - var all_tables string + conditions { + if !$InsertPerm { + info("Insert condition is empty") + } + if !$UpdatePerm { + info("Update condition is empty") + } + if !$NewColumnPerm { + info("New column condition is empty") + } - //===================================================================================================== - //------------------------------------Export pages----------------------------------------------------- - var string_for_menu string + var permissions map + permissions["insert"] = $InsertPerm + permissions["update"] = $UpdatePerm + permissions["new_column"] = $NewColumnPerm + $Permissions = permissions + TableConditions($Name, "", JSONEncode($Permissions)) + } - i = 0 - var pages_array array - pages_array = DBFind("pages").Limit(250).Where("app_id=?", $ApplicationID) - while i < Len(pages_array) { - var page_map map - page_map = pages_array[i] - - var page_name string - var page_value string - var page_conditions string - var page_menu string - - page_name = ReplaceValue(Str(page_map["name"])) - page_value = ReplaceValue(Str(page_map["value"])) - page_conditions = ReplaceValue(Str(page_map["conditions"])) - page_menu = ReplaceValue(Str(page_map["menu"])) - - if Size(all_pages) > 0 { - all_pages = all_pages + ",\r\n" - } else { - all_pages = all_pages + "\r\n" - } + action { + PermTable($Name, JSONEncode($Permissions)) + } +}', %[1]d, 'ContractConditions("MainCondition")', 1), +('12', 'ImportUpload', 'contract ImportUpload { + data { + input_file string "file" + } + func ReplaceValue(s string) string { + s = Replace(s, "#ecosystem_id#", "#IMPORT_ECOSYSTEM_ID#") + s = Replace(s, "#key_id#", "#IMPORT_KEY_ID#") + s = Replace(s, "#isMobile#", "#IMPORT_ISMOBILE#") + s = Replace(s, "#role_id#", "#IMPORT_ROLE_ID#") + s = Replace(s, "#ecosystem_name#", "#IMPORT_ECOSYSTEM_NAME#") + s = Replace(s, "#app_id#", "#IMPORT_APP_ID#") + return s + } - if Size(string_for_menu) > 0 { - string_for_menu = string_for_menu + "," - } - string_for_menu = string_for_menu + Sprintf("''%%v''", page_menu) + conditions { + $input_file = BytesToString($input_file) + $input_file = ReplaceValue($input_file) + $limit = 5 // data piece size of import - all_pages = all_pages + AddPage(page_name, page_value, page_conditions, page_menu) - i = i + 1 + // init buffer_data, cleaning old buffer + var initJson map + $import_id = DBFind("buffer_data").Where("member_id=$ and key=$", $key_id, "import").One("id") + if $import_id { + $import_id = Int($import_id) 
+ DBUpdate("buffer_data", $import_id, "value", initJson) + } else { + $import_id = DBInsert("buffer_data", "member_id,key,value", $key_id, "import", initJson) } - //===================================================================================================== - //------------------------------------Export menus----------------------------------------------------- - if Size(string_for_menu) > 0 { + $info_id = DBFind("buffer_data").Where("member_id=$ and key=$", $key_id, "import_info").One("id") + if $info_id { + $info_id = Int($info_id) + DBUpdate("buffer_data", $info_id, "value", initJson) + } else { + $info_id = DBInsert("buffer_data", "member_id,key,value", $key_id, "import_info", initJson) + } + } - var where_for_menu string - where_for_menu = Sprintf("name in (%%v)", string_for_menu) - //warning where_for_menu + action { + var input map + input = JSONDecode($input_file) + var arr_data array + arr_data = input["data"] - i = 0 - var menus_array array - menus_array = DBFind("menu").Limit(250).Where(where_for_menu) - while i < Len(menus_array) { - var menu_map map - menu_map = menus_array[i] + var pages_arr, blocks_arr, menu_arr, parameters_arr, languages_arr, contracts_arr, tables_arr array - var menu_name string - var menu_value string - var menu_title string - var menu_conditions string - - menu_name = ReplaceValue(Str(menu_map["name"])) - menu_value = ReplaceValue(Str(menu_map["value"])) - menu_title = ReplaceValue(Str(menu_map["title"])) - menu_conditions = ReplaceValue(Str(menu_map["conditions"])) - - if Size(all_menus) > 0 { - all_menus = all_menus + ",\r\n" - } else { - all_menus = all_menus + "\r\n" - } - - all_menus = all_menus + AddMenu(menu_name, menu_value, menu_title, menu_conditions) - i = i + 1 - } - - } - - //===================================================================================================== - //------------------------------------Export contracts------------------------------------------------- - - i = 0 - var contracts_array array - contracts_array = DBFind("contracts").Limit(250).Where("app_id=?", $ApplicationID) - while i < Len(contracts_array) { - var contract_map map - contract_map = contracts_array[i] - - var contract_name string - var contract_value string - var contract_conditions string - - contract_name = ReplaceValue(Str(contract_map["name"])) - contract_value = ReplaceValue(Str(contract_map["value"])) - contract_conditions = ReplaceValue(Str(contract_map["conditions"])) - - if Size(all_contracts) > 0 { - all_contracts = all_contracts + ",\r\n" - } else { - all_contracts = all_contracts + "\r\n" - } - - all_contracts = all_contracts + AddContract(contract_name, contract_value, contract_conditions) - i = i + 1 - } - - //===================================================================================================== - //------------------------------------Export blocks---------------------------------------------------- - - i = 0 - var blocks_array array - blocks_array = DBFind("blocks").Limit(250).Where("app_id=?", $ApplicationID) - while i < Len(blocks_array) { - var block_map map - block_map = blocks_array[i] - - var block_name string - var block_value string - var block_conditions string - - block_name = ReplaceValue(Str(block_map["name"])) - block_value = ReplaceValue(Str(block_map["value"])) - block_conditions = ReplaceValue(Str(block_map["conditions"])) - - if Size(all_blocks) > 0 { - all_blocks = all_blocks + ",\r\n" - } else { - all_blocks = all_blocks + "\r\n" - } - - all_blocks = all_blocks + AddBlock(block_name, 
block_value, block_conditions) - i = i + 1 - } - - //===================================================================================================== - //------------------------------------Export languages------------------------------------------------- - - i = 0 - var languages_array array - languages_array = DBFind("languages").Limit(250).Where("app_id=?", $ApplicationID) - while i < Len(languages_array) { - var language_map map - language_map = languages_array[i] - - var language_name string - var language_conditions string - var language_trans string - - language_name = ReplaceValue(Str(language_map["name"])) - language_conditions = ReplaceValue(Str(language_map["conditions"])) - language_trans = ReplaceValue(Str(language_map["res"])) - - if Size(all_languages) > 0 { - all_languages = all_languages + ",\r\n" - } else { - all_languages = all_languages + "\r\n" - } - - all_languages = all_languages + AddLanguage(language_name, language_conditions, language_trans) - i = i + 1 - } - - //===================================================================================================== - //------------------------------------Export params---------------------------------------------------- - - i = 0 - var parameters_array array - parameters_array = DBFind("app_params").Limit(250).Where("app_id=?", $ApplicationID) - while i < Len(parameters_array) { - var parameter_map map - parameter_map = parameters_array[i] - - var parameter_name string - var parameter_value string - var parameter_conditions string - - parameter_name = ReplaceValue(Str(parameter_map["name"])) - parameter_value = ReplaceValue(Str(parameter_map["value"])) - parameter_conditions = ReplaceValue(Str(parameter_map["conditions"])) - - if Size(all_parameters) > 0 { - all_parameters = all_parameters + ",\r\n" - } else { - all_parameters = all_parameters + "\r\n" - } - - all_parameters = all_parameters + AddParameter(parameter_name, parameter_value, parameter_conditions) - i = i + 1 - } - - //===================================================================================================== - //------------------------------------Export tables---------------------------------------------------- - - i = 0 - var tables_array array - tables_array = DBFind("tables").Limit(250).Where("app_id=?", $ApplicationID) - while i < Len(tables_array) { - var table_map map - table_map = tables_array[i] - - var table_name string - var table_columns string - var table_permissions string - - table_name = Str(table_map["name"]) - table_columns = Str(table_map["columns"]) - table_permissions = Str(table_map["permissions"]) - - table_columns = AddTypeForColumns(table_name, table_columns) - - table_name = ReplaceValue(table_name) - table_columns = ReplaceValue(table_columns) - table_permissions = ReplaceValue(table_permissions) - - if Size(all_tables) > 0 { - all_tables = all_tables + ",\r\n" - } else { - all_tables = all_tables + "\r\n" - } - - all_tables = all_tables + AddTable(table_name, table_columns, table_permissions) - i = i + 1 - } - - //===================================================================================================== - - full_result = AssignAll($ApplicationName, all_blocks, all_contracts, all_data, all_languages, all_menus, all_pages, all_parameters, all_tables) - UploadBinary("Name,Data,ApplicationId,DataMimeType", "export", full_result, 1, "application/json") - } -}', %[1]d, 'ContractConditions("MainCondition")', 1), -('11', 'EditTable', 'contract EditTable { - data { - Name string - Permissions string 
"optional" - Insert_con string "optional" - Update_con string "optional" - New_column_con string "optional" - } - - conditions { - if !$Permissions { - var permissions string - permissions = Sprintf("{\"insert\":%%q,\"update\":%%q,\"new_column\":%%q}",$Insert_con,$Update_con,$New_column_con) - $Permissions = permissions - } - TableConditions($Name, "", $Permissions) - } - - action { - PermTable($Name, $Permissions ) - } -}', %[1]d, 'ContractConditions("MainCondition")', 1), -('12', 'Import_Upload', 'contract Import_Upload { - data { - input_file string "file" - } - - conditions { - $input_file = BytesToString($input_file) - - // init buffer_data, cleaning old buffer - var initJson string - initJson = "{}" - $import_id = DBFind("buffer_data").Where("member_id=$ and key=$", $key_id, "import").One("id") - if $import_id { - $import_id = Int($import_id) - DBUpdate("buffer_data", $import_id, "value", initJson) - } else { - $import_id = DBInsert("buffer_data", "member_id,key,value", $key_id, "import", initJson) - } - - $info_id = DBFind("buffer_data").Where("member_id=$ and key=$", $key_id, "import_info").One("id") - if $info_id { - $info_id = Int($info_id) - DBUpdate("buffer_data", $info_id, "value", initJson) - } else { - $info_id = DBInsert("buffer_data", "member_id,key,value", $key_id, "import_info", initJson) - } - } - - action { - var json map - json = JSONToMap($input_file) - var arr_data array - arr_data = json["data"] - - var pages_arr, blocks_arr, menu_arr, parameters_arr, languages_arr, contracts_arr, tables_arr array - - var i int - while i 0{ + var batch map + batch["Data"] = JSONEncode(contracts) + sliced = Append(sliced, batch) + } + input["data"] = sliced + + // storing + DBUpdate("buffer_data", $import_id, "value", input) DBUpdate("buffer_data", $info_id, "value", info_map) var app_id int - app_id = DBFind("applications").Columns("id").Where("name=$", Str(json["name"])).One("id") + app_id = DBFind("applications").Columns("id").Where("name=$", Str(input["name"])).One("id") if !app_id { - DBInsert("applications", "name,conditions", Str(json["name"]), "true") + DBInsert("applications", "name,conditions", Str(input["name"]), "true") } } }', %[1]d, 'ContractConditions("MainCondition")', 1), ('13', 'NewAppParam', 'contract NewAppParam { data { - ApplicationId int "optional" + ApplicationId int Name string Value string Conditions string @@ -870,7 +532,7 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { conditions { ValidateCondition($Conditions, $ecosystem_id) - + if Size($Name) == 0 { warning "Application name missing" } @@ -886,7 +548,7 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { }', %[1]d, 'ContractConditions("MainCondition")', 1), ('15', 'NewBlock', 'contract NewBlock { data { - ApplicationId int "optional" + ApplicationId int Name string Value string Conditions string @@ -930,7 +592,7 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { }', %[1]d, 'ContractConditions("MainCondition")', 1), ('17', 'NewContract', 'contract NewContract { data { - ApplicationId int "optional" + ApplicationId int Value string Conditions string Wallet string "optional" @@ -939,11 +601,11 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { conditions { ValidateCondition($Conditions,$ecosystem_id) - + if $ApplicationId == 0 { warning "Application id cannot equal 0" } - + $walletContract = $key_id if $Wallet { $walletContract = AddressToId($Wallet) @@ -951,22 +613,13 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { error Sprintf("wrong wallet %%s", 
$Wallet) } } - var list array - list = ContractsList($Value) - if Len(list) == 0 { - error "must be the name" - } + $contract_name = ContractName($Value) - var i int - while i < Len(list) { - if IsObject(list[i], $ecosystem_id) { - warning Sprintf("Contract or function %%s exists", list[i]) - } - i = i + 1 + if !$contract_name { + error "must be the name" } - $contract_name = list[0] if !$TokenEcosystem { $TokenEcosystem = 1 } else { @@ -977,20 +630,10 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { } action { - var root, id int - root = CompileContract($Value, $ecosystem_id, $walletContract, $TokenEcosystem) - id = DBInsert("contracts", "name,value,conditions, wallet_id, token_id,app_id", $contract_name, $Value, $Conditions, $walletContract, $TokenEcosystem, $ApplicationId) - FlushContract(root, id, false) - $result = id + $result = CreateContract($contract_name, $Value, $Conditions, $walletContract, $TokenEcosystem, $ApplicationId) } func rollback() { - var list array - list = ContractsList($Value) - var i int - while i < Len(list) { - RollbackContract(list[i]) - i = i + 1 - } + RollbackNewContract($Value) } func price() int { return SysParamInt("contract_price") @@ -998,11 +641,9 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { }', %[1]d, 'ContractConditions("MainCondition")', 1), ('18', 'NewLang', 'contract NewLang { data { - ApplicationId int "optional" + ApplicationId int Name string - Trans string "optional" - Value array "optional" - IdLanguage array "optional" + Trans string } conditions { @@ -1013,39 +654,13 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { if DBFind("languages").Columns("id").Where("name = ?", $Name).One("id") { warning Sprintf( "Language resource %%s already exists", $Name) } - - var j int - while j < Len($IdLanguage) { - if $IdLanguage[j] == "" { - info("Locale empty") - } - if $Value[j] == "" { - info("Value empty") - } - j = j + 1 - } + EvalCondition("parameters", "changing_language", "value") } action { - var i,len,lenshar int - var res,langarr string - len = Len($IdLanguage) - lenshar = Len($Value) - while i < len { - if i + 1 == len { - res = res + Sprintf("%%q: %%q",$IdLanguage[i],$Value[i]) - } else { - res = res + Sprintf("%%q: %%q,",$IdLanguage[i],$Value[i]) - } - i = i + 1 - } - if len > 0 { - langarr = Sprintf("{"+"%%v"+"}", res) - $Trans = langarr - } - $result = CreateLanguage($Name, $Trans, $ApplicationId) - } + CreateLanguage($Name, $Trans, $ApplicationId) + } }', %[1]d, 'ContractConditions("MainCondition")', 1), ('19', 'NewMenu', 'contract NewMenu { data { @@ -1072,12 +687,13 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { }', %[1]d, 'ContractConditions("MainCondition")', 1), ('20', 'NewPage', 'contract NewPage { data { - ApplicationId int "optional" + ApplicationId int Name string Value string Menu string Conditions string ValidateCount int "optional" + ValidateMode string "optional" } func preparePageValidateCount(count int) int { var min, max int @@ -1106,10 +722,16 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { } $ValidateCount = preparePageValidateCount($ValidateCount) + + if $ValidateMode { + if $ValidateMode != "1" { + $ValidateMode = "0" + } + } } action { - DBInsert("pages", "name,value,menu,validate_count,conditions,app_id", $Name, $Value, $Menu, $ValidateCount, $Conditions, $ApplicationId) + DBInsert("pages", "name,value,menu,validate_count,validate_mode,conditions,app_id", $Name, $Value, $Menu, $ValidateCount, $ValidateMode, $Conditions, $ApplicationId) } func price() 
int { return SysParamInt("page_price") @@ -1136,21 +758,22 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { }', %[1]d, 'ContractConditions("MainCondition")', 1), ('22', 'Import', 'contract Import { data { - Type string - Name string "optional" - Value string "optional" - Conditions string "optional" - Menu string "optional" - Trans string "optional" - Columns string "optional" - Permissions string "optional" - Title string "optional" + Data string + } + func ReplaceValue(s string) string { + s = Replace(s, "#IMPORT_ECOSYSTEM_ID#", "#ecosystem_id#") + s = Replace(s, "#IMPORT_KEY_ID#", "#key_id#") + s = Replace(s, "#IMPORT_ISMOBILE#", "#isMobile#") + s = Replace(s, "#IMPORT_ROLE_ID#", "#role_id#") + s = Replace(s, "#IMPORT_ECOSYSTEM_NAME#", "#ecosystem_name#") + s = Replace(s, "#IMPORT_APP_ID#", "#app_id#") + return s } conditions { - Println(Sprintf("import: %%v, type: %%v, time: %%v", $Name, $Type, $time)) - $ApplicationId = 0 + $Data = ReplaceValue($Data) + $ApplicationId = 0 var app_map map app_map = DBFind("buffer_data").Columns("value->app_name").Where("key=''import_info'' and member_id=$", $key_id).Row() if app_map{ @@ -1163,17 +786,7 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { } action { - var cdata, editors, creators, item map - cdata["Value"] = $Value - cdata["Conditions"] = $Conditions - cdata["ApplicationId"] = $ApplicationId - cdata["Name"] = $Name - cdata["Title"] = $Title - cdata["Trans"] = $Trans - cdata["Menu"] = $Menu - cdata["Columns"] = $Columns - cdata["Permissions"] = $Permissions - + var editors, creators map editors["pages"] = "EditPage" editors["blocks"] = "EditBlock" editors["menu"] = "EditMenu" @@ -1190,36 +803,59 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { creators["contracts"] = "NewContract" creators["tables"] = "NewTable" - item = DBFind($Type).Where("name=?", $Name).Row() + var dataImport array + dataImport = JSONDecode($Data) + var i int + while i time: %%v", $time)) } }', %[1]d, 'ContractConditions("MainCondition")', 1), -('23', 'Export_NewApp', 'contract Export_NewApp { +('23', 'ExportNewApp', 'contract ExportNewApp { data { - app_id int + ApplicationId int } conditions { - $app_map = DBFind("applications").Columns("id,name").Where("id=$", $app_id).Row() + $app_map = DBFind("applications").Columns("id,name").Where("id=$", $ApplicationId).Row() if !$app_map{ warning "Application not found" } @@ -1232,16 +868,16 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { var i int var pages_array array var menu_name_array array - var menu_id_array array + var menu_id_array array i = 0 var pages_ret array - pages_ret = DBFind("pages").Where("app_id=?", $app_id) + pages_ret = DBFind("pages").Where("app_id=?", $ApplicationId) while i < Len(pages_ret) { var page_map map page_map = pages_ret[i] - pages_array[Len(pages_array)] = Sprintf("''%%v''", Str(page_map["menu"])) + pages_array = Append(pages_array, Sprintf("''%%v''", Str(page_map["menu"]))) i = i + 1 } @@ -1256,28 +892,28 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { var menu_map map menu_map = menu_ret[i] - menu_name_array[Len(menu_name_array)] = Str(menu_map["name"]) - menu_id_array[Len(menu_id_array)] = Str(menu_map["id"]) + menu_name_array = Append(menu_name_array, Str(menu_map["name"])) + menu_id_array = Append(menu_id_array, Str(menu_map["id"])) i = i + 1 } } //===================================================================================================== //------------------------------------Creating 
settings------------------------------------------------ - + var value map - value["app_id"] = Str($app_id) + value["app_id"] = Str($ApplicationId) value["app_name"] = Str($app_map["name"]) - - if Len(menu_name_array) > 0 { - value["menu_id"] = Str(Join(menu_id_array, ", ")) - value["menu_name"] = Str(Join(menu_name_array, ", ")) - value["count_menu"] = Str(Len(menu_name_array)) - } else { - value["menu_id"] = "0" - value["menu_name"] = "" - value["count_menu"] = "0" - } + + if Len(menu_name_array) > 0 { + value["menu_id"] = Str(Join(menu_id_array, ", ")) + value["menu_name"] = Str(Join(menu_name_array, ", ")) + value["count_menu"] = Str(Len(menu_name_array)) + } else { + value["menu_id"] = "0" + value["menu_name"] = "" + value["count_menu"] = "0" + } $buffer_id = DBFind("buffer_data").Where("member_id=$ and key=$", $key_id, "export").One("id") if !$buffer_id { @@ -1288,225 +924,180 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { } }', %[1]d, 'ContractConditions("MainCondition")', 1), ('24', 'EditBlock', 'contract EditBlock { - data { - Id int - Value string "optional" - Conditions string "optional" - } - func onlyConditions() bool { - return $Conditions && !$Value - } + data { + Id int + Value string "optional" + Conditions string "optional" + } + func onlyConditions() bool { + return $Conditions && !$Value + } - conditions { - RowConditions("blocks", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - } - - action { - var pars, vals array - if $Value { - pars[0] = "value" - vals[0] = $Value - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if Len(vals) > 0 { - DBUpdate("blocks", $Id, Join(pars, ","), vals...) - } - } + conditions { + RowConditions("blocks", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + } + + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Conditions { + pars = Append(pars, "conditions") + vals = Append(vals, $Conditions) + } + if Len(vals) > 0 { + DBUpdate("blocks", $Id, Join(pars, ","), vals...) + } + } }', %[1]d, 'ContractConditions("MainCondition")', 1), ('25', 'EditMenu', 'contract EditMenu { - data { - Id int - Value string "optional" - Title string "optional" - Conditions string "optional" - } - func onlyConditions() bool { - return $Conditions && !$Value && !$Title - } + data { + Id int + Value string "optional" + Title string "optional" + Conditions string "optional" + } + func onlyConditions() bool { + return $Conditions && !$Value && !$Title + } - conditions { - RowConditions("menu", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - } - - action { - var pars, vals array - if $Value { - pars[0] = "value" - vals[0] = $Value - } - if $Title { - pars[Len(pars)] = "title" - vals[Len(vals)] = $Title - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if Len(vals) > 0 { - DBUpdate("menu", $Id, Join(pars, ","), vals...) 
- } - } + conditions { + RowConditions("menu", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + } + + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Title { + pars = Append(pars, "title") + vals = Append(vals, $Title) + } + if $Conditions { + pars = Append(pars, "conditions") + vals = Append(vals, $Conditions) + } + if Len(vals) > 0 { + DBUpdate("menu", $Id, Join(pars, ","), vals...) + } + } }', %[1]d, 'ContractConditions("MainCondition")', 1), ('26', 'EditPage', 'contract EditPage { - data { - Id int - Value string "optional" - Menu string "optional" - Conditions string "optional" - ValidateCount int "optional" - ValidateMode string "optional" - } - func onlyConditions() bool { - return $Conditions && !$Value && !$Menu && !$ValidateCount - } - func preparePageValidateCount(count int) int { - var min, max int - min = Int(EcosysParam("min_page_validate_count")) - max = Int(EcosysParam("max_page_validate_count")) - if count < min { - count = min - } else { - if count > max { - count = max - } - } - return count - } - - conditions { - RowConditions("pages", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - $ValidateCount = preparePageValidateCount($ValidateCount) - } - - action { - var pars, vals array - if $Value { - pars[0] = "value" - vals[0] = $Value - } - if $Menu { - pars[Len(pars)] = "menu" - vals[Len(vals)] = $Menu - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if $ValidateCount { - pars[Len(pars)] = "validate_count" - vals[Len(vals)] = $ValidateCount - } - if $ValidateMode { - if $ValidateMode != "1" { - $ValidateMode = "0" - } - pars[Len(pars)] = "validate_mode" - vals[Len(vals)] = $ValidateMode - } - if Len(vals) > 0 { - DBUpdate("pages", $Id, Join(pars, ","), vals...) 
- } - } -}', %[1]d, 'ContractConditions("MainCondition")', 1), -('27', 'EditContract', 'contract EditContract { - data { - Id int - Value string "optional" - Conditions string "optional" - WalletId string "optional" - } - func onlyConditions() bool { - return $Conditions && !$Value && !$WalletId - } - - conditions { - RowConditions("contracts", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - $cur = DBFind("contracts").Columns("id,value,conditions,active,wallet_id,token_id").WhereId($Id).Row() - if !$cur { - error Sprintf("Contract %%d does not exist", $Id) - } - if $Value { - var list, curlist array - list = ContractsList($Value) - curlist = ContractsList($cur["value"]) - if Len(list) != Len(curlist) { - error "Contracts cannot be removed or inserted" - } - var i int - while i < Len(list) { - var j int - var ok bool - while j < Len(curlist) { - if curlist[j] == list[i] { - ok = true - break - } - j = j + 1 - } - if !ok { - error "Contracts or functions names cannot be changed" - } - i = i + 1 - } - } - if $WalletId != "" { - $recipient = AddressToId($WalletId) - if $recipient == 0 { - error Sprintf("New contract owner %%s is invalid", $WalletId) - } - if Int($cur["active"]) == 1 { - error "Contract must be deactivated before wallet changing" - } - } else { - $recipient = Int($cur["wallet_id"]) - } - } - - action { - var root int - var pars, vals array - if $Value { - root = CompileContract($Value, $ecosystem_id, $recipient, Int($cur["token_id"])) - pars[0] = "value" - vals[0] = $Value - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if $WalletId != "" { - pars[Len(pars)] = "wallet_id" - vals[Len(vals)] = $recipient - } - if Len(vals) > 0 { - DBUpdate("contracts", $Id, Join(pars, ","), vals...) - } - if $Value { - FlushContract(root, $Id, Int($cur["active"]) == 1) - } else { - if $WalletId != "" { - SetContractWallet($Id, $ecosystem_id, $recipient) - } - } - } - func rollback() { - RollbackEditContract() - } + data { + Id int + Value string "optional" + Menu string "optional" + Conditions string "optional" + ValidateCount int "optional" + ValidateMode string "optional" + } + func onlyConditions() bool { + return $Conditions && !$Value && !$Menu && !$ValidateCount + } + func preparePageValidateCount(count int) int { + var min, max int + min = Int(EcosysParam("min_page_validate_count")) + max = Int(EcosysParam("max_page_validate_count")) + if count < min { + count = min + } else { + if count > max { + count = max + } + } + return count + } + + conditions { + RowConditions("pages", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + $ValidateCount = preparePageValidateCount($ValidateCount) + } + + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Menu { + pars = Append(pars, "menu") + vals = Append(vals, $Menu) + } + if $Conditions { + pars = Append(pars, "conditions") + vals = Append(vals, $Conditions) + } + if $ValidateCount { + pars = Append(pars, "validate_count") + vals = Append(vals, $ValidateCount) + } + if $ValidateMode { + if $ValidateMode != "1" { + $ValidateMode = "0" + } + pars = Append(pars, "validate_mode") + vals = Append(vals, $ValidateMode) + } + if Len(vals) > 0 { + DBUpdate("pages", $Id, Join(pars, ","), vals...) 
+ } + } +}', %[1]d, 'ContractConditions("MainCondition")', 1), +('27', 'EditContract', 'contract EditContract { + data { + Id int + Value string "optional" + Conditions string "optional" + WalletId string "optional" + } + func onlyConditions() bool { + return $Conditions && !$Value && !$WalletId + } + + conditions { + RowConditions("contracts", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + $cur = DBFind("contracts").Columns("id,value,conditions,active,wallet_id,token_id").WhereId($Id).Row() + if !$cur { + error Sprintf("Contract %%d does not exist", $Id) + } + if $Value { + ValidateEditContractNewValue($Value, $cur["value"]) + } + if $WalletId != "" { + $recipient = AddressToId($WalletId) + if $recipient == 0 { + error Sprintf("New contract owner %%s is invalid", $WalletId) + } + if Int($cur["active"]) == 1 { + error "Contract must be deactivated before wallet changing" + } + } else { + $recipient = Int($cur["wallet_id"]) + } + } + + action { + UpdateContract($Id, $Value, $Conditions, $WalletId, $recipient, $cur["active"], $cur["token_id"]) + } + func rollback() { + RollbackEditContract() + } }', %[1]d, 'ContractConditions("MainCondition")', 1), ('28','MoneyTransfer','contract MoneyTransfer { data { @@ -1621,56 +1212,52 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { } }', %[1]d, 'ContractConditions("MainCondition")', 1), ('33','NewSign','contract NewSign { - data { - Name string - Value string - Conditions string - } - conditions { - ValidateCondition($Conditions,$ecosystem_id) - var exist string - - var row map - row = DBRow("signatures").Columns("id").Where("name = ?", $Name) + data { + Name string + Value string + Conditions string + } + conditions { + ValidateCondition($Conditions, $ecosystem_id) - if row { - error Sprintf("The signature %%s already exists", $Name) - } - } - action { - DBInsert("signatures", "name,value,conditions", $Name, $Value, $Conditions ) - } + if DBFind("signatures").Columns("id").Where("name = ?", $Name).One("id") { + warning Sprintf("The signature %%s already exists", $Name) + } + } + action { + DBInsert("signatures", "name,value,conditions", $Name, $Value, $Conditions) + } }', %[1]d, 'ContractConditions("MainCondition")', 1), ('34','EditSign','contract EditSign { - data { - Id int - Value string "optional" - Conditions string "optional" - } + data { + Id int + Value string "optional" + Conditions string "optional" + } + func onlyConditions() bool { + return $Conditions && false + } - func onlyConditions() bool { - return $Conditions && !$Value - } - conditions { - RowConditions("signatures", $Id, onlyConditions()) - if $Conditions { - ValidateCondition($Conditions, $ecosystem_id) - } - } - action { - var pars, vals array - if $Value { - pars[0] = "value" - vals[0] = $Value - } - if $Conditions { - pars[Len(pars)] = "conditions" - vals[Len(vals)] = $Conditions - } - if Len(vals) > 0 { - DBUpdate("signatures", $Id, Join(pars, ","), vals...) - } - } + conditions { + RowConditions("signatures", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + } + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Conditions { + pars = Append(pars, "conditions") + vals = Append(vals, $Conditions) + } + if Len(vals) > 0 { + DBUpdate("signatures", $Id, Join(pars, ","), vals...) 
+ } + } }', %[1]d, 'ContractConditions("MainCondition")', 1), ('35','DeactivateContract','contract DeactivateContract { data { @@ -1698,15 +1285,26 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { } }', %[1]d, 'ContractConditions("MainCondition")', 1), ('36','UpdateSysParam','contract UpdateSysParam { - data { - Name string - Value string - Conditions string "optional" - } - action { - DBUpdateSysParam($Name, $Value, $Conditions ) - } -}', %[1]d, 'ContractConditions("MainCondition")', 1), + data { + Name string + Value string + Conditions string "optional" + } + + conditions { + if GetContractByName($Name){ + var params map + params["Value"] = $Value + CallContract($Name, params) + } else { + warning "System parameter not found" + } + } + + action { + DBUpdateSysParam($Name, $Value, $Conditions) + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), ('37', 'NewDelayedContract','contract NewDelayedContract { data { Contract string @@ -1901,5 +1499,1100 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { action { UpdateNodesBan($block_time) } -}', %[1]d, 'ContractConditions("MainCondition")', 1); +}', %[1]d, 'ContractConditions("MainCondition")', 1), +('46', 'EditLangJoint', 'contract EditLangJoint { + data { + Id int + ValueArr array + LocaleArr array + } + + conditions { + var i int + while i < Len($LocaleArr) { + if Size($LocaleArr[i]) == 0 { + info("Locale is empty") + } + if Size($ValueArr[i]) == 0 { + info("Value is empty") + } + i = i + 1 + } + } + + action { + var i int + var Trans map + while i < Len($LocaleArr) { + Trans[$LocaleArr[i]] = $ValueArr[i] + i = i + 1 + } + var params map + params["Id"] = $Id + params["Trans"] = JSONEncode(Trans) + CallContract("EditLang", params) + } +}', %[1]d, 'ContractConditions("MainCondition")', 1), +('47', 'EditSignJoint', 'contract EditSignJoint { + data { + Id int + Title string + Parameter string + Conditions string + } + + conditions { + if !$Title { + info("Title is empty") + } + if !$Parameter { + info("Parameter is empty") + } + } + + action { + var Value map + Value["title"] = $Title + Value["params"] = $Parameter + + var params map + params["Id"] = $Id + params["Value"] = JSONEncode(Value) + params["Conditions"] = $Conditions + CallContract("EditSign", params) + } +}', %[1]d, 'ContractConditions("MainCondition")', 1), +('48', 'NewLangJoint', 'contract NewLangJoint { + data { + ApplicationId int + Name string + ValueArr array + LocaleArr array + } + + conditions { + var i int + while i < Len($LocaleArr) { + if Size($LocaleArr[i]) == 0 { + info("Locale is empty") + } + if Size($ValueArr[i]) == 0 { + info("Value is empty") + } + i = i + 1 + } + } + + action { + var i int + var Trans map + while i < Len($LocaleArr) { + Trans[$LocaleArr[i]] = $ValueArr[i] + i = i + 1 + } + var params map + params["ApplicationId"] = $ApplicationId + params["Name"] = $Name + params["Trans"] = JSONEncode(Trans) + CallContract("NewLang", params) + } +}', %[1]d, 'ContractConditions("MainCondition")', 1), +('49', 'NewSignJoint', 'contract NewSignJoint { + data { + Name string + Title string + ParamArr array + ValueArr array + Conditions string + } + + conditions { + var i int + while i < Len($ParamArr) { + if Size($ParamArr[i]) == 0 { + info("Parameter is empty") + } + if Size($ValueArr[i]) == 0 { + info("Value is empty") + } + i = i + 1 + } + } + + action { + var par_arr array + + var i int + while i < Len($ParamArr) { + var par_map map + par_map["name"] = $ParamArr[i] + par_map["text"] = $ValueArr[i] + par_arr = Append(par_arr, 
JSONEncode(par_map)) + i = i + 1 + } + + var params map + params["Name"] = $Name + params["Value"] = Sprintf(` + "`" + `{"title":"%%v","params":[%%v]}` + "`" + `, $Title, Join(par_arr, ",")) + params["Conditions"] = $Conditions + CallContract("NewSign", params) + } +}', %[1]d, 'ContractConditions("MainCondition")', 1), +('50', 'NewTableJoint', 'contract NewTableJoint { + data { + ApplicationId int + Name string + ColumnsArr array + TypesArr array + InsertPerm string + UpdatePerm string + NewColumnPerm string + } + + conditions { + var i int + while i < Len($ColumnsArr) { + if Size($ColumnsArr[i]) == 0 { + info("Columns is empty") + } + if Size($TypesArr[i]) == 0 { + info("Type is empty") + } + i = i + 1 + } + } + + action { + var i int + var col_arr array + while i < Len($ColumnsArr) { + var col_map map + col_map["name"] = $ColumnsArr[i] + col_map["type"] = $TypesArr[i] + col_map["conditions"] = "true" + col_arr[i] = JSONEncode(col_map) + i = i + 1 + } + + var Permissions map + Permissions["insert"] = $InsertPerm + Permissions["update"] = $UpdatePerm + Permissions["new_column"] = $NewColumnPerm + + var params map + params["ApplicationId"] = $ApplicationId + params["Name"] = $Name + params["Columns"] = JSONEncode(col_arr) + params["Permissions"] = JSONEncode(Permissions) + CallContract("NewTable", params) + } +}', %[1]d, 'ContractConditions("MainCondition")', 1), +('51', 'blockchain_url', 'contract blockchain_url { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if !(HasPrefix($Value, "http://") || HasPrefix($Value, "https://")) { + warning "URL ivalid (not found protocol)" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('52', 'block_reward', 'contract block_reward { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) < 3 || Int($Value) > 9999 { + warning "Value must be between 3 and 9999" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('53', 'column_price', 'contract column_price { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('54', 'commission_size', 'contract commission_size { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('55', 'commission_wallet', 'contract commission_wallet { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('56', 'contract_price', 'contract contract_price { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('57', 'default_ecosystem_contract', 'contract default_ecosystem_contract { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('58', 'default_ecosystem_menu', 'contract default_ecosystem_menu { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not 
received" + } + } + }', %[1]d, 'ContractConditions("MainCondition")', 2), +('59', 'default_ecosystem_page', 'contract default_ecosystem_page { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('60', 'ecosystem_price', 'contract ecosystem_price { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('61', 'extend_cost_activate', 'contract extend_cost_activate { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('62', 'extend_cost_address_to_id', 'contract extend_cost_address_to_id { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('63', 'extend_cost_column_condition', 'contract extend_cost_column_condition { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('64', 'extend_cost_compile_contract', 'contract extend_cost_compile_contract { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('65', 'extend_cost_contains', 'contract extend_cost_contains { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('66', 'extend_cost_contracts_list', 'contract extend_cost_contracts_list { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('67', 'extend_cost_create_column', 'contract extend_cost_create_column { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('68', 'extend_cost_create_ecosystem', 'contract extend_cost_create_ecosystem { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('69', 'extend_cost_create_table', 'contract extend_cost_create_table { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('70', 'extend_cost_deactivate', 'contract extend_cost_deactivate { + data { + Value string + } + + conditions { + 
if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('71', 'extend_cost_ecosys_param', 'contract extend_cost_ecosys_param { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('72', 'extend_cost_eval_condition', 'contract extend_cost_eval_condition { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('73', 'extend_cost_eval', 'contract extend_cost_eval { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('74', 'extend_cost_flush_contract', 'contract extend_cost_flush_contract { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('75', 'extend_cost_has_prefix', 'contract extend_cost_has_prefix { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('76', 'extend_cost_id_to_address', 'contract extend_cost_id_to_address { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('77', 'extend_cost_is_object', 'contract extend_cost_is_object { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('78', 'extend_cost_join', 'contract extend_cost_join { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('79', 'extend_cost_json_to_map', 'contract extend_cost_json_to_map { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('80', 'extend_cost_len', 'contract extend_cost_len { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('81', 'extend_cost_new_state', 'contract extend_cost_new_state { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 
'ContractConditions("MainCondition")', 2), +('82', 'extend_cost_perm_column', 'contract extend_cost_perm_column { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('83', 'extend_cost_perm_table', 'contract extend_cost_perm_table { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('84', 'extend_cost_pub_to_id', 'contract extend_cost_pub_to_id { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('85', 'extend_cost_replace', 'contract extend_cost_replace { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('86', 'extend_cost_sha256', 'contract extend_cost_sha256 { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('87', 'extend_cost_size', 'contract extend_cost_size { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('88', 'extend_cost_substr', 'contract extend_cost_substr { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('89', 'extend_cost_sys_fuel', 'contract extend_cost_sys_fuel { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('90', 'extend_cost_sys_param_int', 'contract extend_cost_sys_param_int { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('91', 'extend_cost_sys_param_string', 'contract extend_cost_sys_param_string { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('92', 'extend_cost_table_conditions', 'contract extend_cost_table_conditions { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('93', 'extend_cost_update_lang', 'contract extend_cost_update_lang { + data { + Value string + } + + conditions { + if 
Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('94', 'extend_cost_validate_condition', 'contract extend_cost_validate_condition { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('95', 'fuel_rate', 'contract fuel_rate { + data { + Value string + } + + conditions { + $Value = TrimSpace($Value) + if Size($Value) == 0 { + warning "Value was not received" + } + // [["x1","number"]] + if !(HasPrefix($Value, "[") && "]" == Substr($Value, Size($Value)-1, 1)){ + warning "Invalid value" + } + var rates newRate array + rates = JSONDecode($Value) + if Len(rates) > 1{ + warning "Invalid size array" + } + newRate = rates[0] + if Len(newRate) != 2{ + warning "Invalid size new rate array" + } + if newRate[0] != 1 { + warning "Invalid ecosystem number" + } + if Int(newRate[1]) <= 0 { + warning "Invalid fuel value" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('96', 'full_nodes', 'contract full_nodes { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + + var full_nodes_arr array + full_nodes_arr = JSONDecode($Value) + + var len_arr int + len_arr = Len(full_nodes_arr) + + if len_arr == 0 { + warning "Wrong array structure" + } + + var i int + while(i < len_arr){ + var node_map map + node_map = full_nodes_arr[i] + + var public_key string + var tcp_address string + var api_address string + var key_id string + + public_key = node_map["public_key"] + tcp_address = node_map["tcp_address"] + api_address = node_map["api_address"] + key_id = node_map["key_id"] + + if Size(public_key) == 0 { + warning "Public key was not received" + } + if Size(tcp_address) == 0 { + warning "TCP address was not received" + } + if Size(api_address) == 0 { + warning "API address was not received" + } + if Size(key_id) == 0 { + warning "Key ID was not received" + } + + i = i + 1 + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('97', 'gap_between_blocks', 'contract gap_between_blocks { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 || Int($Value) >= 86400 { + warning "Value must be between 1 and 86399" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('98', 'max_block_generation_time', 'contract max_block_generation_time { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('99', 'max_block_size', 'contract max_block_size { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('100', 'max_block_user_tx', 'contract max_block_user_tx { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('101', 'max_columns', 'contract max_columns { + data { + Value string + } 
+ + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('102', 'max_fuel_block', 'contract max_fuel_block { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('103', 'max_fuel_tx', 'contract max_fuel_tx { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('104', 'max_indexes', 'contract max_indexes { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('105', 'max_tx_count', 'contract max_tx_count { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('106', 'max_tx_size', 'contract max_tx_size { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('107', 'menu_price', 'contract menu_price { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('108', 'new_version_url', 'contract new_version_url { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('109', 'number_of_nodes', 'contract number_of_nodes { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) < 1 || Int($Value) > 999 { + warning "Value must be between 1 and 999" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('110', 'page_price', 'contract page_price { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('111', 'rb_blocks_1', 'contract rb_blocks_1 { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) < 1 || Int($Value) > 999 { + warning "Value must be between 1 and 999" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('112', 'table_price', 'contract table_price { + data { + Value string + } + + conditions { + if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2); ` diff --git a/packages/migration/first_ecosystems_data.go b/packages/migration/first_ecosystems_data.go index 0475c851c..20ce06909 100644 --- 
a/packages/migration/first_ecosystems_data.go
+++ b/packages/migration/first_ecosystems_data.go
@@ -2,4 +2,15 @@ package migration
 
 var firstEcosystemDataSQL = `
 INSERT INTO "1_ecosystems" ("id", "name", "is_valued") VALUES ('1', 'platform ecosystem', 0);
+
+INSERT INTO "1_roles" ("id", "default_page", "role_name", "deleted", "role_type",
+ "date_created","creator","roles_access") VALUES
+ ('3','', 'Apla Consensus asbl', '0', '3', NOW(), '{}', '{"rids": "1"}'),
+ ('4','', 'Candidate for validators', '0', '3', NOW(), '{}', '{}'),
+ ('5','', 'Validator', '0', '3', NOW(), '{}', '{}'),
+ ('6','', 'Investor with voting rights', '0', '3', NOW(), '{}', '{}'),
+ ('7','', 'Delegate', '0', '3', NOW(), '{}', '{}');
+
+INSERT INTO "1_applications" (id, name, conditions) VALUES (2, 'System parameters',
+ 'ContractConditions("MainCondition")');
 `
diff --git a/packages/migration/menu_data.go b/packages/migration/menu_data.go
index 0952bd931..ce203d982 100644
--- a/packages/migration/menu_data.go
+++ b/packages/migration/menu_data.go
@@ -4,15 +4,17 @@ var menuDataSQL = `INSERT INTO "%[1]d_menu" (id, name, value, conditions) VALUES
 (2, 'admin_menu', 'MenuItem(Title:"Application", Page:apps_list, Icon:"icon-folder")
 MenuItem(Title:"Ecosystem parameters", Page:params_list, Icon:"icon-settings")
 MenuItem(Title:"Menu", Page:menus_list, Icon:"icon-list")
+MenuItem(Title:"Confirmations", Page:confirmations, Icon:"icon-check")
 MenuItem(Title:"Import", Page:import_upload, Icon:"icon-cloud-upload")
 MenuItem(Title:"Export", Page:export_resources, Icon:"icon-cloud-download")
 MenuGroup(Title:"Resources", Icon:"icon-share"){
- MenuItem(Title:"Pages", Page:app_pages, Icon:"icon-screen-desktop")
- MenuItem(Title:"Blocks", Page:app_blocks, Icon:"icon-grid")
- MenuItem(Title:"Tables", Page:app_tables, Icon:"icon-docs")
- MenuItem(Title:"Contracts", Page:app_contracts, Icon:"icon-briefcase")
- MenuItem(Title:"Application parameters", Page:app_params, Icon:"icon-wrench")
- MenuItem(Title:"Language resources", Page:app_langres, Icon:"icon-globe")
- MenuItem(Title:"Binary data", Page:app_binary, Icon:"icon-layers")
-}', 'ContractConditions("MainCondition")');
+ MenuItem(Title:"Pages", Page:app_pages, Icon:"icon-screen-desktop")
+ MenuItem(Title:"Blocks", Page:app_blocks, Icon:"icon-grid")
+ MenuItem(Title:"Tables", Page:app_tables, Icon:"icon-docs")
+ MenuItem(Title:"Contracts", Page:app_contracts, Icon:"icon-briefcase")
+ MenuItem(Title:"Application parameters", Page:app_params, Icon:"icon-wrench")
+ MenuItem(Title:"Language resources", Page:app_langres, Icon:"icon-globe")
+ MenuItem(Title:"Binary data", Page:app_binary, Icon:"icon-layers")
+}
+MenuItem(Title:"Dashboard", Page:admin_dashboard, Icon:"icon-wrench")', 'ContractConditions("MainCondition")');
 `
diff --git a/packages/migration/pages_data.go b/packages/migration/pages_data.go
index 9fedd755d..575e936c3 100644
--- a/packages/migration/pages_data.go
+++ b/packages/migration/pages_data.go
@@ -1,170 +1,337 @@
 package migration
 
 var pagesDataSQL = `INSERT INTO "%[1]d_pages" (id, name, value, conditions) VALUES
-(2, 'app_binary', 'DBFind(buffer_data, src_buffer).Columns("value->app_id,value->app_name,value->menu_name,value->menu_id,value->count_menu").Where("key=''export'' and member_id=#key_id#").Vars(buffer)
+ (2, 'admin_dashboard', 'SetVar(this_page,admin_dashboard)
+If(GetVar(block)){
+ Div(breadcrumb){
+ LinkPage(Body:Dashboard,Page:#this_page#)
+ Span(/).Style(margin-right: 10px; margin-left: 10px;)
+ Span(Class: text-muted, Body: Block: #block#)
+ }
+
Include(Name:#block#) +}.Else{ + SetTitle(Dashboard) + DBFind(buffer_data).Columns("value->app_id").Where("key=''export'' and member_id=#key_id#").Vars(buffer) + Data(tables, "Table,Cols,Page"){ + contracts,"id,app_id,name,active",editor + pages,"id,app_id,name",editor + blocks,"id,app_id,name",editor + tables,"id,app_id,name",table_create + app_params,"id,app_id,name,value",app_params_edit + binaries,"id,app_id,name",app_upload_binary + languages,"id,app_id,name",langres_add + } + DBFind(applications,src_apps).Order(id).Count(apps_count) + SetVar(active_btn,"btn btn-info").(create_icon,fa fa-plus-square).(cols,3) + If(GetVar(appid)){ + SetVar(where,"app_id=#appid#") + }.Else{ + If(#buffer_value_app_id#>0){ + SetVar(appid,#buffer_value_app_id#).(where,"app_id=#appid#") + }.Else{ + SetVar(where,"id>0").(appid,) + } + } + Div(container){ + If(#apps_count#>8){ + Div(row){ + Form(col-sm-4 input-group form-group){ + Div(input-group-addon){ + App ID: #appid# + } + Select(Name:appid, Source:src_apps, NameColumn:name, ValueColumn:id, Value:#appid#) + Div(input-group-btn){ + Button(Page: #this_page#, Class: #active_btn# fa fa-check, PageParams: "appid=Val(appid)") + Button(Page: #this_page#, Class: btn btn-default fa fa-refresh, PageParams: "appid=#buffer_value_app_id#") + } + } + } + }.Else{ + Div(row){ + Div(col-sm-12 btn-group){ + ForList(src_apps){ + If(#id#==1){ + If(#appid#==#buffer_value_app_id#){ + Button(Class: btn btn-default disabled fa fa-refresh) + }.Else{ + Button(Page: #this_page#, Class: btn btn-default fa fa-refresh, PageParams: "appid=#buffer_value_app_id#") + } + } + If(#appid#==#id#){ + Button(Class: #active_btn# disabled, Body:"#id#:#name#") + }.Else{ + Button(Page: #this_page#, Class: btn btn-default, PageParams: "appid=#id#", Body:"#id#:#name#") + } + } + } + } + } + Div(form-group){ + ForList(tables){ + DBFind(#Table#, src_table).Limit(250).Columns(#Cols#).Order("name").Where(#where#) + Div(row){ + Div(h3){ + LangRes(#Table#) + } + } + Div(row list-group-item){ + Div(cols){ + SetVar(value,) + ForList(src_table){ + Div(clearfix){ + If(#Table#==contracts){ + LinkPage(Page: #Page#, PageParams: "open=contract,name=#name#"){#name#} + } + If(#Table#==pages){ + LinkPage(Page: #Page#, PageParams: "open=page,name=#name#"){#name#} + } + If(#Table#==blocks){ + LinkPage(Page: #Page#, PageParams: "open=block,name=#name#"){#name#} + } + If(#Table#==tables){ + LinkPage(Page: table_edit, PageParams: "tabl_id=#id#"){#name#} + } + If(#Table#==app_params){ + LinkPage(Page: #Page#, PageParams: "id=#id#"){#name#} + } + If(#Table#==binaries){ + LinkPage(Page: #Page#, PageParams: "id=#id#,application_id=#appid#"){#name#} + } + If(#Table#==languages){ + LinkPage(Class: text-primary h4, Page: langres_edit, PageParams: "lang_id=#id#"){#name#} + } + If(` + "`" + `#value#` + "`" + `!=""){ + :Div(text-muted){` + "`" + `#value#` + "`" + `}.Style(max-height:1.5em;overflow:hidden;) + } + Div(pull-right){ + If(#Table#==contracts){ + If(#active#==1){ + Span(actived,text-success mr-lg) + } + LinkPage(Class: text-muted fa fa-cogs, Page: properties_edit, PageParams: "edit_property_id=#id#,type=contract") + } + If(#Table#==pages){ + LinkPage(Class: text-muted fa fa-eye, Page: #name#) + LinkPage(Class: text-muted fa fa-cogs, Page: properties_edit, PageParams: "edit_property_id=#id#,type=page") + } + If(#Table#==blocks){ + LinkPage(Class: text-muted fa fa-eye, Page: #this_page#, PageParams:"block=#name#") + LinkPage(Class: text-muted fa fa-cogs, Page: properties_edit, PageParams: 
"edit_property_id=#id#,type=block") + } + If(#Table#==tables){ + LinkPage(Class: text-muted fa fa-eye, Page: table_view, PageParams: "tabl_id=#id#,table_name=#name#") + } + } + } + } + } + Div(row col-sm-12 mt-lg text-right){ + If(#Table#==contracts){ + LinkPage(Page: #Page#, PageParams: "create=contract,appId=#appid#"){ + Em(Class: #create_icon#) CREATE Em(Class: #create_icon#) + } + }.ElseIf(#Table#==pages){ + LinkPage(Page: #Page#, PageParams: "create=page,appId=#appid#"){ + Em(Class: #create_icon#) CREATE Em(Class: #create_icon#) + } + }.ElseIf(#Table#==blocks){ + LinkPage(Page: #Page#, PageParams: "create=block,appId=#appid#"){ + Em(Class: #create_icon#) CREATE Em(Class: #create_icon#) + } + }.ElseIf(#Table#==tables){ + LinkPage(Page: #Page#, PageParams: "application_id=#appid#"){ + Em(Class: #create_icon#) CREATE Em(Class: #create_icon#) + } + }.ElseIf(#Table#==app_params){ + LinkPage(Page: #Page#, PageParams: "application_id=#appid#,create=create"){ + Em(Class: #create_icon#) CREATE Em(Class: #create_icon#) + } + }.ElseIf(#Table#==binaries){ + LinkPage(Page: #Page#, PageParams: "application_id=#appid#"){ + Em(Class: #create_icon#) CREATE Em(Class: #create_icon#) + } + }.ElseIf(#Table#==languages){ + LinkPage(Page: #Page#, PageParams: "application_id=#appid#"){ + Em(Class: #create_icon#) CREATE Em(Class: #create_icon#) + } + } + } + } + } + } + }.Style( + .pull-right a { + margin-right:10px; + } + .cols { + -moz-column-count: #cols#; + -webkit-column-count: #cols#; + column-count: #cols#; + } + ) +}', 'admin_menu', 'ContractAccess("@1EditPage")'), + (3, 'app_binary', 'DBFind(buffer_data, src_buffer).Columns("value->app_id").Where("key=''export'' and member_id=#key_id#").Vars(buffer) If(#buffer_value_app_id# > 0){ - DBFind(applications, src_app).Where("id=#buffer_value_app_id#").Limit(1).Vars("app") - - Div(content-wrapper){ - SetTitle("Binary data": #app_name#) - AddToolButton(Title: "Upload binary", Page: app_upload_binary, Icon: icon-plus, PageParams: "app_id=#app_id#") - - SetVar(pager_table, binaries).(pager_where, "app_id=#buffer_value_app_id#").(pager_page, app_binary).(pager_limit, 50) - Include(pager_header) - - SetVar(admin_page, app_binary) - Include(admin_link) - - DBFind(binaries, src_binparameters).Limit(#pager_limit#).Order(#sort_name#).Offset(#pager_offset#).Where("app_id=#buffer_value_app_id#") - - Form(panel panel-primary){ - Div(panel-body){ - Div(row){ - ForList(src_binparameters){ - Div(col-md-#width# col-sm-12){ - Div(list-group-item){ - Div(row){ - Div(col-md-4){ - Span(Class: h5 text-bold, Body: "#id#").Style(margin-right: 10px;) - If(#member_id# == #key_id#){ - LinkPage(Class: text-primary h5, Body: #name#, Page: app_upload_binary, PageParams: "id=#id#,app_id=#buffer_value_app_id#") - }.Else{ - Span(Class: h5, Body: #name#) - } - } - Div(col-md-8 text-right){ - Span(#hash#) - } - } - } - } - } - } - } - Div(panel-footer clearfix){ - Include(pager) - } - } - } + DBFind(applications, src_app).Where("id=#buffer_value_app_id#").Vars("application") + + Div(content-wrapper){ + SetTitle("Binary data": #application_name#) + AddToolButton(Title: "Upload binary", Page: app_upload_binary, Icon: icon-plus, PageParams: "application_id=#application_id#") + + SetVar(pager_table, binaries).(pager_where, "app_id=#buffer_value_app_id#").(pager_page, app_binary).(pager_limit, 50) + Include(pager_header) + + SetVar(admin_page, app_binary) + Include(admin_link) + + DBFind(binaries, 
src_binparameters).Limit(#pager_limit#).Order(#sort_name#).Offset(#pager_offset#).Where("app_id=#buffer_value_app_id#") + + Form(panel panel-primary){ + Div(panel-body){ + Div(row){ + ForList(src_binparameters){ + Div(col-md-#width# col-sm-12){ + Div(list-group-item){ + Div(row){ + Div(col-md-4){ + Span(Class: h5 text-bold, Body: "#id#").Style(margin-right: 10px;) + If(#member_id# == #key_id#){ + LinkPage(Class: text-primary h5, Body: #name#, Page: app_upload_binary, PageParams: "id=#id#,application_id=#buffer_value_app_id#") + }.Else{ + Span(Class: h5, Body: #name#) + } + } + Div(col-md-8 text-right){ + Span(#hash#) + } + } + } + } + } + } + } + Div(panel-footer clearfix){ + Include(pager) + } + } + } }.Else{ - SetTitle("Binary data") - Div(breadcrumb){ - Span(Class: text-muted, Body: "You did not select the application. Viewing resources is not available") - } + SetTitle("Binary data") + Div(breadcrumb){ + Span(Class: text-muted, Body: "You did not select the application. Viewing resources is not available") + } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(3, 'app_blocks', 'DBFind(buffer_data, src_buffer).Columns("value->app_id,value->app_name,value->menu_name,value->menu_id,value->count_menu").Where("key=''export'' and member_id=#key_id#").Vars(buffer) - + (4, 'app_blocks', 'DBFind(buffer_data, src_buffer).Columns("value->app_id").Where("key=''export'' and member_id=#key_id#").Vars(buffer) If(#buffer_value_app_id# > 0){ - DBFind(applications, src_app).Where("id=#buffer_value_app_id#").Limit(1).Vars("app") - - Div(content-wrapper){ - SetTitle("Blocks": #app_name#) - AddToolButton(Title: "Create", Page: editor, Icon: icon-plus, PageParams: "create=block,appId=#buffer_value_app_id#") - - SetVar(pager_table, blocks).(pager_where, "app_id=#buffer_value_app_id#").(pager_page, app_blocks).(pager_limit, 50) - Include(pager_header) - - SetVar(admin_page, app_blocks) - Include(admin_link) - - DBFind(blocks, src_blocks).Limit(#pager_limit#).Order(#sort_name#).Offset(#pager_offset#).Where("app_id=#buffer_value_app_id#") - - Form(panel panel-primary){ - Div(panel-body){ - Div(row){ - ForList(src_blocks){ - Div(col-md-#width# col-sm-12){ - Div(list-group-item){ - Div(row){ - Div(col-md-4){ - Span(Class: h5 text-bold, Body: "#id#").Style(margin-right: 10px;) - Span(Class: h5, Body: "#name#") - } - Div(col-md-8){ - Div(pull-right){ - Span(LinkPage(Body: Em(Class: fa fa-cogs), Class: text-primary h4, Page: properties_edit, PageParams: "edit_property_id=#id#,type=block")).Style(margin-right: 15px;) - Span(LinkPage(Body: Em(Class: fa fa-edit), Class: text-primary h4, Page: editor, PageParams: "open=block,name=#name#")) - } - } - } - } - } - } - } - } - Div(panel-footer clearfix){ - Include(pager) - } - } - } + DBFind(applications, src_app).Where("id=#buffer_value_app_id#").Vars("application") + + Div(content-wrapper){ + SetTitle("Blocks": #application_name#) + AddToolButton(Title: "Create", Page: editor, Icon: icon-plus, PageParams: "create=block,appId=#buffer_value_app_id#") + + SetVar(pager_table, blocks).(pager_where, "app_id=#buffer_value_app_id#").(pager_page, app_blocks).(pager_limit, 50) + Include(pager_header) + + SetVar(admin_page, app_blocks) + Include(admin_link) + + DBFind(blocks, src_blocks).Limit(#pager_limit#).Order(#sort_name#).Offset(#pager_offset#).Where("app_id=#buffer_value_app_id#") + + Form(panel panel-primary){ + Div(panel-body){ + Div(row){ + ForList(src_blocks){ + Div(col-md-#width# col-sm-12){ + Div(list-group-item){ + Div(row){ + Div(col-md-4){ + Span(Class: h5 
text-bold, Body: "#id#").Style(margin-right: 10px;) + Span(Class: h5, Body: "#name#") + } + Div(col-md-8){ + Div(pull-right){ + Span(LinkPage(Body: Em(Class: fa fa-cogs), Class: text-primary h4, Page: properties_edit, PageParams: "edit_property_id=#id#,type=block")).Style(margin-right: 15px;) + Span(LinkPage(Body: Em(Class: fa fa-edit), Class: text-primary h4, Page: editor, PageParams: "open=block,name=#name#")) + } + } + } + } + } + } + } + } + Div(panel-footer clearfix){ + Include(pager) + } + } + } }.Else{ - SetTitle("Blocks") - Div(breadcrumb){ - Span(Class: text-muted, Body: "You did not select the application. Viewing resources is not available") - } + SetTitle("Blocks") + Div(breadcrumb){ + Span(Class: text-muted, Body: "You did not select the application. Viewing resources is not available") + } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(4, 'app_contracts', 'DBFind(buffer_data, src_buffer).Columns("value->app_id,value->app_name,value->menu_name,value->menu_id,value->count_menu").Where("key=''export'' and member_id=#key_id#").Vars(buffer) - + (5, 'app_contracts', 'DBFind(buffer_data, src_buffer).Columns("value->app_id").Where("key=''export'' and member_id=#key_id#").Vars(buffer) If(#buffer_value_app_id# > 0){ - DBFind(applications, src_app).Where("id=#buffer_value_app_id#").Limit(1).Vars("app") - - Div(content-wrapper){ - SetTitle("Contracts": #app_name#) - AddToolButton(Title: "Create", Page: editor, Icon: icon-plus, PageParams: "create=contract,appId=#buffer_value_app_id#") - - SetVar(pager_table, contracts).(pager_where, "app_id=#buffer_value_app_id#").(pager_page, app_contracts).(pager_limit, 50) - Include(pager_header) - - SetVar(admin_page, app_contracts) - Include(admin_link) - - DBFind(contracts, src_contracts).Limit(#pager_limit#).Order(#sort_name#).Offset(#pager_offset#).Where("app_id=#buffer_value_app_id#") - - Form(panel panel-primary){ - Div(panel-body){ - Div(row){ - ForList(src_contracts){ - Div(col-md-#width# col-sm-12){ - Div(list-group-item){ - Div(row){ - Div(col-md-4){ - Span(Class: h5 text-bold, Body: "#id#").Style(margin-right: 10px;) - Span(Class: h5, Body: "#name#") - } - Div(col-md-8){ - Div(pull-right){ - Span(LinkPage(Body: Em(Class: fa fa-cogs), Class: text-primary h4, Page: properties_edit, PageParams: "edit_property_id=#id#,type=contract")).Style(margin-right: 15px;) - Span(LinkPage(Body: Em(Class: fa fa-edit), Class: text-primary h4, Page: editor, PageParams: "open=contract,name=#name#")) - } - Div(pull-right){ - If(#active#==1){ - Span(Class: h5, Body: Em(Class: fa fa-check)).Style(margin-right: 50px;) - }.Else{ - Span(Class: h5 text-muted, Body: Em(Class: fa fa-minus)).Style(margin-right: 50px;) - } - } - } - } - } - } - } - } - } - Div(panel-footer clearfix){ - Include(pager) - } - } - } + DBFind(applications, src_app).Where("id=#buffer_value_app_id#").Vars("application") + + Div(content-wrapper){ + SetTitle("Contracts": #application_name#) + AddToolButton(Title: "Create", Page: editor, Icon: icon-plus, PageParams: "create=contract,appId=#buffer_value_app_id#") + + SetVar(pager_table, contracts).(pager_where, "app_id=#buffer_value_app_id#").(pager_page, app_contracts).(pager_limit, 50) + Include(pager_header) + + SetVar(admin_page, app_contracts) + Include(admin_link) + + DBFind(contracts, src_contracts).Limit(#pager_limit#).Order(#sort_name#).Offset(#pager_offset#).Where("app_id=#buffer_value_app_id#") + + Form(panel panel-primary){ + Div(panel-body){ + Div(row){ + ForList(src_contracts){ + Div(col-md-#width# col-sm-12){ + 
Div(list-group-item){ + Div(row){ + Div(col-md-4){ + Span(Class: h5 text-bold, Body: "#id#").Style(margin-right: 10px;) + Span(Class: h5, Body: "#name#") + } + Div(col-md-8){ + Div(pull-right){ + Span(LinkPage(Body: Em(Class: fa fa-cogs), Class: text-primary h4, Page: properties_edit, PageParams: "edit_property_id=#id#,type=contract")).Style(margin-right: 15px;) + Span(LinkPage(Body: Em(Class: fa fa-edit), Class: text-primary h4, Page: editor, PageParams: "open=contract,name=#name#")) + } + Div(pull-right){ + If(#active#==1){ + Span(Class: h5, Body: Em(Class: fa fa-check)).Style(margin-right: 50px;) + }.Else{ + Span(Class: h5 text-muted, Body: Em(Class: fa fa-minus)).Style(margin-right: 50px;) + } + } + } + } + } + } + } + } + } + Div(panel-footer clearfix){ + Include(pager) + } + } + } }.Else{ - SetTitle("Contracts") - Div(breadcrumb){ - Span(Class: text-muted, Body: "You did not select the application. Viewing resources is not available") - } + SetTitle("Contracts") + Div(breadcrumb){ + Span(Class: text-muted, Body: "You did not select the application. Viewing resources is not available") + } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(5, 'app_edit', 'Div(content-wrapper){ - SetTitle("Application") - Div(breadcrumb){ + (6, 'app_edit', 'Div(content-wrapper){ + SetTitle("Applications") + Div(breadcrumb){ LinkPage("Applications", apps_list) Span(/).Style(margin-right: 10px; margin-left: 10px;) If(#id# > 0){ @@ -172,215 +339,192 @@ If(#buffer_value_app_id# > 0){ }.Else{ Span(Class: text-muted, Body: "New") } - } + } Form(){ If(#id# > 0){ - DBFind(applications, src_apps).Columns("id,name,conditions,deleted").Where("id=#id#").Limit(1).Vars("app") - Div(col-md-12){ - Div(form-group){ - Div(text-left){ - Label("Name") - } - Input(Name: name, Disabled: "true", Value: #app_name#) - } - Div(form-group){ - Div(text-left){ - Label("Change conditions") - } - Input(Name: conditions, Value: #app_conditions#) - } + DBFind(applications, src_apps).Columns("id,name,conditions,deleted").Where("id=#id#").Vars("application") + Div(form-group){ + Label("Name") + Input(Name: Name, Disabled: "true", Value: #application_name#) + } + Div(form-group){ + Label("Change conditions") + Input(Name: Conditions, Value: #application_conditions#) + } + Div(form-group){ Div(row){ - Div(form-group){ - Div(text-left col-md-6){ - Button(Body: "Save", Class: btn btn-primary, Page: apps_list, Contract: EditApplication, Params: "ApplicationId=#id#,Conditions=Val(conditions)") - } - Div(text-right col-md-6){ - If(#app_deleted# == 0){ - Button(Body: "Delete", Class: btn btn-danger, Page: apps_list, Contract: DelApplication, Params: "ApplicationId=#app_id#,Value=1") - } + Div(text-left col-md-6){ + Button(Body: "Save", Class: btn btn-primary, Page: apps_list, Contract: @1EditApplication, Params: "ApplicationId=#id#") + } + Div(text-right col-md-6){ + If(#application_deleted# == 0){ + Button(Body: "Delete", Class: btn btn-danger, Page: apps_list, Contract: @1DelApplication, Params: "ApplicationId=#application_id#,Value=1") } } } } }.Else{ - Div(col-md-12){ - Div(form-group){ - Div(text-left){ - Label("Name") - } - Input(Name: name) - } - Div(form-group){ - Div(text-left){ - Label("Change conditions") - } - Input(Name: conditions) - } - Div(form-group){ - Div(text-left){ - Button(Body: "Save", Class: btn btn-primary, Page: apps_list, Contract: NewApplication, Params: "Name=Val(name),Conditions=Val(conditions)") - } + Div(form-group){ + Label("Name") + Input(Name: Name) + } + Div(form-group){ + Label("Change conditions") + 
Input(Name: Conditions) + } + Div(form-group){ + Div(text-left){ + Button(Body: "Save", Class: btn btn-primary, Page: apps_list, Contract: @1NewApplication) } } } - } + } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(6, 'app_langres', 'DBFind(buffer_data, src_buffer).Columns("value->app_id,value->app_name,value->menu_name,value->menu_id,value->count_menu").Where("key=''export'' and member_id=#key_id#").Vars(buffer) - + (7, 'app_langres', 'DBFind(buffer_data, src_buffer).Columns("value->app_id").Where("key=''export'' and member_id=#key_id#").Vars(buffer) If(#buffer_value_app_id# > 0){ - DBFind(applications, src_app).Where("id=#buffer_value_app_id#").Limit(1).Vars("app") - - Div(content-wrapper){ - SetTitle("Language resources": #app_name#) - AddToolButton(Title: "Create", Page: langres_add, Icon: icon-plus, PageParams: "app_id=#app_id#") - - SetVar(pager_table, languages).(pager_where, "app_id=#buffer_value_app_id#").(pager_page, app_langres).(pager_limit, 50) - Include(pager_header) - - SetVar(admin_page, app_langres) - Include(admin_link) - - DBFind(languages, src_languages).Limit(#pager_limit#).Order(#sort_name#).Offset(#pager_offset#).Where("app_id=#buffer_value_app_id#") - - Form(panel panel-primary){ - Div(panel-body){ - Div(row){ - ForList(src_languages){ - Div(col-md-#width# col-sm-12){ - Div(list-group-item){ - Div(row){ - Div(col-md-4){ - Span(Class: h5 text-bold, Body: "#id#").Style(margin-right: 10px;) - Span(Class: h5, Body: "#name#") - } - Div(col-md-8){ - Div(pull-right){ - Span(LinkPage(Body: Em(Class: fa fa-edit), Class: text-primary h4, Page: langres_edit, PageParams: "lang_id=#id#")) - } - } - } - } - } - } - } - } - Div(panel-footer clearfix){ - Include(pager) - } - } - } + DBFind(applications, src_app).Where("id=#buffer_value_app_id#").Vars("application") + + Div(content-wrapper){ + SetTitle("Language resources": #application_name#) + AddToolButton(Title: "Create", Page: langres_add, Icon: icon-plus, PageParams: "application_id=#application_id#") + + SetVar(pager_table, languages).(pager_where, "app_id=#buffer_value_app_id#").(pager_page, app_langres).(pager_limit, 50) + Include(pager_header) + + SetVar(admin_page, app_langres) + Include(admin_link) + + DBFind(languages, src_languages).Limit(#pager_limit#).Order(#sort_name#).Offset(#pager_offset#).Where("app_id=#buffer_value_app_id#") + + Form(panel panel-primary){ + Div(panel-body){ + Div(row){ + ForList(src_languages){ + Div(col-md-#width# col-sm-12){ + Div(list-group-item clearfix){ + Span(Class: mr-sm text-bold, Body: "#id#") + #name# + LinkPage(Class:fa fa-edit pull-right, Page: langres_edit, PageParams: "lang_id=#id#") + } + } + } + } + } + Div(panel-footer){ + Include(pager) + } + } + } }.Else{ - SetTitle("Language resources") - Div(breadcrumb){ - Span(Class: text-muted, Body: "You did not select the application. Viewing resources is not available") - } + SetTitle("Language resources") + Div(breadcrumb){ + Span(Class: text-muted, Body: "You did not select the application. 
Viewing resources is not available") + } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(7, 'app_pages', 'DBFind(buffer_data, src_buffer).Columns("value->app_id,value->app_name,value->menu_name,value->menu_id,value->count_menu").Where("key=''export'' and member_id=#key_id#").Vars(buffer) - + (8, 'app_pages', 'DBFind(buffer_data, src_buffer).Columns("value->app_id").Where("key=''export'' and member_id=#key_id#").Vars(buffer) If(#buffer_value_app_id# > 0){ - DBFind(applications, src_app).Where("id=#buffer_value_app_id#").Limit(1).Vars("app") - - Div(content-wrapper){ - SetTitle("Pages": #app_name#) - AddToolButton(Title: "Create", Page: editor, Icon: icon-plus, PageParams: "create=page,appId=#buffer_value_app_id#") - - SetVar(pager_table, pages).(pager_where, "app_id=#buffer_value_app_id#").(pager_page, app_pages).(pager_limit, 50) - Include(pager_header) - - SetVar(admin_page, app_pages) - Include(admin_link) - - DBFind(pages, src_pages).Limit(#pager_limit#).Order(#sort_name#).Offset(#pager_offset#).Where("app_id=#buffer_value_app_id#") - - Form(panel panel-primary){ - Div(panel-body){ - Div(row){ - ForList(src_pages){ - Div(col-md-#width# col-sm-12){ - Div(list-group-item){ - Div(row){ - Div(col-md-4){ - Span(Class: h5 text-bold, Body: "#id#").Style(margin-right: 10px;) - LinkPage(Page: #name#, Class: text-primary h5, Body: "#name#") - } - Div(col-md-8){ - Div(pull-right){ - Span(LinkPage(Body: Em(Class: fa fa-cogs), Class: text-primary h4, Page: properties_edit, PageParams: "edit_property_id=#id#,type=page")).Style(margin-right: 15px;) - Span(LinkPage(Body: Em(Class: fa fa-edit), Class: text-primary h4, Page: editor, PageParams: "open=page,name=#name#")) - } - } - } - } - } - } - } - } - Div(panel-footer clearfix){ - Include(pager) - } - } - } + DBFind(applications, src_app).Where("id=#buffer_value_app_id#").Vars("application") + + Div(content-wrapper){ + SetTitle("Pages": #application_name#) + AddToolButton(Title: "Create", Page: editor, Icon: icon-plus, PageParams: "create=page,appId=#buffer_value_app_id#") + + SetVar(pager_table, pages).(pager_where, "app_id=#buffer_value_app_id#").(pager_page, app_pages).(pager_limit, 50) + Include(pager_header) + + SetVar(admin_page, app_pages) + Include(admin_link) + + DBFind(pages, src_pages).Limit(#pager_limit#).Order(#sort_name#).Offset(#pager_offset#).Where("app_id=#buffer_value_app_id#") + + Form(panel panel-primary){ + Div(panel-body){ + Div(row){ + ForList(src_pages){ + Div(col-md-#width# col-sm-12){ + Div(list-group-item){ + Div(row){ + Div(col-md-4){ + Span(Class: h5 text-bold, Body: "#id#").Style(margin-right: 10px;) + LinkPage(Page: #name#, Class: text-primary h5, Body: "#name#") + } + Div(col-md-8){ + Div(pull-right){ + Span(LinkPage(Body: Em(Class: fa fa-cogs), Class: text-primary h4, Page: properties_edit, PageParams: "edit_property_id=#id#,type=page")).Style(margin-right: 15px;) + Span(LinkPage(Body: Em(Class: fa fa-edit), Class: text-primary h4, Page: editor, PageParams: "open=page,name=#name#")) + } + } + } + } + } + } + } + } + Div(panel-footer clearfix){ + Include(pager) + } + } + } }.Else{ - SetTitle("Pages") - Div(breadcrumb){ - Span(Class: text-muted, Body: "You did not select the application. Viewing resources is not available") - } + SetTitle("Pages") + Div(breadcrumb){ + Span(Class: text-muted, Body: "You did not select the application. 
Viewing resources is not available") + } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(8, 'app_params', 'DBFind(buffer_data, src_buffer).Columns("value->app_id,value->app_name,value->menu_name,value->menu_id,value->count_menu").Where("key=''export'' and member_id=#key_id#").Vars(buffer) - + (9, 'app_params', 'DBFind(buffer_data, src_buffer).Columns("value->app_id").Where("key=''export'' and member_id=#key_id#").Vars(buffer) If(#buffer_value_app_id# > 0){ - DBFind(applications, src_app).Where("id=#buffer_value_app_id#").Limit(1).Vars("app") - - Div(content-wrapper){ - SetTitle("Application parameters": #app_name#) - AddToolButton(Title: "Create", Page: app_params_edit, Icon: icon-plus, PageParams: "app_id=#app_id#,create=create") - - SetVar(pager_table, app_params).(pager_where, "app_id=#buffer_value_app_id#").(pager_page, app_params).(pager_limit, 50) - Include(pager_header) - - SetVar(admin_page, app_params) - Include(admin_link) - - DBFind(app_params, src_appparameters).Limit(#pager_limit#).Order(#sort_name#).Offset(#pager_offset#).Where("app_id=#buffer_value_app_id#") - - Form(panel panel-primary){ - Div(panel-body){ - Div(row){ - ForList(src_appparameters){ - Div(col-md-#width# col-sm-12){ - Div(list-group-item){ - Div(row){ - Div(col-md-4){ - Span(Class: h5 text-bold, Body: "#id#").Style(margin-right: 10px;) - Span(Class: h5, Body: "#name#") - } - Div(col-md-8 text-right){ - Span(LinkPage(Body: Em(Class: fa fa-edit), Class: text-primary h4, Page: app_params_edit, PageParams: "id=#id#")) - } - } - } - } - } - } - } - Div(panel-footer clearfix){ - Include(pager) - } - } - } + DBFind(applications, src_app).Where("id=#buffer_value_app_id#").Vars("application") + + Div(content-wrapper){ + SetTitle("Application parameters": #application_name#) + AddToolButton(Title: "Create", Page: app_params_edit, Icon: icon-plus, PageParams: "application_id=#application_id#,create=create") + + SetVar(pager_table, app_params).(pager_where, "app_id=#buffer_value_app_id#").(pager_page, app_params).(pager_limit, 50) + Include(pager_header) + + SetVar(admin_page, app_params) + Include(admin_link) + + DBFind(app_params, src_appparameters).Limit(#pager_limit#).Order(#sort_name#).Offset(#pager_offset#).Where("app_id=#buffer_value_app_id#") + + Form(panel panel-primary){ + Div(panel-body){ + Div(row){ + ForList(src_appparameters){ + Div(col-md-#width# col-sm-12){ + Div(list-group-item){ + Div(row){ + Div(col-md-4){ + Span(Class: h5 text-bold, Body: "#id#").Style(margin-right: 10px;) + Span(Class: h5, Body: "#name#") + } + Div(col-md-8 text-right){ + Span(LinkPage(Body: Em(Class: fa fa-edit), Class: text-primary h4, Page: app_params_edit, PageParams: "id=#id#")) + } + } + } + } + } + } + } + Div(panel-footer clearfix){ + Include(pager) + } + } + } }.Else{ - SetTitle("Application parameters") - Div(breadcrumb){ - Span(Class: text-muted, Body: "You did not select the application. Viewing resources is not available") - } + SetTitle("Application parameters") + Div(breadcrumb){ + Span(Class: text-muted, Body: "You did not select the application. 
Viewing resources is not available") + } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(9, 'app_params_edit', 'Div(content-wrapper){ - If(#create# == create){ - SetVar(param_name, "New") - }.Else{ - DBFind(app_params, src_params).Where("id = #id#").Limit(1).Vars("param") - } + (10, 'app_params_edit', 'Div(content-wrapper){ + If(#create# == create){ + SetVar(param_name, "New") + }.Else{ + DBFind(app_params, src_params).Where("id=#id#").Vars("param") + } SetTitle("Application parameter") Div(Class: breadcrumb){ @@ -389,105 +533,104 @@ If(#buffer_value_app_id# > 0){ Span(Class: text-muted, Body: #param_name#) } - Form(){ - Div(form-group){ - Label("Name") - If(#create# == create){ - Input(Name: name) - }.Else{ - Input(Name: name, Value: #param_name#, Disabled: "true") - } - } - Div(form-group){ - If(#create# == create){ - Input(Type: textarea, Name: value).Style(height: 500px !important;) - }.Else{ - Input(Type: textarea, Name: value, Value: "#param_value#").Style(height: 500px !important;) - } - } - Div(form-group){ - Label("Change conditions") - If(#create# == create){ - Input(Name: conditions) - }.Else{ - Input(Name: conditions, Value: #param_conditions#) - } - } - Div(form-group){ - If(#create# == create){ - Button(Class: btn btn-primary, Body: "Save", Contract: NewAppParam, Params: "Name=Val(name),Value=Val(value),Conditions=Val(conditions),ApplicationId=#app_id#", Page: app_params) - }.Else{ - Button(Class: btn btn-primary, Body: "Save", Contract: EditAppParam, Params: "Id=#id#,Value=Val(value),Conditions=Val(conditions)", Page: app_params) - } - } - } + Form(){ + Div(form-group){ + Label("Name") + If(#create# == create){ + Input(Name: name) + }.Else{ + Input(Name: name, Value: #param_name#, Disabled: "true") + } + } + Div(form-group){ + If(#create# == create){ + Input(Type: textarea, Name: value).Style(height: 500px !important;) + }.Else{ + Input(Type: textarea, Name: value, Value: "#param_value#").Style(height: 500px !important;) + } + } + Div(form-group){ + Label("Change conditions") + If(#create# == create){ + Input(Name: conditions) + }.Else{ + Input(Name: conditions, Value: #param_conditions#) + } + } + Div(form-group){ + If(#create# == create){ + Button(Class: btn btn-primary, Body: "Save", Contract: @1NewAppParam, Params: "Name=Val(name),Value=Val(value),Conditions=Val(conditions),ApplicationId=#application_id#", Page: app_params) + }.Else{ + Button(Class: btn btn-primary, Body: "Save", Contract: @1EditAppParam, Params: "Id=#id#,Value=Val(value),Conditions=Val(conditions)", Page: app_params) + } + } + } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(10, 'app_tables', 'DBFind(buffer_data, src_buffer).Columns("value->app_id,value->app_name,value->menu_name,value->menu_id,value->count_menu").Where("key=''export'' and member_id=#key_id#").Vars(buffer) - + (11, 'app_tables', 'DBFind(buffer_data, src_buffer).Columns("value->app_id").Where("key=''export'' and member_id=#key_id#").Vars(buffer) If(#buffer_value_app_id# > 0){ - DBFind(applications, src_app).Where("id=#buffer_value_app_id#").Limit(1).Vars("app") - - Div(content-wrapper){ - SetTitle("Tables": #app_name#) - AddToolButton(Title: "Create", Page: table_create, Icon: icon-plus, PageParams: "app_id=#app_id#") - - SetVar(pager_table, tables).(pager_where, "app_id=#buffer_value_app_id#").(pager_page, app_tables).(pager_limit, 50) - Include(pager_header) - - SetVar(admin_page, app_tables) - Include(admin_link) - - DBFind(tables, 
src_tables).Limit(#pager_limit#).Order(#sort_name#).Offset(#pager_offset#).Where("app_id=#buffer_value_app_id#") - - Form(panel panel-primary){ - Div(panel-body){ - Div(row){ - ForList(src_tables){ - Div(col-md-#width# col-sm-12){ - Div(list-group-item){ - Div(row){ - Div(col-md-4){ - Span(Class: h5 text-bold, Body: "#id#").Style(margin-right: 10px;) - LinkPage(Page: table_view, Class: text-primary h5, Body: "#name#", PageParams: "table_name=#name#") - } - Div(col-md-8){ - Div(pull-right){ - Span(LinkPage(Body: Em(Class: fa fa-edit), Class: text-primary h4, Page: table_edit, PageParams: "tabl_id=#id#")) - } - Div(pull-right){ - DBFind(#name#).Columns("id").Count(countvar) - Span(Class: h5 text-muted, Body: #countvar#).Style(margin-right: 50px;) - } - } - } - } - } - } - } - } - Div(panel-footer clearfix){ - Include(pager) - } - } - } + DBFind(applications, src_app).Where("id=#buffer_value_app_id#").Vars("application") + + Div(content-wrapper){ + SetTitle("Tables": #application_name#) + AddToolButton(Title: "Create", Page: table_create, Icon: icon-plus, PageParams: "application_id=#application_id#") + + SetVar(pager_table, tables).(pager_where, "app_id=#buffer_value_app_id#").(pager_page, app_tables).(pager_limit, 50) + Include(pager_header) + + SetVar(admin_page, app_tables) + Include(admin_link) + + DBFind(tables, src_tables).Limit(#pager_limit#).Order(#sort_name#).Offset(#pager_offset#).Where("app_id=#buffer_value_app_id#") + + Form(panel panel-primary){ + Div(panel-body){ + Div(row){ + ForList(src_tables){ + Div(col-md-#width# col-sm-12){ + Div(list-group-item){ + Div(row){ + Div(col-md-4){ + Span(Class: h5 text-bold, Body: "#id#").Style(margin-right: 10px;) + LinkPage(Page: table_view, Class: text-primary h5, Body: "#name#", PageParams: "tabl_id=#id#") + } + Div(col-md-8){ + Div(pull-right){ + Span(LinkPage(Body: Em(Class: fa fa-edit), Class: text-primary h4, Page: table_edit, PageParams: "tabl_id=#id#")) + } + Div(pull-right){ + DBFind(#name#).Columns("id").Count(countvar) + Span(Class: h5 text-muted, Body: #countvar#).Style(margin-right: 50px;) + } + } + } + } + } + } + } + } + Div(panel-footer clearfix){ + Include(pager) + } + } + } }.Else{ - SetTitle("Tables") - Div(breadcrumb){ - Span(Class: text-muted, Body: "You did not select the application. Viewing resources is not available") - } + SetTitle("Tables") + Div(breadcrumb){ + Span(Class: text-muted, Body: "You did not select the application. 
Viewing resources is not available") + } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(11, 'app_upload_binary', 'Div(content-wrapper){ - SetTitle("Binary data") - Div(breadcrumb){ - LinkPage("Binary data", app_binary) - Span(/).Style(margin-right: 10px; margin-left: 10px;) + (12, 'app_upload_binary', 'Div(content-wrapper){ + SetTitle("Binary data") + Div(breadcrumb){ + LinkPage("Binary data", app_binary) + Span(/).Style(margin-right: 10px; margin-left: 10px;) If(#id# > 0){ Span("Edit", text-muted) DBFind(binaries).Columns(name).Where(id = #id#).Vars(binary) }.Else{ Span("Upload", text-muted) } - } + } Form(){ Div(form-group){ @@ -507,100 +650,100 @@ If(#buffer_value_app_id# > 0){ Input(Name: databin, Type: file) } Div(form-group text-left){ - Button(Body: "Upload", Contract: UploadBinary, Class: btn btn-primary, Params: "Name=Val(name),ApplicationId=#app_id#,Data=Val(databin),MemberID=#key_id#", Page: app_binary) + Button(Body: "Upload", Contract: @1UploadBinary, Class: btn btn-primary, Params: "Name=Val(name),ApplicationId=#application_id#,Data=Val(databin),MemberID=#key_id#", Page: app_binary) } - } + } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(12, 'apps_list', 'Div(fullscreen){ - If(#deleted# == deleted){ - SetTitle("Inactive applications") + (13, 'apps_list', 'Div(fullscreen){ + If(#deleted# == deleted){ + SetTitle("Inactive applications") Div(breadcrumb){ LinkPage("Applications", apps_list) Span(/).Style(margin-right: 10px; margin-left: 10px;) Span(Class: text-muted, Body: "Inactive applications") } - DBFind(applications, src_applications).Where("deleted=1").Order("id").Count(countvar).Custom(restore_btn){ - Button(Class: btn btn-link, Page: apps_list, Contract: DelApplication, Params: "ApplicationId=#id#", Body: "Restore") - } - If(#countvar# > 0) { - Table(Source: src_applications, Columns: "ID=id,Name=name,Conditions=conditions,=restore_btn").Style( - tbody > tr:nth-of-type(odd) { - background-color: #fafbfc; - } - tbody > tr > td { - word-break: break-all; - font-weight: 400; - font-size: 13px; - color: #666; - border-top: 1px solid #eee; - vertical-align: middle; - } - tr > *:first-child { - padding-left:20px; - width: 80px; - } - tr > *:last-child { - padding-right:80px; - text-align:right; - width: 100px; - } - thead { - background-color: #eee; - } - ) - }.Else{ - Div(content-wrapper){ - Span(Class: text-muted, Body: "You don''t have any inactive applications") - } - } - }.Else{ - SetTitle("Applications") + DBFind(applications, src_applications).Where("deleted=1").Order("id").Count(countvar).Custom(restore_btn){ + Button(Class: btn btn-link, Page: apps_list, Contract: @1DelApplication, Params: "ApplicationId=#id#", Body: "Restore") + } + If(#countvar# > 0) { + Table(Source: src_applications, Columns: "ID=id,Name=name,Conditions=conditions,=restore_btn").Style( + tbody > tr:nth-of-type(odd) { + background-color: #fafbfc; + } + tbody > tr > td { + word-break: break-all; + font-weight: 400; + font-size: 13px; + color: #666; + border-top: 1px solid #eee; + vertical-align: middle; + } + tr > *:first-child { + padding-left:20px; + width: 80px; + } + tr > *:last-child { + padding-right:80px; + text-align:right; + width: 100px; + } + thead { + background-color: #eee; + } + ) + }.Else{ + Div(content-wrapper){ + Span(Class: text-muted, Body: "You don''t have any inactive applications") + } + } + }.Else{ + SetTitle("Applications") Div(breadcrumb){ Span(Class: text-muted, Body: "This section is used to select installed applications") } - AddToolButton(Title: "Inactive 
apps", Page: apps_list, Icon: icon-close, PageParams:"deleted=deleted") - AddToolButton(Title: "Create", Page: app_edit, Icon: icon-plus) + AddToolButton(Title: "Inactive apps", Page: apps_list, Icon: icon-close, PageParams:"deleted=deleted") + AddToolButton(Title: "Create", Page: app_edit, Icon: icon-plus) - DBFind(buffer_data, src_buffer).Columns("value->app_id,value->app_name,value->menu_name,value->menu_id,value->count_menu").Where("key=''export'' and member_id=#key_id#").Vars(buffer) - DBFind(applications, src_applications).Where("deleted=0").Order("id").Custom(custom_check){ - If(#id#==#buffer_value_app_id#){ - Span(Em(Class: fa fa-check)).Style(margin-left:30px;) - }.Else{ - Button(Class: btn btn-link, Contract: Export_NewApp, Params: "app_id=#id#", Page: apps_list, Body: "select") - } - }.Custom(custom_actions){ - Button(Class: btn btn-link, Body: Em(Class: fa fa-edit), Page: app_edit, PageParams: "id=#id#") - } - - Table(Source: src_applications, Columns: "ID=id,Name=name,Conditions=conditions,Selected=custom_check,=custom_actions").Style( - tbody > tr:nth-of-type(odd) { - background-color: #fafbfc; - } - tbody > tr > td { - word-break: break-all; - font-weight: 400; - font-size: 13px; - color: #666; - border-top: 1px solid #eee; - vertical-align: middle; - } - tr > *:first-child { - padding-left:20px; - width: 80px; - } - tr > *:last-child { - padding-right:15px; - text-align:right; - width: 100px; - } - thead { - background-color: #eee; - } - ) - } + DBFind(buffer_data, src_buffer).Columns("value->app_id,value->app_name,value->menu_name,value->menu_id,value->count_menu").Where("key=''export'' and member_id=#key_id#").Vars(buffer) + DBFind(applications, src_applications).Where("deleted=0").Order("id").Custom(custom_check){ + If(#id#==#buffer_value_app_id#){ + Span(Em(Class: fa fa-check)).Style(margin-left:30px;) + }.Else{ + Button(Class: btn btn-link, Contract: @1ExportNewApp, Params: "ApplicationId=#id#", Page: apps_list, Body: "select") + } + }.Custom(custom_actions){ + Button(Class: btn btn-link, Body: Em(Class: fa fa-edit), Page: app_edit, PageParams: "id=#id#") + } + + Table(Source: src_applications, Columns: "ID=id,Name=name,Conditions=conditions,Selected=custom_check,=custom_actions").Style( + tbody > tr:nth-of-type(odd) { + background-color: #fafbfc; + } + tbody > tr > td { + word-break: break-all; + font-weight: 400; + font-size: 13px; + color: #666; + border-top: 1px solid #eee; + vertical-align: middle; + } + tr > *:first-child { + padding-left:20px; + width: 80px; + } + tr > *:last-child { + padding-right:15px; + text-align:right; + width: 100px; + } + thead { + background-color: #eee; + } + ) + } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(13, 'column_add', 'Div(content-wrapper){ + (14, 'column_add', 'Div(content-wrapper){ SetTitle("Tables") Div(breadcrumb){ Div(){ @@ -638,11 +781,11 @@ If(#buffer_value_app_id# > 0){ } } Div(panel-footer clearfix){ - Button(Body: "Add column", Contract: NewColumn, Class: btn btn-primary, Page: table_edit, PageParams: "tabl_id=#tabl_id#", Params: "TableName=#next_table_name#,Name=Val(ColumnName),Type=Val(Coltype),Permissions=Val(ColumnUp)") + Button(Body: "Add column", Contract: @1NewColumn, Class: btn btn-primary, Page: table_edit, PageParams: "tabl_id=#tabl_id#", Params: "TableName=#next_table_name#,Name=Val(ColumnName),Type=Val(Coltype),Permissions=Val(ColumnUp)") } } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(14, 'column_edit', 'Div(content-wrapper){ + (15, 'column_edit', 'Div(content-wrapper){ SetTitle("Edit 
column") Div(breadcrumb){ Div(){ @@ -688,9 +831,6 @@ If(#buffer_value_app_id# > 0){ If(#col_type# == double){ SetVar(input_type, "Double") } - If(#col_type# == character){ - SetVar(input_type, "Character") - } If(#col_type# == json){ SetVar(input_type, "JSON") } @@ -710,285 +850,419 @@ If(#buffer_value_app_id# > 0){ } } Div(panel-footer clearfix){ - Button(Body: "Save", Contract: EditColumn, Class: btn btn-primary, Page: table_edit, PageParams: "tabl_id=#tabl_id#", Params: "TableName=#pre_name#,Name=Val(ColumnName),Type=Val(Coltype),Permissions=Val(ColumnUp)") + Button(Body: "Save", Contract: @1EditColumn, Class: btn btn-primary, Page: table_edit, PageParams: "tabl_id=#tabl_id#", Params: "TableName=#pre_name#,Name=Val(ColumnName),Type=Val(Coltype),Permissions=Val(ColumnUp)") + } + } +} +', 'admin_menu', 'ContractAccess("@1EditPage")'), + (16, 'confirmations', 'Div(fullscreen){ + SetTitle(Confirmations) + AddToolButton(Title: "Create", Page: confirmations_new, Icon: icon-plus) + Div(breadcrumb){ + Span(Class: text-muted, Body: "This section is used to manage contracts with confirmation") + } + + DBFind(signatures, src_sign).Limit(250).Order("id").Columns("id,name,value->params,value->title,conditions").Custom(custom_title){ + Span(#value.title#) + }.Custom(custom_params){ + Span(#value.params#) + }.Custom(action){ + Span(LinkPage(Body: Em(Class: fa fa-edit), Class: text-primary h4, Page: confirmations_edit, PageParams: "sign_id=#id#")) + } + + Table(Source:src_sign, Columns:"Contract=name,Title=custom_title,Params=custom_params,Conditions=conditions,=action").Style( + tbody > tr:nth-of-type(odd) { + background-color: #fafbfc; + } + tbody > tr > td { + word-break: break-all; + font-weight: 400; + font-size: 13px; + color: #666; + border-top: 1px solid #eee; + vertical-align: middle; + } + tr > *:first-child { + padding-left:20px; + } + tr > *:last-child { + padding-right:30px; + text-align:right; + width: 100px; + } + thead { + background-color: #eee; + }) +}', 'admin_menu', 'ContractAccess("@1EditPage")'), + (17, 'confirmations_edit', 'Div(content-wrapper){ + SetTitle("Confirmations") + Div(Class: breadcrumb){ + LinkPage("Confirmations", confirmations) + Span(/).Style(margin-right: 10px; margin-left: 10px;) + Span(Class: text-muted, Body: "Edit") + } + + Form(){ + DBFind(signatures, src_signatures).Columns("name,conditions,value->title,value->params").Vars(pre).WhereId(#sign_id#) + Div(form-group){ + Label("Contract name") + Input(Name: Name, Value: #pre_name#, Disabled: 1) + } + Div(form-group){ + Label("Title of confirmation") + Input(Name: Title, Value: #pre_value_title#) + } + Div(form-group){ + Label("Parameters") + Input(Name: Parameter, Value: #pre_value_params#) + } + Div(form-group){ + Label("Conditions") + Input(Name: Conditions, Value: #pre_conditions#) + } + Div(form-group){ + Button(Body: "Save", Class: btn btn-primary, Contract: @1EditSignJoint, Page: confirmations, Params: "Id=#sign_id#") } } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(15, 'export_download', 'Div(fullscreen){ - SetTitle("Export") - Div(breadcrumb){ - Span(Class: text-muted, Body: "Payload was formed. 
You can download it now") - } - - DBFind(binaries, src_binaries).Where("name=''export'' and member_id=#key_id# and app_id=1").Custom(app_name){ - DBFind(Name: buffer_data, Source: src_buffer).Columns("value->app_name").Where("key=''export'' and member_id=#key_id#").Vars(buffer) - Span(#buffer_value_app_name#) - } - - Table(Source: src_binaries, "Applications=app_name,=data").Style( - tbody > tr:nth-of-type(odd) { - background-color: #fafbfc; - } - tbody > tr > td { - word-break: break-all; - font-weight: 400; - font-size: 13px; - color: #666; - border-top: 1px solid #eee; - vertical-align: middle; - } - tr > *:first-child { - padding-left:20px; - width: 100px; - } - tr > *:last-child { - padding-right:20px; - text-align:right; - } - thead { - background-color: #eee; - } - ) + (18, 'confirmations_new', 'Div(content-wrapper){ + SetTitle("Confirmations") + Div(Class: breadcrumb){ + LinkPage("Confirmations", confirmations) + Span(/).Style(margin-right: 10px; margin-left: 10px;) + Span(Class: text-muted, Body: "Create") + } + + Form(panel panel-default){ + Div(panel-body){ + Div(form-group){ + Label("Contract name") + Input(Name: Name, Placeholder: "Name") + } + Div(form-group){ + Label("Title of confirmation") + Input(Name: Title, Placeholder: "Title") + } + Div(form-group){ + Label("Conditions") + Input(Name: Conditions, Placeholder: "Conditions") + } + Div(row){ + Div(col-md-4){ + Label(Class: text-bold, Body: "Parameter") + } + Div(col-md-7){ + Label(Class: text-bold, Body: "Value") + } + Div(col-md-1){ + Label(Class: text-bold, Body: "Action") + } + } + If(GetVar(cs)==""){ + SetVar(cs, Calculate( Exp: 0, Type: int)) + } + If(#del# == 1){ + SetVar(cs, Calculate( Exp: #cs# - 1, Type: int)) + }.Else{ + SetVar(cs, Calculate( Exp: #cs# + 1, Type: int)) + } + Range(params_range, 0, #cs#) + ForList(Source: params_range){ + Div(row){ + Div(col-md-4 mt-sm){ + Input(Name:ParamArr) + } + Div(col-md-7 mt-sm){ + Input(Name:ValueArr) + } + Div(col-md-1 mt-sm){ + If(And(#cs#==#params_range_index#,#cs#>1)){ + Button(Body: Em(Class: fa fa-trash), Class: btn btn-default, PageParams: "cs=#cs#,del=1,application_id=#application_id#", Page: confirmations_new) + } + } + } + } + Div(row){ + Div(col-md-12 mt-lg){ + LinkPage(Body: "Add parameter", Page: confirmations_new, PageParams:"cs=#cs#,application_id=#application_id#") + } + } + } + Div(panel-footer){ + Button(Body: "Save", Class: btn btn-primary, Contract: @1NewSignJoint, Page: confirmations) + } + } +}', 'admin_menu', 'ContractAccess("@1EditPage")'), + (19, 'export_download', 'Div(fullscreen){ + SetTitle("Export") + Div(breadcrumb){ + Span(Class: text-muted, Body: "Payload was formed. 
You can download it now") + } + + DBFind(binaries, src_binaries).Where("name=''export'' and member_id=#key_id# and app_id=1").Custom(app_name){ + DBFind(Name: buffer_data, Source: src_buffer).Columns("value->app_name").Where("key=''export'' and member_id=#key_id#").Vars(buffer) + Span(#buffer_value_app_name#) + } + + Table(Source: src_binaries, "Applications=app_name,=data").Style( + tbody > tr:nth-of-type(odd) { + background-color: #fafbfc; + } + tbody > tr > td { + word-break: break-all; + font-weight: 400; + font-size: 13px; + color: #666; + border-top: 1px solid #eee; + vertical-align: middle; + } + tr > *:first-child { + padding-left:20px; + width: 100px; + } + tr > *:last-child { + padding-right:20px; + text-align:right; + } + thead { + background-color: #eee; + } + ) }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(16, 'export_resources', 'Div(content-wrapper){ - SetTitle("Export") - Div(breadcrumb){ - Span(Class: text-muted, Body: "Select the application which do you want to export and proceed to the payload generation process.") - } - - Include(export_link) - DBFind(buffer_data, src_buffer).Columns("value->app_id,value->app_name,value->menu_name,value->menu_id,value->count_menu").Where("key=''export'' and member_id=#key_id#").Vars(buffer) - - If(#buffer_value_app_id# > 0){ - If(#res_type#=="pages"){ - DBFind(pages, src).Custom(cbox){ - Input(Name: cbox, Type: checkbox, Value: true, Disabled: 1) - }.Where("app_id = #buffer_value_app_id#").Order("id") - } - If(#res_type#=="blocks"){ - DBFind(blocks, src).Custom(cbox){ - Input(Name: cbox, Type: checkbox, Value: true, Disabled: 1) - }.Where("app_id = #buffer_value_app_id#").Order("id") - } - If(#res_type#=="menu"){ - DBFind(menu, src).Custom(cbox){ - Input(Name: cbox, Type: checkbox, Value: true, Disabled: 1) - }.Where("id in (#buffer_value_menu_id#)").Order("id") - } - If(#res_type#=="parameters"){ - DBFind(app_params, src).Custom(cbox){ - Input(Name: cbox, Type: checkbox, Value: true, Disabled: 1) - }.Where("app_id = #buffer_value_app_id#").Order("id") - } - If(#res_type#=="languages"){ - DBFind(languages, src).Custom(cbox){ - Input(Name: cbox, Type: checkbox, Value: true, Disabled: 1) - }.Where("app_id = #buffer_value_app_id#").Order("id") - } - If(#res_type#=="contracts"){ - DBFind(contracts, src).Custom(cbox){ - Input(Name: cbox, Type: checkbox, Value: true, Disabled: 1) - }.Where("app_id = #buffer_value_app_id#").Order("id") - } - If(#res_type#=="tables"){ - DBFind(tables, src).Custom(cbox){ - Input(Name: cbox, Type: checkbox, Value: true, Disabled: 1) - }.Where("app_id = #buffer_value_app_id#").Order("id") - } - } - - Div(row){ - Div(col-md-9 col-md-offset-0){ - Table(src, "ID=id,Name=name,=cbox").Style( - tbody > tr:nth-of-type(odd) { - background-color: #fafbfc; - } - tbody > tr > td { - word-break: break-all; - padding: 8px 20px !important; - font-weight: 400; - font-size: 13px; - color: #666; - border-top: 1px solid #eee; - vertical-align: middle; - } - tr > *:first-child { - padding-left:20px; - width: 100px; - } - tr > *:last-child { - text-align:right; - padding-right:20px; - width: 50px; - } - thead { - background-color: #eee; - } - ) - } - Div(col-md-3 col-md-offset-0){ - Include(export_info) - } - } + (20, 'export_resources', 'Div(content-wrapper){ + SetTitle("Export") + Div(breadcrumb){ + Span(Class: text-muted, Body: "Select the application which do you want to export and proceed to the payload generation process.") + } + + Include(export_link) + DBFind(buffer_data, 
src_buffer).Columns("value->app_id,value->app_name,value->menu_name,value->menu_id,value->count_menu").Where("key=''export'' and member_id=#key_id#").Vars(buffer) + + If(#buffer_value_app_id# > 0){ + If(#res_type#=="pages"){ + DBFind(pages, src).Custom(cbox){ + Input(Name: cbox, Type: checkbox, Value: true, Disabled: 1) + }.Where("app_id = #buffer_value_app_id#").Order("id") + } + If(#res_type#=="blocks"){ + DBFind(blocks, src).Custom(cbox){ + Input(Name: cbox, Type: checkbox, Value: true, Disabled: 1) + }.Where("app_id = #buffer_value_app_id#").Order("id") + } + If(#res_type#=="menu"){ + DBFind(menu, src).Custom(cbox){ + Input(Name: cbox, Type: checkbox, Value: true, Disabled: 1) + }.Where("id in (#buffer_value_menu_id#)").Order("id") + } + If(#res_type#=="parameters"){ + DBFind(app_params, src).Custom(cbox){ + Input(Name: cbox, Type: checkbox, Value: true, Disabled: 1) + }.Where("app_id = #buffer_value_app_id#").Order("id") + } + If(#res_type#=="languages"){ + DBFind(languages, src).Custom(cbox){ + Input(Name: cbox, Type: checkbox, Value: true, Disabled: 1) + }.Where("app_id = #buffer_value_app_id#").Order("id") + } + If(#res_type#=="contracts"){ + DBFind(contracts, src).Custom(cbox){ + Input(Name: cbox, Type: checkbox, Value: true, Disabled: 1) + }.Where("app_id = #buffer_value_app_id#").Order("id") + } + If(#res_type#=="tables"){ + DBFind(tables, src).Custom(cbox){ + Input(Name: cbox, Type: checkbox, Value: true, Disabled: 1) + }.Where("app_id = #buffer_value_app_id#").Order("id") + } + } + + Div(row){ + Div(col-md-9 col-md-offset-0){ + Table(src, "ID=id,Name=name,=cbox").Style( + tbody > tr:nth-of-type(odd) { + background-color: #fafbfc; + } + tbody > tr > td { + word-break: break-all; + padding: 8px 20px !important; + font-weight: 400; + font-size: 13px; + color: #666; + border-top: 1px solid #eee; + vertical-align: middle; + } + tr > *:first-child { + padding-left:20px; + width: 100px; + } + tr > *:last-child { + text-align:right; + padding-right:20px; + width: 50px; + } + thead { + background-color: #eee; + } + ) + } + Div(col-md-3 col-md-offset-0){ + Include(export_info) + } + } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(17, 'import_app', 'Div(content-wrapper){ - DBFind(buffer_data, src_buffer).Columns("id,value->name,value->data").Where("key=''import'' and member_id=#key_id#").Vars(prefix) - DBFind(buffer_data, src_buffer).Columns("value->app_name,value->pages,value->pages_count,value->blocks,value->blocks_count,value->menu,value->menu_count,value->parameters,value->parameters_count,value->languages,value->languages_count,value->contracts,value->contracts_count,value->tables,value->tables_count").Where("key=''import_info'' and member_id=#key_id#").Vars(info) - - SetTitle("Import - #info_value_app_name#") - Data(data_info, "name,count,info"){ - Pages,"#info_value_pages_count#","#info_value_pages#" - Blocks,"#info_value_blocks_count#","#info_value_blocks#" - Menu,"#info_value_menu_count#","#info_value_menu#" - Parameters,"#info_value_parameters_count#","#info_value_parameters#" - Language resources,"#info_value_languages_count#","#info_value_languages#" - Contracts,"#info_value_contracts_count#","#info_value_contracts#" - Tables,"#info_value_tables_count#","#info_value_tables#" - } - Div(breadcrumb){ - Span(Class: text-muted, Body: "Your data that you can import") - } - - Div(panel panel-primary){ - ForList(data_info){ - Div(list-group-item){ - Div(row){ - Div(col-md-10 mc-sm text-left){ - Span(Class: text-bold, Body: "#name#") - } - Div(col-md-2 mc-sm text-right){ - 
If(#count# > 0){ - Span(Class: text-bold, Body: "(#count#)") - }.Else{ - Span(Class: text-muted, Body: "(0)") - } - } - } - Div(row){ - Div(col-md-12 mc-sm text-left){ - If(#count# > 0){ - Span(Class: h6, Body: "#info#") - }.Else{ - Span(Class: text-muted h6, Body: "Nothing selected") - } - } - } - } - } - If(#prefix_id# > 0){ - Div(list-group-item text-right){ - Button(Body: "Import", Class: btn btn-primary, Page: apps_list).CompositeContract("Import", "#prefix_value_data#") - } - } - } + (21, 'import_app', 'Div(content-wrapper){ + DBFind(buffer_data, src_buffer).Columns("id,value->name,value->data").Where("key=''import'' and member_id=#key_id#").Vars(hash00001) + DBFind(buffer_data, src_buffer).Columns("value->app_name,value->pages,value->pages_count,value->blocks,value->blocks_count,value->menu,value->menu_count,value->parameters,value->parameters_count,value->languages,value->languages_count,value->contracts,value->contracts_count,value->tables,value->tables_count").Where("key=''import_info'' and member_id=#key_id#").Vars(hash00002) + + SetTitle("Import - #hash00002_value_app_name#") + Data(data_info, "hash00003_name,hash00003_count,hash00003_info"){ + Pages,"#hash00002_value_pages_count#","#hash00002_value_pages#" + Blocks,"#hash00002_value_blocks_count#","#hash00002_value_blocks#" + Menu,"#hash00002_value_menu_count#","#hash00002_value_menu#" + Parameters,"#hash00002_value_parameters_count#","#hash00002_value_parameters#" + Language resources,"#hash00002_value_languages_count#","#hash00002_value_languages#" + Contracts,"#hash00002_value_contracts_count#","#hash00002_value_contracts#" + Tables,"#hash00002_value_tables_count#","#hash00002_value_tables#" + } + Div(breadcrumb){ + Span(Class: text-muted, Body: "Your data that you can import") + } + + Div(panel panel-primary){ + ForList(data_info){ + Div(list-group-item){ + Div(row){ + Div(col-md-10 mc-sm text-left){ + Span(Class: text-bold, Body: "#hash00003_name#") + } + Div(col-md-2 mc-sm text-right){ + If(#hash00003_count# > 0){ + Span(Class: text-bold, Body: "(#hash00003_count#)") + }.Else{ + Span(Class: text-muted, Body: "(0)") + } + } + } + Div(row){ + Div(col-md-12 mc-sm text-left){ + If(#hash00003_count# > 0){ + Span(Class: h6, Body: "#hash00003_info#") + }.Else{ + Span(Class: text-muted h6, Body: "Nothing selected") + } + } + } + } + } + If(#hash00001_id# > 0){ + Div(list-group-item text-right){ + Button(Body: "Import", Class: btn btn-primary, Page: apps_list).CompositeContract(@1Import, "#hash00001_value_data#") + } + } + } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(18, 'import_upload', 'Div(content-wrapper){ - SetTitle("Import") - Div(breadcrumb){ - Span(Class: text-muted, Body: "Select payload that you want to import") - } - Form(panel panel-primary){ - Div(list-group-item){ - Input(Name: input_file, Type: file) - } - Div(list-group-item text-right){ - Button(Body: "Load", Class: btn btn-primary, Contract: Import_Upload, Page: import_app) - } - } + (22, 'import_upload', 'Div(content-wrapper){ + SetTitle("Import") + Div(breadcrumb){ + Span(Class: text-muted, Body: "Select payload that you want to import") + } + Form(panel panel-primary){ + Div(list-group-item){ + Input(Name: input_file, Type: file) + } + Div(list-group-item text-right){ + Button(Body: "Load", Class: btn btn-primary, Contract: @1ImportUpload, Page: import_app) + } + } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(19, 'langres_edit', 'Div(content-wrapper){ + (23, 'langres_add', 'If(GetVar(application_id)){}.Else{ + 
DBFind(buffer_data).Columns("value->app_id").Where("key=''export'' and member_id=#key_id#").Vars(buffer) + If(#buffer_value_app_id#>0){ + SetVar(application_id,#buffer_value_app_id#) + }.Else{ + SetVar(application_id,1) + } +} +If(GetVar(name)){}.Else{ + SetVar(name,) +} +Div(content-wrapper){ SetTitle("Language resources") Div(Class: breadcrumb){ LinkPage("Language resources", app_langres) Span(/).Style(margin-right: 10px; margin-left: 10px;) - Span(Class: text-muted, Body: "Edit") + Span(Class: text-muted, Body: "Create") } - + Form(panel panel-default){ Div(panel-body){ - DBFind(languages, src_leng).Vars(pre).WhereId(#lang_id#) Div(row){ Div(col-md-12){ Label("Name") - Input(Name: LangName, Disabled: "true", Value: #pre_name#) + Input(Name:Name, Value:#name#) } } - Div(row){ + Div(row text-muted){ Div(col-md-1 mt-lg){ - Label(Class: text-muted, Body: "Locale") + Label(){Locale} } Div(col-md-10 mt-lg){ - Label(Class: text-muted, Body: "Value") + Label(){Value} } Div(col-md-1 mt-lg){ - Label(Class: text-muted, Body: "Action") + Label(){Action} } } - SetVar(json,#pre_res#) - JsonToSource(pv, #json#) - ForList(Source: pv){ - Div(row){ - Div(col-md-1 mt-sm){ - Input(Name: idshare, Value: #key#) - } - Div(col-md-10 mt-sm){ - Input(Name: share, Value: #value#) - } - Div(col-md-1 mt-sm){ - } - } + If(GetVar(cs)==""){ + SetVar(cs,0) } If(#del# == 1){ - SetVar(next_count, Calculate( Exp: #count_sec# - 1, Type: int)) + SetVar(cs,Calculate(#cs# - 1)) }.Else{ - If(GetVar(count)==""){ - SetVar(count, 0) - SetVar(next_count, Calculate( Exp: #count#, Type: int)) - }.Else{ - SetVar(next_count, Calculate( Exp: #count_sec# + 1, Type: int)) - } + SetVar(cs,Calculate(#cs# + 1)) } - Range(params_range, 0, #next_count#) + Range(params_range, 0, #cs#) ForList(Source: params_range){ - Div(row){ - Div(col-md-1 mt-sm){ - Input(Name:idshare) - } - Div(col-md-10 mt-sm){ - Input(Name:share) + Div(row mt-sm){ + Div(col-md-1){ + Input(Name:LocaleArr) + }.Style(input {padding: 6px;text-align:center;}) + Div(col-md-10){ + Input(Name:ValueArr) } - Div(col-md-1 mt-sm){ - If(And(#next_count# == #params_range_index#, #next_count# > 0)){ - Button(Em(Class: fa fa-trash), Class: btn btn-default, PageParams: "lang_id=#lang_id#,count_sec=#next_count#,count=#count#,del=1", Page:langres_edit) + Div(col-md-1){ + If(And(#cs#==#params_range_index#,#cs#>1)){ + Button(Class:fa fa-trash btn btn-default, PageParams: "cs=#cs#,del=1,application_id=#application_id#", Page: langres_add) } } } } Div(row){ Div(col-md-12 mt-lg){ - LinkPage(Body: "Add localization", Page: langres_edit, PageParams: "lang_id=#lang_id#,count_sec=#next_count#,count=#count#") - } - } + LinkPage(Body: "Add localization", Page: langres_add, PageParams:"cs=#cs#,application_id=#application_id#") + } + } } Div(panel-footer){ - Button(Body: "Save", Class: btn btn-primary, Contract: @1EditLang, Params: "Value=Val(share),IdLanguage=Val(idshare),Id=#lang_id#", Page: app_langres) + Button(Body: "Save", Class: btn btn-primary, Contract: @1NewLangJoint, Page: app_langres, Params: "ApplicationId=#application_id#") } } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(20, 'langres_add', 'Div(content-wrapper){ + (24, 'langres_edit', 'Div(content-wrapper){ SetTitle("Language resources") Div(Class: breadcrumb){ LinkPage("Language resources", app_langres) Span(/).Style(margin-right: 10px; margin-left: 10px;) - Span(Class: text-muted, Body: "Create") + Span(Class: text-muted, Body: "Edit") } - + Form(panel panel-default){ Div(panel-body){ + DBFind(languages, 
src_leng).Vars(pre).WhereId(#lang_id#) Div(row){ Div(col-md-12){ Label("Name") - Input(Name: LangName) + Input(Name: LangName, Disabled: "true", Value: #pre_name#) } } Div(row){ @@ -1002,81 +1276,104 @@ If(#buffer_value_app_id# > 0){ Label(Class: text-muted, Body: "Action") } } - If(#del# == 1){ - SetVar(next_count, Calculate( Exp: #count_sec# - 1, Type: int)) - }.Else{ - If(GetVar(count)==""){ - SetVar(count, 0) - SetVar(next_count, Calculate( Exp: #count# + 1, Type: int)) + + JsonToSource(pv, #pre_res#) + ForList(Source: pv, Index:s_ind){ + SetVar(max_sec, #s_ind#) + } + If(GetVar(cs)==""){ + SetVar(cs, #max_sec#) + } + If(Or(#del_flag#==1,#del_data#>0)){ + SetVar(cs, Calculate(Exp:#cs#-1, Type: int)) + } + + SetVar(next_sec, Calculate(Exp:#cs#+1, Type: int)) + SetVar(data_sec, Calculate(Exp:#cs#-#max_sec#, Type: int)) + + ForList(Source: pv, Index:s_ind){ + If(#s_ind#>#cs#){ }.Else{ - SetVar(next_count, Calculate( Exp: #count_sec# + 1, Type: int)) + Div(row){ + Div(col-md-1 mt-sm){ + Input(Name: LocaleArr, Value: #key#) + }.Style(input {padding: 6px;text-align:center;}) + Div(col-md-10 mt-sm){ + Input(Name: ValueArr, Value: #value#) + } + Div(col-md-1 mt-sm){ + If(And(#s_ind#>1,#s_ind#==#cs#)){ + Button(Body: Em(Class: fa fa-trash), Class: btn btn-default, PageParams: "lang_id=#lang_id#,cs=#cs#,del_data=#s_ind#", Page: langres_edit) + } + } + } } } - Range(params_range, 0, #next_count#) - ForList(Source: params_range){ + Range(params_range, #max_sec#, #cs#) + ForList(Source: params_range, Index:s_ind){ Div(row){ Div(col-md-1 mt-sm){ - Input(Name:idshare) - } + Input(Name:LocaleArr) + }.Style(input {padding: 6px;text-align:center;}) Div(col-md-10 mt-sm){ - Input(Name:share) + Input(Name:ValueArr) } Div(col-md-1 mt-sm){ - If(And(#next_count# == #params_range_index#, #next_count# > 1)){ - Button(Body: Em(Class: fa fa-trash), Class: btn btn-default, PageParams: "count_sec=#next_count#,count=#count#,del=1,app_id=#app_id#", Page: langres_add) + If(#s_ind#==#data_sec#){ + Button(Body: Em(Class: fa fa-trash), Class: btn btn-default, PageParams: "lang_id=#lang_id#,cs=#cs#,del_flag=1", Page: langres_edit) } } } } Div(row){ Div(col-md-12 mt-lg){ - LinkPage(Body: "Add localization", Page: langres_add, PageParams:"count_sec=#next_count#,count=#count#,app_id=#app_id#") + LinkPage(Body: "Add localization", Page: langres_edit, PageParams: "lang_id=#lang_id#,cs=#next_sec#") } } } Div(panel-footer){ - Button(Body: "Save", Class: btn btn-primary, Contract:@1NewLang, Page: app_langres, Params: "ApplicationId=#app_id#,Name=Val(LangName),Value=Val(share),IdLanguage=Val(idshare)") + Button(Body: "Save", Class: btn btn-primary, Contract: @1EditLangJoint, Params: "Id=#lang_id#", Page: app_langres) } } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(21, 'menus_list', 'Div(fullscreen){ - SetTitle("Menu") - AddToolButton(Title: "Create", Page: editor, Icon: icon-plus, PageParams: "create=menu,appId=0") + (25, 'menus_list', 'Div(fullscreen){ + SetTitle("Menu") + AddToolButton(Title: "Create", Page: editor, Icon: icon-plus, PageParams: "create=menu,appId=0") Div(breadcrumb){ Span(Class: text-muted, Body: "This section is used to manage the menu") } - DBFind(menu, src_menus).Limit(250).Order("id").Custom(action){ - Span(LinkPage(Body: Em(Class: fa fa-cogs), Class: text-primary h4, Page: properties_edit, PageParams: "edit_property_id=#id#,type=menu")).Style(margin-right: 20px;) - Span(LinkPage(Body: Em(Class: fa fa-edit), Class: text-primary h4, Page: editor, PageParams: "open=menu,name=#name#")) - } - - 
Table(src_menus, "ID=id,Name=name,Title=title,Conditions=conditions,=action").Style( - tbody > tr:nth-of-type(odd) { - background-color: #fafbfc; - } - tbody > tr > td { - word-break: break-all; - font-weight: 400; - font-size: 13px; - color: #666; - border-top: 1px solid #eee; - vertical-align: middle; - } - tr > *:first-child { - padding-left:20px; - width: 80px; - } - tr > *:last-child { - padding-right:30px; - text-align:right; - width: 100px; - } - thead { - background-color: #eee; - }) + DBFind(menu, src_menus).Limit(250).Order("id").Custom(action){ + Span(LinkPage(Body: Em(Class: fa fa-cogs), Class: text-primary h4, Page: properties_edit, PageParams: "edit_property_id=#id#,type=menu")).Style(margin-right: 20px;) + Span(LinkPage(Body: Em(Class: fa fa-edit), Class: text-primary h4, Page: editor, PageParams: "open=menu,name=#name#")) + } + + Table(src_menus, "ID=id,Name=name,Title=title,Conditions=conditions,=action").Style( + tbody > tr:nth-of-type(odd) { + background-color: #fafbfc; + } + tbody > tr > td { + word-break: break-all; + font-weight: 400; + font-size: 13px; + color: #666; + border-top: 1px solid #eee; + vertical-align: middle; + } + tr > *:first-child { + padding-left:20px; + width: 80px; + } + tr > *:last-child { + padding-right:30px; + text-align:right; + width: 100px; + } + thead { + background-color: #eee; + }) }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(22, 'params_edit', 'Div(content-wrapper){ + (26, 'params_edit', 'Div(content-wrapper){ If(#stylesheet# == stylesheet){ DBFind(parameters, src_params).Where(name=''#stylesheet#'').Vars("param") }.Else{ @@ -1088,11 +1385,11 @@ If(#buffer_value_app_id# > 0){ } SetTitle("Ecosystem parameters") - Div(Class: breadcrumb){ - LinkPage("Ecosystem parameters", params_list) - Span(/).Style(margin-right: 10px; margin-left: 10px;) - Span(Class: text-muted, Body: #param_name#) - } + Div(Class: breadcrumb){ + LinkPage("Ecosystem parameters", params_list) + Span(/).Style(margin-right: 10px; margin-left: 10px;) + Span(Class: text-muted, Body: #param_name#) + } Form(){ Div(form-group){ @@ -1120,52 +1417,52 @@ If(#buffer_value_app_id# > 0){ } Div(form-group){ If(#param_id#>0){ - Button(Class: btn btn-primary, Body: "Save", Contract: EditParameter, Params: "Id=#param_id#,Value=Val(value),Conditions=Val(conditions)", Page: params_list) + Button(Class: btn btn-primary, Body: "Save", Contract: @1EditParameter, Params: "Id=#param_id#,Value=Val(value),Conditions=Val(conditions)", Page: params_list) }.Else{ - Button(Class: btn btn-primary, Body: "Save", Contract: NewParameter, Params: "Name=Val(name),Value=Val(value),Conditions=Val(conditions)", Page: params_list) + Button(Class: btn btn-primary, Body: "Save", Contract: @1NewParameter, Params: "Name=Val(name),Value=Val(value),Conditions=Val(conditions)", Page: params_list) } } } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(23, 'params_list', 'Div(fullscreen){ - SetTitle("Ecosystem parameters") - AddToolButton(Title: "Manage stylesheet", Page: params_edit, Icon: icon-picture, PageParams:"stylesheet=stylesheet") - AddToolButton(Title: "Create", Page: params_edit, Icon: icon-plus) - Div(breadcrumb){ - Span(Class: text-muted, Body: "This section is used to configure stored reusable parameters") - } - - DBFind(parameters, src_appparameters).Order("id").Custom(custom_actions){ - LinkPage(Body: Em(Class: fa fa-edit), Class: text-primary h4, Page: params_edit, PageParams: "id=#id#") - } - - Table(src_appparameters, 
"ID=id,Name=name,Application=app_id,Value=value,Conditions=conditions,=custom_actions").Style( - tbody > tr:nth-of-type(odd) { - background-color: #fafbfc; - } - tbody > tr > td { - word-break: break-all; - font-weight: 400; - font-size: 13px; - color: #666; - border-top: 1px solid #eee; - vertical-align: middle; - } - tr > *:first-child { - padding-left:20px; - width: 80px; - } - tr > *:last-child { - padding-right:30px; - text-align:right; - width: 100px; - } - thead { - background-color: #eee; - } - ) + (27, 'params_list', 'Div(fullscreen){ + SetTitle("Ecosystem parameters") + AddToolButton(Title: "Manage stylesheet", Page: params_edit, Icon: icon-picture, PageParams:"stylesheet=stylesheet") + AddToolButton(Title: "Create", Page: params_edit, Icon: icon-plus) + Div(breadcrumb){ + Span(Class: text-muted, Body: "This section is used to configure stored reusable parameters") + } + + DBFind(parameters, src_appparameters).Order("id").Custom(custom_actions){ + LinkPage(Body: Em(Class: fa fa-edit), Class: text-primary h4, Page: params_edit, PageParams: "id=#id#") + } + + Table(src_appparameters, "ID=id,Name=name,Value=value,Conditions=conditions,=custom_actions").Style( + tbody > tr:nth-of-type(odd) { + background-color: #fafbfc; + } + tbody > tr > td { + word-break: break-all; + font-weight: 400; + font-size: 13px; + color: #666; + border-top: 1px solid #eee; + vertical-align: middle; + } + tr > *:first-child { + padding-left:20px; + width: 80px; + } + tr > *:last-child { + padding-right:30px; + text-align:right; + width: 100px; + } + thead { + background-color: #eee; + } + ) }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(24, 'properties_edit', 'Div(Class: content-wrapper){ + (28, 'properties_edit', 'Div(Class: content-wrapper){ SetTitle("Edit properties") Div(breadcrumb){ Div(){ @@ -1182,7 +1479,7 @@ If(#buffer_value_app_id# > 0){ Span("Edit contract", text-muted) DBFind(Name: contracts, Source: src_contract).WhereId(#edit_property_id#).Vars(item) } - If(#type# == block){ + If(#type# == block){ LinkPage("Blocks", app_blocks) Span(/).Style(margin-right: 10px; margin-left: 10px;) Span("Edit block", text-muted) @@ -1196,7 +1493,7 @@ If(#buffer_value_app_id# > 0){ } } } - Form(){ + Form(){ Div(form-group){ Label("Name") Input(Name: Name, Value: #item_name#, Disabled: "true") @@ -1211,7 +1508,7 @@ If(#buffer_value_app_id# > 0){ Input(Name: Conditions, Value: #item_conditions#) } Div(form-group){ - Button(Body: "Save", Class: btn btn-primary, Page: app_pages, Contract: EditPage, Params: "Menu=Val(Menu),Conditions=Val(Conditions),Id=#edit_property_id#") + Button(Body: "Save", Class: btn btn-primary, Page: app_pages, Contract: @1EditPage, Params: "Menu=Val(Menu),Conditions=Val(Conditions),Id=#edit_property_id#") } } If(#type# == contract){ @@ -1223,19 +1520,20 @@ If(#buffer_value_app_id# > 0){ Label("Wallet") Div(row){ Div(col-md-10){ - Input(Name: Wallet,Value: Address(#item_wallet_id#)) + SetVar(address_item_wallet_id, Address(#item_wallet_id#)) + Input(Name: Wallet,Value: #address_item_wallet_id#) } Div(col-md-2){ If(#item_active# == 0){ - Button(Body: "Bind", Class: btn btn-primary btn-block, Contract: ActivateContract, Params: "Id=#edit_property_id#", Page:app_contracts) + Button(Body: "Bind", Class: btn btn-primary btn-block, Contract: @1ActivateContract, Params: "Id=#edit_property_id#", Page:app_contracts) }.Else{ - Button(Body: "Unbind", Class: btn btn-primary btn-block, Contract: DeactivateContract, Params: "Id=#edit_property_id#", Page:properties_edit, PageParams: 
"edit_property_id=#edit_property_id#,type=#type#") + Button(Body: "Unbind", Class: btn btn-primary btn-block, Contract: @1DeactivateContract, Params: "Id=#edit_property_id#", Page:properties_edit, PageParams: "edit_property_id=#edit_property_id#,type=#type#") } } } } Div(form-group){ - Button(Body: "Save", Class: btn btn-primary, Page: app_contracts, Contract: EditContract, Params: "Conditions=Val(Conditions),WalletId=Val(Wallet),Id=#edit_property_id#") + Button(Body: "Save", Class: btn btn-primary, Page: app_contracts, Contract: @1EditContract, Params: "Conditions=Val(Conditions),WalletId=Val(Wallet),Id=#edit_property_id#") } } If(#type# == block){ @@ -1244,7 +1542,7 @@ If(#buffer_value_app_id# > 0){ Input(Name: Conditions, Value: #item_conditions#) } Div(form-group){ - Button(Body: "Save", Class: btn btn-primary, Page: app_blocks, Contract: EditBlock, Params: "Conditions=Val(Conditions),Id=#edit_property_id#") + Button(Body: "Save", Class: btn btn-primary, Page: app_blocks, Contract: @1EditBlock, Params: "Conditions=Val(Conditions),Id=#edit_property_id#") } } If(#type# == menu){ @@ -1257,12 +1555,12 @@ If(#buffer_value_app_id# > 0){ Input(Name: Conditions, Value: #item_conditions#) } Div(form-group){ - Button(Body: "Save", Class: btn btn-primary, Page: menus_list, Contract: EditMenu, Params: "Conditions=Val(Conditions),Id=#edit_property_id#,NameTitle=Val(Title)") + Button(Body: "Save", Class: btn btn-primary, Page: menus_list, Contract: @1EditMenu, Params: "Conditions=Val(Conditions),Id=#edit_property_id#,NameTitle=Val(Title)") } } - } + } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(25, 'table_create', 'Div(content-wrapper){ + (29, 'table_create', 'Div(content-wrapper){ SetTitle("Create table") Div(breadcrumb){ Div(){ @@ -1272,13 +1570,23 @@ If(#buffer_value_app_id# > 0){ } } + Data(src_type,"type,name"){ + text,"Text" + number,"Number" + varchar,"Varchar" + datetime,"Date/Time" + money,"Money" + double,"Double" + character,"Character" + json,"JSON" + } Form(){ Div(panel panel-default){ Div(panel-body){ Div(row){ Div(col-md-12){ Label("Name") - Input(Name:TableName) + Input(Name:Name) } } Div(row){ @@ -1294,45 +1602,33 @@ If(#buffer_value_app_id# > 0){ Label(Class: text-muted, Body: "Action") } } - Data(src_type,"type,name"){ - text,"Text" - number,"Number" - varchar,"Varchar" - datetime,"Date/Time" - money,"Money" - double,"Double" - character,"Character" - json,"JSON" + If(GetVar(cs)==""){ + SetVar(cs, Calculate( Exp: 0, Type: int)) } If(#del# == 1){ - SetVar(next_count, Calculate( Exp: #count_sec# - 1, Type: int)) + SetVar(cs, Calculate( Exp: #cs# - 1, Type: int)) }.Else{ - If(GetVar(count)==""){ - SetVar(count, 0) - SetVar(next_count, Calculate( Exp: #count# + 1, Type: int)) - }.Else{ - SetVar(next_count, Calculate( Exp: #count_sec# + 1, Type: int)) - } + SetVar(cs, Calculate( Exp: #cs# + 1, Type: int)) } - Range(params_range, 0, #next_count#) + Range(params_range, 0, #cs#) ForList(Source: params_range){ Div(row){ Div(col-md-4 mt-sm){ - Input(Name:idshare) + Input(Name:ColumnsArr) } Div(col-md-7 mt-sm){ - Select(Name: share, Source: src_type, NameColumn: name, ValueColumn: type,Value:"text") + Select(Name: TypesArr, Source: src_type, NameColumn: name, ValueColumn: type) } Div(col-md-1 mt-sm){ - If(And(#next_count# == #params_range_index#, #next_count# > 1)){ - Button(Body: Em(Class: fa fa-trash), Class: btn btn-default, PageParams: "count_sec=#next_count#,count=#count#,del=1,app_id=#app_id#", Page: table_create) + If(And(#cs#==#params_range_index#, #cs# > 1)){ + Button(Body: 
Em(Class: fa fa-trash), Class: btn btn-default, PageParams: "cs=#cs#,del=1,application_id=#application_id#", Page: table_create) } } } } } Div(panel-footer){ - Button(Body: "Add column", Class: btn btn-primary, Page: table_create, PageParams: "count_sec=#next_count#,count=#count#,app_id=#app_id#") + Button(Body: "Add column", Class: btn btn-primary, Page: table_create, PageParams: "cs=#cs#,application_id=#application_id#") } } Div(row){ @@ -1342,31 +1638,35 @@ If(#buffer_value_app_id# > 0){ Div(panel-body){ Div(form-group){ Label(Insert) - Input(Name: Insert_con, Value: ContractConditions("MainCondition")) + Input(Name: InsertPerm, Value: ContractConditions("MainCondition")) } Div(form-group){ Label(Update) - Input(Name: Update_con, Value: ContractConditions("MainCondition")) + Input(Name: UpdatePerm, Value: ContractConditions("MainCondition")) } Div(form-group){ Label(New column) - Input(Name: New_column_con, Value: ContractConditions("MainCondition")) + Input(Name: NewColumnPerm, Value: ContractConditions("MainCondition")) } } Div(panel-footer){ - Button(Body: "Save", Class: btn btn-primary, Contract: @1NewTable, Page: app_tables, Params: "Shareholding=Val(share),Id=Val(idshare),ApplicationId=#app_id#") + Button(Body: "Save", Class: btn btn-primary, Contract: @1NewTableJoint, Page: app_tables, Params: "ApplicationId=#application_id#") } } } } } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(26, 'table_edit', 'Div(content-wrapper){ - SetTitle(Tables) + (30, 'table_edit', 'Div(content-wrapper){ + DBFind(tables, src_mem).Columns("id,name,columns,conditions,permissions->insert,permissions->update,permissions->new_column").Vars(pre).WhereId(#tabl_id#) + + SetTitle("Tables") Div(breadcrumb){ Div(){ LinkPage("Tables", app_tables) Span(/).Style(margin-right: 10px; margin-left: 10px;) + LinkPage(#pre_name#, table_view,, "tabl_id=#tabl_id#") + Span(/).Style(margin-right: 10px; margin-left: 10px;) Span("Edit", text-muted) } } @@ -1387,7 +1687,6 @@ If(#buffer_value_app_id# > 0){ Div(col-md-2 h4 text-right){ } } - DBFind(tables, src_mem).Columns("id,name,columns,conditions,permissions->insert,permissions->update,permissions->new_column").Vars(pre).WhereId(#tabl_id#) JsonToSource(src_columns, #pre_columns#) ForList(src_columns){ Div(list-group-item){ @@ -1397,9 +1696,6 @@ If(#buffer_value_app_id# > 0){ } Div(col-md-2 h5){ SetVar(col_type,GetColumnType(#pre_name#, #key#)) - If(#col_type# == character){ - Span(Character) - } If(#col_type# == text){ Span("Text") } @@ -1452,15 +1748,15 @@ If(#buffer_value_app_id# > 0){ Div(panel-body){ Div(form-group){ Label("Insert") - Input(Name: Insert_con, Type: text, Value: #pre_permissions_insert#) + Input(Name: InsertPerm, Type: text, Value: #pre_permissions_insert#) } Div(form-group){ Label("Update") - Input(Name: Update_con, Type: text, Value: #pre_permissions_update#) + Input(Name: UpdatePerm, Type: text, Value: #pre_permissions_update#) } Div(form-group){ Label("New column") - Input(Name: New_column_con, Type: text, Value: #pre_permissions_new_column#) + Input(Name: NewColumnPerm, Type: text, Value: #pre_permissions_new_column#) } } Div(panel-footer){ @@ -1473,7 +1769,7 @@ If(#buffer_value_app_id# > 0){ Div(panel-heading){Conditions for changing permissions} Div(panel-body){ Div(form-group){ - Input(Name: Insert_condition, Disabled:"true", Type: text, Value: #pre_conditions#) + Input(Name: Insert_condition, Disabled: true, Type: text, Value: #pre_conditions#) } } } @@ -1481,58 +1777,93 @@ If(#buffer_value_app_id# > 0){ } } }', 'admin_menu', 
'ContractAccess("@1EditPage")'), -(27, 'table_view', 'Div(content-wrapper){ - SetTitle("Tables") - Div(breadcrumb){ - LinkPage("Tables", app_tables) - Span(/).Style(margin-right: 10px; margin-left: 10px;) - Span(#table_name#, text-muted) - } - + (31, 'table_view', 'Div(content-wrapper){ + DBFind(tables).WhereId(#tabl_id#).Columns("id,name").Vars(pre) + + SetTitle("Tables") + Div(breadcrumb){ + LinkPage("Tables", app_tables) + Span(/).Style(margin-right: 10px; margin-left: 10px;) + Span(#pre_name#, text-muted) + Span(/).Style(margin-right: 10px; margin-left: 10px;) + LinkPage(Body:"Edit", Page: table_edit, PageParams: "tabl_id=#tabl_id#") + } + + DBFind(#pre_name#).Count(count) + If(#page#>0){ + SetVar(prev_page,Calculate(#page#-1) + }.Else{ + SetVar(page,0).(prev_page,0) + } + SetVar(per_page,25).(off,Calculate(#page#*#per_page#)).(last_page,Calculate(#count#/#per_page#)).(next_page,#last_page#) + If(#count#>Calculate(#off#+#per_page#)){ + SetVar(next_page,Calculate(#page#+1) + } + Div(button-group){ + If(#page#>0){ + Button(Body:"1", Class:btn btn-default, Page:table_view, PageParams: "tabl_id=#tabl_id#,page=0") + }.Else{ + Button(Body:"1", Class:btn btn-default disabled) + } + If(#page#>1){ + Button(Body:Calculate(#prev_page#+1), Class:btn btn-default, Page:table_view, PageParams: "tabl_id=#tabl_id#,page=#prev_page#") + } + If(And(#page#>0,#page#<#last_page#)){ + Button(Body:Calculate(#page#+1), Class:btn btn-default disabled) + } + If(#next_page#<#last_page#){ + Button(Body:Calculate(#next_page#+1), Class:btn btn-default, Page:table_view, PageParams: "tabl_id=#tabl_id#,page=#next_page#") + } + If(#page#<#last_page#){ + Button(Body:Calculate(#last_page#+1), Class:btn btn-default, Page:table_view, PageParams: "tabl_id=#tabl_id#,page=#last_page#") + }.ElseIf(#last_page#>0){ + Button(Body:Calculate(#last_page#+1), Class:btn btn-default disabled) + } + } Div(panel panel-default){ Div(panel-body){ Div(table-responsive){ - DBFind(#table_name#, src_mem) - Table(Source: src_mem) + DBFind(#pre_name#, src_mem).Offset(#off#).Order(id) + Table(src_mem) } } } }', 'admin_menu', 'ContractAccess("@1EditPage")'), -(28, 'admin_index', '', 'admin_menu', true), -(29,'notifications',$$DBFind(Name: notifications, Source: notifications_members).Columns("id,page_name,notification->icon,notification->header,notification->body").Where("closed=0 and notification->type='1' and recipient->member_id='#key_id#'") - ForList(notifications_members){ - Div(Class: list-group-item){ - LinkPage(Page: #page_name#, PageParams: "notific_id=#id#"){ - Div(media-box){ - Div(Class: pull-left){ - Em(Class: fa #notification.icon# fa-1x text-primary) - } - Div(media-box-body clearfix){ - Div(Class: m0 text-normal, Body: #notification.header#) - Div(Class: m0 text-muted h6, Body: #notification.body#) - } - } - } + (32, 'admin_index', '', 'admin_menu', true), + (33, 'notifications', $$DBFind(Name: notifications, Source: notifications_members).Columns("id,page_name,notification->icon,notification->header,notification->body").Where("closed=0 and notification->type='1' and recipient->member_id='#key_id#'") +ForList(notifications_members){ + Div(Class: list-group-item){ + LinkPage(Page: #page_name#, PageParams: "notific_id=#id#"){ + Div(media-box){ + Div(Class: pull-left){ + Em(Class: fa #notification.icon# fa-1x text-primary) + } + Div(media-box-body clearfix){ + Div(Class: m0 text-normal, Body: #notification.header#) + Div(Class: m0 text-muted h6, Body: #notification.body#) } } + } + } +} - DBFind(Name: notifications, Source: 
notifications_roles).Columns("id,page_name,notification->icon,notification->header,notification->body,recipient->role_id").Where("closed=0 and notification->type='2' and (date_start_processing is null or processing_info->member_id='#key_id#')") - ForList(notifications_roles){ - DBFind(Name: roles_participants, Source: src_roles).Columns("id").Where("member->member_id='#key_id#' and role->id='#recipient.role_id#' and deleted=0").Vars(prefix) - If(#prefix_id# > 0){ - Div(Class: list-group-item){ - LinkPage(Page: #page_name#, PageParams: "notific_id=#id#"){ - Div(media-box){ - Div(Class: pull-left){ - Em(Class: fa #notification.icon# fa-1x text-primary) - } - Div(media-box-body clearfix){ - Div(Class: m0 text-normal, Body: #notification.header#) - Div(Class: m0 text-muted h6, Body: #notification.body#) - } - } - } +DBFind(Name: notifications, Source: notifications_roles).Columns("id,page_name,notification->icon,notification->header,notification->body,recipient->role_id").Where("closed=0 and notification->type='2' and (date_start_processing is null or processing_info->member_id='#key_id#')") +ForList(notifications_roles){ + DBFind(Name: roles_participants, Source: src_roles).Columns("id").Where("member->member_id='#key_id#' and role->id='#recipient.role_id#' and deleted=0").Vars(prefix) + If(#prefix_id# > 0){ + Div(Class: list-group-item){ + LinkPage(Page: #page_name#, PageParams: "notific_id=#id#"){ + Div(media-box){ + Div(Class: pull-left){ + Em(Class: fa #notification.icon# fa-1x text-primary) + } + Div(media-box-body clearfix){ + Div(Class: m0 text-normal, Body: #notification.header#) + Div(Class: m0 text-muted h6, Body: #notification.body#) } } + } + } + } }$$,'default_menu','ContractAccess("@1EditPage")'); ` diff --git a/packages/migration/roles_data.go b/packages/migration/roles_data.go index 4a088f2ef..45093d690 100644 --- a/packages/migration/roles_data.go +++ b/packages/migration/roles_data.go @@ -2,18 +2,14 @@ package migration var rolesDataSQL = ` INSERT INTO "%[1]d_roles" ("id", "default_page", "role_name", "deleted", "role_type", - "date_created","creator") VALUES - ('1','default_ecosystem_page', 'Admin', '0', '3', NOW(), '{}'), - ('2','', 'Candidate for validators', '0', '3', NOW(), '{}'), - ('3','', 'Validator', '0', '3', NOW(), '{}'), - ('4','', 'Investor with voting rights', '0', '3', NOW(), '{}'), - ('5','', 'Delegate', '0', '3', NOW(), '{}'), - ('6','', 'Developer', '0', '3', NOW(), '{}'); + "date_created","creator","roles_access") VALUES + ('1','default_ecosystem_page', 'Admin', '0', '3', NOW(), '{}', '{}'), + ('2','', 'Developer', '0', '3', NOW(), '{}', '{}'); INSERT INTO "%[1]d_roles_participants" ("id","role" ,"member", "date_created") - VALUES ('1', '{"id": "1", "type": "3", "name": "Admin", "image_id":"0"}', '{"member_id": "%[4]d", "member_name": "founder", "image_id": "0"}', NOW()), - ('2', '{"id": "6", "type": "3", "name": "Developer", "image_id":"0"}', '{"member_id": "%[4]d", "member_name": "founder", "image_id": "0"}', NOW()); + VALUES ('1', '{"id": "1", "type": "3", "name": "Admin", "image_id":"0"}', '{"member_id": "%[2]d", "member_name": "founder", "image_id": "0"}', NOW()), + ('2', '{"id": "2", "type": "3", "name": "Developer", "image_id":"0"}', '{"member_id": "%[2]d", "member_name": "founder", "image_id": "0"}', NOW()); - INSERT INTO "%[1]d_members" ("id", "member_name") VALUES('%[4]d', 'founder'); + INSERT INTO "%[1]d_members" ("id", "member_name") VALUES('%[2]d', 'founder'); ` diff --git a/packages/migration/tables_data.go b/packages/migration/tables_data.go 
index a7086eafe..be8177b61 100644 --- a/packages/migration/tables_data.go +++ b/packages/migration/tables_data.go @@ -82,6 +82,7 @@ var tablesDataSQL = `INSERT INTO "%[1]d_tables" ("id", "name", "permissions","co "image_id":"ContractAccess(\"Roles_Create\")", "role_name":"false", "date_created":"false", + "roles_access":"ContractAccess(\"Roles_AccessManager\")", "role_type":"false"}', 'ContractConditions("MainCondition")'), ('11', 'roles_participants', @@ -96,17 +97,17 @@ var tablesDataSQL = `INSERT INTO "%[1]d_tables" ("id", "name", "permissions","co "appointed":"false"}', 'ContractConditions("MainCondition")'), ('12', 'notifications', - '{"insert":"ContractAccess(\"Notifications_Single_Send_map\", \"Notifications_Roles_Send_map\", \"CheckNodesBan\")", - "update":"ContractConditions(\"MainCondition\")", + '{"insert":"ContractAccess(\"notifications_Send\", \"CheckNodesBan\")", + "update":"ContractAccess(\"notifications_Send\", \"notifications_Close\", \"notifications_Process\")", "new_column":"ContractConditions(\"MainCondition\")"}', - '{"date_closed":"ContractAccess(\"Notifications_Single_Close\",\"Notifications_Roles_Close\")", + '{"date_closed":"ContractAccess(\"notifications_Close\")", "sender":"false", - "processing_info":"ContractAccess(\"Notifications_Single_Close\",\"Notifications_Roles_Processing\")", - "date_start_processing":"ContractAccess(\"Notifications_Single_Close\",\"Notifications_Roles_Processing\")", + "processing_info":"ContractAccess(\"notifications_Close\",\"notifications_Process\")", + "date_start_processing":"ContractAccess(\"notifications_Close\",\"notifications_Process\")", "notification":"false", "page_name":"false", "page_params":"false", - "closed":"ContractAccess(\"Notifications_Single_Close\",\"Notifications_Roles_Close\")", + "closed":"ContractAccess(\"notifications_Close\")", "date_created":"false", "recipient":"false"}', 'ContractAccess("@1EditTable")'), @@ -129,12 +130,12 @@ var tablesDataSQL = `INSERT INTO "%[1]d_tables" ("id", "name", "permissions","co "deleted": "ContractConditions(\"MainCondition\")"}', 'ContractConditions("MainCondition")'), ('15', 'binaries', - '{"insert":"ContractAccess(\"UploadBinary\")", - "update":"ContractConditions(\"MainCondition\")", + '{"insert":"ContractAccess(\"@1UploadBinary\")", + "update":"ContractAccess(\"@1UploadBinary\")", "new_column":"ContractConditions(\"MainCondition\")"}', - '{"hash":"ContractAccess(\"UploadBinary\")", + '{"hash":"ContractAccess(\"@1UploadBinary\")", "member_id":"false", - "data":"ContractAccess(\"UploadBinary\")", + "data":"ContractAccess(\"@1UploadBinary\")", "name":"false", "app_id":"false"}', 'ContractConditions(\"MainCondition\")'), diff --git a/packages/migration/vde/vde_data_contracts.go b/packages/migration/vde/vde_data_contracts.go new file mode 100644 index 000000000..ccab65155 --- /dev/null +++ b/packages/migration/vde/vde_data_contracts.go @@ -0,0 +1,867 @@ +package vde + +var contractsDataSQL = `INSERT INTO "%[1]d_contracts" ("id", "name", "value", "conditions") VALUES + ('1','MainCondition','contract MainCondition { + conditions { + if EcosysParam("founder_account")!=$key_id + { + warning "Sorry, you do not have access to this action." 
+ } + } + }', 'ContractConditions("MainCondition")'), + ('2','NewContract','contract NewContract { + data { + Value string + Conditions string + Wallet string "optional" + TokenEcosystem int "optional" + ApplicationId int "optional" + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + $walletContract = $key_id + if $Wallet { + $walletContract = AddressToId($Wallet) + if $walletContract == 0 { + error Sprintf("wrong wallet %%s", $Wallet) + } + } + var list array + list = ContractsList($Value) + + if Len(list) == 0 { + error "must be the name" + } + + var i int + while i < Len(list) { + if IsObject(list[i], $ecosystem_id) { + warning Sprintf("Contract or function %%s exists", list[i] ) + } + i = i + 1 + } + + $contract_name = list[0] + if !$TokenEcosystem { + $TokenEcosystem = 1 + } else { + if !SysFuel($TokenEcosystem) { + warning Sprintf("Ecosystem %%d is not system", $TokenEcosystem ) + } + } + } + action { + var root, id int + root = CompileContract($Value, $ecosystem_id, $walletContract, $TokenEcosystem) + id = DBInsert("contracts", "name,value,conditions, wallet_id, token_id,app_id", + $contract_name, $Value, $Conditions, $walletContract, $TokenEcosystem, $ApplicationId) + FlushContract(root, id, false) + $result = id + } + func rollback() { + var list array + list = ContractsList($Value) + var i int + while i < Len(list) { + RollbackContract(list[i]) + i = i + 1 + } + } + func price() int { + return SysParamInt("contract_price") + } + }', 'ContractConditions("MainCondition")'), + ('3','EditContract','contract EditContract { + data { + Id int + Value string "optional" + Conditions string "optional" + } + + func onlyConditions() bool { + return $Conditions && !$Value + } + conditions { + RowConditions("contracts", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + + var row array + row = DBFind("contracts").Columns("id,value,conditions").WhereId($Id) + if !Len(row) { + error Sprintf("Contract %%d does not exist", $Id) + } + $cur = row[0] + if $Value { + var list, curlist array + list = ContractsList($Value) + curlist = ContractsList($cur["value"]) + if Len(list) != Len(curlist) { + error "Contracts cannot be removed or inserted" + } + var i int + while i < Len(list) { + var j int + var ok bool + while j < Len(curlist) { + if curlist[j] == list[i] { + ok = true + break + } + j = j + 1 + } + if !ok { + error "Contracts or functions names cannot be changed" + } + i = i + 1 + } + } + } + action { + var root int + var pars, vals array + + if $Value { + root = CompileContract($Value, $ecosystem_id, 0, 0) + pars[0] = "value" + vals[0] = $Value + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if Len(vals) > 0 { + DBUpdate("contracts", $Id, Join(pars, ","), vals...) 
+ } + if $Value { + FlushContract(root, $Id, false) + } + } + }', 'ContractConditions("MainCondition")'), + ('4','NewParameter','contract NewParameter { + data { + Name string + Value string + Conditions string + } + conditions { + var ret array + ValidateCondition($Conditions, $ecosystem_id) + ret = DBFind("parameters").Columns("id").Where("name=?", $Name).Limit(1) + if Len(ret) > 0 { + warning Sprintf( "Parameter %%s already exists", $Name) + } + } + action { + $result = DBInsert("parameters", "name,value,conditions", $Name, $Value, $Conditions ) + } + }', 'ContractConditions("MainCondition")'), + ('5','EditParameter','contract EditParameter { + data { + Id int + Value string + Conditions string + } + func onlyConditions() bool { + return $Conditions && !$Value + } + conditions { + RowConditions("parameters", $Id, onlyConditions()) + ValidateCondition($Conditions, $ecosystem_id) + } + action { + DBUpdate("parameters", $Id, "value,conditions", $Value, $Conditions ) + } + }', 'ContractConditions("MainCondition")'), + ('6', 'NewMenu','contract NewMenu { + data { + Name string + Value string + Title string "optional" + Conditions string + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + + var row map + row = DBRow("menu").Columns("id").Where("name = ?", $Name) + + if row { + warning Sprintf( "Menu %%s already exists", $Name) + } + } + action { + DBInsert("menu", "name,value,title,conditions", $Name, $Value, $Title, $Conditions ) + } + func price() int { + return SysParamInt("menu_price") + } + }', 'ContractConditions("MainCondition")'), + ('7','EditMenu','contract EditMenu { + data { + Id int + Value string "optional" + Title string "optional" + Conditions string "optional" + } + + func onlyConditions() bool { + return $Conditions && !$Value && !$Title + } + conditions { + RowConditions("menu", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + } + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Title { + pars[Len(pars)] = "title" + vals[Len(vals)] = $Title + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if Len(vals) > 0 { + DBUpdate("menu", $Id, Join(pars, ","), vals...) 
+ } + } + }', 'ContractConditions("MainCondition")'), + ('8','AppendMenu','contract AppendMenu { + data { + Id int + Value string + } + conditions { + RowConditions("menu", $Id, false) + } + action { + var row map + row = DBRow("menu").Columns("value").WhereId($Id) + DBUpdate("menu", $Id, "value", row["value"] + "\r\n" + $Value) + } + }', 'ContractConditions("MainCondition")'), + ('9','NewPage','contract NewPage { + data { + Name string + Value string + Menu string + Conditions string + ValidateCount int "optional" + ApplicationId int "optional" + ValidateMode int "optional" + } + func preparePageValidateCount(count int) int { + var min, max int + min = Int(EcosysParam("min_page_validate_count")) + max = Int(EcosysParam("max_page_validate_count")) + + if count < min { + count = min + } else { + if count > max { + count = max + } + } + + return count + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + + var row map + row = DBRow("pages").Columns("id").Where("name = ?", $Name) + + if row { + warning Sprintf( "Page %%s already exists", $Name) + } + + $ValidateCount = preparePageValidateCount($ValidateCount) + } + action { + DBInsert("pages", "name,value,menu,validate_count,conditions,app_id,validate_mode", + $Name, $Value, $Menu, $ValidateCount, $Conditions, $ApplicationId, $ValidateMode) + } + func price() int { + return SysParamInt("page_price") + } + }', 'ContractConditions("MainCondition")'), + ('10','EditPage','contract EditPage { + data { + Id int + Value string "optional" + Menu string "optional" + Conditions string "optional" + ValidateCount int "optional" + ValidateMode string "optional" + } + func onlyConditions() bool { + return $Conditions && !$Value && !$Menu + } + func preparePageValidateCount(count int) int { + var min, max int + min = Int(EcosysParam("min_page_validate_count")) + max = Int(EcosysParam("max_page_validate_count")) + + if count < min { + count = min + } else { + if count > max { + count = max + } + } + + return count + } + conditions { + RowConditions("pages", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + $ValidateCount = preparePageValidateCount($ValidateCount) + } + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Menu { + pars[Len(pars)] = "menu" + vals[Len(vals)] = $Menu + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if $ValidateCount { + pars[Len(pars)] = "validate_count" + vals[Len(vals)] = $ValidateCount + } + if $ValidateMode { + if $ValidateMode != "1" { + $ValidateMode = "0" + } + pars[Len(pars)] = "validate_mode" + vals[Len(vals)] = $ValidateMode + } + if Len(vals) > 0 { + DBUpdate("pages", $Id, Join(pars, ","), vals...) 
+ } + } + }', 'ContractConditions("MainCondition")'), + ('11','AppendPage','contract AppendPage { + data { + Id int + Value string + } + conditions { + RowConditions("pages", $Id, false) + } + action { + var row map + row = DBRow("pages").Columns("value").WhereId($Id) + DBUpdate("pages", $Id, "value", row["value"] + "\r\n" + $Value) + } + }', 'ContractConditions("MainCondition")'), + ('12','NewBlock','contract NewBlock { + data { + Name string + Value string + Conditions string + ApplicationId int "optional" + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + + var row map + row = DBRow("blocks").Columns("id").Where("name = ?", $Name) + + if row { + warning Sprintf( "Block %%s already exists", $Name) + } + } + action { + DBInsert("blocks", "name,value,conditions,app_id", $Name, $Value, $Conditions, $ApplicationId ) + } + }', 'ContractConditions("MainCondition")'), + ('13','EditBlock','contract EditBlock { + data { + Id int + Value string "optional" + Conditions string "optional" + } + + func onlyConditions() bool { + return $Conditions && !$Value + } + + conditions { + RowConditions("blocks", $Id, onlyConditions()) + if $Conditions { + ValidateCondition($Conditions, $ecosystem_id) + } + } + action { + var pars, vals array + if $Value { + pars[0] = "value" + vals[0] = $Value + } + if $Conditions { + pars[Len(pars)] = "conditions" + vals[Len(vals)] = $Conditions + } + if Len(vals) > 0 { + DBUpdate("blocks", $Id, Join(pars, ","), vals...) + } + } + }', 'ContractConditions("MainCondition")'), + ('14','NewTable','contract NewTable { + data { + Name string + Columns string + Permissions string + ApplicationId int "optional" + } + conditions { + TableConditions($Name, $Columns, $Permissions) + } + action { + CreateTable($Name, $Columns, $Permissions, $ApplicationId) + } + func rollback() { + RollbackTable($Name) + } + func price() int { + return SysParamInt("table_price") + } + }', 'ContractConditions("MainCondition")'), + ('15','EditTable','contract EditTable { + data { + Name string + Permissions string + } + conditions { + TableConditions($Name, "", $Permissions) + } + action { + PermTable($Name, $Permissions ) + } + }', 'ContractConditions("MainCondition")'), + ('16','NewColumn','contract NewColumn { + data { + TableName string + Name string + Type string + Permissions string + } + conditions { + ColumnCondition($TableName, $Name, $Type, $Permissions) + } + action { + CreateColumn($TableName, $Name, $Type, $Permissions) + } + }', 'ContractConditions("MainCondition")'), + ('17','EditColumn','contract EditColumn { + data { + TableName string + Name string + Permissions string + } + conditions { + ColumnCondition($TableName, $Name, "", $Permissions) + } + action { + PermColumn($TableName, $Name, $Permissions) + } + }', 'ContractConditions("MainCondition")'), + ('18','NewLang', 'contract NewLang { + data { + ApplicationId int "optional" + Name string + Trans string "optional" + Value array "optional" + IdLanguage array "optional" + } + + conditions { + if $ApplicationId == 0 { + warning "Application id cannot equal 0" + } + + if DBFind("languages").Columns("id").Where("name = ?", $Name).One("id") { + warning Sprintf( "Language resource %%s already exists", $Name) + } + + var j int + while j < Len($IdLanguage) { + if $IdLanguage[j] == "" { + info("Locale empty") + } + if $Value[j] == "" { + info("Value empty") + } + j = j + 1 + } + EvalCondition("parameters", "changing_language", "value") + } + + action { + var i,len,lenshar int + var res,langarr string + len = Len($IdLanguage) + 
lenshar = Len($Value) + while i < len { + if i + 1 == len { + res = res + Sprintf("%%q: %%q",$IdLanguage[i],$Value[i]) + } else { + res = res + Sprintf("%%q: %%q,",$IdLanguage[i],$Value[i]) + } + i = i + 1 + } + if len > 0 { + langarr = Sprintf("{"+"%%v"+"}", res) + $Trans = langarr + } + $result = CreateLanguage($Name, $Trans, $ApplicationId) + } + }', 'ContractConditions("MainCondition")'), + ('19','EditLang','contract EditLang { + data { + Id int + Name string "optional" + ApplicationId int "optional" + Trans string "optional" + Value array "optional" + IdLanguage array "optional" + } + + conditions { + var j int + while j < Len($IdLanguage) { + if ($IdLanguage[j] == ""){ + info("Locale empty") + } + if ($Value[j] == ""){ + info("Value empty") + } + j = j + 1 + } + EvalCondition("parameters", "changing_language", "value") + } + + action { + var i,len int + var res,langarr string + len = Len($IdLanguage) + while i < len { + if (i + 1 == len){ + res = res + Sprintf("%%q: %%q", $IdLanguage[i],$Value[i]) + } + else { + res = res + Sprintf("%%q: %%q, ", $IdLanguage[i],$Value[i]) + } + i = i + 1 + } + + $row = DBFind("languages").Columns("name,app_id").WhereId($Id).Row() + if !$row{ + warning "Language not found" + } + + if $ApplicationId == 0 { + $ApplicationId = Int($row["app_id"]) + } + if $Name == "" { + $Name = $row["name"] + } + + if (len > 0){ + langarr = Sprintf("{"+"%%v"+"}", res) + $Trans = langarr + + } + EditLanguage($Id, $Name, $Trans, $ApplicationId) + } + }', 'ContractConditions("MainCondition")'), + ('20','Import','contract Import { + data { + Data string + } + conditions { + $list = JSONDecode($Data) + } + func ImportList(row array, cnt string) { + if !row { + return + } + var i int + while i < Len(row) { + var idata map + idata = row[i] + if(cnt == "pages"){ + $ret_page = DBFind("pages").Columns("id").Where("name=$", idata["Name"]) + $page_id = One($ret_page, "id") + if ($page_id != nil){ + idata["Id"] = Int($page_id) + CallContract("EditPage", idata) + } else { + CallContract("NewPage", idata) + } + } + if(cnt == "blocks"){ + $ret_block = DBFind("blocks").Columns("id").Where("name=$", idata["Name"]) + $block_id = One($ret_block, "id") + if ($block_id != nil){ + idata["Id"] = Int($block_id) + CallContract("EditBlock", idata) + } else { + CallContract("NewBlock", idata) + } + } + if(cnt == "menus"){ + $ret_menu = DBFind("menu").Columns("id,value").Where("name=$", idata["Name"]) + $menu_id = One($ret_menu, "id") + $menu_value = One($ret_menu, "value") + if ($menu_id != nil){ + idata["Id"] = Int($menu_id) + idata["Value"] = Str($menu_value) + "\n" + Str(idata["Value"]) + CallContract("EditMenu", idata) + } else { + CallContract("NewMenu", idata) + } + } + if(cnt == "parameters"){ + $ret_param = DBFind("parameters").Columns("id").Where("name=$", idata["Name"]) + $param_id = One($ret_param, "id") + if ($param_id != nil){ + idata["Id"] = Int($param_id) + CallContract("EditParameter", idata) + } else { + CallContract("NewParameter", idata) + } + } + if(cnt == "languages"){ + $ret_lang = DBFind("languages").Columns("id").Where("name=$", idata["Name"]) + $lang_id = One($ret_lang, "id") + if ($lang_id != nil){ + CallContract("EditLang", idata) + } else { + CallContract("NewLang", idata) + } + } + if(cnt == "contracts"){ + if IsObject(idata["Name"], $ecosystem_id){ + } else { + CallContract("NewContract", idata) + } + } + if(cnt == "tables"){ + $ret_table = DBFind("tables").Columns("id").Where("name=$", idata["Name"]) + $table_id = One($ret_table, "id") + if ($table_id != nil){ + } 
else { + CallContract("NewTable", idata) + } + } + i = i + 1 + } + } + func ImportData(row array) { + if !row { + return + } + var i int + while i < Len(row) { + var idata map + var list array + var tblname, columns string + idata = row[i] + i = i + 1 + tblname = idata["Table"] + columns = Join(idata["Columns"], ",") + list = idata["Data"] + if !list { + continue + } + var j int + while j < Len(list) { + var ilist array + ilist = list[j] + DBInsert(tblname, columns, ilist) + j=j+1 + } + } + } + action { + ImportList($list["pages"], "pages") + ImportList($list["blocks"], "blocks") + ImportList($list["menus"], "menus") + ImportList($list["parameters"], "parameters") + ImportList($list["languages"], "languages") + ImportList($list["contracts"], "contracts") + ImportList($list["tables"], "tables") + ImportData($list["data"]) + } + }', 'ContractConditions("MainCondition")'), + ('21', 'NewCron','contract NewCron { + data { + Cron string + Contract string + Limit int "optional" + Till string "optional date" + Conditions string + } + conditions { + ValidateCondition($Conditions,$ecosystem_id) + ValidateCron($Cron) + } + action { + if !$Till { + $Till = "1970-01-01 00:00:00" + } + if !HasPrefix($Contract, "@") { + $Contract = "@" + Str($ecosystem_id) + $Contract + } + $result = DBInsert("cron", "owner,cron,contract,counter,till,conditions", + $key_id, $Cron, $Contract, $Limit, $Till, $Conditions) + UpdateCron($result) + } + }', 'ContractConditions("MainCondition")'), + ('22','EditCron','contract EditCron { + data { + Id int + Contract string + Cron string "optional" + Limit int "optional" + Till string "optional date" + Conditions string + } + conditions { + ConditionById("cron", true) + ValidateCron($Cron) + } + action { + if !$Till { + $Till = "1970-01-01 00:00:00" + } + if !HasPrefix($Contract, "@") { + $Contract = "@" + Str($ecosystem_id) + $Contract + } + DBUpdate("cron", $Id, "cron,contract,counter,till,conditions", + $Cron, $Contract, $Limit, $Till, $Conditions) + UpdateCron($Id) + } + }', 'ContractConditions("MainCondition")'), + ('23', 'UploadBinary', 'contract UploadBinary { + data { + Name string + Data bytes "file" + AppID int + DataMimeType string "optional" + MemberID int "optional" + } + conditions { + $Id = Int(DBFind("binaries").Columns("id").Where("app_id = ? AND member_id = ? 
AND name = ?", $AppID, $MemberID, $Name).One("id")) + } + action { + var hash string + hash = MD5($Data) + + if $DataMimeType == "" { + $DataMimeType = "application/octet-stream" + } + + if $Id != 0 { + DBUpdate("binaries", $Id, "data,hash,mime_type", $Data, hash, $DataMimeType) + } else { + $Id = DBInsert("binaries", "app_id,member_id,name,data,hash,mime_type", $AppID, $MemberID, $Name, $Data, hash, $DataMimeType) + } + + $result = $Id + } + }', 'ContractConditions("MainCondition")'), + ('24', 'NewUser','contract NewUser { + data { + NewPubkey string + } + conditions { + Println($NewPubkey) + $newId = PubToID($NewPubkey) + if $newId == 0 { + error "Wrong pubkey" + } + if DBFind("keys").Columns("id").WhereId($newId).One("id") != nil { + error "User already exists" + } + } + action { + DBInsert("keys", "id", $newId) + SetPubKey($newId, StringToBytes($NewPubkey)) + } + }', 'ContractConditions("MainCondition")'), + ('25', 'NewVDE', 'contract NewVDE { + data { + VDEName string + DBUser string + DBPassword string + VDEAPIPort int + } + + conditions { + } + + action { + CreateVDE($VDEName, $DBUser, $DBPassword, $VDEAPIPort) + } + }', 'ContractConditions("MainCondition")'), + ('26', 'ListVDE', 'contract ListVDE { + data {} + + conditions {} + + action { + return GetVDEList() + } + }', 'ContractConditions("MainCondition")'), + ('27', 'RunVDE', 'contract RunVDE { + data { + VDEName string + } + + conditions { + } + + action { + StartVDE($VDEName) + } + }', 'ContractConditions("MainCondition")'), + ('28', 'StopVDE', 'contract StopVDE { + data { + VDEName string + } + + conditions { + } + + action { + StopVDEProcess($VDEName) + } + }', 'ContractConditions("MainCondition")'), + ('29', 'RemoveVDE', 'contract RemoveVDE { + data { + VDEName string + } + conditions {} + action{ + DeleteVDE($VDEName) + } + }', 'ContractConditions("MainCondition")');` diff --git a/packages/migration/vde/vde_data_keys.go b/packages/migration/vde/vde_data_keys.go new file mode 100644 index 000000000..42e26c843 --- /dev/null +++ b/packages/migration/vde/vde_data_keys.go @@ -0,0 +1,6 @@ +package vde + +var keysDataSQL = ` +INSERT INTO "%[1]d_keys" (id, pub) +VALUES (4544233900443112470, '489347a1205c818d9a02f285faaedd0122a56138e3d985f5e1b4f6a9470f90f692a00a3453771dd7feea388ceb7aefeaf183e299c70ad1aecb7f870bfada3b86'); +` diff --git a/packages/migration/vde/vde_data_members.go b/packages/migration/vde/vde_data_members.go new file mode 100644 index 000000000..069f1ea2b --- /dev/null +++ b/packages/migration/vde/vde_data_members.go @@ -0,0 +1,7 @@ +package vde + +var membersDataSQL = ` +INSERT INTO "%[1]d_members" ("id", "member_name") +VALUES('%[2]d', 'founder'), +('4544233900443112470', 'guest'); +` diff --git a/packages/migration/vde/vde_data_menu.go b/packages/migration/vde/vde_data_menu.go new file mode 100644 index 000000000..b52a1699f --- /dev/null +++ b/packages/migration/vde/vde_data_menu.go @@ -0,0 +1,45 @@ +package vde + +var menuDataSQL = ` +INSERT INTO "%[1]d_menu" ("id","name","title","value","conditions") VALUES('2','admin_menu','Admin menu','MenuItem( + Icon: "icon-screen-desktop", + Page: "interface", + Vde: "true", + Title: "Interface" +) +MenuItem( + Icon: "icon-docs", + Page: "tables", + Vde: "true", + Title: "Tables" +) +MenuItem( + Icon: "icon-briefcase", + Page: "contracts", + Vde: "true", + Title: "Smart Contracts" +) +MenuItem( + Icon: "icon-settings", + Page: "parameters", + Vde: "true", + Title: "Ecosystem parameters" +) +MenuItem( + Icon: "icon-globe", + Page: "languages", + Vde: "true", + Title: 
"Language resources" +) +MenuItem( + Icon: "icon-cloud-upload", + Page: "import", + Vde: "true", + Title: "Import" +) +MenuItem( + Icon: "icon-cloud-download", + Page: "export", + Vde: "true", + Title: "Export" +)','true');` diff --git a/packages/migration/vde/vde_data_pages.go b/packages/migration/vde/vde_data_pages.go new file mode 100644 index 000000000..b013166b1 --- /dev/null +++ b/packages/migration/vde/vde_data_pages.go @@ -0,0 +1,5 @@ +package vde + +var pagesDataSQL = ` +INSERT INTO "%[1]d_pages" ("id","name","value","menu","conditions") VALUES('1', 'default_page', '', 'admin_menu', 'true'),('2','admin_index','','admin_menu','true'); +` diff --git a/packages/migration/vde/vde_data_parameters.go b/packages/migration/vde/vde_data_parameters.go new file mode 100644 index 000000000..3ba29e2f9 --- /dev/null +++ b/packages/migration/vde/vde_data_parameters.go @@ -0,0 +1,18 @@ +package vde + +var parametersDataSQL = ` +INSERT INTO "%[1]d_parameters" ("id","name", "value", "conditions") VALUES + ('1','founder_account', '%[2]d', 'ContractConditions("MainCondition")'), + ('2','new_table', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('3','new_column', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('4','changing_tables', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('5','changing_language', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('6','changing_signature', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('7','changing_page', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('8','changing_menu', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('9','changing_contracts', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'), + ('10','stylesheet', 'body { + /* You can define your custom styles here or create custom CSS rules */ + }', 'ContractConditions("MainCondition")'), + ('11','changing_blocks', 'ContractConditions("MainCondition")', 'ContractConditions("MainCondition")'); +` diff --git a/packages/migration/vde/vde_data_tables.go b/packages/migration/vde/vde_data_tables.go new file mode 100644 index 000000000..955514d55 --- /dev/null +++ b/packages/migration/vde/vde_data_tables.go @@ -0,0 +1,76 @@ +package vde + +var tablesDataSQL = ` +INSERT INTO "%[1]d_tables" ("id", "name", "permissions","columns", "conditions") VALUES ('1', 'contracts', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "false", + "value": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), + ('2', 'languages', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{ "name": "ContractConditions(\"MainCondition\")", + "res": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("EditTable")'), + ('3', 'menu', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", 
+"conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('4', 'pages', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"menu": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")", +"validate_count": "ContractConditions(\"MainCondition\")", +"validate_mode": "ContractConditions(\"MainCondition\")", +"app_id": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('5', 'blocks', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('6', 'signatures', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"name": "ContractConditions(\"MainCondition\")", +"value": "ContractConditions(\"MainCondition\")", +"conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractAccess("EditTable")'), + ('7', 'cron', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"owner": "ContractConditions(\"MainCondition\")", + "cron": "ContractConditions(\"MainCondition\")", + "contract": "ContractConditions(\"MainCondition\")", + "counter": "ContractConditions(\"MainCondition\")", + "till": "ContractConditions(\"MainCondition\")", + "conditions": "ContractConditions(\"MainCondition\")" + }', 'ContractConditions("MainCondition")'), + ('8', 'binaries', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"app_id": "ContractConditions(\"MainCondition\")", + "member_id": "ContractConditions(\"MainCondition\")", + "name": "ContractConditions(\"MainCondition\")", + "data": "ContractConditions(\"MainCondition\")", + "hash": "ContractConditions(\"MainCondition\")", + "mime_type": "ContractConditions(\"MainCondition\")"}', + 'ContractConditions("MainCondition")'), + ('9', 'keys', + '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", + "new_column": "ContractConditions(\"MainCondition\")"}', + '{"pub": "ContractConditions(\"MainCondition\")", + "multi": "ContractConditions(\"MainCondition\")", + "deleted": "ContractConditions(\"MainCondition\")", + "blocked": "ContractConditions(\"MainCondition\")"}', + 'ContractConditions("MainCondition")'); +` diff --git a/packages/migration/vde/vde_schema.go b/packages/migration/vde/vde_schema.go new file mode 100644 index 000000000..7edf5da94 --- /dev/null +++ b/packages/migration/vde/vde_schema.go @@ -0,0 +1,172 @@ +package vde + +import ( + "strings" +) + +// GetVDEScript returns script for VDE schema +func GetVDEScript() string { + scripts := []string{ + schemaVDE, + membersDataSQL, + menuDataSQL, + pagesDataSQL, + parametersDataSQL, + tablesDataSQL, + contractsDataSQL, + keysDataSQL, + } + + return strings.Join(scripts, "\r\n") +} + +var schemaVDE = ` + 
DROP TABLE IF EXISTS "%[1]d_keys"; CREATE TABLE "%[1]d_keys" ( + "id" bigint NOT NULL DEFAULT '0', + "pub" bytea NOT NULL DEFAULT '', + "multi" bigint NOT NULL DEFAULT '0', + "deleted" bigint NOT NULL DEFAULT '0', + "blocked" bigint NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_keys" ADD CONSTRAINT "%[1]d_keys_pkey" PRIMARY KEY (id); + + DROP TABLE IF EXISTS "%[1]d_members"; + CREATE TABLE "%[1]d_members" ( + "id" bigint NOT NULL DEFAULT '0', + "member_name" varchar(255) NOT NULL DEFAULT '', + "image_id" bigint, + "member_info" jsonb + ); + ALTER TABLE ONLY "%[1]d_members" ADD CONSTRAINT "%[1]d_members_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_languages"; CREATE TABLE "%[1]d_languages" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(100) NOT NULL DEFAULT '', + "res" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_languages" ADD CONSTRAINT "%[1]d_languages_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_languages_index_name" ON "%[1]d_languages" (name); + + DROP TABLE IF EXISTS "%[1]d_menu"; CREATE TABLE "%[1]d_menu" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "title" character varying(255) NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_menu" ADD CONSTRAINT "%[1]d_menu_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_menu_index_name" ON "%[1]d_menu" (name); + + DROP TABLE IF EXISTS "%[1]d_pages"; CREATE TABLE "%[1]d_pages" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "menu" character varying(255) NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '', + "validate_count" bigint NOT NULL DEFAULT '1', + "app_id" bigint NOT NULL DEFAULT '0', + "validate_mode" character(1) NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_pages" ADD CONSTRAINT "%[1]d_pages_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_pages_index_name" ON "%[1]d_pages" (name); + + DROP TABLE IF EXISTS "%[1]d_blocks"; CREATE TABLE "%[1]d_blocks" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_blocks" ADD CONSTRAINT "%[1]d_blocks_pkey" PRIMARY KEY (id); + CREATE INDEX "%[1]d_blocks_index_name" ON "%[1]d_blocks" (name); + + DROP TABLE IF EXISTS "%[1]d_signatures"; CREATE TABLE "%[1]d_signatures" ( + "id" bigint NOT NULL DEFAULT '0', + "name" character varying(100) NOT NULL DEFAULT '', + "value" jsonb, + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_signatures" ADD CONSTRAINT "%[1]d_signatures_pkey" PRIMARY KEY (name); + + CREATE TABLE "%[1]d_contracts" ( + "id" bigint NOT NULL DEFAULT '0', + "name" text NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_contracts" ADD CONSTRAINT "%[1]d_contracts_pkey" PRIMARY KEY (id); + + DROP TABLE IF EXISTS "%[1]d_parameters"; + CREATE TABLE "%[1]d_parameters" ( + "id" bigint NOT NULL DEFAULT '0', + "name" varchar(255) UNIQUE NOT NULL DEFAULT '', + "value" text NOT NULL DEFAULT '', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_parameters" ADD CONSTRAINT "%[1]d_parameters_pkey" PRIMARY KEY ("id"); + CREATE INDEX "%[1]d_parameters_index_name" ON "%[1]d_parameters" (name); + + DROP TABLE IF EXISTS "%[1]d_cron"; + CREATE TABLE "%[1]d_cron" ( + "id" 
bigint NOT NULL DEFAULT '0', + "owner" bigint NOT NULL DEFAULT '0', + "cron" varchar(255) NOT NULL DEFAULT '', + "contract" varchar(255) NOT NULL DEFAULT '', + "counter" bigint NOT NULL DEFAULT '0', + "till" timestamp NOT NULL DEFAULT timestamp '1970-01-01 00:00:00', + "conditions" text NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_cron" ADD CONSTRAINT "%[1]d_cron_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_binaries"; + CREATE TABLE "%[1]d_binaries" ( + "id" bigint NOT NULL DEFAULT '0', + "app_id" bigint NOT NULL DEFAULT '1', + "member_id" bigint NOT NULL DEFAULT '0', + "name" varchar(255) NOT NULL DEFAULT '', + "data" bytea NOT NULL DEFAULT '', + "hash" varchar(32) NOT NULL DEFAULT '', + "mime_type" varchar(255) NOT NULL DEFAULT '' + ); + ALTER TABLE ONLY "%[1]d_binaries" ADD CONSTRAINT "%[1]d_binaries_pkey" PRIMARY KEY (id); + CREATE UNIQUE INDEX "%[1]d_binaries_index_app_id_member_id_name" ON "%[1]d_binaries" (app_id, member_id, name); + + CREATE TABLE "%[1]d_tables" ( + "id" bigint NOT NULL DEFAULT '0', + "name" varchar(100) UNIQUE NOT NULL DEFAULT '', + "permissions" jsonb, + "columns" jsonb, + "conditions" text NOT NULL DEFAULT '', + "app_id" bigint NOT NULL DEFAULT '1' + ); + ALTER TABLE ONLY "%[1]d_tables" ADD CONSTRAINT "%[1]d_tables_pkey" PRIMARY KEY ("id"); + CREATE INDEX "%[1]d_tables_index_name" ON "%[1]d_tables" (name); + + DROP TABLE IF EXISTS "%[1]d_notifications"; + CREATE TABLE "%[1]d_notifications" ( + "id" bigint NOT NULL DEFAULT '0', + "recipient" jsonb, + "sender" jsonb, + "notification" jsonb, + "page_params" jsonb, + "processing_info" jsonb, + "page_name" varchar(255) NOT NULL DEFAULT '', + "date_created" timestamp, + "date_start_processing" timestamp, + "date_closed" timestamp, + "closed" bigint NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_notifications" ADD CONSTRAINT "%[1]d_notifications_pkey" PRIMARY KEY ("id"); + + DROP TABLE IF EXISTS "%[1]d_roles_participants"; + CREATE TABLE "%[1]d_roles_participants" ( + "id" bigint NOT NULL DEFAULT '0', + "role" jsonb, + "member" jsonb, + "appointed" jsonb, + "date_created" timestamp, + "date_deleted" timestamp, + "deleted" bigint NOT NULL DEFAULT '0' + ); + ALTER TABLE ONLY "%[1]d_roles_participants" ADD CONSTRAINT "%[1]d_roles_participants_pkey" PRIMARY KEY ("id"); + + ` diff --git a/packages/model/batch.go b/packages/model/batch.go new file mode 100644 index 000000000..f36c4d68d --- /dev/null +++ b/packages/model/batch.go @@ -0,0 +1,85 @@ +package model + +import ( + "fmt" + "strings" +) + +const maxBatchRows = 1000 + +// BatchModel allows bulk insert on BatchModel slice +type BatchModel interface { + TableName() string + FieldValue(fieldName string) (interface{}, error) +} + +// BatchInsert create and execute batch queries from rows splitted by maxBatchRows and fields +func BatchInsert(rows []BatchModel, fields []string) error { + queries, values, err := batchQueue(rows, fields) + if err != nil { + return err + } + + for i := 0; i < len(queries); i++ { + if err := DBConn.Exec(queries[i], values[i]...).Error; err != nil { + return err + } + } + + return nil +} + +func batchQueue(rows []BatchModel, fields []string) (queries []string, values [][]interface{}, err error) { + for len(rows) > 0 { + if len(rows) > maxBatchRows { + q, vals, err := prepareQuery(rows[:maxBatchRows], fields) + if err != nil { + return queries, values, err + } + + queries = append(queries, q) + values = append(values, vals) + rows = rows[maxBatchRows:] + continue + } + + q, vals, err := prepareQuery(rows, fields) + if err 
!= nil { + return queries, values, err + } + + queries = append(queries, q) + values = append(values, vals) + rows = nil + } + + return +} + +func prepareQuery(rows []BatchModel, fields []string) (query string, values []interface{}, err error) { + valueTemplates := make([]string, 0, len(rows)) + values = make([]interface{}, 0, len(rows)*len(fields)) + query = fmt.Sprintf(`INSERT INTO "%s" (%s) VALUES `, rows[0].TableName(), strings.Join(fields, ",")) + + rowQSlice := make([]string, 0, len(fields)) + for range fields { + rowQSlice = append(rowQSlice, "?") + } + + valueTemplate := fmt.Sprintf("(%s)", strings.Join(rowQSlice, ",")) + + for _, row := range rows { + valueTemplates = append(valueTemplates, valueTemplate) + for _, field := range fields { + val, err := row.FieldValue(field) + if err != nil { + return query, values, err + } + + values = append(values, val) + } + } + + query += strings.Join(valueTemplates, ",") + return +} diff --git a/packages/model/batch_test.go b/packages/model/batch_test.go new file mode 100644 index 000000000..2b260c2f5 --- /dev/null +++ b/packages/model/batch_test.go @@ -0,0 +1,44 @@ +package model + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" +) + +type TestBatchModel struct { + ID int64 + Name string +} + +func (m TestBatchModel) TableName() string { + return "test_batch" +} + +func (m TestBatchModel) FieldValue(fieldName string) (interface{}, error) { + switch fieldName { + case "id": + return m.ID, nil + case "name": + return m.Name, nil + default: + return nil, fmt.Errorf("Unknown field %s of TestBatchModel", fieldName) + } +} + +func TestPrepareQuery(t *testing.T) { + slice := []BatchModel{ + TestBatchModel{ID: 1, Name: "first"}, + TestBatchModel{ID: 2, Name: "second"}, + } + + query, args, err := prepareQuery(slice, []string{"id", "name"}) + require.NoError(t, err) + + checkQuery := `INSERT INTO "test_batch" (id,name) VALUES (?,?),(?,?)` + checkArgs := []interface{}{int64(1), "first", int64(2), "second"} + + require.Equal(t, checkQuery, query) + require.Equal(t, checkArgs, args) +} diff --git a/packages/model/db.go b/packages/model/db.go index f784fc70c..fdf4d54aa 100644 --- a/packages/model/db.go +++ b/packages/model/db.go @@ -10,6 +10,7 @@ import ( "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/crypto" "github.com/GenesisKernel/go-genesis/packages/migration" + "github.com/GenesisKernel/go-genesis/packages/migration/vde" "github.com/jinzhu/gorm" log "github.com/sirupsen/logrus" @@ -139,23 +140,23 @@ func GetRecordsCountTx(db *DbTransaction, tableName string) (int64, error) { // ExecSchemaEcosystem is executing ecosystem schema func ExecSchemaEcosystem(db *DbTransaction, id int, wallet int64, name string, founder int64) error { - err := GetDB(db).Exec(fmt.Sprintf(migration.GetEcosystemScript(), id, wallet, name, founder)).Error - if err != nil { + q := fmt.Sprintf(migration.GetEcosystemScript(), id, wallet, name, founder) + if err := GetDB(db).Exec(q).Error; err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("executing ecosystem schema") return err } if id == 1 { - err = GetDB(db).Exec(fmt.Sprintf(migration.GetFirstEcosystemScript(), wallet)).Error - if err != nil { + q = fmt.Sprintf(migration.GetFirstEcosystemScript(), wallet) + if err := GetDB(db).Exec(q).Error; err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("executing first ecosystem schema") } } - return err + return nil } // ExecSchemaLocalData is 
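As an aside, a minimal usage sketch for the new batch API, reusing the TestBatchModel type from the test above (package model; DBConn is assumed to be initialised elsewhere): any type implementing TableName and FieldValue can be written in bulk, and BatchInsert splits the slice into chunks of maxBatchRows (1000) before executing them.

rows := []BatchModel{
	TestBatchModel{ID: 1, Name: "first"},
	TestBatchModel{ID: 2, Name: "second"},
}
if err := BatchInsert(rows, []string{"id", "name"}); err != nil {
	// handle the error; each chunk is executed as a single multi-row INSERT
}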
executing schema with local data func ExecSchemaLocalData(id int, wallet int64) error { - return DBConn.Exec(fmt.Sprintf(migration.SchemaVDE, id, wallet)).Error + return DBConn.Exec(fmt.Sprintf(vde.GetVDEScript(), id, wallet)).Error } // ExecSchema is executing schema @@ -215,12 +216,12 @@ func SendTx(txType int64, adminWallet int64, data []byte) ([]byte, error) { // AlterTableAddColumn is adding column to table func AlterTableAddColumn(transaction *DbTransaction, tableName, columnName, columnType string) error { - return GetDB(transaction).Exec(`ALTER TABLE "` + tableName + `" ADD COLUMN ` + columnName + ` ` + columnType).Error + return GetDB(transaction).Exec(`ALTER TABLE "` + tableName + `" ADD COLUMN "` + columnName + `" ` + columnType).Error } // AlterTableDropColumn is dropping column from table func AlterTableDropColumn(tableName, columnName string) error { - return DBConn.Exec(`ALTER TABLE "` + tableName + `" DROP COLUMN ` + columnName).Error + return DBConn.Exec(`ALTER TABLE "` + tableName + `" DROP COLUMN "` + columnName + `"`).Error } // CreateIndex is creating index on table column @@ -384,5 +385,34 @@ func InitDB(cfg conf.DBConfig) error { return err } + if conf.Config.IsSupportingVDE() { + if err := ExecSchemaLocalData(consts.DefaultVDE, conf.Config.KeyID); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating VDE schema") + return err + } + } + + return nil +} + +// DropDatabase kill all process and drop database +func DropDatabase(name string) error { + query := `SELECT + pg_terminate_backend (pg_stat_activity.pid) + FROM + pg_stat_activity + WHERE + pg_stat_activity.datname = ?` + + if err := DBConn.Exec(query, name).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err, "dbname": name}).Error("on kill db process") + return err + } + + if err := DBConn.Exec(fmt.Sprintf("DROP DATABASE IF EXISTS %s", name)).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err, "dbname": name}).Error("on drop db") + return err + } + return nil } diff --git a/packages/model/queue_tx.go b/packages/model/queue_tx.go index b19c24224..18d57a0a5 100644 --- a/packages/model/queue_tx.go +++ b/packages/model/queue_tx.go @@ -1,5 +1,9 @@ package model +import ( + "fmt" +) + // QueueTx is model type QueueTx struct { Hash []byte `gorm:"primary_key;not null"` @@ -76,3 +80,17 @@ func GetAllUnverifiedAndUnusedTransactions() ([]*QueueTx, error) { } return result, nil } + +// FieldValue implementing BatchModel interface +func (qt QueueTx) FieldValue(fieldName string) (interface{}, error) { + switch fieldName { + case "hash": + return qt.Hash, nil + case "data": + return qt.Data, nil + case "from_gate": + return qt.FromGate, nil + default: + return nil, fmt.Errorf("Unknown field '%s' for QueueTx", fieldName) + } +} diff --git a/packages/model/rollback_tx.go b/packages/model/rollback_tx.go index dbf4dce09..68583a120 100644 --- a/packages/model/rollback_tx.go +++ b/packages/model/rollback_tx.go @@ -30,7 +30,8 @@ func (rt *RollbackTx) GetBlockRollbackTransactions(dbTransaction *DbTransaction, // GetRollbackTxsByTableIDAndTableName returns records of rollback by table name and id func (rt *RollbackTx) GetRollbackTxsByTableIDAndTableName(tableID, tableName string, limit int) (*[]RollbackTx, error) { rollbackTx := new([]RollbackTx) - if err := DBConn.Where("table_id = ? AND table_name = ?", tableID, tableName).Limit(limit).Find(rollbackTx).Error; err != nil { + if err := DBConn.Where("table_id = ? 
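A short note on why the schema strings above use indexed format verbs: ExecSchemaLocalData substitutes the VDE identifier into every `%[1]d` occurrence with a single Sprintf call (the key id passed as the second argument is presumably consumed by the seed-data fragments such as keysDataSQL). A minimal sketch with a made-up id:

schema := `CREATE TABLE "%[1]d_keys" ("id" bigint NOT NULL DEFAULT '0');`
fmt.Println(fmt.Sprintf(schema, 25))
// CREATE TABLE "25_keys" ("id" bigint NOT NULL DEFAULT '0');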
AND table_name = ?", tableID, tableName). + Order("id desc").Limit(limit).Find(rollbackTx).Error; err != nil { return nil, err } return rollbackTx, nil diff --git a/packages/model/system_parameters.go b/packages/model/system_parameters.go index f3137aa99..3ed2c7f91 100644 --- a/packages/model/system_parameters.go +++ b/packages/model/system_parameters.go @@ -22,6 +22,11 @@ func (sp *SystemParameter) Get(name string) (bool, error) { return isFound(DBConn.Where("name = ?", name).First(sp)) } +// GetTransaction is retrieving model from database using transaction +func (sp *SystemParameter) GetTransaction(transaction *DbTransaction, name string) (bool, error) { + return isFound(GetDB(transaction).Where("name = ?", name).First(sp)) +} + // GetJSONField returns fields as json func (sp *SystemParameter) GetJSONField(jsonField string, name string) (string, error) { var result string diff --git a/packages/script/vm.go b/packages/script/vm.go index 57cc779ae..65f8d81c2 100644 --- a/packages/script/vm.go +++ b/packages/script/vm.go @@ -63,7 +63,7 @@ var sysVars = map[string]struct{}{ `parent`: {}, `original_contract`: {}, `sc`: {}, - `stack_cont`: {}, + `stack`: {}, `this_contract`: {}, `time`: {}, `type`: {}, @@ -96,8 +96,8 @@ type RunTime struct { err error unwrap bool callDepth uint16 - mem int64 - memVars map[interface{}]int64 + mem int64 + memVars map[interface{}]int64 } func isSysVar(name string) bool { diff --git a/packages/script/vminit.go b/packages/script/vminit.go index 8b6adf7bd..a82309641 100644 --- a/packages/script/vminit.go +++ b/packages/script/vminit.go @@ -69,6 +69,8 @@ const ( VMTypeSmart VMType = 1 // VMTypeVDE is vde vm type VMTypeVDE VMType = 2 + // VMTypeVDEMaster is VDEMaster type + VMTypeVDEMaster VMType = 3 TagFile = "file" TagAddress = "address" @@ -188,6 +190,11 @@ type ExtendData struct { AutoPars map[string]string } +// Stacker represents interface for working with call stack +type Stacker interface { + AppendStack(contract string) +} + // ParseContract gets a state identifier and the name of the contract from the full name like @[id]name func ParseContract(in string) (id uint64, name string) { var err error @@ -280,10 +287,10 @@ func ExecContract(rt *RunTime, name, txs string, params ...interface{}) (interfa } } rt.cost -= CostContract - var stackCont func(interface{}, string) - if stack, ok := (*rt.extend)[`stack_cont`]; ok && (*rt.extend)[`sc`] != nil { - stackCont = stack.(func(interface{}, string)) - stackCont((*rt.extend)[`sc`], name) + + var stack Stacker + if stack, ok = (*rt.extend)["sc"].(Stacker); ok { + stack.AppendStack(name) } if (*rt.extend)[`sc`] != nil && isSignature { obj := rt.vm.Objects[`check_signature`] @@ -305,8 +312,8 @@ func ExecContract(rt *RunTime, name, txs string, params ...interface{}) (interfa } } } - if stackCont != nil { - stackCont((*rt.extend)[`sc`], ``) + if stack != nil { + stack.AppendStack("") } (*rt.extend)[`parent`] = prevparent (*rt.extend)[`this_contract`] = prevthis diff --git a/packages/service/node_ban.go b/packages/service/node_ban.go index e69810f77..d8292de5d 100644 --- a/packages/service/node_ban.go +++ b/packages/service/node_ban.go @@ -143,7 +143,7 @@ func (nbs *NodesBanService) newBadBlock(producer syspar.FullNode, blockId, block } params = append(append(params, converter.EncodeLength(int64(len(reason)))...), []byte(reason)...) 
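A minimal sketch of the Stacker hook introduced in vminit.go above (demoStacker is a hypothetical type; in the real code SmartContract is the implementer): ExecContract pushes the callee's name on entry and pops it again by passing an empty string on exit.

type demoStacker struct{ stack []string }

// AppendStack pushes a contract name, or pops the top entry when name is empty.
func (d *demoStacker) AppendStack(contract string) {
	if contract != "" {
		d.stack = append(d.stack, contract)
	} else {
		d.stack = d.stack[:len(d.stack)-1]
	}
}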
- vm := smart.GetVM(false, 0) + vm := smart.GetVM() contract := smart.VMGetContract(vm, "NewBadBlock", 1) info := contract.Block.Info.(*script.ContractInfo) diff --git a/packages/service/node_relevance.go b/packages/service/node_relevance.go index 65447679b..ffbff798d 100644 --- a/packages/service/node_relevance.go +++ b/packages/service/node_relevance.go @@ -82,6 +82,7 @@ func (n *NodeRelevanceService) checkNodeRelevance() (relevant bool, err error) { _, maxBlockID, err := utils.ChooseBestHost(ctx, remoteHosts, &log.Entry{Logger: &log.Logger{}}) if err != nil { if err == utils.ErrNodesUnavailable { + log.WithFields(log.Fields{"hosts": remoteHosts}).Info("can't connect to others, stopping node relevance") return false, nil } return false, errors.Wrapf(err, "choosing best host") @@ -89,11 +90,13 @@ func (n *NodeRelevanceService) checkNodeRelevance() (relevant bool, err error) { // Node can't connect to others if maxBlockID == -1 { + log.WithFields(log.Fields{"hosts": remoteHosts}).Info("can't connect to others, stopping node relevance") return false, nil } // Node blockchain is stale if curBlock.BlockID+n.availableBlockchainGap < maxBlockID { + log.WithFields(log.Fields{"maxBlockID": maxBlockID, "curBlockID": curBlock.BlockID, "Gap": n.availableBlockchainGap}).Info("blockchain is stale, stopping node relevance") return false, nil } diff --git a/packages/smart/errors.go b/packages/smart/errors.go index de42178d8..60bc7c0b1 100644 --- a/packages/smart/errors.go +++ b/packages/smart/errors.go @@ -20,6 +20,8 @@ import "errors" const ( eTableNotFound = `Table %s has not been found` + eContractLoop = `There is loop in %s contract` + eContractExist = `Contract %s already exists` ) var ( @@ -28,4 +30,7 @@ var ( errContractNotFound = errors.New(`Contract has not been found`) errAccessRollbackContract = errors.New(`RollbackContract can be only called from Import or NewContract`) errCommission = errors.New("There is not enough money to pay the commission fee") + errEmptyColumn = errors.New(`Column name is empty`) + errWrongColumn = errors.New(`Column name cannot begin with digit`) + errNotFound = errors.New(`Record has not been found`) ) diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index c8b0105dd..b2fc24f96 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -35,6 +35,7 @@ import ( "strconv" "strings" "time" + "unicode/utf8" "github.com/GenesisKernel/go-genesis/packages/conf/syspar" "github.com/GenesisKernel/go-genesis/packages/consts" @@ -46,13 +47,19 @@ import ( "github.com/GenesisKernel/go-genesis/packages/script" "github.com/GenesisKernel/go-genesis/packages/utils" "github.com/GenesisKernel/go-genesis/packages/utils/tx" + "github.com/GenesisKernel/go-genesis/packages/vdemanager" "github.com/satori/go.uuid" "github.com/shopspring/decimal" log "github.com/sirupsen/logrus" ) -const nodeBanNotificationHeader = "Your node was banned" +const ( + nodeBanNotificationHeader = "Your node was banned" + historyLimit = 250 +) + +var BOM = []byte{0xEF, 0xBB, 0xBF} type permTable struct { Insert string `json:"insert"` @@ -71,6 +78,7 @@ type permColumn struct { type SmartContract struct { VDE bool Rollback bool + FullAccess bool SysUpdate bool VM *script.VM TxSmart tx.SmartContract @@ -80,11 +88,23 @@ type SmartContract struct { TxCost int64 // Maximum cost of executing contract TxUsedCost decimal.Decimal // Used cost of CPU resources BlockData *utils.BlockData + Loop map[string]bool TxHash []byte PublicKeys [][]byte DbTransaction *model.DbTransaction } +// AppendStack adds an 
element to the stack of contract call or removes the top element when name is empty +func (sc *SmartContract) AppendStack(contract string) { + cont := sc.TxContract + if len(contract) > 0 { + cont.StackCont = append(cont.StackCont, contract) + } else { + cont.StackCont = cont.StackCont[:len(cont.StackCont)-1] + } + (*sc.TxContract.Extend)["stack"] = cont.StackCont +} + var ( funcCallsDB = map[string]struct{}{ "DBInsert": {}, @@ -94,41 +114,41 @@ var ( "SetPubKey": {}, } extendCost = map[string]int64{ - "AddressToId": 10, - "ColumnCondition": 50, - "CompileContract": 100, - "Contains": 10, - "ContractAccess": 50, - "ContractConditions": 50, - "ContractsList": 10, - "CreateColumn": 50, - "CreateTable": 100, - "CreateLanguage": 50, - "EditLanguage": 50, - "EcosysParam": 10, - "AppParam": 10, - "Eval": 10, - "EvalCondition": 20, - "FlushContract": 50, - "GetContractByName": 20, - "GetContractById": 20, - "HMac": 50, - "Join": 10, - "JSONToMap": 50, - "Sha256": 50, - "IdToAddress": 10, - "IsObject": 10, - "Len": 5, - "Replace": 10, - "PermColumn": 50, - "Split": 50, - "PermTable": 100, - "Substr": 10, - "Size": 10, - "ToLower": 10, - "TrimSpace": 10, - "TableConditions": 100, - "ValidateCondition": 30, + "AddressToId": 10, + "ColumnCondition": 50, + "Contains": 10, + "ContractAccess": 50, + "ContractConditions": 50, + "ContractName": 10, + "CreateColumn": 50, + "CreateTable": 100, + "CreateLanguage": 50, + "EditLanguage": 50, + "CreateContract": 60, + "UpdateContract": 60, + "EcosysParam": 10, + "AppParam": 10, + "Eval": 10, + "EvalCondition": 20, + "GetContractByName": 20, + "GetContractById": 20, + "HMac": 50, + "Join": 10, + "JSONToMap": 50, + "Sha256": 50, + "IdToAddress": 10, + "Len": 5, + "Replace": 10, + "PermColumn": 50, + "Split": 50, + "PermTable": 100, + "Substr": 10, + "Size": 10, + "ToLower": 10, + "TrimSpace": 10, + "TableConditions": 100, + "ValidateCondition": 30, + "ValidateEditContractNewValue": 10, } // map for table name to parameter with conditions tableParamConditions = map[string]string{ @@ -152,87 +172,102 @@ func getCost(name string) int64 { // EmbedFuncs is extending vm with embedded functions func EmbedFuncs(vm *script.VM, vt script.VMType) { f := map[string]interface{}{ - "AddressToId": AddressToID, - "ColumnCondition": ColumnCondition, - "CompileContract": CompileContract, - "Contains": strings.Contains, - "ContractAccess": ContractAccess, - "ContractConditions": ContractConditions, - "ContractsList": contractsList, - "CreateColumn": CreateColumn, - "CreateTable": CreateTable, - "DBInsert": DBInsert, - "DBSelect": DBSelect, - "DBUpdate": DBUpdate, - "DBUpdateSysParam": UpdateSysParam, - "DBUpdateExt": DBUpdateExt, - "EcosysParam": EcosysParam, - "AppParam": AppParam, - "SysParamString": SysParamString, - "SysParamInt": SysParamInt, - "SysFuel": SysFuel, - "Eval": Eval, - "EvalCondition": EvalCondition, - "Float": Float, - "FlushContract": FlushContract, - "GetContractByName": GetContractByName, - "GetContractById": GetContractById, - "HMac": HMac, - "Join": Join, - "JSONToMap": JSONDecode, // Deprecated - "JSONDecode": JSONDecode, - "JSONEncode": JSONEncode, - "IdToAddress": IDToAddress, - "Int": Int, - "IsObject": IsObject, - "Len": Len, - "Money": Money, - "PermColumn": PermColumn, - "PermTable": PermTable, - "Random": Random, - "Split": Split, - "Str": Str, - "Substr": Substr, - "Replace": Replace, - "Size": Size, - "Sha256": Sha256, - "PubToID": PubToID, - "HexToBytes": HexToBytes, - "LangRes": LangRes, - "HasPrefix": strings.HasPrefix, - "ValidateCondition": 
ValidateCondition, - "TrimSpace": strings.TrimSpace, - "ToLower": strings.ToLower, - "CreateEcosystem": CreateEcosystem, - "RollbackEcosystem": RollbackEcosystem, - "RollbackTable": RollbackTable, - "TableConditions": TableConditions, - "RollbackColumn": RollbackColumn, - "CreateLanguage": CreateLanguage, - "EditLanguage": EditLanguage, - "Activate": Activate, - "Deactivate": Deactivate, - "SetContractWallet": SetContractWallet, - "RollbackContract": RollbackContract, - "RollbackEditContract": RollbackEditContract, - "check_signature": CheckSignature, - "RowConditions": RowConditions, - "UUID": UUID, - "DecodeBase64": DecodeBase64, - "EncodeBase64": EncodeBase64, - "MD5": MD5, - "EditEcosysName": EditEcosysName, - "GetColumnType": GetColumnType, - "GetType": GetType, - "AllowChangeCondition": AllowChangeCondition, - "StringToBytes": StringToBytes, - "BytesToString": BytesToString, - "SetPubKey": SetPubKey, - "NewMoney": NewMoney, + "AddressToId": AddressToID, + "ColumnCondition": ColumnCondition, + "Contains": strings.Contains, + "ContractAccess": ContractAccess, + "ContractConditions": ContractConditions, + "ContractName": contractName, + "ValidateEditContractNewValue": ValidateEditContractNewValue, + "CreateColumn": CreateColumn, + "CreateTable": CreateTable, + "DBInsert": DBInsert, + "DBSelect": DBSelect, + "DBUpdate": DBUpdate, + "DBUpdateSysParam": UpdateSysParam, + "DBUpdateExt": DBUpdateExt, + "EcosysParam": EcosysParam, + "AppParam": AppParam, + "SysParamString": SysParamString, + "SysParamInt": SysParamInt, + "SysFuel": SysFuel, + "Eval": Eval, + "EvalCondition": EvalCondition, + "Float": Float, + "GetContractByName": GetContractByName, + "GetContractById": GetContractById, + "HMac": HMac, + "Join": Join, + "JSONToMap": JSONDecode, // Deprecated + "JSONDecode": JSONDecode, + "JSONEncode": JSONEncode, + "IdToAddress": IDToAddress, + "Int": Int, + "Len": Len, + "Money": Money, + "PermColumn": PermColumn, + "PermTable": PermTable, + "Random": Random, + "Split": Split, + "Str": Str, + "Substr": Substr, + "Replace": Replace, + "Size": Size, + "Sha256": Sha256, + "PubToID": PubToID, + "HexToBytes": HexToBytes, + "LangRes": LangRes, + "HasPrefix": strings.HasPrefix, + "ValidateCondition": ValidateCondition, + "TrimSpace": strings.TrimSpace, + "ToLower": strings.ToLower, + "CreateEcosystem": CreateEcosystem, + "RollbackEcosystem": RollbackEcosystem, + "CreateContract": CreateContract, + "UpdateContract": UpdateContract, + "RollbackTable": RollbackTable, + "TableConditions": TableConditions, + "RollbackColumn": RollbackColumn, + "CreateLanguage": CreateLanguage, + "EditLanguage": EditLanguage, + "Activate": Activate, + "Deactivate": Deactivate, + "RollbackContract": RollbackContract, + "RollbackEditContract": RollbackEditContract, + "RollbackNewContract": RollbackNewContract, + "check_signature": CheckSignature, + "RowConditions": RowConditions, + "UUID": UUID, + "DecodeBase64": DecodeBase64, + "EncodeBase64": EncodeBase64, + "MD5": MD5, + "EditEcosysName": EditEcosysName, + "GetColumnType": GetColumnType, + "GetType": GetType, + "AllowChangeCondition": AllowChangeCondition, + "StringToBytes": StringToBytes, + "BytesToString": BytesToString, + "SetPubKey": SetPubKey, + "NewMoney": NewMoney, + "GetMapKeys": GetMapKeys, + "SortedKeys": SortedKeys, + "Append": Append, + "GetPageHistory": GetPageHistory, + "GetBlockHistory": GetBlockHistory, + "GetMenuHistory": GetMenuHistory, + "GetContractHistory": GetContractHistory, } switch vt { case script.VMTypeVDE: + f["HTTPRequest"] = HTTPRequest + 
f["Date"] = Date + f["HTTPPostJSON"] = HTTPPostJSON + f["ValidateCron"] = ValidateCron + f["UpdateCron"] = UpdateCron + vmExtendCost(vm, getCost) + vmFuncCallsDB(vm, funcCallsDB) + case script.VMTypeVDEMaster: f["HTTPRequest"] = HTTPRequest f["GetMapKeys"] = GetMapKeys f["SortedKeys"] = SortedKeys @@ -240,6 +275,11 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { f["HTTPPostJSON"] = HTTPPostJSON f["ValidateCron"] = ValidateCron f["UpdateCron"] = UpdateCron + f["CreateVDE"] = CreateVDE + f["DeleteVDE"] = DeleteVDE + f["StartVDE"] = StartVDE + f["StopVDEProcess"] = StopVDEProcess + f["GetVDEList"] = GetVDEList vmExtendCost(vm, getCost) vmFuncCallsDB(vm, funcCallsDB) case script.VMTypeSmart: @@ -261,14 +301,11 @@ func GetTableName(sc *SmartContract, tblname string, ecosystem int64) string { return strings.ToLower(tblname[1:]) } prefix := converter.Int64ToStr(ecosystem) - if sc.VDE { - prefix += `_vde` - } return strings.ToLower(fmt.Sprintf(`%s_%s`, prefix, tblname)) } func getDefTableName(sc *SmartContract, tblname string) string { - return GetTableName(sc, tblname, sc.TxSmart.EcosystemID) + return converter.EscapeSQL(GetTableName(sc, tblname, sc.TxSmart.EcosystemID)) } func accessContracts(sc *SmartContract, names ...string) bool { @@ -331,11 +368,22 @@ func ContractConditions(sc *SmartContract, names ...interface{}) (bool, error) { log.WithFields(log.Fields{"contract_name": name, "type": consts.EmptyObject}).Error("There is not conditions in contract") return false, fmt.Errorf(`There is not conditions in contract %s`, name) } - _, err := VMRun(sc.VM, block, []interface{}{}, &map[string]interface{}{`ecosystem_id`: int64(sc.TxSmart.EcosystemID), - `key_id`: sc.TxSmart.KeyID, `sc`: sc, `original_contract`: ``, `this_contract`: ``, `role_id`: sc.TxSmart.RoleID}) + vars := map[string]interface{}{`ecosystem_id`: int64(sc.TxSmart.EcosystemID), + `key_id`: sc.TxSmart.KeyID, `sc`: sc, `original_contract`: ``, `this_contract`: ``, `role_id`: sc.TxSmart.RoleID} + + if sc.Loop == nil { + sc.Loop = make(map[string]bool) + } + if _, ok := sc.Loop[`loop_`+name]; ok { + log.WithFields(log.Fields{"type": consts.ContractError, "contract_name": name}).Error("there is loop in contract") + return false, fmt.Errorf(eContractLoop, name) + } + sc.Loop[`loop_`+name] = true + _, err := VMRun(sc.VM, block, []interface{}{}, &vars) if err != nil { return false, err } + delete(sc.Loop, `loop_`+name) } else { log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("empty contract name in ContractConditions") return false, fmt.Errorf(`empty contract name in ContractConditions`) @@ -344,20 +392,133 @@ func ContractConditions(sc *SmartContract, names ...interface{}) (bool, error) { return true, nil } -func contractsList(value string) ([]interface{}, error) { +func contractName(value string) (string, error) { list, err := script.ContractsList(value) - result := make([]interface{}, len(list)) + if err != nil { + return "", err + } + if len(list) > 0 { + return list[0], nil + } else { + return "", nil + } +} + +func ValidateEditContractNewValue(sc *SmartContract, newValue, oldValue string) error { + list, err := script.ContractsList(newValue) + if err != nil { + return err + } + curlist, err := script.ContractsList(oldValue) + if err != nil { + return err + } + if len(list) != len(curlist) { + return fmt.Errorf("Contract cannot be removed or inserted") + } for i := 0; i < len(list); i++ { - result[i] = reflect.ValueOf(list[i]).Interface() + var ok bool + for j := 0; j < len(curlist); j++ { + if curlist[j] == list[i] 
{ + ok = true + break + } + } + if !ok { + return fmt.Errorf("Contracts or functions names cannot be changed") + } + } + return nil +} + +func UpdateContract(sc *SmartContract, id int64, value, conditions, walletID string, recipient int64, active, tokenID string) error { + if !accessContracts(sc, `EditContract`, `Import`) { + log.WithFields(log.Fields{"type": consts.IncorrectCallingContract}).Error("UpdateContract can be only called from EditContract") + return fmt.Errorf(`UpdateContract can be only called from EditContract`) + } + var pars []string + var vals []interface{} + ecosystemID := sc.TxSmart.EcosystemID + var root interface{} + if value != "" { + var err error + root, err = CompileContract(sc, value, ecosystemID, recipient, converter.StrToInt64(tokenID)) + if err != nil { + return err + } + pars = append(pars, "value") + vals = append(vals, value) + } + if conditions != "" { + pars = append(pars, "conditions") + vals = append(vals, conditions) + } + if walletID != "" { + pars = append(pars, "wallet_id") + vals = append(vals, recipient) + } + if len(vals) > 0 { + if _, err := DBUpdate(sc, "contracts", id, strings.Join(pars, ","), vals...); err != nil { + return err + } + } + if value != "" { + if err := FlushContract(sc, root, id, converter.StrToInt64(active) == 1); err != nil { + return err + } + } else { + if walletID != "" { + if err := SetContractWallet(sc, id, ecosystemID, recipient); err != nil { + return err + } + } } - return result, err + return nil +} + +func CreateContract(sc *SmartContract, name, value, conditions string, walletID, tokenEcosystem, appID int64) (int64, error) { + if !accessContracts(sc, `NewContract`, `Import`) { + log.WithFields(log.Fields{"type": consts.IncorrectCallingContract}).Error("CreateContract can be only called from NewContract") + return 0, fmt.Errorf(`CreateContract can be only called from NewContract`) + } + var id int64 + var err error + + if GetContractByName(sc, name) != 0 { + return 0, fmt.Errorf(eContractExist, name) + } + root, err := CompileContract(sc, value, sc.TxSmart.EcosystemID, walletID, tokenEcosystem) + if err != nil { + return 0, err + } + _, id, err = DBInsert(sc, "contracts", "name,value,conditions,wallet_id,token_id,app_id", name, value, conditions, walletID, tokenEcosystem, appID) + if err != nil { + return 0, err + } + if err := FlushContract(sc, root, id, false); err != nil { + return 0, err + } + return id, nil +} + +func RollbackNewContract(sc *SmartContract, value string) error { + contractList, err := script.ContractsList(value) + if err != nil { + return err + } + for _, contract := range contractList { + if err := RollbackContract(sc, contract); err != nil { + return err + } + } + return nil } // CreateTable is creating smart contract table func CreateTable(sc *SmartContract, name, columns, permissions string, applicationID int64) error { var err error - if !ContractAccess(sc, `NewTable`, `Import`) { - return fmt.Errorf(`CreateTable can be only called from NewTable`) + if !accessContracts(sc, `NewTable`, `NewTableJoint`, `Import`) { + return fmt.Errorf(`CreateTable can be only called from NewTable, NewTableJoint or Import`) } if len(name) == 0 { @@ -396,7 +557,10 @@ func CreateTable(sc *SmartContract, name, columns, permissions string, applicati default: data = v.(map[string]interface{}) } - colname := strings.ToLower(data[`name`].(string)) + colname := converter.EscapeSQL(strings.ToLower(data[`name`].(string))) + if err := checkColumnName(colname); err != nil { + return err + } if colList[colname] { return 
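A sketch of the call shape @1NewContract is expected to use for the new CreateContract built-in (the contract name and source here are hypothetical): the function compiles the source, inserts the contracts row and flushes the compiled block into the VM, returning eContractExist if the name is already taken.

src := `contract MyApp { action { } }`
id, err := CreateContract(sc, "MyApp", src, `ContractConditions("MainCondition")`,
	sc.TxSmart.KeyID, 1, 1)
if err != nil {
	// e.g. eContractExist when a contract named MyApp already exists
}
_ = id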
fmt.Errorf(`There are the same columns`) } @@ -776,11 +940,6 @@ func FlushContract(sc *SmartContract, iroot interface{}, id int64, active bool) return nil } -// IsObject returns true if there is the specified contract -func IsObject(sc *SmartContract, name string, state int64) bool { - return VMObjectExists(sc.VM, name, uint32(state)) -} - // Len returns the length of the slice func Len(in []interface{}) int64 { if in == nil { @@ -820,9 +979,9 @@ func TableConditions(sc *SmartContract, name, columns, permissions string) (err log.WithFields(log.Fields{"type": consts.IncorrectCallingContract}).Error("TableConditions can be only called from @1EditTable") return fmt.Errorf(`TableConditions can be only called from EditTable`) } - } else if !ContractAccess(sc, `NewTable`, `Import`) { - log.WithFields(log.Fields{"type": consts.IncorrectCallingContract}).Error("TableConditions can be only called from @1NewTable") - return fmt.Errorf(`TableConditions can be only called from NewTable or Import`) + } else if !accessContracts(sc, `NewTable`, `Import`, `NewTableJoint`) { + log.WithFields(log.Fields{"type": consts.IncorrectCallingContract}).Error("TableConditions can be only called from @1NewTable, @1Import, @1NewTableJoint") + return fmt.Errorf(`TableConditions can be only called from NewTable or Import or NewTableJoint`) } prefix := converter.Int64ToStr(sc.TxSmart.EcosystemID) @@ -965,8 +1124,8 @@ func ValidateCondition(sc *SmartContract, condition string, state int64) error { // ColumnCondition is contract func func ColumnCondition(sc *SmartContract, tableName, name, coltype, permissions string) error { - name = strings.ToLower(name) - tableName = strings.ToLower(tableName) + name = converter.EscapeSQL(strings.ToLower(name)) + tableName = converter.EscapeSQL(strings.ToLower(tableName)) if !accessContracts(sc, `NewColumn`, `EditColumn`) { log.WithFields(log.Fields{"type": consts.IncorrectCallingContract}).Error("ColumnConditions can be only called from @1NewColumn") return fmt.Errorf(`ColumnCondition can be only called from NewColumn or EditColumn`) @@ -1051,6 +1210,12 @@ func RowConditions(sc *SmartContract, tblname string, id int64, conditionOnly bo return fmt.Errorf("Item %d has not been found", id) } + for _, v := range sc.TxContract.StackCont { + if v == condition { + return fmt.Errorf("Recursion detected") + } + } + if err := Eval(sc, condition); err != nil { if err == errAccessDenied && conditionOnly { return AllowChangeCondition(sc, tblname) @@ -1062,25 +1227,42 @@ func RowConditions(sc *SmartContract, tblname string, id int64, conditionOnly bo return nil } +func checkColumnName(name string) error { + if len(name) == 0 { + return errEmptyColumn + } else if name[0] >= '0' && name[0] <= '9' { + return errWrongColumn + } + return nil +} + // CreateColumn is creating column -func CreateColumn(sc *SmartContract, tableName, name, colType, permissions string) error { +func CreateColumn(sc *SmartContract, tableName, name, colType, permissions string) (err error) { + var ( + sqlColType string + permout []byte + ) if !accessContracts(sc, `NewColumn`) { log.WithFields(log.Fields{"type": consts.InvalidObject}).Error("CreateColumn can be only called from @1NewColumn") return fmt.Errorf(`CreateColumn can be only called from NewColumn`) } - name = strings.ToLower(name) + name = converter.EscapeSQL(strings.ToLower(name)) + if err = checkColumnName(name); err != nil { + return + } + tableName = strings.ToLower(tableName) tblname := getDefTableName(sc, tableName) - sqlColType, err := columnType(colType) + 
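A quick illustration of the new column-name validation (names are lower-cased and SQL-escaped before the check):

_ = checkColumnName("amount")   // nil
_ = checkColumnName("")         // errEmptyColumn
_ = checkColumnName("2fa_code") // errWrongColumn: a name cannot begin with a digit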
sqlColType, err = columnType(colType) if err != nil { - return err + return } err = model.AlterTableAddColumn(sc.DbTransaction, tblname, name, sqlColType) if err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("adding column to the table") - return err + return } tables := getDefTableName(sc, `tables`) @@ -1090,16 +1272,16 @@ func CreateColumn(sc *SmartContract, tableName, name, colType, permissions strin temp := &cols{} err = model.DBConn.Table(tables).Where("name = ?", tableName).Select("columns").Find(temp).Error if err != nil { - return err + return } var perm map[string]string err = json.Unmarshal([]byte(temp.Columns), &perm) if err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("selecting columns from the table") - return err + return } perm[name] = permissions - permout, err := json.Marshal(perm) + permout, err = json.Marshal(perm) if err != nil { log.WithFields(log.Fields{"type": consts.JSONUnmarshallError, "error": err}).Error("unmarshalling columns to json") return err @@ -1107,7 +1289,7 @@ func CreateColumn(sc *SmartContract, tableName, name, colType, permissions strin _, _, err = sc.selectiveLoggingAndUpd([]string{`columns`}, []interface{}{string(permout)}, tables, []string{`name`}, []string{tableName}, !sc.VDE && sc.Rollback, false) if err != nil { - return err + return } return nil @@ -1158,7 +1340,7 @@ func PermColumn(sc *SmartContract, tableName, name, permissions string) error { log.WithFields(log.Fields{"type": consts.IncorrectCallingContract}).Error("EditColumn can be only called from @1EditColumn") return fmt.Errorf(`EditColumn can be only called from EditColumn`) } - name = strings.ToLower(name) + name = converter.EscapeSQL(strings.ToLower(name)) tableName = strings.ToLower(tableName) tables := getDefTableName(sc, `tables`) type cols struct { @@ -1552,5 +1734,115 @@ func StringToBytes(src string) []byte { // BytesToString converts bytes to string func BytesToString(src []byte) string { + if bytes.HasPrefix(src, BOM) && utf8.Valid(src[len(BOM):]) { + return string(src[len(BOM):]) + } return string(src) } + +// CreateVDE allow create new VDE throw vdemanager +func CreateVDE(sc *SmartContract, name, dbUser, dbPassword string, port int64) error { + return vdemanager.Manager.CreateVDE(name, dbUser, dbPassword, int(port)) +} + +// DeleteVDE delete vde +func DeleteVDE(sc *SmartContract, name string) error { + return vdemanager.Manager.DeleteVDE(name) +} + +// StartVDE run VDE process +func StartVDE(sc *SmartContract, name string) error { + return vdemanager.Manager.StartVDE(name) +} + +// StopVDEProcess stops VDE process +func StopVDEProcess(sc *SmartContract, name string) error { + return vdemanager.Manager.StopVDE(name) +} + +// GetVDEList returns list VDE process with statuses +func GetVDEList(sc *SmartContract) (map[string]string, error) { + return vdemanager.Manager.ListProcess() +} + +func GetHistory(transaction *model.DbTransaction, ecosystem int64, tableName string, id int64) ([]interface{}, error) { + table := fmt.Sprintf(`%d_%s`, ecosystem, tableName) + rows, err := model.GetDB(transaction).Table(table).Where("id=?", id).Rows() + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("get current values") + return nil, err + } + if !rows.Next() { + return nil, errNotFound + } + defer rows.Close() + // Get column names + columns, err := rows.Columns() + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("get columns") + return nil, 
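A sketch of the BytesToString change above: a UTF-8 byte-order mark is now stripped before conversion, so BOM-prefixed uploads compare equal to their plain-text counterparts.

src := append([]byte{0xEF, 0xBB, 0xBF}, []byte("contract Demo { }")...)
fmt.Println(BytesToString(src) == "contract Demo { }") // true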
err + } + values := make([][]byte, len(columns)) + scanArgs := make([]interface{}, len(values)) + for i := range values { + scanArgs[i] = &values[i] + } + err = rows.Scan(scanArgs...) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("scan values") + return nil, err + } + var value string + curVal := make(map[string]string) + for i, col := range values { + if col == nil { + value = "NULL" + } else { + value = string(col) + } + curVal[columns[i]] = value + } + rollbackList := []interface{}{} + rollbackTx := &model.RollbackTx{} + txs, err := rollbackTx.GetRollbackTxsByTableIDAndTableName(converter.Int64ToStr(id), + table, historyLimit) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("rollback history") + return nil, err + } + for _, tx := range *txs { + if len(rollbackList) > 0 { + rollbackList[len(rollbackList)-1].(map[string]string)[`block_id`] = converter.Int64ToStr(tx.BlockID) + } + if tx.Data == "" { + continue + } + rollback := make(map[string]string) + for k, v := range curVal { + rollback[k] = v + } + if err := json.Unmarshal([]byte(tx.Data), &rollback); err != nil { + log.WithFields(log.Fields{"type": consts.JSONUnmarshallError, "error": err}).Error("unmarshalling rollbackTx.Data from JSON") + return nil, err + } + rollbackList = append(rollbackList, rollback) + curVal = rollback + } + return rollbackList, nil +} + +func GetBlockHistory(sc *SmartContract, id int64) ([]interface{}, error) { + return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `blocks`, id) +} + +func GetPageHistory(sc *SmartContract, id int64) ([]interface{}, error) { + return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `pages`, id) +} + +func GetMenuHistory(sc *SmartContract, id int64) ([]interface{}, error) { + return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `menu`, id) +} + +func GetContractHistory(sc *SmartContract, id int64) ([]interface{}, error) { + return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `contracts`, id) +} diff --git a/packages/smart/selective.go b/packages/smart/selective.go index aa13e5ebe..80677c43f 100644 --- a/packages/smart/selective.go +++ b/packages/smart/selective.go @@ -86,9 +86,9 @@ func (sc *SmartContract) selectiveLoggingAndUpd(fields []string, ivalues []inter if whereFields != nil && whereValues != nil { for i := 0; i < len(whereFields); i++ { if val := converter.StrToInt64(whereValues[i]); val != 0 { - addSQLWhere += whereFields[i] + "= " + whereValues[i] + " AND " + addSQLWhere += whereFields[i] + "= " + escapeSingleQuotes(whereValues[i]) + " AND " } else { - addSQLWhere += whereFields[i] + "= '" + whereValues[i] + "' AND " + addSQLWhere += whereFields[i] + "= '" + escapeSingleQuotes(whereValues[i]) + "' AND " } } } @@ -143,15 +143,15 @@ func (sc *SmartContract) selectiveLoggingAndUpd(fields []string, ivalues []inter if converter.IsByteColumn(table, fields[i]) && len(values[i]) != 0 { addSQLUpdate += fields[i] + `=decode('` + hex.EncodeToString([]byte(values[i])) + `','HEX'),` } else if fields[i][:1] == "+" { - addSQLUpdate += fields[i][1:len(fields[i])] + `=` + fields[i][1:len(fields[i])] + `+` + values[i] + `,` + addSQLUpdate += fields[i][1:len(fields[i])] + `=` + fields[i][1:len(fields[i])] + `+` + escapeSingleQuotes(values[i]) + `,` } else if fields[i][:1] == "-" { - addSQLUpdate += fields[i][1:len(fields[i])] + `=` + fields[i][1:len(fields[i])] + `-` + values[i] + `,` + addSQLUpdate += fields[i][1:len(fields[i])] + `=` + fields[i][1:len(fields[i])] + `-` + 
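A usage sketch for the history helpers defined above: each wrapper reads up to historyLimit (250) rollback_tx rows for one record in the caller's ecosystem and returns its successive earlier states.

versions, err := GetPageHistory(sc, 1) // earlier versions of the page with id 1
if err != nil {
	// errNotFound is returned when the page itself does not exist
}
for _, v := range versions {
	prev := v.(map[string]string) // one prior state, keyed by column name
	_ = prev["value"]
}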
escapeSingleQuotes(values[i]) + `,` } else if values[i] == `NULL` { addSQLUpdate += fields[i] + `= NULL,` } else if strings.HasPrefix(fields[i], `timestamp `) { addSQLUpdate += fields[i][len(`timestamp `):] + `= to_timestamp('` + values[i] + `'),` } else if strings.HasPrefix(values[i], `timestamp `) { - addSQLUpdate += fields[i] + `= timestamp '` + values[i][len(`timestamp `):] + `',` + addSQLUpdate += fields[i] + `= timestamp '` + escapeSingleQuotes(values[i][len(`timestamp `):]) + `',` } else if strings.Contains(fields[i], `->`) { colfield := strings.Split(fields[i], `->`) if len(colfield) == 2 { @@ -161,7 +161,7 @@ func (sc *SmartContract) selectiveLoggingAndUpd(fields []string, ivalues []inter jsonFields[colfield[0]][colfield[1]] = values[i] } } else { - addSQLUpdate += fields[i] + `='` + strings.Replace(values[i], `'`, `''`, -1) + `',` + addSQLUpdate += fields[i] + `='` + escapeSingleQuotes(values[i]) + `',` } } for colname, colvals := range jsonFields { @@ -201,7 +201,7 @@ func (sc *SmartContract) selectiveLoggingAndUpd(fields []string, ivalues []inter for i := 0; i < len(fields); i++ { if fields[i] == `id` { isID = true - tableID = fmt.Sprint(values[i]) + tableID = escapeSingleQuotes(values[i]) } if strings.Contains(fields[i], `->`) { @@ -210,7 +210,7 @@ func (sc *SmartContract) selectiveLoggingAndUpd(fields []string, ivalues []inter if jsonFields[colfield[0]] == nil { jsonFields[colfield[0]] = make(map[string]string) } - jsonFields[colfield[0]][colfield[1]] = values[i] + jsonFields[colfield[0]][colfield[1]] = escapeSingleQuotes(values[i]) continue } } @@ -227,11 +227,11 @@ func (sc *SmartContract) selectiveLoggingAndUpd(fields []string, ivalues []inter } else if values[i] == `NULL` { addSQLIns1 = append(addSQLIns1, `NULL`) } else if strings.HasPrefix(fields[i], `timestamp`) { - addSQLIns1 = append(addSQLIns1, `to_timestamp('`+values[i]+`')`) + addSQLIns1 = append(addSQLIns1, `to_timestamp('`+escapeSingleQuotes(values[i])+`')`) } else if strings.HasPrefix(values[i], `timestamp`) { - addSQLIns1 = append(addSQLIns1, `timestamp '`+values[i][len(`timestamp `):]+`'`) + addSQLIns1 = append(addSQLIns1, `timestamp '`+escapeSingleQuotes(values[i][len(`timestamp `):])+`'`) } else { - addSQLIns1 = append(addSQLIns1, `'`+strings.Replace(values[i], `'`, `''`, -1)+`'`) + addSQLIns1 = append(addSQLIns1, `'`+escapeSingleQuotes(values[i])+`'`) } } for colname, colvals := range jsonFields { @@ -247,10 +247,10 @@ func (sc *SmartContract) selectiveLoggingAndUpd(fields []string, ivalues []inter for i := 0; i < len(whereFields); i++ { if whereFields[i] == `id` { isID = true - tableID = fmt.Sprint(whereValues[i]) + tableID = whereValues[i] } addSQLIns0 = append(addSQLIns0, whereFields[i]) - addSQLIns1 = append(addSQLIns1, whereValues[i]) + addSQLIns1 = append(addSQLIns1, escapeSingleQuotes(whereValues[i])) } } if !isID { @@ -297,3 +297,7 @@ func (sc *SmartContract) selectiveLoggingAndUpd(fields []string, ivalues []inter } return cost, tableID, nil } + +func escapeSingleQuotes(val string) string { + return strings.Replace(val, `'`, `''`, -1) +} diff --git a/packages/smart/smart.go b/packages/smart/smart.go index 355b7289f..f529a7779 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -66,7 +66,6 @@ const ( var ( smartVM *script.VM - smartVDE map[int64]*script.VM smartTest = make(map[string]string) ErrCurrentBalance = errors.New(`current balance is not enough`) @@ -118,17 +117,10 @@ func newVM() *script.VM { func init() { smartVM = newVM() - smartVDE = make(map[int64]*script.VM) } // GetVM 
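The helper added at the end of selective.go is the only escaping applied to values interpolated into the generated SQL; it doubles single quotes in the PostgreSQL style. A one-line sketch:

fmt.Println(escapeSingleQuotes(`O'Brien`)) // O''Brien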
is returning smart vm -func GetVM(vde bool, ecosystemID int64) *script.VM { - if vde { - if v, ok := smartVDE[ecosystemID]; ok { - return v - } - return nil - } +func GetVM() *script.VM { return smartVM } @@ -182,6 +174,7 @@ func VMRun(vm *script.VM, block *script.Block, params []interface{}, extend *map func VMGetContract(vm *script.VM, name string, state uint32) *Contract { name = script.StateName(state, name) obj, ok := vm.Objects[name] + if ok && obj.Type == script.ObjContract { return &Contract{Name: name, Block: obj.Value.(*script.Block)} } @@ -477,17 +470,24 @@ func LoadContract(transaction *model.DbTransaction, prefix string) (err error) { func LoadVDEContracts(transaction *model.DbTransaction, prefix string) (err error) { var contracts []map[string]string - if !model.IsTable(prefix + `_vde_contracts`) { + if !model.IsTable(prefix + `_contracts`) { return } - contracts, err = model.GetAllTransaction(transaction, `select * from "`+prefix+`_vde_contracts" order by id`, -1) + contracts, err = model.GetAllTransaction(transaction, `select * from "`+prefix+`_contracts" order by id`, -1) if err != nil { return err } state := converter.StrToInt64(prefix) - vm := newVM() - EmbedFuncs(vm, script.VMTypeVDE) - smartVDE[state] = vm + vm := GetVM() + + var vmt script.VMType + if conf.Config.IsVDE() { + vmt = script.VMTypeVDE + } else if conf.Config.IsVDEMaster() { + vmt = script.VMTypeVDEMaster + } + + EmbedFuncs(vm, vmt) LoadSysFuncs(vm, int(state)) for _, item := range contracts { list, err := script.ContractsList(item[`value`]) @@ -503,6 +503,7 @@ func LoadVDEContracts(transaction *model.DbTransaction, prefix string) (err erro WalletID: 0, TokenID: 0, } + if err = vmCompile(vm, item[`value`], &owner); err != nil { log.WithFields(log.Fields{"names": names, "error": err}).Error("Load VDE Contract") } else { @@ -550,17 +551,6 @@ func (sc *SmartContract) getExtend() *map[string]interface{} { return &extend } -// StackCont adds an element to the stack of contract call or removes the top element when name is empty -func StackCont(sc interface{}, name string) { - cont := sc.(*SmartContract).TxContract - if len(name) > 0 { - cont.StackCont = append(cont.StackCont, name) - } else { - cont.StackCont = cont.StackCont[:len(cont.StackCont)-1] - } - return -} - func PrefixName(table string) (prefix, name string) { name = table if off := strings.IndexByte(table, '_'); off > 0 && table[0] >= '0' && table[0] <= '9' { @@ -636,6 +626,9 @@ func (sc *SmartContract) AccessTablePerm(table, action string) (map[string]strin } func (sc *SmartContract) AccessTable(table, action string) error { + if sc.FullAccess { + return nil + } _, err := sc.AccessTablePerm(table, action) return err } @@ -659,6 +652,9 @@ type colAccess struct { // AccessColumns checks access rights to the columns func (sc *SmartContract) AccessColumns(table string, columns *[]string, update bool) error { logger := sc.GetLogger() + if sc.FullAccess { + return nil + } if table == getDefTableName(sc, `parameters`) || table == getDefTableName(sc, `app_params`) { if update { if sc.TxSmart.KeyID == converter.StrToInt64(EcosysParam(sc, `founder_account`)) { @@ -830,9 +826,8 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { } methods := []string{`init`, `conditions`, `action`, `rollback`} - sc.TxContract.StackCont = []string{sc.TxContract.Name} - (*sc.TxContract.Extend)[`stack_cont`] = StackCont - sc.VM = GetVM(sc.VDE, sc.TxSmart.EcosystemID) + sc.AppendStack(sc.TxContract.Name) + sc.VM = GetVM() if (flags&CallRollback) == 0 && 
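A condensed sketch of the VM selection LoadVDEContracts above now performs: VDE and VDE-master nodes share the single smart VM, and the master flavour additionally registers the vdemanager-backed functions (CreateVDE, StartVDE, StopVDEProcess, DeleteVDE, GetVDEList).

vm := GetVM()
vmt := script.VMTypeVDE
if conf.Config.IsVDEMaster() {
	vmt = script.VMTypeVDEMaster
}
EmbedFuncs(vm, vmt)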
(flags&CallAction) != 0 { if !sc.VDE { toID = sc.BlockData.KeyID @@ -871,6 +866,7 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { return retError(ErrEmptyPublicKey) } sc.PublicKeys = append(sc.PublicKeys, public) + var CheckSignResult bool CheckSignResult, err = utils.CheckSign(sc.PublicKeys, sc.TxData[`forsign`].(string), sc.TxSmart.BinSignatures, false) if err != nil { @@ -881,7 +877,7 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { logger.WithFields(log.Fields{"type": consts.InvalidObject}).Error("incorrect sign") return retError(ErrIncorrectSign) } - if sc.TxSmart.EcosystemID > 0 && !sc.VDE && !conf.Config.PrivateBlockchain { + if sc.TxSmart.EcosystemID > 0 && !sc.VDE && !conf.Config.IsPrivateBlockchain() { if sc.TxSmart.TokenEcosystem == 0 { sc.TxSmart.TokenEcosystem = 1 } @@ -1003,8 +999,8 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { result = result[:255] } } - if (flags&CallRollback) == 0 && (flags&CallAction) != 0 && sc.TxSmart.EcosystemID > 0 && - !sc.VDE && !conf.Config.PrivateBlockchain && sc.TxContract.Name != `@1NewUser` { + + if (flags&CallRollback) == 0 && (flags&CallAction) != 0 && sc.TxSmart.EcosystemID > 0 && !sc.VDE && !conf.Config.IsPrivateBlockchain() { apl := sc.TxUsedCost.Mul(fuelRate) wltAmount, ierr := decimal.NewFromString(payWallet.Amount) diff --git a/packages/smart/smart_p.go b/packages/smart/smart_p.go index 24a66e6fe..0a3cf8117 100644 --- a/packages/smart/smart_p.go +++ b/packages/smart/smart_p.go @@ -65,10 +65,6 @@ var ( "Join": "extend_cost_join", "Size": "extend_cost_size", "Substr": "extend_cost_substr", - "ContractsList": "extend_cost_contracts_list", - "IsObject": "extend_cost_is_object", - "CompileContract": "extend_cost_compile_contract", - "FlushContract": "extend_cost_flush_contract", "Eval": "extend_cost_eval", "Len": "extend_cost_len", "Activate": "extend_cost_activate", @@ -342,9 +338,9 @@ func LangRes(sc *SmartContract, appID int64, idRes, lang string) string { // NewLang creates new language func CreateLanguage(sc *SmartContract, name, trans string, appID int64) (id int64, err error) { - if !accessContracts(sc, "NewLang", "Import") { - log.WithFields(log.Fields{"type": consts.IncorrectCallingContract}).Error("NewLang can be only called from @1NewLang") - return 0, fmt.Errorf(`NewLang can be only called from @1NewLang`) + if !accessContracts(sc, "NewLang", "NewLangJoint", "Import") { + log.WithFields(log.Fields{"type": consts.IncorrectCallingContract}).Error("CreateLanguage can be only called from @1NewLang, @1NewLangJoint, @1Import") + return 0, fmt.Errorf(`CreateLanguage can be only called from @1NewLang, @1NewLangJoint, @1Import`) } idStr := converter.Int64ToStr(sc.TxSmart.EcosystemID) if _, id, err = DBInsert(sc, `@`+idStr+"_languages", "name,res,app_id", name, trans, appID); err != nil { @@ -357,9 +353,9 @@ func CreateLanguage(sc *SmartContract, name, trans string, appID int64) (id int6 // EditLanguage edits language func EditLanguage(sc *SmartContract, id int64, name, trans string, appID int64) error { - if !accessContracts(sc, "EditLang", "Import") { - log.WithFields(log.Fields{"type": consts.IncorrectCallingContract}).Error("EditLang can be only called from @1EditLang") - return fmt.Errorf(`EditLang can be only called from @1EditLang`) + if !accessContracts(sc, "EditLang", "EditLangJoint", "Import") { + log.WithFields(log.Fields{"type": consts.IncorrectCallingContract}).Error("EditLanguage can be only called from @1EditLang, @1EditLangJoint and @1Import") + return 
fmt.Errorf(`EditLanguage can be only called from @1EditLang, @1EditLangJoint and @1Import`) } idStr := converter.Int64ToStr(sc.TxSmart.EcosystemID) if _, err := DBUpdate(sc, `@`+idStr+"_languages", id, "name,res,app_id", name, trans, appID); err != nil { @@ -457,6 +453,7 @@ func CreateEcosystem(sc *SmartContract, wallet int64, name string) (int64, error } sc.Rollback = false + sc.FullAccess = true if _, _, err = DBInsert(sc, `@`+idStr+"_pages", "id,name,value,menu,conditions", "1", "default_page", SysParamString("default_ecosystem_page"), "default_menu", `ContractConditions("MainCondition")`); err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("inserting default page") @@ -486,6 +483,7 @@ func CreateEcosystem(sc *SmartContract, wallet int64, name string) (int64, error return 0, err } + sc.FullAccess = false // because of we need to know which ecosystem to rollback. // All tables will be deleted so it's no need to rollback data from tables sc.Rollback = true @@ -657,7 +655,7 @@ func RollbackColumn(sc *SmartContract, tableName, name string) error { log.WithFields(log.Fields{"type": consts.IncorrectCallingContract}).Error("RollbackColumn can be only called from @1NewColumn") return fmt.Errorf(`RollbackColumn can be only called from @1NewColumn`) } - name = strings.ToLower(name) + name = converter.EscapeSQL(strings.ToLower(name)) rollbackTx := &model.RollbackTx{} found, err := rollbackTx.Get(sc.DbTransaction, sc.TxHash, fmt.Sprintf("%d_tables", sc.TxSmart.EcosystemID)) if err != nil { @@ -867,6 +865,15 @@ func JSONDecode(input string) (interface{}, error) { // JSONEncode converts object to json string func JSONEncode(input interface{}) (string, error) { + rv := reflect.ValueOf(input) + if rv.Kind() == reflect.Ptr { + rv = rv.Elem() + } + + if rv.Kind() == reflect.Struct { + return "", fmt.Errorf("Type %T doesn't support json marshalling", input) + } + b, err := json.Marshal(input) if err != nil { log.WithFields(log.Fields{"type": consts.JSONMarshallError, "error": err}).Error("marshalling json") @@ -874,3 +881,8 @@ func JSONEncode(input interface{}) (string, error) { } return string(b), nil } + +// Append syn for golang 'append' function +func Append(slice []interface{}, val interface{}) []interface{} { + return append(slice, val) +} diff --git a/packages/smart/smart_test.go b/packages/smart/smart_test.go index 4abe85bbd..5b0d635bf 100644 --- a/packages/smart/smart_test.go +++ b/packages/smart/smart_test.go @@ -19,6 +19,8 @@ package smart import ( "testing" + "github.com/stretchr/testify/require" + "github.com/GenesisKernel/go-genesis/packages/script" ) @@ -65,3 +67,29 @@ func TestNewContract(t *testing.T) { t.Error(err) } } + +func TestCheckAppend(t *testing.T) { + appendTestContract := `contract AppendTest { + action { + var list array + list = Append(list, "naw_value") + Println(list) + } + }` + + owner := script.OwnerInfo{ + StateID: 1, + Active: false, + TableID: 1, + WalletID: 0, + TokenID: 0, + } + + require.NoError(t, Compile(appendTestContract, &owner)) + + cnt := GetContract("AppendTest", 1) + cfunc := cnt.GetFunc("action") + + _, err := Run(cfunc, nil, &map[string]interface{}{}) + require.NoError(t, err) +} diff --git a/packages/tcpserver/tcpserver.go b/packages/tcpserver/tcpserver.go index c533456dc..1b11f111b 100644 --- a/packages/tcpserver/tcpserver.go +++ b/packages/tcpserver/tcpserver.go @@ -22,6 +22,8 @@ import ( "sync/atomic" "time" + "github.com/GenesisKernel/go-genesis/packages/conf" + "github.com/GenesisKernel/go-genesis/packages/consts" 
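A sketch of the JSONEncode restriction added above: maps, slices and scalar values still marshal, but struct values (and pointers to them) are rejected before json.Marshal is called.

s, _ := JSONEncode(map[string]interface{}{"a": 1}) // `{"a":1}`
_, err := JSONEncode(struct{ A int }{1})           // error: type doesn't support json marshalling
_, _ = s, err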
"github.com/GenesisKernel/go-genesis/packages/service" @@ -107,6 +109,10 @@ func HandleTCPRequest(rw net.Conn) { // TcpListener is listening tcp address func TcpListener(laddr string) error { + if conf.Config.IsSupportingVDE() { + return nil + } + if strings.HasPrefix(laddr, "127.") { log.Warn("Listening at local address: ", laddr) } diff --git a/packages/tcpserver/type1.go b/packages/tcpserver/type1.go index 1265f58f2..12d179b30 100644 --- a/packages/tcpserver/type1.go +++ b/packages/tcpserver/type1.go @@ -194,7 +194,9 @@ func getUnknownTransactions(buf *bytes.Buffer) ([]byte, error) { func saveNewTransactions(r *DisRequest) error { binaryTxs := r.Data + queue := []model.BatchModel{} log.WithFields(log.Fields{"binaryTxs": binaryTxs}).Debug("trying to save binary txs") + for len(binaryTxs) > 0 { txSize, err := converter.DecodeLength(&binaryTxs) if err != nil { @@ -222,12 +224,13 @@ func saveNewTransactions(r *DisRequest) error { log.WithFields(log.Fields{"type": consts.CryptoError, "error": err, "value": txBinData}).Fatal("cannot hash bindata") } - queueTx := &model.QueueTx{Hash: hash, Data: txBinData, FromGate: 1} - err = queueTx.Create() - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("error creating QueueTx") - return err - } + queue = append(queue, &model.QueueTx{Hash: hash, Data: txBinData, FromGate: 1}) } + + if err := model.BatchInsert(queue, []string{"hash", "data", "from_gate"}); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("error creating QueueTx") + return err + } + return nil } diff --git a/packages/template/funcs.go b/packages/template/funcs.go index 8bd496e5f..c5bb6f44d 100644 --- a/packages/template/funcs.go +++ b/packages/template/funcs.go @@ -17,6 +17,7 @@ package template import ( + "bytes" "encoding/csv" "encoding/json" "fmt" @@ -62,9 +63,14 @@ func init() { funcs[`EcosysParam`] = tplFunc{ecosysparTag, defaultTag, `ecosyspar`, `Name,Index,Source`} funcs[`Em`] = tplFunc{defaultTag, defaultTag, `em`, `Body,Class`} funcs[`GetVar`] = tplFunc{getvarTag, defaultTag, `getvar`, `Name`} + funcs[`GetContractHistory`] = tplFunc{getContractHistoryTag, defaultTag, `getcontracthistory`, `Source,Id`} + funcs[`GetMenuHistory`] = tplFunc{getMenuHistoryTag, defaultTag, `getmenuhistory`, `Source,Id`} + funcs[`GetBlockHistory`] = tplFunc{getBlockHistoryTag, defaultTag, `getblockhistory`, `Source,Id`} + funcs[`GetPageHistory`] = tplFunc{getPageHistoryTag, defaultTag, `getpagehistory`, `Source,Id`} funcs[`ImageInput`] = tplFunc{defaultTag, defaultTag, `imageinput`, `Name,Width,Ratio,Format`} funcs[`InputErr`] = tplFunc{defaultTag, defaultTag, `inputerr`, `*`} funcs[`JsonToSource`] = tplFunc{jsontosourceTag, defaultTag, `jsontosource`, `Source,Data`} + funcs[`ArrayToSource`] = tplFunc{arraytosourceTag, defaultTag, `arraytosource`, `Source,Data`} funcs[`LangRes`] = tplFunc{langresTag, defaultTag, `langres`, `Name,Lang`} funcs[`MenuGroup`] = tplFunc{menugroupTag, defaultTag, `menugroup`, `Title,Body,Icon`} funcs[`MenuItem`] = tplFunc{defaultTag, defaultTag, `menuitem`, `Title,Page,PageParams,Icon,Vde`} @@ -103,6 +109,7 @@ func init() { tails[`button`] = forTails{map[string]tailInfo{ `Alert`: {tplFunc{alertTag, defaultTailFull, `alert`, `Text,ConfirmButton,CancelButton,Icon`}, true}, + `Popup`: {tplFunc{popupTag, defaultTailFull, `popup`, `Width,Header`}, true}, `Style`: {tplFunc{tailTag, defaultTailFull, `style`, `Style`}, false}, `CompositeContract`: {tplFunc{compositeTag, defaultTailFull, `composite`, 
`Name,Data`}, false}, }} @@ -765,6 +772,18 @@ func compositeTag(par parFunc) string { return `` } +func popupTag(par parFunc) string { + setAllAttr(par) + + width := converter.StrToInt((*par.Pars)[`Width`]) + if width < 1 || width > 100 { + return `` + } + + par.Owner.Attr[`popup`] = par.Node.Attr + return `` +} + func customTag(par parFunc) string { setAllAttr(par) if len((*par.Pars)[`Column`]) == 0 || len((*par.Pars)[`Body`]) == 0 { @@ -801,15 +820,21 @@ func tailTag(par parFunc) string { func includeTag(par parFunc) string { if len((*par.Pars)[`Name`]) >= 0 && len((*par.Workspace.Vars)[`_include`]) < 5 { - pattern, err := model.Single(`select value from "`+(*par.Workspace.Vars)[`ecosystem_id`]+`_blocks" where name=?`, (*par.Pars)[`Name`]).String() + bi := &model.BlockInterface{} + bi.SetTablePrefix((*par.Workspace.Vars)[`ecosystem_id`]) + found, err := bi.Get(macro((*par.Pars)[`Name`], par.Workspace.Vars)) if err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting block by name") return err.Error() } - if len(pattern) > 0 { + if !found { + log.WithFields(log.Fields{"type": consts.NotFound, "name": (*par.Pars)[`Name`]}).Error("include block not found") + return fmt.Sprintf("Include %s has not been found", (*par.Pars)[`Name`]) + } + if len(bi.Value) > 0 { root := node{} (*par.Workspace.Vars)[`_include`] += `1` - process(pattern, &root, par.Workspace) + process(bi.Value, &root, par.Workspace) (*par.Workspace.Vars)[`_include`] = (*par.Workspace.Vars)[`_include`][:len((*par.Workspace.Vars)[`_include`])-1] for _, item := range root.Children { par.Owner.Children = append(par.Owner.Children, item) @@ -1071,6 +1096,34 @@ func jsontosourceTag(par parFunc) string { return `` } +func arraytosourceTag(par parFunc) string { + setAllAttr(par) + + data := make([][]string, 0, 16) + cols := []string{`key`, `value`} + types := []string{`text`, `text`} + var out []json.RawMessage + if err := json.Unmarshal([]byte(macro((*par.Pars)[`Data`], par.Workspace.Vars)), &out); err != nil { + log.WithFields(log.Fields{"type": consts.JSONUnmarshallError, "error": err}).Error("unmarshalling JSON Array to source") + } + for key, item := range out { + if item == nil { + item = []byte("") + } + + item = bytes.Trim(item, `"`) + + data = append(data, []string{fmt.Sprint(key), string(item)}) + } + setAllAttr(par) + par.Node.Attr[`columns`] = &cols + par.Node.Attr[`types`] = &types + par.Node.Attr[`data`] = &data + newSource(par) + par.Owner.Children = append(par.Owner.Children, par.Node) + return `` +} + func chartTag(par parFunc) string { defaultTag(par) defaultTail(par, "chart") @@ -1176,3 +1229,58 @@ func columntypeTag(par parFunc) string { } return `` } + +func getHistoryTag(par parFunc, table string) string { + setAllAttr(par) + + list, err := smart.GetHistory(nil, converter.StrToInt64((*par.Workspace.Vars)[`ecosystem_id`]), + table, converter.StrToInt64(macro((*par.Pars)[`Id`], par.Workspace.Vars))) + if err != nil { + return err.Error() + } + data := make([][]string, 0) + cols := make([]string, 0, 8) + types := make([]string, 0, 8) + if len(list) > 0 { + for i := range list { + item := list[i].(map[string]string) + if i == 0 { + for key := range item { + cols = append(cols, key) + types = append(types, `text`) + } + } + items := make([]string, len(cols)) + for ind, key := range cols { + val := item[key] + if val == `NULL` { + val = `` + } + items[ind] = val + } + data = append(data, items) + } + } + par.Node.Attr[`columns`] = &cols + par.Node.Attr[`types`] = &types + 
par.Node.Attr[`data`] = &data + newSource(par) + par.Owner.Children = append(par.Owner.Children, par.Node) + return `` +} + +func getContractHistoryTag(par parFunc) string { + return getHistoryTag(par, `contracts`) +} + +func getBlockHistoryTag(par parFunc) string { + return getHistoryTag(par, `blocks`) +} + +func getMenuHistoryTag(par parFunc) string { + return getHistoryTag(par, `menu`) +} + +func getPageHistoryTag(par parFunc) string { + return getHistoryTag(par, `pages`) +} diff --git a/packages/template/template.go b/packages/template/template.go index 5c0dc1842..8beb4882b 100644 --- a/packages/template/template.go +++ b/packages/template/template.go @@ -692,7 +692,7 @@ func Template2JSON(input string, timeout *bool, vars *map[string]string) []byte isvde := (*vars)[`vde`] == `true` || (*vars)[`vde`] == `1` sc := smart.SmartContract{ VDE: isvde, - VM: smart.GetVM(isvde, converter.StrToInt64((*vars)[`ecosystem_id`])), + VM: smart.GetVM(), TxSmart: tx.SmartContract{ Header: tx.Header{ EcosystemID: converter.StrToInt64((*vars)[`ecosystem_id`]), diff --git a/packages/template/template_test.go b/packages/template/template_test.go index 8cac4157c..0e8344485 100644 --- a/packages/template/template_test.go +++ b/packages/template/template_test.go @@ -52,11 +52,11 @@ var forTest = tplList{ {`SetVar("Name1", "Value1")GetVar("Name1")#Name1#Span(#Name1#)SetVar("Name1", "Value2")GetVar("Name1")#Name1# SetVar("Name1", "Value3")#Name1#`, `[{"tag":"text","text":"Value1"},{"tag":"text","text":"Value1"},{"tag":"span","children":[{"tag":"text","text":"Value1"}]},{"tag":"text","text":"Value2"},{"tag":"text","text":"Value2\n\t\t"},{"tag":"text","text":"Value3"}]`}, {`Data(src1, "name,value,cost"){ - 1, 1, 0 - 2, 2 - 3, 3, 4 - 5, 6 - }`, `[{"tag":"data","attr":{"columns":["name","value","cost"],"data":[["1","1","0"],["3","3","4"]],"error":"line 2, column 0: wrong number of fields in line","source":"src1","types":["text","text","text"]}}]`}, + 1, 1, 0 + 2, 2 + 3, 3, 4 + 5, 6 + }`, `[{"tag":"data","attr":{"columns":["name","value","cost"],"data":[],"error":"record on line 2: wrong number of fields","source":"src1","types":["text","text","text"]}}]`}, {`Data(Columns: "a"){a b}.Custom(){}`, `[{"tag":"data","attr":{"columns":["a"],"data":[["a"],["b"]],"types":["text"]}}]`}, @@ -86,6 +86,10 @@ var forTest = tplList{ {`SetVar(json,{"p1":"v1", "p2":"v2"})JsonToSource(none, ["q","p"])JsonToSource(pv, #json#) JsonToSource(dat, {"param":"va lue", "obj": {"sub":"one"}, "arr":["one"], "empty": null})`, `[{"tag":"jsontosource","attr":{"columns":["key","value"],"data":[],"source":"none","types":["text","text"]}},{"tag":"jsontosource","attr":{"columns":["key","value"],"data":[["p1","v1"],["p2","v2"]],"source":"pv","types":["text","text"]}},{"tag":"jsontosource","attr":{"columns":["key","value"],"data":[["arr","[one]"],["empty",""],["obj","map[sub:one]"],["param","va lue"]],"source":"dat","types":["text","text"]}}]`}, + {`SetVar(arr,[1, 2, 3])ArrayToSource(src2, #arr#)ArrayToSource(src1, ["q","p"])ArrayToSource(src1, {"k":"v"})`, + `[{"tag":"arraytosource","attr":{"columns":["key","value"],"data":[["0","1"],["1","2"],["2","3"]],"source":"src2","types":["text","text"]}},{"tag":"arraytosource","attr":{"columns":["key","value"],"data":[["0","q"],["1","p"]],"source":"src1","types":["text","text"]}},{"tag":"arraytosource","attr":{"columns":["key","value"],"data":[],"source":"src1","types":["text","text"]}}]`}, + {`ArrayToSource(arr, [{"k1":"v1"},{"k2":"v2"}])ForList(arr){JsonToSource(json, #value#)}`, + 
`[{"tag":"arraytosource","attr":{"columns":["key","value"],"data":[["0","{\"k1\":\"v1\"}"],["1","{\"k2\":\"v2\"}"]],"source":"arr","types":["text","text"]}},{"tag":"forlist","attr":{"source":"arr"},"children":[{"tag":"jsontosource","attr":{"columns":["key","value"],"data":[["k1","v1"]],"source":"json","types":["text","text"]}},{"tag":"jsontosource","attr":{"columns":["key","value"],"data":[["k2","v2"]],"source":"json","types":["text","text"]}}]}]`}, {`Button(Body: addpage).CompositeContract().CompositeContract(NewPage, [{"param1": "Value 1"}, {"param2": "Value 2", "param3" : "#my#"}]).CompositeContract(EditPage)`, `[{"tag":"button","attr":{"composite":[{"name":"NewPage","data":[{"param1":"Value 1"},{"param2":"Value 2","param3":"Span(test)"}]},{"name":"EditPage"}]},"children":[{"tag":"text","text":"addpage"}]}]`}, @@ -112,9 +116,9 @@ var forTest = tplList{ {"Button(Body: add table1, Contract: NewTable, Params: `Name=name,Columns=[{\"name\":\"MyName\",\"type\":\"varchar\", \"index\": \"1\", \"conditions\":\"true\"}, {\"name\":\"Amount\", \"type\":\"number\",\"index\": \"0\", \"conditions\":\"true\"}],Permissions={\"insert\": \"true\", \"update\" : \"true\", \"new_column\": \"true\"}`)", `[{"tag":"button","attr":{"contract":"NewTable","params":{"Columns":{"text":"[{\"name\":\"MyName\",\"type\":\"varchar\", \"index\": \"1\", \"conditions\":\"true\"}, {\"name\":\"Amount\", \"type\":\"number\",\"index\": \"0\", \"conditions\":\"true\"}]","type":"text"},"Name":{"text":"name","type":"text"},"Permissions":{"text":"{\"insert\": \"true\", \"update\" : \"true\", \"new_column\": \"true\"}","type":"text"}}},"children":[{"tag":"text","text":"add table1"}]}]`}, {`Calculate( Exp: 342278783438/0, Type: money )Calculate( Exp: 5.2/0, Type: float ) Calculate( Exp: 7/0)SetVar(moneyDigit, 2)Calculate(10/2, Type: money, Prec: #moneyDigit#)`, - `[{"tag":"text","text":"dividing by zerodividing by zerodividing by zero5.00"}]`}, + `[{"tag":"text","text":"dividing by zerodividing by zerodividing by zero5"}]`}, {`SetVar(val, 2200000034343443343430000)SetVar(zero, 0)Calculate( Exp: (342278783438+5000)*(#val#-932780000), Type: money, Prec:18 )Calculate( Exp: (2+50)*(#zero#-9), Type: money )`, - `[{"tag":"text","text":"753013346318631859107508068064700000.000000000000000000-468"}]`}, + `[{"tag":"text","text":"753013346318631859107508068064700000-468"}]`}, {`SetVar(val, 100)Calculate(10000-(34+5)*#val#)=Calculate("((10+#val#-45)*3.0-10)/4.5 + #val#", Prec: 4)`, `[{"tag":"text","text":"6100"},{"tag":"text","text":"=141.1111"}]`}, {`Span((span text), ok )Span(((span text), ok) )Div(){{My #my# body}}`, @@ -171,7 +175,7 @@ var forTest = tplList{ "1",John Silver,2 2,"Mark, Smith" )`, - `[{"tag":"data","attr":{"columns":["id","name"],"data":[["1","John Silver"]],"error":"line 2, column 0: wrong number of fields in line","source":"mysrc","types":["text","text"]}}]`}, + `[{"tag":"data","attr":{"columns":["id","name"],"data":[],"error":"record on line 2: wrong number of fields","source":"mysrc","types":["text","text"]}}]`}, {`Select(myselect,mysrc,name,id,0,myclass)`, `[{"tag":"select","attr":{"class":"myclass","name":"myselect","namecolumn":"name","source":"mysrc","value":"0","valuecolumn":"id"}}]`}, {`Data(mysrc,"id,name"){ diff --git a/packages/transaction/db.go b/packages/transaction/db.go index 24f2a8c60..197a22be4 100644 --- a/packages/transaction/db.go +++ b/packages/transaction/db.go @@ -2,7 +2,6 @@ package transaction import ( "errors" - "fmt" "github.com/GenesisKernel/go-genesis/packages/consts" 
"github.com/GenesisKernel/go-genesis/packages/crypto" @@ -12,6 +11,8 @@ import ( log "github.com/sirupsen/logrus" ) +var ErrDuplicatedTx = errors.New("Duplicated transaction") + // InsertInLogTx is inserting tx in log func InsertInLogTx(transaction *model.DbTransaction, binaryTx []byte, time int64) error { txHash, err := crypto.Hash(binaryTx) @@ -42,7 +43,7 @@ func CheckLogTx(txBinary []byte, transactions, txQueue bool) error { } if found { log.WithFields(log.Fields{"tx_hash": searchedHash, "type": consts.DuplicateObject}).Error("double tx in log transactions") - return utils.ErrInfo(fmt.Errorf("double tx in log_transactions %x", searchedHash)) + return ErrDuplicatedTx } if transactions { @@ -55,7 +56,7 @@ func CheckLogTx(txBinary []byte, transactions, txQueue bool) error { } if len(tx.Hash) > 0 { log.WithFields(log.Fields{"tx_hash": tx.Hash, "type": consts.DuplicateObject}).Error("double tx in transactions") - return utils.ErrInfo(fmt.Errorf("double tx in transactions %x", searchedHash)) + return ErrDuplicatedTx } } @@ -65,7 +66,7 @@ func CheckLogTx(txBinary []byte, transactions, txQueue bool) error { found, err := qtx.GetByHash(nil, searchedHash) if found { log.WithFields(log.Fields{"tx_hash": searchedHash, "type": consts.DuplicateObject}).Error("double tx in queue") - return utils.ErrInfo(fmt.Errorf("double tx in queue_tx %x", searchedHash)) + return ErrDuplicatedTx } if err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting transaction from queue") diff --git a/packages/transaction/transaction.go b/packages/transaction/transaction.go index 8620e5bed..18b68aa7b 100644 --- a/packages/transaction/transaction.go +++ b/packages/transaction/transaction.go @@ -382,7 +382,7 @@ func (t *Transaction) CallContract(flags int) (resultContract string, err error) VDE: false, Rollback: true, SysUpdate: false, - VM: smart.GetVM(false, 0), + VM: smart.GetVM(), TxSmart: *t.TxSmart, TxData: t.TxData, TxContract: t.TxContract, diff --git a/packages/utils/utils.go b/packages/utils/utils.go index d7a23611c..b8634f377 100644 --- a/packages/utils/utils.go +++ b/packages/utils/utils.go @@ -584,7 +584,7 @@ func GetHostPort(h string) string { return fmt.Sprintf("%s:%d", h, consts.DEFAULT_TCP_PORT) } -func BuildBlockTimeCalculator() (BlockTimeCalculator, error) { +func BuildBlockTimeCalculator(transaction *model.DbTransaction) (BlockTimeCalculator, error) { var btc BlockTimeCalculator firstBlock := model.Block{} found, err := firstBlock.Get(1) @@ -604,7 +604,7 @@ func BuildBlockTimeCalculator() (BlockTimeCalculator, error) { btc = NewBlockTimeCalculator(time.Unix(firstBlock.Time, 0), blockGenerationDuration, blocksGapDuration, - syspar.GetNumberOfNodes(), + syspar.GetNumberOfNodesFromDB(transaction), ) return btc, nil } diff --git a/packages/vdemanager/config.go b/packages/vdemanager/config.go new file mode 100644 index 000000000..bcafa10ff --- /dev/null +++ b/packages/vdemanager/config.go @@ -0,0 +1,66 @@ +package vdemanager + +import ( + "fmt" + "os/exec" + "path/filepath" +) + +const ( + inidDBCommand = "initDatabase" + genKeysCommand = "generateKeys" + startCommand = "start" +) + +// ChildVDEConfig struct to manage child entry +type ChildVDEConfig struct { + Executable string + Name string + Directory string + DBUser string + DBPassword string + ConfigFileName string + HTTPPort int +} + +func (c ChildVDEConfig) configCommand() *exec.Cmd { + + args := []string{ + "config", + fmt.Sprintf("--path=%s", c.configPath()), + fmt.Sprintf("--dbUser=%s", c.DBUser), + 
fmt.Sprintf("--dbPassword=%s", c.DBPassword), + fmt.Sprintf("--dbName=%s", c.Name), + fmt.Sprintf("--httpPort=%d", c.HTTPPort), + fmt.Sprintf("--dataDir=%s", c.Directory), + fmt.Sprintf("--keysDir=%s", c.Directory), + "--runMode=VDE", + } + + return exec.Command(c.Executable, args...) +} + +func (c ChildVDEConfig) initDBCommand() *exec.Cmd { + return c.getCommand(inidDBCommand) +} + +func (c ChildVDEConfig) generateKeysCommand() *exec.Cmd { + return c.getCommand(genKeysCommand) +} + +func (c ChildVDEConfig) startCommand() *exec.Cmd { + return c.getCommand(startCommand) +} + +func (c ChildVDEConfig) configPath() string { + return filepath.Join(c.Directory, c.ConfigFileName) +} + +func (c ChildVDEConfig) getCommand(commandName string) *exec.Cmd { + args := []string{ + commandName, + fmt.Sprintf("--config=%s", c.configPath()), + } + + return exec.Command(c.Executable, args...) +} diff --git a/packages/vdemanager/manager.go b/packages/vdemanager/manager.go new file mode 100644 index 000000000..0e628edca --- /dev/null +++ b/packages/vdemanager/manager.go @@ -0,0 +1,290 @@ +package vdemanager + +import ( + "errors" + "fmt" + "io/ioutil" + "os" + "path" + "path/filepath" + "time" + + "github.com/GenesisKernel/go-genesis/packages/conf" + + "github.com/GenesisKernel/go-genesis/packages/consts" + "github.com/GenesisKernel/go-genesis/packages/model" + pConf "github.com/rpoletaev/supervisord/config" + "github.com/rpoletaev/supervisord/process" + log "github.com/sirupsen/logrus" +) + +const ( + childFolder = "configs" + createRoleTemplate = `CREATE ROLE %s WITH ENCRYPTED PASSWORD '%s' NOSUPERUSER NOCREATEDB NOCREATEROLE INHERIT LOGIN` + createDBTemplate = `CREATE DATABASE %s OWNER %s` + + dropDBTemplate = `DROP DATABASE IF EXISTS %s` + dropOwnedTemplate = `DROP OWNED BY %s CASCADE` + dropDBRoleTemplate = `DROP ROLE IF EXISTS %s` + commandTemplate = `%s start --config=%s` +) + +var ( + errWrongMode = errors.New("node must be running as VDEMaster") +) + +// VDEManager struct +type VDEManager struct { + processes *process.ProcessManager + execPath string + childConfigsPath string +} + +var ( + Manager *VDEManager +) + +func prepareWorkDir() (string, error) { + childConfigsPath := path.Join(conf.Config.DataDir, childFolder) + + if _, err := os.Stat(childConfigsPath); os.IsNotExist(err) { + if err := os.Mkdir(childConfigsPath, 0700); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("creating configs directory") + return "", err + } + } + + return childConfigsPath, nil +} + +// CreateVDE creates one instance of VDE +func (mgr *VDEManager) CreateVDE(name, dbUser, dbPassword string, port int) error { + + config := ChildVDEConfig{ + Executable: mgr.execPath, + Name: name, + Directory: path.Join(mgr.childConfigsPath, name), + DBUser: dbUser, + DBPassword: dbPassword, + ConfigFileName: consts.DefaultConfigFile, + HTTPPort: port, + } + + if mgr.processes == nil { + log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("creating new VDE") + return errWrongMode + } + + if err := mgr.createVDEDB(name, dbUser, dbPassword); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on creating VDE DB") + return err + } + + if err := mgr.initVDEDir(name); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "DirName": name, "error": err}).Error("on init VDE dir") + return err + } + + cmd := config.configCommand() + if err := cmd.Run(); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "args": 
cmd.Args}).Error("on run config command") + return err + } + + if err := config.generateKeysCommand().Run(); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "args": cmd.Args}).Error("on run generateKeys command") + return err + } + + if err := config.initDBCommand().Run(); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "args": cmd.Args}).Error("on run initDB command") + return err + } + + procConfEntry := pConf.NewConfigEntry(config.Directory) + procConfEntry.Name = "program:" + name + command := fmt.Sprintf("%s start --config=%s", config.Executable, filepath.Join(config.Directory, consts.DefaultConfigFile)) + log.Infoln(command) + procConfEntry.AddKeyValue("command", command) + proc := process.NewProcess("vdeMaster", procConfEntry) + + mgr.processes.Add(name, proc) + mgr.processes.Find(name).Start(true) + return nil +} + +// ListProcess returns list of process names with state of process +func (mgr *VDEManager) ListProcess() (map[string]string, error) { + if mgr.processes == nil { + log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("get VDE list") + return nil, errWrongMode + } + + list := make(map[string]string) + + mgr.processes.ForEachProcess(func(p *process.Process) { + list[p.GetName()] = p.GetState().String() + }) + + return list, nil +} + +// DeleteVDE stop VDE process and remove VDE folder +func (mgr *VDEManager) DeleteVDE(name string) error { + + if mgr.processes == nil { + log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("deleting VDE") + return errWrongMode + } + + mgr.StopVDE(name) + + vdeDir := path.Join(mgr.childConfigsPath, name) + vdeConfigPath := filepath.Join(vdeDir, consts.DefaultConfigFile) + vdeConfig, err := conf.GetConfigFromPath(vdeConfigPath) + if err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Errorf("Getting config from path %s", vdeConfigPath) + return err + } + + time.Sleep(1 * time.Second) + if err := model.DropDatabase(vdeConfig.DB.Name); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("Deleting vde db") + return err + } + + dropVDERoleQuery := fmt.Sprintf(dropDBRoleTemplate, vdeConfig.DB.User) + if err := model.DBConn.Exec(dropVDERoleQuery).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("Deleting vde db user") + return err + } + + return os.RemoveAll(vdeDir) +} + +// StartVDE find process and then start him +func (mgr *VDEManager) StartVDE(name string) error { + + if mgr.processes == nil { + log.WithFields(log.Fields{"type": consts.WrongModeError, "error": errWrongMode}).Error("starting VDE") + return errWrongMode + } + + proc := mgr.processes.Find(name) + if proc == nil { + err := fmt.Errorf(`VDE '%s' is not exists`, name) + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on find VDE process") + return err + } + + state := proc.GetState() + if state == process.STOPPED || + state == process.EXITED || + state == process.FATAL { + proc.Start(true) + log.WithFields(log.Fields{"vde_name": name}).Info("VDE started") + return nil + } + + err := fmt.Errorf("VDE '%s' is %s", name, state) + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on starting VDE") + return err +} + +// StopVDE find process with definded name and then stop him +func (mgr *VDEManager) StopVDE(name string) error { + + if mgr.processes == nil { + log.WithFields(log.Fields{"type": consts.WrongModeError, "error": 
errWrongMode}).Error("on stopping VDE process") + return errWrongMode + } + + proc := mgr.processes.Find(name) + if proc == nil { + err := fmt.Errorf(`VDE '%s' is not exists`, name) + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on find VDE process") + return err + } + + state := proc.GetState() + if state == process.RUNNING || + state == process.STARTING { + proc.Stop(true) + log.WithFields(log.Fields{"vde_name": name}).Info("VDE is stoped") + return nil + } + + err := fmt.Errorf("VDE '%s' is %s", name, state) + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Error("on stoping VDE") + return err +} + +func (mgr *VDEManager) createVDEDB(vdeName, login, pass string) error { + + if err := model.DBConn.Exec(fmt.Sprintf(createRoleTemplate, login, pass)).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating VDE DB User") + return err + } + + if err := model.DBConn.Exec(fmt.Sprintf(createDBTemplate, vdeName, login)).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("creating VDE DB") + return err + } + + return nil +} + +func (mgr *VDEManager) initVDEDir(vdeName string) error { + + vdeDirName := path.Join(mgr.childConfigsPath, vdeName) + if _, err := os.Stat(vdeDirName); os.IsNotExist(err) { + if err := os.Mkdir(vdeDirName, 0700); err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("creating VDE directory") + return err + } + } + + return nil +} + +func InitVDEManager() { + + execPath, err := os.Executable() + if err != nil { + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Fatal("on determine executable path") + } + + childConfigsPath, err := prepareWorkDir() + if err != nil { + log.WithFields(log.Fields{"type": consts.VDEManagerError, "error": err}).Fatal("on prepare child configs folder") + } + + Manager = &VDEManager{ + processes: process.NewProcessManager(), + execPath: execPath, + childConfigsPath: childConfigsPath, + } + + list, err := ioutil.ReadDir(childConfigsPath) + if err != nil { + log.WithFields(log.Fields{"type": consts.IOError, "error": err, "path": childConfigsPath}).Fatal("on read child VDE directory") + } + + for _, item := range list { + if item.IsDir() { + procDir := path.Join(Manager.childConfigsPath, item.Name()) + commandStr := fmt.Sprintf(commandTemplate, Manager.execPath, filepath.Join(procDir, consts.DefaultConfigFile)) + log.Info(commandStr) + confEntry := pConf.NewConfigEntry(procDir) + confEntry.Name = "program:" + item.Name() + confEntry.AddKeyValue("command", commandStr) + confEntry.AddKeyValue("redirect_stderr", "true") + confEntry.AddKeyValue("autostart", "true") + confEntry.AddKeyValue("autorestart", "true") + + proc := process.NewProcess("vdeMaster", confEntry) + Manager.processes.Add(item.Name(), proc) + proc.Start(true) + } + } +} diff --git a/tools/desync_monitor/query/query.go b/tools/desync_monitor/query/query.go index a47a7b3e1..9cafc3ff8 100644 --- a/tools/desync_monitor/query/query.go +++ b/tools/desync_monitor/query/query.go @@ -3,8 +3,6 @@ package query import ( "fmt" "sync" - - "github.com/GenesisKernel/go-genesis/packages/api" ) const maxBlockIDEndpoint = "/api/v2/maxblockid" @@ -14,6 +12,15 @@ type MaxBlockID struct { MaxBlockID int64 `json:"max_block_id"` } +type blockInfoResult struct { + Hash []byte `json:"hash"` + EcosystemID int64 `json:"ecosystem_id"` + KeyID int64 `json:"key_id"` + Time int64 `json:"time"` + Tx int32 
`json:"tx_count"` + RollbacksHash []byte `json:"rollbacks_hash"` +} + func MaxBlockIDs(nodesList []string) ([]int64, error) { wg := sync.WaitGroup{} workResults := ConcurrentMap{m: map[string]interface{}{}} @@ -42,14 +49,14 @@ func MaxBlockIDs(nodesList []string) ([]int64, error) { return maxBlockIds, nil } -func BlockInfo(nodesList []string, blockID int64) (map[string]*api.GetBlockInfoResult, error) { +func BlockInfo(nodesList []string, blockID int64) (map[string]*blockInfoResult, error) { wg := sync.WaitGroup{} workResults := ConcurrentMap{m: map[string]interface{}{}} for _, nodeUrl := range nodesList { wg.Add(1) go func(url string) { defer wg.Done() - blockInfo := &api.GetBlockInfoResult{} + blockInfo := &blockInfoResult{} if err := sendGetRequest(url+fmt.Sprintf(blockInfoEndpoint, blockID), blockInfo); err != nil { workResults.Set(url, err) return @@ -58,12 +65,12 @@ func BlockInfo(nodesList []string, blockID int64) (map[string]*api.GetBlockInfoR }(nodeUrl) } wg.Wait() - result := map[string]*api.GetBlockInfoResult{} + result := map[string]*blockInfoResult{} for nodeUrl, blockInfoOrError := range workResults.m { switch res := blockInfoOrError.(type) { case error: return nil, res - case *api.GetBlockInfoResult: + case *blockInfoResult: result[nodeUrl] = res } } diff --git a/vendor/github.com/gorilla/rpc/LICENSE b/vendor/github.com/gorilla/rpc/LICENSE new file mode 100644 index 000000000..0e5fb8728 --- /dev/null +++ b/vendor/github.com/gorilla/rpc/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2012 Rodrigo Moraes. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/gorilla/rpc/README.md b/vendor/github.com/gorilla/rpc/README.md new file mode 100644 index 000000000..75c26eaa8 --- /dev/null +++ b/vendor/github.com/gorilla/rpc/README.md @@ -0,0 +1,7 @@ +rpc +=== +[![Build Status](https://travis-ci.org/gorilla/rpc.png?branch=master)](https://travis-ci.org/gorilla/rpc) + +gorilla/rpc is a foundation for RPC over HTTP services, providing access to the exported methods of an object through HTTP requests. 
+ +Read the full documentation here: http://www.gorillatoolkit.org/pkg/rpc diff --git a/vendor/github.com/gorilla/rpc/doc.go b/vendor/github.com/gorilla/rpc/doc.go new file mode 100644 index 000000000..bc65b532a --- /dev/null +++ b/vendor/github.com/gorilla/rpc/doc.go @@ -0,0 +1,81 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Copyright 2012 The Gorilla Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package gorilla/rpc is a foundation for RPC over HTTP services, providing +access to the exported methods of an object through HTTP requests. + +This package derives from the standard net/rpc package but uses a single HTTP +request per call instead of persistent connections. Other differences +compared to net/rpc: + + - Multiple codecs can be registered in the same server. + - A codec is chosen based on the "Content-Type" header from the request. + - Service methods also receive http.Request as parameter. + - This package can be used on Google App Engine. + +Let's setup a server and register a codec and service: + + import ( + "http" + "github.com/gorilla/rpc" + "github.com/gorilla/rpc/json" + ) + + func init() { + s := rpc.NewServer() + s.RegisterCodec(json.NewCodec(), "application/json") + s.RegisterService(new(HelloService), "") + http.Handle("/rpc", s) + } + +This server handles requests to the "/rpc" path using a JSON codec. +A codec is tied to a content type. In the example above, the JSON codec is +registered to serve requests with "application/json" as the value for the +"Content-Type" header. If the header includes a charset definition, it is +ignored; only the media-type part is taken into account. + +A service can be registered using a name. If the name is empty, like in the +example above, it will be inferred from the service type. + +That's all about the server setup. Now let's define a simple service: + + type HelloArgs struct { + Who string + } + + type HelloReply struct { + Message string + } + + type HelloService struct {} + + func (h *HelloService) Say(r *http.Request, args *HelloArgs, reply *HelloReply) error { + reply.Message = "Hello, " + args.Who + "!" + return nil + } + +The example above defines a service with a method "HelloService.Say" and +the arguments and reply related to that method. + +The service must be exported (begin with an upper case letter) or local +(defined in the package registering the service). + +When a service is registered, the server inspects the service methods +and make available the ones that follow these rules: + + - The method name is exported. + - The method has three arguments: *http.Request, *args, *reply. + - All three arguments are pointers. + - The second and third arguments are exported or local. + - The method has return type error. + +All other methods are ignored. + +Gorilla has packages with common RPC codecs. Check out their documentation: + + JSON: http://gorilla-web.appspot.com/pkg/rpc/json +*/ +package rpc diff --git a/vendor/github.com/gorilla/rpc/map.go b/vendor/github.com/gorilla/rpc/map.go new file mode 100644 index 000000000..433f275b8 --- /dev/null +++ b/vendor/github.com/gorilla/rpc/map.go @@ -0,0 +1,180 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Copyright 2012 The Gorilla Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package rpc + +import ( + "fmt" + "net/http" + "reflect" + "strings" + "sync" + "unicode" + "unicode/utf8" +) + +var ( + // Precompute the reflect.Type of error and http.Request + typeOfError = reflect.TypeOf((*error)(nil)).Elem() + typeOfRequest = reflect.TypeOf((*http.Request)(nil)).Elem() +) + +// ---------------------------------------------------------------------------- +// service +// ---------------------------------------------------------------------------- + +type service struct { + name string // name of service + rcvr reflect.Value // receiver of methods for the service + rcvrType reflect.Type // type of the receiver + methods map[string]*serviceMethod // registered methods + passReq bool +} + +type serviceMethod struct { + method reflect.Method // receiver method + argsType reflect.Type // type of the request argument + replyType reflect.Type // type of the response argument +} + +// ---------------------------------------------------------------------------- +// serviceMap +// ---------------------------------------------------------------------------- + +// serviceMap is a registry for services. +type serviceMap struct { + mutex sync.Mutex + services map[string]*service +} + +// register adds a new service using reflection to extract its methods. +func (m *serviceMap) register(rcvr interface{}, name string, passReq bool) error { + // Setup service. + s := &service{ + name: name, + rcvr: reflect.ValueOf(rcvr), + rcvrType: reflect.TypeOf(rcvr), + methods: make(map[string]*serviceMethod), + passReq: passReq, + } + if name == "" { + s.name = reflect.Indirect(s.rcvr).Type().Name() + if !isExported(s.name) { + return fmt.Errorf("rpc: type %q is not exported", s.name) + } + } + if s.name == "" { + return fmt.Errorf("rpc: no service name for type %q", + s.rcvrType.String()) + } + // Setup methods. + for i := 0; i < s.rcvrType.NumMethod(); i++ { + method := s.rcvrType.Method(i) + mtype := method.Type + + // offset the parameter indexes by one if the + // service methods accept an HTTP request pointer + var paramOffset int + if passReq { + paramOffset = 1 + } else { + paramOffset = 0 + } + + // Method must be exported. + if method.PkgPath != "" { + continue + } + // Method needs four ins: receiver, *http.Request, *args, *reply. + if mtype.NumIn() != 3+paramOffset { + continue + } + + // If the service methods accept an HTTP request pointer + if passReq { + // First argument must be a pointer and must be http.Request. + reqType := mtype.In(1) + if reqType.Kind() != reflect.Ptr || reqType.Elem() != typeOfRequest { + continue + } + } + // Next argument must be a pointer and must be exported. + args := mtype.In(1 + paramOffset) + if args.Kind() != reflect.Ptr || !isExportedOrBuiltin(args) { + continue + } + // Next argument must be a pointer and must be exported. + reply := mtype.In(2 + paramOffset) + if reply.Kind() != reflect.Ptr || !isExportedOrBuiltin(reply) { + continue + } + // Method needs one out: error. + if mtype.NumOut() != 1 { + continue + } + if returnType := mtype.Out(0); returnType != typeOfError { + continue + } + s.methods[method.Name] = &serviceMethod{ + method: method, + argsType: args.Elem(), + replyType: reply.Elem(), + } + } + if len(s.methods) == 0 { + return fmt.Errorf("rpc: %q has no exported methods of suitable type", + s.name) + } + // Add to the map. 
+ m.mutex.Lock() + defer m.mutex.Unlock() + if m.services == nil { + m.services = make(map[string]*service) + } else if _, ok := m.services[s.name]; ok { + return fmt.Errorf("rpc: service already defined: %q", s.name) + } + m.services[s.name] = s + return nil +} + +// get returns a registered service given a method name. +// +// The method name uses a dotted notation as in "Service.Method". +func (m *serviceMap) get(method string) (*service, *serviceMethod, error) { + parts := strings.Split(method, ".") + if len(parts) != 2 { + err := fmt.Errorf("rpc: service/method request ill-formed: %q", method) + return nil, nil, err + } + m.mutex.Lock() + service := m.services[parts[0]] + m.mutex.Unlock() + if service == nil { + err := fmt.Errorf("rpc: can't find service %q", method) + return nil, nil, err + } + serviceMethod := service.methods[parts[1]] + if serviceMethod == nil { + err := fmt.Errorf("rpc: can't find method %q", method) + return nil, nil, err + } + return service, serviceMethod, nil +} + +// isExported returns true of a string is an exported (upper case) name. +func isExported(name string) bool { + rune, _ := utf8.DecodeRuneInString(name) + return unicode.IsUpper(rune) +} + +// isExportedOrBuiltin returns true if a type is exported or a builtin. +func isExportedOrBuiltin(t reflect.Type) bool { + for t.Kind() == reflect.Ptr { + t = t.Elem() + } + // PkgPath will be non-empty even for an exported type, + // so we need to check the type name as well. + return isExported(t.Name()) || t.PkgPath() == "" +} diff --git a/vendor/github.com/gorilla/rpc/server.go b/vendor/github.com/gorilla/rpc/server.go new file mode 100644 index 000000000..d61b5eaa9 --- /dev/null +++ b/vendor/github.com/gorilla/rpc/server.go @@ -0,0 +1,269 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Copyright 2012 The Gorilla Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package rpc + +import ( + "fmt" + "net/http" + "reflect" + "strings" +) + +// ---------------------------------------------------------------------------- +// Codec +// ---------------------------------------------------------------------------- + +// Codec creates a CodecRequest to process each request. +type Codec interface { + NewRequest(*http.Request) CodecRequest +} + +// CodecRequest decodes a request and encodes a response using a specific +// serialization scheme. +type CodecRequest interface { + // Reads request and returns the RPC method name. + Method() (string, error) + // Reads request filling the RPC method args. + ReadRequest(interface{}) error + // Writes response using the RPC method reply. The error parameter is + // the error returned by the method call, if any. + WriteResponse(http.ResponseWriter, interface{}, error) error +} + +// ---------------------------------------------------------------------------- +// Server +// ---------------------------------------------------------------------------- + +// NewServer returns a new RPC server. +func NewServer() *Server { + return &Server{ + codecs: make(map[string]Codec), + services: new(serviceMap), + } +} + +// RequestInfo contains all the information we pass to before/after functions +type RequestInfo struct { + Method string + Error error + Request *http.Request + StatusCode int +} + +// Server serves registered RPC services using registered codecs. 
+type Server struct { + codecs map[string]Codec + services *serviceMap + interceptFunc func(i *RequestInfo) *http.Request + beforeFunc func(i *RequestInfo) + afterFunc func(i *RequestInfo) +} + +// RegisterCodec adds a new codec to the server. +// +// Codecs are defined to process a given serialization scheme, e.g., JSON or +// XML. A codec is chosen based on the "Content-Type" header from the request, +// excluding the charset definition. +func (s *Server) RegisterCodec(codec Codec, contentType string) { + s.codecs[strings.ToLower(contentType)] = codec +} + +// RegisterService adds a new service to the server. +// +// The name parameter is optional: if empty it will be inferred from +// the receiver type name. +// +// Methods from the receiver will be extracted if these rules are satisfied: +// +// - The receiver is exported (begins with an upper case letter) or local +// (defined in the package registering the service). +// - The method name is exported. +// - The method has three arguments: *http.Request, *args, *reply. +// - All three arguments are pointers. +// - The second and third arguments are exported or local. +// - The method has return type error. +// +// All other methods are ignored. +func (s *Server) RegisterService(receiver interface{}, name string) error { + return s.services.register(receiver, name, true) +} + +// RegisterTCPService adds a new TCP service to the server. +// No HTTP request struct will be passed to the service methods. +// +// The name parameter is optional: if empty it will be inferred from +// the receiver type name. +// +// Methods from the receiver will be extracted if these rules are satisfied: +// +// - The receiver is exported (begins with an upper case letter) or local +// (defined in the package registering the service). +// - The method name is exported. +// - The method has two arguments: *args, *reply. +// - Both arguments are pointers. +// - Both arguments are exported or local. +// - The method has return type error. +// +// All other methods are ignored. +func (s *Server) RegisterTCPService(receiver interface{}, name string) error { + return s.services.register(receiver, name, false) +} + +// HasMethod returns true if the given method is registered. +// +// The method uses a dotted notation as in "Service.Method". +func (s *Server) HasMethod(method string) bool { + if _, _, err := s.services.get(method); err == nil { + return true + } + return false +} + +// RegisterInterceptFunc registers the specified function as the function +// that will be called before every request. The function is allowed to intercept +// the request e.g. add values to the context. +// +// Note: Only one function can be registered, subsequent calls to this +// method will overwrite all the previous functions. +func (s *Server) RegisterInterceptFunc(f func(i *RequestInfo) *http.Request) { + s.interceptFunc = f +} + +// RegisterBeforeFunc registers the specified function as the function +// that will be called before every request. +// +// Note: Only one function can be registered, subsequent calls to this +// method will overwrite all the previous functions. +func (s *Server) RegisterBeforeFunc(f func(i *RequestInfo)) { + s.beforeFunc = f +} + +// RegisterAfterFunc registers the specified function as the function +// that will be called after every request +// +// Note: Only one function can be registered, subsequent calls to this +// method will overwrite all the previous functions. 
+func (s *Server) RegisterAfterFunc(f func(i *RequestInfo)) { + s.afterFunc = f +} + +// ServeHTTP +func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) { + if r.Method != "POST" { + s.writeError(w, 405, "rpc: POST method required, received "+r.Method) + return + } + contentType := r.Header.Get("Content-Type") + idx := strings.Index(contentType, ";") + if idx != -1 { + contentType = contentType[:idx] + } + var codec Codec + if contentType == "" && len(s.codecs) == 1 { + // If Content-Type is not set and only one codec has been registered, + // then default to that codec. + for _, c := range s.codecs { + codec = c + } + } else if codec = s.codecs[strings.ToLower(contentType)]; codec == nil { + s.writeError(w, 415, "rpc: unrecognized Content-Type: "+contentType) + return + } + // Create a new codec request. + codecReq := codec.NewRequest(r) + // Get service method to be called. + method, errMethod := codecReq.Method() + if errMethod != nil { + s.writeError(w, 400, errMethod.Error()) + return + } + serviceSpec, methodSpec, errGet := s.services.get(method) + if errGet != nil { + s.writeError(w, 400, errGet.Error()) + return + } + // Decode the args. + args := reflect.New(methodSpec.argsType) + if errRead := codecReq.ReadRequest(args.Interface()); errRead != nil { + s.writeError(w, 400, errRead.Error()) + return + } + + // Call the registered Intercept Function + if s.interceptFunc != nil { + req := s.interceptFunc(&RequestInfo{ + Request: r, + Method: method, + }) + if req != nil { + r = req + } + } + // Call the registered Before Function + if s.beforeFunc != nil { + s.beforeFunc(&RequestInfo{ + Request: r, + Method: method, + }) + } + + // Call the service method. + reply := reflect.New(methodSpec.replyType) + + // omit the HTTP request if the service method doesn't accept it + var errValue []reflect.Value + if serviceSpec.passReq { + errValue = methodSpec.method.Func.Call([]reflect.Value{ + serviceSpec.rcvr, + reflect.ValueOf(r), + args, + reply, + }) + } else { + errValue = methodSpec.method.Func.Call([]reflect.Value{ + serviceSpec.rcvr, + args, + reply, + }) + } + + // Cast the result to error if needed. + var errResult error + errInter := errValue[0].Interface() + if errInter != nil { + errResult = errInter.(error) + } + + // Prevents Internet Explorer from MIME-sniffing a response away + // from the declared content-type + w.Header().Set("x-content-type-options", "nosniff") + // Encode the response. 
+ if errWrite := codecReq.WriteResponse(w, reply.Interface(), errResult); errWrite != nil { + s.writeError(w, 400, errWrite.Error()) + } else { + // Call the registered After Function + if s.afterFunc != nil { + s.afterFunc(&RequestInfo{ + Request: r, + Method: method, + Error: errResult, + StatusCode: 200, + }) + } + } +} + +func (s *Server) writeError(w http.ResponseWriter, status int, msg string) { + w.WriteHeader(status) + w.Header().Set("Content-Type", "text/plain; charset=utf-8") + fmt.Fprint(w, msg) + if s.afterFunc != nil { + s.afterFunc(&RequestInfo{ + Error: fmt.Errorf(msg), + StatusCode: status, + }) + } +} diff --git a/vendor/github.com/ochinchina/go-ini/LICENSE b/vendor/github.com/ochinchina/go-ini/LICENSE new file mode 100644 index 000000000..6713cd967 --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Steven Ou + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/ochinchina/go-ini/README.md b/vendor/github.com/ochinchina/go-ini/README.md new file mode 100644 index 000000000..6c67d5c1e --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/README.md @@ -0,0 +1,368 @@ +# Overview + +This is a golang library for reading/writing the .ini format file. The description on .ini file can be found at https://en.wikipedia.org/wiki/INI_file + +# Supported .ini format + +A .ini file contains one or more sections and each section contains one or more key/value pair. Following is an example of .ini file + +```ini +# this is a comment line +; this is also a comment line + +[section1] + +key1 = value1 + +[section2] + +key2 = value2 +``` + +## Comments +### Comments line + +A comments line is started with char '#' or ';' and it will be ignored when processing the .ini file. + +```ini + +# this is a comment line +; this is also a comment line + +``` + +### inline comments + +A comment can be appended in a tail of line. The inline comments must be started with ';' or '#' and its previous char must be a space. 
+ +```ini +[section1] +key1 = value1 ;this is a inline comment +key2 = value2;this is not a inline comment +``` + +## Multiline value + +if a value is multiple line value, the value can be put between """ and """, an example: + +```ini + +[section1] + +multi-line-key = """this is a multi-line example, +multiple line can be put in a value, +this is multiple line is just for test""" + +single-line-key = this is a normal value +``` + +## Continuation line + +If a line is too long, user can devide one line to multiple line and on the end of line the char '\\' should be put: + +```ini +[section1] +key1 = this line is too long, \ +we need to write it to multiple line, \ +but actually it is one line from the point of user + +``` + +## Escape char + +This library supports the escape char, the escape char is started with char \\ + +|Common escape sequences Sequence | Meaning | +|---------------------------------|-----------------------------------------------------| +|\\\\ |\ (a single backslash, escaping the escape character)| +|\0 |Null character | +|\a |Bell/Alert/Audible | +|\b |Backspace, Bell character for some applications | +|\t |Tab character | +|\r |Carriage return | +|\n |Line feed | +|\\; |Semicolon | +|\\# |Number sign | +|\\= |Equals sign | +|\\: |Colon | +|\\x???? |Unicode character with hexadecimal code point | + + +## Environemnt variable support + +Environment variable can be embeded in the value of the key and the environment variable will be replaced. For example: + +```ini +[section1] +key1 = this value has env ${HOME} +key2 = this value has env with default ${SOME_ENV:-test},hihi +``` + +In the above example, the environment variable HOME is in the value of key1. So if the value of environment variable HOME is "/home/test", the value of key1 is "this value has env /home/test". + +For the key2, the environemnt SOME_ENV is included and if the environment variable SOME_ENV does not exist, its value will be "test" otherwise it will be the value of SOME_ENV environment variable. + +# API + +## import the library + +The go-ini library should be imported before using this library: + +```go +import ( + ini "github.com/ochinchina/go-ini" +) +``` +## Load .ini file + +.ini format file or string can be loaded by the method: + +### Load from a file + +```go +//Load the .ini from a file +ini := ini.Load( "fileName" ) + +``` + +### Load from a string or byte array in .ini format + +```go +ini_str := `[section1] +key1 = value1 +key2 = value 2 +` + +ini := ini.Load( ini_str ) +//load from a byte array + +ini = ini.Load( []byte(ini_str) ) + +``` + +### Load from a io.Reader + +```go + +var reader io.Reader = ... + +ini := ini.Load( reader ) + +``` + +### Load .ini from multiple source + +The Load() method can load .ini from multiple mixed sources. + +``` go +//load multiple sources: fileName, string, reader and byte array in one statement + +ini := ini.Load( "fileName", ini_str, reader ) +``` + +### Load the .ini in Ini object + +The Ini class also provide a method named Load(), this method can be called multiple times and the later loaded .ini will be appended to the Ini object. + +```go +//first load the .ini from a file +ini := ini.Load( "fileName" ) + +//append the .ini from string to the ini object +ini_str := `[section1] +key1 = value1 +key2 = value 2 +` +ini.Load( ini_str ) + +//append the .ini from a reader to the ini object +var reader io.Reader = ... 
+ini.Load( reader ) + +``` + +## Access the value of key in the .ini file + +After loading the .ini from a file/string/reader, we can access a keya under a section. This library provides three level API to access the value of a key in a section. + +### Access the value of key in Ini class level + +The value of key can be accessed in Ini class level. + +```go +ini := ini.Load(...) + +value, err := ini.GetValue( "section1", "key1") + +// if err is nil, the value is ok +if err == nil { + //the value exists and DO something according to the value +} +``` + +Sometimes we need to provide a default value if the key in the section does not exist, at this time the user can provide a default value by GetValueWithDefault() method. + +```go +ini := ini.Load(...) + +//if the section1 or key1 does not exist, return a default value(empty string) +value := ini.GetValueWithDefault( "section1", "key1", "" ) +``` +### Access the value of key in Section class level + +Call the GetSection() method by the section name on the Ini object at frist, and then call GetValue() on the section to get the value of key. + +```go +ini := ini.Load(...) + +section, err := ini.GetSection( "section1" ) + +if err == nil { + value, err := section.GetValue( "key1" ) + if err == nil { + //the value of key1 exists + } +} +``` + +The method GetValueWithDefault() ask user provide a default value if the key under section does not exist, the user provided default value will be returned. + +```go +ini := ini.Load(...) + +section, err := ini.GetSection( "section1" ) + +if err == nil { + //get the value of key1 and if the key1 does not exists, return the default empty string + value := section.GetValueWithDefault("key1", "" ) +} +``` + +### Access the value of key in Key class level + +The value of a key can be acccessed in the Key class level also. The method Key() on the section with keyname can be called even if the key does not exist. After getting a Key object, user can call Value() method to get the value of key. +```go +ini := ini.Load(...) + +section, err := ini.GetSection( "section1" ) +if err == nil { + //the Key() method always returns a Key object even if the key does not exist + value, err := section.Key( "key1" ).Value() + if err == nul { + //the value in key1 exists + } +} +``` +User can provide a default value to method ValueWithDefault() on the Key object to get the value of key and if the key does not exist the default value will be returned. + + +```go +ini := ini.Load(...) + +section, err := ini.GetSection( "section1" ) +if err == nil { + //the Key() method always returns a Key object even if the key does not exist + value:= section.Key( "key1" ).ValueWithDefault("") +} +``` + +## Convert the string value to desired types + +Except for getting a string value of a key, you can also ask the library convert the string to one of following types: + +- bool +- int +- int64 +- uint64 +- float32 +- float64 + +For each data type, this library provides two methods GetXXX() and GetXXXWithDefault() on the Ini&Section class level where the XXX stands for the Bool, Int, Int64, Uint64, Float32, Float64. + +An example to ask the library convert the key to a int data type in Ini level: + +```go + +ini := ini.Load(...) 
+
+value, err := ini.GetInt( "section1", "key1" )
+
+if err == nil {
+    //at this time, the value of key1 exists and can be converted to an integer
+}
+
+value = ini.GetIntWithDefault( "section1", "key1", 0 )
+
+```
+
+An example asking the library to convert the key to an int at the Section level:
+```go
+
+ini := ini.Load(...)
+
+section, err := ini.GetSection( "section1" )
+
+if err == nil {
+    value, err := section.GetInt( "key1" )
+    if err == nil {
+        //at this time key1 exists and its value can be converted to int
+    }
+
+    value = section.GetIntWithDefault("key1", 0 )
+}
+```
+
+An example asking the library to convert the key to an int at the Key level:
+```go
+
+ini := ini.Load(...)
+section, err := ini.GetSection( "section1" )
+if err == nil {
+    value, err := section.Key( "key1" ).Int()
+    if err == nil {
+        //at this time key1 exists and its value can be converted to int
+    }
+
+    //get with default value
+    value = section.Key( "key1" ).IntWithDefault( 0 )
+}
+```
+
+## Add a key&value to the .ini file
+
+This library also provides an API to add key&value pairs to the .ini file.
+
+```go
+
+ini := ini.NewIni()
+
+section := ini.NewSection( "section1" )
+section.Add( "key1", "value1" )
+```
+
+## Save the .ini to a file
+
+The user can call the Write() method on the Ini object to write the .ini contents to an io.Writer:
+
+```go
+
+ini := ini.NewIni()
+section := ini.NewSection( "section1" )
+section.Add( "key1", "value1" )
+
+buf := bytes.NewBufferString("")
+ini.Write( buf )
+```
+
+To write to a file, there is a convenient API WriteToFile() on the Ini object which takes a file name and writes the .ini content to that file.
+
+
+```go
+
+ini := ini.NewIni()
+section := ini.NewSection( "section1" )
+section.Add( "key1", "value1" )
+
+ini.WriteToFile( "test.ini" )
+
+```
diff --git a/vendor/github.com/ochinchina/go-ini/doc.go b/vendor/github.com/ochinchina/go-ini/doc.go
new file mode 100644
index 000000000..105ca3e98
--- /dev/null
+++ b/vendor/github.com/ochinchina/go-ini/doc.go
@@ -0,0 +1,49 @@
+/*
+A golang implemented library to read/write .ini format files.
+
+With this library, you can load the .ini file a string, a byte array, a file and a io.Reader.
+
+    import (
+        ini "github.com/ochinchina/go-ini"
+    )
+
+
+    func main() {
+        //load from .ini file
+        ini := ini.Load( "myfile.ini")
+        //load from .ini format string
+        str_data := "[section1]\nkey1=value1\n[section2]\nkey2=value2"
+        ini = ini.Load( str_data )
+
+        //load .ini format byte array
+        ini = ini.Load( []byte(str_data) )
+
+        //load from io.Reader
+        var reader io.Reader = ...
+ + ini = ini.Load( reader ) + + //load from multiple source in one Load method + ini = ini.Load( "myfile.ini", reader, str_data, bytes_data ) + } + +The loaded Ini includes sections, you can access section: + + //get all the sections in the .ini + var sections []*Section = ini.Sections() + + //get a section by Name + var section *Section = ini.GetSection( sectionName ) + + +Then the key in a section can be accessed by method GetXXX() and GetXXXWithDefault(defValue): + //get the value of key + value, err := section.GetValue( "key1") + value = section.GetValueWithDefault("key1", "") + + //get value of key as int + i, err := section.GetInt( "key2" ) + i = section.GetIntWithDefault( "key2" ) + +*/ +package ini diff --git a/vendor/github.com/ochinchina/go-ini/env_replacer.go b/vendor/github.com/ochinchina/go-ini/env_replacer.go new file mode 100644 index 000000000..efbd5d92d --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/env_replacer.go @@ -0,0 +1,65 @@ +package ini + +import ( + "bytes" + "os" + "strings" +) + +func get_env_value(env string) (string, bool) { + pos := strings.Index(env, ":") + if pos == -1 { + return os.LookupEnv(env) + } + + real_env := env[0:pos] + def_value := env[pos+1:] + if len(def_value) > 0 && def_value[0] == '-' { + def_value = def_value[1:] + } + if value, ok := os.LookupEnv(real_env); ok { + return value, ok + } else { + return def_value, true + } +} + +func replace_env(s string) string { + n := len(s) + env_start_pos := -1 + result := bytes.NewBuffer(make([]byte, 0)) + + for i := 0; i < n; i++ { + //if env start flag "${" is found but env end flag "}" is not found + if env_start_pos >= 0 && s[i] != '}' { + continue + } + switch s[i] { + case '\\': + result.WriteByte(s[i]) + if i+1 < n { + i++ + result.WriteByte(s[i]) + } + case '$': + if i+1 < n && s[i+1] == '{' { + env_start_pos = i + i++ + } else { + result.WriteByte(s[i]) + } + case '}': + if env_start_pos >= 0 { + if env_value, ok := get_env_value(s[env_start_pos+2 : i]); ok { + result.WriteString(env_value) + } + env_start_pos = -1 + } else { + result.WriteByte(s[i]) + } + default: + result.WriteByte(s[i]) + } + } + return result.String() +} diff --git a/vendor/github.com/ochinchina/go-ini/ini.go b/vendor/github.com/ochinchina/go-ini/ini.go new file mode 100644 index 000000000..f628ae5a6 --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/ini.go @@ -0,0 +1,265 @@ +package ini + +import ( + "bytes" + "fmt" + "io" + "os" +) + +// manage all the sections and their key values defined in the .ini file +// +type Ini struct { + defaultSectionName string + sections map[string]*Section +} + +func NewIni() *Ini { + return &Ini{defaultSectionName: "default", + sections: make(map[string]*Section)} +} + +func (ini *Ini) GetDefaultSectionName() string { + return ini.defaultSectionName +} + +func (ini *Ini) SetDefaultSectionName(defSectionName string) { + ini.defaultSectionName = defSectionName +} + +// create a new section if the section with name does not exist +// or return the exist one if the section with name already exists +// +func (ini *Ini) NewSection(name string) *Section { + if section, ok := ini.sections[name]; ok { + return section + } + section := NewSection(name) + ini.sections[name] = section + return section +} + +// add a section to the .ini file and overwrite the exist section +// with same name +func (ini *Ini) AddSection(section *Section) { + ini.sections[section.Name] = section +} + +// Get all the section name in the ini +// +// return all the section names +func (ini *Ini) Sections() 
[]*Section { + r := make([]*Section, 0) + for _, section := range ini.sections { + r = append(r, section) + } + return r +} + +// check if a key exists or not in the Ini +// +// return true if the key in section exists +func (ini *Ini) HasKey(sectionName, key string) bool { + if section, ok := ini.sections[sectionName]; ok { + return section.HasKey(key) + } + return false +} + +// get section by section name +// +// return: section or nil +func (ini *Ini) GetSection(name string) (*Section, error) { + if section, ok := ini.sections[name]; ok { + return section, nil + } + return nil, noSuchSection(name) +} + +// return true if the section with name exists +// return false if the section with name does not exist +func (ini *Ini) HasSection(name string) bool { + _, err := ini.GetSection(name) + return err == nil +} + +// get the value of key in section +func (ini *Ini) GetValue(sectionName, key string) (string, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetValue(key) + } + return "", noSuchSection(sectionName) +} + +// get the value of the key in section +// if the key does not exist, return the defValue +func (ini *Ini) GetValueWithDefault(sectionName, key string, defValue string) string { + if section, ok := ini.sections[sectionName]; ok { + return section.GetValueWithDefault(key, defValue) + } + return defValue +} + +// get the value of key in section as bool. +// return true if the value of the key is one of following(case insensitive): +// - true +// - yes +// - t +// - y +// - 1 +// return false for all other values +func (ini *Ini) GetBool(sectionName, key string) (bool, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetBool(key) + } + return false, noSuchSection(sectionName) +} + +// get the value of key as bool and return the default value if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetBoolWithDefault(sectionName, key string, defValue bool) bool { + if section, ok := ini.sections[sectionName]; ok { + return section.GetBoolWithDefault(key, defValue) + } + return defValue +} + +// get the value of key in the section as int +func (ini *Ini) GetInt(sectionName, key string) (int, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetInt(key) + } + return 0, noSuchSection(sectionName) +} + +// get the value of key in the section as int and return defValue if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetIntWithDefault(sectionName, key string, defValue int) int { + if section, ok := ini.sections[sectionName]; ok { + return section.GetIntWithDefault(key, defValue) + } + return defValue +} + +// get the value of key in the section as uint +func (ini *Ini) GetUint(sectionName, key string) (uint, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetUint(key) + } + return 0, noSuchSection(sectionName) +} + +// get the value of key in the section as int and return defValue if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetUintWithDefault(sectionName, key string, defValue uint) uint { + if section, ok := ini.sections[sectionName]; ok { + return section.GetUintWithDefault(key, defValue) + } + return defValue +} + +// get the value of key in the section as int64 +func (ini *Ini) GetInt64(sectionName, key string) (int64, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetInt64(key) + } + return 0, 
noSuchSection(sectionName) +} + +// get the value of key in the section as int64 and return defValue if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetInt64WithDefault(sectionName, key string, defValue int64) int64 { + if section, ok := ini.sections[sectionName]; ok { + return section.GetInt64WithDefault(key, defValue) + } + return defValue +} + +// get the value of key in the section as uint64 +func (ini *Ini) GetUint64(sectionName, key string) (uint64, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetUint64(key) + } + return 0, noSuchSection(sectionName) +} + +// get the value of key in the section as uint64 and return defValue if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetUint64WithDefault(sectionName, key string, defValue uint64) uint64 { + if section, ok := ini.sections[sectionName]; ok { + return section.GetUint64WithDefault(key, defValue) + } + return defValue +} + +// get the value of key in the section as float32 +func (ini *Ini) GetFloat32(sectionName, key string) (float32, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetFloat32(key) + } + return 0, noSuchSection(sectionName) +} + +// get the value of key in the section as float32 and return defValue if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetFloat32WithDefault(sectionName, key string, defValue float32) float32 { + if section, ok := ini.sections[sectionName]; ok { + return section.GetFloat32WithDefault(key, defValue) + } + return defValue +} + +// get the value of key in the section as float64 +func (ini *Ini) GetFloat64(sectionName, key string) (float64, error) { + if section, ok := ini.sections[sectionName]; ok { + return section.GetFloat64(key) + } + return 0, noSuchSection(sectionName) +} + +// get the value of key in the section as float64 and return defValue if the section in the .ini file +// or key in the section does not exist +func (ini *Ini) GetFloat64WithDefault(sectionName, key string, defValue float64) float64 { + if section, ok := ini.sections[sectionName]; ok { + return section.GetFloat64WithDefault(key, defValue) + } + return defValue +} + +func noSuchSection(sectionName string) error { + return fmt.Errorf("no such section:%s", sectionName) +} + +func (ini *Ini) String() string { + buf := bytes.NewBuffer(make([]byte, 0)) + ini.Write(buf) + return buf.String() +} + +// write the content of the .ini in the .ini file format, e.g. 
in following format: +// +// [section1] +// key1 = value1 +// key2 = value2 +// [section2] +// key3 = value3 +// key4 = value4 +func (ini *Ini) Write(writer io.Writer) error { + for _, section := range ini.sections { + err := section.Write(writer) + if err != nil { + return err + } + } + return nil +} + +// Write the conents of ini to a file +func (ini *Ini) WriteToFile(fileName string) error { + file, err := os.Create(fileName) + if err == nil { + defer file.Close() + return ini.Write(file) + } + return err +} diff --git a/vendor/github.com/ochinchina/go-ini/key.go b/vendor/github.com/ochinchina/go-ini/key.go new file mode 100644 index 000000000..a3dd881a0 --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/key.go @@ -0,0 +1,282 @@ +package ini + +import ( + "fmt" + "strconv" + "strings" +) + +// represents the pair stored in the +// section of the .ini file +// +type Key interface { + // get name of the key + Name() string + + // get value of the key + Value() (string, error) + + //get the value of key and return defValue if + //the value does not exist + ValueWithDefault(defValue string) string + + // get the value as bool + // return true if the value is one of following(case insensitive): + // - true + // - yes + // - T + // - Y + // - 1 + // Any other value will return false + Bool() (bool, error) + + // get the value as bool and return the defValue if the + // value of the key does not exist + BoolWithDefault(defValue bool) bool + // get the value as int + Int() (int, error) + + // get value as int and return defValue if the + // value of the key does not exist + IntWithDefault(defValue int) int + + //get value as uint + Uint() (uint, error) + + //get value as uint and return defValue if the + //key does not exist or it is not uint format + UintWithDefault(defValue uint) uint + + // get the value as int64 + Int64() (int64, error) + + // get the value as int64 and return defValue + // if the value of the key does not exist + Int64WithDefault(defValue int64) int64 + + // get the value as uint64 + Uint64() (uint64, error) + + // get the value as uint64 and return defValue + // if the value of the key does not exist + Uint64WithDefault(defValue uint64) uint64 + + // get the value as float32 + Float32() (float32, error) + + // get the value as float32 and return defValue + // if the value of the key does not exist + Float32WithDefault(defValue float32) float32 + + // get the value as float64 + Float64() (float64, error) + + // get the value as the float64 and return defValue + // if the value of the key does not exist + Float64WithDefault(defValue float64) float64 + + // return a string as "key=value" format + // and if no value return empty string + String() string +} + +type nonExistKey struct { + name string +} + +func newNonExistKey(name string) *nonExistKey { + return &nonExistKey{name: name} +} + +func (nek *nonExistKey) Name() string { + return nek.name +} + +func (nek *nonExistKey) Value() (string, error) { + return "", nek.noSuchKey() +} + +func (nek *nonExistKey) ValueWithDefault(defValue string) string { + return defValue +} + +func (nek *nonExistKey) Bool() (bool, error) { + return false, nek.noSuchKey() +} + +func (nek *nonExistKey) BoolWithDefault(defValue bool) bool { + return defValue +} + +func (nek *nonExistKey) Int() (int, error) { + return 0, nek.noSuchKey() +} + +func (nek *nonExistKey) IntWithDefault(defValue int) int { + return defValue +} + +func (nek *nonExistKey) Uint() (uint, error) { + return 0, nek.noSuchKey() +} + +func (nek *nonExistKey) 
UintWithDefault(defValue uint) uint { + return defValue +} + +func (nek *nonExistKey) Int64() (int64, error) { + return 0, nek.noSuchKey() +} + +func (nek *nonExistKey) Int64WithDefault(defValue int64) int64 { + return defValue +} + +func (nek *nonExistKey) Uint64() (uint64, error) { + return 0, nek.noSuchKey() +} + +func (nek *nonExistKey) Uint64WithDefault(defValue uint64) uint64 { + return defValue +} + +func (nek *nonExistKey) Float32() (float32, error) { + return .0, nek.noSuchKey() +} + +func (nek *nonExistKey) Float32WithDefault(defValue float32) float32 { + return defValue +} + +func (nek *nonExistKey) Float64() (float64, error) { + return .0, nek.noSuchKey() +} + +func (nek *nonExistKey) Float64WithDefault(defValue float64) float64 { + return defValue +} + +func (nek *nonExistKey) String() string { + return "" +} + +func (nek *nonExistKey) noSuchKey() error { + return fmt.Errorf("no such key:%s", nek.name) +} + +type normalKey struct { + name string + value string +} + +var trueBoolValue = map[string]bool{"true": true, "t": true, "yes": true, "y": true, "1": true} + +func newNormalKey(name, value string) *normalKey { + return &normalKey{name: name, value: replace_env(value)} +} + +func (k *normalKey) Name() string { + return k.name +} + +func (k *normalKey) Value() (string, error) { + return k.value, nil +} + +func (k *normalKey) ValueWithDefault(defValue string) string { + return k.value +} + +func (k *normalKey) Bool() (bool, error) { + if _, ok := trueBoolValue[strings.ToLower(k.value)]; ok { + return true, nil + } + return false, nil +} + +func (k *normalKey) BoolWithDefault(defValue bool) bool { + v, err := k.Bool() + if err == nil { + return v + } + return defValue +} + +func (k *normalKey) Int() (int, error) { + return strconv.Atoi(k.value) +} + +func (k *normalKey) IntWithDefault(defValue int) int { + i, err := strconv.Atoi(k.value) + if err == nil { + return i + } + return defValue +} + +func (k *normalKey) Uint() (uint, error) { + v, err := strconv.ParseUint(k.value, 0, 32) + return uint(v), err +} + +func (k *normalKey) UintWithDefault(defValue uint) uint { + i, err := k.Uint() + if err == nil { + return i + } + return defValue + +} + +func (k *normalKey) Int64() (int64, error) { + return strconv.ParseInt(k.value, 0, 64) +} + +func (k *normalKey) Int64WithDefault(defValue int64) int64 { + i, err := strconv.ParseInt(k.value, 0, 64) + if err == nil { + return i + } + return defValue +} + +func (k *normalKey) Uint64() (uint64, error) { + return strconv.ParseUint(k.value, 0, 64) +} + +func (k *normalKey) Uint64WithDefault(defValue uint64) uint64 { + i, err := strconv.ParseUint(k.value, 0, 64) + if err == nil { + return i + } + return defValue +} + +func (k *normalKey) Float32() (float32, error) { + f, err := strconv.ParseFloat(k.value, 32) + return float32(f), err +} + +func (k *normalKey) Float32WithDefault(defValue float32) float32 { + f, err := strconv.ParseFloat(k.value, 32) + if err == nil { + return float32(f) + } + return defValue +} + +func (k *normalKey) Float64() (float64, error) { + return strconv.ParseFloat(k.value, 64) +} + +func (k *normalKey) Float64WithDefault(defValue float64) float64 { + f, err := strconv.ParseFloat(k.value, 64) + if err == nil { + return f + } + return defValue +} + +func (k *normalKey) String() string { + return fmt.Sprintf("%s=%s", k.name, toEscape(k.value)) +} diff --git a/vendor/github.com/ochinchina/go-ini/loader.go b/vendor/github.com/ochinchina/go-ini/loader.go new file mode 100644 index 000000000..059b8c26a --- /dev/null +++ 
b/vendor/github.com/ochinchina/go-ini/loader.go @@ -0,0 +1,349 @@ +package ini + +import ( + "bufio" + "bytes" + "errors" + "fmt" + "io" + "os" + "strconv" + "strings" + "unicode" +) + +// remove inline comments +// +// inline comments must start with ';' or '#' +// and the char before the ';' or '#' must be a space +// +func removeComments(value string) string { + n := len( value ) + i := 0 + for ;i < n; i++ { + if value[i] == '\\' { + i++ + } else if value[i] == ';' || value[i] == '#' { + if i > 0 && unicode.IsSpace( rune( value[i-1] ) ) { + return strings.TrimSpace( value[0:i] ) + } + } + } + return strings.TrimSpace( value ) +} + +// check if it is a oct char,e.g. must be char '0' to '7' +// +func isOctChar(ch byte) bool { + return ch >= '0' && ch <= '7' +} + +// check if the char is a hex char, e.g. the char +// must be '0'..'9' or 'a'..'f' or 'A'..'F' +// +func isHexChar(ch byte) bool { + return ch >= '0' && ch <= '9' || + ch >= 'a' && ch <= 'f' || + ch >= 'A' && ch <= 'F' +} + +func fromEscape(value string) string { + if strings.Index(value, "\\") == -1 { + return value + } + + r := "" + n := len(value) + for i := 0; i < n; i++ { + if value[i] == '\\' { + if i+1 < n { + i++ + //if is it oct + if i+2 < n && isOctChar(value[i]) && isOctChar(value[i+1]) && isOctChar(value[i+2]) { + t, err := strconv.ParseInt(value[i:i+3], 8, 32) + if err == nil { + r = r + string(rune(t)) + } + i += 2 + continue + } + switch value[i] { + case '0': + r = r + string(byte(0)) + case 'a': + r = r + "\a" + case 'b': + r = r + "\b" + case 'f': + r = r + "\f" + case 't': + r = r + "\t" + case 'r': + r = r + "\r" + case 'n': + r = r + "\n" + case 'v': + r = r + "\v" + case 'x': + i++ + if i+3 < n && isHexChar(value[i]) && + isHexChar(value[i+1]) && + isHexChar(value[i+2]) && + isHexChar(value[i+3]) { + + t, err := strconv.ParseInt(value[i:i+4], 16, 32) + if err == nil { + r = r + string(rune(t)) + } + i += 3 + } + default: + r = fmt.Sprintf("%s%c", r, value[i]) + } + } + } else { + r = fmt.Sprintf("%s%c", r, value[i]) + } + } + return r +} + +func toEscape(s string) string { + result := bytes.NewBuffer(make([]byte, 0)) + + n := len(s) + + for i := 0; i < n; i++ { + switch s[i] { + case 0: + result.WriteString("\\0") + case '\\': + result.WriteString("\\\\") + case '\a': + result.WriteString("\\a") + case '\b': + result.WriteString("\\b") + case '\t': + result.WriteString("\\t") + case '\r': + result.WriteString("\\r") + case '\n': + result.WriteString("\\n") + case ';': + result.WriteString("\\;") + case '#': + result.WriteString("\\#") + case '=': + result.WriteString("\\=") + case ':': + result.WriteString("\\:") + default: + result.WriteByte(s[i]) + } + } + return result.String() +} +func removeContinuationSuffix(value string) (string, bool) { + pos := strings.LastIndex(value, "\\") + n := len(value) + if pos == -1 || pos != n-1 { + return "", false + } + for pos >= 0 { + if value[pos] != '\\' { + return "", false + } + pos-- + if pos < 0 || value[pos] != '\\' { + return value[0 : n-1], true + } + pos-- + } + return "", false +} + +type lineReader struct { + reader *bufio.Scanner +} + +func newLineReader(reader io.Reader) *lineReader { + return &lineReader{reader: bufio.NewScanner(reader)} +} + +func (lr *lineReader) readLine() (string, error) { + if lr.reader.Scan() { + return lr.reader.Text(), nil + } + return "", errors.New("No data") + +} + +func readLinesUntilSuffix(lineReader *lineReader, suffix string) string { + r := "" + for { + line, err := lineReader.readLine() + if err != nil { + break + } + t 
:= strings.TrimRightFunc(line, unicode.IsSpace) + if strings.HasSuffix(t, suffix) { + r = r + t[0:len(t)-len(suffix)] + break + } else { + r = r + line + "\n" + } + } + return r +} + +func readContinuationLines(lineReader *lineReader) string { + r := "" + for { + line, err := lineReader.readLine() + if err != nil { + break + } + line = strings.TrimRightFunc(line, unicode.IsSpace) + if t, continuation := removeContinuationSuffix(line); continuation { + r = r + t + } else { + r = r + line + break + } + } + return r +} + +/* +Load from the sources, the source can be one of: + - fileName + - a string includes .ini + - io.Reader the reader to load the .ini contents + - byte array incldues .ini content +*/ +func (ini *Ini) Load(sources ...interface{}) { + for _, source := range sources { + switch source.(type) { + case string: + s, _ := source.(string) + if _, err := os.Stat(s); err == nil { + ini.LoadFile(s) + } else { + ini.LoadString(s) + } + case io.Reader: + reader, _ := source.(io.Reader) + ini.LoadReader(reader) + case []byte: + b, _ := source.([]byte) + ini.LoadBytes(b) + } + } + +} + +// Explicitly loads .ini from a reader +// +func (ini *Ini) LoadReader(reader io.Reader) { + lineReader := newLineReader(reader) + var curSection *Section = nil + for { + line, err := lineReader.readLine() + if err != nil { + break + } + line = strings.TrimSpace(line) + + //empty line or comments line + if len(line) <= 0 || line[0] == ';' || line[0] == '#' { + continue + } + //if it is a section + if strings.HasPrefix(line, "[") && strings.HasSuffix(line, "]") { + sectionName := strings.TrimSpace(line[1 : len(line)-1]) + if len(sectionName) > 0 { + curSection = ini.NewSection(sectionName) + } + continue + } + pos := strings.IndexAny(line, "=;") + if pos != -1 { + key := strings.TrimSpace(line[0:pos]) + value := strings.TrimLeftFunc(line[pos+1:], unicode.IsSpace) + //if it is a multiline indicator + if strings.HasPrefix(value, "\"\"\"") { + t := strings.TrimRightFunc(value, unicode.IsSpace) + //if the end multiline indicator is found + if strings.HasSuffix(t, "\"\"\"") { + value = t[3 : len(t)-3] + } else { //read lines until end multiline indicator is found + value = value[3:] + "\n" + readLinesUntilSuffix(lineReader, "\"\"\"") + } + } else { + value = strings.TrimRightFunc(value, unicode.IsSpace) + //if is it a continuation line + if t, continuation := removeContinuationSuffix(value); continuation { + value = t + readContinuationLines(lineReader) + } + } + + if len(key) > 0 { + if curSection == nil && len(ini.defaultSectionName) > 0 { + curSection = ini.NewSection(ini.defaultSectionName) + } + if curSection != nil { + //remove the comments and convert escape char to real + curSection.Add(key, strings.TrimSpace(fromEscape(removeComments(value)))) + } + } + } + } +} + +// Load ini file from file named fileName +// +func (ini *Ini) LoadFile(fileName string) { + f, err := os.Open(fileName) + if err == nil { + defer f.Close() + ini.Load(f) + } +} + +var defaultSectionName string = "default" + +func SetDefaultSectionName(defSectionName string) { + defaultSectionName = defSectionName +} + +// load ini from the content which contains the .ini formated string +// +func (ini *Ini) LoadString(content string) { + ini.Load(bytes.NewBufferString(content)) +} + +// load .ini from a byte array which contains the .ini formated content +func (ini *Ini) LoadBytes(content []byte) { + ini.Load(bytes.NewBuffer(content)) +} + +/* +Load the .ini from one of following resource: + - file + - string in .ini format + - byte array in 
.ini format + - io.Reader a reader to load .ini content + +One or more source can be provided in this Load method, such as: + var reader1 io.Reader = ... + var reader2 io.Reader = ... + ini.Load( "./my.ini", "[section]\nkey=1", "./my2.ini", reader1, reader2 ) +*/ +func Load(sources ...interface{}) *Ini { + ini := NewIni() + ini.SetDefaultSectionName(defaultSectionName) + for _, source := range sources { + ini.Load(source) + } + return ini +} diff --git a/vendor/github.com/ochinchina/go-ini/properties.go b/vendor/github.com/ochinchina/go-ini/properties.go new file mode 100644 index 000000000..bf02b6e5c --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/properties.go @@ -0,0 +1,116 @@ +package ini + +type Properties struct { + ini *Ini +} + +func NewProperties() *Properties { + return &Properties{ini: NewIni()} +} + +func (p *Properties) Load(sources ...interface{}) { + p.ini.Load(sources) +} + +func (p *Properties) GetProperty(key string) (string, error) { + return p.ini.GetValue(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetPropertyWithDefault(key string, defValue string) string { + v, err := p.GetProperty(key) + if err == nil { + return v + } + return defValue +} + +func (p *Properties) GetBool(key string) (bool, error) { + return p.ini.GetBool(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetBoolWithDefault(key string, defValue bool) bool{ + v, err := p.GetBool(key) + if err == nil { + return v + } else { + return defValue + } +} + +func (p *Properties) GetInt(key string) (int, error) { + return p.ini.GetInt(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetIntWithDefault(key string, defValue int) int { + v, err := p.GetInt(key) + if err == nil { + return v + } else { + return defValue + } +} + +func (p *Properties) GetInt64(key string) (int64, error) { + return p.ini.GetInt64(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetInt64WithDefault(key string, defValue int64) int64 { + v, err := p.GetInt64(key) + if err == nil { + return v + } else { + return defValue + } +} + +func (p *Properties) GetUint64(key string) (uint64, error) { + return p.ini.GetUint64(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetUint64WithDefault(key string, defValue uint64) uint64 { + v, err := p.GetUint64(key) + if err == nil { + return v + } else { + return defValue + } +} + +func (p *Properties) GetUint(key string) (uint, error) { + return p.ini.GetUint(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetUintWithDefault(key string, defValue uint) uint { + v, err := p.GetUint(key) + if err == nil { + return v + } else { + return defValue + } +} + +func (p *Properties) GetFloat32(key string) (float32, error) { + return p.ini.GetFloat32(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetFloat32WithDefault(key string, defValue float32) float32 { + v, err := p.GetFloat32(key) + if err == nil { + return v + } else { + return defValue + } +} + +func (p *Properties) GetFloat64(key string) (float64, error) { + return p.ini.GetFloat64(p.ini.GetDefaultSectionName(), key) +} + +func (p *Properties) GetFloat64WithDefault(key string, defValue float64) float64 { + v, err := p.GetFloat64(key) + if err == nil { + return v + } else { + return defValue + } +} diff --git a/vendor/github.com/ochinchina/go-ini/section.go b/vendor/github.com/ochinchina/go-ini/section.go new file mode 100644 index 000000000..e04ba4277 --- /dev/null +++ b/vendor/github.com/ochinchina/go-ini/section.go @@ -0,0 +1,177 @@ +package ini 
+ +import ( + "bytes" + "fmt" + "io" +) + +// manages all the key/value defined in the .ini file format +type Section struct { + //Name of the section + Name string + //key values + keyValues map[string]Key +} + +// construct a new section with section name +func NewSection(name string) *Section { + return &Section{Name: name, + keyValues: make(map[string]Key)} +} + +// add key/value to the section and overwrite the old one +func (section *Section) Add(key, value string) { + section.keyValues[key] = newNormalKey(key, value) +} + +// check if the key is in the section +// +// return true if the section contains the key +func (section *Section) HasKey(key string) bool { + _, ok := section.keyValues[key] + return ok +} + +// Get all the keys in the section +// +// return: all keys in the section +func (section *Section) Keys() []Key { + r := make([]Key, 0) + for _, v := range section.keyValues { + r = append(r, v) + } + return r +} + +// Get the key. +// +// This method can be called even if the key is not in the +// section. +func (section *Section) Key(key string) Key { + if v, ok := section.keyValues[key]; ok { + return v + } + return newNonExistKey(key) +} + +// Get value of key as string +func (section *Section) GetValue(key string) (string, error) { + return section.Key(key).Value() +} + +// Get value of key and if the key does not exist, return the defValue +func (section *Section) GetValueWithDefault(key string, defValue string) string { + return section.Key(key).ValueWithDefault(defValue) +} + +// Get the value of key as bool, it will return true if the value of the key is one +// of following( case insensitive): +// - true +// - yes +// - t +// - y +// - 1 +func (section *Section) GetBool(key string) (bool, error) { + return section.Key(key).Bool() +} + +// Get the value of key as bool and if the key does not exist, return the +// default value +func (section *Section) GetBoolWithDefault(key string, defValue bool) bool { + return section.Key(key).BoolWithDefault(defValue) +} + +// Get the value of the key as int +func (section *Section) GetInt(key string) (int, error) { + return section.Key(key).Int() +} + +// Get the value of the key as int and if the key does not exist return +// the default value +func (section *Section) GetIntWithDefault(key string, defValue int) int { + return section.Key(key).IntWithDefault(defValue) +} + +// Get the value of the key as uint +func (section *Section) GetUint(key string) (uint, error) { + return section.Key(key).Uint() +} + +// Get the value of the key as int and if the key does not exist return +// the default value +func (section *Section) GetUintWithDefault(key string, defValue uint) uint { + return section.Key(key).UintWithDefault(defValue) +} + +// Get the value of the key as int64 +func (section *Section) GetInt64(key string) (int64, error) { + return section.Key(key).Int64() +} + +// Get the value of the key as int64 and if the key does not exist return +// the default value +func (section *Section) GetInt64WithDefault(key string, defValue int64) int64 { + return section.Key(key).Int64WithDefault(defValue) +} + +// Get the value of the key as uint64 +func (section *Section) GetUint64(key string) (uint64, error) { + return section.Key(key).Uint64() +} + +// Get the value of the key as uint64 and if the key does not exist return +// the default value +func (section *Section) GetUint64WithDefault(key string, defValue uint64) uint64 { + return section.Key(key).Uint64WithDefault(defValue) +} + +// Get the value of the key as float32 +func 
(section *Section) GetFloat32(key string) (float32, error) { + return section.Key(key).Float32() +} + +// Get the value of the key as float32 and if the key does not exist return +// the default value +func (section *Section) GetFloat32WithDefault(key string, defValue float32) float32 { + return section.Key(key).Float32WithDefault(defValue) +} + +// Get the value of the key as float64 +func (section *Section) GetFloat64(key string) (float64, error) { + return section.Key(key).Float64() +} + +// Get the value of the key as float64 and if the key does not exist return +// the default value +func (section *Section) GetFloat64WithDefault(key string, defValue float64) float64 { + return section.Key(key).Float64WithDefault(defValue) +} + +// convert the section content to the .ini section format, so the section content will +// be converted to following format: +// +// [sectionx] +// key1 = value1 +// key2 = value2 +// +func (section *Section) String() string { + buf := bytes.NewBuffer(make([]byte, 0)) + section.Write(buf) + return buf.String() +} + +// write the section content to the writer with .ini section format. +func (section *Section) Write(writer io.Writer) error { + _, err := fmt.Fprintf(writer, "[%s]\n", section.Name) + if err != nil { + return err + } + for _, v := range section.keyValues { + _, err = fmt.Fprintf(writer, "%s\n", v.String()) + if err != nil { + return err + } + } + return nil +} diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/LICENSE b/vendor/github.com/ochinchina/gorilla-xmlrpc/LICENSE new file mode 100644 index 000000000..2e907e487 --- /dev/null +++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2013, Ivan Daniluk +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. + +* Neither the name of the {organization} nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/client.go b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/client.go new file mode 100644 index 000000000..d8cb0a4cf --- /dev/null +++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/client.go @@ -0,0 +1,26 @@ +// Copyright 2013 Ivan Danyliuk +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xml + +import ( + "io" + "io/ioutil" +) + +// EncodeClientRequest encodes parameters for a XML-RPC client request. +func EncodeClientRequest(method string, args interface{}) ([]byte, error) { + xml, err := rpcRequest2XML(method, args) + return []byte(xml), err +} + +// DecodeClientResponse decodes the response body of a client request into +// the interface reply. +func DecodeClientResponse(r io.Reader, reply interface{}) error { + rawxml, err := ioutil.ReadAll(r) + if err != nil { + return FaultSystemError + } + return xml2RPC(string(rawxml), reply) +} diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/doc.go b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/doc.go new file mode 100644 index 000000000..eebf2e8ae --- /dev/null +++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/doc.go @@ -0,0 +1,50 @@ +/* +XML-RPC implementation for the Gorilla/RPC toolkit. + +It's built on top of gorilla/rpc package in Go(Golang) language and implements XML-RPC, according to it's specification. Unlike net/rpc from Go strlib, gorilla/rpc allows usage of HTTP POST requests for RPC. + +XML-RPC spec: http://xmlrpc.scripting.com/spec.html + +Installation + +Assuming you already imported gorilla/rpc, use the following command: + + go get github.com/divan/gorilla-xmlrpc/xml + +Implementation details + +The main objective was to use standard encoding/xml package for XML marshalling/unmarshalling. Unfortunately, in current implementation there is no graceful way to implement common structre for marshal and unmarshal functions - marshalling doesn't handle interface{} types so far (though, it could be changed in the future). So, marshalling is implemented manually. + +Unmarshalling code first creates temporary structure for unmarshalling XML into, then converts it into the passed variable using reflect package. If XML struct member's name is lowercased, it's first letter will be uppercased, as in Go/Gorilla field name must be exported(first-letter uppercased). + +Marshalling code converts rpc directly to the string XML representation. + +For the better understanding, I use terms 'rpc2xml' and 'xml2rpc' instead of 'marshal' and 'unmarshall'. + +Types + +The following types are supported: + + XML-RPC Golang + ------- ------ + int, i4 int + double float64 + boolean bool + stringi string + dateTime.iso8601 time.Time + base64 []byte + struct struct + array []interface{} + nil nil + +TODO + +TODO list: + * Add more corner cases tests + +Examples + +Checkout examples in examples/ directory. + +*/ +package xml diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/fault.go b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/fault.go new file mode 100644 index 000000000..4a24efb66 --- /dev/null +++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/fault.go @@ -0,0 +1,51 @@ +// Copyright 2013 Ivan Danyliuk +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xml + +import ( + "fmt" + "io" +) + +// Default Faults +// NOTE: XMLRPC spec doesn't specify any Fault codes. 
+// These codes seems to be widely accepted, and taken from the http://xmlrpc-epi.sourceforge.net/specs/rfc.fault_codes.php +var ( + FaultInvalidParams = Fault{Code: -32602, String: "Invalid Method Parameters"} + FaultWrongArgumentsNumber = Fault{Code: -32602, String: "Wrong Arguments Number"} + FaultInternalError = Fault{Code: -32603, String: "Internal Server Error"} + FaultApplicationError = Fault{Code: -32500, String: "Application Error"} + FaultSystemError = Fault{Code: -32400, String: "System Error"} + FaultDecode = Fault{Code: -32700, String: "Parsing error: not well formed"} +) + +// Fault represents XML-RPC Fault. +type Fault struct { + Code int `xml:"faultCode"` + String string `xml:"faultString"` +} + +// Error satisifies error interface for Fault. +func (f Fault) Error() string { + return fmt.Sprintf("%d: %s", f.Code, f.String) +} + +// Fault2XML is a quick 'marshalling' replacemnt for the Fault case. +func fault2XML(fault Fault, buffer io.Writer) { + fmt.Fprintf(buffer, "") + rpc2XML(fault, buffer) + fmt.Fprintf(buffer, "") +} + +type faultValue struct { + Value value `xml:"value"` +} + +// IsEmpty returns true if faultValue contain fault. +// +// faultValue should be a struct with 2 members. +func (f faultValue) IsEmpty() bool { + return len(f.Value.Struct) == 0 +} diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/rpc2xml.go b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/rpc2xml.go new file mode 100644 index 000000000..6c17e5a2f --- /dev/null +++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/rpc2xml.go @@ -0,0 +1,149 @@ +// Copyright 2013 Ivan Danyliuk +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xml + +import ( + "bytes" + "encoding/base64" + "fmt" + "io" + "reflect" + "strings" + "time" +) + +func rpcRequest2XML(method string, rpc interface{}) (string, error) { + buffer := bytes.NewBuffer(make([]byte, 0)) + fmt.Fprintf(buffer, "%s", method) + err := rpcParams2XML(rpc, buffer) + fmt.Fprintf(buffer, "") + return buffer.String(), err +} + +func rpcResponse2XMLStr(rpc interface{}) (string, error) { + buffer := bytes.NewBuffer(make([]byte, 0)) + err := rpcResponse2XML(rpc, buffer) + return buffer.String(), err +} + +func rpcResponse2XML(rpc interface{}, writer io.Writer) error { + fmt.Fprintf(writer, "") + err := rpcParams2XML(rpc, writer) + fmt.Fprintf(writer, "") + return err +} + +func rpcParams2XML(rpc interface{}, writer io.Writer) error { + var err error + fmt.Fprintf(writer, "") + for i := 0; i < reflect.ValueOf(rpc).Elem().NumField(); i++ { + fmt.Fprintf(writer, "") + err = rpc2XML(reflect.ValueOf(rpc).Elem().Field(i).Interface(), writer) + fmt.Fprintf(writer, "") + } + fmt.Fprintf(writer, "") + return err +} + +func rpc2XML(value interface{}, writer io.Writer) error { + fmt.Fprintf(writer, "") + switch reflect.ValueOf(value).Kind() { + case reflect.Int: + fmt.Fprintf(writer, "%d", value.(int)) + case reflect.Float64: + fmt.Fprintf(writer, "%f", value.(float64)) + case reflect.String: + string2XML(value.(string), writer) + case reflect.Bool: + bool2XML(value.(bool), writer) + case reflect.Struct: + if reflect.TypeOf(value).String() != "time.Time" { + struct2XML(value, writer) + } else { + time2XML(value.(time.Time), writer) + } + case reflect.Slice, reflect.Array: + // FIXME: is it the best way to recognize '[]byte'? 
+ if reflect.TypeOf(value).String() != "[]uint8" { + array2XML(value, writer) + } else { + base642XML(value.([]byte), writer) + } + case reflect.Ptr: + if reflect.ValueOf(value).IsNil() { + fmt.Fprintf(writer, "") + } + } + fmt.Fprintf(writer, "") + return nil +} + +func bool2XML(value bool, writer io.Writer) { + var b string + if value { + b = "1" + } else { + b = "0" + } + fmt.Fprintf(writer, "%s", b) +} + +func string2XML(value string, writer io.Writer) { + value = strings.Replace(value, "&", "&", -1) + value = strings.Replace(value, "\"", """, -1) + value = strings.Replace(value, "<", "<", -1) + value = strings.Replace(value, ">", ">", -1) + fmt.Fprintf(writer, "%s", value) +} + +func struct2XML(value interface{}, writer io.Writer) { + fmt.Fprintf(writer, "") + for i := 0; i < reflect.TypeOf(value).NumField(); i++ { + field := reflect.ValueOf(value).Field(i) + field_type := reflect.TypeOf(value).Field(i) + var name string + if field_type.Tag.Get("xml") != "" { + name = field_type.Tag.Get("xml") + } else { + name = field_type.Name + } + fmt.Fprintf(writer, "") + fmt.Fprintf(writer, "%s", name) + rpc2XML(field.Interface(), writer) + fmt.Fprintf(writer, "") + } + fmt.Fprintf(writer, "") + return +} + +func array2XML(value interface{}, writer io.Writer) { + fmt.Fprintf(writer, "") + for i := 0; i < reflect.ValueOf(value).Len(); i++ { + rpc2XML(reflect.ValueOf(value).Index(i).Interface(), writer) + } + fmt.Fprintf(writer, "") +} + +func time2XML(t time.Time, writer io.Writer) { + /* + // TODO: find out whether we need to deal + // here with TZ + var tz string; + zone, offset := t.Zone() + if zone == "UTC" { + tz = "Z" + } else { + tz = fmt.Sprintf("%03d00", offset / 3600 ) + } + */ + fmt.Fprintf(writer, "%04d%02d%02dT%02d:%02d:%02d", + t.Year(), t.Month(), t.Day(), + t.Hour(), t.Minute(), t.Second()) +} + +func base642XML(data []byte, writer io.Writer) { + str := base64.StdEncoding.EncodeToString(data) + fmt.Fprintf(writer, "%s", str) +} diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/server.go b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/server.go new file mode 100644 index 000000000..a1eb0a7e0 --- /dev/null +++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/server.go @@ -0,0 +1,118 @@ +// Copyright 2013 Ivan Danyliuk +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xml + +import ( + "bytes" + "encoding/xml" + "fmt" + "io/ioutil" + "net/http" + + "github.com/gorilla/rpc" +) + +// ---------------------------------------------------------------------------- +// Codec +// ---------------------------------------------------------------------------- + +// NewCodec returns a new XML-RPC Codec. +func NewCodec() *Codec { + return &Codec{ + aliases: make(map[string]string), + } +} + +// Codec creates a CodecRequest to process each request. +type Codec struct { + aliases map[string]string +} + +// RegisterAlias creates a method alias +func (c *Codec) RegisterAlias(alias, method string) { + c.aliases[alias] = method +} + +// NewRequest returns a CodecRequest. 
+func (c *Codec) NewRequest(r *http.Request) rpc.CodecRequest { + rawxml, err := ioutil.ReadAll(r.Body) + if err != nil { + return &CodecRequest{err: err} + } + defer r.Body.Close() + + var request ServerRequest + if err := xml.Unmarshal(rawxml, &request); err != nil { + return &CodecRequest{err: err} + } + request.rawxml = string(rawxml) + if method, ok := c.aliases[request.Method]; ok { + request.Method = method + } + return &CodecRequest{request: &request} +} + +// ---------------------------------------------------------------------------- +// CodecRequest +// ---------------------------------------------------------------------------- + +type ServerRequest struct { + Name xml.Name `xml:"methodCall"` + Method string `xml:"methodName"` + rawxml string +} + +// CodecRequest decodes and encodes a single request. +type CodecRequest struct { + request *ServerRequest + err error +} + +// Method returns the RPC method for the current request. +// +// The method uses a dotted notation as in "Service.Method". +func (c *CodecRequest) Method() (string, error) { + if c.err == nil { + return c.request.Method, nil + } + return "", c.err +} + +// ReadRequest fills the request object for the RPC method. +// +// args is the pointer to the Service.Args structure +// it gets populated from temporary XML structure +func (c *CodecRequest) ReadRequest(args interface{}) error { + c.err = xml2RPC(c.request.rawxml, args) + return nil +} + +// WriteResponse encodes the response and writes it to the ResponseWriter. +// +// response is the pointer to the Service.Response structure +// it gets encoded into the XML-RPC xml string +func (c *CodecRequest) WriteResponse(w http.ResponseWriter, response interface{}, methodErr error) error { + if c.err == nil { + c.err = methodErr + } + buffer := bytes.NewBuffer(make([]byte, 0)) + if c.err != nil { + var fault Fault + switch c.err.(type) { + case Fault: + fault = c.err.(Fault) + default: + fault = FaultApplicationError + fault.String += fmt.Sprintf(": %v", c.err) + } + fault2XML(fault, buffer) + } else { + rpcResponse2XML(response, buffer) + } + + w.Header().Set("Content-Type", "text/xml; charset=utf-8") + buffer.WriteTo(w) + return nil +} diff --git a/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/xml2rpc.go b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/xml2rpc.go new file mode 100644 index 000000000..48b08536f --- /dev/null +++ b/vendor/github.com/ochinchina/gorilla-xmlrpc/xml/xml2rpc.go @@ -0,0 +1,219 @@ +// Copyright 2013 Ivan Danyliuk +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package xml + +import ( + "bytes" + "encoding/base64" + "encoding/xml" + "fmt" + "reflect" + "strconv" + "time" + "unicode" + "unicode/utf8" + + "github.com/rogpeppe/go-charset/charset" + _ "github.com/rogpeppe/go-charset/data" +) + +// Types used for unmarshalling +type response struct { + Name xml.Name `xml:"methodResponse"` + Params []param `xml:"params>param"` + Fault faultValue `xml:"fault,omitempty"` +} + +type param struct { + Value value `xml:"value"` +} + +type value struct { + Array []value `xml:"array>data>value"` + Struct []member `xml:"struct>member"` + String string `xml:"string"` + Int string `xml:"int"` + Int4 string `xml:"i4"` + Double string `xml:"double"` + Boolean string `xml:"boolean"` + DateTime string `xml:"dateTime.iso8601"` + Base64 string `xml:"base64"` + Raw string `xml:",innerxml"` // the value can be defualt string +} + +type member struct { + Name string `xml:"name"` + Value value `xml:"value"` +} + +func xml2RPC(xmlraw string, rpc interface{}) error { + // Unmarshal raw XML into the temporal structure + var ret response + decoder := xml.NewDecoder(bytes.NewReader([]byte(xmlraw))) + decoder.CharsetReader = charset.NewReader + err := decoder.Decode(&ret) + if err != nil { + return FaultDecode + } + + if !ret.Fault.IsEmpty() { + return getFaultResponse(ret.Fault) + } + + // Now, convert temporal structure into the + // passed rpc variable, according to it's structure + fieldNum := reflect.TypeOf(rpc).Elem().NumField() + //for i, param := range ret.Params { + for i := 0; i < fieldNum; i += 1 { + field := reflect.ValueOf(rpc).Elem().Field(i) + if len(ret.Params) > i { + err = value2Field(ret.Params[i].Value, &field) + } else if reflect.TypeOf(rpc).Elem().Field(i).Tag.Get("default") != "" { + err = value2Field(createValue(reflect.TypeOf(rpc).Elem().Field(i).Type.Kind(), reflect.TypeOf(rpc).Elem().Field(i).Tag.Get("default")), &field) + } + if err != nil { + return err + } + } + + return nil +} + +func createValue(kind reflect.Kind, val string) value { + v := value{} + if kind == reflect.Bool { + v.Boolean = val + } else if kind == reflect.Int { + v.Int = val + } + return v +} + +// getFaultResponse converts faultValue to Fault. 
+func getFaultResponse(fault faultValue) Fault { + var ( + code int + str string + ) + + for _, field := range fault.Value.Struct { + if field.Name == "faultCode" { + code, _ = strconv.Atoi(field.Value.Int) + } else if field.Name == "faultString" { + str = field.Value.String + if str == "" { + str = field.Value.Raw + } + } + } + + return Fault{Code: code, String: str} +} + +func value2Field(value value, field *reflect.Value) error { + if !field.CanSet() { + return FaultApplicationError + } + + var ( + err error + val interface{} + ) + + switch { + case value.Int != "": + val, _ = strconv.Atoi(value.Int) + case value.Int4 != "": + val, _ = strconv.Atoi(value.Int4) + case value.Double != "": + val, _ = strconv.ParseFloat(value.Double, 64) + case value.String != "": + val = value.String + case value.Boolean != "": + val = xml2Bool(value.Boolean) + case value.DateTime != "": + val, err = xml2DateTime(value.DateTime) + case value.Base64 != "": + val, err = xml2Base64(value.Base64) + case len(value.Struct) != 0: + if field.Kind() != reflect.Struct { + fault := FaultInvalidParams + fault.String += fmt.Sprintf("structure fields mismatch: %s != %s", field.Kind(), reflect.Struct.String()) + return fault + } + s := value.Struct + for i := 0; i < len(s); i++ { + // Uppercase first letter for field name to deal with + // methods in lowercase, which cannot be used + field_name := uppercaseFirst(s[i].Name) + f := field.FieldByName(field_name) + err = value2Field(s[i].Value, &f) + } + case len(value.Array) != 0: + a := value.Array + f := *field + slice := reflect.MakeSlice(reflect.TypeOf(f.Interface()), + len(a), len(a)) + for i := 0; i < len(a); i++ { + item := slice.Index(i) + err = value2Field(a[i], &item) + } + f = reflect.AppendSlice(f, slice) + val = f.Interface() + + default: + // value field is default to string, see http://en.wikipedia.org/wiki/XML-RPC#Data_types + // also can be + if value.Raw != "" { + val = value.Raw + } + } + + if val != nil { + if reflect.TypeOf(val) != reflect.TypeOf(field.Interface()) { + fault := FaultInvalidParams + fault.String += fmt.Sprintf(": fields type mismatch: %s != %s", + reflect.TypeOf(val), + reflect.TypeOf(field.Interface())) + return fault + } + + field.Set(reflect.ValueOf(val)) + } + + return err +} + +func xml2Bool(value string) bool { + var b bool + switch value { + case "1", "true", "TRUE", "True": + b = true + case "0", "false", "FALSE", "False": + b = false + } + return b +} + +func xml2DateTime(value string) (time.Time, error) { + var ( + year, month, day int + hour, minute, second int + ) + _, err := fmt.Sscanf(value, "%04d%02d%02dT%02d:%02d:%02d", + &year, &month, &day, + &hour, &minute, &second) + t := time.Date(year, time.Month(month), day, hour, minute, second, 0, time.Local) + return t, err +} + +func xml2Base64(value string) ([]byte, error) { + return base64.StdEncoding.DecodeString(value) +} + +func uppercaseFirst(in string) (out string) { + r, n := utf8.DecodeRuneInString(in) + return string(unicode.ToUpper(r)) + in[n:] +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/big5.go b/vendor/github.com/rogpeppe/go-charset/charset/big5.go new file mode 100644 index 000000000..e01fa1afd --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/big5.go @@ -0,0 +1,88 @@ +package charset + +import ( + "fmt" + "unicode/utf8" +) + +func init() { + registerClass("big5", fromBig5, nil) +} + +// Big5 consists of 89 fonts of 157 chars each +const ( + big5Max = 13973 + big5Font = 157 + big5Data = "big5.dat" +) + +type translateFromBig5 
struct { + font int + scratch []byte + big5map []rune +} + +func (p *translateFromBig5) Translate(data []byte, eof bool) (int, []byte, error) { + p.scratch = p.scratch[:0] + n := 0 + for len(data) > 0 { + c := int(data[0]) + data = data[1:] + n++ + if p.font == -1 { + // idle state + if c >= 0xa1 { + p.font = c + continue + } + if c == 26 { + c = '\n' + } + continue + } + f := p.font + p.font = -1 + r := utf8.RuneError + switch { + case c >= 64 && c <= 126: + c -= 64 + case c >= 161 && c <= 254: + c = c - 161 + 63 + default: + // bad big5 char + f = 255 + } + if f <= 254 { + f -= 161 + ix := f*big5Font + c + if ix < len(p.big5map) { + r = p.big5map[ix] + } + if r == -1 { + r = utf8.RuneError + } + } + p.scratch = appendRune(p.scratch, r) + } + return n, p.scratch, nil +} + +type big5Key bool + +func fromBig5(arg string) (Translator, error) { + big5map, err := cache(big5Key(false), func() (interface{}, error) { + data, err := readFile(big5Data) + if err != nil { + return nil, fmt.Errorf("charset: cannot open big5 data file: %v", err) + } + big5map := []rune(string(data)) + if len(big5map) != big5Max { + return nil, fmt.Errorf("charset: corrupt big5 data") + } + return big5map, nil + }) + if err != nil { + return nil, err + } + return &translateFromBig5{big5map: big5map.([]rune), font: -1}, nil +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/charset.go b/vendor/github.com/rogpeppe/go-charset/charset/charset.go new file mode 100644 index 000000000..a7af30ee6 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/charset.go @@ -0,0 +1,301 @@ +// The charset package implements translation between character sets. +// It uses Unicode as the intermediate representation. +// Because it can be large, the character set data is separated +// from the charset package. It can be embedded in the Go +// executable by importing the data package: +// +// import _ "code.google.com/p/go-charset/data" +// +// It can also made available in a data directory (by settting CharsetDir). +package charset + +import ( + "io" + "strings" + "unicode/utf8" +) + +// Charset holds information about a given character set. +type Charset struct { + Name string // Canonical name of character set. + Aliases []string // Known aliases. + Desc string // Description. + NoFrom bool // Not possible to translate from this charset. + NoTo bool // Not possible to translate to this charset. +} + +// Translator represents a character set converter. +// The Translate method translates the given data, +// and returns the number of bytes of data consumed, +// a slice containing the converted data (which may be +// overwritten on the next call to Translate), and any +// conversion error. If eof is true, the data represents +// the final bytes of the input. +type Translator interface { + Translate(data []byte, eof bool) (n int, cdata []byte, err error) +} + +// A Factory can be used to make character set translators. +type Factory interface { + // TranslatorFrom creates a translator that will translate from the named character + // set to UTF-8. + TranslatorFrom(name string) (Translator, error) // Create a Translator from this character set to. + + // TranslatorTo creates a translator that will translate from UTF-8 to the named character set. + TranslatorTo(name string) (Translator, error) // Create a Translator To this character set. + + // Names returns all the character set names accessibile through the factory. + Names() []string + + // Info returns information on the named character set. 
It returns nil if the + // factory doesn't recognise the given name. + Info(name string) *Charset +} + +var factories = []Factory{localFactory{}} + +// Register registers a new Factory which will be consulted when NewReader +// or NewWriter needs a character set translator for a given name. +func Register(factory Factory) { + factories = append(factories, factory) +} + +// NewReader returns a new Reader that translates from the named +// character set to UTF-8 as it reads r. +func NewReader(charset string, r io.Reader) (io.Reader, error) { + tr, err := TranslatorFrom(charset) + if err != nil { + return nil, err + } + return NewTranslatingReader(r, tr), nil +} + +// NewWriter returns a new WriteCloser writing to w. It converts writes +// of UTF-8 text into writes on w of text in the named character set. +// The Close is necessary to flush any remaining partially translated +// characters to the output. +func NewWriter(charset string, w io.Writer) (io.WriteCloser, error) { + tr, err := TranslatorTo(charset) + if err != nil { + return nil, err + } + return NewTranslatingWriter(w, tr), nil +} + +// Info returns information about a character set, or nil +// if the character set is not found. +func Info(name string) *Charset { + for _, f := range factories { + if info := f.Info(name); info != nil { + return info + } + } + return nil +} + +// Names returns the canonical names of all supported character sets, in alphabetical order. +func Names() []string { + // TODO eliminate duplicates + var names []string + for _, f := range factories { + names = append(names, f.Names()...) + } + return names +} + +// TranslatorFrom returns a translator that will translate from +// the named character set to UTF-8. +func TranslatorFrom(charset string) (Translator, error) { + var err error + var tr Translator + for _, f := range factories { + tr, err = f.TranslatorFrom(charset) + if err == nil { + break + } + } + if tr == nil { + return nil, err + } + return tr, nil +} + +// TranslatorTo returns a translator that will translate from UTF-8 +// to the named character set. +func TranslatorTo(charset string) (Translator, error) { + var err error + var tr Translator + for _, f := range factories { + tr, err = f.TranslatorTo(charset) + if err == nil { + break + } + } + if tr == nil { + return nil, err + } + return tr, nil +} + +func normalizedChar(c rune) rune { + switch { + case c >= 'A' && c <= 'Z': + c = c - 'A' + 'a' + case c == '_': + c = '-' + } + return c +} + +// NormalisedName returns s with all Roman capitals +// mapped to lower case, and '_' mapped to '-' +func NormalizedName(s string) string { + return strings.Map(normalizedChar, s) +} + +type translatingWriter struct { + w io.Writer + tr Translator + buf []byte // unconsumed data from writer. +} + +// NewTranslatingWriter returns a new WriteCloser writing to w. +// It passes the written bytes through the given Translator. +func NewTranslatingWriter(w io.Writer, tr Translator) io.WriteCloser { + return &translatingWriter{w: w, tr: tr} +} + +func (w *translatingWriter) Write(data []byte) (rn int, rerr error) { + wdata := data + if len(w.buf) > 0 { + w.buf = append(w.buf, data...) + wdata = w.buf + } + n, cdata, err := w.tr.Translate(wdata, false) + if err != nil { + // TODO + } + if n > 0 { + _, err = w.w.Write(cdata) + if err != nil { + return 0, err + } + } + w.buf = w.buf[:0] + if n < len(wdata) { + w.buf = append(w.buf, wdata[n:]...) 
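+		// w.buf now holds the bytes the Translator did not consume; they are
+		// retried on the next Write call and flushed at Close.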
+ } + return len(data), nil +} + +func (p *translatingWriter) Close() error { + for { + n, data, err := p.tr.Translate(p.buf, true) + p.buf = p.buf[n:] + if err != nil { + // TODO + } + // If the Translator produces no data + // at EOF, then assume that it never will. + if len(data) == 0 { + break + } + n, err = p.w.Write(data) + if err != nil { + return err + } + if n < len(data) { + return io.ErrShortWrite + } + if len(p.buf) == 0 { + break + } + } + return nil +} + +type translatingReader struct { + r io.Reader + tr Translator + cdata []byte // unconsumed data from converter. + rdata []byte // unconverted data from reader. + err error // final error from reader. +} + +// NewTranslatingReader returns a new Reader that +// translates data using the given Translator as it reads r. +func NewTranslatingReader(r io.Reader, tr Translator) io.Reader { + return &translatingReader{r: r, tr: tr} +} + +func (r *translatingReader) Read(buf []byte) (int, error) { + for { + if len(r.cdata) > 0 { + n := copy(buf, r.cdata) + r.cdata = r.cdata[n:] + return n, nil + } + if r.err == nil { + r.rdata = ensureCap(r.rdata, len(r.rdata)+len(buf)) + n, err := r.r.Read(r.rdata[len(r.rdata):cap(r.rdata)]) + // Guard against non-compliant Readers. + if n == 0 && err == nil { + err = io.EOF + } + r.rdata = r.rdata[0 : len(r.rdata)+n] + r.err = err + } else if len(r.rdata) == 0 { + break + } + nc, cdata, cvterr := r.tr.Translate(r.rdata, r.err != nil) + if cvterr != nil { + // TODO + } + r.cdata = cdata + + // Ensure that we consume all bytes at eof + // if the converter refuses them. + if nc == 0 && r.err != nil { + nc = len(r.rdata) + } + + // Copy unconsumed data to the start of the rdata buffer. + r.rdata = r.rdata[0:copy(r.rdata, r.rdata[nc:])] + } + return 0, r.err +} + +// ensureCap returns s with a capacity of at least n bytes. +// If cap(s) < n, then it returns a new copy of s with the +// required capacity. +func ensureCap(s []byte, n int) []byte { + if n <= cap(s) { + return s + } + // logic adapted from appendslice1 in runtime + m := cap(s) + if m == 0 { + m = n + } else { + for { + if m < 1024 { + m += m + } else { + m += m / 4 + } + if m >= n { + break + } + } + } + t := make([]byte, len(s), m) + copy(t, s) + return t +} + +func appendRune(buf []byte, r rune) []byte { + n := len(buf) + buf = ensureCap(buf, n+utf8.UTFMax) + nu := utf8.EncodeRune(buf[n:n+utf8.UTFMax], r) + return buf[0 : n+nu] +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/codepage.go b/vendor/github.com/rogpeppe/go-charset/charset/codepage.go new file mode 100644 index 000000000..6864c8753 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/codepage.go @@ -0,0 +1,133 @@ +package charset + +import ( + "fmt" + "unicode/utf8" +) + +func init() { + registerClass("cp", fromCodePage, toCodePage) +} + +type translateFromCodePage struct { + byte2rune *[256]rune + scratch []byte +} + +type cpKeyFrom string +type cpKeyTo string + +func (p *translateFromCodePage) Translate(data []byte, eof bool) (int, []byte, error) { + p.scratch = ensureCap(p.scratch, len(data)*utf8.UTFMax)[:0] + buf := p.scratch + for _, x := range data { + r := p.byte2rune[x] + if r < utf8.RuneSelf { + buf = append(buf, byte(r)) + continue + } + size := utf8.EncodeRune(buf[len(buf):cap(buf)], r) + buf = buf[0 : len(buf)+size] + } + return len(data), buf, nil +} + +type toCodePageInfo struct { + rune2byte map[rune]byte + // same gives the number of runes at start of code page that map exactly to + // unicode. 
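+	// Runes below this value are encoded directly as their own byte value,
+	// without a map lookup.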
+ same rune +} + +type translateToCodePage struct { + toCodePageInfo + scratch []byte +} + +func (p *translateToCodePage) Translate(data []byte, eof bool) (int, []byte, error) { + p.scratch = ensureCap(p.scratch, len(data)) + buf := p.scratch[:0] + + for i := 0; i < len(data); { + r := rune(data[i]) + size := 1 + if r >= utf8.RuneSelf { + r, size = utf8.DecodeRune(data[i:]) + if size == 1 && !eof && !utf8.FullRune(data[i:]) { + return i, buf, nil + } + } + + var b byte + if r < p.same { + b = byte(r) + } else { + var ok bool + b, ok = p.rune2byte[r] + if !ok { + b = '?' + } + } + buf = append(buf, b) + i += size + } + return len(data), buf, nil +} + +func fromCodePage(arg string) (Translator, error) { + runes, err := cache(cpKeyFrom(arg), func() (interface{}, error) { + data, err := readFile(arg) + if err != nil { + return nil, err + } + runes := []rune(string(data)) + if len(runes) != 256 { + return nil, fmt.Errorf("charset: %q has wrong rune count (%d)", arg, len(runes)) + } + r := new([256]rune) + copy(r[:], runes) + return r, nil + }) + if err != nil { + return nil, err + } + return &translateFromCodePage{byte2rune: runes.(*[256]rune)}, nil +} + +func toCodePage(arg string) (Translator, error) { + m, err := cache(cpKeyTo(arg), func() (interface{}, error) { + data, err := readFile(arg) + if err != nil { + return nil, err + } + + info := toCodePageInfo{ + rune2byte: make(map[rune]byte), + same: 256, + } + atStart := true + i := rune(0) + for _, r := range string(data) { + if atStart { + if r == i { + i++ + continue + } + info.same = i + atStart = false + } + info.rune2byte[r] = byte(i) + i++ + } + // TODO fix tables + // fmt.Printf("%s, same = %d\n", arg, info.same) + if i != 256 { + return nil, fmt.Errorf("charset: %q has wrong rune count (%d)", arg, i) + } + return info, nil + }) + if err != nil { + return nil, err + } + return &translateToCodePage{toCodePageInfo: m.(toCodePageInfo)}, nil +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/cp932.go b/vendor/github.com/rogpeppe/go-charset/charset/cp932.go new file mode 100644 index 000000000..9f46262ba --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/cp932.go @@ -0,0 +1,195 @@ +package charset + +import ( + "fmt" + "unicode/utf8" +) + +func init() { + registerClass("cp932", fromCP932, nil) +} + +// encoding details +// (Traditional) Shift-JIS +// +// 00..1f control characters +// 20 space +// 21..7f JIS X 0201:1976/1997 roman (see notes) +// 80 undefined +// 81..9f lead byte of JIS X 0208-1983 or JIS X 0202:1990/1997 +// a0 undefined +// a1..df JIS X 0201:1976/1997 katakana +// e0..ea lead byte of JIS X 0208-1983 or JIS X 0202:1990/1997 +// eb..ff undefined +// +// CP932 (windows-31J) +// +// this encoding scheme extends Shift-JIS in the following way +// +// eb..ec undefined (marked as lead bytes - see notes below) +// ed..ee lead byte of NEC-selected IBM extended characters +// ef undefined (marked as lead byte - see notes below) +// f0..f9 lead byte of User defined GAIJI (see note below) +// fa..fc lead byte of IBM extended characters +// fd..ff undefined +// +// +// Notes +// +// JISX 0201:1976/1997 roman +// this is the same as ASCII but with 0x5c (ASCII code for '\') +// representing the Yen currency symbol '¥' (U+00a5) +// This mapping is contentious, some conversion packages implent it +// others do not. 
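+// (In this implementation the ¥/overline substitution is applied only when the
+// translator is created with the "shiftjis" argument; plain cp932 keeps the
+// ASCII mapping — see fromCP932 below.)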
+// The mapping files from The Unicode Consortium show cp932 mapping +// plain ascii in the range 00..7f whereas shift-jis maps 0x5c ('\') to the yen +// symbol (¥) and 0x7e ('~') to overline (¯) +// +// CP932 double-byte character codes: +// +// eb-ec, ef, f0-f9: +// Marked as DBCS LEAD BYTEs in the unicode mapping data +// obtained from: +// https://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP932.TXT +// +// but there are no defined mappings for codes in this range. +// It is not clear whether or not an implementation should +// consume one or two bytes before emitting an error char. + +const ( + kanaPages = 1 + kanaPageSize = 63 + kanaChar0 = 0xa1 + + cp932Pages = 45 // 81..84, 87..9f, e0..ea, ed..ee, fa..fc + cp932PageSize = 189 // 40..fc (including 7f) + cp932Char0 = 0x40 +) + +type jisTables struct { + page0 [256]rune + dbcsoff [256]int + cp932 []rune +} + +type translateFromCP932 struct { + tables *jisTables + scratch []byte +} + +func (p *translateFromCP932) Translate(data []byte, eof bool) (int, []byte, error) { + tables := p.tables + p.scratch = p.scratch[:0] + n := 0 + for i := 0; i < len(data); i++ { + b := data[i] + r := tables.page0[b] + if r != -1 { + p.scratch = appendRune(p.scratch, r) + n++ + continue + } + // DBCS + i++ + if i >= len(data) { + break + } + pnum := tables.dbcsoff[b] + ix := int(data[i]) - cp932Char0 + if pnum == -1 || ix < 0 || ix >= cp932PageSize { + r = utf8.RuneError + } else { + r = tables.cp932[pnum*cp932PageSize+ix] + } + p.scratch = appendRune(p.scratch, r) + n += 2 + } + return n, p.scratch, nil +} + +type cp932Key bool + +func fromCP932(arg string) (Translator, error) { + shiftJIS := arg == "shiftjis" + tables, err := cache(cp932Key(shiftJIS), func() (interface{}, error) { + tables := new(jisTables) + kana, err := jisGetMap("jisx0201kana.dat", kanaPageSize, kanaPages) + if err != nil { + return nil, err + } + tables.cp932, err = jisGetMap("cp932.dat", cp932PageSize, cp932Pages) + if err != nil { + return nil, err + } + + // jisx0201kana is mapped into 0xA1..0xDF + for i := 0; i < kanaPageSize; i++ { + tables.page0[i+kanaChar0] = kana[i] + } + + // 00..7f same as ascii in cp932 + for i := rune(0); i < 0x7f; i++ { + tables.page0[i] = i + } + + if shiftJIS { + // shift-jis uses JIS X 0201 for the ASCII range + // this is the same as ASCII apart from + // 0x5c ('\') maps to yen symbol (¥) and 0x7e ('~') maps to overline (¯) + tables.page0['\\'] = '¥' + tables.page0['~'] = '¯' + } + + // pre-calculate DBCS page numbers to mapping file page numbers + // and mark codes in page0 that are DBCS lead bytes + pnum := 0 + for i := 0x81; i <= 0x84; i++ { + tables.page0[i] = -1 + tables.dbcsoff[i] = pnum + pnum++ + } + for i := 0x87; i <= 0x9f; i++ { + tables.page0[i] = -1 + tables.dbcsoff[i] = pnum + pnum++ + } + for i := 0xe0; i <= 0xea; i++ { + tables.page0[i] = -1 + tables.dbcsoff[i] = pnum + pnum++ + } + if shiftJIS { + return tables, nil + } + // add in cp932 extensions + for i := 0xed; i <= 0xee; i++ { + tables.page0[i] = -1 + tables.dbcsoff[i] = pnum + pnum++ + } + for i := 0xfa; i <= 0xfc; i++ { + tables.page0[i] = -1 + tables.dbcsoff[i] = pnum + pnum++ + } + return tables, nil + }) + + if err != nil { + return nil, err + } + + return &translateFromCP932{tables: tables.(*jisTables)}, nil +} + +func jisGetMap(name string, pgsize, npages int) ([]rune, error) { + data, err := readFile(name) + if err != nil { + return nil, err + } + m := []rune(string(data)) + if len(m) != pgsize*npages { + return nil, fmt.Errorf("%q: incorrect length data", 
name) + } + return m, nil +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/file.go b/vendor/github.com/rogpeppe/go-charset/charset/file.go new file mode 100644 index 000000000..a0c26225e --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/file.go @@ -0,0 +1,40 @@ +package charset + +import ( + "io" + "io/ioutil" + "os" + "path/filepath" +) + +var files = make(map[string]func() (io.ReadCloser, error)) + +// RegisterDataFile registers the existence of a given data +// file with the given name that may be used by a character-set converter. +// It is intended to be used by packages that wish to embed +// data in the executable binary, and should not be +// used normally. +func RegisterDataFile(name string, open func() (io.ReadCloser, error)) { + files[name] = open +} + +// CharsetDir gives the location of the default data file directory. +// This directory will be used for files with names that have not +// been registered with RegisterDataFile. +var CharsetDir = "/usr/local/lib/go-charset/datafiles" + +func readFile(name string) (data []byte, err error) { + var r io.ReadCloser + if open := files[name]; open != nil { + r, err = open() + if err != nil { + return + } + } else { + r, err = os.Open(filepath.Join(CharsetDir, name)) + if err != nil { + return + } + } + return ioutil.ReadAll(r) +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/local.go b/vendor/github.com/rogpeppe/go-charset/charset/local.go new file mode 100644 index 000000000..9776b962f --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/local.go @@ -0,0 +1,162 @@ +package charset + +import ( + "encoding/json" + "fmt" + "os" + "sync" +) + +var ( + readLocalCharsetsOnce sync.Once + localCharsets = make(map[string]*localCharset) +) + +type localCharset struct { + Charset + arg string + *class +} + +// A class of character sets. +// Each class can be instantiated with an argument specified in the config file. +// Many character sets can use a single class. +type class struct { + from, to func(arg string) (Translator, error) +} + +// The set of classes, indexed by class name. +var classes = make(map[string]*class) + +func registerClass(charset string, from, to func(arg string) (Translator, error)) { + classes[charset] = &class{from, to} +} + +type localFactory struct{} + +func (f localFactory) TranslatorFrom(name string) (Translator, error) { + f.init() + name = NormalizedName(name) + cs := localCharsets[name] + if cs == nil { + return nil, fmt.Errorf("character set %q not found", name) + } + if cs.from == nil { + return nil, fmt.Errorf("cannot translate from %q", name) + } + return cs.from(cs.arg) +} + +func (f localFactory) TranslatorTo(name string) (Translator, error) { + f.init() + name = NormalizedName(name) + cs := localCharsets[name] + if cs == nil { + return nil, fmt.Errorf("character set %q not found", name) + } + if cs.to == nil { + return nil, fmt.Errorf("cannot translate to %q", name) + } + return cs.to(cs.arg) +} + +func (f localFactory) Names() []string { + f.init() + var names []string + for name, cs := range localCharsets { + // add names only for non-aliases. + if localCharsets[cs.Name] == cs { + names = append(names, name) + } + } + return names +} + +func (f localFactory) Info(name string) *Charset { + f.init() + lcs := localCharsets[NormalizedName(name)] + if lcs == nil { + return nil + } + // copy the charset info so that callers can't mess with it. 
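+	// (The Charset struct is copied by value here, so callers cannot mutate
+	// the registry entry through the returned pointer.)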
+ cs := lcs.Charset + return &cs +} + +func (f localFactory) init() { + readLocalCharsetsOnce.Do(readLocalCharsets) +} + +// charsetEntry is the data structure for one entry in the JSON config file. +// If Alias is non-empty, it should be the canonical name of another +// character set; otherwise Class should be the name +// of an entry in classes, and Arg is the argument for +// instantiating it. +type charsetEntry struct { + Aliases []string + Desc string + Class string + Arg string +} + +// readCharsets reads the JSON config file. +// It's done once only, when first needed. +func readLocalCharsets() { + csdata, err := readFile("charsets.json") + if err != nil { + fmt.Fprintf(os.Stderr, "charset: cannot open \"charsets.json\": %v\n", err) + return + } + + var entries map[string]charsetEntry + err = json.Unmarshal(csdata, &entries) + if err != nil { + fmt.Fprintf(os.Stderr, "charset: cannot decode config file: %v\n", err) + } + for name, e := range entries { + class := classes[e.Class] + if class == nil { + continue + } + name = NormalizedName(name) + for i, a := range e.Aliases { + e.Aliases[i] = NormalizedName(a) + } + cs := &localCharset{ + Charset: Charset{ + Name: name, + Aliases: e.Aliases, + Desc: e.Desc, + NoFrom: class.from == nil, + NoTo: class.to == nil, + }, + arg: e.Arg, + class: class, + } + localCharsets[cs.Name] = cs + for _, a := range cs.Aliases { + localCharsets[a] = cs + } + } +} + +// A general cache store that local character set translators +// can use for persistent storage of data. +var ( + cacheMutex sync.Mutex + cacheStore = make(map[interface{}]interface{}) +) + +func cache(key interface{}, f func() (interface{}, error)) (interface{}, error) { + cacheMutex.Lock() + defer cacheMutex.Unlock() + if x := cacheStore[key]; x != nil { + return x, nil + } + x, err := f() + if err != nil { + return nil, err + } + cacheStore[key] = x + return x, err +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/utf16.go b/vendor/github.com/rogpeppe/go-charset/charset/utf16.go new file mode 100644 index 000000000..ebde794c9 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/utf16.go @@ -0,0 +1,110 @@ +package charset + +import ( + "encoding/binary" + "errors" + "unicode/utf8" +) + +func init() { + registerClass("utf16", fromUTF16, toUTF16) +} + +type translateFromUTF16 struct { + first bool + endian binary.ByteOrder + scratch []byte +} + +func (p *translateFromUTF16) Translate(data []byte, eof bool) (int, []byte, error) { + data = data[0 : len(data)&^1] // round to even number of bytes. 
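+	// Any odd trailing byte is not counted in n, so the caller re-presents it
+	// together with the next chunk of input.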
+ if len(data) < 2 { + return 0, nil, nil + } + n := 0 + if p.first && p.endian == nil { + switch binary.BigEndian.Uint16(data) { + case 0xfeff: + p.endian = binary.BigEndian + data = data[2:] + n += 2 + case 0xfffe: + p.endian = binary.LittleEndian + data = data[2:] + n += 2 + default: + p.endian = guessEndian(data) + } + p.first = false + } + + p.scratch = p.scratch[:0] + for ; len(data) > 0; data = data[2:] { + p.scratch = appendRune(p.scratch, rune(p.endian.Uint16(data))) + n += 2 + } + return n, p.scratch, nil +} + +func guessEndian(data []byte) binary.ByteOrder { + // XXX TODO + return binary.LittleEndian +} + +type translateToUTF16 struct { + first bool + endian binary.ByteOrder + scratch []byte +} + +func (p *translateToUTF16) Translate(data []byte, eof bool) (int, []byte, error) { + p.scratch = ensureCap(p.scratch[:0], (len(data)+1)*2) + if p.first { + p.scratch = p.scratch[0:2] + p.endian.PutUint16(p.scratch, 0xfeff) + p.first = false + } + n := 0 + for len(data) > 0 { + if !utf8.FullRune(data) && !eof { + break + } + r, size := utf8.DecodeRune(data) + // TODO if r > 65535? + + slen := len(p.scratch) + p.scratch = p.scratch[0 : slen+2] + p.endian.PutUint16(p.scratch[slen:], uint16(r)) + data = data[size:] + n += size + } + return n, p.scratch, nil +} + +func getEndian(arg string) (binary.ByteOrder, error) { + switch arg { + case "le": + return binary.LittleEndian, nil + case "be": + return binary.BigEndian, nil + case "": + return nil, nil + } + return nil, errors.New("charset: unknown utf16 endianness") +} + +func fromUTF16(arg string) (Translator, error) { + endian, err := getEndian(arg) + if err != nil { + return nil, err + } + return &translateFromUTF16{first: true, endian: endian}, nil +} + +func toUTF16(arg string) (Translator, error) { + endian, err := getEndian(arg) + if err != nil { + return nil, err + } + return &translateToUTF16{first: false, endian: endian}, nil +} diff --git a/vendor/github.com/rogpeppe/go-charset/charset/utf8.go b/vendor/github.com/rogpeppe/go-charset/charset/utf8.go new file mode 100644 index 000000000..23980b334 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/charset/utf8.go @@ -0,0 +1,51 @@ +package charset + +import ( + "unicode/utf8" +) + +func init() { + registerClass("utf8", toUTF8, toUTF8) +} + +type translateToUTF8 struct { + scratch []byte +} + +var errorBytes = []byte(string(utf8.RuneError)) + +const errorRuneLen = len(string(utf8.RuneError)) + +func (p *translateToUTF8) Translate(data []byte, eof bool) (int, []byte, error) { + p.scratch = ensureCap(p.scratch, (len(data))*errorRuneLen) + buf := p.scratch[:0] + for i := 0; i < len(data); { + // fast path for ASCII + if b := data[i]; b < utf8.RuneSelf { + buf = append(buf, b) + i++ + continue + } + _, size := utf8.DecodeRune(data[i:]) + if size == 1 { + if !eof && !utf8.FullRune(data) { + // When DecodeRune has converted only a single + // byte, we know there must be some kind of error + // because we know the byte's not ASCII. + // If we aren't at EOF, and it's an incomplete + // rune encoding, then we return to process + // the final bytes in a subsequent call. + return i, buf, nil + } + buf = append(buf, errorBytes...) + } else { + buf = append(buf, data[i:i+size]...) 
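+			// a complete, correctly encoded rune: copy its bytes through unchanged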
+ } + i += size + } + return len(data), buf, nil +} + +func toUTF8(arg string) (Translator, error) { + return new(translateToUTF8), nil +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_big5.dat.go b/vendor/github.com/rogpeppe/go-charset/data/data_big5.dat.go new file mode 100644 index 000000000..398ebe339 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_big5.dat.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("big5.dat", func() (io.ReadCloser, error) { + r := strings.NewReader("\u3000,、。.・;:?!︰…‥﹐﹑﹒·﹔﹕﹖﹗︲–︱—︳�︴﹏()︵︶{}︷︸〔〕︹︺【】︻︼《》︽︾〈〉︿﹀「」﹁﹂『』﹃﹄﹙﹚﹛﹜﹝﹞‘’“”〝〞‵′#&*※§〃○●△▲◎☆★◇◆□■▽▼㊣℅‾�_�﹉﹊﹍﹎﹋﹌#&*+-×÷±√<>=≤≥≠∞≒≡﹢﹣﹤﹥﹦∼∩∪⊥∠∟⊿㏒㏑∫∮∵∴♀♂♁☉↑↓←→↖↗↙↘∥∣��/\$¥〒¢£%@℃℉$%@㏕㎜㎝㎞㏎㎡㎎㎏㏄°兙兛兞兝兡兣嗧瓩糎▁▂▃▄▅▆▇█▏▎▍▌▋▊▉┼┴┬┤├▔─│▕┌┐└┘╭╮╰╯═╞╪╡◢◣◥◤╱╲╳0123456789ⅠⅡⅢⅣⅤⅥⅦⅧⅨⅩ〡〢〣〤〥〦〧〨〩�卄�ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩαβγδεζηθικλμνξοπρστυφχψωㄅㄆㄇㄈㄉㄊㄋㄌㄍㄎㄏㄐㄑㄒㄓㄔㄕㄖㄗㄘㄙㄚㄛㄜㄝㄞㄟㄠㄡㄢㄣㄤㄥㄦㄧㄨㄩ˙ˉˊˇˋ���������������������������������������������������������������一乙丁七乃九了二人儿入八几刀刁力匕十卜又三下丈上丫丸凡久么也乞于亡兀刃勺千叉口土士夕大女子孑孓寸小尢尸山川工己已巳巾干廾弋弓才丑丐不中丰丹之尹予云井互五亢仁什仃仆仇仍今介仄元允內六兮公冗凶分切刈勻勾勿化匹午升卅卞厄友及反壬天夫太夭孔少尤尺屯巴幻廿弔引心戈戶手扎支文斗斤方日曰月木欠止歹毋比毛氏水火爪父爻片牙牛犬王丙世丕且丘主乍乏乎以付仔仕他仗代令仙仞充兄冉冊冬凹出凸刊加功包匆北匝仟半卉卡占卯卮去可古右召叮叩叨叼司叵叫另只史叱台句叭叻四囚外央失奴奶孕它尼巨巧左市布平幼弁弘弗必戊打扔扒扑斥旦朮本未末札正母民氐永汁汀氾犯玄玉瓜瓦甘生用甩田由甲申疋白皮皿目矛矢石示禾穴立丞丟乒乓乩亙交亦亥仿伉伙伊伕伍伐休伏仲件任仰仳份企伋光兇兆先全共再冰列刑划刎刖劣匈匡匠印危吉吏同吊吐吁吋各向名合吃后吆吒因回囝圳地在圭圬圯圩夙多夷夸妄奸妃好她如妁字存宇守宅安寺尖屹州帆并年式弛忙忖戎戌戍成扣扛托收早旨旬旭曲曳有朽朴朱朵次此死氖汝汗汙江池汐汕污汛汍汎灰牟牝百竹米糸缶羊羽老考而耒耳聿肉肋肌臣自至臼舌舛舟艮色艾虫血行衣西阡串亨位住佇佗佞伴佛何估佐佑伽伺伸佃佔似但佣作你伯低伶余佝佈佚兌克免兵冶冷別判利刪刨劫助努劬匣即卵吝吭吞吾否呎吧呆呃吳呈呂君吩告吹吻吸吮吵吶吠吼呀吱含吟听囪困囤囫坊坑址坍均坎圾坐坏圻壯夾妝妒妨妞妣妙妖妍妤妓妊妥孝孜孚孛完宋宏尬局屁尿尾岐岑岔岌巫希序庇床廷弄弟彤形彷役忘忌志忍忱快忸忪戒我抄抗抖技扶抉扭把扼找批扳抒扯折扮投抓抑抆改攻攸旱更束李杏材村杜杖杞杉杆杠杓杗步每求汞沙沁沈沉沅沛汪決沐汰沌汨沖沒汽沃汲汾汴沆汶沍沔沘沂灶灼災灸牢牡牠狄狂玖甬甫男甸皂盯矣私秀禿究系罕肖肓肝肘肛肚育良芒芋芍見角言谷豆豕貝赤走足身車辛辰迂迆迅迄巡邑邢邪邦那酉釆里防阮阱阪阬並乖乳事些亞享京佯依侍佳使佬供例來侃佰併侈佩佻侖佾侏侑佺兔兒兕兩具其典冽函刻券刷刺到刮制剁劾劻卒協卓卑卦卷卸卹取叔受味呵咖呸咕咀呻呷咄咒咆呼咐呱呶和咚呢周咋命咎固垃坷坪坩坡坦坤坼夜奉奇奈奄奔妾妻委妹妮姑姆姐姍始姓姊妯妳姒姅孟孤季宗定官宜宙宛尚屈居屆岷岡岸岩岫岱岳帘帚帖帕帛帑幸庚店府底庖延弦弧弩往征彿彼忝忠忽念忿怏怔怯怵怖怪怕怡性怩怫怛或戕房戾所承拉拌拄抿拂抹拒招披拓拔拋拈抨抽押拐拙拇拍抵拚抱拘拖拗拆抬拎放斧於旺昔易昌昆昂明昀昏昕昊昇服朋杭枋枕東果杳杷枇枝林杯杰板枉松析杵枚枓杼杪杲欣武歧歿氓氛泣注泳沱泌泥河沽沾沼波沫法泓沸泄油況沮泗泅泱沿治泡泛泊沬泯泜泖泠炕炎炒炊炙爬爭爸版牧物狀狎狙狗狐玩玨玟玫玥甽疝疙疚的盂盲直知矽社祀祁秉秈空穹竺糾罔羌羋者肺肥肢肱股肫肩肴肪肯臥臾舍芳芝芙芭芽芟芹花芬芥芯芸芣芰芾芷虎虱初表軋迎返近邵邸邱邶采金長門阜陀阿阻附陂隹雨青非亟亭亮信侵侯便俠俑俏保促侶俘俟俊俗侮俐俄係俚俎俞侷兗冒冑冠剎剃削前剌剋則勇勉勃勁匍南卻厚叛咬哀咨哎哉咸咦咳哇哂咽咪品哄哈咯咫咱咻咩咧咿囿垂型垠垣垢城垮垓奕契奏奎奐姜姘姿姣姨娃姥姪姚姦威姻孩宣宦室客宥封屎屏屍屋峙峒巷帝帥帟幽庠度建弈弭彥很待徊律徇後徉怒思怠急怎怨恍恰恨恢恆恃恬恫恪恤扁拜挖按拼拭持拮拽指拱拷拯括拾拴挑挂政故斫施既春昭映昧是星昨昱昤曷柿染柱柔某柬架枯柵柩柯柄柑枴柚查枸柏柞柳枰柙柢柝柒歪殃殆段毒毗氟泉洋洲洪流津洌洱洞洗活洽派洶洛泵洹洧洸洩洮洵洎洫炫為炳炬炯炭炸炮炤爰牲牯牴狩狠狡玷珊玻玲珍珀玳甚甭畏界畎畋疫疤疥疢疣癸皆皇皈盈盆盃盅省盹相眉看盾盼眇矜砂研砌砍祆祉祈祇禹禺科秒秋穿突竿竽籽紂紅紀紉紇約紆缸美羿耄耐耍耑耶胖胥胚胃胄背胡胛胎胞胤胝致舢苧范茅苣苛苦茄若茂茉苒苗英茁苜苔苑苞苓苟苯茆虐虹虻虺衍衫要觔計訂訃貞負赴赳趴軍軌述迦迢迪迥迭迫迤迨郊郎郁郃酋酊重閂限陋陌降面革韋韭音頁風飛食首香乘亳倌倍倣俯倦倥俸倩倖倆值借倚倒們俺倀倔倨俱倡個候倘俳修倭倪俾倫倉兼冤冥冢凍凌准凋剖剜剔剛剝匪卿原厝叟哨唐唁唷哼哥哲唆哺唔哩哭員唉哮哪哦唧唇哽唏圃圄埂埔埋埃堉夏套奘奚娑娘娜娟娛娓姬娠娣娩娥娌娉孫屘宰害家宴宮宵容宸射屑展屐峭峽峻峪峨峰島崁峴差席師庫庭座弱徒徑徐恙恣恥恐恕恭恩息悄悟悚悍悔悌悅悖扇拳挈拿捎挾振捕捂捆捏捉挺捐挽挪挫挨捍捌效敉料旁旅時晉晏晃晒晌晅晁書朔朕朗校核案框桓根桂桔栩梳栗桌桑栽柴桐桀格桃株桅栓栘桁殊殉殷氣氧氨氦氤泰浪涕消涇浦浸海浙涓浬涉浮浚浴浩涌涊浹涅浥涔烊烘烤烙烈烏爹特狼狹狽狸狷玆班琉珮珠珪珞畔畝畜畚留疾病症疲疳疽疼疹痂疸皋皰益盍盎眩真眠眨矩砰砧砸砝破砷砥砭砠砟砲祕祐祠祟祖神祝祗祚秤秣秧租秦秩秘窄窈站笆笑粉紡紗紋紊素索純紐紕級紜納紙紛缺罟羔翅翁耆耘耕耙耗耽耿胱脂胰脅胭胴脆胸胳脈能脊胼胯臭臬舀舐航舫舨般芻茫荒荔荊茸荐草茵茴荏茲茹茶茗荀茱茨荃虔蚊蚪蚓蚤蚩蚌蚣蚜衰衷袁袂衽衹記訐討訌訕訊託訓訖訏訑豈豺豹財貢起躬軒軔軏辱送逆迷退迺迴逃追逅迸邕郡郝郢酒配酌釘針釗釜釙閃院陣陡陛陝除陘陞隻飢馬骨高鬥鬲鬼乾偺偽停假偃偌做偉健偶偎偕偵側偷偏倏偯偭兜冕凰剪副勒務勘動匐匏匙匿區匾參曼商啪啦啄啞啡啃啊唱啖問啕唯啤唸售啜唬啣唳啁啗圈國圉域堅堊堆埠埤基堂堵執培夠奢娶婁婉婦婪婀娼婢婚婆婊孰寇寅寄寂宿密尉專將屠屜屝崇崆崎崛崖崢崑崩崔崙崤崧崗巢常帶帳帷康庸庶庵庾張強彗彬彩彫得徙從徘御徠徜恿患悉悠您惋悴惦悽情悻悵惜悼惘惕惆惟悸惚惇戚戛扈掠控捲掖探接捷捧掘措捱掩掉掃掛捫推掄授掙採掬排掏掀捻捩捨捺敝敖救教敗啟敏敘敕敔斜斛斬族旋旌旎晝晚晤晨晦晞曹勗望梁梯梢梓梵桿桶梱梧梗械梃棄梭梆梅梔條梨梟梡梂欲殺毫毬氫涎涼淳淙液淡淌淤添淺清淇淋涯淑涮淞淹涸混淵淅淒渚涵淚淫淘淪深淮淨淆淄涪淬涿淦烹焉焊烽烯爽牽犁猜猛猖猓猙率琅琊球理現琍瓠瓶瓷甜產略畦畢異疏痔痕疵痊痍皎盔盒盛眷眾眼眶眸眺硫硃硎祥票祭移窒窕笠笨笛第符笙笞笮粒粗粕絆絃統紮紹紼絀細紳組累終紲紱缽羞羚翌翎習耜聊聆脯脖脣脫脩脰脤舂舵舷舶船莎莞莘荸莢莖莽莫莒莊莓
莉莠荷荻荼莆莧處彪蛇蛀蚶蛄蚵蛆蛋蚱蚯蛉術袞袈被袒袖袍袋覓規訪訝訣訥許設訟訛訢豉豚販責貫貨貪貧赧赦趾趺軛軟這逍通逗連速逝逐逕逞造透逢逖逛途部郭都酗野釵釦釣釧釭釩閉陪陵陳陸陰陴陶陷陬雀雪雩章竟頂頃魚鳥鹵鹿麥麻傢傍傅備傑傀傖傘傚最凱割剴創剩勞勝勛博厥啻喀喧啼喊喝喘喂喜喪喔喇喋喃喳單喟唾喲喚喻喬喱啾喉喫喙圍堯堪場堤堰報堡堝堠壹壺奠婷媚婿媒媛媧孳孱寒富寓寐尊尋就嵌嵐崴嵇巽幅帽幀幃幾廊廁廂廄弼彭復循徨惑惡悲悶惠愜愣惺愕惰惻惴慨惱愎惶愉愀愒戟扉掣掌描揀揩揉揆揍插揣提握揖揭揮捶援揪換摒揚揹敞敦敢散斑斐斯普晰晴晶景暑智晾晷曾替期朝棺棕棠棘棗椅棟棵森棧棹棒棲棣棋棍植椒椎棉棚楮棻款欺欽殘殖殼毯氮氯氬港游湔渡渲湧湊渠渥渣減湛湘渤湖湮渭渦湯渴湍渺測湃渝渾滋溉渙湎湣湄湲湩湟焙焚焦焰無然煮焜牌犄犀猶猥猴猩琺琪琳琢琥琵琶琴琯琛琦琨甥甦畫番痢痛痣痙痘痞痠登發皖皓皴盜睏短硝硬硯稍稈程稅稀窘窗窖童竣等策筆筐筒答筍筋筏筑粟粥絞結絨絕紫絮絲絡給絢絰絳善翔翕耋聒肅腕腔腋腑腎脹腆脾腌腓腴舒舜菩萃菸萍菠菅萋菁華菱菴著萊菰萌菌菽菲菊萸萎萄菜萇菔菟虛蛟蛙蛭蛔蛛蛤蛐蛞街裁裂袱覃視註詠評詞証詁詔詛詐詆訴診訶詖象貂貯貼貳貽賁費賀貴買貶貿貸越超趁跎距跋跚跑跌跛跆軻軸軼辜逮逵週逸進逶鄂郵鄉郾酣酥量鈔鈕鈣鈉鈞鈍鈐鈇鈑閔閏開閑間閒閎隊階隋陽隅隆隍陲隄雁雅雄集雇雯雲韌項順須飧飪飯飩飲飭馮馭黃黍黑亂傭債傲傳僅傾催傷傻傯僇剿剷剽募勦勤勢勣匯嗟嗨嗓嗦嗎嗜嗇嗑嗣嗤嗯嗚嗡嗅嗆嗥嗉園圓塞塑塘塗塚塔填塌塭塊塢塒塋奧嫁嫉嫌媾媽媼媳嫂媲嵩嵯幌幹廉廈弒彙徬微愚意慈感想愛惹愁愈慎慌慄慍愾愴愧愍愆愷戡戢搓搾搞搪搭搽搬搏搜搔損搶搖搗搆敬斟新暗暉暇暈暖暄暘暍會榔業楚楷楠楔極椰概楊楨楫楞楓楹榆楝楣楛歇歲毀殿毓毽溢溯滓溶滂源溝滇滅溥溘溼溺溫滑準溜滄滔溪溧溴煎煙煩煤煉照煜煬煦煌煥煞煆煨煖爺牒猷獅猿猾瑯瑚瑕瑟瑞瑁琿瑙瑛瑜當畸瘀痰瘁痲痱痺痿痴痳盞盟睛睫睦睞督睹睪睬睜睥睨睢矮碎碰碗碘碌碉硼碑碓硿祺祿禁萬禽稜稚稠稔稟稞窟窠筷節筠筮筧粱粳粵經絹綑綁綏絛置罩罪署義羨群聖聘肆肄腱腰腸腥腮腳腫腹腺腦舅艇蒂葷落萱葵葦葫葉葬葛萼萵葡董葩葭葆虞虜號蛹蜓蜈蜇蜀蛾蛻蜂蜃蜆蜊衙裟裔裙補裘裝裡裊裕裒覜解詫該詳試詩詰誇詼詣誠話誅詭詢詮詬詹詻訾詨豢貊貉賊資賈賄貲賃賂賅跡跟跨路跳跺跪跤跦躲較載軾輊辟農運遊道遂達逼違遐遇遏過遍遑逾遁鄒鄗酬酪酩釉鈷鉗鈸鈽鉀鈾鉛鉋鉤鉑鈴鉉鉍鉅鈹鈿鉚閘隘隔隕雍雋雉雊雷電雹零靖靴靶預頑頓頊頒頌飼飴飽飾馳馱馴髡鳩麂鼎鼓鼠僧僮僥僖僭僚僕像僑僱僎僩兢凳劃劂匱厭嗾嘀嘛嘗嗽嘔嘆嘉嘍嘎嗷嘖嘟嘈嘐嗶團圖塵塾境墓墊塹墅塽壽夥夢夤奪奩嫡嫦嫩嫗嫖嫘嫣孵寞寧寡寥實寨寢寤察對屢嶄嶇幛幣幕幗幔廓廖弊彆彰徹慇愿態慷慢慣慟慚慘慵截撇摘摔撤摸摟摺摑摧搴摭摻敲斡旗旖暢暨暝榜榨榕槁榮槓構榛榷榻榫榴槐槍榭槌榦槃榣歉歌氳漳演滾漓滴漩漾漠漬漏漂漢滿滯漆漱漸漲漣漕漫漯澈漪滬漁滲滌滷熔熙煽熊熄熒爾犒犖獄獐瑤瑣瑪瑰瑭甄疑瘧瘍瘋瘉瘓盡監瞄睽睿睡磁碟碧碳碩碣禎福禍種稱窪窩竭端管箕箋筵算箝箔箏箸箇箄粹粽精綻綰綜綽綾綠緊綴網綱綺綢綿綵綸維緒緇綬罰翠翡翟聞聚肇腐膀膏膈膊腿膂臧臺與舔舞艋蓉蒿蓆蓄蒙蒞蒲蒜蓋蒸蓀蓓蒐蒼蓑蓊蜿蜜蜻蜢蜥蜴蜘蝕蜷蜩裳褂裴裹裸製裨褚裯誦誌語誣認誡誓誤說誥誨誘誑誚誧豪貍貌賓賑賒赫趙趕跼輔輒輕輓辣遠遘遜遣遙遞遢遝遛鄙鄘鄞酵酸酷酴鉸銀銅銘銖鉻銓銜銨鉼銑閡閨閩閣閥閤隙障際雌雒需靼鞅韶頗領颯颱餃餅餌餉駁骯骰髦魁魂鳴鳶鳳麼鼻齊億儀僻僵價儂儈儉儅凜劇劈劉劍劊勰厲嘮嘻嘹嘲嘿嘴嘩噓噎噗噴嘶嘯嘰墀墟增墳墜墮墩墦奭嬉嫻嬋嫵嬌嬈寮寬審寫層履嶝嶔幢幟幡廢廚廟廝廣廠彈影德徵慶慧慮慝慕憂慼慰慫慾憧憐憫憎憬憚憤憔憮戮摩摯摹撞撲撈撐撰撥撓撕撩撒撮播撫撚撬撙撢撳敵敷數暮暫暴暱樣樟槨樁樞標槽模樓樊槳樂樅槭樑歐歎殤毅毆漿潼澄潑潦潔澆潭潛潸潮澎潺潰潤澗潘滕潯潠潟熟熬熱熨牖犛獎獗瑩璋璃瑾璀畿瘠瘩瘟瘤瘦瘡瘢皚皺盤瞎瞇瞌瞑瞋磋磅確磊碾磕碼磐稿稼穀稽稷稻窯窮箭箱範箴篆篇篁箠篌糊締練緯緻緘緬緝編緣線緞緩綞緙緲緹罵罷羯翩耦膛膜膝膠膚膘蔗蔽蔚蓮蔬蔭蔓蔑蔣蔡蔔蓬蔥蓿蔆螂蝴蝶蝠蝦蝸蝨蝙蝗蝌蝓衛衝褐複褒褓褕褊誼諒談諄誕請諸課諉諂調誰論諍誶誹諛豌豎豬賠賞賦賤賬賭賢賣賜質賡赭趟趣踫踐踝踢踏踩踟踡踞躺輝輛輟輩輦輪輜輞輥適遮遨遭遷鄰鄭鄧鄱醇醉醋醃鋅銻銷鋪銬鋤鋁銳銼鋒鋇鋰銲閭閱霄霆震霉靠鞍鞋鞏頡頫頜颳養餓餒餘駝駐駟駛駑駕駒駙骷髮髯鬧魅魄魷魯鴆鴉鴃麩麾黎墨齒儒儘儔儐儕冀冪凝劑劓勳噙噫噹噩噤噸噪器噥噱噯噬噢噶壁墾壇壅奮嬝嬴學寰導彊憲憑憩憊懍憶憾懊懈戰擅擁擋撻撼據擄擇擂操撿擒擔撾整曆曉暹曄曇暸樽樸樺橙橫橘樹橄橢橡橋橇樵機橈歙歷氅濂澱澡濃澤濁澧澳激澹澶澦澠澴熾燉燐燒燈燕熹燎燙燜燃燄獨璜璣璘璟璞瓢甌甍瘴瘸瘺盧盥瞠瞞瞟瞥磨磚磬磧禦積穎穆穌穋窺篙簑築篤篛篡篩篦糕糖縊縑縈縛縣縞縝縉縐罹羲翰翱翮耨膳膩膨臻興艘艙蕊蕙蕈蕨蕩蕃蕉蕭蕪蕞螃螟螞螢融衡褪褲褥褫褡親覦諦諺諫諱謀諜諧諮諾謁謂諷諭諳諶諼豫豭貓賴蹄踱踴蹂踹踵輻輯輸輳辨辦遵遴選遲遼遺鄴醒錠錶鋸錳錯錢鋼錫錄錚錐錦錡錕錮錙閻隧隨險雕霎霑霖霍霓霏靛靜靦鞘頰頸頻頷頭頹頤餐館餞餛餡餚駭駢駱骸骼髻髭鬨鮑鴕鴣鴦鴨鴒鴛默黔龍龜優償儡儲勵嚎嚀嚐嚅嚇嚏壕壓壑壎嬰嬪嬤孺尷屨嶼嶺嶽嶸幫彌徽應懂懇懦懋戲戴擎擊擘擠擰擦擬擱擢擭斂斃曙曖檀檔檄檢檜櫛檣橾檗檐檠歜殮毚氈濘濱濟濠濛濤濫濯澀濬濡濩濕濮濰燧營燮燦燥燭燬燴燠爵牆獰獲璩環璦璨癆療癌盪瞳瞪瞰瞬瞧瞭矯磷磺磴磯礁禧禪穗窿簇簍篾篷簌篠糠糜糞糢糟糙糝縮績繆縷縲繃縫總縱繅繁縴縹繈縵縿縯罄翳翼聱聲聰聯聳臆臃膺臂臀膿膽臉膾臨舉艱薪薄蕾薜薑薔薯薛薇薨薊虧蟀蟑螳蟒蟆螫螻螺蟈蟋褻褶襄褸褽覬謎謗謙講謊謠謝謄謐豁谿豳賺賽購賸賻趨蹉蹋蹈蹊轄輾轂轅輿避遽還邁邂邀鄹醣醞醜鍍鎂錨鍵鍊鍥鍋錘鍾鍬鍛鍰鍚鍔闊闋闌闈闆隱隸雖霜霞鞠韓顆颶餵騁駿鮮鮫鮪鮭鴻鴿麋黏點黜黝黛鼾齋叢嚕嚮壙壘嬸彝懣戳擴擲擾攆擺擻擷斷曜朦檳檬櫃檻檸櫂檮檯歟歸殯瀉瀋濾瀆濺瀑瀏燻燼燾燸獷獵璧璿甕癖癘癒瞽瞿瞻瞼礎禮穡穢穠竄竅簫簧簪簞簣簡糧織繕繞繚繡繒繙罈翹翻職聶臍臏舊藏薩藍藐藉薰薺薹薦蟯蟬蟲蟠覆覲觴謨謹謬謫豐贅蹙蹣蹦蹤蹟蹕軀轉轍邇邃邈醫醬釐鎔鎊鎖鎢鎳鎮鎬鎰鎘鎚鎗闔闖闐闕離雜雙雛雞霤鞣鞦鞭韹額顏題顎顓颺餾餿餽餮馥騎髁鬃鬆魏魎魍鯊鯉鯽鯈鯀鵑鵝鵠黠鼕鼬儳嚥壞壟壢寵龐廬懲懷懶懵攀攏曠曝櫥櫝櫚櫓瀛瀟瀨瀚瀝瀕瀘爆爍牘犢獸獺璽瓊瓣疇疆癟癡矇礙禱穫穩簾簿簸簽簷籀繫繭繹繩繪羅繳羶羹羸臘藩藝藪藕藤藥藷蟻蠅蠍蟹蟾襠襟襖襞譁譜識證譚譎譏譆譙贈贊蹼蹲躇蹶蹬蹺蹴轔轎辭邊邋醱醮鏡鏑鏟鏃鏈鏜鏝鏖鏢鏍鏘鏤鏗鏨關隴難霪霧靡韜韻類願顛颼饅饉騖騙鬍鯨鯧鯖鯛鶉鵡鵲鵪鵬麒麗麓麴勸嚨嚷嚶嚴嚼壤孀孃孽寶巉懸懺攘攔攙曦朧櫬瀾瀰瀲爐獻瓏癢癥礦礪礬礫竇競籌籃籍糯糰辮繽繼纂罌耀臚艦藻藹蘑藺蘆蘋蘇蘊蠔蠕襤覺觸議譬警譯譟譫贏贍躉躁躅躂醴釋鐘鐃鏽闡霰飄饒饑馨騫騰騷騵鰓鰍鹹麵黨鼯齟齣齡儷儸囁囀囂夔屬巍懼懾攝攜斕曩櫻欄櫺殲灌爛犧瓖瓔癩矓籐纏續羼蘗蘭蘚蠣蠢蠡蠟襪襬覽譴護譽贓躊躍躋轟辯醺鐮鐳鐵鐺鐸鐲鐫闢霸霹露響顧顥饗驅驃驀騾髏魔魑鰭鰥鶯鶴鷂鶸麝黯鼙齜齦齧儼儻囈囊囉孿巔巒彎懿攤權歡灑灘玀瓤疊癮癬禳籠籟聾聽臟襲襯觼讀贖贗躑躓轡酈鑄鑑鑒霽霾韃韁顫饕驕驍髒鬚鱉鰱鰾鰻鷓鷗鼴齬齪龔囌巖戀攣攫攪曬欐瓚竊籤籣籥纓纖纔臢蘸蘿蠱變邐邏鑣鑠鑤靨顯饜驚驛驗髓體髑鱔鱗鱖鷥麟黴囑壩攬灞癱癲矗罐羈蠶蠹衢讓讒讖艷贛釀鑪靂靈靄韆顰驟鬢魘鱟鷹鷺鹼鹽鼇齷齲廳欖灣籬籮蠻觀躡釁鑲鑰顱饞髖鬣黌灤矚讚鑷韉驢驥纜讜躪釅鑽鑾鑼鱷鱸黷豔鑿鸚爨驪鬱鸛鸞籲ヾゝゞ々ぁあぃいぅうぇえぉおかがきぎくぐけげこごさざしじすずせぜそぞただちぢっつづてでとどなにぬねのはばぱひびぴふぶぷへべぺほぼぽまみむめもゃやゅゆょよらりるれろゎわゐゑをんァアィイゥウェエォオカガキギクグケゲコゴサザシジスズセゼソゾタダチヂッツヅテデトドナニヌネノハバパヒビピフブプヘベペホボポマミムメモャヤュユョヨラリルレロヮワヰヱヲンヴヵヶДЕЁЖЗИЙКЛМУФХЦЧШЩЪЫЬЭЮЯабвгдеёжзийклмнопрстуфхцчшщъыьэюя①②③④⑤⑥⑦⑧⑨⑩⑴⑵⑶⑷⑸⑹⑺⑻⑼⑽���������������������������������������������������������������������������������������������������������������������������������������������������������������乂乜凵匚厂万丌乇亍囗兀屮彳丏冇与丮亓仂仉仈冘勼卬厹圠夃夬尐巿旡殳毌气爿丱丼仨仜仩仡仝仚刌匜卌圢圣夗夯宁宄尒尻屴屳帄庀庂忉戉扐氕氶汃氿氻犮犰玊禸肊阞伎优伬仵伔仱伀价伈伝伂伅伢伓伄仴伒冱刓刉刐劦匢
匟卍厊吇囡囟圮圪圴夼妀奼妅奻奾奷奿孖尕尥屼屺屻屾巟幵庄异弚彴忕忔忏扜扞扤扡扦扢扙扠扚扥旯旮朾朹朸朻机朿朼朳氘汆汒汜汏汊汔汋汌灱牞犴犵玎甪癿穵网艸艼芀艽艿虍襾邙邗邘邛邔阢阤阠阣佖伻佢佉体佤伾佧佒佟佁佘伭伳伿佡冏冹刜刞刡劭劮匉卣卲厎厏吰吷吪呔呅吙吜吥吘吽呏呁吨吤呇囮囧囥坁坅坌坉坋坒夆奀妦妘妠妗妎妢妐妏妧妡宎宒尨尪岍岏岈岋岉岒岊岆岓岕巠帊帎庋庉庌庈庍弅弝彸彶忒忑忐忭忨忮忳忡忤忣忺忯忷忻怀忴戺抃抌抎抏抔抇扱扻扺扰抁抈扷扽扲扴攷旰旴旳旲旵杅杇杙杕杌杈杝杍杚杋毐氙氚汸汧汫沄沋沏汱汯汩沚汭沇沕沜汦汳汥汻沎灴灺牣犿犽狃狆狁犺狅玕玗玓玔玒町甹疔疕皁礽耴肕肙肐肒肜芐芏芅芎芑芓芊芃芄豸迉辿邟邡邥邞邧邠阰阨阯阭丳侘佼侅佽侀侇佶佴侉侄佷佌侗佪侚佹侁佸侐侜侔侞侒侂侕佫佮冞冼冾刵刲刳剆刱劼匊匋匼厒厔咇呿咁咑咂咈呫呺呾呥呬呴呦咍呯呡呠咘呣呧呤囷囹坯坲坭坫坱坰坶垀坵坻坳坴坢坨坽夌奅妵妺姏姎妲姌姁妶妼姃姖妱妽姀姈妴姇孢孥宓宕屄屇岮岤岠岵岯岨岬岟岣岭岢岪岧岝岥岶岰岦帗帔帙弨弢弣弤彔徂彾彽忞忥怭怦怙怲怋怴怊怗怳怚怞怬怢怍怐怮怓怑怌怉怜戔戽抭抴拑抾抪抶拊抮抳抯抻抩抰抸攽斨斻昉旼昄昒昈旻昃昋昍昅旽昑昐曶朊枅杬枎枒杶杻枘枆构杴枍枌杺枟枑枙枃杽极杸杹枔欥殀歾毞氝沓泬泫泮泙沶泔沭泧沷泐泂沺泃泆泭泲泒泝沴沊沝沀泞泀洰泍泇沰泹泏泩泑炔炘炅炓炆炄炑炖炂炚炃牪狖狋狘狉狜狒狔狚狌狑玤玡玭玦玢玠玬玝瓝瓨甿畀甾疌疘皯盳盱盰盵矸矼矹矻矺矷祂礿秅穸穻竻籵糽耵肏肮肣肸肵肭舠芠苀芫芚芘芛芵芧芮芼芞芺芴芨芡芩苂芤苃芶芢虰虯虭虮豖迒迋迓迍迖迕迗邲邴邯邳邰阹阽阼阺陃俍俅俓侲俉俋俁俔俜俙侻侳俛俇俖侺俀侹俬剄剉勀勂匽卼厗厖厙厘咺咡咭咥哏哃茍咷咮哖咶哅哆咠呰咼咢咾呲哞咰垵垞垟垤垌垗垝垛垔垘垏垙垥垚垕壴复奓姡姞姮娀姱姝姺姽姼姶姤姲姷姛姩姳姵姠姾姴姭宨屌峐峘峌峗峋峛峞峚峉峇峊峖峓峔峏峈峆峎峟峸巹帡帢帣帠帤庰庤庢庛庣庥弇弮彖徆怷怹恔恲恞恅恓恇恉恛恌恀恂恟怤恄恘恦恮扂扃拏挍挋拵挎挃拫拹挏挌拸拶挀挓挔拺挕拻拰敁敃斪斿昶昡昲昵昜昦昢昳昫昺昝昴昹昮朏朐柁柲柈枺柜枻柸柘柀枷柅柫柤柟枵柍枳柷柶柮柣柂枹柎柧柰枲柼柆柭柌枮柦柛柺柉柊柃柪柋欨殂殄殶毖毘毠氠氡洨洴洭洟洼洿洒洊泚洳洄洙洺洚洑洀洝浂洁洘洷洃洏浀洇洠洬洈洢洉洐炷炟炾炱炰炡炴炵炩牁牉牊牬牰牳牮狊狤狨狫狟狪狦狣玅珌珂珈珅玹玶玵玴珫玿珇玾珃珆玸珋瓬瓮甮畇畈疧疪癹盄眈眃眄眅眊盷盻盺矧矨砆砑砒砅砐砏砎砉砃砓祊祌祋祅祄秕种秏秖秎窀穾竑笀笁籺籸籹籿粀粁紃紈紁罘羑羍羾耇耎耏耔耷胘胇胠胑胈胂胐胅胣胙胜胊胕胉胏胗胦胍臿舡芔苙苾苹茇苨茀苕茺苫苖苴苬苡苲苵茌苻苶苰苪苤苠苺苳苭虷虴虼虳衁衎衧衪衩觓訄訇赲迣迡迮迠郱邽邿郕郅邾郇郋郈釔釓陔陏陑陓陊陎倞倅倇倓倢倰倛俵俴倳倷倬俶俷倗倜倠倧倵倯倱倎党冔冓凊凄凅凈凎剡剚剒剞剟剕剢勍匎厞唦哢唗唒哧哳哤唚哿唄唈哫唑唅哱唊哻哷哸哠唎唃唋圁圂埌堲埕埒垺埆垽垼垸垶垿埇埐垹埁夎奊娙娖娭娮娕娏娗娊娞娳孬宧宭宬尃屖屔峬峿峮峱峷崀峹帩帨庨庮庪庬弳弰彧恝恚恧恁悢悈悀悒悁悝悃悕悛悗悇悜悎戙扆拲挐捖挬捄捅挶捃揤挹捋捊挼挩捁挴捘捔捙挭捇挳捚捑挸捗捀捈敊敆旆旃旄旂晊晟晇晑朒朓栟栚桉栲栳栻桋桏栖栱栜栵栫栭栯桎桄栴栝栒栔栦栨栮桍栺栥栠欬欯欭欱欴歭肂殈毦毤毨毣毢毧氥浺浣浤浶洍浡涒浘浢浭浯涑涍淯浿涆浞浧浠涗浰浼浟涂涘洯浨涋浾涀涄洖涃浻浽浵涐烜烓烑烝烋缹烢烗烒烞烠烔烍烅烆烇烚烎烡牂牸牷牶猀狺狴狾狶狳狻猁珓珙珥珖玼珧珣珩珜珒珛珔珝珚珗珘珨瓞瓟瓴瓵甡畛畟疰痁疻痄痀疿疶疺皊盉眝眛眐眓眒眣眑眕眙眚眢眧砣砬砢砵砯砨砮砫砡砩砳砪砱祔祛祏祜祓祒祑秫秬秠秮秭秪秜秞秝窆窉窅窋窌窊窇竘笐笄笓笅笏笈笊笎笉笒粄粑粊粌粈粍粅紞紝紑紎紘紖紓紟紒紏紌罜罡罞罠罝罛羖羒翃翂翀耖耾耹胺胲胹胵脁胻脀舁舯舥茳茭荄茙荑茥荖茿荁茦茜茢荂荎茛茪茈茼荍茖茤茠茷茯茩荇荅荌荓茞茬荋茧荈虓虒蚢蚨蚖蚍蚑蚞蚇蚗蚆蚋蚚蚅蚥蚙蚡蚧蚕蚘蚎蚝蚐蚔衃衄衭衵衶衲袀衱衿衯袃衾衴衼訒豇豗豻貤貣赶赸趵趷趶軑軓迾迵适迿迻逄迼迶郖郠郙郚郣郟郥郘郛郗郜郤酐酎酏釕釢釚陜陟隼飣髟鬯乿偰偪偡偞偠偓偋偝偲偈偍偁偛偊偢倕偅偟偩偫偣偤偆偀偮偳偗偑凐剫剭剬剮勖勓匭厜啵啶唼啍啐唴唪啑啢唶唵唰啒啅唌唲啥啎唹啈唭唻啀啋圊圇埻堔埢埶埜埴堀埭埽堈埸堋埳埏堇埮埣埲埥埬埡堎埼堐埧堁堌埱埩埰堍堄奜婠婘婕婧婞娸娵婭婐婟婥婬婓婤婗婃婝婒婄婛婈媎娾婍娹婌婰婩婇婑婖婂婜孲孮寁寀屙崞崋崝崚崠崌崨崍崦崥崏崰崒崣崟崮帾帴庱庴庹庲庳弶弸徛徖徟悊悐悆悾悰悺惓惔惏惤惙惝惈悱惛悷惊悿惃惍惀挲捥掊掂捽掽掞掭掝掗掫掎捯掇掐据掯捵掜捭掮捼掤挻掟捸掅掁掑掍捰敓旍晥晡晛晙晜晢朘桹梇梐梜桭桮梮梫楖桯梣梬梩桵桴梲梏桷梒桼桫桲梪梀桱桾梛梖梋梠梉梤桸桻梑梌梊桽欶欳欷欸殑殏殍殎殌氪淀涫涴涳湴涬淩淢涷淶淔渀淈淠淟淖涾淥淜淝淛淴淊涽淭淰涺淕淂淏淉淐淲淓淽淗淍淣涻烺焍烷焗烴焌烰焄烳焐烼烿焆焓焀烸烶焋焂焎牾牻牼牿猝猗猇猑猘猊猈狿猏猞玈珶珸珵琄琁珽琇琀珺珼珿琌琋珴琈畤畣痎痒痏痋痌痑痐皏皉盓眹眯眭眱眲眴眳眽眥眻眵硈硒硉硍硊硌砦硅硐祤祧祩祪祣祫祡离秺秸秶秷窏窔窐笵筇笴笥笰笢笤笳笘笪笝笱笫笭笯笲笸笚笣粔粘粖粣紵紽紸紶紺絅紬紩絁絇紾紿絊紻紨罣羕羜羝羛翊翋翍翐翑翇翏翉耟耞耛聇聃聈脘脥脙脛脭脟脬脞脡脕脧脝脢舑舸舳舺舴舲艴莐莣莨莍荺荳莤荴莏莁莕莙荵莔莩荽莃莌莝莛莪莋荾莥莯莈莗莰荿莦莇莮荶莚虙虖蚿蚷蛂蛁蛅蚺蚰蛈蚹蚳蚸蛌蚴蚻蚼蛃蚽蚾衒袉袕袨袢袪袚袑袡袟袘袧袙袛袗袤袬袌袓袎覂觖觙觕訰訧訬訞谹谻豜豝豽貥赽赻赹趼跂趹趿跁軘軞軝軜軗軠軡逤逋逑逜逌逡郯郪郰郴郲郳郔郫郬郩酖酘酚酓酕釬釴釱釳釸釤釹釪釫釷釨釮镺閆閈陼陭陫陱陯隿靪頄飥馗傛傕傔傞傋傣傃傌傎傝偨傜傒傂傇兟凔匒匑厤厧喑喨喥喭啷噅喢喓喈喏喵喁喣喒喤啽喌喦啿喕喡喎圌堩堷堙堞堧堣堨埵塈堥堜堛堳堿堶堮堹堸堭堬堻奡媯媔媟婺媢媞婸媦婼媥媬媕媮娷媄媊媗媃媋媩婻婽媌媜媏媓媝寪寍寋寔寑寊寎尌尰崷嵃嵫嵁嵋崿崵嵑嵎嵕崳崺嵒崽崱嵙嵂崹嵉崸崼崲崶嵀嵅幄幁彘徦徥徫惉悹惌惢惎惄愔惲愊愖愅惵愓惸惼惾惁愃愘愝愐惿愄愋扊掔掱掰揎揥揨揯揃撝揳揊揠揶揕揲揵摡揟掾揝揜揄揘揓揂揇揌揋揈揰揗揙攲敧敪敤敜敨敥斌斝斞斮旐旒晼晬晻暀晱晹晪晲朁椌棓椄棜椪棬棪棱椏棖棷棫棤棶椓椐棳棡椇棌椈楰梴椑棯棆椔棸棐棽棼棨椋椊椗棎棈棝棞棦棴棑椆棔棩椕椥棇欹欻欿欼殔殗殙殕殽毰毲毳氰淼湆湇渟湉溈渼渽湅湢渫渿湁湝湳渜渳湋湀湑渻渃渮湞湨湜湡渱渨湠湱湫渹渢渰湓湥渧湸湤湷湕湹湒湦渵渶湚焠焞焯烻焮焱焣焥焢焲焟焨焺焛牋牚犈犉犆犅犋猒猋猰猢猱猳猧猲猭猦猣猵猌琮琬琰琫琖琚琡琭琱琤琣琝琩琠琲瓻甯畯畬痧痚痡痦痝痟痤痗皕皒盚睆睇睄睍睅睊睎睋睌矞矬硠硤硥硜硭硱硪确硰硩硨硞硢祴祳祲祰稂稊稃稌稄窙竦竤筊笻筄筈筌筎筀筘筅粢粞粨粡絘絯絣絓絖絧絪絏絭絜絫絒絔絩絑絟絎缾缿罥罦羢羠羡翗聑聏聐胾胔腃腊腒腏腇脽腍脺臦臮臷臸臹舄舼舽舿艵茻菏菹萣菀菨萒菧菤菼菶萐菆菈菫菣莿萁菝菥菘菿菡菋菎菖菵菉萉萏菞萑萆菂菳菕菺菇菑菪萓菃菬菮菄菻菗菢萛菛菾蛘蛢蛦蛓蛣蛚蛪蛝蛫蛜蛬蛩蛗蛨蛑衈衖衕袺裗袹袸裀袾袶袼袷袽袲褁裉覕覘覗觝觚觛詎詍訹詙詀詗詘詄詅詒詈詑詊詌詏豟貁貀貺貾貰貹貵趄趀趉跘跓跍跇跖跜跏跕跙跈跗跅軯軷軺軹軦軮軥軵軧軨軶軫軱軬軴軩逭逴逯鄆鄬鄄郿郼鄈郹郻鄁鄀鄇鄅鄃酡酤酟酢酠鈁鈊鈥鈃鈚鈦鈏鈌鈀鈒釿釽鈆鈄鈧鈂鈜鈤鈙鈗鈅鈖镻閍閌閐隇陾隈隉隃隀雂雈雃雱雰靬靰靮頇颩飫鳦黹亃亄亶傽傿僆傮僄僊傴僈僂傰僁傺傱僋僉傶傸凗剺剸剻剼嗃嗛嗌嗐嗋嗊嗝嗀嗔嗄嗩喿嗒喍嗏嗕嗢嗖嗈嗲嗍嗙嗂圔塓塨塤塏塍塉塯塕塎塝塙塥塛堽塣塱壼嫇嫄嫋媺媸媱媵媰媿嫈媻嫆媷嫀嫊媴媶嫍媹媐寖寘寙尟尳嵱嵣嵊嵥嵲嵬嵞嵨嵧嵢巰幏幎幊幍幋廅廌廆廋廇彀徯徭惷慉慊愫慅愶愲愮慆愯慏愩慀戠酨戣戥戤揅揱揫搐搒搉搠搤搳摃搟搕搘搹搷搢搣搌搦搰搨摁搵搯搊搚摀搥搧搋揧搛搮搡搎敯斒旓暆暌暕暐暋暊暙暔晸朠楦楟椸楎楢楱椿楅楪椹楂楗楙楺楈楉椵楬椳椽楥棰楸椴楩楀楯楄楶楘楁楴楌椻楋椷楜楏楑椲楒椯楻椼歆歅歃歂歈歁殛嗀毻毼毹毷毸溛滖滈溏滀溟溓溔溠溱溹滆滒溽滁溞滉溷溰滍溦滏溲溾滃滜滘溙溒溎溍溤溡溿溳滐滊溗溮溣煇煔煒煣煠煁煝煢煲煸煪煡煂煘煃煋煰煟煐煓煄煍煚牏犍犌犑犐犎猼獂猻猺獀獊獉瑄瑊瑋瑒瑑瑗瑀瑏瑐瑎瑂瑆瑍瑔瓡瓿瓾瓽甝畹畷榃痯瘏瘃痷痾痼痹痸瘐痻痶痭痵痽皙皵盝睕睟睠睒睖睚睩睧睔睙睭矠碇碚碔碏碄碕碅碆碡碃硹碙碀碖硻祼禂祽祹稑稘稙稒稗稕稢稓稛稐窣窢窞竫筦筤筭筴筩筲筥筳筱筰筡筸筶筣粲粴粯綈綆綀綍絿綅絺綎絻綃絼綌綔綄絽綒罭罫罧罨罬羦羥羧翛翜耡腤腠腷腜腩腛腢腲朡腞腶腧腯腄腡舝艉艄艀艂艅蓱萿葖葶葹蒏蒍葥葑葀蒆葧萰葍葽葚葙葴葳葝蔇葞萷萺萴葺葃葸萲葅萩菙葋萯葂萭葟葰萹葎葌葒葯蓅蒎萻葇萶萳葨葾葄萫葠葔葮葐蜋蜄蛷蜌蛺蛖蛵蝍蛸蜎蜉蜁蛶蜍蜅裖裋裍裎裞裛裚裌裐覅覛觟觥觤觡觠觢觜触詶誆詿詡訿詷誂誄詵誃誁詴詺谼豋豊豥豤豦貆貄貅賌赨赩趑趌趎趏趍趓趔趐趒跰
跠跬跱跮跐跩跣跢跧跲跫跴輆軿輁輀輅輇輈輂輋遒逿遄遉逽鄐鄍鄏鄑鄖鄔鄋鄎酮酯鉈鉒鈰鈺鉦鈳鉥鉞銃鈮鉊鉆鉭鉬鉏鉠鉧鉯鈶鉡鉰鈱鉔鉣鉐鉲鉎鉓鉌鉖鈲閟閜閞閛隒隓隑隗雎雺雽雸雵靳靷靸靲頏頍頎颬飶飹馯馲馰馵骭骫魛鳪鳭鳧麀黽僦僔僗僨僳僛僪僝僤僓僬僰僯僣僠凘劀劁勩勫匰厬嘧嘕嘌嘒嗼嘏嘜嘁嘓嘂嗺嘝嘄嗿嗹墉塼墐墘墆墁塿塴墋塺墇墑墎塶墂墈塻墔墏壾奫嫜嫮嫥嫕嫪嫚嫭嫫嫳嫢嫠嫛嫬嫞嫝嫙嫨嫟孷寠寣屣嶂嶀嵽嶆嵺嶁嵷嶊嶉嶈嵾嵼嶍嵹嵿幘幙幓廘廑廗廎廜廕廙廒廔彄彃彯徶愬愨慁慞慱慳慒慓慲慬憀慴慔慺慛慥愻慪慡慖戩戧戫搫摍摛摝摴摶摲摳摽摵摦撦摎撂摞摜摋摓摠摐摿搿摬摫摙摥摷敳斠暡暠暟朅朄朢榱榶槉榠槎榖榰榬榼榑榙榎榧榍榩榾榯榿槄榽榤槔榹槊榚槏榳榓榪榡榞槙榗榐槂榵榥槆歊歍歋殞殟殠毃毄毾滎滵滱漃漥滸漷滻漮漉潎漙漚漧漘漻漒滭漊漶潳滹滮漭潀漰漼漵滫漇漎潃漅滽滶漹漜滼漺漟漍漞漈漡熇熐熉熀熅熂熏煻熆熁熗牄牓犗犕犓獃獍獑獌瑢瑳瑱瑵瑲瑧瑮甀甂甃畽疐瘖瘈瘌瘕瘑瘊瘔皸瞁睼瞅瞂睮瞀睯睾瞃碲碪碴碭碨硾碫碞碥碠碬碢碤禘禊禋禖禕禔禓禗禈禒禐稫穊稰稯稨稦窨窫窬竮箈箜箊箑箐箖箍箌箛箎箅箘劄箙箤箂粻粿粼粺綧綷緂綣綪緁緀緅綝緎緄緆緋緌綯綹綖綼綟綦綮綩綡緉罳翢翣翥翞耤聝聜膉膆膃膇膍膌膋舕蒗蒤蒡蒟蒺蓎蓂蒬蒮蒫蒹蒴蓁蓍蒪蒚蒱蓐蒝蒧蒻蒢蒔蓇蓌蒛蒩蒯蒨蓖蒘蒶蓏蒠蓗蓔蓒蓛蒰蒑虡蜳蜣蜨蝫蝀蜮蜞蜡蜙蜛蝃蜬蝁蜾蝆蜠蜲蜪蜭蜼蜒蜺蜱蜵蝂蜦蜧蜸蜤蜚蜰蜑裷裧裱裲裺裾裮裼裶裻裰裬裫覝覡覟覞觩觫觨誫誙誋誒誏誖谽豨豩賕賏賗趖踉踂跿踍跽踊踃踇踆踅跾踀踄輐輑輎輍鄣鄜鄠鄢鄟鄝鄚鄤鄡鄛酺酲酹酳銥銤鉶銛鉺銠銔銪銍銦銚銫鉹銗鉿銣鋮銎銂銕銢鉽銈銡銊銆銌銙銧鉾銇銩銝銋鈭隞隡雿靘靽靺靾鞃鞀鞂靻鞄鞁靿韎韍頖颭颮餂餀餇馝馜駃馹馻馺駂馽駇骱髣髧鬾鬿魠魡魟鳱鳲鳵麧僿儃儰僸儆儇僶僾儋儌僽儊劋劌勱勯噈噂噌嘵噁噊噉噆噘噚噀嘳嘽嘬嘾嘸嘪嘺圚墫墝墱墠墣墯墬墥墡壿嫿嫴嫽嫷嫶嬃嫸嬂嫹嬁嬇嬅嬏屧嶙嶗嶟嶒嶢嶓嶕嶠嶜嶡嶚嶞幩幝幠幜緳廛廞廡彉徲憋憃慹憱憰憢憉憛憓憯憭憟憒憪憡憍慦憳戭摮摰撖撠撅撗撜撏撋撊撌撣撟摨撱撘敶敺敹敻斲斳暵暰暩暲暷暪暯樀樆樗槥槸樕槱槤樠槿槬槢樛樝槾樧槲槮樔槷槧橀樈槦槻樍槼槫樉樄樘樥樏槶樦樇槴樖歑殥殣殢殦氁氀毿氂潁漦潾澇濆澒澍澉澌潢潏澅潚澖潶潬澂潕潲潒潐潗澔澓潝漀潡潫潽潧澐潓澋潩潿澕潣潷潪潻熲熯熛熰熠熚熩熵熝熥熞熤熡熪熜熧熳犘犚獘獒獞獟獠獝獛獡獚獙獢璇璉璊璆璁瑽璅璈瑼瑹甈甇畾瘥瘞瘙瘝瘜瘣瘚瘨瘛皜皝皞皛瞍瞏瞉瞈磍碻磏磌磑磎磔磈磃磄磉禚禡禠禜禢禛歶稹窲窴窳箷篋箾箬篎箯箹篊箵糅糈糌糋緷緛緪緧緗緡縃緺緦緶緱緰緮緟罶羬羰羭翭翫翪翬翦翨聤聧膣膟膞膕膢膙膗舖艏艓艒艐艎艑蔤蔻蔏蔀蔩蔎蔉蔍蔟蔊蔧蔜蓻蔫蓺蔈蔌蓴蔪蓲蔕蓷蓫蓳蓼蔒蓪蓩蔖蓾蔨蔝蔮蔂蓽蔞蓶蔱蔦蓧蓨蓰蓯蓹蔘蔠蔰蔋蔙蔯虢蝖蝣蝤蝷蟡蝳蝘蝔蝛蝒蝡蝚蝑蝞蝭蝪蝐蝎蝟蝝蝯蝬蝺蝮蝜蝥蝏蝻蝵蝢蝧蝩衚褅褌褔褋褗褘褙褆褖褑褎褉覢覤覣觭觰觬諏諆誸諓諑諔諕誻諗誾諀諅諘諃誺誽諙谾豍貏賥賟賙賨賚賝賧趠趜趡趛踠踣踥踤踮踕踛踖踑踙踦踧踔踒踘踓踜踗踚輬輤輘輚輠輣輖輗遳遰遯遧遫鄯鄫鄩鄪鄲鄦鄮醅醆醊醁醂醄醀鋐鋃鋄鋀鋙銶鋏鋱鋟鋘鋩鋗鋝鋌鋯鋂鋨鋊鋈鋎鋦鋍鋕鋉鋠鋞鋧鋑鋓銵鋡鋆銴镼閬閫閮閰隤隢雓霅霈霂靚鞊鞎鞈韐韏頞頝頦頩頨頠頛頧颲餈飺餑餔餖餗餕駜駍駏駓駔駎駉駖駘駋駗駌骳髬髫髳髲髱魆魃魧魴魱魦魶魵魰魨魤魬鳼鳺鳽鳿鳷鴇鴀鳹鳻鴈鴅鴄麃黓鼏鼐儜儓儗儚儑凞匴叡噰噠噮噳噦噣噭噲噞噷圜圛壈墽壉墿墺壂墼壆嬗嬙嬛嬡嬔嬓嬐嬖嬨嬚嬠嬞寯嶬嶱嶩嶧嶵嶰嶮嶪嶨嶲嶭嶯嶴幧幨幦幯廩廧廦廨廥彋徼憝憨憖懅憴懆懁懌憺憿憸憌擗擖擐擏擉撽撉擃擛擳擙攳敿敼斢曈暾曀曊曋曏暽暻暺曌朣樴橦橉橧樲橨樾橝橭橶橛橑樨橚樻樿橁橪橤橐橏橔橯橩橠樼橞橖橕橍橎橆歕歔歖殧殪殫毈毇氄氃氆澭濋澣濇澼濎濈潞濄澽澞濊澨瀄澥澮澺澬澪濏澿澸澢濉澫濍澯澲澰燅燂熿熸燖燀燁燋燔燊燇燏熽燘熼燆燚燛犝犞獩獦獧獬獥獫獪瑿璚璠璔璒璕璡甋疀瘯瘭瘱瘽瘳瘼瘵瘲瘰皻盦瞚瞝瞡瞜瞛瞢瞣瞕瞙瞗磝磩磥磪磞磣磛磡磢磭磟磠禤穄穈穇窶窸窵窱窷篞篣篧篝篕篥篚篨篹篔篪篢篜篫篘篟糒糔糗糐糑縒縡縗縌縟縠縓縎縜縕縚縢縋縏縖縍縔縥縤罃罻罼罺羱翯耪耩聬膱膦膮膹膵膫膰膬膴膲膷膧臲艕艖艗蕖蕅蕫蕍蕓蕡蕘蕀蕆蕤蕁蕢蕄蕑蕇蕣蔾蕛蕱蕎蕮蕵蕕蕧蕠薌蕦蕝蕔蕥蕬虣虥虤螛螏螗螓螒螈螁螖螘蝹螇螣螅螐螑螝螄螔螜螚螉褞褦褰褭褮褧褱褢褩褣褯褬褟觱諠諢諲諴諵諝謔諤諟諰諈諞諡諨諿諯諻貑貒貐賵賮賱賰賳赬赮趥趧踳踾踸蹀蹅踶踼踽蹁踰踿躽輶輮輵輲輹輷輴遶遹遻邆郺鄳鄵鄶醓醐醑醍醏錧錞錈錟錆錏鍺錸錼錛錣錒錁鍆錭錎錍鋋錝鋺錥錓鋹鋷錴錂錤鋿錩錹錵錪錔錌錋鋾錉錀鋻錖閼闍閾閹閺閶閿閵閽隩雔霋霒霐鞙鞗鞔韰韸頵頯頲餤餟餧餩馞駮駬駥駤駰駣駪駩駧骹骿骴骻髶髺髹髷鬳鮀鮅鮇魼魾魻鮂鮓鮒鮐魺鮕魽鮈鴥鴗鴠鴞鴔鴩鴝鴘鴢鴐鴙鴟麈麆麇麮麭黕黖黺鼒鼽儦儥儢儤儠儩勴嚓嚌嚍嚆嚄嚃噾嚂噿嚁壖壔壏壒嬭嬥嬲嬣嬬嬧嬦嬯嬮孻寱寲嶷幬幪徾徻懃憵憼懧懠懥懤懨懞擯擩擣擫擤擨斁斀斶旚曒檍檖檁檥檉檟檛檡檞檇檓檎檕檃檨檤檑橿檦檚檅檌檒歛殭氉濌澩濴濔濣濜濭濧濦濞濲濝濢濨燡燱燨燲燤燰燢獳獮獯璗璲璫璐璪璭璱璥璯甐甑甒甏疄癃癈癉癇皤盩瞵瞫瞲瞷瞶瞴瞱瞨矰磳磽礂磻磼磲礅磹磾礄禫禨穜穛穖穘穔穚窾竀竁簅簏篲簀篿篻簎篴簋篳簂簉簃簁篸篽簆篰篱簐簊糨縭縼繂縳顈縸縪繉繀繇縩繌縰縻縶繄縺罅罿罾罽翴翲耬膻臄臌臊臅臇膼臩艛艚艜薃薀薏薧薕薠薋薣蕻薤薚薞蕷蕼薉薡蕺蕸蕗薎薖薆薍薙薝薁薢薂薈薅蕹蕶薘薐薟虨螾螪螭蟅螰螬螹螵螼螮蟉蟃蟂蟌螷螯蟄蟊螴螶螿螸螽蟞螲褵褳褼褾襁襒褷襂覭覯覮觲觳謞謘謖謑謅謋謢謏謒謕謇謍謈謆謜謓謚豏豰豲豱豯貕貔賹赯蹎蹍蹓蹐蹌蹇轃轀邅遾鄸醚醢醛醙醟醡醝醠鎡鎃鎯鍤鍖鍇鍼鍘鍜鍶鍉鍐鍑鍠鍭鎏鍌鍪鍹鍗鍕鍒鍏鍱鍷鍻鍡鍞鍣鍧鎀鍎鍙闇闀闉闃闅閷隮隰隬霠霟霘霝霙鞚鞡鞜鞞鞝韕韔韱顁顄顊顉顅顃餥餫餬餪餳餲餯餭餱餰馘馣馡騂駺駴駷駹駸駶駻駽駾駼騃骾髾髽鬁髼魈鮚鮨鮞鮛鮦鮡鮥鮤鮆鮢鮠鮯鴳鵁鵧鴶鴮鴯鴱鴸鴰鵅鵂鵃鴾鴷鵀鴽翵鴭麊麉麍麰黈黚黻黿鼤鼣鼢齔龠儱儭儮嚘嚜嚗嚚嚝嚙奰嬼屩屪巀幭幮懘懟懭懮懱懪懰懫懖懩擿攄擽擸攁攃擼斔旛曚曛曘櫅檹檽櫡櫆檺檶檷櫇檴檭歞毉氋瀇瀌瀍瀁瀅瀔瀎濿瀀濻瀦濼濷瀊爁燿燹爃燽獶璸瓀璵瓁璾璶璻瓂甔甓癜癤癙癐癓癗癚皦皽盬矂瞺磿礌礓礔礉礐礒礑禭禬穟簜簩簙簠簟簭簝簦簨簢簥簰繜繐繖繣繘繢繟繑繠繗繓羵羳翷翸聵臑臒臐艟艞薴藆藀藃藂薳薵薽藇藄薿藋藎藈藅薱薶藒蘤薸薷薾虩蟧蟦蟢蟛蟫蟪蟥蟟蟳蟤蟔蟜蟓蟭蟘蟣螤蟗蟙蠁蟴蟨蟝襓襋襏襌襆襐襑襉謪謧謣謳謰謵譇謯謼謾謱謥謷謦謶謮謤謻謽謺豂豵貙貘貗賾贄贂贀蹜蹢蹠蹗蹖蹞蹥蹧蹛蹚蹡蹝蹩蹔轆轇轈轋鄨鄺鄻鄾醨醥醧醯醪鎵鎌鎒鎷鎛鎝鎉鎧鎎鎪鎞鎦鎕鎈鎙鎟鎍鎱鎑鎲鎤鎨鎴鎣鎥闒闓闑隳雗雚巂雟雘雝霣霢霥鞬鞮鞨鞫鞤鞪鞢鞥韗韙韖韘韺顐顑顒颸饁餼餺騏騋騉騍騄騑騊騅騇騆髀髜鬈鬄鬅鬩鬵魊魌魋鯇鯆鯃鮿鯁鮵鮸鯓鮶鯄鮹鮽鵜鵓鵏鵊鵛鵋鵙鵖鵌鵗鵒鵔鵟鵘鵚麎麌黟鼁鼀鼖鼥鼫鼪鼩鼨齌齕儴儵劖勷厴嚫嚭嚦嚧嚪嚬壚壝壛夒嬽嬾嬿巃幰徿懻攇攐攍攉攌攎斄旞旝曞櫧櫠櫌櫑櫙櫋櫟櫜櫐櫫櫏櫍櫞歠殰氌瀙瀧瀠瀖瀫瀡瀢瀣瀩瀗瀤瀜瀪爌爊爇爂爅犥犦犤犣犡瓋瓅璷瓃甖癠矉矊矄矱礝礛礡礜礗礞禰穧穨簳簼簹簬簻糬糪繶繵繸繰繷繯繺繲繴繨罋罊羃羆羷翽翾聸臗臕艤艡艣藫藱藭藙藡藨藚藗藬藲藸藘藟藣藜藑藰藦藯藞藢蠀蟺蠃蟶蟷蠉蠌蠋蠆蟼蠈蟿蠊蠂襢襚襛襗襡襜襘襝襙覈覷覶觶譐譈譊譀譓譖譔譋譕譑譂譒譗豃豷豶貚贆贇贉趬趪趭趫蹭蹸蹳蹪蹯蹻軂轒轑轏轐轓辴酀鄿醰醭鏞鏇鏏鏂鏚鏐鏹鏬鏌鏙鎩鏦鏊鏔鏮鏣鏕鏄鏎鏀鏒鏧镽闚闛雡霩霫霬霨霦鞳鞷鞶韝韞韟顜顙顝顗颿颽颻颾饈饇饃馦馧騚騕騥騝騤騛騢騠騧騣騞騜騔髂鬋鬊鬎鬌鬷鯪鯫鯠鯞鯤鯦鯢鯰鯔鯗鯬鯜鯙鯥鯕鯡鯚鵷鶁鶊鶄鶈鵱鶀鵸鶆鶋鶌鵽鵫鵴鵵鵰鵩鶅鵳鵻鶂鵯鵹鵿鶇鵨麔麑黀黼鼭齀齁齍齖齗齘匷嚲嚵嚳壣孅巆巇廮廯忀忁懹攗攖攕攓旟曨曣曤櫳櫰櫪櫨櫹櫱櫮櫯瀼瀵瀯瀷瀴瀱灂瀸瀿瀺瀹灀瀻瀳灁爓爔犨獽獼璺皫皪皾盭矌矎矏矍矲礥礣礧礨礤礩禲穮穬穭竷籉籈籊籇籅糮繻繾纁纀羺翿聹臛臙舋艨艩蘢藿蘁藾蘛蘀藶蘄蘉蘅蘌藽蠙蠐蠑蠗蠓蠖襣襦覹觷譠譪譝譨譣譥譧譭趮躆躈躄轙轖轗轕轘轚邍酃酁醷醵醲醳鐋鐓鏻鐠鐏鐔鏾鐕鐐鐨鐙鐍鏵鐀鏷鐇鐎鐖鐒鏺鐉鏸鐊鏿鏼鐌鏶鐑鐆闞闠闟霮霯鞹鞻韽韾顠顢顣顟飁飂饐饎饙饌饋饓騲騴騱騬騪騶騩騮騸騭髇髊髆鬐鬒鬑鰋鰈鯷鰅鰒鯸鱀鰇鰎鰆鰗鰔鰉鶟鶙鶤鶝鶒鶘鶐鶛鶠鶔鶜鶪鶗鶡鶚鶢鶨鶞鶣鶿鶩鶖鶦鶧麙麛麚黥黤黧黦鼰鼮齛齠齞齝齙龑儺儹劘劗囃嚽嚾孈孇巋巏廱懽攛欂櫼欃櫸欀灃灄灊灈灉灅灆爝爚爙獾甗癪矐礭礱礯籔籓糲纊纇纈纋纆纍罍羻耰臝蘘蘪蘦蘟蘣蘜蘙蘧蘮蘡蘠蘩蘞蘥蠩蠝蠛蠠蠤蠜蠫衊襭襩襮襫觺譹譸譅譺譻贐贔趯躎躌轞轛轝酆酄酅醹鐿鐻鐶鐩鐽鐼鐰鐹鐪鐷鐬鑀鐱闥闤闣霵霺鞿韡顤飉飆飀饘饖騹騽驆驄驂驁騺騿髍鬕鬗鬘鬖鬺魒鰫鰝鰜鰬鰣鰨鰩鰤鰡鶷鶶鶼鷁鷇鷊鷏鶾鷅鷃鶻鶵鷎鶹鶺鶬鷈鶱鶭鷌鶳鷍鶲鹺麜黫黮黭鼛鼘鼚鼱齎齥齤龒亹囆囅囋奱孋孌巕巑廲攡攠攦攢欋欈欉氍灕灖灗灒爞爟犩
獿瓘瓕瓙瓗癭皭礵禴穰穱籗籜籙籛籚糴糱纑罏羇臞艫蘴蘵蘳蘬蘲蘶蠬蠨蠦蠪蠥襱覿覾觻譾讄讂讆讅譿贕躕躔躚躒躐躖躗轠轢酇鑌鑐鑊鑋鑏鑇鑅鑈鑉鑆霿韣顪顩飋饔饛驎驓驔驌驏驈驊驉驒驐髐鬙鬫鬻魖魕鱆鱈鰿鱄鰹鰳鱁鰼鰷鰴鰲鰽鰶鷛鷒鷞鷚鷋鷐鷜鷑鷟鷩鷙鷘鷖鷵鷕鷝麶黰鼵鼳鼲齂齫龕龢儽劙壨壧奲孍巘蠯彏戁戃戄攩攥斖曫欑欒欏毊灛灚爢玂玁玃癰矔籧籦纕艬蘺虀蘹蘼蘱蘻蘾蠰蠲蠮蠳襶襴襳觾讌讎讋讈豅贙躘轤轣醼鑢鑕鑝鑗鑞韄韅頀驖驙鬞鬟鬠鱒鱘鱐鱊鱍鱋鱕鱙鱌鱎鷻鷷鷯鷣鷫鷸鷤鷶鷡鷮鷦鷲鷰鷢鷬鷴鷳鷨鷭黂黐黲黳鼆鼜鼸鼷鼶齃齏齱齰齮齯囓囍孎屭攭曭曮欓灟灡灝灠爣瓛瓥矕礸禷禶籪纗羉艭虃蠸蠷蠵衋讔讕躞躟躠躝醾醽釂鑫鑨鑩雥靆靃靇韇韥驞髕魙鱣鱧鱦鱢鱞鱠鸂鷾鸇鸃鸆鸅鸀鸁鸉鷿鷽鸄麠鼞齆齴齵齶囔攮斸欘欙欗欚灢爦犪矘矙礹籩籫糶纚纘纛纙臠臡虆虇虈襹襺襼襻觿讘讙躥躤躣鑮鑭鑯鑱鑳靉顲饟鱨鱮鱭鸋鸍鸐鸏鸒鸑麡黵鼉齇齸齻齺齹圞灦籯蠼趲躦釃鑴鑸鑶鑵驠鱴鱳鱱鱵鸔鸓黶鼊龤灨灥糷虪蠾蠽蠿讞貜躩軉靋顳顴飌饡馫驤驦驧鬤鸕鸗齈戇欞爧虌躨钂钀钁驩驨鬮鸙爩虋讟钃鱹麷癵驫鱺鸝灩灪麤齾齉龘�����������������������������������������") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_charsets.json.go b/vendor/github.com/rogpeppe/go-charset/data/data_charsets.json.go new file mode 100644 index 000000000..a2e578d4a --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_charsets.json.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("charsets.json", func() (io.ReadCloser, error) { + r := strings.NewReader("{\n\"8bit\": {\n\t\"Desc\": \"raw 8-bit data\",\n\t\"Class\": \"8bit\",\n\t\"Comment\": \"special class for raw 8bit data that has been converted to utf-8\"\n},\n\"big5\": {\n\t\"Desc\": \"Big 5 (HKU)\",\n\t\"Class\": \"big5\",\n\t\"Comment\": \"Traditional Chinese\"\n},\n\"euc-jp\": {\n\t\"Aliases\":[\"x-euc-jp\"],\n\t\"Desc\": \"Japanese Extended UNIX Code\",\n\t\"Class\": \"euc-jp\"\n},\n\"gb2312\": {\n\t\"Aliases\":[\"iso-ir-58\", \"chinese\", \"gb_2312-80\"],\n\t\"Desc\": \"Chinese mixed one byte\",\n\t\"Class\": \"gb2312\"\n},\n\"ibm437\": {\n\t\"Aliases\":[\"437\", \"cp437\"],\n\t\"Desc\": \"IBM PC: CP 437\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"ibm437.cp\",\n\t\"Comment\": \"originally from jhelling@cs.ruu.nl (Jeroen Hellingman)\"\n},\n\"ibm850\": {\n\t\"Aliases\":[\"850\", \"cp850\"],\n\t\"Desc\": \"IBM PS/2: CP 850\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"ibm850.cp\",\n\t\"Comment\": \"originally from jhelling@cs.ruu.nl (Jeroen Hellingman)\"\n},\n\"ibm866\": {\n\t\"Aliases\":[\"cp866\", \"866\"],\n\t\"Desc\": \"Russian MS-DOS CP 866\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"ibm866.cp\"\n},\n\"iso-8859-1\": {\n\t\"Aliases\":[\"iso-ir-100\", \"ibm819\", \"l1\", \"iso8859-1\", \"iso-latin-1\", \"iso_8859-1:1987\", \"cp819\", \"iso_8859-1\", \"iso8859_1\", \"latin1\"],\n\t\"Desc\": \"Latin-1\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-1.cp\"\n},\n\"iso-8859-10\": {\n\t\"Aliases\":[\"iso_8859-10:1992\", \"l6\", \"iso-ir-157\", \"latin6\"],\n\t\"Desc\": \"Latin-6\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-10.cp\",\n\t\"Comment\": \"originally from dkuug.dk:i18n/charmaps/ISO_8859-10:1993\"\n},\n\"iso-8859-15\": {\n\t\"Aliases\":[\"l9-iso-8859-15\", \"latin9\"],\n\t\"Desc\": \"Latin-9\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-15.cp\"\n},\n\"iso-8859-2\": {\n\t\"Aliases\":[\"iso-ir-101\", \"iso_8859-2:1987\", \"l2\", \"iso_8859-2\", \"latin2\"],\n\t\"Desc\": \"Latin-2\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-2.cp\"\n},\n\"iso-8859-3\": {\n\t\"Aliases\":[\"iso-ir-109\", \"l3\", \"iso_8859-3:1988\", \"iso_8859-3\", \"latin3\"],\n\t\"Desc\": \"Latin-3\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-3.cp\"\n},\n\"iso-8859-4\": {\n\t\"Aliases\":[\"iso-ir-110\", \"iso_8859-4:1988\", \"l4\", \"iso_8859-4\", \"latin4\"],\n\t\"Desc\": \"Latin-4\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-4.cp\"\n},\n\"iso-8859-5\": {\n\t\"Aliases\":[\"cyrillic\", 
\"iso_8859-5\", \"iso-ir-144\", \"iso_8859-5:1988\"],\n\t\"Desc\": \"Part 5 (Cyrillic)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-5.cp\"\n},\n\"iso-8859-6\": {\n\t\"Aliases\":[\"ecma-114\", \"iso_8859-6:1987\", \"arabic\", \"iso_8859-6\", \"asmo-708\", \"iso-ir-127\"],\n\t\"Desc\": \"Part 6 (Arabic)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-6.cp\"\n},\n\"iso-8859-7\": {\n\t\"Aliases\":[\"greek8\", \"elot_928\", \"ecma-118\", \"greek\", \"iso_8859-7\", \"iso_8859-7:1987\", \"iso-ir-126\"],\n\t\"Desc\": \"Part 7 (Greek)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-7.cp\"\n},\n\"iso-8859-8\": {\n\t\"Aliases\":[\"iso_8859-8:1988\", \"hebrew\", \"iso_8859-8\", \"iso-ir-138\"],\n\t\"Desc\": \"Part 8 (Hebrew)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-8.cp\"\n},\n\"iso-8859-9\": {\n\t\"Aliases\":[\"l5\", \"iso_8859-9:1989\", \"iso_8859-9\", \"iso-ir-148\", \"latin5\"],\n\t\"Desc\": \"Latin-5\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"iso-8859-9.cp\"\n},\n\"koi8-r\": {\n\t\"Desc\": \"KOI8-R (RFC1489)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"koi8-r.cp\"\n},\n\"shift_jis\": {\n\t\"Aliases\":[\"sjis\", \"ms_kanji\", \"x-sjis\"],\n\t\"Desc\": \"Shift-JIS Japanese\",\n\t\"Class\": \"cp932\",\n\t\"Arg\": \"shiftjis\"\n},\n\"utf-16\": {\n\t\"Aliases\":[\"utf16\"],\n\t\"Desc\": \"Unicode UTF-16\",\n\t\"Class\": \"utf16\"\n},\n\"utf-16be\": {\n\t\"Aliases\":[\"utf16be\"],\n\t\"Desc\": \"Unicode UTF-16 big endian\",\n\t\"Class\": \"utf16\",\n\t\"Arg\": \"be\"\n},\n\"utf-16le\": {\n\t\"Aliases\":[\"utf16le\"],\n\t\"Desc\": \"Unicode UTF-16 little endian\",\n\t\"Class\": \"utf16\",\n\t\"Arg\": \"le\"\n},\n\"utf-8\": {\n\t\"Aliases\":[\"utf8\", \"ascii\", \"us-ascii\"],\n\t\"Desc\": \"Unicode UTF-8\",\n\t\"Class\": \"utf8\"\n},\n\"windows-1250\": {\n\t\"Desc\": \"MS Windows CP 1250 (Central Europe)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"windows-1250.cp\"\n},\n\"windows-1251\": {\n\t\"Desc\": \"MS Windows CP 1251 (Cyrillic)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"windows-1251.cp\"\n},\n\"windows-1252\": {\n\t\"Desc\": \"MS Windows CP 1252 (Latin 1)\",\n\t\"Class\": \"cp\",\n\t\"Arg\": \"windows-1252.cp\"\n},\n\"windows-31j\": {\n\t\"Aliases\":[\"cp932\"],\n\t\"Desc\": \"MS-Windows Japanese (cp932)\",\n\t\"Class\": \"cp932\",\n\t\"Arg\": \"cp932\"\n}\n}\n") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_cp932.dat.go b/vendor/github.com/rogpeppe/go-charset/data/data_cp932.dat.go new file mode 100644 index 000000000..0e53a5c1e --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_cp932.dat.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("cp932.dat", func() (io.ReadCloser, error) { + r := strings.NewReader("\u3000、。,.・:;?!゛゜´`¨^ ̄_ヽヾゝゞ〃仝々〆〇ー―‐/\~∥|…‥‘’“”()〔〕[]{}〈〉《》「」『』【】+-±×�÷=≠<>≦≧∞∴♂♀°′″℃¥$¢£%#&*@§☆★○●◎◇◆□■△▲▽▼※〒→←↑↓〓�����������∈∋⊆⊇⊂⊃∪∩��������∧∨¬⇒⇔∀∃�����������∠⊥⌒∂∇≡≒≪≫√∽∝∵∫∬�������ʼn♯♭♪†‡¶����◯���������������0123456789�������ABCDEFGHIJKLMNOPQRSTUVWXYZ�������abcdefghijklmnopqrstuvwxyz����ぁあぃいぅうぇえぉおかがきぎくぐけげこごさざしじすずせぜそぞただちぢっつづてでとどなにぬねのはばぱひびぴふぶぷへべぺほぼぽまみむめもゃやゅゆょよらりるれろゎわゐゑをん�����������ァアィイゥウェエォオカガキギクグケゲコゴサザシジスズセゼソゾタダチヂッツヅテデトドナニヌネノハバパヒビピフブプヘベペホボポマミ�ムメモャヤュユョヨラリルレロヮワヰヱヲンヴヵヶ��������ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩ��������αβγδεζηθικλμνξοπρστυφχψω��������������������������������������АБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯ���������������абвгдеёжзийклмн�опрстуфхцчшщъыьэюя�������������─│┌┐┘└├┬┤┴┼━┃┏┓┛┗┣┳┫┻╋┠┯┨┷┿┝┰┥┸╂��������������������������������������������������������������①②③④⑤⑥⑦⑧⑨⑩⑪⑫⑬⑭⑮⑯⑰⑱⑲⑳ⅠⅡⅢⅣⅤⅥⅦⅧⅨⅩ�㍉㌔㌢㍍㌘㌧㌃㌶㍑㍗㌍㌦㌣㌫㍊㌻㎜㎝㎞㎎㎏㏄㎡��������㍻�〝〟№㏍℡㊤㊥㊦㊧㊨㈱㈲㈹㍾㍽㍼≒≡∫∮∑√⊥∠∟⊿∵∩∪�����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������亜唖娃阿哀愛挨姶逢葵茜穐悪握渥旭葦芦鯵梓圧斡扱宛姐虻飴絢綾鮎或粟袷安庵按暗案闇鞍杏以伊位依偉囲夷委威尉惟意慰易椅為畏異移維緯胃萎衣謂違遺医井亥域育郁磯一壱溢逸稲茨芋鰯允印咽員因姻引飲淫胤蔭院陰隠韻吋右宇烏羽迂雨卯鵜窺丑碓臼渦嘘唄欝蔚鰻姥厩浦瓜閏噂云運雲荏餌叡営嬰影映曳栄永泳洩瑛盈穎頴英衛詠鋭液疫益駅悦謁越閲榎厭円�園堰奄宴延怨掩援沿演炎焔煙燕猿縁艶苑薗遠鉛鴛塩於汚甥凹央奥往応押旺横欧殴王翁襖鴬鴎黄岡沖荻億屋憶臆桶牡乙俺卸恩温穏音下化仮何伽価佳加可嘉夏嫁家寡科暇果架歌河火珂禍禾稼箇花苛茄荷華菓蝦課嘩貨迦過霞蚊俄峨我牙画臥芽蛾賀雅餓駕介会解回塊壊廻快怪悔恢懐戒拐改魁晦械海灰界皆絵芥蟹開階貝凱劾外咳害崖慨概涯碍蓋街該鎧骸浬馨蛙垣柿蛎鈎劃嚇各廓拡撹格核殻獲確穫覚角赫較郭閣隔革学岳楽額顎掛笠樫�橿梶鰍潟割喝恰括活渇滑葛褐轄且鰹叶椛樺鞄株兜竃蒲釜鎌噛鴨栢茅萱粥刈苅瓦乾侃冠寒刊勘勧巻喚堪姦完官寛干幹患感慣憾換敢柑桓棺款歓汗漢澗潅環甘監看竿管簡緩缶翰肝艦莞観諌貫還鑑間閑関陥韓館舘丸含岸巌玩癌眼岩翫贋雁頑顔願企伎危喜器基奇嬉寄岐希幾忌揮机旗既期棋棄機帰毅気汽畿祈季稀紀徽規記貴起軌輝飢騎鬼亀偽儀妓宜戯技擬欺犠疑祇義蟻誼議掬菊鞠吉吃喫桔橘詰砧杵黍却客脚虐逆丘久仇休及吸宮弓急救�朽求汲泣灸球究窮笈級糾給旧牛去居巨拒拠挙渠虚許距鋸漁禦魚亨享京供侠僑兇競共凶協匡卿叫喬境峡強彊怯恐恭挟教橋況狂狭矯胸脅興蕎郷鏡響饗驚仰凝尭暁業局曲極玉桐粁僅勤均巾錦斤欣欽琴禁禽筋緊芹菌衿襟謹近金吟銀九倶句区狗玖矩苦躯駆駈駒具愚虞喰空偶寓遇隅串櫛釧屑屈掘窟沓靴轡窪熊隈粂栗繰桑鍬勲君薫訓群軍郡卦袈祁係傾刑兄啓圭珪型契形径恵慶慧憩掲携敬景桂渓畦稽系経継繋罫茎荊蛍計詣警軽頚鶏芸迎鯨�劇戟撃激隙桁傑欠決潔穴結血訣月件倹倦健兼券剣喧圏堅嫌建憲懸拳捲検権牽犬献研硯絹県肩見謙賢軒遣鍵険顕験鹸元原厳幻弦減源玄現絃舷言諺限乎個古呼固姑孤己庫弧戸故枯湖狐糊袴股胡菰虎誇跨鈷雇顧鼓五互伍午呉吾娯後御悟梧檎瑚碁語誤護醐乞鯉交佼侯候倖光公功効勾厚口向后喉坑垢好孔孝宏工巧巷幸広庚康弘恒慌抗拘控攻昂晃更杭校梗構江洪浩港溝甲皇硬稿糠紅紘絞綱耕考肯肱腔膏航荒行衡講貢購郊酵鉱砿鋼閤降�項香高鴻剛劫号合壕拷濠豪轟麹克刻告国穀酷鵠黒獄漉腰甑忽惚骨狛込此頃今困坤墾婚恨懇昏昆根梱混痕紺艮魂些佐叉唆嵯左差査沙瑳砂詐鎖裟坐座挫債催再最哉塞妻宰彩才採栽歳済災采犀砕砦祭斎細菜裁載際剤在材罪財冴坂阪堺榊肴咲崎埼碕鷺作削咋搾昨朔柵窄策索錯桜鮭笹匙冊刷察拶撮擦札殺薩雑皐鯖捌錆鮫皿晒三傘参山惨撒散桟燦珊産算纂蚕讃賛酸餐斬暫残仕仔伺使刺司史嗣四士始姉姿子屍市師志思指支孜斯施旨枝止�死氏獅祉私糸紙紫肢脂至視詞詩試誌諮資賜雌飼歯事似侍児字寺慈持時次滋治爾璽痔磁示而耳自蒔辞汐鹿式識鴫竺軸宍雫七叱執失嫉室悉湿漆疾質実蔀篠偲柴芝屡蕊縞舎写射捨赦斜煮社紗者謝車遮蛇邪借勺尺杓灼爵酌釈錫若寂弱惹主取守手朱殊狩珠種腫趣酒首儒受呪寿授樹綬需囚収周宗就州修愁拾洲秀秋終繍習臭舟蒐衆襲讐蹴輯週酋酬集醜什住充十従戎柔汁渋獣縦重銃叔夙宿淑祝縮粛塾熟出術述俊峻春瞬竣舜駿准循旬楯殉淳�準潤盾純巡遵醇順処初所暑曙渚庶緒署書薯藷諸助叙女序徐恕鋤除傷償勝匠升召哨商唱嘗奨妾娼宵将小少尚庄床廠彰承抄招掌捷昇昌昭晶松梢樟樵沼消渉湘焼焦照症省硝礁祥称章笑粧紹肖菖蒋蕉衝裳訟証詔詳象賞醤鉦鍾鐘障鞘上丈丞乗冗剰城場壌嬢常情擾条杖浄状畳穣蒸譲醸錠嘱埴飾拭植殖燭織職色触食蝕辱尻伸信侵唇娠寝審心慎振新晋森榛浸深申疹真神秦紳臣芯薪親診身辛進針震人仁刃塵壬尋甚尽腎訊迅陣靭笥諏須酢図厨�逗吹垂帥推水炊睡粋翠衰遂酔錐錘随瑞髄崇嵩数枢趨雛据杉椙菅頗雀裾澄摺寸世瀬畝是凄制勢姓征性成政整星晴棲栖正清牲生盛精聖声製西誠誓請逝醒青静斉税脆隻席惜戚斥昔析石積籍績脊責赤跡蹟碩切拙接摂折設窃節説雪絶舌蝉仙先千占宣専尖川戦扇撰栓栴泉浅洗染潜煎煽旋穿箭線繊羨腺舛船薦詮賎践選遷銭銑閃鮮前善漸然全禅繕膳糎噌塑岨措曾曽楚狙疏疎礎祖租粗素組蘇訴阻遡鼠僧創双叢倉喪壮奏爽宋層匝惣想捜掃挿掻�操早曹巣槍槽漕燥争痩相窓糟総綜聡草荘葬蒼藻装走送遭鎗霜騒像増憎臓蔵贈造促側則即息捉束測足速俗属賊族続卒袖其揃存孫尊損村遜他多太汰詑唾堕妥惰打柁舵楕陀駄騨体堆対耐岱帯待怠態戴替泰滞胎腿苔袋貸退逮隊黛鯛代台大第醍題鷹滝瀧卓啄宅托択拓沢濯琢託鐸濁諾茸凧蛸只叩但達辰奪脱巽竪辿棚谷狸鱈樽誰丹単嘆坦担探旦歎淡湛炭短端箪綻耽胆蛋誕鍛団壇弾断暖檀段男談値知地弛恥智池痴稚置致蜘遅馳築畜竹筑蓄�逐秩窒茶嫡着中仲宙忠抽昼柱注虫衷註酎鋳駐樗瀦猪苧著貯丁兆凋喋寵帖帳庁弔張彫徴懲挑暢朝潮牒町眺聴脹腸蝶調諜超跳銚長頂鳥勅捗直朕沈珍賃鎮陳津墜椎槌追鎚痛通塚栂掴槻佃漬柘辻蔦綴鍔椿潰坪壷嬬紬爪吊釣鶴亭低停偵剃貞呈堤定帝底庭廷弟悌抵挺提梯汀碇禎程締艇訂諦蹄逓邸鄭釘鼎泥摘擢敵滴的笛適鏑溺哲徹撤轍迭鉄典填天展店添纏甜貼転顛点伝殿澱田電兎吐堵塗妬屠徒斗杜渡登菟賭途都鍍砥砺努度土奴怒倒党冬�凍刀唐塔塘套宕島嶋悼投搭東桃梼棟盗淘湯涛灯燈当痘祷等答筒糖統到董蕩藤討謄豆踏逃透鐙陶頭騰闘働動同堂導憧撞洞瞳童胴萄道銅峠鴇匿得徳涜特督禿篤毒独読栃橡凸突椴届鳶苫寅酉瀞噸屯惇敦沌豚遁頓呑曇鈍奈那内乍凪薙謎灘捺鍋楢馴縄畷南楠軟難汝二尼弐迩匂賑肉虹廿日乳入如尿韮任妊忍認濡禰祢寧葱猫熱年念捻撚燃粘乃廼之埜嚢悩濃納能脳膿農覗蚤巴把播覇杷波派琶破婆罵芭馬俳廃拝排敗杯盃牌背肺輩配倍培媒梅�楳煤狽買売賠陪這蝿秤矧萩伯剥博拍柏泊白箔粕舶薄迫曝漠爆縛莫駁麦函箱硲箸肇筈櫨幡肌畑畠八鉢溌発醗髪伐罰
抜筏閥鳩噺塙蛤隼伴判半反叛帆搬斑板氾汎版犯班畔繁般藩販範釆煩頒飯挽晩番盤磐蕃蛮匪卑否妃庇彼悲扉批披斐比泌疲皮碑秘緋罷肥被誹費避非飛樋簸備尾微枇毘琵眉美鼻柊稗匹疋髭彦膝菱肘弼必畢筆逼桧姫媛紐百謬俵彪標氷漂瓢票表評豹廟描病秒苗錨鋲蒜蛭鰭品彬斌浜瀕貧賓頻敏瓶不付埠夫婦富冨布府怖扶敷�斧普浮父符腐膚芙譜負賦赴阜附侮撫武舞葡蕪部封楓風葺蕗伏副復幅服福腹複覆淵弗払沸仏物鮒分吻噴墳憤扮焚奮粉糞紛雰文聞丙併兵塀幣平弊柄並蔽閉陛米頁僻壁癖碧別瞥蔑箆偏変片篇編辺返遍便勉娩弁鞭保舗鋪圃捕歩甫補輔穂募墓慕戊暮母簿菩倣俸包呆報奉宝峰峯崩庖抱捧放方朋法泡烹砲縫胞芳萌蓬蜂褒訪豊邦鋒飽鳳鵬乏亡傍剖坊妨帽忘忙房暴望某棒冒紡肪膨謀貌貿鉾防吠頬北僕卜墨撲朴牧睦穆釦勃没殆堀幌奔本翻凡盆�摩磨魔麻埋妹昧枚毎哩槙幕膜枕鮪柾鱒桝亦俣又抹末沫迄侭繭麿万慢満漫蔓味未魅巳箕岬密蜜湊蓑稔脈妙粍民眠務夢無牟矛霧鵡椋婿娘冥名命明盟迷銘鳴姪牝滅免棉綿緬面麺摸模茂妄孟毛猛盲網耗蒙儲木黙目杢勿餅尤戻籾貰問悶紋門匁也冶夜爺耶野弥矢厄役約薬訳躍靖柳薮鑓愉愈油癒諭輸唯佑優勇友宥幽悠憂揖有柚湧涌猶猷由祐裕誘遊邑郵雄融夕予余与誉輿預傭幼妖容庸揚揺擁曜楊様洋溶熔用窯羊耀葉蓉要謡踊遥陽養慾抑欲�沃浴翌翼淀羅螺裸来莱頼雷洛絡落酪乱卵嵐欄濫藍蘭覧利吏履李梨理璃痢裏裡里離陸律率立葎掠略劉流溜琉留硫粒隆竜龍侶慮旅虜了亮僚両凌寮料梁涼猟療瞭稜糧良諒遼量陵領力緑倫厘林淋燐琳臨輪隣鱗麟瑠塁涙累類令伶例冷励嶺怜玲礼苓鈴隷零霊麗齢暦歴列劣烈裂廉恋憐漣煉簾練聯蓮連錬呂魯櫓炉賂路露労婁廊弄朗楼榔浪漏牢狼篭老聾蝋郎六麓禄肋録論倭和話歪賄脇惑枠鷲亙亘鰐詫藁蕨椀湾碗腕��������������������������������������������弌丐丕个丱丶丼丿乂乖乘亂亅豫亊舒弍于亞亟亠亢亰亳亶从仍仄仆仂仗仞仭仟价伉佚估佛佝佗佇佶侈侏侘佻佩佰侑佯來侖儘俔俟俎俘俛俑俚俐俤俥倚倨倔倪倥倅伜俶倡倩倬俾俯們倆偃假會偕偐偈做偖偬偸傀傚傅傴傲僉僊傳僂僖僞僥僭僣僮價僵儉儁儂儖儕儔儚儡儺儷儼儻儿兀兒兌兔兢竸兩兪兮冀冂囘册冉冏冑冓冕冖冤冦冢冩冪冫决冱冲冰况冽凅凉凛几處凩凭�凰凵凾刄刋刔刎刧刪刮刳刹剏剄剋剌剞剔剪剴剩剳剿剽劍劔劒剱劈劑辨辧劬劭劼劵勁勍勗勞勣勦飭勠勳勵勸勹匆匈甸匍匐匏匕匚匣匯匱匳匸區卆卅丗卉卍凖卞卩卮夘卻卷厂厖厠厦厥厮厰厶參簒雙叟曼燮叮叨叭叺吁吽呀听吭吼吮吶吩吝呎咏呵咎呟呱呷呰咒呻咀呶咄咐咆哇咢咸咥咬哄哈咨咫哂咤咾咼哘哥哦唏唔哽哮哭哺哢唹啀啣啌售啜啅啖啗唸唳啝喙喀咯喊喟啻啾喘喞單啼喃喩喇喨嗚嗅嗟嗄嗜嗤嗔嘔嗷嘖嗾嗽嘛嗹噎噐營嘴嘶嘲嘸�噫噤嘯噬噪嚆嚀嚊嚠嚔嚏嚥嚮嚶嚴囂嚼囁囃囀囈囎囑囓囗囮囹圀囿圄圉圈國圍圓團圖嗇圜圦圷圸坎圻址坏坩埀垈坡坿垉垓垠垳垤垪垰埃埆埔埒埓堊埖埣堋堙堝塲堡塢塋塰毀塒堽塹墅墹墟墫墺壞墻墸墮壅壓壑壗壙壘壥壜壤壟壯壺壹壻壼壽夂夊夐夛梦夥夬夭夲夸夾竒奕奐奎奚奘奢奠奧奬奩奸妁妝佞侫妣妲姆姨姜妍姙姚娥娟娑娜娉娚婀婬婉娵娶婢婪媚媼媾嫋嫂媽嫣嫗嫦嫩嫖嫺嫻嬌嬋嬖嬲嫐嬪嬶嬾孃孅孀孑孕孚孛孥孩孰孳孵學斈孺宀�它宦宸寃寇寉寔寐寤實寢寞寥寫寰寶寳尅將專對尓尠尢尨尸尹屁屆屎屓屐屏孱屬屮乢屶屹岌岑岔妛岫岻岶岼岷峅岾峇峙峩峽峺峭嶌峪崋崕崗嵜崟崛崑崔崢崚崙崘嵌嵒嵎嵋嵬嵳嵶嶇嶄嶂嶢嶝嶬嶮嶽嶐嶷嶼巉巍巓巒巖巛巫已巵帋帚帙帑帛帶帷幄幃幀幎幗幔幟幢幤幇幵并幺麼广庠廁廂廈廐廏廖廣廝廚廛廢廡廨廩廬廱廳廰廴廸廾弃弉彝彜弋弑弖弩弭弸彁彈彌彎弯彑彖彗彙彡彭彳彷徃徂彿徊很徑徇從徙徘徠徨徭徼忖忻忤忸忱忝悳忿怡恠�怙怐怩怎怱怛怕怫怦怏怺恚恁恪恷恟恊恆恍恣恃恤恂恬恫恙悁悍惧悃悚悄悛悖悗悒悧悋惡悸惠惓悴忰悽惆悵惘慍愕愆惶惷愀惴惺愃愡惻惱愍愎慇愾愨愧慊愿愼愬愴愽慂慄慳慷慘慙慚慫慴慯慥慱慟慝慓慵憙憖憇憬憔憚憊憑憫憮懌懊應懷懈懃懆憺懋罹懍懦懣懶懺懴懿懽懼懾戀戈戉戍戌戔戛戞戡截戮戰戲戳扁扎扞扣扛扠扨扼抂抉找抒抓抖拔抃抔拗拑抻拏拿拆擔拈拜拌拊拂拇抛拉挌拮拱挧挂挈拯拵捐挾捍搜捏掖掎掀掫捶掣掏掉掟掵捫�捩掾揩揀揆揣揉插揶揄搖搴搆搓搦搶攝搗搨搏摧摯摶摎攪撕撓撥撩撈撼據擒擅擇撻擘擂擱擧舉擠擡抬擣擯攬擶擴擲擺攀擽攘攜攅攤攣攫攴攵攷收攸畋效敖敕敍敘敞敝敲數斂斃變斛斟斫斷旃旆旁旄旌旒旛旙无旡旱杲昊昃旻杳昵昶昴昜晏晄晉晁晞晝晤晧晨晟晢晰暃暈暎暉暄暘暝曁暹曉暾暼曄暸曖曚曠昿曦曩曰曵曷朏朖朞朦朧霸朮朿朶杁朸朷杆杞杠杙杣杤枉杰枩杼杪枌枋枦枡枅枷柯枴柬枳柩枸柤柞柝柢柮枹柎柆柧檜栞框栩桀桍栲桎�梳栫桙档桷桿梟梏梭梔條梛梃檮梹桴梵梠梺椏梍桾椁棊椈棘椢椦棡椌棍棔棧棕椶椒椄棗棣椥棹棠棯椨椪椚椣椡棆楹楷楜楸楫楔楾楮椹楴椽楙椰楡楞楝榁楪榲榮槐榿槁槓榾槎寨槊槝榻槃榧樮榑榠榜榕榴槞槨樂樛槿權槹槲槧樅榱樞槭樔槫樊樒櫁樣樓橄樌橲樶橸橇橢橙橦橈樸樢檐檍檠檄檢檣檗蘗檻櫃櫂檸檳檬櫞櫑櫟檪櫚櫪櫻欅蘖櫺欒欖鬱欟欸欷盜欹飮歇歃歉歐歙歔歛歟歡歸歹歿殀殄殃殍殘殕殞殤殪殫殯殲殱殳殷殼毆毋毓毟毬毫毳毯�麾氈氓气氛氤氣汞汕汢汪沂沍沚沁沛汾汨汳沒沐泄泱泓沽泗泅泝沮沱沾沺泛泯泙泪洟衍洶洫洽洸洙洵洳洒洌浣涓浤浚浹浙涎涕濤涅淹渕渊涵淇淦涸淆淬淞淌淨淒淅淺淙淤淕淪淮渭湮渮渙湲湟渾渣湫渫湶湍渟湃渺湎渤滿渝游溂溪溘滉溷滓溽溯滄溲滔滕溏溥滂溟潁漑灌滬滸滾漿滲漱滯漲滌漾漓滷澆潺潸澁澀潯潛濳潭澂潼潘澎澑濂潦澳澣澡澤澹濆澪濟濕濬濔濘濱濮濛瀉瀋濺瀑瀁瀏濾瀛瀚潴瀝瀘瀟瀰瀾瀲灑灣炙炒炯烱炬炸炳炮烟烋烝�烙焉烽焜焙煥煕熈煦煢煌煖煬熏燻熄熕熨熬燗熹熾燒燉燔燎燠燬燧燵燼燹燿爍爐爛爨爭爬爰爲爻爼爿牀牆牋牘牴牾犂犁犇犒犖犢犧犹犲狃狆狄狎狒狢狠狡狹狷倏猗猊猜猖猝猴猯猩猥猾獎獏默獗獪獨獰獸獵獻獺珈玳珎玻珀珥珮珞璢琅瑯琥珸琲琺瑕琿瑟瑙瑁瑜瑩瑰瑣瑪瑶瑾璋璞璧瓊瓏瓔珱瓠瓣瓧瓩瓮瓲瓰瓱瓸瓷甄甃甅甌甎甍甕甓甞甦甬甼畄畍畊畉畛畆畚畩畤畧畫畭畸當疆疇畴疊疉疂疔疚疝疥疣痂疳痃疵疽疸疼疱痍痊痒痙痣痞痾痿�痼瘁痰痺痲痳瘋瘍瘉瘟瘧瘠瘡瘢瘤瘴瘰瘻癇癈癆癜癘癡癢癨癩癪癧癬癰癲癶癸發皀皃皈皋皎皖皓皙皚皰皴皸皹皺盂盍盖盒盞盡盥盧盪蘯盻眈眇眄眩眤眞眥眦眛眷眸睇睚睨睫睛睥睿睾睹瞎瞋瞑瞠瞞瞰瞶瞹瞿瞼瞽瞻矇矍矗矚矜矣矮矼砌砒礦砠礪硅碎硴碆硼碚碌碣碵碪碯磑磆磋磔碾碼磅磊磬磧磚磽磴礇礒礑礙礬礫祀祠祗祟祚祕祓祺祿禊禝禧齋禪禮禳禹禺秉秕秧秬秡秣稈稍稘稙稠稟禀稱稻稾稷穃穗穉穡穢穩龝穰穹穽窈窗窕窘窖窩竈窰�窶竅竄窿邃竇竊竍竏竕竓站竚竝竡竢竦竭竰笂笏笊笆笳笘笙笞笵笨笶筐筺笄筍笋筌筅筵筥筴筧筰筱筬筮箝箘箟箍箜箚箋箒箏筝箙篋篁篌篏箴篆篝篩簑簔篦篥籠簀簇簓篳篷簗簍篶簣簧簪簟簷簫簽籌籃籔籏籀籐籘籟籤籖籥籬籵粃粐粤粭粢粫粡粨粳粲粱粮粹粽糀糅糂糘糒糜糢鬻糯糲糴糶糺紆紂紜紕紊絅絋紮紲紿紵絆絳絖絎絲絨絮絏絣經綉絛綏絽綛綺綮綣綵緇綽綫總綢綯緜綸綟綰緘緝緤緞緻緲緡縅縊縣縡縒縱縟縉縋縢繆繦縻縵縹繃縷�縲縺繧繝繖繞繙繚繹繪繩繼繻纃緕繽辮繿纈纉續纒纐纓纔纖纎纛纜缸缺罅罌罍罎罐网罕罔罘罟罠罨罩罧罸羂羆羃羈羇羌羔羞羝羚羣羯羲羹羮羶羸譱翅翆翊翕翔翡翦翩翳翹飜耆耄耋耒耘耙耜耡耨耿耻聊聆聒聘聚聟聢聨聳聲聰聶聹聽聿肄肆肅肛肓肚肭冐肬胛胥胙胝胄胚胖脉胯胱脛脩脣脯腋隋腆脾腓腑胼腱腮腥腦腴膃膈膊膀膂膠膕膤膣腟膓膩膰膵膾膸膽臀臂膺臉臍臑臙臘臈臚臟臠臧臺臻臾舁舂舅與舊舍舐舖舩舫舸舳艀艙艘艝艚艟艤�艢艨艪艫舮艱艷艸艾芍芒芫芟芻芬苡苣苟苒苴苳苺莓范苻苹苞茆苜茉苙茵茴茖茲茱荀茹荐荅茯茫茗茘莅莚莪莟莢莖茣莎莇莊荼莵荳荵莠莉莨菴萓菫菎菽萃菘萋菁菷萇菠菲萍萢萠莽萸蔆菻葭萪萼蕚蒄葷葫蒭葮蒂葩葆萬葯葹萵蓊葢蒹蒿蒟蓙蓍蒻蓚蓐蓁蓆蓖蒡蔡蓿蓴蔗蔘蔬蔟蔕蔔蓼蕀蕣蕘蕈蕁蘂蕋蕕薀薤薈薑薊薨蕭薔薛藪薇薜蕷蕾薐藉薺藏薹藐藕藝藥藜藹蘊蘓蘋藾藺蘆蘢蘚蘰蘿虍乕虔號虧虱蚓蚣蚩蚪蚋蚌蚶蚯蛄蛆蚰蛉蠣蚫蛔蛞蛩蛬�蛟蛛蛯蜒蜆蜈蜀蜃蛻蜑蜉蜍蛹蜊蜴蜿蜷蜻蜥蜩蜚蝠蝟蝸蝌蝎蝴蝗蝨蝮蝙蝓蝣蝪蠅螢螟螂螯蟋螽蟀蟐雖螫蟄螳蟇蟆螻蟯蟲蟠蠏蠍蟾蟶蟷蠎蟒蠑蠖蠕蠢蠡蠱蠶蠹蠧蠻衄衂衒衙衞衢衫袁衾袞衵衽袵衲袂袗袒袮袙袢袍袤袰袿袱裃裄裔裘裙裝裹褂裼裴裨裲褄褌褊褓襃褞褥褪褫襁襄褻褶褸襌褝襠襞襦襤襭襪襯襴襷襾覃覈覊覓覘覡覩覦覬覯覲覺覽覿觀觚觜觝觧觴觸訃訖訐訌訛訝訥訶詁詛詒詆詈詼詭詬詢誅誂誄誨誡誑誥誦誚誣諄諍諂諚諫諳諧�諤諱謔諠諢諷諞諛謌謇謚諡謖謐謗謠謳鞫謦謫謾謨譁譌譏譎證譖譛譚譫譟譬譯譴譽讀讌讎讒讓讖讙讚谺豁谿豈豌豎豐豕豢豬豸豺貂貉貅貊貍貎貔豼貘戝貭貪貽貲貳貮貶賈賁賤賣賚賽賺賻贄贅贊贇贏贍贐齎贓賍贔贖赧赭赱赳趁趙跂趾趺跏跚跖跌跛跋跪跫跟跣跼踈踉跿踝踞踐踟蹂踵踰踴蹊蹇蹉蹌蹐蹈蹙蹤蹠踪蹣蹕蹶蹲蹼躁躇躅躄躋躊躓躑躔躙躪躡躬躰軆躱躾軅軈軋軛軣軼軻軫軾輊輅輕輒輙輓輜輟輛輌輦輳輻輹轅轂輾轌轉轆轎轗轜�轢轣轤辜辟辣辭辯辷迚迥迢迪迯
邇迴逅迹迺逑逕逡逍逞逖逋逧逶逵逹迸遏遐遑遒逎遉逾遖遘遞遨遯遶隨遲邂遽邁邀邊邉邏邨邯邱邵郢郤扈郛鄂鄒鄙鄲鄰酊酖酘酣酥酩酳酲醋醉醂醢醫醯醪醵醴醺釀釁釉釋釐釖釟釡釛釼釵釶鈞釿鈔鈬鈕鈑鉞鉗鉅鉉鉤鉈銕鈿鉋鉐銜銖銓銛鉚鋏銹銷鋩錏鋺鍄錮錙錢錚錣錺錵錻鍜鍠鍼鍮鍖鎰鎬鎭鎔鎹鏖鏗鏨鏥鏘鏃鏝鏐鏈鏤鐚鐔鐓鐃鐇鐐鐶鐫鐵鐡鐺鑁鑒鑄鑛鑠鑢鑞鑪鈩鑰鑵鑷鑽鑚鑼鑾钁鑿閂閇閊閔閖閘閙�閠閨閧閭閼閻閹閾闊濶闃闍闌闕闔闖關闡闥闢阡阨阮阯陂陌陏陋陷陜陞陝陟陦陲陬隍隘隕隗險隧隱隲隰隴隶隸隹雎雋雉雍襍雜霍雕雹霄霆霈霓霎霑霏霖霙霤霪霰霹霽霾靄靆靈靂靉靜靠靤靦靨勒靫靱靹鞅靼鞁靺鞆鞋鞏鞐鞜鞨鞦鞣鞳鞴韃韆韈韋韜韭齏韲竟韶韵頏頌頸頤頡頷頽顆顏顋顫顯顰顱顴顳颪颯颱颶飄飃飆飩飫餃餉餒餔餘餡餝餞餤餠餬餮餽餾饂饉饅饐饋饑饒饌饕馗馘馥馭馮馼駟駛駝駘駑駭駮駱駲駻駸騁騏騅駢騙騫騷驅驂驀驃�騾驕驍驛驗驟驢驥驤驩驫驪骭骰骼髀髏髑髓體髞髟髢髣髦髯髫髮髴髱髷髻鬆鬘鬚鬟鬢鬣鬥鬧鬨鬩鬪鬮鬯鬲魄魃魏魍魎魑魘魴鮓鮃鮑鮖鮗鮟鮠鮨鮴鯀鯊鮹鯆鯏鯑鯒鯣鯢鯤鯔鯡鰺鯲鯱鯰鰕鰔鰉鰓鰌鰆鰈鰒鰊鰄鰮鰛鰥鰤鰡鰰鱇鰲鱆鰾鱚鱠鱧鱶鱸鳧鳬鳰鴉鴈鳫鴃鴆鴪鴦鶯鴣鴟鵄鴕鴒鵁鴿鴾鵆鵈鵝鵞鵤鵑鵐鵙鵲鶉鶇鶫鵯鵺鶚鶤鶩鶲鷄鷁鶻鶸鶺鷆鷏鷂鷙鷓鷸鷦鷭鷯鷽鸚鸛鸞鹵鹹鹽麁麈麋麌麒麕麑麝麥麩麸麪麭靡黌黎黏黐黔黜點黝黠黥黨黯�黴黶黷黹黻黼黽鼇鼈皷鼕鼡鼬鼾齊齒齔齣齟齠齡齦齧齬齪齷齲齶龕龜龠堯槇遙瑤凜熙����������������������������������������������������������������������������������������纊褜鍈銈蓜俉炻昱棈鋹曻彅丨仡仼伀伃伹佖侒侊侚侔俍偀倢俿倞偆偰偂傔僴僘兊兤冝冾凬刕劜劦勀勛匀匇匤卲厓厲叝﨎咜咊咩哿喆坙坥垬埈埇﨏�塚增墲夋奓奛奝奣妤妺孖寀甯寘寬尞岦岺峵崧嵓﨑嵂嵭嶸嶹巐弡弴彧德忞恝悅悊惞惕愠惲愑愷愰憘戓抦揵摠撝擎敎昀昕昻昉昮昞昤晥晗晙晴晳暙暠暲暿曺朎朗杦枻桒柀栁桄棏﨓楨﨔榘槢樰橫橆橳橾櫢櫤毖氿汜沆汯泚洄涇浯涖涬淏淸淲淼渹湜渧渼溿澈澵濵瀅瀇瀨炅炫焏焄煜煆煇凞燁燾犱犾猤猪獷玽珉珖珣珒琇珵琦琪琩琮瑢璉璟甁畯皂皜皞皛皦益睆劯砡硎硤硺礰礼神祥禔福禛竑竧靖竫箞精絈絜綷綠緖繒罇羡羽茁荢荿菇菶葈蒴蕓蕙�蕫﨟薰蘒﨡蠇裵訒訷詹誧誾諟諸諶譓譿賰賴贒赶﨣軏﨤逸遧郞都鄕鄧釚釗釞釭釮釤釥鈆鈐鈊鈺鉀鈼鉎鉙鉑鈹鉧銧鉷鉸鋧鋗鋙鋐﨧鋕鋠鋓錥錡鋻﨨錞鋿錝錂鍰鍗鎤鏆鏞鏸鐱鑅鑈閒隆﨩隝隯霳霻靃靍靏靑靕顗顥飯飼餧館馞驎髙髜魵魲鮏鮱鮻鰀鵰鵫鶴鸙黑��ⅰⅱⅲⅳⅴⅵⅶⅷⅸⅹ¬¦'"ⅰⅱⅲⅳⅴⅵⅶⅷⅸⅹⅠⅡⅢⅣⅤⅥⅦⅧⅨⅩ¬¦'"㈱№℡∵纊褜鍈銈蓜俉炻昱棈鋹曻彅丨仡仼伀伃伹佖侒侊侚侔俍偀倢俿倞偆偰偂傔僴僘兊�兤冝冾凬刕劜劦勀勛匀匇匤卲厓厲叝﨎咜咊咩哿喆坙坥垬埈埇﨏塚增墲夋奓奛奝奣妤妺孖寀甯寘寬尞岦岺峵崧嵓﨑嵂嵭嶸嶹巐弡弴彧德忞恝悅悊惞惕愠惲愑愷愰憘戓抦揵摠撝擎敎昀昕昻昉昮昞昤晥晗晙晴晳暙暠暲暿曺朎朗杦枻桒柀栁桄棏﨓楨﨔榘槢樰橫橆橳橾櫢櫤毖氿汜沆汯泚洄涇浯涖涬淏淸淲淼渹湜渧渼溿澈澵濵瀅瀇瀨炅炫焏焄煜煆煇凞燁燾犱犾猤猪獷玽珉珖珣珒琇珵琦琪琩琮瑢璉璟甁畯皂皜皞皛皦益睆劯砡硎硤硺礰礼神�祥禔福禛竑竧靖竫箞精絈絜綷綠緖繒罇羡羽茁荢荿菇菶葈蒴蕓蕙蕫﨟薰蘒﨡蠇裵訒訷詹誧誾諟諸諶譓譿賰賴贒赶﨣軏﨤逸遧郞都鄕鄧釚釗釞釭釮釤釥鈆鈐鈊鈺鉀鈼鉎鉙鉑鈹鉧銧鉷鉸鋧鋗鋙鋐﨧鋕鋠鋓錥錡鋻﨨錞鋿錝錂鍰鍗鎤鏆鏞鏸鐱鑅鑈閒隆﨩隝隯霳霻靃靍靏靑靕顗顥飯飼餧館馞驎髙髜魵魲鮏鮱鮻鰀鵰鵫鶴鸙黑���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_ibm437.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_ibm437.cp.go new file mode 100644 index 000000000..ecd0631ef --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_ibm437.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("ibm437.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007fÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜ¢£¥₧ƒáíóúñѪº¿⌐¬½¼¡«»░▒▓│┤╡╢╖╕╣║╗╝╜╛┐└┴┬├─┼╞╟╚╔╩╦╠═╬╧╨╤╥╙╘╒╓╫╪┘┌█▄▌▐▀αßΓπΣσµτΦΘΩδ∞∅∈∩≡±≥≤⌠⌡÷≈°•·√ⁿ²∎\u00a0") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_ibm850.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_ibm850.cp.go new file mode 100644 index 000000000..ea833fa4c --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_ibm850.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("ibm850.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007fÇüéâäàåçêëèïîìÄÅÉæÆôöòûùÿÖÜø£Ø׃áíóúñѪº¿®¬½¼¡«»░▒▓│┤ÁÂÀ©╣║╗╝¢¥┐└┴┬├─┼ãÃ╚╔╩╦╠═╬¤ðÐÊËÈıÍÎÏ┘┌█▄¦Ì▀ÓßÔÒõÕµþÞÚÛÙýݯ´\u00ad±‗¾¶§÷¸°¨·¹³²∎\u00a0") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_ibm866.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_ibm866.cp.go new file mode 100644 index 000000000..338c99c93 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_ibm866.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("ibm866.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007fАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмноп������������������������������������������������рстуфхцчшщъыьэюяЁё��������������") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-1.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-1.cp.go new file mode 100644 index 000000000..dab316145 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-1.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-1.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0¡¢£¤¥¦§¨©ª«¬\u00ad®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-10.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-10.cp.go new file mode 100644 index 000000000..252aef1e1 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-10.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-10.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0ĄĒĢĪĨĶ§ĻĐŠŦŽ\u00adŪŊ°ąēģīĩķ·ļĐšŧž—ūŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎÏÐŅŌÓÔÕÖŨØŲÚÛÜÝÞßāáâãäåæįčéęëėíîïðņōóôõöũøųúûüýþĸ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-15.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-15.cp.go new file mode 100644 index 000000000..26e0764ac --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-15.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-15.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0¡¢£€¥Š§š©ª«¬\u00ad®¯°±²³Žµ¶·ž¹º»ŒœŸ¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-2.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-2.cp.go new file mode 100644 index 000000000..d8a5f95ed --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-2.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-2.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0Ą˘Ł¤ĽŚ§¨ŠŞŤŹ\u00adŽŻ°ą˛ł´ľśˇ¸šşťź˝žżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-3.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-3.cp.go new file mode 100644 index 000000000..d632e8707 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-3.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-3.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0Ħ˘£¤�Ĥ§¨İŞĞĴ\u00ad�Ż°ħ²³´µĥ·¸ışğĵ½�żÀÁÂ�ÄĊĈÇÈÉÊËÌÍÎÏ�ÑÒÓÔĠÖ×ĜÙÚÛÜŬŜßàáâ�äċĉçèéêëìíîï�ñòóôġö÷ĝùúûüŭŝ˙") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-4.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-4.cp.go new file mode 100644 index 000000000..f9874c17f --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-4.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-4.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0ĄĸŖ¤ĨĻ§¨ŠĒĢŦ\u00adŽ¯°ą˛ŗ´ĩļˇ¸šēģŧŊžŋĀÁÂÃÄÅÆĮČÉĘËĖÍÎĪĐŅŌĶÔÕÖ×ØŲÚÛÜŨŪßāáâãäåæįčéęëėíîīđņōķôõö÷øųúûüũū˙") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-5.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-5.cp.go new file mode 100644 index 000000000..0ab027a0a --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-5.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-5.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0ЁЂЃЄЅІЇЈЉЊЋЌ\u00adЎЏАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя№ёђѓєѕіїјљњћќ§ўџ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-6.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-6.cp.go new file mode 100644 index 000000000..b1eaf9677 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-6.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-6.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0���¤�������،\u00ad�������������؛���؟�ءآأؤإئابةتثجحخدذرزسشصضطظعغ�����ـفقكلمنهوىيًٌٍَُِّْ�������������") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-7.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-7.cp.go new file mode 100644 index 000000000..1057692b8 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-7.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-7.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0‘’£��¦§¨©�«¬\u00ad�―°±²³΄΅Ά·ΈΉΊ»Ό½ΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡ�ΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύώ�") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-8.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-8.cp.go new file mode 100644 index 000000000..e0e035aaa --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-8.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-8.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0�¢£¤¥¦§¨©×«¬\u00ad®‾°±²³´µ¶·¸¹÷»¼½¾��������������������������������‗אבגדהוזחטיךכלםמןנסעףפץצקרשת�����") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-9.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-9.cp.go new file mode 100644 index 000000000..1c1a5bc68 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_iso-8859-9.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("iso-8859-9.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f\u0080\u0081\u0082\u0083\u0084\u0085\u0086\u0087\u0088\u0089\u008a\u008b\u008c\u008d\u008e\u008f\u0090\u0091\u0092\u0093\u0094\u0095\u0096\u0097\u0098\u0099\u009a\u009b\u009c\u009d\u009e\u009f\u00a0¡¢£¤¥¦§¨©ª«¬\u00ad®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏĞÑÒÓÔÕÖ×ØÙÚÛÜİŞßàáâãäåæçèéêëìíîïğñòóôõö÷øùúûüışÿ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_jisx0201kana.dat.go b/vendor/github.com/rogpeppe/go-charset/data/data_jisx0201kana.dat.go new file mode 100644 index 000000000..a26c174db --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_jisx0201kana.dat.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("jisx0201kana.dat", func() (io.ReadCloser, error) { + r := strings.NewReader("。「」、・ヲァィゥェォャュョッーアイウエオカキクケコサシスセソタチツテトナニヌネノハヒフヘホマミムメモヤユヨラリルレロワン゙゚") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_koi8-r.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_koi8-r.cp.go new file mode 100644 index 000000000..831fae5c4 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_koi8-r.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("koi8-r.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f─│┌┐└┘├┤┬┴┼▀▄█▌▐░▒▓⌠■∙√≈≤≥\u00a0⌡°²·÷═║╒ё╓╔╕╖╗╘╙╚╛╜╝╞╟╠╡Ё╢╣╤╥╦╧╨╩╪╫╬©юабцдефгхийклмнопярстужвьызшэщчъЮАБЦДЕФГХИЙКЛМНОПЯРСТУЖВЬЫЗШЭЩЧЪ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_windows-1250.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_windows-1250.cp.go new file mode 100644 index 000000000..5147af073 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_windows-1250.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. 
+ +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("windows-1250.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f€�‚�„…†‡�‰Š‹ŚŤŽŹ�‘’“”•–—�™š›śťžź\u00a0ˇ˘Ł¤Ą¦§¨©Ş«¬\u00ad®Ż°±˛ł´µ¶·¸ąş»Ľ˝ľżŔÁÂĂÄĹĆÇČÉĘËĚÍÎĎĐŃŇÓÔŐÖ×ŘŮÚŰÜÝŢßŕáâăäĺćçčéęëěíîďđńňóôőö÷řůúűüýţ˙") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_windows-1251.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_windows-1251.cp.go new file mode 100644 index 000000000..2722b19b8 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_windows-1251.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("windows-1251.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007fЂЃ‚ѓ„…†‡�‰Љ‹ЊЌЋЏђ‘’“”•–—�™љ›њќћџ\u00a0ЎўЈ¤Ґ¦§Ё©Є«¬\u00ad®Ї°±Ііґµ¶·ё№є»јЅѕїАБВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюя") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/data_windows-1252.cp.go b/vendor/github.com/rogpeppe/go-charset/data/data_windows-1252.cp.go new file mode 100644 index 000000000..bf3b67e02 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/data_windows-1252.cp.go @@ -0,0 +1,18 @@ +// This file is automatically generated by generate-charset-data. +// Do not hand-edit. + +package data + +import ( + "github.com/rogpeppe/go-charset/charset" + "io" + "io/ioutil" + "strings" +) + +func init() { + charset.RegisterDataFile("windows-1252.cp", func() (io.ReadCloser, error) { + r := strings.NewReader("\x00\x01\x02\x03\x04\x05\x06\a\b\t\n\v\f\r\x0e\x0f\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f !\"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\u007f€�‚ƒ„…†‡ˆ‰Š‹Œ�Ž��‘’“”•–—˜™š›œ�žŸ\u00a0¡¢£¤¥¦§¨©ª«¬\u00ad®¯°±²³´µ¶·¸¹º»¼½¾¿ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖ×ØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõö÷øùúûüýþÿ") + return ioutil.NopCloser(r), nil + }) +} diff --git a/vendor/github.com/rogpeppe/go-charset/data/doc.go b/vendor/github.com/rogpeppe/go-charset/data/doc.go new file mode 100644 index 000000000..630a83d53 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/doc.go @@ -0,0 +1,6 @@ +// The data package embeds all the charset +// data files as Go data. It registers the data with the charset +// package as a side effect of its import. 
To use: +// +// import _ "code.google.com/p/go-charset" +package data diff --git a/vendor/github.com/rogpeppe/go-charset/data/generate.go b/vendor/github.com/rogpeppe/go-charset/data/generate.go new file mode 100644 index 000000000..156ee2c63 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-charset/data/generate.go @@ -0,0 +1,97 @@ +// +build ignore + +// go run generate.go && go fmt + +// The generate-charset-data command generates the Go source code +// for code.google.com/p/go-charset/data from the data files +// found in code.google.com/p/go-charset/datafiles. +// It should be run in the go-charset root directory. +// The resulting Go files will need gofmt'ing. +package main + +import ( + "fmt" + "io/ioutil" + "os" + "path/filepath" + "text/template" +) + +type info struct { + Path string +} + +var tfuncs = template.FuncMap{ + "basename": func(s string) string { + return filepath.Base(s) + }, + "read": func(path string) ([]byte, error) { + return ioutil.ReadFile(path) + }, +} + +var tmpl = template.Must(template.New("").Funcs(tfuncs).Parse(` + // This file is automatically generated by generate-charset-data. + // Do not hand-edit. + + package data + import ( + "code.google.com/p/go-charset/charset" + "io" + "io/ioutil" + "strings" + ) + + func init() { + charset.RegisterDataFile({{basename .Path | printf "%q"}}, func() (io.ReadCloser, error) { + r := strings.NewReader({{read .Path | printf "%q"}}) + return ioutil.NopCloser(r), nil + }) + } +`)) + +var docTmpl = template.Must(template.New("").Funcs(tfuncs).Parse(` + // This file is automatically generated by generate-charset-data. + // Do not hand-edit. + + // The {{basename .Package}} package embeds all the charset + // data files as Go data. It registers the data with the charset + // package as a side effect of its import. To use: + // + // import _ "code.google.com/p/go-charset" + package {{basename .Package}} +`)) + +func main() { + dataDir := filepath.Join("..", "datafiles") + d, err := os.Open(dataDir) + if err != nil { + fatalf("%v", err) + } + names, err := d.Readdirnames(0) + if err != nil { + fatalf("cannot read datafiles dir: %v", err) + } + for _, name := range names { + writeFile("data_"+name+".go", tmpl, info{ + Path: filepath.Join(dataDir, name), + }) + } +} + +func writeFile(name string, t *template.Template, data interface{}) { + w, err := os.Create(name) + if err != nil { + fatalf("cannot create output file: %v", err) + } + defer w.Close() + err = t.Execute(w, data) + if err != nil { + fatalf("template execute %q: %v", name, err) + } +} + +func fatalf(f string, a ...interface{}) { + fmt.Fprintf(os.Stderr, "%s\n", fmt.Sprintf(f, a...)) + os.Exit(2) +} diff --git a/vendor/github.com/rpoletaev/supervisord/Gopkg.lock b/vendor/github.com/rpoletaev/supervisord/Gopkg.lock new file mode 100644 index 000000000..0bbb9ad99 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/Gopkg.lock @@ -0,0 +1,63 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
+ + +[[projects]] + name = "github.com/sirupsen/logrus" + packages = ["."] + revision = "a3f95b5c423586578a4e099b11a46c2479628cac" + version = "1.0.2" + +[[projects]] + name = "github.com/go-ini/ini" + packages = ["."] + revision = "20b96f641a5ea98f2f8619ff4f3e061cff4833bd" + version = "v1.28.2" + +[[projects]] + name = "github.com/gorilla/rpc" + packages = ["."] + revision = "22c016f3df3febe0c1f6727598b6389507e03a18" + version = "v1.1.0" + +[[projects]] + name = "github.com/jessevdk/go-flags" + packages = ["."] + revision = "96dc06278ce32a0e9d957d590bb987c81ee66407" + version = "v1.3.0" + +[[projects]] + branch = "master" + name = "github.com/kardianos/osext" + packages = ["."] + revision = "ae77be60afb1dcacde03767a8c37337fad28ac14" + +[[projects]] + branch = "master" + name = "github.com/rpoletaev/gorilla-xmlrpc" + packages = ["xml"] + revision = "d37a0d21ebabd0d9b59cd868499d6af4673ee9fe" + +[[projects]] + branch = "master" + name = "github.com/rogpeppe/go-charset" + packages = ["charset","data"] + revision = "e9ff06f347d3f5d0013d59ed83754f0e88de10d4" + +[[projects]] + name = "github.com/sevlyar/go-daemon" + packages = ["."] + revision = "1ae26ef5036ad04968706917222a23c535673d8c" + version = "v0.1.1" + +[[projects]] + branch = "master" + name = "golang.org/x/sys" + packages = ["unix"] + revision = "c84c1ab9fd18cdd4c23dd021c10f5f46dea95e46" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + inputs-digest = "d765dd3cd60e45504031ba3cbd8f4b516e314572e0047939f23118993a985b67" + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/vendor/github.com/rpoletaev/supervisord/Gopkg.toml b/vendor/github.com/rpoletaev/supervisord/Gopkg.toml new file mode 100644 index 000000000..b9b3db7f3 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/Gopkg.toml @@ -0,0 +1,46 @@ + +# Gopkg.toml example +# +# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md +# for detailed Gopkg.toml documentation. 
+# +# required = ["github.com/user/thing/cmd/thing"] +# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"] +# +# [[constraint]] +# name = "github.com/user/project" +# version = "1.0.0" +# +# [[constraint]] +# name = "github.com/user/project2" +# branch = "dev" +# source = "github.com/myfork/project2" +# +# [[override]] +# name = "github.com/x/y" +# version = "2.4.0" + + +[[constraint]] + name = "github.com/sirupsen/logrus" + version = "1.0.2" + +[[constraint]] + name = "github.com/go-ini/ini" + version = "1.28.2" + +[[constraint]] + name = "github.com/gorilla/rpc" + version = "1.1.0" + +[[constraint]] + name = "github.com/jessevdk/go-flags" + version = "1.3.0" + +[[constraint]] + branch = "master" + name = "github.com/rpoletaev/gorilla-xmlrpc" + +[[constraint]] + name = "github.com/sevlyar/go-daemon" + version = "0.1.1" diff --git a/vendor/github.com/rpoletaev/supervisord/LICENSE b/vendor/github.com/rpoletaev/supervisord/LICENSE new file mode 100644 index 000000000..6713cd967 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Steven Ou + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/rpoletaev/supervisord/README.md b/vendor/github.com/rpoletaev/supervisord/README.md new file mode 100644 index 000000000..2d4384a5e --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/README.md @@ -0,0 +1,161 @@ +# Why this project? + +The python script supervisord is a powerful tool used by a lot of guys to manage the processes. I like the tool supervisord also. + +But this tool requires us to install the big python environment. In some situation, for example in the docker environment, the python is too big for us. + +In this project, the supervisord is re-implemented in go-lang. The compiled supervisord is very suitable for these environment that the python is not installed. + +# Compile the supervisord + +Before compiling the supervisord, make sure the go-lang is installed in your environement. 
+ +To compile the go-lang version supervisord, run following commands: + +```shell +$ mkdir ~/go-supervisor +$ export GOPATH=~/go-supervisor +$ go get -u github.com/ochinchina/supervisord +``` + +# Run the supervisord + +After the supervisord binary is generated, create a supervisord configuration file and start the supervisord like below: + +```shell +$ cat supervisor.conf +[program:test] +command = /your/program args +$ supervisord -c supervisor.conf +``` +# Run as daemon +Add the inet interface in your configuration: +```ini +[inet_http_server] +port=127.0.0.1:9001 +``` +then run +```shell +$ supervisord -c supervisor.conf -d +``` +In order to controll the daemon, you can use `$ supervisord ctl` subcommand, available commands are: `status`, `start`, `stop`, `shutdown`, `reload`. + +```shell +$ supervisord ctl status +$ supervisord ctl stop +$ supervisord ctl stop all +$ supervisord ctl start +$ supervisord ctl start all +$ supervisord ctl shutdown +$ supervisord ctl reload +$ supervisord ctl signal ... +$ supervisord ctl signal all +``` + +the URL of supervisord in the "supervisor ctl" subcommand is dected in following order: + +- check if option -s or --serverurl is present, use this url +- check if -c option is present and the "serverurl" in "supervisorctl" section is present, use the "serverurl" in section "supervisorctl" +- return http://localhost:9001 + +# Check the version + +command "version" will show the current supervisor version. + +```shell +$ supervisord version +``` + +# Supported features + +## http server + +the unix socket & TCP http server is supported. Basic auth is supported. + +The unix socket setting is in the "unix_http_server" section. +The TCP http server setting is in "inet_http_server" section. + +If both "inet_http_server" and "unix_http_server" is not configured in the configuration file, no http server will be started. + +## supervisord information + +The log & pid of supervisord process is supported by section "supervisord" setting. + +## program + +the following features is supported in the "program:x" section: + +- program command +- process name +- numprocs +- numprocs_start +- autostart +- startsecs +- startretries +- autorestart +- exitcodes +- stopsignal +- stopwaitsecs +- stdout_logfile +- stdout_logfile_maxbytes +- stdout_logfile_backups +- redirect_stderr +- stderr_logfile +- stderr_logfile_maxbytes +- stderr_logfile_backups +- environment +- priority +- user +- directory + +### program extends + +Following new keys are supported by the [program:xxx] section: + +- depends_on: define program depends information. If program A depends on program B, C, the program B, C will be started before program A. Example: + +```ini +[program:A] +depends_on = B, C + +[program:B] +... +[program:C] +... +``` + +- user: user in the section "program:xxx" now is extended to support group with format "user[:group]". So "user" can be configured as: + +```ini +[program:xxx] +user = user_name +... +``` +or +```ini +[program:xxx] +user = user_name:group_name +... +``` +## Group +the "group" section is supported and you can set "programs" item + +## Events + +the supervisor 3.x defined events are supported partially. 
Now it supports following events: + +- all process state related events +- process communication event +- remote communication event +- tick related events +- process log related events + +# The MIT License (MIT) + +Copyright (c) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/rpoletaev/supervisord/circle.yml b/vendor/github.com/rpoletaev/supervisord/circle.yml new file mode 100644 index 000000000..41481db33 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/circle.yml @@ -0,0 +1,9 @@ +deployment: + master: + branch: [master] + commands: + - go version + - go get github.com/mitchellh/gox + - go get github.com/tcnksm/ghr + - gox -output "dist/supervisord_{{.OS}}_{{.Arch}}" -osarch="linux/amd64 linux/386 darwin/amd64" + - ghr -t $GITHUB_TOKEN -u $CIRCLE_PROJECT_USERNAME -r $CIRCLE_PROJECT_REPONAME --replace v1.0.0 dist/ \ No newline at end of file diff --git a/vendor/github.com/rpoletaev/supervisord/config/config.go b/vendor/github.com/rpoletaev/supervisord/config/config.go new file mode 100644 index 000000000..b1e9a6076 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/config/config.go @@ -0,0 +1,558 @@ +package config + +import ( + "bytes" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "regexp" + "strconv" + "strings" + + ini "github.com/ochinchina/go-ini" + log "github.com/sirupsen/logrus" +) + +type ConfigEntry struct { + ConfigDir string + Group string + Name string + keyValues map[string]string +} + +func (c *ConfigEntry) IsProgram() bool { + return strings.HasPrefix(c.Name, "program:") +} + +func (c *ConfigEntry) GetProgramName() string { + if strings.HasPrefix(c.Name, "program:") { + return c.Name[len("program:"):] + } + return "" +} + +func (c *ConfigEntry) IsEventListener() bool { + return strings.HasPrefix(c.Name, "eventlistener:") +} + +func (c *ConfigEntry) GetEventListenerName() string { + if strings.HasPrefix(c.Name, "eventlistener:") { + return c.Name[len("eventlistener:"):] + } + return "" +} + +func (c *ConfigEntry) IsGroup() bool { + return strings.HasPrefix(c.Name, "group:") +} + +// get the group name if this entry is group +func (c *ConfigEntry) GetGroupName() string { + if strings.HasPrefix(c.Name, "group:") { + return c.Name[len("group:"):] + } + return "" +} + +// get the programs from the group +func (c *ConfigEntry) GetPrograms() []string { + if c.IsGroup() { + r := c.GetStringArray("programs", ",") + for i, p := range r { + r[i] = strings.TrimSpace(p) + } + return r + } + return make([]string, 0) +} + +// add key value entry +func (c *ConfigEntry) 
AddKeyValue(key, value string) { + c.keyValues[key] = value +} + +func (c *ConfigEntry) setGroup(group string) { + c.Group = group +} + +// dump the configuration as string +func (c *ConfigEntry) String() string { + buf := bytes.NewBuffer(make([]byte, 0)) + fmt.Fprintf(buf, "configDir=%s\n", c.ConfigDir) + fmt.Fprintf(buf, "group=%s\n", c.Group) + for k, v := range c.keyValues { + fmt.Fprintf(buf, "%s=%s\n", k, v) + } + return buf.String() + +} + +type Config struct { + configFile string + //mapping between the section name and the configure + entries map[string]*ConfigEntry + + ProgramGroup *ProcessGroup +} + +func NewConfigEntry(configDir string) *ConfigEntry { + return &ConfigEntry{configDir, "", "", make(map[string]string)} +} + +func NewConfig(configFile string) *Config { + return &Config{configFile, make(map[string]*ConfigEntry), NewProcessGroup()} +} + +//create a new entry or return the already-exist entry +func (c *Config) createEntry(name string, configDir string) *ConfigEntry { + entry, ok := c.entries[name] + + if !ok { + entry = NewConfigEntry(configDir) + c.entries[name] = entry + } + return entry +} + +// +// return the loaded programs +func (c *Config) Load() ([]string, error) { + ini := ini.NewIni() + c.ProgramGroup = NewProcessGroup() + ini.LoadFile(c.configFile) + + includeFiles := c.getIncludeFiles(ini) + for _, f := range includeFiles { + ini.LoadFile(f) + } + return c.parse(ini), nil +} + +func (c *Config) getIncludeFiles(cfg *ini.Ini) []string { + result := make([]string, 0) + if includeSection, err := cfg.GetSection("include"); err == nil { + key, err := includeSection.GetValue("files") + if err == nil { + env := NewStringExpression("here", c.GetConfigFileDir()) + files := strings.Fields(key) + for _, f_raw := range files { + dir := c.GetConfigFileDir() + f, err := env.Eval(f_raw) + if err != nil { + continue + } + if filepath.IsAbs(f) { + dir = filepath.Dir(f) + } + fileInfos, err := ioutil.ReadDir(dir) + if err == nil { + goPattern := toRegexp(filepath.Base(f)) + for _, fileInfo := range fileInfos { + if matched, err := regexp.MatchString(goPattern, fileInfo.Name()); matched && err == nil { + result = append(result, filepath.Join(dir, fileInfo.Name())) + } + } + } + + } + } + } + return result + +} + +func (c *Config) parse(cfg *ini.Ini) []string { + c.parseGroup(cfg) + loaded_programs := c.parseProgram(cfg) + + //parse non-group,non-program and non-eventlistener sections + for _, section := range cfg.Sections() { + if !strings.HasPrefix(section.Name, "group:") && !strings.HasPrefix(section.Name, "program:") && !strings.HasPrefix(section.Name, "eventlistener:") { + entry := c.createEntry(section.Name, c.GetConfigFileDir()) + c.entries[section.Name] = entry + entry.parse(section) + } + } + return loaded_programs +} + +func (c *Config) GetConfigFileDir() string { + return filepath.Dir(c.configFile) +} + +//convert supervisor file pattern to the go regrexp +func toRegexp(pattern string) string { + tmp := strings.Split(pattern, ".") + for i, t := range tmp { + s := strings.Replace(t, "*", ".*", -1) + tmp[i] = strings.Replace(s, "?", ".", -1) + } + return strings.Join(tmp, "\\.") +} + +//get the unix_http_server section +func (c *Config) GetUnixHttpServer() (*ConfigEntry, bool) { + entry, ok := c.entries["unix_http_server"] + + return entry, ok +} + +//get the supervisord section +func (c *Config) GetSupervisord() (*ConfigEntry, bool) { + entry, ok := c.entries["supervisord"] + return entry, ok +} + +// Get the inet_http_server configuration section +func (c *Config) 
GetInetHttpServer() (*ConfigEntry, bool) { + entry, ok := c.entries["inet_http_server"] + return entry, ok +} + +func (c *Config) GetSupervisorctl() (*ConfigEntry, bool) { + entry, ok := c.entries["supervisorctl"] + return entry, ok +} +func (c *Config) GetEntries(filterFunc func(entry *ConfigEntry) bool) []*ConfigEntry { + result := make([]*ConfigEntry, 0) + for _, entry := range c.entries { + if filterFunc(entry) { + result = append(result, entry) + } + } + return result +} +func (c *Config) GetGroups() []*ConfigEntry { + return c.GetEntries(func(entry *ConfigEntry) bool { + return entry.IsGroup() + }) +} + +func (c *Config) GetPrograms() []*ConfigEntry { + programs := c.GetEntries(func(entry *ConfigEntry) bool { + return entry.IsProgram() + }) + + return sortProgram(programs) +} + +func (c *Config) GetEventListeners() []*ConfigEntry { + eventListeners := c.GetEntries(func(entry *ConfigEntry) bool { + return entry.IsEventListener() + }) + + return eventListeners +} + +func (c *Config) GetProgramNames() []string { + result := make([]string, 0) + programs := c.GetPrograms() + + programs = sortProgram(programs) + for _, entry := range programs { + result = append(result, entry.GetProgramName()) + } + return result +} + +//return the proram configure entry or nil +func (c *Config) GetProgram(name string) *ConfigEntry { + for _, entry := range c.entries { + if entry.IsProgram() && entry.GetProgramName() == name { + return entry + } + } + return nil +} + +// get value of key as bool +func (c *ConfigEntry) GetBool(key string, defValue bool) bool { + value, ok := c.keyValues[key] + + if ok { + b, err := strconv.ParseBool(value) + if err == nil { + return b + } + } + return defValue +} + +// check if has parameter +func (c *ConfigEntry) HasParameter(key string) bool { + _, ok := c.keyValues[key] + return ok +} + +func toInt(s string, factor int, defValue int) int { + i, err := strconv.Atoi(s) + if err == nil { + return i * factor + } + return defValue +} + +// get the value of the key as int +func (c *ConfigEntry) GetInt(key string, defValue int) int { + value, ok := c.keyValues[key] + + if ok { + return toInt(value, 1, defValue) + } + return defValue +} + +// get the value of key as environment setting. 
An enviroment string example: +// environment = A="env 1",B="this is a test" +func (c *ConfigEntry) GetEnv(key string) []string { + value, ok := c.keyValues[key] + env := make([]string, 0) + + if ok { + start := 0 + n := len(value) + var i int + for { + for i = start; i < n && value[i] != '='; { + i++ + } + key := value[start:i] + start = i + 1 + if value[start] == '"' { + for i = start + 1; i < n && value[i] != '"'; { + i++ + } + if i < n { + env = append(env, fmt.Sprintf("%s=%s", key, value[start+1:i])) + } + if i+1 < n && value[i+1] == ',' { + start = i + 2 + } else { + break + } + } else { + for i = start; i < n && value[i] != ','; { + i++ + } + if i < n { + env = append(env, fmt.Sprintf("%s=%s", key, value[start:i])) + start = i + 1 + } else { + env = append(env, fmt.Sprintf("%s=%s", key, value[start:])) + break + } + } + } + } + + result := make([]string, 0) + for i := 0; i < len(env); i++ { + tmp, err := NewStringExpression("program_name", c.GetProgramName(), + "process_num", c.GetString("process_num", "0"), + "group_name", c.GetGroupName(), + "here", c.ConfigDir).Eval(env[i]) + if err == nil { + result = append(result, tmp) + } + } + return result +} + +//get the value of key as string +func (c *ConfigEntry) GetString(key string, defValue string) string { + s, ok := c.keyValues[key] + + if ok { + env := NewStringExpression("here", c.ConfigDir) + rep_s, err := env.Eval(s) + if err == nil { + return rep_s + } else { + log.WithFields(log.Fields{ + log.ErrorKey: err, + "program": c.GetProgramName(), + "key": key, + }).Warn("Unable to parse expression") + } + } + return defValue +} + +//get the value of key as string and attempt to parse it with StringExpression +func (c *ConfigEntry) GetStringExpression(key string, defValue string) string { + s, ok := c.keyValues[key] + if !ok || s == "" { + return "" + } + + host_name, err := os.Hostname() + if err != nil { + host_name = "Unknown" + } + result, err := NewStringExpression("program_name", c.GetProgramName(), + "process_num", c.GetString("process_num", "0"), + "group_name", c.GetGroupName(), + "here", c.ConfigDir, + "host_node_name", host_name).Eval(s) + + if err != nil { + log.WithFields(log.Fields{ + log.ErrorKey: err, + "program": c.GetProgramName(), + "key": key, + }).Warn("unable to parse expression") + return s + } + + return result +} + +func (c *ConfigEntry) GetStringArray(key string, sep string) []string { + s, ok := c.keyValues[key] + + if ok { + return strings.Split(s, sep) + } + return make([]string, 0) +} + +// get the value of key as the bytes setting. 
+// +// logSize=1MB +// logSize=1GB +// logSize=1KB +// logSize=1024 +// +func (c *ConfigEntry) GetBytes(key string, defValue int) int { + v, ok := c.keyValues[key] + + if ok { + if len(v) > 2 { + lastTwoBytes := v[len(v)-2:] + if lastTwoBytes == "MB" { + return toInt(v[:len(v)-2], 1024*1024, defValue) + } else if lastTwoBytes == "GB" { + return toInt(v[:len(v)-2], 1024*1024*1024, defValue) + } else if lastTwoBytes == "KB" { + return toInt(v[:len(v)-2], 1024, defValue) + } + } + return toInt(v, 1, defValue) + } + return defValue +} + +func (c *ConfigEntry) parse(section *ini.Section) { + c.Name = section.Name + for _, key := range section.Keys() { + c.keyValues[key.Name()] = key.ValueWithDefault("") + } +} + +func (c *Config) parseGroup(cfg *ini.Ini) { + + //parse the group at first + for _, section := range cfg.Sections() { + if strings.HasPrefix(section.Name, "group:") { + entry := c.createEntry(section.Name, c.GetConfigFileDir()) + entry.parse(section) + groupName := entry.GetGroupName() + programs := entry.GetPrograms() + for _, program := range programs { + c.ProgramGroup.Add(groupName, program) + } + } + } +} + +func (c *Config) isProgramOrEventListener(section *ini.Section) (bool, string) { + //check if it is a program or event listener section + is_program := strings.HasPrefix(section.Name, "program:") + is_event_listener := strings.HasPrefix(section.Name, "eventlistener:") + prefix := "" + if is_program { + prefix = "program:" + } else if is_event_listener { + prefix = "eventlistener:" + } + return is_program || is_event_listener, prefix +} + +// parse the sections starts with "program:" prefix. +// +// Return all the parsed program names in the ini +func (c *Config) parseProgram(cfg *ini.Ini) []string { + loaded_programs := make([]string, 0) + for _, section := range cfg.Sections() { + + program_or_event_listener, prefix := c.isProgramOrEventListener(section) + + //if it is program or event listener + if program_or_event_listener { + //get the number of processes + numProcs, err := section.GetInt("numprocs") + programName := section.Name[len(prefix):] + if err != nil { + numProcs = 1 + } + procName, err := section.GetValue("process_name") + if numProcs > 1 { + if err != nil || strings.Index(procName, "%(process_num)") == -1 { + log.WithFields(log.Fields{ + "numprocs": numProcs, + "process_name": procName, + }).Error("no process_num in process name") + } + } + originalProcName := programName + if err == nil { + originalProcName = procName + } + + for i := 1; i <= numProcs; i++ { + envs := NewStringExpression("program_name", programName, + "process_num", fmt.Sprintf("%d", i), + "group_name", c.ProgramGroup.GetGroup(programName, programName), + "here", c.GetConfigFileDir()) + cmd, err := envs.Eval(section.GetValueWithDefault("command", "")) + if err != nil { + continue + } + section.Add("command", cmd) + + procName, err := envs.Eval(originalProcName) + if err != nil { + continue + } + + section.Add("process_name", procName) + section.Add("numprocs_start", fmt.Sprintf("%d", (i-1))) + section.Add("process_num", fmt.Sprintf("%d", i)) + entry := c.createEntry(procName, c.GetConfigFileDir()) + entry.parse(section) + entry.Name = prefix + procName + group := c.ProgramGroup.GetGroup(programName, programName) + entry.Group = group + loaded_programs = append(loaded_programs, procName) + } + } + } + return loaded_programs + +} + +func (c *Config) String() string { + buf := bytes.NewBuffer(make([]byte, 0)) + fmt.Fprintf(buf, "configFile:%s\n", c.configFile) + for k, v := range c.entries { + 
fmt.Fprintf(buf, "[program:%s]\n", k) + fmt.Fprintf(buf, "%s\n", v.String()) + } + return buf.String() +} + +func (c *Config) RemoveProgram(programName string) { + delete(c.entries, fmt.Sprintf("program:%s", programName)) + c.ProgramGroup.Remove(programName) +} diff --git a/vendor/github.com/rpoletaev/supervisord/config/process_group.go b/vendor/github.com/rpoletaev/supervisord/config/process_group.go new file mode 100644 index 000000000..264995bdf --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/config/process_group.go @@ -0,0 +1,114 @@ +package config + +import ( + "bytes" + "strings" + + "github.com/rpoletaev/supervisord/util" +) + +type ProcessGroup struct { + //mapping between the program and its group + processGroup map[string]string +} + +func NewProcessGroup() *ProcessGroup { + return &ProcessGroup{processGroup: make(map[string]string)} +} + +// clone the process group +func (pg *ProcessGroup) Clone() *ProcessGroup { + new_pg := NewProcessGroup() + for k, v := range pg.processGroup { + new_pg.processGroup[k] = v + } + return new_pg +} + +func (pg *ProcessGroup) Sub(other *ProcessGroup) (added []string, changed []string, removed []string) { + thisGroup := pg.GetAllGroup() + otherGroup := other.GetAllGroup() + added = util.Sub(thisGroup, otherGroup) + changed = make([]string, 0) + removed = util.Sub(otherGroup, thisGroup) + + for _, group := range thisGroup { + proc_1 := pg.GetAllProcess(group) + proc_2 := other.GetAllProcess(group) + if len(proc_2) > 0 && !util.IsSameStringArray(proc_1, proc_2) { + changed = append(changed, group) + } + } + return +} + +//add a process to a group +func (pg *ProcessGroup) Add(group string, procName string) { + pg.processGroup[procName] = group +} + +//remove a process +func (pg *ProcessGroup) Remove(procName string) { + delete(pg.processGroup, procName) +} + +//get all the groups +func (pg *ProcessGroup) GetAllGroup() []string { + groups := make(map[string]bool) + for _, group := range pg.processGroup { + groups[group] = true + } + + result := make([]string, 0) + for group, _ := range groups { + result = append(result, group) + } + return result +} + +// get all the processes in a group +func (pg *ProcessGroup) GetAllProcess(group string) []string { + result := make([]string, 0) + for procName, groupName := range pg.processGroup { + if group == groupName { + result = append(result, procName) + } + } + return result +} + +// check if a process belongs to a group or not +func (pg *ProcessGroup) InGroup(procName string, group string) bool { + groupName, ok := pg.processGroup[procName] + if ok && group == groupName { + return true + } + return false +} + +func (pg *ProcessGroup) ForEachProcess(procFunc func(group string, procName string)) { + for procName, groupName := range pg.processGroup { + procFunc(groupName, procName) + } +} + +func (pg *ProcessGroup) GetGroup(procName string, defGroup string) string { + group, ok := pg.processGroup[procName] + + if ok { + return group + } + pg.processGroup[procName] = defGroup + return defGroup +} + +func (pg *ProcessGroup) String() string { + buf := bytes.NewBuffer(make([]byte, 0)) + for _, group := range pg.GetAllGroup() { + buf.WriteString(group) + buf.WriteString(":") + buf.WriteString(strings.Join(pg.GetAllProcess(group), ",")) + buf.WriteString(";") + } + return buf.String() +} diff --git a/vendor/github.com/rpoletaev/supervisord/config/process_sort.go b/vendor/github.com/rpoletaev/supervisord/config/process_sort.go new file mode 100644 index 000000000..bbd902f49 --- /dev/null +++ 
b/vendor/github.com/rpoletaev/supervisord/config/process_sort.go @@ -0,0 +1,159 @@ +package config + +import ( + "sort" + "strings" +) + +type ProgramByPriority []*ConfigEntry + +func (p ProgramByPriority) Len() int { + return len(p) +} + +func (p ProgramByPriority) Swap(i, j int) { + p[i], p[j] = p[j], p[i] +} + +func (p ProgramByPriority) Less(i, j int) bool { + return p[i].GetInt("priority", 999) < p[j].GetInt("priority", 999) +} + +type ProcessSorter struct { + depends_on_gragh map[string][]string + procs_without_depends []*ConfigEntry +} + +func NewProcessSorter() *ProcessSorter { + return &ProcessSorter{depends_on_gragh: make(map[string][]string), + procs_without_depends: make([]*ConfigEntry, 0)} +} + +func (p *ProcessSorter) initDepends(program_configs []*ConfigEntry) { + //sort by depends_on + for _, config := range program_configs { + if config.IsProgram() && config.HasParameter("depends_on") { + depends_on := config.GetString("depends_on", "") + prog_name := config.GetProgramName() + for _, depends_on_prog := range strings.Split(depends_on, ",") { + depends_on_prog = strings.TrimSpace(depends_on_prog) + if depends_on_prog != "" { + if _, ok := p.depends_on_gragh[prog_name]; !ok { + p.depends_on_gragh[prog_name] = make([]string, 0) + } + p.depends_on_gragh[prog_name] = append(p.depends_on_gragh[prog_name], depends_on_prog) + + } + } + } + } + +} + +func (p *ProcessSorter) initProgramWithoutDepends(program_configs []*ConfigEntry) { + depends_on_programs := p.getDependsOnInfo() + for _, config := range program_configs { + if config.IsProgram() { + if _, ok := depends_on_programs[config.GetProgramName()]; !ok { + p.procs_without_depends = append(p.procs_without_depends, config) + } + } + } +} + +func (p *ProcessSorter) getDependsOnInfo() map[string]string { + depends_on_programs := make(map[string]string) + + for k, v := range p.depends_on_gragh { + depends_on_programs[k] = k + for _, t := range v { + depends_on_programs[t] = t + } + } + + return depends_on_programs +} + +func (p *ProcessSorter) sortDepends() []string { + finished_programs := make(map[string]string) + progs_with_depends_info := p.getDependsOnInfo() + progs_start_order := make([]string, 0) + + //get all process without depends + for prog_name, _ := range progs_with_depends_info { + if _, ok := p.depends_on_gragh[prog_name]; !ok { + finished_programs[prog_name] = prog_name + progs_start_order = append(progs_start_order, prog_name) + } + } + + for len(finished_programs) < len(progs_with_depends_info) { + for prog_name, _ := range p.depends_on_gragh { + if _, ok := finished_programs[prog_name]; !ok && p.inFinishedPrograms(prog_name, finished_programs) { + finished_programs[prog_name] = prog_name + progs_start_order = append(progs_start_order, prog_name) + } + } + } + + return progs_start_order +} + +func (p *ProcessSorter) inFinishedPrograms(program_name string, finished_programs map[string]string) bool { + if depends_on, ok := p.depends_on_gragh[program_name]; ok { + for _, depend_program := range depends_on { + if _, finished := finished_programs[depend_program]; !finished { + return false + } + } + } + return true +} + +/*func (p *ProcessSorter) SortProcess(procs []*Process) []*Process { + prog_configs := make([]*ConfigEntry, 0) + for _, proc := range procs { + if proc.config.IsProgram() { + prog_configs = append(prog_configs, proc.config) + } + } + + result := make([]*Process, 0) + for _, config := range p.SortProgram(prog_configs) { + for _, proc := range procs { + if proc.config == config { + result = 
append(result, proc) + } + } + } + + return result +}*/ + +func (p *ProcessSorter) SortProgram(program_configs []*ConfigEntry) []*ConfigEntry { + p.initDepends(program_configs) + p.initProgramWithoutDepends(program_configs) + result := make([]*ConfigEntry, 0) + + for _, prog := range p.sortDepends() { + for _, config := range program_configs { + if config.IsProgram() && config.GetProgramName() == prog { + result = append(result, config) + } + } + } + + sort.Sort(ProgramByPriority(p.procs_without_depends)) + for _, p := range p.procs_without_depends { + result = append(result, p) + } + return result +} + +/*func sortProcess(procs []*Process) []*Process { + return NewProcessSorter().SortProcess(procs) +}*/ + +func sortProgram(configs []*ConfigEntry) []*ConfigEntry { + return NewProcessSorter().SortProgram(configs) +} diff --git a/vendor/github.com/rpoletaev/supervisord/config/string_expression.go b/vendor/github.com/rpoletaev/supervisord/config/string_expression.go new file mode 100644 index 000000000..30933e44a --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/config/string_expression.go @@ -0,0 +1,88 @@ +package config + +import ( + "fmt" + "os" + "strconv" + "strings" +) + +type StringExpression struct { + env map[string]string +} + +func NewStringExpression(envs ...string) *StringExpression { + se := &StringExpression{env: make(map[string]string)} + + for _, env := range os.Environ() { + t := strings.Split(env, "=") + se.env["ENV_"+t[0]] = t[1] + } + n := len(envs) + for i := 0; i+1 < n; i += 2 { + se.env[envs[i]] = envs[i+1] + } + + hostname, err := os.Hostname() + if err == nil { + se.env["host_node_name"] = hostname + } + + return se + +} + +func (se *StringExpression) Add(key string, value string) *StringExpression { + se.env[key] = value + return se +} + +func (se *StringExpression) Eval(s string) (string, error) { + for { + //find variable start indicator + start := strings.Index(s, "%(") + + if start == -1 { + return s, nil + } + + end := start + 1 + n := len(s) + + //find variable end indicator + for end < n && s[end] != ')' { + end++ + } + + //find the type of the variable + typ := end + 1 + for typ < n && !((s[typ] >= 'a' && s[typ] <= 'z') || (s[typ] >= 'A' && s[typ] <= 'Z')) { + typ++ + } + + //evaluate the variable + if typ < n { + varName := s[start+2 : end] + + varValue, ok := se.env[varName] + + if !ok { + return "", fmt.Errorf("fail to find the environment variable %s", varName) + } + if s[typ] == 'd' { + i, err := strconv.Atoi(varValue) + if err != nil { + return "", fmt.Errorf("can't convert %s to integer", varValue) + } + s = s[0:start] + fmt.Sprintf("%"+s[end+1:typ+1], i) + s[typ+1:] + } else if s[typ] == 's' { + s = s[0:start] + varValue + s[typ+1:] + } else { + return "", fmt.Errorf("not implement type:%v", s[typ]) + } + } else { + return "", fmt.Errorf("invalid string expression format") + } + } + +} diff --git a/vendor/github.com/rpoletaev/supervisord/config_template.go b/vendor/github.com/rpoletaev/supervisord/config_template.go new file mode 100644 index 000000000..760cc4174 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/config_template.go @@ -0,0 +1,137 @@ +package main + +import ( + "io" + "os" +) + +var config_template = `[unix_http_server] +file=/tmp/supervisord.sock +#chmod=not support +#chown=not support +username=test1 +password={SHA}82ab876d1387bfafe46cc1c8a2ef074eae50cb1d + +[inet_http_server] +port=127.0.0.1:9001 +username=test1 +password=thepassword + +[supervisord] +logfile=%(here)s/supervisord.log +logfile_maxbytes=50MB 
+logfile_backups=10 +loglevel=info +pidfile=%(here)s/supervisord.pid +#umask=not support +#nodaemon=not support +#minfds=not support +#minprocs=not support +#nocleanup=not support +#childlogdir=not support +#user=not support +#directory=not support +#strip_ansi=not support +#environment=not support +identifier=supervisor + +[program:x] +command=/bin/cat +process_name=%(program_name)s +numprocs=1 +#numprocs_start=not support +autostart=true +startsecs=3 +startretries=3 +autorestart=true +exitcodes=0,2 +stopsignal=TERM +stopwaitsecs=10 +#stopasgroup=not support +#killasgroup=not support +user=user1 +redirect_stderr=false +stdout_logfile=AUTO +stdout_logfile_maxbytes=50MB +stdout_logfile_backups=10 +stdout_capture_maxbytes=0 +stdout_events_enabled=true +stderr_logfile=AUTO +stderr_logfile_maxbytes=50MB +stderr_logfile_backups=10 +stderr_capture_maxbytes=0 +stderr_events_enabled=false +environment=KEY="val",KEY2="val2" +directory=/tmp +#umask=not support +serverurl=AUTO + +[include] +files=/an/absolute/filename.conf /an/absolute/*.conf foo.conf config??.conf + +[group:x] +programs=bar,baz +priority=999 + +[eventlistener:x] +command=/bin/eventlistener +process_name=%(program_name)s +numprocs=1 +#numprocs_start=not support +autostart=true +startsecs=3 +startretries=3 +autorestart=true +exitcodes=0,2 +stopsignal=TERM +stopwaitsecs=10 +#stopasgroup=not support +#killasgroup=not support +user=user1 +redirect_stderr=false +stdout_logfile=AUTO +stdout_logfile_maxbytes=50MB +stdout_logfile_backups=10 +stdout_capture_maxbytes=0 +stdout_events_enabled=true +stderr_logfile=AUTO +stderr_logfile_maxbytes=50MB +stderr_logfile_backups=10 +stderr_capture_maxbytes=0 +stderr_events_enabled=false +environment=KEY="val",KEY2="val2" +directory=/tmp +#umask=not support +serverurl=AUTO +buffer_size=10240 +events=PROCESS_STATE +#result_handler=not support +` + +type InitTemplateCommand struct { + OutFile string `short:"o" long:"output" description:"the output file name" required:"true"` +} + +var initTemplateCommand InitTemplateCommand + +func (x *InitTemplateCommand) Execute(args []string) error { + f, err := os.Create(x.OutFile) + if err != nil { + return err + } + defer f.Close() + return GenTemplate(f) +} + +func GenTemplate(writer io.Writer) error { + _, err := writer.Write([]byte(config_template)) + return err +} + +func init() { + parser.AddCommand("init", + "initialize a template", + "The init subcommand writes the supported configurations to specified file", + &initTemplateCommand) + +} diff --git a/vendor/github.com/rpoletaev/supervisord/content_checker.go b/vendor/github.com/rpoletaev/supervisord/content_checker.go new file mode 100644 index 000000000..6c0b7d3e3 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/content_checker.go @@ -0,0 +1,149 @@ +package main + +import ( + "fmt" + "net" + "net/http" + "os/exec" + "strings" + "time" +) + +type ContentChecker interface { + Check() bool +} + +type BaseChecker struct { + data string + includes []string + //timeout in second + timeoutTime time.Time + notifyChannel chan string +} + +func NewBaseChecker(includes []string, timeout int) *BaseChecker { + return &BaseChecker{data: "", + includes: includes, + timeoutTime: time.Now().Add(time.Duration(timeout) * time.Second), + notifyChannel: make(chan string, 1)} +} + +func (bc *BaseChecker) Write(b []byte) (int, error) { + bc.notifyChannel <- string(b) + return len(b), nil +} + +func (bc *BaseChecker) isReady() bool { + find_all := true + for _, include := range bc.includes { + if strings.Index(bc.data, 
include) == -1 { + find_all = false + break + } + } + return find_all +} +func (bc *BaseChecker) Check() bool { + d := bc.timeoutTime.Sub(time.Now()) + if d < 0 { + return false + } + timeoutSignal := time.After(d) + + for { + select { + case data := <-bc.notifyChannel: + bc.data = bc.data + data + if bc.isReady() { + return true + } + case <-timeoutSignal: + return false + } + } +} + +type ScriptChecker struct { + args []string +} + +func NewScriptChecker(args []string) *ScriptChecker { + return &ScriptChecker{args: args} +} + +func (sc *ScriptChecker) Check() bool { + cmd := exec.Command(sc.args[0]) + if len(sc.args) > 1 { + cmd.Args = sc.args + } + err := cmd.Run() + return err == nil && cmd.ProcessState != nil && cmd.ProcessState.Success() +} + +type TcpChecker struct { + host string + port int + conn net.Conn + baseChecker *BaseChecker +} + +func NewTcpChecker(host string, port int, includes []string, timeout int) *TcpChecker { + checker := &TcpChecker{host: host, + port: port, + baseChecker: NewBaseChecker(includes, timeout)} + checker.start() + return checker +} + +func (tc *TcpChecker) start() { + go func() { + b := make([]byte, 1024) + var err error = nil + for { + tc.conn, err = net.Dial("tcp", fmt.Sprintf("%s:%d", tc.host, tc.port)) + if err == nil || tc.baseChecker.timeoutTime.Before(time.Now()) { + break + } + } + + if err == nil { + for { + n, err := tc.conn.Read(b) + if err != nil { + break + } + tc.baseChecker.Write(b[0:n]) + } + } + }() +} + +func (tc *TcpChecker) Check() bool { + ret := tc.baseChecker.Check() + if tc.conn != nil { + tc.conn.Close() + } + return ret +} + +type HttpChecker struct { + url string + timeoutTime time.Time +} + +func NewHttpChecker(url string, timeout int) *HttpChecker { + return &HttpChecker{url: url, + timeoutTime: time.Now().Add(time.Duration(timeout) * time.Second)} +} + +func (hc *HttpChecker) Check() bool { + for { + if hc.timeoutTime.After(time.Now()) { + resp, err := http.Get(hc.url) + if err == nil { + return resp.StatusCode >= 200 && resp.StatusCode < 300 + } + } + } + return false +} diff --git a/vendor/github.com/rpoletaev/supervisord/ctl.go b/vendor/github.com/rpoletaev/supervisord/ctl.go new file mode 100644 index 000000000..977dc3ba9 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/ctl.go @@ -0,0 +1,159 @@ +package main + +import ( + "fmt" + "os" + "strings" + + "github.com/rpoletaev/supervisord/config" + "github.com/rpoletaev/supervisord/xmlrpcclient" +) + +type CtlCommand struct { + ServerUrl string `short:"s" long:"serverurl" description:"URL on which supervisord server is listening"` +} + +var ctlCommand CtlCommand + +func (x *CtlCommand) getServerUrl() string { + fmt.Printf("%v\n", options) + if x.ServerUrl != "" { + return x.ServerUrl + } else if _, err := os.Stat(options.Configuration); err == nil { + config := config.NewConfig(options.Configuration) + config.Load() + if entry, ok := config.GetSupervisorctl(); ok { + serverurl := entry.GetString("serverurl", "") + if serverurl != "" { + return serverurl + } + } + } + return "http://localhost:9001" +} +func (x *CtlCommand) Execute(args []string) error { + if len(args) == 0 { + return nil + } + + rpcc := xmlrpcclient.NewXmlRPCClient(x.getServerUrl()) + verb := args[0] + + switch verb { + + //////////////////////////////////////////////////////////////////////////////// + // STATUS + //////////////////////////////////////////////////////////////////////////////// + case "status": + processes := args[1:] + processesMap := make(map[string]bool) + for _, process := 
range processes { + processesMap[strings.ToLower(process)] = true + } + if reply, err := rpcc.GetAllProcessInfo(); err == nil { + x.showProcessInfo(&reply, processesMap) + } + + //////////////////////////////////////////////////////////////////////////////// + // START or STOP + //////////////////////////////////////////////////////////////////////////////// + case "start", "stop": + state := map[string]string{ + "start": "started", + "stop": "stopped", + } + processes := args[1:] + if len(processes) <= 0 { + fmt.Printf("Please specify process for %s\n", verb) + } + for _, pname := range processes { + if pname == "all" { + reply, err := rpcc.ChangeAllProcessState(verb) + if err == nil { + x.showProcessInfo(&reply, make(map[string]bool)) + } else { + fmt.Printf("Fail to change all process state to %s", state) + } + } else { + if reply, err := rpcc.ChangeProcessState(verb, pname); err == nil { + fmt.Printf("%s: ", pname) + if !reply.Value { + fmt.Printf("not ") + } + fmt.Printf("%s\n", state[verb]) + } else { + fmt.Printf("%s: failed [%v]\n", pname, err) + } + } + } + + //////////////////////////////////////////////////////////////////////////////// + // SHUTDOWN + //////////////////////////////////////////////////////////////////////////////// + case "shutdown": + if reply, err := rpcc.Shutdown(); err == nil { + if reply.Value { + fmt.Printf("Shut Down\n") + } else { + fmt.Printf("Hmmm! Something gone wrong?!\n") + } + } + case "reload": + if reply, err := rpcc.ReloadConfig(); err == nil { + + if len(reply.AddedGroup) > 0 { + fmt.Printf("Added Groups: %s\n", strings.Join(reply.AddedGroup, ",")) + } + if len(reply.ChangedGroup) > 0 { + fmt.Printf("Changed Groups: %s\n", strings.Join(reply.ChangedGroup, ",")) + } + if len(reply.RemovedGroup) > 0 { + fmt.Printf("Removed Groups: %s\n", strings.Join(reply.RemovedGroup, ",")) + } + } + case "signal": + sig_name, processes := args[1], args[2:] + for _, process := range processes { + if process == "all" { + reply, err := rpcc.SignalAll(process) + if err == nil { + x.showProcessInfo(&reply, make(map[string]bool)) + } else { + fmt.Printf("Fail to send signal %s to all process", sig_name) + } + } else { + reply, err := rpcc.SignalProcess(sig_name, process) + if err == nil && reply.Success { + fmt.Printf("Succeed to send signal %s to process %s\n", sig_name, process) + } else { + fmt.Printf("Fail to send signal %s to process %s\n", sig_name, process) + } + } + } + + default: + fmt.Println("unknown command") + } + + return nil +} + +func (x *CtlCommand) showProcessInfo(reply *xmlrpcclient.AllProcessInfoReply, processesMap map[string]bool) { + for _, pinfo := range reply.Value { + name := strings.ToLower(pinfo.Name) + description := pinfo.Description + if strings.ToLower(description) == "" { + description = "" + } + if len(processesMap) <= 0 || processesMap[name] { + fmt.Printf("%-33s%-10s%s\n", name, pinfo.Statename, description) + } + } +} + +func init() { + parser.AddCommand("ctl", + "Control a running daemon", + "The ctl subcommand resembles supervisorctl command of original daemon.", + &ctlCommand) +} diff --git a/vendor/github.com/rpoletaev/supervisord/daemonize.go b/vendor/github.com/rpoletaev/supervisord/daemonize.go new file mode 100644 index 000000000..682b1e88c --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/daemonize.go @@ -0,0 +1,25 @@ +// +build !windows + +package main + +import ( + log "github.com/sirupsen/logrus" + "github.com/sevlyar/go-daemon" +) + +func Deamonize(proc func()) { + context := new(daemon.Context) + + child, 
err := context.Reborn() + if err != nil { + log.WithFields(log.Fields{"err": err}).Fatal("Unable to run") + } + if child != nil { + return + } + defer context.Release() + + log.Info("daemon started") + + proc() +} diff --git a/vendor/github.com/rpoletaev/supervisord/daemonize_windows.go b/vendor/github.com/rpoletaev/supervisord/daemonize_windows.go new file mode 100644 index 000000000..9fe9d204d --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/daemonize_windows.go @@ -0,0 +1,7 @@ +package main + +// +build windows + +func Deamonize(proc func()) { + proc() +} diff --git a/vendor/github.com/rpoletaev/supervisord/events/events.go b/vendor/github.com/rpoletaev/supervisord/events/events.go new file mode 100644 index 000000000..ad3c55137 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/events/events.go @@ -0,0 +1,745 @@ +package events + +import ( + "bufio" + "bytes" + "container/list" + "fmt" + log "github.com/sirupsen/logrus" + "io" + "strconv" + "strings" + "sync" + "sync/atomic" + "time" +) + +const ( + EVENT_SYS_VERSION = "3.0" + PROC_COMMON_BEGIN_STR = "" + PROC_COMMON_END_STR = "" +) + +type Event interface { + GetSerial() uint64 + GetType() string + GetBody() string +} + +type BaseEvent struct { + serial uint64 + eventType string +} + +func (be *BaseEvent) GetSerial() uint64 { + return be.serial +} + +func (be *BaseEvent) GetType() string { + return be.eventType +} + +type EventListenerManager struct { + //mapping between the event listener name and the listener + namedListeners map[string]*EventListener + //mapping between the event name and the event listeners + eventListeners map[string]map[*EventListener]bool +} + +type EventPoolSerial struct { + sync.Mutex + poolserial map[string]uint64 +} + +func NewEventPoolSerial() *EventPoolSerial { + return &EventPoolSerial{poolserial: make(map[string]uint64)} +} + +func (eps *EventPoolSerial) nextSerial(pool string) uint64 { + eps.Lock() + defer eps.Unlock() + + r, ok := eps.poolserial[pool] + if !ok { + r = 1 + } + eps.poolserial[pool] = r + 1 + return r +} + +type EventListener struct { + pool string + server string + mutex sync.Mutex + events *list.List + stdin *bufio.Reader + stdout io.Writer + buffer_size int +} + +func NewEventListener(pool string, + server string, + stdin io.Reader, + stdout io.Writer, + buffer_size int) *EventListener { + evtListener := &EventListener{pool: pool, + server: server, + events: list.New(), + stdin: bufio.NewReader(stdin), + stdout: stdout, + buffer_size: buffer_size} + evtListener.start() + return evtListener +} + +func (el *EventListener) getFirstEvent() ([]byte, bool) { + el.mutex.Lock() + defer el.mutex.Unlock() + + if el.events.Len() > 0 { + elem := el.events.Front() + value := elem.Value + b, ok := value.([]byte) + return b, ok + } + return nil, false +} + +func (el *EventListener) removeFirstEvent() { + el.mutex.Lock() + defer el.mutex.Unlock() + if el.events.Len() > 0 { + el.events.Remove(el.events.Front()) + } +} + +func (el *EventListener) start() { + go func() { + for { + //read if it is ready + err := el.waitForReady() + if err != nil { + log.WithFields(log.Fields{"eventListener": el.pool}).Warn("fail to read from event listener, the event listener may exit") + break + } + for { + if b, ok := el.getFirstEvent(); ok { + _, err := el.stdout.Write(b) + if err != nil { + log.WithFields(log.Fields{"eventListener": el.pool}).Warn("fail to send event") + break + } + result, err := el.readResult() + if err != nil { + log.WithFields(log.Fields{"eventListener": el.pool}).Warn("fail 
to read result") + break + } + if result == "OK" { //remove the event if succeed + log.WithFields(log.Fields{"eventListener": el.pool}).Info("succeed to send the event") + el.removeFirstEvent() + break + } else if result == "FAIL" { + log.WithFields(log.Fields{"eventListener": el.pool}).Warn("fail to send the event") + break + } else { + log.WithFields(log.Fields{"eventListener": el.pool, "result": result}).Warn("unknown result from listener") + } + } + } + } + }() +} + +func (el *EventListener) waitForReady() error { + log.Debug("start to check if event listener program is ready") + for { + line, err := el.stdin.ReadString('\n') + if err != nil { + return err + } + if line == "READY\n" { + log.WithFields(log.Fields{"eventListener": el.pool}).Debug("the event listener is ready") + return nil + } + } +} + +func (el *EventListener) readResult() (string, error) { + s, err := el.stdin.ReadString('\n') + if err != nil { + return s, err + } + fields := strings.Fields(s) + if len(fields) == 2 && fields[0] == "RESULT" { + //try to get the length of result + n, err := strconv.Atoi(fields[1]) + if err != nil { + //return if fail to get the length + return "", err + } + if n < 0 { + return "", fmt.Errorf("Fail to read the result because the result bytes is less than 0") + } + //read n bytes + b := make([]byte, n) + for i := 0; i < n; i++ { + b[i], err = el.stdin.ReadByte() + if err != nil { + return "", err + } + } + //ok, get the n bytes + return string(b), nil + } else { + return "", fmt.Errorf("Fail to read the result") + } + +} + +func (el *EventListener) HandleEvent(event Event) { + encodedEvent := el.encodeEvent(event) + el.mutex.Lock() + defer el.mutex.Unlock() + if el.events.Len() <= el.buffer_size { + el.events.PushBack(encodedEvent) + } else { + log.WithFields(log.Fields{"eventListener": el.pool}).Error("events reaches the buffer_size, discard the events") + } +} + +func (el *EventListener) encodeEvent(event Event) []byte { + body := []byte(event.GetBody()) + + //header + s := fmt.Sprintf("ver:%s server:%s serial:%d pool:%s poolserial:%d eventname:%s len:%d\n", + EVENT_SYS_VERSION, + el.server, + event.GetSerial(), + el.pool, + eventPoolSerial.nextSerial(el.pool), + event.GetType(), + len(body)) + //write the header & body to buffer + r := bytes.NewBuffer([]byte(s)) + r.Write(body) + + return r.Bytes() +} + +var eventTypeDerives = map[string][]string{ + "PROCESS_STATE_STARTING": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_RUNNING": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_BACKOFF": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_STOPPING": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_EXITED": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_STOPPED": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_FATAL": []string{"EVENT", "PROCESS_STATE"}, + "PROCESS_STATE_UNKNOWN": []string{"EVENT", "PROCESS_STATE"}, + "REMOTE_COMMUNICATION": []string{"EVENT"}, + "PROCESS_LOG_STDOUT": []string{"EVENT", "PROCESS_LOG"}, + "PROCESS_LOG_STDERR": []string{"EVENT", "PROCESS_LOG"}, + "PROCESS_COMMUNICATION_STDOUT": []string{"EVENT", "PROCESS_COMMUNICATION"}, + "PROCESS_COMMUNICATION_STDERR": []string{"EVENT", "PROCESS_COMMUNICATION"}, + "SUPERVISOR_STATE_CHANGE_RUNNING": []string{"EVENT", "SUPERVISOR_STATE_CHANGE"}, + "SUPERVISOR_STATE_CHANGE_STOPPING": []string{"EVENT", "SUPERVISOR_STATE_CHANGE"}, + "TICK_5": []string{"EVENT", "TICK"}, + "TICK_60": []string{"EVENT", "TICK"}, + "TICK_3600": []string{"EVENT", "TICK"}, + "PROCESS_GROUP_ADDED": []string{"EVENT", 
"PROCESS_GROUP"}, + "PROCESS_GROUP_REMOVED": []string{"EVENT", "PROCESS_GROUP"}} +var eventSerial uint64 = 0 +var eventListenerManager = NewEventListenerManager() +var eventPoolSerial = NewEventPoolSerial() + +func init() { + startTickTimer() +} + +func startTickTimer() { + tickConfigs := map[string]int64{"TICK_5": 5, + "TICK_60": 60, + "TICK_3600": 3600} + + //start a Tick timer + go func() { + lastTickSlice := make(map[string]int64) + + c := time.Tick(1 * time.Second) + for now := range c { + for tickType, period := range tickConfigs { + time_slice := now.Unix() / period + last_time_slice, ok := lastTickSlice[tickType] + if !ok { + lastTickSlice[tickType] = time_slice + } else if last_time_slice != time_slice { + lastTickSlice[tickType] = time_slice + EmitEvent(NewTickEvent(tickType, now.Unix())) + } + } + } + }() +} + +func nextEventSerial() uint64 { + return atomic.AddUint64(&eventSerial, 1) +} + +func NewEventListenerManager() *EventListenerManager { + return &EventListenerManager{namedListeners: make(map[string]*EventListener), + eventListeners: make(map[string]map[*EventListener]bool)} +} + +func (em *EventListenerManager) registerEventListener(eventListenerName string, + events []string, + listener *EventListener) { + + em.namedListeners[eventListenerName] = listener + all_events := make(map[string]bool) + for _, event := range events { + for k, values := range eventTypeDerives { + if event == k { //if it is a final event + all_events[k] = true + } else { //if it is an abstract event, add all its derived events + for _, val := range values { + if val == event { + all_events[k] = true + } + } + } + } + } + for event, _ := range all_events { + log.WithFields(log.Fields{"eventListener": eventListenerName, "event": event}).Info("register event listener") + if _, ok := em.eventListeners[event]; !ok { + em.eventListeners[event] = make(map[*EventListener]bool) + } + em.eventListeners[event][listener] = true + } +} + +func RegisterEventListener(eventListenerName string, + events []string, + listener *EventListener) { + eventListenerManager.registerEventListener( eventListenerName, events, listener ) +} + +func (em *EventListenerManager) unregisterEventListener(eventListenerName string) *EventListener { + listener, ok := em.namedListeners[eventListenerName] + if ok { + delete(em.namedListeners, eventListenerName) + for event, listeners := range em.eventListeners { + if _, ok = listeners[listener]; ok { + log.WithFields(log.Fields{"eventListener": eventListenerName, "event": event}).Info("unregister event listener") + } + + delete(listeners, listener) + } + return listener + } + return nil +} + +func UnregisterEventListener(eventListenerName string) *EventListener { + return eventListenerManager.unregisterEventListener( eventListenerName ) +} + +func (em *EventListenerManager) EmitEvent(event Event) { + listeners, ok := em.eventListeners[event.GetType()] + if ok { + log.WithFields(log.Fields{"event": event.GetType()}).Info("process event") + for listener, _ := range listeners { + log.WithFields(log.Fields{"eventListener": listener.pool, "event": event.GetType()}).Info("receive event on listener") + listener.HandleEvent(event) + } + } +} + +type RemoteCommunicationEvent struct { + BaseEvent + typ string + data string +} + +func NewRemoteCommunicationEvent(typ string, data string) *RemoteCommunicationEvent { + r := &RemoteCommunicationEvent{typ: typ, data: data} + r.eventType = "REMOTE_COMMUNICATION" + r.serial = nextEventSerial() + return r +} + +func (r *RemoteCommunicationEvent) GetBody() 
string { + return fmt.Sprintf("type:%s\n%s", r.typ, r.data) +} + +type ProcCommEvent struct { + BaseEvent + processName string + groupName string + pid int + data string +} + +func NewProcCommEvent(eventType string, + procName string, + groupName string, + pid int, + data string) *ProcCommEvent { + return &ProcCommEvent{BaseEvent: BaseEvent{eventType: eventType, serial: nextEventSerial()}, + processName: procName, + groupName: groupName, + pid: pid, + data: data} +} + +func (p *ProcCommEvent) GetBody() string { + return fmt.Sprintf("processname:%s groupname:%s pid:%d\n%s", p.processName, p.groupName, p.pid, p.data) +} + +func EmitEvent(event Event) { + eventListenerManager.EmitEvent(event) +} + +type TickEvent struct { + BaseEvent + when int64 +} + +func NewTickEvent(tickType string, when int64) *TickEvent { + r := &TickEvent{when: when} + r.eventType = tickType + r.serial = nextEventSerial() + return r +} + +func (te *TickEvent) GetBody() string { + return fmt.Sprintf("when:%d", te.when) +} + +type ProcCommEventCapture struct { + reader io.Reader + captureMaxBytes int + stdType string + procName string + groupName string + pid int + eventBuffer string + eventBeginPos int +} + +func NewProcCommEventCapture(reader io.Reader, + captureMaxBytes int, + stdType string, + procName string, + groupName string) *ProcCommEventCapture { + pec := &ProcCommEventCapture{reader: reader, + captureMaxBytes: captureMaxBytes, + stdType: stdType, + procName: procName, + groupName: groupName, + pid: -1, + eventBuffer: "", + eventBeginPos: -1} + pec.startCapture() + return pec +} + +func (pec *ProcCommEventCapture) SetPid(pid int) { + pec.pid = pid +} +func (pec *ProcCommEventCapture) startCapture() { + go func() { + buf := make([]byte, 10240) + for { + n, err := pec.reader.Read(buf) + if err != nil { + break + } + pec.eventBuffer += string(buf[0:n]) + for { + event := pec.captureEvent() + if event == nil { + break + } + EmitEvent(event) + } + } + }() +} + +func (pec *ProcCommEventCapture) captureEvent() Event { + pec.findBeginStr() + end_pos := pec.findEndStr() + if end_pos == -1 { + return nil + } + data := pec.eventBuffer[pec.eventBeginPos+len(PROC_COMMON_BEGIN_STR) : end_pos] + pec.eventBuffer = pec.eventBuffer[end_pos+len(PROC_COMMON_END_STR):] + pec.eventBeginPos = -1 + return NewProcCommEvent(pec.stdType, + pec.procName, + pec.groupName, + pec.pid, + data) +} + +func (pec *ProcCommEventCapture) findBeginStr() { + if pec.eventBeginPos == -1 { + pec.eventBeginPos = strings.Index(pec.eventBuffer, PROC_COMMON_BEGIN_STR) + if pec.eventBeginPos == -1 { + //remove some string + n := len(pec.eventBuffer) + if n > len(PROC_COMMON_BEGIN_STR) { + pec.eventBuffer = pec.eventBuffer[n-len(PROC_COMMON_BEGIN_STR):] + } + } + } +} + +func (pec *ProcCommEventCapture) findEndStr() int { + if pec.eventBeginPos == -1 { + return -1 + } + end_pos := strings.Index(pec.eventBuffer, PROC_COMMON_END_STR) + if end_pos == -1 { + if len(pec.eventBuffer) > pec.captureMaxBytes { + log.WithFields(log.Fields{"program": pec.procName}).Warn("The capture buffer is overflow, discard the content") + pec.eventBeginPos = -1 + pec.eventBuffer = "" + } + } + return end_pos +} + +type ProcessStateEvent struct { + BaseEvent + process_name string + group_name string + from_state string + tries int + expected int + pid int +} + +func CreateProcessStartingEvent(process string, + group string, + from_state string, + tries int) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: 
tries, + expected: -1, + pid: 0} + r.eventType = "PROCESS_STATE_STARTING" + r.serial = nextEventSerial() + return r +} + +func CreateProcessRunningEvent(process string, + group string, + from_state string, + pid int) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: -1, + expected: -1, + pid: pid} + r.eventType = "PROCESS_STATE_RUNNING" + r.serial = nextEventSerial() + return r +} + +func CreateProcessBackoffEvent(process string, + group string, + from_state string, + tries int) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: tries, + expected: -1, + pid: 0} + r.eventType = "PROCESS_STATE_BACKOFF" + r.serial = nextEventSerial() + return r +} + +func CreateProcessStoppingEvent(process string, + group string, + from_state string, + pid int) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: -1, + expected: -1, + pid: pid} + r.eventType = "PROCESS_STATE_STOPPING" + r.serial = nextEventSerial() + return r +} + +func CreateProcessExitedEvent(process string, + group string, + from_state string, + expected int, + pid int) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: -1, + expected: expected, + pid: pid} + r.eventType = "PROCESS_STATE_EXITED" + r.serial = nextEventSerial() + return r +} + +func CreateProcessStoppedEvent(process string, + group string, + from_state string, + pid int) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: -1, + expected: -1, + pid: pid} + r.eventType = "PROCESS_STATE_STOPPED" + r.serial = nextEventSerial() + return r +} + +func CreateProcessFatalEvent(process string, + group string, + from_state string) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: -1, + expected: -1, + pid: 0} + r.eventType = "PROCESS_STATE_FATAL" + r.serial = nextEventSerial() + return r +} + +func CreateProcessUnknownEvent(process string, + group string, + from_state string) *ProcessStateEvent { + r := &ProcessStateEvent{process_name: process, + group_name: group, + from_state: from_state, + tries: -1, + expected: -1, + pid: 0} + r.eventType = "PROCESS_STATE_UNKNOWN" + r.serial = nextEventSerial() + return r +} + +func (pse *ProcessStateEvent) GetBody() string { + body := fmt.Sprintf("processname:%s groupname:%s from_state:%s", pse.process_name, pse.group_name, pse.from_state) + if pse.tries >= 0 { + body = fmt.Sprintf("%s tries:%d", body, pse.tries) + } + + if pse.expected != -1 { + body = fmt.Sprintf("%s expected:%d", body, pse.expected) + } + + if pse.pid != 0 { + body = fmt.Sprintf("%s pid:%d", body, pse.pid) + } + return body +} + +type SupervisorStateChangeEvent struct { + BaseEvent +} + +func (s *SupervisorStateChangeEvent) GetBody() string { + return "" +} + +func CreateSupervisorStateChangeRunning() *SupervisorStateChangeEvent { + r := &SupervisorStateChangeEvent{} + r.eventType = "SUPERVISOR_STATE_CHANGE_RUNNING" + r.serial = nextEventSerial() + return r +} + +func createSupervisorStateChangeStopping() *SupervisorStateChangeEvent { + r := &SupervisorStateChangeEvent{} + r.eventType = "SUPERVISOR_STATE_CHANGE_STOPPING" + r.serial = nextEventSerial() + return r +} + +type ProcessLogEvent struct { + BaseEvent + process_name string + 
group_name string + pid int + data string +} + +func (pe *ProcessLogEvent) GetBody() string { + return fmt.Sprintf("processname:%s groupname:%s pid:%d\n%s", + pe.process_name, + pe.group_name, + pe.pid, + pe.data) +} + +func CreateProcessLogStdoutEvent(process_name string, + group_name string, + pid int, + data string) *ProcessLogEvent { + r := &ProcessLogEvent{process_name: process_name, + group_name: group_name, + pid: pid, + data: data} + r.eventType = "PROCESS_LOG_STDOUT" + r.serial = nextEventSerial() + return r +} + +func CreateProcessLogStderrEvent(process_name string, + group_name string, + pid int, + data string) *ProcessLogEvent { + r := &ProcessLogEvent{process_name: process_name, + group_name: group_name, + pid: pid, + data: data} + r.eventType = "PROCESS_LOG_STDERR" + r.serial = nextEventSerial() + return r +} + +type ProcessGroupEvent struct { + BaseEvent + group_name string +} + +func (pe *ProcessGroupEvent) GetBody() string { + return fmt.Sprintf("groupname:%s", pe.group_name) +} + +func CreateProcessGroupAddedEvent(group_name string) *ProcessGroupEvent { + r := &ProcessGroupEvent{group_name: group_name} + + r.eventType = "PROCESS_GROUP_ADDED" + r.serial = nextEventSerial() + return r +} + +func CreateProcessGroupRemovedEvent(group_name string) *ProcessGroupEvent { + r := &ProcessGroupEvent{group_name: group_name} + + r.eventType = "PROCESS_GROUP_REMOVED" + r.serial = nextEventSerial() + return r +} diff --git a/vendor/github.com/rpoletaev/supervisord/faults/faults.go b/vendor/github.com/rpoletaev/supervisord/faults/faults.go new file mode 100644 index 000000000..2bc730ee2 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/faults/faults.go @@ -0,0 +1,30 @@ +package faults + +import ( + xmlrpc "github.com/ochinchina/gorilla-xmlrpc/xml" +) + +const ( + UNKNOWN_METHOD = 1 + INCORRECT_PARAMETERS = 2 + BAD_ARGUMENTS = 3 + SIGNATURE_UNSUPPORTED = 4 + SHUTDOWN_STATE = 6 + BAD_NAME = 10 + BAD_SIGNAL = 11 + NO_FILE = 20 + NOT_EXECUTABLE = 21 + FAILED = 30 + ABNORMAL_TERMINATION = 40 + SPAWN_ERROR = 50 + ALREADY_STARTED = 60 + NOT_RUNNING = 70 + SUCCESS = 80 + ALREADY_ADDED = 90 + STILL_RUNNING = 91 + CANT_REREAD = 92 +) + +func NewFault(code int, desc string) error { + return &xmlrpc.Fault{Code: code, String: desc} +} diff --git a/vendor/github.com/rpoletaev/supervisord/logger/log.go b/vendor/github.com/rpoletaev/supervisord/logger/log.go new file mode 100644 index 000000000..189879015 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/logger/log.go @@ -0,0 +1,485 @@ +package logger + +import ( + "errors" + "fmt" + "io" + "io/ioutil" + "os" + "path" + "strconv" + "strings" + "sync" + + "github.com/rpoletaev/supervisord/events" + "github.com/rpoletaev/supervisord/faults" +) + +//implements io.Writer interface + +type Logger interface { + io.WriteCloser + SetPid(pid int) + ReadLog(offset int64, length int64) (string, error) + ReadTailLog(offset int64, length int64) (string, int64, bool, error) + ClearCurLogFile() error + ClearAllLogFile() error +} + +type LogEventEmitter interface { + emitLogEvent(data string) +} + +type FileLogger struct { + name string + maxSize int64 + backups int + curRotate int + fileSize int64 + file *os.File + logEventEmitter LogEventEmitter + locker sync.Locker +} + +type SysLogger struct { + NullLogger + logWriter io.WriteCloser + logEventEmitter LogEventEmitter +} + +type NullLogger struct { + logEventEmitter LogEventEmitter +} + +type NullLocker struct { +} + +func NewFileLogger(name string, maxSize int64, backups int, logEventEmitter 
LogEventEmitter, locker sync.Locker) *FileLogger { + logger := &FileLogger{name: name, + maxSize: maxSize, + backups: backups, + curRotate: -1, + fileSize: 0, + file: nil, + logEventEmitter: logEventEmitter, + locker: locker} + logger.updateLatestLog() + return logger +} + +func (l *FileLogger) SetPid(pid int) { + //NOTHING TO DO +} + +// return the next log file name +func (l *FileLogger) nextLogFile() { + l.curRotate++ + if l.curRotate >= l.backups { + l.curRotate = 0 + } +} + +func (l *FileLogger) updateLatestLog() { + dir := path.Dir(l.name) + files, err := ioutil.ReadDir(dir) + baseName := path.Base(l.name) + + if err != nil { + l.curRotate = 0 + } else { + //find all the rotate files + var latestFile os.FileInfo + latestNum := -1 + for _, fileInfo := range files { + if !fileInfo.IsDir() && strings.HasPrefix(fileInfo.Name(), baseName+".") { + n, err := strconv.Atoi(fileInfo.Name()[len(baseName)+1:]) + if err == nil && n >= 0 && n < l.backups { + if latestFile == nil || latestFile.ModTime().Before(fileInfo.ModTime()) { + latestFile = fileInfo + latestNum = n + } + } + } + } + l.curRotate = latestNum + if latestFile != nil { + l.fileSize = latestFile.Size() + } else { + l.fileSize = int64(0) + } + if l.fileSize >= l.maxSize || latestFile == nil { + l.nextLogFile() + l.openFile(true) + } else { + l.openFile(false) + } + } +} + +// open the file and truncate the file if trunc is true +func (l *FileLogger) openFile(trunc bool) error { + if l.file != nil { + l.file.Close() + } + var err error + fileName := l.GetCurrentLogFile() + if trunc { + l.file, err = os.Create(fileName) + } else { + l.file, err = os.OpenFile(fileName, os.O_RDWR|os.O_APPEND, 0666) + } + return err +} + +// get the name of current log file +func (l *FileLogger) GetCurrentLogFile() string { + return l.getLogFileName(l.curRotate) +} + +// get the name of previous log file +func (l *FileLogger) GetPrevLogFile() string { + i := (l.curRotate - 1 + l.backups) % l.backups + + return l.getLogFileName(i) +} + +func (l *FileLogger) getLogFileName(index int) string { + return fmt.Sprintf("%s.%d", l.name, index) +} + +// clear the current log file contents +func (l *FileLogger) ClearCurLogFile() error { + l.locker.Lock() + defer l.locker.Unlock() + + return l.openFile(true) +} + +func (l *FileLogger) ClearAllLogFile() error { + l.locker.Lock() + defer l.locker.Unlock() + + for i := 0; i < l.backups && i <= l.curRotate; i++ { + logFile := l.getLogFileName(i) + err := os.Remove(logFile) + if err != nil { + return faults.NewFault(faults.FAILED, err.Error()) + } + } + l.curRotate = 0 + err := l.openFile(true) + if err != nil { + return faults.NewFault(faults.FAILED, err.Error()) + } + return nil +} + +func (l *FileLogger) ReadLog(offset int64, length int64) (string, error) { + if offset < 0 && length != 0 { + return "", faults.NewFault(faults.BAD_ARGUMENTS, "BAD_ARGUMENTS") + } + if offset >= 0 && length < 0 { + return "", faults.NewFault(faults.BAD_ARGUMENTS, "BAD_ARGUMENTS") + } + + l.locker.Lock() + defer l.locker.Unlock() + f, err := os.Open(l.GetCurrentLogFile()) + + if err != nil { + return "", faults.NewFault(faults.FAILED, "FAILED") + } + defer f.Close() + + //check the length of file + statInfo, err := f.Stat() + if err != nil { + return "", faults.NewFault(faults.FAILED, "FAILED") + } + + fileLen := statInfo.Size() + + if offset < 0 { //offset < 0 && length == 0 + offset = fileLen + offset + if offset < 0 { + offset = 0 + } + length = fileLen - offset + } else if length == 0 { //offset >= 0 && length == 0 + if offset > 
fileLen { + return "", nil + } + length = fileLen - offset + } else { //offset >= 0 && length > 0 + + //if the offset exceeds the length of file + if offset >= fileLen { + return "", nil + } + + //compute actual bytes should be read + + if offset+length > fileLen { + length = fileLen - offset + } + } + + b := make([]byte, length) + n, err := f.ReadAt(b, offset) + if err != nil { + return "", faults.NewFault(faults.FAILED, "FAILED") + } + return string(b[:n]), nil +} + +func (l *FileLogger) ReadTailLog(offset int64, length int64) (string, int64, bool, error) { + if offset < 0 { + return "", offset, false, fmt.Errorf("offset should not be less than 0") + } + if length < 0 { + return "", offset, false, fmt.Errorf("length should be not be less than 0") + } + l.locker.Lock() + defer l.locker.Unlock() + + //open the file + f, err := os.Open(l.GetCurrentLogFile()) + if err != nil { + return "", 0, false, err + } + + defer f.Close() + + //get the length of file + statInfo, err := f.Stat() + if err != nil { + return "", 0, false, err + } + + fileLen := statInfo.Size() + + //check if offset exceeds the length of file + if offset >= fileLen { + return "", fileLen, true, nil + } + + //get the length + if offset+length > fileLen { + length = fileLen - offset + } + + b := make([]byte, length) + n, err := f.ReadAt(b, offset) + if err != nil { + return "", offset, false, err + } + return string(b[:n]), offset + int64(n), false, nil + +} + +// Override the function in io.Writer +func (l *FileLogger) Write(p []byte) (int, error) { + l.locker.Lock() + defer l.locker.Unlock() + + n, err := l.file.Write(p) + + if err != nil { + return n, err + } + l.logEventEmitter.emitLogEvent(string(p)) + l.fileSize += int64(n) + if l.fileSize >= l.maxSize { + fileInfo, errStat := os.Stat(fmt.Sprintf("%s.%d", l.name, l.curRotate)) + if errStat == nil { + l.fileSize = fileInfo.Size() + } else { + return n, errStat + } + } + if l.fileSize >= l.maxSize { + l.nextLogFile() + l.openFile(true) + } + return n, err +} + +func (l *FileLogger) Close() error { + if l.file != nil { + return l.file.Close() + } + return nil +} + +func (sl *SysLogger) Write(b []byte) (int, error) { + sl.logEventEmitter.emitLogEvent(string(b)) + if sl.logWriter != nil { + return sl.logWriter.Write(b) + } else { + return 0, errors.New("not connect to syslog server") + } +} + +func (sl *SysLogger) Close() error { + if sl.logWriter != nil { + return sl.logWriter.Close() + } else { + return errors.New("not connect to syslog server") + } +} +func NewNullLogger(logEventEmitter LogEventEmitter) *NullLogger { + return &NullLogger{logEventEmitter: logEventEmitter} +} + +func (l *NullLogger) SetPid(pid int) { + //NOTHING TO DO +} + +func (l *NullLogger) Write(p []byte) (int, error) { + l.logEventEmitter.emitLogEvent(string(p)) + return len(p), nil +} + +func (l *NullLogger) Close() error { + return nil +} + +func (l *NullLogger) ReadLog(offset int64, length int64) (string, error) { + return "", faults.NewFault(faults.NO_FILE, "NO_FILE") +} + +func (l *NullLogger) ReadTailLog(offset int64, length int64) (string, int64, bool, error) { + return "", 0, false, faults.NewFault(faults.NO_FILE, "NO_FILE") +} + +func (l *NullLogger) ClearCurLogFile() error { + return fmt.Errorf("No log") +} + +func (l *NullLogger) ClearAllLogFile() error { + return faults.NewFault(faults.NO_FILE, "NO_FILE") +} + +func NewNullLocker() *NullLocker { + return &NullLocker{} +} + +func (l *NullLocker) Lock() { +} + +func (l *NullLocker) Unlock() { +} + +type StdLogger struct { + NullLogger + 
logEventEmitter LogEventEmitter + writer io.Writer +} + +func NewStdoutLogger(logEventEmitter LogEventEmitter) *StdLogger { + return &StdLogger{logEventEmitter: logEventEmitter, + writer: os.Stdout} +} + +func (l *StdLogger) Write(p []byte) (int, error) { + n, err := l.writer.Write(p) + if err != nil { + l.logEventEmitter.emitLogEvent(string(p)) + } + return n, err +} + +func NewStderrLogger(logEventEmitter LogEventEmitter) *StdLogger { + return &StdLogger{logEventEmitter: logEventEmitter, + writer: os.Stdout} +} + +type LogCaptureLogger struct { + underlineLogger Logger + procCommEventCapWriter io.Writer + procCommEventCapture *events.ProcCommEventCapture +} + +func NewLogCaptureLogger(underlineLogger Logger, + captureMaxBytes int, + stdType string, + procName string, + groupName string) *LogCaptureLogger { + r, w := io.Pipe() + eventCapture := events.NewProcCommEventCapture(r, + captureMaxBytes, + stdType, + procName, + groupName) + return &LogCaptureLogger{underlineLogger: underlineLogger, + procCommEventCapWriter: w, + procCommEventCapture: eventCapture} +} + +func (l *LogCaptureLogger) SetPid(pid int) { + l.procCommEventCapture.SetPid(pid) +} + +func (l *LogCaptureLogger) Write(p []byte) (int, error) { + l.procCommEventCapWriter.Write(p) + return l.underlineLogger.Write(p) +} + +func (l *LogCaptureLogger) Close() error { + return l.underlineLogger.Close() +} + +func (l *LogCaptureLogger) ReadLog(offset int64, length int64) (string, error) { + return l.underlineLogger.ReadLog(offset, length) +} + +func (l *LogCaptureLogger) ReadTailLog(offset int64, length int64) (string, int64, bool, error) { + return l.underlineLogger.ReadTailLog(offset, length) +} + +func (l *LogCaptureLogger) ClearCurLogFile() error { + return l.underlineLogger.ClearCurLogFile() +} + +func (l *LogCaptureLogger) ClearAllLogFile() error { + return l.underlineLogger.ClearAllLogFile() +} + +type NullLogEventEmitter struct { +} + +func NewNullLogEventEmitter() *NullLogEventEmitter { + return &NullLogEventEmitter{} +} + +func (ne *NullLogEventEmitter) emitLogEvent(data string) { +} + +type StdLogEventEmitter struct { + Type string + process_name string + group_name string + pidFunc func() int +} + +func NewStdoutLogEventEmitter(process_name string, group_name string, procPidFunc func() int) *StdLogEventEmitter { + return &StdLogEventEmitter{Type: "stdout", + process_name: process_name, + group_name: group_name, + pidFunc: procPidFunc} +} + +func NewStderrLogEventEmitter(process_name string, group_name string, procPidFunc func() int) *StdLogEventEmitter { + return &StdLogEventEmitter{Type: "stderr", + process_name: process_name, + group_name: group_name, + pidFunc: procPidFunc} +} + +func (se *StdLogEventEmitter) emitLogEvent(data string) { + if se.Type == "stdout" { + events.EmitEvent(events.CreateProcessLogStdoutEvent(se.process_name, se.group_name, se.pidFunc(), data)) + } else { + events.EmitEvent(events.CreateProcessLogStderrEvent(se.process_name, se.group_name, se.pidFunc(), data)) + } +} diff --git a/vendor/github.com/rpoletaev/supervisord/logger/log_unix.go b/vendor/github.com/rpoletaev/supervisord/logger/log_unix.go new file mode 100644 index 000000000..da683520c --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/logger/log_unix.go @@ -0,0 +1,16 @@ +// +build !windows,!nacl,!plan9 + +package logger + +import ( + "log/syslog" +) + +func NewSysLogger(name string, logEventEmitter LogEventEmitter) *SysLogger { + writer, err := syslog.New(syslog.LOG_DEBUG, name) + logger := &SysLogger{logEventEmitter: 
logEventEmitter} + if err == nil { + logger.logWriter = writer + } + return logger +} diff --git a/vendor/github.com/rpoletaev/supervisord/logger/log_windows.go b/vendor/github.com/rpoletaev/supervisord/logger/log_windows.go new file mode 100644 index 000000000..cf4609f57 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/logger/log_windows.go @@ -0,0 +1,7 @@ +// +build windows plan9 nacl + +package logger + +func NewSysLogger(name string, logEventEmitter LogEventEmitter) *SysLogger { + return &SysLogger{logEventEmitter: logEventEmitter, logWriter: nil} +} diff --git a/vendor/github.com/rpoletaev/supervisord/main.go b/vendor/github.com/rpoletaev/supervisord/main.go new file mode 100644 index 000000000..dfc491df6 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/main.go @@ -0,0 +1,75 @@ +package main + +import ( + "fmt" + "os" + "os/signal" + "runtime" + "syscall" + + log "github.com/sirupsen/logrus" + "github.com/jessevdk/go-flags" +) + +type Options struct { + Configuration string `short:"c" long:"configuration" description:"the configuration file" default:"supervisord.conf"` + Daemon bool `short:"d" long:"daemon" description:"run as daemon"` +} + +func init() { + log.SetOutput(os.Stdout) + if runtime.GOOS == "windows" { + log.SetFormatter(&log.TextFormatter{DisableColors: true, FullTimestamp: true}) + } else { + log.SetFormatter(&log.TextFormatter{DisableColors: false, FullTimestamp: true}) + } + log.SetLevel(log.DebugLevel) +} + +func initSignals(s *Supervisor) { + sigs := make(chan os.Signal, 1) + signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) + go func() { + sig := <-sigs + log.WithFields(log.Fields{"signal": sig}).Info("receive a signal to stop all process & exit") + s.procMgr.StopAllProcesses() + os.Exit(-1) + }() + +} + +var options Options +var parser = flags.NewParser(&options, flags.Default & ^flags.PrintErrors) + +func RunServer() { + // infinite loop for handling Restart ('reload' command) + for true { + s := NewSupervisor(options.Configuration) + initSignals(s) + if sErr, _, _, _ := s.Reload(); sErr != nil { + panic(sErr) + } + s.WaitForExit() + } +} + +func main() { + if _, err := parser.Parse(); err != nil { + flagsErr, ok := err.(*flags.Error) + if ok { + switch flagsErr.Type { + case flags.ErrHelp: + fmt.Fprintln(os.Stdout, err) + os.Exit(0) + case flags.ErrCommandRequired: + if options.Daemon { + Deamonize(RunServer) + } else { + RunServer() + } + default: + panic(err) + } + } + } +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/command_parser.go b/vendor/github.com/rpoletaev/supervisord/process/command_parser.go new file mode 100644 index 000000000..b711bafdb --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/command_parser.go @@ -0,0 +1,81 @@ +package process + +import ( + "fmt" + "unicode" +) + +// find the position of byte ch in the string s start from offset +// +// return: -1 if byte ch is not found, >= offset if the ch is found +// in the string s from offset +func findChar(s string, offset int, ch byte) int { + for i := offset; i < len(s); i++ { + if s[i] == '\\' { + i++ + } else if s[i] == ch { + return i + } + } + return -1 +} + +// skip all the white space and return the first position of non-space char +// +// return: the first position of non-space char or -1 if all the char +// from offset are space +func skipSpace(s string, offset int) int { + for i := offset; i < len(s); i++ { + if !unicode.IsSpace(rune(s[i])) { + return i + } + } + return -1 +} + +func appendArgument(arg string, args []string) 
[]string { + if arg[0] == '"' || arg[0] == '\'' { + return append(args, arg[1:len(arg)-1]) + } + return append(args, arg) +} + +func parseCommand(command string) ([]string, error) { + args := make([]string, 0) + cmdLen := len(command) + for i := 0; i < cmdLen; { + //find the first non-space char + j := skipSpace(command, i) + if j == -1 { + break + } + i = j + for ; j < cmdLen; j++ { + if unicode.IsSpace(rune(command[j])) { + args = appendArgument(command[i:j], args) + i = j + 1 + break + } else if command[j] == '\\' { + j++ + } else if command[j] == '"' || command[j] == '\'' { + k := findChar(command, j+1, command[j]) + if k == -1 { + args = appendArgument(command[i:], args) + i = cmdLen + } else { + args = appendArgument(command[i:k+1], args) + i = k + 1 + } + break + } + } + if j >= cmdLen { + args = appendArgument(command[i:], args) + i = cmdLen + } + } + if len(args) <= 0 { + return nil, fmt.Errorf("no command from empty string") + } + return args, nil +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/path.go b/vendor/github.com/rpoletaev/supervisord/process/path.go new file mode 100644 index 000000000..82b2fbd1d --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/path.go @@ -0,0 +1,46 @@ +package process + +import ( + "os/user" + "path/filepath" +) + +func path_split(path string) []string { + r := make([]string, 0) + cur_path := path + for { + dir, file := filepath.Split(cur_path) + if len(file) > 0 { + r = append(r, file) + } + if len(dir) <= 0 { + break + } + cur_path = dir[0 : len(dir)-1] + } + for i, j := 0, len(r)-1; i < j; i, j = i+1, j-1 { + r[i], r[j] = r[j], r[i] + } + return r +} +func Path_expand(path string) (string, error) { + pathList := path_split(path) + + if len(pathList) > 0 && len(pathList[0]) > 0 && pathList[0][0] == '~' { + var usr *user.User = nil + var err error = nil + + if pathList[0] == "~" { + usr, err = user.Current() + } else { + usr, err = user.Lookup(pathList[0][1:]) + } + + if err != nil { + return "", err + } + pathList[0] = usr.HomeDir + return filepath.Join(pathList...), nil + } + return path, nil +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_linux.go b/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_linux.go new file mode 100644 index 000000000..90e23f40b --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_linux.go @@ -0,0 +1,12 @@ +// +build linux + +package process + +import ( + "syscall" +) + +func set_deathsig(sysProcAttr *syscall.SysProcAttr) { + sysProcAttr.Setpgid = true + sysProcAttr.Pdeathsig = syscall.SIGKILL +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_other.go b/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_other.go new file mode 100644 index 000000000..da0042ecc --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_other.go @@ -0,0 +1,12 @@ +// +build !linux +// +build !windows + +package process + +import ( + "syscall" +) + +func set_deathsig(sysProcAttr *syscall.SysProcAttr) { + sysProcAttr.Setpgid = true +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_windows.go b/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_windows.go new file mode 100644 index 000000000..e19e3d78c --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/pdeathsig_windows.go @@ -0,0 +1,9 @@ +// +build windows +package process + +import ( + "syscall" +) + +func set_deathsig(_ *syscall.SysProcAttr) { +} diff --git 
a/vendor/github.com/rpoletaev/supervisord/process/process.go b/vendor/github.com/rpoletaev/supervisord/process/process.go new file mode 100644 index 000000000..f016dc3a6 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/process.go @@ -0,0 +1,689 @@ +package process + +import ( + "fmt" + "io" + "os" + "os/exec" + "os/user" + "strconv" + "strings" + "sync" + "syscall" + "time" + + "github.com/rpoletaev/supervisord/config" + "github.com/rpoletaev/supervisord/events" + "github.com/rpoletaev/supervisord/logger" + "github.com/rpoletaev/supervisord/signals" + log "github.com/sirupsen/logrus" +) + +type ProcessState int + +const ( + STOPPED ProcessState = iota + STARTING = 10 + RUNNING = 20 + BACKOFF = 30 + STOPPING = 40 + EXITED = 100 + FATAL = 200 + UNKNOWN = 1000 +) + +func (p ProcessState) String() string { + switch p { + case STOPPED: + return "STOPPED" + case STARTING: + return "STARTING" + case RUNNING: + return "RUNNING" + case BACKOFF: + return "BACKOFF" + case STOPPING: + return "STOPPING" + case EXITED: + return "EXITED" + case FATAL: + return "FATAL" + default: + return "UNKNOWN" + } +} + +type Process struct { + supervisor_id string + config *config.ConfigEntry + cmd *exec.Cmd + startTime time.Time + stopTime time.Time + state ProcessState + //true if process is starting + inStart bool + //true if the process is stopped by user + stopByUser bool + retryTimes int + lock sync.RWMutex + stdin io.WriteCloser + StdoutLog logger.Logger + StderrLog logger.Logger +} + +func NewProcess(supervisor_id string, config *config.ConfigEntry) *Process { + proc := &Process{supervisor_id: supervisor_id, + config: config, + cmd: nil, + startTime: time.Unix(0, 0), + stopTime: time.Unix(0, 0), + state: STOPPED, + inStart: false, + stopByUser: false, + retryTimes: 0} + proc.config = config + proc.cmd = nil + + //start the process if autostart is set to true + //if proc.isAutoStart() { + // proc.Start(false) + //} + + return proc +} + +func (p *Process) Start(wait bool) { + log.WithFields(log.Fields{"program": p.GetName()}).Info("try to start program") + p.lock.Lock() + if p.inStart { + log.WithFields(log.Fields{"program": p.GetName()}).Info("Don't start program again, program is already started") + p.lock.Unlock() + return + } + + p.inStart = true + p.stopByUser = false + p.lock.Unlock() + + var runCond *sync.Cond = nil + finished := false + if wait { + runCond = sync.NewCond(&sync.Mutex{}) + runCond.L.Lock() + } + + go func() { + p.retryTimes = 0 + + for { + if wait { + runCond.L.Lock() + } + p.run(func() { + finished = true + if wait { + runCond.L.Unlock() + runCond.Signal() + } + }) + if (p.stopTime.Unix() - p.startTime.Unix()) < int64(p.getStartSeconds()) { + p.retryTimes++ + } else { + p.retryTimes = 0 + } + if p.stopByUser { + log.WithFields(log.Fields{"program": p.GetName()}).Info("Stopped by user, don't start it again") + break + } + if !p.isAutoRestart() { + log.WithFields(log.Fields{"program": p.GetName()}).Info("Don't start the stopped program because its autorestart flag is false") + break + } + if p.retryTimes >= p.getStartRetries() { + log.WithFields(log.Fields{"program": p.GetName()}).Info("Don't start the stopped program because its retry times ", p.retryTimes, " is greater than start retries ", p.getStartRetries()) + break + } + } + p.lock.Lock() + p.inStart = false + p.lock.Unlock() + }() + if wait && !finished { + runCond.Wait() + runCond.L.Unlock() + } +} + +func (p *Process) GetName() string { + if p.config.IsProgram() { + return p.config.GetProgramName() + } else if 
p.config.IsEventListener() { + return p.config.GetEventListenerName() + } else { + return "" + } +} + +func (p *Process) GetGroup() string { + return p.config.Group +} + +func (p *Process) GetDescription() string { + p.lock.Lock() + defer p.lock.Unlock() + if p.state == RUNNING { + seconds := int(time.Now().Sub(p.startTime).Seconds()) + minutes := seconds / 60 + hours := minutes / 60 + days := hours / 24 + if days > 0 { + return fmt.Sprintf("pid %d, uptime %d days, %d:%02d:%02d", p.cmd.Process.Pid, days, hours%24, minutes%60, seconds%60) + } else { + return fmt.Sprintf("pid %d, uptime %d:%02d:%02d", p.cmd.Process.Pid, hours%24, minutes%60, seconds%60) + } + } else if p.state != STOPPED { + return p.stopTime.String() + } + return "" +} + +func (p *Process) GetExitstatus() int { + p.lock.Lock() + defer p.lock.Unlock() + + if p.state == EXITED || p.state == BACKOFF { + if p.cmd.ProcessState == nil { + return 0 + } + status, ok := p.cmd.ProcessState.Sys().(syscall.WaitStatus) + if ok { + return status.ExitStatus() + } + } + return 0 +} + +func (p *Process) GetPid() int { + p.lock.Lock() + defer p.lock.Unlock() + + if p.state == STOPPED || p.state == FATAL || p.state == UNKNOWN || p.state == EXITED || p.state == BACKOFF { + return 0 + } + return p.cmd.Process.Pid +} + +// Get the process state +func (p *Process) GetState() ProcessState { + return p.state +} + +func (p *Process) GetStartTime() time.Time { + return p.startTime +} + +func (p *Process) GetStopTime() time.Time { + switch p.state { + case STARTING: + fallthrough + case RUNNING: + fallthrough + case STOPPING: + return time.Unix(0, 0) + default: + return p.stopTime + } +} + +func (p *Process) GetStdoutLogfile() string { + file_name := p.config.GetStringExpression("stdout_logfile", "/dev/null") + expand_file, err := Path_expand(file_name) + if err == nil { + return expand_file + } else { + return file_name + } +} + +func (p *Process) GetStderrLogfile() string { + file_name := p.config.GetStringExpression("stderr_logfile", "/dev/null") + expand_file, err := Path_expand(file_name) + if err == nil { + return expand_file + } else { + return file_name + } +} + +func (p *Process) getStartSeconds() int { + return p.config.GetInt("startsecs", 1) +} + +func (p *Process) getStartRetries() int { + return p.config.GetInt("startretries", 3) +} + +func (p *Process) isAutoStart() bool { + return p.config.GetString("autostart", "true") == "true" +} + +func (p *Process) GetPriority() int { + return p.config.GetInt("priority", 999) +} + +func (p *Process) getNumberProcs() int { + return p.config.GetInt("numprocs", 1) +} + +func (p *Process) SendProcessStdin(chars string) error { + if p.stdin != nil { + _, err := p.stdin.Write([]byte(chars)) + return err + } + return fmt.Errorf("NO_FILE") +} + +// check if the process should be +func (p *Process) isAutoRestart() bool { + autoRestart := p.config.GetString("autorestart", "unexpected") + + if autoRestart == "false" { + return false + } else if autoRestart == "true" { + return true + } else { + p.lock.Lock() + defer p.lock.Unlock() + if p.cmd != nil && p.cmd.ProcessState != nil { + exitCode, err := p.getExitCode() + return err == nil && p.inExitCodes(exitCode) + } + } + return false + +} + +func (p *Process) inExitCodes(exitCode int) bool { + for _, code := range p.getExitCodes() { + if code == exitCode { + return true + } + } + return false +} + +func (p *Process) getExitCode() (int, error) { + if p.cmd.ProcessState == nil { + return -1, fmt.Errorf("no exit code") + } + if status, ok := 
p.cmd.ProcessState.Sys().(syscall.WaitStatus); ok { + return status.ExitStatus(), nil + } + + return -1, fmt.Errorf("no exit code") + +} + +func (p *Process) getExitCodes() []int { + strExitCodes := strings.Split(p.config.GetString("exitcodes", "0,2"), ",") + result := make([]int, 0) + for _, val := range strExitCodes { + i, err := strconv.Atoi(val) + if err == nil { + result = append(result, i) + } + } + return result +} + +func (p *Process) run(finishCb func()) { + args, err := parseCommand(p.config.GetStringExpression("command", "")) + + if err != nil { + log.Error("the command is empty string") + finishCb() + return + } + p.lock.Lock() + if p.cmd != nil && p.cmd.ProcessState != nil { + status := p.cmd.ProcessState.Sys().(syscall.WaitStatus) + if status.Continued() { + log.WithFields(log.Fields{"program": p.GetName()}).Info("Don't start program because it is running") + p.lock.Unlock() + finishCb() + return + } + } + p.cmd = exec.Command(args[0]) + if len(args) > 1 { + p.cmd.Args = args + } + p.cmd.SysProcAttr = &syscall.SysProcAttr{} + if p.setUser() != nil { + log.WithFields(log.Fields{"user": p.config.GetString("user", "")}).Error("fail to run as user") + p.lock.Unlock() + finishCb() + return + } + set_deathsig(p.cmd.SysProcAttr) + p.setEnv() + p.setDir() + p.setLog() + + p.stdin, _ = p.cmd.StdinPipe() + p.startTime = time.Now() + p.changeStateTo(STARTING) + err = p.cmd.Start() + if err != nil { + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Errorf("fail to start program with error:%v", err) + p.changeStateTo(FATAL) + p.stopTime = time.Now() + p.lock.Unlock() + finishCb() + } else { + if p.StdoutLog != nil { + p.StdoutLog.SetPid(p.cmd.Process.Pid) + } + if p.StderrLog != nil { + p.StderrLog.SetPid(p.cmd.Process.Pid) + } + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Info("success to start program") + startSecs := p.config.GetInt("startsecs", 1) + //Set startsec to 0 to indicate that the program needn't stay + //running for any particular amount of time. 
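+		// (editorial note, not in the original source) startsecs > 0 means the child
+		// must stay up for that many seconds before this loop marks it RUNNING; if it
+		// exits earlier, the bookkeeping after cmd.Wait() below records BACKOFF instead
+		// of EXITED, and the retry loop in Start() may then relaunch it, subject to
+		// the autorestart and startretries settings.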
+ if startSecs <= 0 { + p.changeStateTo(RUNNING) + + } else { + time.Sleep(time.Duration(startSecs) * time.Second) + if tmpProc, err := os.FindProcess(p.cmd.Process.Pid); err == nil && tmpProc != nil { + p.changeStateTo(RUNNING) + } + } + p.lock.Unlock() + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Debug("wait program exit") + finishCb() + err = p.cmd.Wait() + if err == nil { + if p.cmd.ProcessState != nil { + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Infof("program stopped with status:%v", p.cmd.ProcessState) + } else { + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Info("program stopped") + } + } else { + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Errorf("program stopped with error:%v", err) + } + + p.lock.Lock() + p.stopTime = time.Now() + if p.stopTime.Unix()-p.startTime.Unix() < int64(startSecs) { + p.changeStateTo(BACKOFF) + } else { + p.changeStateTo(EXITED) + } + p.lock.Unlock() + } + +} + +func (p *Process) changeStateTo(procState ProcessState) { + if p.config.IsProgram() { + progName := p.config.GetProgramName() + groupName := p.config.GetGroupName() + if procState == STARTING { + events.EmitEvent(events.CreateProcessStartingEvent(progName, groupName, p.state.String(), p.retryTimes)) + } else if procState == RUNNING { + events.EmitEvent(events.CreateProcessRunningEvent(progName, groupName, p.state.String(), p.cmd.Process.Pid)) + } else if procState == BACKOFF { + events.EmitEvent(events.CreateProcessBackoffEvent(progName, groupName, p.state.String(), p.retryTimes)) + } else if procState == STOPPING { + events.EmitEvent(events.CreateProcessStoppingEvent(progName, groupName, p.state.String(), p.cmd.Process.Pid)) + } else if procState == EXITED { + exitCode, err := p.getExitCode() + expected := 0 + if err == nil && p.inExitCodes(exitCode) { + expected = 1 + } + events.EmitEvent(events.CreateProcessExitedEvent(progName, groupName, p.state.String(), expected, p.cmd.Process.Pid)) + } else if procState == FATAL { + events.EmitEvent(events.CreateProcessFatalEvent(progName, groupName, p.state.String())) + } else if procState == STOPPED { + events.EmitEvent(events.CreateProcessStoppedEvent(progName, groupName, p.state.String(), p.cmd.Process.Pid)) + } else if procState == UNKNOWN { + events.EmitEvent(events.CreateProcessUnknownEvent(progName, groupName, p.state.String())) + } + } + p.state = procState +} + +func (p *Process) Signal(sig os.Signal) error { + p.lock.Lock() + defer p.lock.Unlock() + + return p.sendSignal(sig) +} + +func (p *Process) sendSignal(sig os.Signal) error { + if p.cmd != nil && p.cmd.Process != nil { + err := signals.Kill(p.cmd.Process, sig) + return err + } + return fmt.Errorf("process is not started") +} + +func (p *Process) setEnv() { + env := p.config.GetEnv("environment") + if len(env) != 0 { + p.cmd.Env = append(os.Environ(), env...) 
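+		// (editorial note, not in the original source) the configured variables are
+		// appended after the inherited os.Environ() entries rather than merged with
+		// them, so a key defined in both places appears twice in the child's environment.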
+ } else { + p.cmd.Env = os.Environ() + } +} + +func (p *Process) setDir() { + dir := p.config.GetStringExpression("directory", "") + if dir != "" { + p.cmd.Dir = dir + } +} + +func (p *Process) setLog() { + if p.config.IsProgram() { + p.StdoutLog = p.createLogger(p.GetStdoutLogfile(), + int64(p.config.GetBytes("stdout_logfile_maxbytes", 50*1024*1024)), + p.config.GetInt("stdout_logfile_backups", 10), + p.createStdoutLogEventEmitter()) + capture_bytes := p.config.GetBytes("stdout_capture_maxbytes", 0) + if capture_bytes > 0 { + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Info("capture stdout process communication") + p.StdoutLog = logger.NewLogCaptureLogger(p.StdoutLog, + capture_bytes, + "PROCESS_COMMUNICATION_STDOUT", + p.GetName(), + p.GetGroup()) + } + + p.cmd.Stdout = p.StdoutLog + + if p.config.GetBool("redirect_stderr", false) { + p.StderrLog = p.StdoutLog + } else { + p.StderrLog = p.createLogger(p.GetStderrLogfile(), + int64(p.config.GetBytes("stderr_logfile_maxbytes", 50*1024*1024)), + p.config.GetInt("stderr_logfile_backups", 10), + p.createStderrLogEventEmitter()) + } + + capture_bytes = p.config.GetBytes("stderr_capture_maxbytes", 0) + + if capture_bytes > 0 { + log.WithFields(log.Fields{"program": p.config.GetProgramName()}).Info("capture stderr process communication") + p.StderrLog = logger.NewLogCaptureLogger(p.StdoutLog, + capture_bytes, + "PROCESS_COMMUNICATION_STDERR", + p.GetName(), + p.GetGroup()) + } + + p.cmd.Stderr = p.StderrLog + + } else if p.config.IsEventListener() { + in, err := p.cmd.StdoutPipe() + if err != nil { + log.WithFields(log.Fields{"eventListener": p.config.GetEventListenerName()}).Error("fail to get stdin") + return + } + out, err := p.cmd.StdinPipe() + if err != nil { + log.WithFields(log.Fields{"eventListener": p.config.GetEventListenerName()}).Error("fail to get stdout") + return + } + events := strings.Split(p.config.GetString("events", ""), ",") + for i, event := range events { + events[i] = strings.TrimSpace(event) + } + + p.registerEventListener(p.config.GetEventListenerName(), + events, + in, + out) + } +} + +func (p *Process) createStdoutLogEventEmitter() logger.LogEventEmitter { + if p.config.GetBytes("stdout_capture_maxbytes", 0) <= 0 && p.config.GetBool("stdout_events_enabled", false) { + return logger.NewStdoutLogEventEmitter(p.config.GetProgramName(), p.config.GetGroupName(), func() int { + return p.GetPid() + }) + } else { + return logger.NewNullLogEventEmitter() + } +} + +func (p *Process) createStderrLogEventEmitter() logger.LogEventEmitter { + if p.config.GetBytes("stderr_capture_maxbytes", 0) <= 0 && p.config.GetBool("stderr_events_enabled", false) { + return logger.NewStdoutLogEventEmitter(p.config.GetProgramName(), p.config.GetGroupName(), func() int { + return p.GetPid() + }) + } else { + return logger.NewNullLogEventEmitter() + } +} + +func (p *Process) registerEventListener(eventListenerName string, + _events []string, + stdin io.Reader, + stdout io.Writer) { + eventListener := events.NewEventListener(eventListenerName, + p.supervisor_id, + stdin, + stdout, + p.config.GetInt("buffer_size", 100)) + events.RegisterEventListener(eventListenerName, _events, eventListener) +} + +func (p *Process) unregisterEventListener(eventListenerName string) { + events.UnregisterEventListener(eventListenerName) +} + +func (p *Process) createLogger(logFile string, maxBytes int64, backups int, logEventEmitter logger.LogEventEmitter) logger.Logger { + var mylogger logger.Logger + mylogger = 
logger.NewNullLogger(logEventEmitter) + + if logFile == "/dev/stdout" { + mylogger = logger.NewStdoutLogger(logEventEmitter) + } else if logFile == "/dev/stderr" { + mylogger = logger.NewStderrLogger(logEventEmitter) + } else if logFile == "syslog" { + mylogger = logger.NewSysLogger(p.GetName(), logEventEmitter) + } else if len(logFile) > 0 { + mylogger = logger.NewFileLogger(logFile, maxBytes, backups, logEventEmitter, logger.NewNullLocker()) + } + return mylogger +} + +func (p *Process) setUser() error { + userName := p.config.GetString("user", "") + if len(userName) == 0 { + return nil + } + + //check if group is provided + pos := strings.Index(userName, ":") + groupName := "" + if pos != -1 { + groupName = userName[pos+1:] + userName = userName[0:pos] + } + u, err := user.Lookup(userName) + if err != nil { + return err + } + uid, err := strconv.ParseUint(u.Uid, 10, 32) + if err != nil { + return err + } + gid, err := strconv.ParseUint(u.Gid, 10, 32) + if err != nil && groupName == "" { + return err + } + if groupName != "" { + g, err := user.LookupGroup(groupName) + if err != nil { + return err + } + gid, err = strconv.ParseUint(g.Gid, 10, 32) + if err != nil { + return err + } + } + set_user_id(p.cmd.SysProcAttr, uint32(uid), uint32(gid)) + return nil +} + +//send signal to process to stop it +func (p *Process) Stop(wait bool) { + p.lock.RLock() + p.stopByUser = true + p.lock.RUnlock() + log.WithFields(log.Fields{"program": p.GetName()}).Info("stop the program") + sig, err := signals.ToSignal(p.config.GetString("stopsignal", "")) + if err == nil { + p.Signal(sig) + } + waitsecs := time.Duration(p.config.GetInt("stopwaitsecs", 10)) * time.Second + endTime := time.Now().Add(waitsecs) + go func() { + //wait at most "stopwaitsecs" seconds + for { + //if it already exits + if p.state != STARTING && p.state != RUNNING && p.state != STOPPING { + break + } + //if endTime reaches, raise signal syscall.SIGKILL + if endTime.Before(time.Now()) { + log.WithFields(log.Fields{"program": p.GetName()}).Info("force to kill the program") + p.Signal(syscall.SIGKILL) + break + } else { + time.Sleep(1 * time.Second) + } + } + }() + if wait { + for { + // if the program exits + if p.state != STARTING && p.state != RUNNING && p.state != STOPPING { + break + } + time.Sleep(1 * time.Second) + } + } +} + +func (p *Process) GetStatus() string { + if p.cmd.ProcessState.Exited() { + return p.cmd.ProcessState.String() + } + return "running" +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/process_manager.go b/vendor/github.com/rpoletaev/supervisord/process/process_manager.go new file mode 100644 index 000000000..c99e72481 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/process_manager.go @@ -0,0 +1,160 @@ +package process + +import ( + "strings" + "sync" + + "github.com/rpoletaev/supervisord/config" + log "github.com/sirupsen/logrus" +) + +type ProcessManager struct { + procs map[string]*Process + eventListeners map[string]*Process + lock sync.Mutex +} + +func NewProcessManager() *ProcessManager { + return &ProcessManager{procs: make(map[string]*Process), + eventListeners: make(map[string]*Process), + } +} + +func (pm *ProcessManager) CreateProcess(supervisor_id string, config *config.ConfigEntry) *Process { + pm.lock.Lock() + defer pm.lock.Unlock() + if config.IsProgram() { + return pm.createProgram(supervisor_id, config) + } else if config.IsEventListener() { + return pm.createEventListener(supervisor_id, config) + } else { + return nil + } +} + +func (pm *ProcessManager) 
StartAutoStartPrograms() { + pm.ForEachProcess(func(proc *Process) { + if proc.isAutoStart() { + proc.Start(false) + } + }) +} + +func (pm *ProcessManager) createProgram(supervisor_id string, config *config.ConfigEntry) *Process { + procName := config.GetProgramName() + + proc, ok := pm.procs[procName] + + if !ok { + proc = NewProcess(supervisor_id, config) + pm.procs[procName] = proc + } + log.Info("create process:", procName) + return proc +} + +func (pm *ProcessManager) createEventListener(supervisor_id string, config *config.ConfigEntry) *Process { + eventListenerName := config.GetEventListenerName() + + evtListener, ok := pm.eventListeners[eventListenerName] + + if !ok { + evtListener = NewProcess(supervisor_id, config) + pm.eventListeners[eventListenerName] = evtListener + } + log.Info("create event listener:", eventListenerName) + return evtListener +} + +func (pm *ProcessManager) Add(name string, proc *Process) { + pm.lock.Lock() + defer pm.lock.Unlock() + pm.procs[name] = proc + log.Info("add process:", name) +} + +// remove the process from the manager +// +// Arguments: +// name - the name of program +// +// Return the process or nil +func (pm *ProcessManager) Remove(name string) *Process { + pm.lock.Lock() + defer pm.lock.Unlock() + proc, _ := pm.procs[name] + delete(pm.procs, name) + log.Info("remove process:", name) + return proc +} + +// return process if found or nil if not found +func (pm *ProcessManager) Find(name string) *Process { + pm.lock.Lock() + defer pm.lock.Unlock() + proc, ok := pm.procs[name] + if ok { + log.Debug("succeed to find process:", name) + } else { + //remove group field if it is included + if pos := strings.Index(name, ":"); pos != -1 { + proc, ok = pm.procs[name[pos+1:]] + } + if !ok { + log.Info("fail to find process:", name) + } + } + return proc +} + +// clear all the processes +func (pm *ProcessManager) Clear() { + pm.lock.Lock() + defer pm.lock.Unlock() + pm.procs = make(map[string]*Process) +} + +func (pm *ProcessManager) ForEachProcess(procFunc func(p *Process)) { + pm.lock.Lock() + defer pm.lock.Unlock() + + procs := pm.getAllProcess() + for _, proc := range procs { + procFunc(proc) + } +} + +func (pm *ProcessManager) getAllProcess() []*Process { + tmpProcs := make([]*Process, 0) + for _, proc := range pm.procs { + tmpProcs = append(tmpProcs, proc) + } + return sortProcess(tmpProcs) +} + +func (pm *ProcessManager) StopAllProcesses() { + pm.ForEachProcess(func(proc *Process) { + proc.Stop(true) + }) +} + +func sortProcess(procs []*Process) []*Process { + prog_configs := make([]*config.ConfigEntry, 0) + for _, proc := range procs { + if proc.config.IsProgram() { + prog_configs = append(prog_configs, proc.config) + } + } + + result := make([]*Process, 0) + p := config.NewProcessSorter() + for _, config := range p.SortProgram(prog_configs) { + for _, proc := range procs { + if proc.config == config { + result = append(result, proc) + } + } + } + + return result +} diff --git a/vendor/github.com/rpoletaev/supervisord/process/set_user_id.go b/vendor/github.com/rpoletaev/supervisord/process/set_user_id.go new file mode 100644 index 000000000..ec28fb5c4 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/set_user_id.go @@ -0,0 +1,11 @@ +// +build !windows + +package process + +import ( + "syscall" +) + +func set_user_id(procAttr *syscall.SysProcAttr, uid uint32, gid uint32) { + procAttr.Credential = &syscall.Credential{Uid: uid, Gid: gid, NoSetGroups: true} +} diff --git 
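// The unix set_user_id above relies on SysProcAttr.Credential: the uid/gid are applied
// in the child between fork and exec, which only succeeds when supervisord itself has
// the privilege to switch users (typically when it runs as root). A minimal, unix-only
// sketch with already-resolved numeric ids (setUser above does the name lookups); the
// helper name and the example ids are illustrative only:
package main

import (
	"fmt"
	"os/exec"
	"syscall"
)

// commandAsUser builds a command that will run under the given uid/gid.
func commandAsUser(uid, gid uint32, name string, args ...string) *exec.Cmd {
	cmd := exec.Command(name, args...)
	cmd.SysProcAttr = &syscall.SysProcAttr{
		Credential: &syscall.Credential{Uid: uid, Gid: gid, NoSetGroups: true},
	}
	return cmd
}

func main() {
	cmd := commandAsUser(1000, 1000, "id")
	fmt.Println(cmd.Args) // the command is only constructed here, not started
}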
a/vendor/github.com/rpoletaev/supervisord/process/set_user_id_windows.go b/vendor/github.com/rpoletaev/supervisord/process/set_user_id_windows.go new file mode 100644 index 000000000..d114e4068 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/process/set_user_id_windows.go @@ -0,0 +1,11 @@ +// +build windows + +package process + +import ( + "syscall" +) + +func set_user_id(_ *syscall.SysProcAttr, _ uint32, _ uint32) { + +} diff --git a/vendor/github.com/rpoletaev/supervisord/signals/signal.go b/vendor/github.com/rpoletaev/supervisord/signals/signal.go new file mode 100644 index 000000000..16483a1d6 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/signals/signal.go @@ -0,0 +1,34 @@ +// +build !windows + +package signals + +import ( + "os" + "syscall" +) + +//convert a signal name to signal +func ToSignal(signalName string) (os.Signal, error) { + if signalName == "HUP" { + return syscall.SIGHUP, nil + } else if signalName == "INT" { + return syscall.SIGINT, nil + } else if signalName == "QUIT" { + return syscall.SIGQUIT, nil + } else if signalName == "KILL" { + return syscall.SIGKILL, nil + } else if signalName == "USR1" { + return syscall.SIGUSR1, nil + } else if signalName == "USR2" { + return syscall.SIGUSR2, nil + } else { + return syscall.SIGTERM, nil + + } + +} + +func Kill(process *os.Process, sig os.Signal) error { + localSig := sig.(syscall.Signal) + return syscall.Kill(-process.Pid, localSig) +} diff --git a/vendor/github.com/rpoletaev/supervisord/signals/signal_windows.go b/vendor/github.com/rpoletaev/supervisord/signals/signal_windows.go new file mode 100644 index 000000000..5c3e539ac --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/signals/signal_windows.go @@ -0,0 +1,46 @@ +// +build windows + +package signals + +import ( + "errors" + "fmt" + log "github.com/sirupsen/logrus" + "os" + "os/exec" + "syscall" +) + +//convert a signal name to signal +func ToSignal(signalName string) (os.Signal, error) { + if signalName == "HUP" { + return syscall.SIGHUP, nil + } else if signalName == "INT" { + return syscall.SIGINT, nil + } else if signalName == "QUIT" { + return syscall.SIGQUIT, nil + } else if signalName == "KILL" { + return syscall.SIGKILL, nil + } else if signalName == "USR1" { + log.Warn("signal USR1 is not supported in windows") + return nil, errors.New("signal USR1 is not supported in windows") + } else if signalName == "USR2" { + log.Warn("signal USR2 is not supported in windows") + return nil, errors.New("signal USR2 is not supported in windows") + } else { + return syscall.SIGTERM, nil + + } + +} + +func Kill(process *os.Process, sig os.Signal) error { + //Signal command can't kill children processes, call taskkill command to kill them + cmd := exec.Command("taskkill", "/F", "/T", "/PID", fmt.Sprintf("%d", process.Pid)) + err := cmd.Start() + if err == nil { + return cmd.Wait() + } + //if fail to find taskkill, fallback to normal signal + return process.Signal(sig) +} diff --git a/vendor/github.com/rpoletaev/supervisord/supervisor.go b/vendor/github.com/rpoletaev/supervisord/supervisor.go new file mode 100644 index 000000000..cc47db0f0 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/supervisor.go @@ -0,0 +1,586 @@ +package main + +import ( + "fmt" + "net/http" + "os" + "strings" + "sync" + "time" + + "github.com/rpoletaev/supervisord/config" + "github.com/rpoletaev/supervisord/events" + "github.com/rpoletaev/supervisord/faults" + "github.com/rpoletaev/supervisord/logger" + "github.com/rpoletaev/supervisord/process" + 
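// The unix Kill helper above sends the signal to -pid, i.e. to the whole process
// group, so children spawned by the supervised program receive it as well. That only
// works when the program runs in its own process group; the sketch below sets Setpgid
// explicitly, which is an assumption about how the command is created elsewhere.
// Unix-only, like signal.go above:
package main

import (
	"os/exec"
	"syscall"
	"time"
)

func main() {
	// a shell that spawns its own child; Setpgid gives the shell a fresh
	// process group whose id equals its pid
	cmd := exec.Command("sh", "-c", "sleep 60 & wait")
	cmd.SysProcAttr = &syscall.SysProcAttr{Setpgid: true}
	if err := cmd.Start(); err != nil {
		panic(err)
	}
	time.Sleep(100 * time.Millisecond)
	// a negative pid addresses the whole group, mirroring signals.Kill
	syscall.Kill(-cmd.Process.Pid, syscall.SIGTERM)
	cmd.Wait()
}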
"github.com/rpoletaev/supervisord/signals" + "github.com/rpoletaev/supervisord/types" + "github.com/rpoletaev/supervisord/util" + + log "github.com/sirupsen/logrus" +) + +const ( + SUPERVISOR_VERSION = "3.0" +) + +type Supervisor struct { + config *config.Config + procMgr *process.ProcessManager + xmlRPC *XmlRPC + logger logger.Logger + restarting bool +} + +type StartProcessArgs struct { + Name string + Wait bool `default:"true"` +} + +type ProcessStdin struct { + Name string + Chars string +} + +type RemoteCommEvent struct { + Type string + Data string +} + +type StateInfo struct { + Statecode int `xml:"statecode"` + Statename string `xml:"statename"` +} + +type RpcTaskResult struct { + Name string `xml:"name"` + Group string `xml:"group"` + Status int `xml:"status"` + Description string `xml:"description"` +} + +type LogReadInfo struct { + Offset int + Length int +} + +type ProcessLogReadInfo struct { + Name string + Offset int + Length int +} + +type ProcessTailLog struct { + LogData string + Offset int64 + Overflow bool +} + +func NewSupervisor(configFile string) *Supervisor { + return &Supervisor{config: config.NewConfig(configFile), + procMgr: process.NewProcessManager(), + xmlRPC: NewXmlRPC(), + restarting: false} +} + +func (s *Supervisor) GetConfig() *config.Config { + return s.config +} + +func (s *Supervisor) GetVersion(r *http.Request, args *struct{}, reply *struct{ Version string }) error { + reply.Version = SUPERVISOR_VERSION + return nil +} + +func (s *Supervisor) GetSupervisorVersion(r *http.Request, args *struct{}, reply *struct{ Version string }) error { + reply.Version = SUPERVISOR_VERSION + return nil +} + +func (s *Supervisor) GetIdentification(r *http.Request, args *struct{}, reply *struct{ Id string }) error { + reply.Id = s.GetSupervisorId() + return nil +} + +func (s *Supervisor) GetSupervisorId() string { + entry, ok := s.config.GetSupervisord() + if ok { + return entry.GetString("identifier", "supervisor") + } else { + return "supervisor" + } +} + +func (s *Supervisor) GetState(r *http.Request, args *struct{}, reply *struct{ StateInfo StateInfo }) error { + //statecode statename + //======================= + // 2 FATAL + // 1 RUNNING + // 0 RESTARTING + // -1 SHUTDOWN + log.Debug("Get state") + reply.StateInfo.Statecode = 1 + reply.StateInfo.Statename = "RUNNING" + return nil +} + +func (s *Supervisor) GetPID(r *http.Request, args *struct{}, reply *struct{ Pid int }) error { + reply.Pid = os.Getpid() + return nil +} + +func (s *Supervisor) ReadLog(r *http.Request, args *LogReadInfo, reply *struct{ Log string }) error { + data, err := s.logger.ReadLog(int64(args.Offset), int64(args.Length)) + reply.Log = data + return err +} + +func (s *Supervisor) ClearLog(r *http.Request, args *struct{}, reply *struct{ Ret bool }) error { + err := s.logger.ClearAllLogFile() + reply.Ret = err == nil + return err +} + +func (s *Supervisor) Shutdown(r *http.Request, args *struct{}, reply *struct{ Ret bool }) error { + reply.Ret = true + log.Info("received rpc request to stop all processes & exit") + s.procMgr.StopAllProcesses() + go func() { + time.Sleep(1 * time.Second) + os.Exit(0) + }() + return nil +} + +func (s *Supervisor) Restart(r *http.Request, args *struct{}, reply *struct{ Ret bool }) error { + log.Info("Receive instruction to restart") + s.restarting = true + reply.Ret = true + return nil +} + +func (s *Supervisor) IsRestarting() bool { + return s.restarting +} + +func getProcessInfo(proc *process.Process) *types.ProcessInfo { + return &types.ProcessInfo{Name: 
proc.GetName(), + Group: proc.GetGroup(), + Description: proc.GetDescription(), + Start: int(proc.GetStartTime().Unix()), + Stop: int(proc.GetStopTime().Unix()), + Now: int(time.Now().Unix()), + State: int(proc.GetState()), + Statename: proc.GetState().String(), + Spawnerr: "", + Exitstatus: proc.GetExitstatus(), + Logfile: proc.GetStdoutLogfile(), + Stdout_logfile: proc.GetStdoutLogfile(), + Stderr_logfile: proc.GetStderrLogfile(), + Pid: proc.GetPid()} + +} + +func (s *Supervisor) GetAllProcessInfo(r *http.Request, args *struct{}, reply *struct{ AllProcessInfo []types.ProcessInfo }) error { + reply.AllProcessInfo = make([]types.ProcessInfo, 0) + s.procMgr.ForEachProcess(func(proc *process.Process) { + procInfo := getProcessInfo(proc) + reply.AllProcessInfo = append(reply.AllProcessInfo, *procInfo) + }) + + return nil +} + +func (s *Supervisor) GetProcessInfo(r *http.Request, args *struct{ Name string }, reply *struct{ ProcInfo types.ProcessInfo }) error { + log.Debug("Get process info of: ", args.Name) + proc := s.procMgr.Find(args.Name) + if proc == nil { + return fmt.Errorf("no process named %s", args.Name) + } + + reply.ProcInfo = *getProcessInfo(proc) + return nil +} + +func (s *Supervisor) StartProcess(r *http.Request, args *StartProcessArgs, reply *struct{ Success bool }) error { + proc := s.procMgr.Find(args.Name) + + if proc == nil { + return fmt.Errorf("fail to find process %s", args.Name) + } + proc.Start(args.Wait) + reply.Success = true + return nil +} + +func (s *Supervisor) StartAllProcesses(r *http.Request, args *struct { + Wait bool `default:"true"` +}, reply *struct{ RpcTaskResults []RpcTaskResult }) error { + s.procMgr.ForEachProcess(func(proc *process.Process) { + proc.Start(args.Wait) + processInfo := *getProcessInfo(proc) + reply.RpcTaskResults = append(reply.RpcTaskResults, RpcTaskResult{ + Name: processInfo.Name, + Group: processInfo.Group, + Status: faults.SUCCESS, + Description: "OK", + }) + }) + return nil +} + +func (s *Supervisor) StartProcessGroup(r *http.Request, args *StartProcessArgs, reply *struct{ AllProcessInfo []types.ProcessInfo }) error { + log.WithFields(log.Fields{"group": args.Name}).Info("start process group") + s.procMgr.ForEachProcess(func(proc *process.Process) { + if proc.GetGroup() == args.Name { + proc.Start(args.Wait) + reply.AllProcessInfo = append(reply.AllProcessInfo, *getProcessInfo(proc)) + } + }) + + return nil +} + +func (s *Supervisor) StopProcess(r *http.Request, args *StartProcessArgs, reply *struct{ Success bool }) error { + log.WithFields(log.Fields{"program": args.Name}).Info("stop process") + proc := s.procMgr.Find(args.Name) + if proc == nil { + return fmt.Errorf("fail to find process %s", args.Name) + } + proc.Stop(args.Wait) + reply.Success = true + return nil +} + +func (s *Supervisor) StopProcessGroup(r *http.Request, args *StartProcessArgs, reply *struct{ AllProcessInfo []types.ProcessInfo }) error { + log.WithFields(log.Fields{"group": args.Name}).Info("stop process group") + s.procMgr.ForEachProcess(func(proc *process.Process) { + if proc.GetGroup() == args.Name { + proc.Stop(args.Wait) + reply.AllProcessInfo = append(reply.AllProcessInfo, *getProcessInfo(proc)) + } + }) + return nil +} + +func (s *Supervisor) StopAllProcesses(r *http.Request, args *struct { + Wait bool `default:"true"` +}, reply *struct{ RpcTaskResults []RpcTaskResult }) error { + s.procMgr.ForEachProcess(func(proc *process.Process) { + proc.Stop(args.Wait) + processInfo := *getProcessInfo(proc) + reply.RpcTaskResults = append(reply.RpcTaskResults, 
RpcTaskResult{ + Name: processInfo.Name, + Group: processInfo.Group, + Status: faults.SUCCESS, + Description: "OK", + }) + }) + return nil +} + +func (s *Supervisor) SignalProcess(r *http.Request, args *types.ProcessSignal, reply *struct{ Success bool }) error { + proc := s.procMgr.Find(args.Name) + if proc == nil { + reply.Success = false + return fmt.Errorf("No process named %s", args.Name) + } + sig, err := signals.ToSignal(args.Signal) + if err == nil { + proc.Signal(sig) + } + reply.Success = true + return nil +} + +func (s *Supervisor) SignalProcessGroup(r *http.Request, args *types.ProcessSignal, reply *struct{ AllProcessInfo []types.ProcessInfo }) error { + s.procMgr.ForEachProcess(func(proc *process.Process) { + if proc.GetGroup() == args.Name { + sig, err := signals.ToSignal(args.Signal) + if err == nil { + proc.Signal(sig) + } + } + }) + + s.procMgr.ForEachProcess(func(proc *process.Process) { + if proc.GetGroup() == args.Name { + reply.AllProcessInfo = append(reply.AllProcessInfo, *getProcessInfo(proc)) + } + }) + return nil +} + +func (s *Supervisor) SignalAllProcesses(r *http.Request, args *types.ProcessSignal, reply *struct{ AllProcessInfo []types.ProcessInfo }) error { + s.procMgr.ForEachProcess(func(proc *process.Process) { + sig, err := signals.ToSignal(args.Signal) + if err == nil { + proc.Signal(sig) + } + }) + s.procMgr.ForEachProcess(func(proc *process.Process) { + reply.AllProcessInfo = append(reply.AllProcessInfo, *getProcessInfo(proc)) + }) + return nil +} + +func (s *Supervisor) SendProcessStdin(r *http.Request, args *ProcessStdin, reply *struct{ Success bool }) error { + proc := s.procMgr.Find(args.Name) + if proc == nil { + log.WithFields(log.Fields{"program": args.Name}).Error("program does not exist") + return fmt.Errorf("NOT_RUNNING") + } + if proc.GetState() != process.RUNNING { + log.WithFields(log.Fields{"program": args.Name}).Error("program does not run") + return fmt.Errorf("NOT_RUNNING") + } + err := proc.SendProcessStdin(args.Chars) + if err == nil { + reply.Success = true + } else { + reply.Success = false + } + return err +} + +func (s *Supervisor) SendRemoteCommEvent(r *http.Request, args *RemoteCommEvent, reply *struct{ Success bool }) error { + events.EmitEvent(events.NewRemoteCommunicationEvent(args.Type, args.Data)) + reply.Success = true + return nil +} + +func (s *Supervisor) Reload() (error, []string, []string, []string) { + //get the previous loaded programs + prevPrograms := s.config.GetProgramNames() + prevProgGroup := s.config.ProgramGroup.Clone() + + loaded_programs, err := s.config.Load() + + if err == nil { + s.setSupervisordInfo() + s.startEventListeners() + s.createPrograms(prevPrograms) + s.startHttpServer() + s.startAutoStartPrograms() + } + removedPrograms := util.Sub(prevPrograms, loaded_programs) + for _, removedProg := range removedPrograms { + log.WithFields(log.Fields{"program": removedProg}).Info("the program is removed and will be stopped") + s.config.RemoveProgram(removedProg) + proc := s.procMgr.Remove(removedProg) + if proc != nil { + proc.Stop(false) + } + + } + addedGroup, changedGroup, removedGroup := s.config.ProgramGroup.Sub(prevProgGroup) + return err, addedGroup, changedGroup, removedGroup + +} + +func (s *Supervisor) WaitForExit() { + for { + if s.IsRestarting() { + s.procMgr.StopAllProcesses() + break + } + time.Sleep(10 * time.Second) + } +} + +func (s *Supervisor) createPrograms(prevPrograms []string) { + + programs := s.config.GetProgramNames() + for _, entry := range s.config.GetPrograms() { + 
s.procMgr.CreateProcess(s.GetSupervisorId(), entry) + } + removedPrograms := util.Sub(prevPrograms, programs) + for _, p := range removedPrograms { + s.procMgr.Remove(p) + } +} + +func (s *Supervisor) startAutoStartPrograms() { + s.procMgr.StartAutoStartPrograms() +} + +func (s *Supervisor) startEventListeners() { + eventListeners := s.config.GetEventListeners() + for _, entry := range eventListeners { + s.procMgr.CreateProcess(s.GetSupervisorId(), entry) + } + + if len(eventListeners) > 0 { + time.Sleep(1 * time.Second) + } +} + +func (s *Supervisor) startHttpServer() { + httpServerConfig, ok := s.config.GetInetHttpServer() + if ok { + addr := httpServerConfig.GetString("port", "") + if addr != "" { + go s.xmlRPC.StartInetHttpServer(httpServerConfig.GetString("username", ""), httpServerConfig.GetString("password", ""), addr, s) + } + } + + httpServerConfig, ok = s.config.GetUnixHttpServer() + if ok { + env := config.NewStringExpression("here", s.config.GetConfigFileDir()) + sockFile, err := env.Eval(httpServerConfig.GetString("file", "/tmp/supervisord.sock")) + if err == nil { + go s.xmlRPC.StartUnixHttpServer(httpServerConfig.GetString("username", ""), httpServerConfig.GetString("password", ""), sockFile, s) + } + } + +} + +func (s *Supervisor) setSupervisordInfo() { + supervisordConf, ok := s.config.GetSupervisord() + if ok { + //set supervisord log + + env := config.NewStringExpression("here", s.config.GetConfigFileDir()) + logFile, err := env.Eval(supervisordConf.GetString("logfile", "supervisord.log")) + logFile, err = process.Path_expand(logFile) + logEventEmitter := logger.NewNullLogEventEmitter() + s.logger = logger.NewNullLogger(logEventEmitter) + if err == nil { + logfile_maxbytes := int64(supervisordConf.GetBytes("logfile_maxbytes", 50*1024*1024)) + logfile_backups := supervisordConf.GetInt("logfile_backups", 10) + loglevel := supervisordConf.GetString("loglevel", "info") + switch logFile { + case "/dev/null": + s.logger = logger.NewNullLogger(logEventEmitter) + case "syslog": + s.logger = logger.NewSysLogger("supervisord", logEventEmitter) + case "/dev/stdout": + s.logger = logger.NewStdoutLogger(logEventEmitter) + case "/dev/stderr": + s.logger = logger.NewStderrLogger(logEventEmitter) + case "": + s.logger = logger.NewNullLogger(logEventEmitter) + default: + s.logger = logger.NewFileLogger(logFile, logfile_maxbytes, logfile_backups, logEventEmitter, &sync.Mutex{}) + } + log.SetOutput(s.logger) + log.SetLevel(toLogLevel(loglevel)) + log.SetFormatter(&log.TextFormatter{DisableColors: true}) + } + //set the pid + pidfile, err := env.Eval(supervisordConf.GetString("pidfile", "supervisord.pid")) + if err == nil { + f, err := os.Create(pidfile) + if err == nil { + fmt.Fprintf(f, "%d", os.Getpid()) + f.Close() + } + } + } +} + +func toLogLevel(level string) log.Level { + switch strings.ToLower(level) { + case "critical": + return log.FatalLevel + case "error": + return log.ErrorLevel + case "warn": + return log.WarnLevel + case "info": + return log.InfoLevel + default: + return log.DebugLevel + } +} + +func (s *Supervisor) ReloadConfig(r *http.Request, args *struct{}, reply *types.ReloadConfigResult) error { + log.Info("start to reload config") + err, addedGroup, changedGroup, removedGroup := s.Reload() + if len(addedGroup) > 0 { + log.WithFields(log.Fields{"groups": strings.Join(addedGroup, ",")}).Info("added groups") + } + + if len(changedGroup) > 0 { + log.WithFields(log.Fields{"groups": strings.Join(changedGroup, ",")}).Info("changed groups") + } + + if len(removedGroup) > 0 { 
+ log.WithFields(log.Fields{"groups": strings.Join(removedGroup, ",")}).Info("removed groups") + } + reply.AddedGroup = addedGroup + reply.ChangedGroup = changedGroup + reply.RemovedGroup = removedGroup + return err +} + +func (s *Supervisor) AddProcessGroup(r *http.Request, args *struct{ Name string }, reply *struct{ Success bool }) error { + reply.Success = false + return nil +} + +func (s *Supervisor) RemoveProcessGroup(r *http.Request, args *struct{ Name string }, reply *struct{ Success bool }) error { + reply.Success = false + return nil +} + +func (s *Supervisor) ReadProcessStdoutLog(r *http.Request, args *ProcessLogReadInfo, reply *struct{ LogData string }) error { + proc := s.procMgr.Find(args.Name) + if proc == nil { + return fmt.Errorf("No such process %s", args.Name) + } + var err error + reply.LogData, err = proc.StdoutLog.ReadLog(int64(args.Offset), int64(args.Length)) + return err +} + +func (s *Supervisor) ReadProcessStderrLog(r *http.Request, args *ProcessLogReadInfo, reply *struct{ LogData string }) error { + proc := s.procMgr.Find(args.Name) + if proc == nil { + return fmt.Errorf("No such process %s", args.Name) + } + var err error + reply.LogData, err = proc.StderrLog.ReadLog(int64(args.Offset), int64(args.Length)) + return err +} + +func (s *Supervisor) TailProcessStdoutLog(r *http.Request, args *ProcessLogReadInfo, reply *ProcessTailLog) error { + proc := s.procMgr.Find(args.Name) + if proc == nil { + return fmt.Errorf("No such process %s", args.Name) + } + var err error + reply.LogData, reply.Offset, reply.Overflow, err = proc.StdoutLog.ReadTailLog(int64(args.Offset), int64(args.Length)) + return err +} + +func (s *Supervisor) ClearProcessLogs(r *http.Request, args *struct{ Name string }, reply *struct{ Success bool }) error { + proc := s.procMgr.Find(args.Name) + if proc == nil { + return fmt.Errorf("No such process %s", args.Name) + } + err1 := proc.StdoutLog.ClearAllLogFile() + err2 := proc.StderrLog.ClearAllLogFile() + reply.Success = err1 == nil && err2 == nil + if err1 != nil { + return err1 + } + return err2 +} + +func (s *Supervisor) ClearAllProcessLogs(r *http.Request, args *struct{}, reply *struct{ RpcTaskResults []RpcTaskResult }) error { + + s.procMgr.ForEachProcess(func(proc *process.Process) { + proc.StdoutLog.ClearAllLogFile() + proc.StderrLog.ClearAllLogFile() + procInfo := getProcessInfo(proc) + reply.RpcTaskResults = append(reply.RpcTaskResults, RpcTaskResult{ + Name: procInfo.Name, + Group: procInfo.Group, + Status: faults.SUCCESS, + Description: "OK", + }) + }) + + return nil +} diff --git a/vendor/github.com/rpoletaev/supervisord/util/util.go b/vendor/github.com/rpoletaev/supervisord/util/util.go new file mode 100644 index 000000000..e103b0ecc --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/util/util.go @@ -0,0 +1,64 @@ +package util + +// return true if the elem is in the array arr +func InArray(elem interface{}, arr []interface{}) bool { + for _, e := range arr { + if e == elem { + return true + } + } + return false +} + +//return true if the array arr1 contains all elements of array arr2 +func HasAllElements(arr1 []interface{}, arr2 []interface{}) bool { + for _, e2 := range arr2 { + if !InArray(e2, arr1) { + return false + } + } + return true +} + +func StringArrayToInterfacArray(arr []string) []interface{} { + result := make([]interface{}, 0) + for _, s := range arr { + result = append(result, s) + } + return result +} + +func Sub(arr_1 []string, arr_2 []string) []string { + result := make([]string, 0) + for _, s := range arr_1 { 
+ exist := false + for _, s2 := range arr_2 { + if s == s2 { + exist = true + } + } + if !exist { + result = append(result, s) + } + } + return result +} + +func IsSameStringArray(arr_1 []string, arr_2 []string) bool { + if len(arr_1) != len(arr_2) { + return false + } + for _, s := range arr_1 { + exist := false + for _, s2 := range arr_2 { + if s2 == s { + exist = true + break + } + } + if !exist { + return false + } + } + return true +} diff --git a/vendor/github.com/rpoletaev/supervisord/version.go b/vendor/github.com/rpoletaev/supervisord/version.go new file mode 100644 index 000000000..12dc42aee --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/version.go @@ -0,0 +1,24 @@ +package main + +import ( + "fmt" +) + +const VERSION = "1.0.008" + +type VersionCommand struct { +} + +var versionCommand VersionCommand + +func (v VersionCommand) Execute(args []string) error { + fmt.Println(VERSION) + return nil +} + +func init() { + parser.AddCommand("version", + "show the version of supervisor", + "display the supervisor version", + &versionCommand) +} diff --git a/vendor/github.com/rpoletaev/supervisord/xmlrpc.go b/vendor/github.com/rpoletaev/supervisord/xmlrpc.go new file mode 100644 index 000000000..e4381000f --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/xmlrpc.go @@ -0,0 +1,136 @@ +package main + +import ( + "crypto/sha1" + "encoding/hex" + "io" + "net" + "net/http" + "os" + "strings" + + "github.com/gorilla/rpc" + "github.com/ochinchina/gorilla-xmlrpc/xml" + log "github.com/sirupsen/logrus" +) + +type XmlRPC struct { + listeners map[string]net.Listener + // true if RPC is started + started bool +} + +type httpBasicAuth struct { + user string + password string + handler http.Handler +} + +func NewHttpBasicAuth(user string, password string, handler http.Handler) *httpBasicAuth { + if user != "" && password != "" { + log.Debug("require authentication") + } + return &httpBasicAuth{user: user, password: password, handler: handler} +} + +func (h *httpBasicAuth) ServeHTTP(w http.ResponseWriter, r *http.Request) { + if h.user == "" || h.password == "" { + log.Debug("no auth required") + h.handler.ServeHTTP(w, r) + return + } + username, password, ok := r.BasicAuth() + if ok && username == h.user { + if strings.HasPrefix(h.password, "{SHA}") { + log.Debug("auth with SHA") + hash := sha1.New() + io.WriteString(hash, password) + if hex.EncodeToString(hash.Sum(nil)) == h.password[5:] { + h.handler.ServeHTTP(w, r) + return + } + } else if password == h.password { + log.Debug("Auth with normal password") + h.handler.ServeHTTP(w, r) + return + } + } + w.Header().Set("WWW-Authenticate", "Basic realm=\"supervisor\"") + w.WriteHeader(401) +} + +func NewXmlRPC() *XmlRPC { + return &XmlRPC{listeners: make(map[string]net.Listener), started: false} +} + +func (p *XmlRPC) Stop() { + for _, listener := range p.listeners { + listener.Close() + } +} + +func (p *XmlRPC) StartUnixHttpServer(user string, password string, listenAddr string, s *Supervisor) { + os.Remove(listenAddr) + p.startHttpServer(user, password, "unix", listenAddr, s) +} + +func (p *XmlRPC) StartInetHttpServer(user string, password string, listenAddr string, s *Supervisor) { + p.startHttpServer(user, password, "tcp", listenAddr, s) +} + +func (p *XmlRPC) startHttpServer(user string, password string, protocol string, listenAddr string, s *Supervisor) { + if p.started { + return + } + p.started = true + mux := http.NewServeMux() + mux.Handle("/RPC2", NewHttpBasicAuth(user, password, p.createRPCServer(s))) + listener, err := 
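// ServeHTTP above accepts either a plain-text password or a "{SHA}"-prefixed value, in
// which case it compares the hex-encoded SHA1 of the submitted password with the rest
// of the stored string. A small helper for producing such a value for the password
// option read in startHttpServer; the function name is illustrative:
package main

import (
	"crypto/sha1"
	"encoding/hex"
	"fmt"
	"io"
)

// shaPassword returns the "{SHA}<hex sha1>" form checked by httpBasicAuth.
func shaPassword(plain string) string {
	h := sha1.New()
	io.WriteString(h, plain)
	return "{SHA}" + hex.EncodeToString(h.Sum(nil))
}

func main() {
	fmt.Println(shaPassword("secret")) // e.g. store this as the password option
}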
net.Listen(protocol, listenAddr) + if err == nil { + p.listeners[protocol] = listener + http.Serve(listener, mux) + } else { + log.WithFields(log.Fields{"addr": listenAddr, "protocol": protocol}).Error("fail to listen on address") + } + +} +func (p *XmlRPC) createRPCServer(s *Supervisor) *rpc.Server { + RPC := rpc.NewServer() + xmlrpcCodec := xml.NewCodec() + RPC.RegisterCodec(xmlrpcCodec, "text/xml") + RPC.RegisterService(s, "") + + xmlrpcCodec.RegisterAlias("supervisor.getVersion", "Supervisor.GetVersion") + xmlrpcCodec.RegisterAlias("supervisor.getAPIVersion", "Supervisor.GetVersion") + xmlrpcCodec.RegisterAlias("supervisor.getIdentification", "Supervisor.GetIdentification") + xmlrpcCodec.RegisterAlias("supervisor.getState", "Supervisor.GetState") + xmlrpcCodec.RegisterAlias("supervisor.getPID", "Supervisor.GetPID") + xmlrpcCodec.RegisterAlias("supervisor.readLog", "Supervisor.ReadLog") + xmlrpcCodec.RegisterAlias("supervisor.clearLog", "Supervisor.ClearLog") + xmlrpcCodec.RegisterAlias("supervisor.shutdown", "Supervisor.Shutdown") + xmlrpcCodec.RegisterAlias("supervisor.restart", "Supervisor.Restart") + xmlrpcCodec.RegisterAlias("supervisor.getProcessInfo", "Supervisor.GetProcessInfo") + xmlrpcCodec.RegisterAlias("supervisor.getSupervisorVersion", "Supervisor.GetVersion") + xmlrpcCodec.RegisterAlias("supervisor.getAllProcessInfo", "Supervisor.GetAllProcessInfo") + xmlrpcCodec.RegisterAlias("supervisor.startProcess", "Supervisor.StartProcess") + xmlrpcCodec.RegisterAlias("supervisor.startAllProcesses", "Supervisor.StartAllProcesses") + xmlrpcCodec.RegisterAlias("supervisor.startProcessGroup", "Supervisor.StartProcessGroup") + xmlrpcCodec.RegisterAlias("supervisor.stopProcess", "Supervisor.StopProcess") + xmlrpcCodec.RegisterAlias("supervisor.stopProcessGroup", "Supervisor.StopProcessGroup") + xmlrpcCodec.RegisterAlias("supervisor.stopAllProcesses", "Supervisor.StopAllProcesses") + xmlrpcCodec.RegisterAlias("supervisor.signalProcess", "Supervisor.SignalProcess") + xmlrpcCodec.RegisterAlias("supervisor.signalProcessGroup", "Supervisor.SignalProcessGroup") + xmlrpcCodec.RegisterAlias("supervisor.signalAllProcesses", "Supervisor.SignalAllProcesses") + xmlrpcCodec.RegisterAlias("supervisor.sendProcessStdin", "Supervisor.SendProcessStdin") + xmlrpcCodec.RegisterAlias("supervisor.sendRemoteCommEvent", "Supervisor.SendRemoteCommEvent") + xmlrpcCodec.RegisterAlias("supervisor.reloadConfig", "Supervisor.ReloadConfig") + xmlrpcCodec.RegisterAlias("supervisor.addProcessGroup", "Supervisor.AddProcessGroup") + xmlrpcCodec.RegisterAlias("supervisor.removeProcessGroup", "Supervisor.RemoveProcessGroup") + xmlrpcCodec.RegisterAlias("supervisor.readProcessStdoutLog", "Supervisor.ReadProcessStdoutLog") + xmlrpcCodec.RegisterAlias("supervisor.readProcessStderrLog", "Supervisor.ReadProcessStderrLog") + xmlrpcCodec.RegisterAlias("supervisor.tailProcessStdoutLog", "Supervisor.TailProcessStdoutLog") + xmlrpcCodec.RegisterAlias("supervisor.tailProcessStderrLog", "Supervisor.TailProcessStderrLog") + xmlrpcCodec.RegisterAlias("supervisor.clearProcessLogs", "Supervisor.ClearProcessLogs") + xmlrpcCodec.RegisterAlias("supervisor.clearAllProcessLogs", "Supervisor.ClearAllProcessLogs") + return RPC +} diff --git a/vendor/vendor.json b/vendor/vendor.json index a8bbc84e1..1565ffc5c 100644 --- a/vendor/vendor.json +++ b/vendor/vendor.json @@ -75,6 +75,12 @@ "revision": "1e59b77b52bf8e4b449a57e6f79f21226d571845", "revisionTime": "2017-11-13T18:07:20Z" }, + { + "checksumSHA1": "UcxIsr0IzcSKDqGVnK1HsxnSSVU=", + "path": 
"github.com/gorilla/rpc", + "revision": "22c016f3df3febe0c1f6727598b6389507e03a18", + "revisionTime": "2016-09-23T22:06:01Z" + }, { "checksumSHA1": "SGSXlSU1TFtg5aTlVA9v4Ka86lU=", "origin": "github.com/centrifugal/centrifugo/vendor/github.com/gorilla/securecookie", @@ -220,6 +226,18 @@ "revision": "179d4d0c4d8d407a32af483c2354df1d2c91e6c3", "revisionTime": "2013-12-21T20:05:32Z" }, + { + "checksumSHA1": "8p5uEwUdi9/xh/XpF1ULlZ00k2w=", + "path": "github.com/ochinchina/go-ini", + "revision": "4dcbd5514a9220bb68c8d45eabf79b25479ea2d1", + "revisionTime": "2018-03-10T02:35:15Z" + }, + { + "checksumSHA1": "E3jV3ILrMbnBTbR4GXkRf7rXc5Q=", + "path": "github.com/ochinchina/gorilla-xmlrpc/xml", + "revision": "ecf2fe693a2ca10ce68d2c7d4c559f1a57d2c845", + "revisionTime": "2017-10-12T05:53:24Z" + }, { "checksumSHA1": "BoXdUBWB8UnSlFlbnuTQaPqfCGk=", "path": "github.com/op/go-logging", @@ -257,6 +275,66 @@ "revision": "2315d5715e36303a941d907f038da7f7c44c773b", "revisionTime": "2017-11-01T20:10:47Z" }, + { + "checksumSHA1": "sESN0ZfX2JIOB6pcxXuw5yXo4+E=", + "path": "github.com/rogpeppe/go-charset/charset", + "revision": "e9ff06f347d3f5d0013d59ed83754f0e88de10d4", + "revisionTime": "2015-06-15T17:25:32Z" + }, + { + "checksumSHA1": "MyUzunzysfhOlm/yJfV89oC+mO4=", + "path": "github.com/rogpeppe/go-charset/data", + "revision": "e9ff06f347d3f5d0013d59ed83754f0e88de10d4", + "revisionTime": "2015-06-15T17:25:32Z" + }, + { + "checksumSHA1": "4yHilxHn118WAI/J+/uQd+lVky0=", + "path": "github.com/rpoletaev/supervisord", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "UHnNA1Cx5MtPY68fBrM/ank3bUY=", + "path": "github.com/rpoletaev/supervisord/config", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "7079G1HzEBpr9xCFA7S7OYtw3F4=", + "path": "github.com/rpoletaev/supervisord/events", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "A+d9lhIE1xvY1fdypT5GdO9C3wY=", + "path": "github.com/rpoletaev/supervisord/faults", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "YLXHdj1snMGqXXiFNQciKtUqTgM=", + "path": "github.com/rpoletaev/supervisord/logger", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "HHkvC6JQjF9hZ4RblCEEKV6k3Is=", + "path": "github.com/rpoletaev/supervisord/process", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "TKy4P7QqrqVfiKwGbXBkP5XNPY4=", + "path": "github.com/rpoletaev/supervisord/signals", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, + { + "checksumSHA1": "F4x0/vDYzuOYgOMp3NlFbbTX1Vg=", + "path": "github.com/rpoletaev/supervisord/util", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, { "checksumSHA1": "eDQ6f1EsNf+frcRO/9XukSEchm8=", "path": "github.com/satori/go.uuid", From d3d7b4da131741c5d137394eb1df715197cac5f2 Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Thu, 5 Jul 2018 20:51:43 +0500 Subject: [PATCH 128/169] feature/965-money (#424) * Fixed CreateEcosystem * Fixed Money template func * feature/958-column (#414) * Fixed checking column name * Fixed checkColumnName * Fixed recursion * Fixed ContactConditions loop 
(#429) * remove default_page from roles * Fixed Money template func --- packages/api/template_test.go | 23 +++++++++++++++++++++++ packages/consts/consts.go | 3 +++ packages/template/funcs.go | 3 +++ 3 files changed, 29 insertions(+) diff --git a/packages/api/template_test.go b/packages/api/template_test.go index ea73bf31e..bf2ba6216 100644 --- a/packages/api/template_test.go +++ b/packages/api/template_test.go @@ -20,6 +20,7 @@ import ( "crypto/md5" "encoding/base64" "fmt" + "math/rand" "net/url" "strings" "testing" @@ -149,6 +150,28 @@ var forTest = tplList{ `[{"tag":"text","text":"the varNotZero should be visible"}]`}, } +func TestMoney(t *testing.T) { + var ret contentResult + if err := keyLogin(1); err != nil { + t.Error(err) + return + } + size := 10000000 + money := make([]byte, size) + rand.Seed(time.Now().UnixNano()) + for i := 0; i < size; i++ { + money[i] = '0' + byte(rand.Intn(10)) + } + err := sendPost(`content`, &url.Values{`template`: {`Money(` + string(money) + `)`}}, &ret) + if err != nil { + t.Error(err) + return + } + if RawToString(ret.Tree) != `[{"tag":"text","text":"invalid money value"}]` { + t.Errorf(`wrong value %s`, RawToString(ret.Tree)) + } +} + func TestMobile(t *testing.T) { var ret contentResult gMobile = true diff --git a/packages/consts/consts.go b/packages/consts/consts.go index 45b07c9b6..4e455e7a7 100644 --- a/packages/consts/consts.go +++ b/packages/consts/consts.go @@ -160,3 +160,6 @@ const DefaultTempDirName = "genesis-temp" // DefaultVDE allways is 1 const DefaultVDE = 1 + +// MoneyLength is the maximum number of digits in money value +const MoneyLength = 30 diff --git a/packages/template/funcs.go b/packages/template/funcs.go index c5bb6f44d..085d0522e 100644 --- a/packages/template/funcs.go +++ b/packages/template/funcs.go @@ -210,6 +210,9 @@ func moneyTag(par parFunc) string { } cents = converter.StrToInt(sp.Value) } + if len(ret) > consts.MoneyLength { + return `invalid money value` + } if cents != 0 { retDec, err := decimal.NewFromString(ret) if err != nil { From e672f81f439c577d585a9fcd4b3912ef125241fb Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Thu, 5 Jul 2018 20:52:33 +0500 Subject: [PATCH 129/169] Changed insert pernission (#435) --- packages/migration/first_tables_data.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/migration/first_tables_data.go b/packages/migration/first_tables_data.go index f549893d5..12abf9d07 100644 --- a/packages/migration/first_tables_data.go +++ b/packages/migration/first_tables_data.go @@ -18,7 +18,7 @@ INSERT INTO "1_tables" ("id", "name", "permissions","columns", "conditions") VAL ( '21', 'ecosystems', - '{"insert": "ContractConditions(\"MainCondition\")", "update": "ContractConditions(\"MainCondition\")", "new_column": "ContractConditions(\"MainCondition\")"}', + '{"insert": "true", "update": "ContractConditions(\"MainCondition\")", "new_column": "ContractConditions(\"MainCondition\")"}', '{"name": "ContractConditions(\"MainCondition\")"}', 'ContractConditions("MainCondition")' ), From 5a52759c9ae71ac03dd492d61611a0307e0c48a3 Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Thu, 5 Jul 2018 20:53:57 +0500 Subject: [PATCH 130/169] feature/964 bigsize (#423) * add batch insert * test and small fixes * add batch insert * test and small fixes * requested changes * change platform founder to user wallet * Fixed CreateEcosystem * add batch insert * test and small fixes * requested changes * Added test * Added checking size limit in prepare * feature/940 history (#412) * 
move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * move changes * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * fix rebase errors * vendoring supervisord * change update permissions for notifications table * Fixed changing schema of system_parameters table * Added GetPageHistory * add reles_access for 'Apla Consensus asbl' * Added GetMenuHistory * Added GetContractHistory * Added history template * Added block history * Added Source to template funcs * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * move changes * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * move changes * separate routes by vde * separate vde migration to own package * temporary commit * temporary commit * fix login * move changes * temporary commit * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * fix rebase errors * vendoring supervisord * change founder account to user account on adding role_participant * Revert "change founder account to user account on adding role_participant" This reverts commit c13fd44ec482ca2a789983b78c42df83500a1ddc. * Added BOM checking (#406) * feature/887-doublecontract (#407) * Fixed redefining contracts * change update permissions for notifications table * Fixed changing schema of system_parameters table * add reles_access for 'Apla Consensus asbl' * change founder account to user account on adding role_participant * Revert "change founder account to user account on adding role_participant" This reverts commit c13fd44ec482ca2a789983b78c42df83500a1ddc. 
* Fixed redefining contracts * Fixed CreateEcosystem (#419) * Merge develop * Merge develop * Merge develop * Added GetContractHistory * Merge develop * Added block history * Added Source to template funcs * change platform founder to user wallet * feature/919 include (#405) * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * Added macro to include * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * move changes * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * fix rebase errors * vendoring supervisord * Fixed query * change update permissions for notifications table * Fixed changing schema of system_parameters table * add reles_access for 'Apla Consensus asbl' * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * move changes * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * move changes * separate routes by vde * separate vde migration to own package * temporary commit * temporary commit * fix login * move changes * temporary commit * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * fix rebase errors * vendoring supervisord * change founder account to user account on adding role_participant * Revert "change founder account to user account on adding role_participant" This reverts commit c13fd44ec482ca2a789983b78c42df83500a1ddc. * Added macro to include * Fixed query * Added BOM checking (#406) * feature/887-doublecontract (#407) * Fixed redefining contracts * change update permissions for notifications table * Fixed changing schema of system_parameters table * add reles_access for 'Apla Consensus asbl' * change founder account to user account on adding role_participant * Revert "change founder account to user account on adding role_participant" This reverts commit c13fd44ec482ca2a789983b78c42df83500a1ddc. 
* Fixed redefining contracts * Fixed CreateEcosystem (#419) * Added macro to include * Fixed query * Fixed db query in include * Fixed dbquery in include * feature/958-column (#414) * Fixed checking column name * Fixed checkColumnName * Fixed recursion * Fixed ContactConditions loop (#429) * remove default_page from roles * Added test * Added checking size limit in prepare --- packages/api/errors.go | 2 ++ packages/api/prepare.go | 30 ++++++++++++++++--- packages/api/smart_test.go | 13 ++++++++ packages/conf/syspar/syspar.go | 7 +++++ .../migration/first_ecosys_contracts_data.go | 14 +++++++++ .../migration/first_system_parameters_data.go | 3 +- packages/smart/smart_p.go | 2 +- 7 files changed, 65 insertions(+), 6 deletions(-) diff --git a/packages/api/errors.go b/packages/api/errors.go index eb2e490d2..7d00ca48b 100644 --- a/packages/api/errors.go +++ b/packages/api/errors.go @@ -29,6 +29,8 @@ var ( `E_HEAVYPAGE`: `This page is heavy`, `E_INSTALLED`: `Apla is already installed`, `E_INVALIDWALLET`: `Wallet %s is not valid`, + `E_LIMITFORSIGN`: `Length of forsign is too big (%d)`, + `E_LIMITTXSIZE`: `The size of tx is too big (%d)`, `E_NOTFOUND`: `Page not found`, `E_NOTINSTALLED`: `Apla is not installed`, `E_PARAMNOTFOUND`: `Parameter %s has not been found`, diff --git a/packages/api/prepare.go b/packages/api/prepare.go index 74e6d7fd6..86de44bd8 100644 --- a/packages/api/prepare.go +++ b/packages/api/prepare.go @@ -22,6 +22,7 @@ import ( "net/http" "strings" + "github.com/GenesisKernel/go-genesis/packages/conf/syspar" "github.com/GenesisKernel/go-genesis/packages/consts" "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/model" @@ -82,7 +83,7 @@ func (h *contractHandlers) prepareMultipleContract(w http.ResponseWriter, r *htt req := h.multiRequests.NewMultiRequest() forSigns := []string{} - + limitForsign := syspar.GetMaxForsignSize() for _, c := range requests.Contracts { var smartTx tx.SmartContract contract, parerr, err := validateSmartContractJSON(r, data, c.Contract, c.Params) @@ -120,7 +121,11 @@ func (h *contractHandlers) prepareMultipleContract(w http.ResponseWriter, r *htt } else { req.AddContract(c.Contract, c.Params) } - forSigns = append(forSigns, strings.Join(forsign, ",")) + forSign := strings.Join(forsign, ",") + if len(forSign) > int(limitForsign) { + return errorAPI(w, `E_LIMITFORSIGN`, http.StatusBadRequest, len(forSign)) + } + forSigns = append(forSigns, forSign) } h.multiRequests.AddRequest(req) @@ -177,6 +182,9 @@ func (h *contractHandlers) prepareContract(w http.ResponseWriter, r *http.Reques result.ID = req.ID result.ForSign = strings.Join(forsign, ",") + if len(result.ForSign) > int(syspar.GetMaxForsignSize()) { + return errorAPI(w, `E_LIMITFORSIGN`, http.StatusBadRequest, len(result.ForSign)) + } result.Time = converter.Int64ToStr(req.Time.Unix()) result.Expiration = converter.Int64ToStr(req.Time.Add(h.requests.ExpireDuration()).Unix()) data.result = result @@ -184,8 +192,11 @@ func (h *contractHandlers) prepareContract(w http.ResponseWriter, r *http.Reques } func forsignJSONData(w http.ResponseWriter, params map[string]string, logger *log.Entry, fields []*script.FieldInfo) ([]string, map[string]string, error) { + var curSize int64 forsign := []string{} requestParams := map[string]string{} + limitSize := syspar.GetMaxTxSize() + for _, fitem := range fields { if fitem.ContainsTag(`signature`) || fitem.ContainsTag(script.TagFile) { continue @@ -221,14 +232,22 @@ func forsignJSONData(w http.ResponseWriter, params 
map[string]string, logger *lo val = `0` } } + curSize += int64(len(val)) forsign = append(forsign, val) } + if curSize > limitSize { + return nil, nil, errorAPI(w, `E_LIMITTXSIZE`, http.StatusBadRequest, curSize) + } return forsign, requestParams, nil } func forsignFormData(w http.ResponseWriter, r *http.Request, data *apiData, logger *log.Entry, req *tx.Request, fields []*script.FieldInfo) ([]string, error) { + var curSize int64 + forsign := []string{} + limitSize := syspar.GetMaxTxSize() + for _, fitem := range fields { if strings.Contains(fitem.Tags, `signature`) { continue @@ -242,6 +261,7 @@ func forsignFormData(w http.ResponseWriter, r *http.Request, data *apiData, logg } fileHeader, err := req.WriteFile(fitem.Name, header.Header.Get(`Content-Type`), file) file.Close() + curSize += header.Size if err != nil { log.WithFields(log.Fields{"type": consts.IOError, "error": err}).Error("writing file") return nil, errorAPI(w, err.Error(), http.StatusInternalServerError) @@ -300,9 +320,11 @@ func forsignFormData(w http.ResponseWriter, r *http.Request, data *apiData, logg val = `0` } } - + curSize += int64(len(val)) forsign = append(forsign, val) } - + if curSize > limitSize { + return nil, errorAPI(w, `E_LIMITTXSIZE`, http.StatusBadRequest, curSize) + } return forsign, nil } diff --git a/packages/api/smart_test.go b/packages/api/smart_test.go index eb615b202..a300753a7 100644 --- a/packages/api/smart_test.go +++ b/packages/api/smart_test.go @@ -18,9 +18,11 @@ package api import ( "fmt" + "math/rand" "net/url" "strings" "testing" + "time" "github.com/GenesisKernel/go-genesis/packages/converter" "github.com/GenesisKernel/go-genesis/packages/crypto" @@ -121,6 +123,17 @@ func TestMoneyTransfer(t *testing.T) { t.Error(err) return } + size := 1000000 + big := make([]byte, size) + rand.Seed(time.Now().UnixNano()) + for i := 0; i < size; i++ { + big[i] = '0' + byte(rand.Intn(10)) + } + form = url.Values{`Amount`: {string(big)}, `Recipient`: {`0005-2070-2000-0006-0200`}} + if err := postTx(`MoneyTransfer`, &form); err.Error() != `400 {"error": "E_LIMITFORSIGN", "msg": "Length of forsign is too big (1000106)" , "params": ["1000106"]}` { + t.Error(err) + return + } } func TestPage(t *testing.T) { diff --git a/packages/conf/syspar/syspar.go b/packages/conf/syspar/syspar.go index 96b7706ca..29891374e 100644 --- a/packages/conf/syspar/syspar.go +++ b/packages/conf/syspar/syspar.go @@ -48,6 +48,8 @@ const ( MaxBlockSize = `max_block_size` // MaxTxSize is the maximum size of the transaction MaxTxSize = `max_tx_size` + // MaxForsignSize is the maximum size of the forsign of transaction + MaxForsignSize = `max_forsign_size` // MaxBlockFuel is the maximum fuel of the block MaxBlockFuel = `max_fuel_block` // MaxTxFuel is the maximum fuel of the transaction @@ -371,6 +373,11 @@ func GetMaxTxSize() int64 { return converter.StrToInt64(SysString(MaxTxSize)) } +// GetMaxTxTextSize is returns max tx text size +func GetMaxForsignSize() int64 { + return converter.StrToInt64(SysString(MaxForsignSize)) +} + // GetGapsBetweenBlocks is returns gaps between blocks func GetGapsBetweenBlocks() int64 { return converter.StrToInt64(SysString(GapsBetweenBlocks)) diff --git a/packages/migration/first_ecosys_contracts_data.go b/packages/migration/first_ecosys_contracts_data.go index cafe7cb85..e2b505fce 100644 --- a/packages/migration/first_ecosys_contracts_data.go +++ b/packages/migration/first_ecosys_contracts_data.go @@ -2586,6 +2586,20 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { Value string } + conditions { + 
if Size($Value) == 0 { + warning "Value was not received" + } + if Int($Value) <= 0 { + warning "Value must be greater than zero" + } + } +}', %[1]d, 'ContractConditions("MainCondition")', 2), +('113', 'max_forsign_size', 'contract max_forsign_size { + data { + Value string + } + conditions { if Size($Value) == 0 { warning "Value was not received" diff --git a/packages/migration/first_system_parameters_data.go b/packages/migration/first_system_parameters_data.go index fb2319ba7..955ff6f0f 100644 --- a/packages/migration/first_system_parameters_data.go +++ b/packages/migration/first_system_parameters_data.go @@ -66,5 +66,6 @@ INSERT INTO "1_system_parameters" ("id","name", "value", "conditions") VALUES ('63','block_reward','1000','true'), ('64','incorrect_blocks_per_day','10','true'), ('65','node_ban_time','86400000','true'), - ('66','local_node_ban_time','1800000','true'); + ('66','local_node_ban_time','1800000','true'), + ('67','max_forsign_size', '1000000', 'true'); ` diff --git a/packages/smart/smart_p.go b/packages/smart/smart_p.go index 0a3cf8117..5b57e0e0c 100644 --- a/packages/smart/smart_p.go +++ b/packages/smart/smart_p.go @@ -159,7 +159,7 @@ func UpdateSysParam(sc *SmartContract, name, value, conditions string) (int64, e `page_price`, `commission_size`: ok = ival >= 0 case `max_block_size`, `max_tx_size`, `max_tx_count`, `max_columns`, `max_indexes`, - `max_block_user_tx`, `max_fuel_tx`, `max_fuel_block`: + `max_block_user_tx`, `max_fuel_tx`, `max_fuel_block`, `max_forsign_size`: ok = ival > 0 case `fuel_rate`, `commission_wallet`: err := json.Unmarshal([]byte(value), &list) From d1e33af2a8d1412c1ba386c0983729a147742bb2 Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Thu, 5 Jul 2018 20:55:06 +0500 Subject: [PATCH 131/169] Fixed dot in money value (#434) --- packages/api/prepare.go | 2 +- packages/api/smart.go | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/api/prepare.go b/packages/api/prepare.go index 86de44bd8..e2019bee2 100644 --- a/packages/api/prepare.go +++ b/packages/api/prepare.go @@ -292,7 +292,7 @@ func forsignFormData(w http.ResponseWriter, r *http.Request, data *apiData, logg } case script.Decimal: - d, err := decimal.NewFromString(r.FormValue(fitem.Name)) + d, err := decimal.NewFromString(strings.Replace(r.FormValue(fitem.Name), `,`, `.`, 1)) if err != nil { logger.WithFields(log.Fields{"type": consts.ConversionError, "error": err}).Error("converting to decimal") return nil, errorAPI(w, err, http.StatusBadRequest) diff --git a/packages/api/smart.go b/packages/api/smart.go index ee6535484..ad46a40eb 100644 --- a/packages/api/smart.go +++ b/packages/api/smart.go @@ -88,7 +88,7 @@ func validateSmartContractJSON(r *http.Request, data *apiData, cntname string, p } } if fitem.Type.String() == script.Decimal { - re := regexp.MustCompile(`^\d+$`) + re := regexp.MustCompile(`^\d+([\.\,]\d+)?$`) if !re.Match([]byte(val)) { log.WithFields(log.Fields{"type": consts.InvalidObject, "value": val}).Error("The value of money is not valid") err = fmt.Errorf(`The value of money %s is not valid`, val) @@ -164,7 +164,7 @@ func validateSmartContract(r *http.Request, data *apiData, result *prepareResult } } if fitem.Type.String() == script.Decimal { - re := regexp.MustCompile(`^\d+$`) + re := regexp.MustCompile(`^\d+([\.\,]\d+)?$`) if !re.Match([]byte(val)) { log.WithFields(log.Fields{"type": consts.InvalidObject, "value": val}).Error("The value of money is not valid") err = fmt.Errorf(`The value of money %s is not valid`, val) From 
c4716b409600e5d55052ed30e052ccb73430555b Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Thu, 5 Jul 2018 20:55:42 +0500 Subject: [PATCH 132/169] Fixed not-latin in table and column names (#433) --- packages/api/tables_test.go | 17 ++++++++++++++--- packages/converter/converter.go | 11 +++++++++++ packages/smart/errors.go | 1 + packages/smart/funcs.go | 7 +++++-- 4 files changed, 31 insertions(+), 5 deletions(-) diff --git a/packages/api/tables_test.go b/packages/api/tables_test.go index 81dfed656..bb69611c8 100644 --- a/packages/api/tables_test.go +++ b/packages/api/tables_test.go @@ -68,8 +68,20 @@ func TestTableName(t *testing.T) { t.Error(err) return } + form := url.Values{"Name": {`кириллица`}, "Columns": {`[{"name":"MyName","type":"varchar", "index": "0", + "conditions":{"update":"true", "read":"true"}}]`}, "ApplicationId": {"1"}, + "Permissions": {`{"insert": "true", "update" : "true", "new_column": "true"}`}} + assert.EqualError(t, postTx(`NewTable`, &form), + `{"type":"panic","error":"Name кириллица must only contain latin, digit and '_', '-' characters"}`) + + form = url.Values{"Name": {`latin`}, "Columns": {`[{"name":"колонка","type":"varchar", "index": "0", + "conditions":{"update":"true", "read":"true"}}]`}, "ApplicationId": {"1"}, + "Permissions": {`{"insert": "true", "update" : "true", "new_column": "true"}`}} + assert.EqualError(t, postTx(`NewTable`, &form), + `{"type":"panic","error":"Name колонка must only contain latin, digit and '_', '-' characters"}`) + name := randName(`tbl`) - form := url.Values{"Name": {`tbl-` + name}, "Columns": {`[{"name":"MyName","type":"varchar", "index": "0", + form = url.Values{"Name": {`tbl-` + name}, "Columns": {`[{"name":"MyName","type":"varchar", "index": "0", "conditions":{"update":"true", "read":"true"}}]`}, "ApplicationId": {"100"}, "Permissions": {`{"insert": "true", "update" : "true", "new_column": "true"}`}} err := postTx(`NewTable`, &form) @@ -81,8 +93,7 @@ func TestTableName(t *testing.T) { action { DBInsert("tbl-` + name + `", "MyName", "test") DBUpdate("tbl-` + name + `", 1, "MyName", "New test") - }}`}, - "Conditions": {`ContractConditions("MainCondition")`}} + }}`}, "ApplicationId": {`100`}, "Conditions": {`ContractConditions("MainCondition")`}} err = postTx("NewContract", &form) if err != nil { t.Error(err) diff --git a/packages/converter/converter.go b/packages/converter/converter.go index bea0c00ba..b89d2fab5 100644 --- a/packages/converter/converter.go +++ b/packages/converter/converter.go @@ -867,6 +867,17 @@ func StripTags(value string) string { return strings.Replace(strings.Replace(value, `<`, `<`, -1), `>`, `>`, -1) } +// IsLatin checks if the specified string contains only latin character, digits and '-', '_'. +func IsLatin(name string) bool { + for _, ch := range []byte(name) { + if !((ch >= '0' && ch <= '9') || ch == '_' || ch == '-' || (ch >= 'a' && ch <= 'z') || + (ch >= 'A' && ch <= 'Z')) { + return false + } + } + return true +} + // IsValidAddress checks if the specified address is apla address. 
func IsValidAddress(address string) bool { val := []byte(strings.Replace(address, `-`, ``, -1)) diff --git a/packages/smart/errors.go b/packages/smart/errors.go index 60bc7c0b1..fa861ec3c 100644 --- a/packages/smart/errors.go +++ b/packages/smart/errors.go @@ -22,6 +22,7 @@ const ( eTableNotFound = `Table %s has not been found` eContractLoop = `There is loop in %s contract` eContractExist = `Contract %s already exists` + eLatin = `Name %s must only contain latin, digit and '_', '-' characters` ) var ( diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index b2fc24f96..2c7179d53 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -525,8 +525,8 @@ func CreateTable(sc *SmartContract, name, columns, permissions string, applicati return fmt.Errorf("The table name cannot be empty") } - if len(name) > 0 && name[0] == '@' { - return fmt.Errorf(`The name of the table cannot begin with @`) + if !converter.IsLatin(name) { + return fmt.Errorf(eLatin, name) } tableName := getDefTableName(sc, name) @@ -1233,6 +1233,9 @@ func checkColumnName(name string) error { } else if name[0] >= '0' && name[0] <= '9' { return errWrongColumn } + if !converter.IsLatin(name) { + return fmt.Errorf(eLatin, name) + } return nil } From 0af81c0b408194cb02c08aeceac142d4fc5f421f Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 5 Jul 2018 21:02:33 +0300 Subject: [PATCH 133/169] delete bad txes --- packages/daemons/block_generator.go | 6 ++++++ packages/parser/common_parse_data_full.go | 5 +++++ 2 files changed, 11 insertions(+) diff --git a/packages/daemons/block_generator.go b/packages/daemons/block_generator.go index e6dfefb8e..0bc1996ba 100644 --- a/packages/daemons/block_generator.go +++ b/packages/daemons/block_generator.go @@ -180,6 +180,12 @@ func processTransactions(logger *log.Entry) ([]*model.Transaction, error) { } continue } + + if err := p.CheckTransaction(time.Now().Unix()); err != nil { + p.ProcessBadTransaction(err) + continue + } + if p.TxSmart != nil { err = limits.CheckLimit(p) if err == parser.ErrLimitStop && i > 0 { diff --git a/packages/parser/common_parse_data_full.go b/packages/parser/common_parse_data_full.go index 88c0a8410..bad573b34 100644 --- a/packages/parser/common_parse_data_full.go +++ b/packages/parser/common_parse_data_full.go @@ -604,6 +604,10 @@ func checkTransaction(p *Parser, checkTime int64, checkForDupTr bool) error { return nil } +func (p *Parser) CheckTransaction(checkTime int64) error { + return checkTransaction(p, checkTime, true) +} + // CheckTransaction is checking transaction func CheckTransaction(data []byte) (*tx.Header, error) { trBuff := bytes.NewBuffer(data) @@ -614,6 +618,7 @@ func CheckTransaction(data []byte) (*tx.Header, error) { err = checkTransaction(p, time.Now().Unix(), true) if err != nil { + p.ProcessBadTransaction(err) return nil, err } From 24451ea7536ee7d824b8e13212a352f5c6c9a8a5 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Fri, 6 Jul 2018 07:15:27 +0300 Subject: [PATCH 134/169] add tx_hash to log --- packages/parser/common_parse_data_full.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/parser/common_parse_data_full.go b/packages/parser/common_parse_data_full.go index bad573b34..a61dec2a7 100644 --- a/packages/parser/common_parse_data_full.go +++ b/packages/parser/common_parse_data_full.go @@ -579,7 +579,7 @@ func checkTransaction(p *Parser, checkTime int64, checkForDupTr bool) error { if err != nil { return err } - logger := log.WithFields(log.Fields{"tx_type": p.dataType, "tx_time": 
p.TxTime, "tx_state_id": p.TxEcosystemID}) + logger := log.WithFields(log.Fields{"tx_hash": p.TxHash, "tx_type": p.dataType, "tx_time": p.TxTime, "tx_state_id": p.TxEcosystemID}) // time in the transaction cannot be more than MAX_TX_FORW seconds of block time if p.TxTime-consts.MAX_TX_FORW > checkTime { logger.WithFields(log.Fields{"tx_max_forw": consts.MAX_TX_FORW, "type": consts.ParameterExceeded}).Error("time in the tx cannot be more than MAX_TX_FORW seconds of block time ") From fd0c71d3ff9137973b9770b11dea896c56d8ba81 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Fri, 6 Jul 2018 08:39:51 +0300 Subject: [PATCH 135/169] delete check dup --- packages/parser/common_parse_data_full.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/parser/common_parse_data_full.go b/packages/parser/common_parse_data_full.go index a61dec2a7..70fea778d 100644 --- a/packages/parser/common_parse_data_full.go +++ b/packages/parser/common_parse_data_full.go @@ -605,7 +605,7 @@ func checkTransaction(p *Parser, checkTime int64, checkForDupTr bool) error { } func (p *Parser) CheckTransaction(checkTime int64) error { - return checkTransaction(p, checkTime, true) + return checkTransaction(p, checkTime, false) } // CheckTransaction is checking transaction From e079cf597a3398b68fc0c7dc8598e8c88c67004d Mon Sep 17 00:00:00 2001 From: Roman Potekhin Date: Fri, 6 Jul 2018 10:05:01 +0300 Subject: [PATCH 136/169] replace centrifugo url scheme from http(s) to ws(s) --- packages/api/config.go | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/packages/api/config.go b/packages/api/config.go index d95ec25d3..85374ac06 100644 --- a/packages/api/config.go +++ b/packages/api/config.go @@ -2,6 +2,7 @@ package api import ( "net/http" + "strings" "github.com/GenesisKernel/go-genesis/packages/conf" @@ -31,12 +32,21 @@ func getConfigOption(w http.ResponseWriter, r *http.Request, data *apiData, logg return err } +func replaceHttpSchemeToWs(centrifugoURL string) string { + if strings.HasPrefix(centrifugoURL, "http:") { + return strings.Replace(centrifugoURL, "http:", "ws:", -1) + } else if strings.HasPrefix(centrifugoURL, "https:") { + return strings.Replace(centrifugoURL, "https:", "wss:", -1) + } + return centrifugoURL +} + func centrifugoAddressHandler(w http.ResponseWriter, data *apiData) error { if _, err := publisher.GetStats(); err != nil { log.WithFields(log.Fields{"type": consts.CentrifugoError, "error": err}).Warn("on getting centrifugo stats") return errorAPI(w, err, http.StatusNotFound) } - data.result = conf.Config.Centrifugo.URL + data.result = replaceHttpSchemeToWs(conf.Config.Centrifugo.URL) return nil } From 3969ac19bee5df5b160b3e9c05f20e8f8adb4546 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 2 Jul 2018 12:53:50 +0300 Subject: [PATCH 137/169] add content of default page to system_parameters --- .../migration/first_system_parameters_data.go | 27 ++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/packages/migration/first_system_parameters_data.go b/packages/migration/first_system_parameters_data.go index 955ff6f0f..4e1b6db1e 100644 --- a/packages/migration/first_system_parameters_data.go +++ b/packages/migration/first_system_parameters_data.go @@ -2,7 +2,32 @@ package migration var firstSystemParametersDataSQL = ` INSERT INTO "1_system_parameters" ("id","name", "value", "conditions") VALUES - ('1','default_ecosystem_page', '', 'true'), + ('1','default_ecosystem_page', 'Div(content-wrapper){ + Div(panel panel-primary){ + 
Div(list-group-item text-center){ + P(Class: h3 m0 text-bold, Body: Congratulations! You created your own ecosystem.) + } + Div(list-group-item){ + Span(Class: h3, Body: "You as Founder hold a complete set of rights for controlling the ecosystem – creating and editing applications, modifying ecosystem parameters, etc. ") + Span(Class: h3, Body: "To get started, you can download the finished applications from the") + Span(Class: h3 text-primary, Body: " https://github.com/GenesisKernel/apps ") + Span(Class: h3, Body: "and install them using the Import service. ") + Span(Class: h3, Body: "The Strong(basic.json) contains applications for managing roles, creating notifications and votings. ") + Span(Class: h3, Body: "Or you can create your own apps using the tools in the Admin tab. ") + Span(Class: h3, Body: "Documentation ") + Span(Class: h3 text-primary, Body: "https://genesiskernel.readthedocs.io") + } + Div(panel-footer text-right clearfix){ + Div(pull-left){ + Button(Body: Ecosystem parameters, Class: btn btn-default, Page: params_list) + }.Style(margin-right: 20px;) + Div(pull-left){ + Button(Body: Dashboard, Class: btn btn-default, Page: admin_dashboard) + } + Button(Body: Import, Class: btn btn-primary, Page: import_upload) + } + } + }', 'true'), ('2','default_ecosystem_menu', '', 'true'), ('3','default_ecosystem_contract', '', 'true'), ('4','gap_between_blocks', '2', 'true'), From b0da772dfe150057b7c184910d693c68baf4c791 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 2 Jul 2018 15:09:38 +0300 Subject: [PATCH 138/169] move updating system parameters before creating default_page --- packages/transaction/custom/first_block.go | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/packages/transaction/custom/first_block.go b/packages/transaction/custom/first_block.go index 7d086d7ae..e217903a1 100644 --- a/packages/transaction/custom/first_block.go +++ b/packages/transaction/custom/first_block.go @@ -74,6 +74,16 @@ func (t *FirstBlockTransaction) Action() error { } amount := decimal.New(consts.FounderAmount, int32(converter.StrToInt64(sp.Value))).String() + commission := &model.SystemParameter{Name: `commission_wallet`} + if err = commission.SaveArray([][]string{{"1", converter.Int64ToStr(keyID)}}); err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("saving commission_wallet array") + return utils.ErrInfo(err) + } + if err = syspar.SysUpdate(nil); err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") + return utils.ErrInfo(err) + } + err = model.GetDB(t.DbTransaction).Exec(`insert into "1_keys" (id,pub,amount) values(?, ?,?)`, keyID, data.PublicKey, amount).Error if err != nil { @@ -96,15 +106,6 @@ func (t *FirstBlockTransaction) Action() error { if err != nil { return utils.ErrInfo(err) } - commission := &model.SystemParameter{Name: `commission_wallet`} - if err = commission.SaveArray([][]string{{"1", converter.Int64ToStr(keyID)}}); err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("saving commission_wallet array") - return utils.ErrInfo(err) - } - if err = syspar.SysUpdate(nil); err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") - return utils.ErrInfo(err) - } syspar.SetFirstBlockData(data) return nil } From 472f24a32bd4989cbeca0da542fee225eea149a2 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 5 Jul 2018 21:02:33 +0300 Subject: [PATCH 139/169] delete 
bad txes --- packages/daemons/block_generator.go | 6 + packages/parser/common_parse_data_full.go | 935 ++++++++++++++++++++++ 2 files changed, 941 insertions(+) create mode 100644 packages/parser/common_parse_data_full.go diff --git a/packages/daemons/block_generator.go b/packages/daemons/block_generator.go index f09f81413..42e3d65d8 100644 --- a/packages/daemons/block_generator.go +++ b/packages/daemons/block_generator.go @@ -181,6 +181,12 @@ func processTransactions(logger *log.Entry) ([]*model.Transaction, error) { } continue } + + if err := p.CheckTransaction(time.Now().Unix()); err != nil { + p.ProcessBadTransaction(err) + continue + } + if p.TxSmart != nil { err = limits.CheckLimit(p) if err == block.ErrLimitStop && i > 0 { diff --git a/packages/parser/common_parse_data_full.go b/packages/parser/common_parse_data_full.go new file mode 100644 index 000000000..bad573b34 --- /dev/null +++ b/packages/parser/common_parse_data_full.go @@ -0,0 +1,935 @@ +// Copyright 2016 The go-daylight Authors +// This file is part of the go-daylight library. +// +// The go-daylight library is free software: you can redistribute it and/or modify +// it under the terms of the GNU Lesser General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// The go-daylight library is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Lesser General Public License for more details. +// +// You should have received a copy of the GNU Lesser General Public License +// along with the go-daylight library. If not, see . + +package parser + +import ( + "bytes" + "encoding/hex" + "fmt" + "strings" + "sync" + "time" + + "github.com/GenesisKernel/go-genesis/packages/conf/syspar" + "github.com/GenesisKernel/go-genesis/packages/consts" + "github.com/GenesisKernel/go-genesis/packages/converter" + "github.com/GenesisKernel/go-genesis/packages/crypto" + "github.com/GenesisKernel/go-genesis/packages/model" + "github.com/GenesisKernel/go-genesis/packages/script" + "github.com/GenesisKernel/go-genesis/packages/smart" + "github.com/GenesisKernel/go-genesis/packages/utils" + "github.com/GenesisKernel/go-genesis/packages/utils/tx" + + "github.com/shopspring/decimal" + log "github.com/sirupsen/logrus" + "gopkg.in/vmihailenco/msgpack.v2" +) + +var txParserCache = &parserCache{cache: make(map[string]*Parser)} + +// Block is storing block data +type Block struct { + Header utils.BlockData + PrevHeader *utils.BlockData + MrklRoot []byte + BinData []byte + Parsers []*Parser + SysUpdate bool + GenBlock bool // it equals true when we are generating a new block + StopCount int // The count of good tx in the block +} + +func (b Block) String() string { + return fmt.Sprintf("header: %s, prevHeader: %s", b.Header, b.PrevHeader) +} + +// GetLogger is returns logger +func (b Block) GetLogger() *log.Entry { + return log.WithFields(log.Fields{"block_id": b.Header.BlockID, "block_time": b.Header.Time, "block_wallet_id": b.Header.KeyID, + "block_state_id": b.Header.EcosystemID, "block_hash": b.Header.Hash, "block_version": b.Header.Version}) +} + +// InsertBlockWOForks is inserting blocks +func InsertBlockWOForks(data []byte, genBlock, firstBlock bool) error { + block, err := ProcessBlockWherePrevFromBlockchainTable(data, !firstBlock) + if err != nil { + return err + } + block.GenBlock = genBlock + if err := block.CheckBlock(); 
err != nil { + return err + } + + err = block.PlayBlockSafe() + if err != nil { + return err + } + + log.WithFields(log.Fields{"block_id": block.Header.BlockID}).Debug("block was inserted successfully") + return nil +} + +// PlayBlockSafe is inserting block safely +func (b *Block) PlayBlockSafe() error { + logger := b.GetLogger() + dbTransaction, err := model.StartTransaction() + if err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("starting db transaction") + return err + } + + err = b.playBlock(dbTransaction) + if b.GenBlock && b.StopCount > 0 { + doneTx := b.Parsers[:b.StopCount] + trData := make([][]byte, 0, b.StopCount) + for _, tr := range doneTx { + trData = append(trData, tr.TxFullData) + } + NodePrivateKey, _, err := utils.GetNodeKeys() + if err != nil || len(NodePrivateKey) < 1 { + log.WithFields(log.Fields{"type": consts.NodePrivateKeyFilename, "error": err}).Error("reading node private key") + return err + } + + newBlockData, err := MarshallBlock(&b.Header, trData, b.PrevHeader.Hash, NodePrivateKey) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("marshalling new block") + return err + } + + isFirstBlock := b.Header.BlockID == 1 + nb, err := parseBlock(bytes.NewBuffer(newBlockData), isFirstBlock) + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("parsing new block") + return err + } + b.BinData = newBlockData + b.Parsers = nb.Parsers + b.MrklRoot = nb.MrklRoot + b.SysUpdate = nb.SysUpdate + err = nil + } else if err != nil { + dbTransaction.Rollback() + return err + } + + if err := UpdBlockInfo(dbTransaction, b); err != nil { + dbTransaction.Rollback() + return err + } + + if err := InsertIntoBlockchain(dbTransaction, b); err != nil { + dbTransaction.Rollback() + return err + } + + dbTransaction.Commit() + if b.SysUpdate { + b.SysUpdate = false + if err = syspar.SysUpdate(nil); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") + return err + } + } + return nil +} + +// ProcessBlockWherePrevFromMemory is processing block with in memory previous block +func ProcessBlockWherePrevFromMemory(data []byte) (*Block, error) { + if int64(len(data)) > syspar.GetMaxBlockSize() { + log.WithFields(log.Fields{"size": len(data), "max_size": syspar.GetMaxBlockSize(), "type": consts.ParameterExceeded}).Error("binary block size exceeds max block size") + return nil, utils.ErrInfo(fmt.Errorf(`len(binaryBlock) > variables.Int64["max_block_size"]`)) + } + + buf := bytes.NewBuffer(data) + if buf.Len() == 0 { + log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("block data is empty") + return nil, fmt.Errorf("empty buffer") + } + + block, err := parseBlock(buf, false) + if err != nil { + return nil, err + } + block.BinData = data + + if err := block.readPreviousBlockFromMemory(); err != nil { + return nil, err + } + return block, nil +} + +// ProcessBlockWherePrevFromBlockchainTable is processing block with in table previous block +func ProcessBlockWherePrevFromBlockchainTable(data []byte, checkSize bool) (*Block, error) { + if checkSize && int64(len(data)) > syspar.GetMaxBlockSize() { + log.WithFields(log.Fields{"check_size": checkSize, "size": len(data), "max_size": syspar.GetMaxBlockSize(), "type": consts.ParameterExceeded}).Error("binary block size exceeds max block size") + return nil, utils.ErrInfo(fmt.Errorf(`len(binaryBlock) > variables.Int64["max_block_size"]`)) + } + + buf := bytes.NewBuffer(data) + if 
buf.Len() == 0 { + log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("buffer is empty") + return nil, fmt.Errorf("empty buffer") + } + + block, err := parseBlock(buf, !checkSize) + if err != nil { + return nil, err + } + block.BinData = data + + if err := block.readPreviousBlockFromBlockchainTable(); err != nil { + return nil, err + } + + return block, nil +} + +func parseBlock(blockBuffer *bytes.Buffer, firstBlock bool) (*Block, error) { + header, err := ParseBlockHeader(blockBuffer, !firstBlock) + if err != nil { + return nil, err + } + + logger := log.WithFields(log.Fields{"block_id": header.BlockID, "block_time": header.Time, "block_wallet_id": header.KeyID, + "block_state_id": header.EcosystemID, "block_hash": header.Hash, "block_version": header.Version}) + parsers := make([]*Parser, 0) + + var mrklSlice [][]byte + + // parse transactions + for blockBuffer.Len() > 0 { + transactionSize, err := converter.DecodeLengthBuf(blockBuffer) + if err != nil { + logger.WithFields(log.Fields{"type": consts.UnmarshallingError, "error": err}).Error("transaction size is 0") + return nil, fmt.Errorf("bad block format (%s)", err) + } + if blockBuffer.Len() < int(transactionSize) { + logger.WithFields(log.Fields{"size": blockBuffer.Len(), "match_size": int(transactionSize), "type": consts.SizeDoesNotMatch}).Error("transaction size does not matches encoded length") + return nil, fmt.Errorf("bad block format (transaction len is too big: %d)", transactionSize) + } + + if transactionSize == 0 { + logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("transaction size is 0") + return nil, fmt.Errorf("transaction size is 0") + } + + bufTransaction := bytes.NewBuffer(blockBuffer.Next(int(transactionSize))) + p, err := ParseTransaction(bufTransaction) + if err != nil { + if p != nil && p.TxHash != nil { + p.processBadTransaction(p.TxHash, err.Error()) + } + return nil, fmt.Errorf("parse transaction error(%s)", err) + } + p.BlockData = &header + + parsers = append(parsers, p) + + // build merkle tree + if len(p.TxFullData) > 0 { + dSha256Hash, err := crypto.DoubleHash(p.TxFullData) + if err != nil { + logger.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("double hashing tx full data") + return nil, err + } + dSha256Hash = converter.BinToHex(dSha256Hash) + mrklSlice = append(mrklSlice, dSha256Hash) + } + } + + if len(mrklSlice) == 0 { + mrklSlice = append(mrklSlice, []byte("0")) + } + + return &Block{ + Header: header, + Parsers: parsers, + MrklRoot: utils.MerkleTreeRoot(mrklSlice), + }, nil +} + +// ParseBlockHeader is parses block header +func ParseBlockHeader(binaryBlock *bytes.Buffer, checkMaxSize bool) (utils.BlockData, error) { + var block utils.BlockData + var err error + + if binaryBlock.Len() < 9 { + log.WithFields(log.Fields{"size": binaryBlock.Len(), "type": consts.SizeDoesNotMatch}).Error("binary block size is too small") + return utils.BlockData{}, fmt.Errorf("bad binary block length") + } + + blockVersion := int(converter.BinToDec(binaryBlock.Next(2))) + + if checkMaxSize && int64(binaryBlock.Len()) > syspar.GetMaxBlockSize() { + log.WithFields(log.Fields{"size": binaryBlock.Len(), "max_size": syspar.GetMaxBlockSize(), "type": consts.ParameterExceeded}).Error("binary block size exceeds max block size") + err = fmt.Errorf(`len(binaryBlock) > variables.Int64["max_block_size"] %v > %v`, + binaryBlock.Len(), syspar.GetMaxBlockSize()) + + return utils.BlockData{}, err + } + + block.BlockID = converter.BinToDec(binaryBlock.Next(4)) + block.Time = 
converter.BinToDec(binaryBlock.Next(4)) + block.Version = blockVersion + block.EcosystemID = converter.BinToDec(binaryBlock.Next(4)) + block.KeyID, err = converter.DecodeLenInt64Buf(binaryBlock) + if err != nil { + log.WithFields(log.Fields{"type": consts.UnmarshallingError, "block_id": block.BlockID, "block_time": block.Time, "block_version": block.Version, "error": err}).Error("decoding binary block walletID") + return utils.BlockData{}, err + } + block.NodePosition = converter.BinToDec(binaryBlock.Next(1)) + + if block.BlockID > 1 { + signSize, err := converter.DecodeLengthBuf(binaryBlock) + if err != nil { + log.WithFields(log.Fields{"type": consts.UnmarshallingError, "block_id": block.BlockID, "time": block.Time, "version": block.Version, "error": err}).Error("decoding binary sign size") + return utils.BlockData{}, err + } + if binaryBlock.Len() < signSize { + log.WithFields(log.Fields{"type": consts.UnmarshallingError, "block_id": block.BlockID, "time": block.Time, "version": block.Version, "error": err}).Error("decoding binary sign") + return utils.BlockData{}, fmt.Errorf("bad block format (no sign)") + } + block.Sign = binaryBlock.Next(int(signSize)) + } else { + binaryBlock.Next(1) + } + + return block, nil +} + +// ParseTransaction is parsing transaction +func ParseTransaction(buffer *bytes.Buffer) (*Parser, error) { + if buffer.Len() == 0 { + log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("empty transaction buffer") + return nil, fmt.Errorf("empty transaction buffer") + } + + hash, err := crypto.Hash(buffer.Bytes()) + // or DoubleHash ? + if err != nil { + log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("hashing transaction") + return nil, err + } + + if p, ok := txParserCache.Get(string(hash)); ok { + return p, nil + } + + p := new(Parser) + p.TxHash = hash + p.TxUsedCost = decimal.New(0, 0) + p.TxFullData = buffer.Bytes() + + txType := int64(buffer.Bytes()[0]) + p.dataType = int(txType) + + // smart contract transaction + if IsContractTransaction(int(txType)) { + // skip byte with transaction type + buffer.Next(1) + p.TxBinaryData = buffer.Bytes() + if err := parseContractTransaction(p, buffer); err != nil { + return nil, err + } + + // TODO: check for what it was here: + /*if err := p.CallContract(smart.CallInit | smart.CallCondition); err != nil { + return nil, err + }*/ + + // struct transaction (only first block transaction for now) + } else if consts.IsStruct(int(txType)) { + p.TxBinaryData = buffer.Bytes() + if err := parseStructTransaction(p, buffer, txType); err != nil { + return p, err + } + + // all other transactions + } else { + // skip byte with transaction type + buffer.Next(1) + p.TxBinaryData = buffer.Bytes() + if err := parseRegularTransaction(p, buffer, txType); err != nil { + return p, err + } + } + + txParserCache.Set(p) + + return p, nil +} + +// IsContractTransaction checks txType +func IsContractTransaction(txType int) bool { + return txType > 127 +} + +func parseContractTransaction(p *Parser, buf *bytes.Buffer) error { + smartTx := tx.SmartContract{} + if err := msgpack.Unmarshal(buf.Bytes(), &smartTx); err != nil { + log.WithFields(log.Fields{"tx_type": p.dataType, "tx_hash": p.TxHash, "error": err, "type": consts.UnmarshallingError}).Error("unmarshalling smart tx msgpack") + return err + } + p.TxPtr = nil + p.TxSmart = &smartTx + p.TxTime = smartTx.Time + p.TxEcosystemID = (smartTx.EcosystemID) + p.TxKeyID = smartTx.KeyID + + contract := smart.GetContractByID(int32(smartTx.Type)) + if contract == nil { + 
log.WithFields(log.Fields{"contract_type": smartTx.Type, "type": consts.NotFound}).Error("unknown contract") + return fmt.Errorf(`unknown contract %d`, smartTx.Type) + } + forsign := []string{smartTx.ForSign()} + + p.TxContract = contract + p.TxHeader = &smartTx.Header + + input := smartTx.Data + p.TxData = make(map[string]interface{}) + + if contract.Block.Info.(*script.ContractInfo).Tx != nil { + for _, fitem := range *contract.Block.Info.(*script.ContractInfo).Tx { + var err error + var v interface{} + var forv string + var isforv bool + + if fitem.ContainsTag(script.TagFile) { + var ( + data []byte + file *tx.File + ) + if err := converter.BinUnmarshal(&input, &data); err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling file") + return err + } + if err := msgpack.Unmarshal(data, &file); err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("unmarshalling file msgpack") + return err + } + + p.TxData[fitem.Name] = file.Data + p.TxData[fitem.Name+"MimeType"] = file.MimeType + + forsign = append(forsign, file.MimeType, file.Hash) + continue + } + + switch fitem.Type.String() { + case `uint64`: + var val uint64 + converter.BinUnmarshal(&input, &val) + v = val + case `float64`: + var val float64 + converter.BinUnmarshal(&input, &val) + v = val + case `int64`: + v, err = converter.DecodeLenInt64(&input) + case script.Decimal: + var s string + if err := converter.BinUnmarshal(&input, &s); err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling script.Decimal") + return err + } + v, err = decimal.NewFromString(s) + case `string`: + var s string + if err := converter.BinUnmarshal(&input, &s); err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling string") + return err + } + v = s + case `[]uint8`: + var b []byte + if err := converter.BinUnmarshal(&input, &b); err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling string") + return err + } + v = hex.EncodeToString(b) + case `[]interface {}`: + count, err := converter.DecodeLength(&input) + if err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling []interface{}") + return err + } + isforv = true + list := make([]interface{}, 0) + for count > 0 { + length, err := converter.DecodeLength(&input) + if err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling tx length") + return err + } + if len(input) < int(length) { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError, "length": int(length), "slice length": len(input)}).Error("incorrect tx size") + return fmt.Errorf(`input slice is short`) + } + list = append(list, string(input[:length])) + input = input[length:] + count-- + } + if len(list) > 0 { + slist := make([]string, len(list)) + for j, lval := range list { + slist[j] = lval.(string) + } + forv = strings.Join(slist, `,`) + } + v = list + } + if p.TxData[fitem.Name] == nil { + p.TxData[fitem.Name] = v + } + if err != nil { + return err + } + if strings.Index(fitem.Tags, `image`) >= 0 { + continue + } + if isforv { + v = forv + } + forsign = append(forsign, fmt.Sprintf("%v", v)) + } + } + p.TxData[`forsign`] = strings.Join(forsign, ",") + + return nil +} + +func parseStructTransaction(p *Parser, buf *bytes.Buffer, txType 
int64) error { + trParser, err := GetParser(p, consts.TxTypes[int(txType)]) + if err != nil { + return err + } + p.txParser = trParser + + p.TxPtr = consts.MakeStruct(consts.TxTypes[int(txType)]) + input := buf.Bytes() + if err := converter.BinUnmarshal(&input, p.TxPtr); err != nil { + log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError, "tx_type": int(txType)}).Error("getting parser for tx type") + return err + } + + head := consts.Header(p.TxPtr) + p.TxKeyID = head.KeyID + p.TxTime = int64(head.Time) + p.TxType = txType + + err = trParser.Validate() + if err != nil { + return utils.ErrInfo(err) + } + + return nil +} + +func parseRegularTransaction(p *Parser, buf *bytes.Buffer, txType int64) error { + trParser, err := GetParser(p, consts.TxTypes[int(txType)]) + if err != nil { + return err + } + p.txParser = trParser + + err = trParser.Init() + if err != nil { + log.WithFields(log.Fields{"error": err, "tx_type": int(txType)}).Error("parser init") + return err + } + header := trParser.Header() + if header == nil { + log.WithFields(log.Fields{"error": err, "tx_type": int(txType)}).Error("parser get header") + return fmt.Errorf("tx header is nil") + } + + p.TxHeader = header + p.TxTime = header.Time + p.TxType = txType + p.TxEcosystemID = (header.EcosystemID) + p.TxKeyID = header.KeyID + + err = trParser.Validate() + if _, ok := err.(error); ok { + return utils.ErrInfo(err.(error)) + } + + return nil +} + +func checkTransaction(p *Parser, checkTime int64, checkForDupTr bool) error { + err := CheckLogTx(p.TxFullData, checkForDupTr, false) + if err != nil { + return err + } + logger := log.WithFields(log.Fields{"tx_type": p.dataType, "tx_time": p.TxTime, "tx_state_id": p.TxEcosystemID}) + // time in the transaction cannot be more than MAX_TX_FORW seconds of block time + if p.TxTime-consts.MAX_TX_FORW > checkTime { + logger.WithFields(log.Fields{"tx_max_forw": consts.MAX_TX_FORW, "type": consts.ParameterExceeded}).Error("time in the tx cannot be more than MAX_TX_FORW seconds of block time ") + return utils.ErrInfo(fmt.Errorf("transaction time is too big")) + } + + // time in transaction cannot be less than -24 of block time + if p.TxTime < checkTime-consts.MAX_TX_BACK { + logger.WithFields(log.Fields{"tx_max_back": consts.MAX_TX_BACK, "type": consts.ParameterExceeded}).Error("time in the tx cannot be less then -24 of block time") + return utils.ErrInfo(fmt.Errorf("incorrect transaction time")) + } + + if p.TxContract == nil { + if p.BlockData != nil && p.BlockData.BlockID != 1 { + if p.TxKeyID == 0 { + logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("Empty user id") + return utils.ErrInfo(fmt.Errorf("empty user id")) + } + } + } + + return nil +} + +func (p *Parser) CheckTransaction(checkTime int64) error { + return checkTransaction(p, checkTime, true) +} + +// CheckTransaction is checking transaction +func CheckTransaction(data []byte) (*tx.Header, error) { + trBuff := bytes.NewBuffer(data) + p, err := ParseTransaction(trBuff) + if err != nil { + return nil, err + } + + err = checkTransaction(p, time.Now().Unix(), true) + if err != nil { + p.ProcessBadTransaction(err) + return nil, err + } + + return p.TxHeader, nil +} + +func (b *Block) readPreviousBlockFromMemory() error { + return nil +} + +func (b *Block) readPreviousBlockFromBlockchainTable() error { + if b.Header.BlockID == 1 { + b.PrevHeader = &utils.BlockData{} + return nil + } + + var err error + b.PrevHeader, err = GetBlockDataFromBlockChain(b.Header.BlockID - 1) + if err != nil { + return 
utils.ErrInfo(fmt.Errorf("can't get block %d", b.Header.BlockID-1)) + } + return nil +} + +func playTransaction(p *Parser) (string, error) { + // smart-contract + if p.TxContract != nil { + // check that there are enough money in CallContract + return p.CallContract(smart.CallInit | smart.CallCondition | smart.CallAction) + } + + if p.txParser == nil { + return "", utils.ErrInfo(fmt.Errorf("can't find parser for %d", p.TxType)) + } + + err := p.txParser.Action() + if err != nil { + return "", err + } + + return "", nil +} + +func (b *Block) playBlock(dbTransaction *model.DbTransaction) error { + logger := b.GetLogger() + if _, err := model.DeleteUsedTransactions(dbTransaction); err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("delete used transactions") + return err + } + limits := NewLimits(b) + for curTx, p := range b.Parsers { + var ( + msg string + err error + ) + p.DbTransaction = dbTransaction + + err = dbTransaction.Savepoint(curTx) + if err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("using savepoint") + return err + } + msg, err = playTransaction(p) + if err == nil && p.TxSmart != nil { + err = limits.CheckLimit(p) + } + if err != nil { + if err == errNetworkStopping { + return err + } + + if b.GenBlock && err == ErrLimitStop { + b.StopCount = curTx + model.IncrementTxAttemptCount(p.DbTransaction, p.TxHash) + } + errRoll := dbTransaction.RollbackSavepoint(curTx) + if errRoll != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("rolling back to previous savepoint") + return errRoll + } + if b.GenBlock && err == ErrLimitStop { + break + } + // skip this transaction + model.MarkTransactionUsed(p.DbTransaction, p.TxHash) + p.processBadTransaction(p.TxHash, err.Error()) + if p.SysUpdate { + if err = syspar.SysUpdate(p.DbTransaction); err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") + } + p.SysUpdate = false + } + continue + } + err = dbTransaction.ReleaseSavepoint(curTx) + if err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("releasing savepoint") + } + if p.SysUpdate { + b.SysUpdate = true + p.SysUpdate = false + } + + if _, err := model.MarkTransactionUsed(p.DbTransaction, p.TxHash); err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("marking transaction used") + return err + } + + // update status + ts := &model.TransactionStatus{} + if err := ts.UpdateBlockMsg(p.DbTransaction, b.Header.BlockID, msg, p.TxHash); err != nil { + logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("updating transaction status block id") + return err + } + if err := InsertInLogTx(p.DbTransaction, p.TxFullData, p.TxTime); err != nil { + return utils.ErrInfo(err) + } + } + return nil +} + +// CheckBlock is checking block +func (b *Block) CheckBlock() error { + logger := b.GetLogger() + // exclude blocks from future + if b.Header.Time > time.Now().Unix() { + logger.WithFields(log.Fields{"type": consts.ParameterExceeded}).Error("block time is larger than now") + return utils.ErrInfo(fmt.Errorf("incorrect block time - block.Header.Time > time.Now().Unix()")) + } + if b.PrevHeader == nil || b.PrevHeader.BlockID != b.Header.BlockID-1 { + if err := b.readPreviousBlockFromBlockchainTable(); err != nil { + logger.WithFields(log.Fields{"type": 
consts.InvalidObject}).Error("block id is larger then previous more than on 1") + return utils.ErrInfo(err) + } + } + + if b.Header.BlockID == 1 { + return nil + } + + // is this block too early? Allowable error = error_time + if b.PrevHeader != nil { + if b.Header.BlockID != b.PrevHeader.BlockID+1 { + logger.WithFields(log.Fields{"type": consts.InvalidObject}).Error("block id is larger then previous more than on 1") + return utils.ErrInfo(fmt.Errorf("incorrect block_id %d != %d +1", b.Header.BlockID, b.PrevHeader.BlockID)) + } + + // skip time validation for first block + if b.Header.BlockID > 1 { + blockTimeCalculator, err := utils.BuildBlockTimeCalculator(nil) + if err != nil { + logger.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("building block time calculator") + return err + } + + validBlockTime, err := blockTimeCalculator.ValidateBlock(b.Header.NodePosition, time.Unix(b.Header.Time, 0)) + if err != nil { + logger.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("calculating block time") + return err + } + + if !validBlockTime { + logger.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("incorrect block time") + return utils.ErrInfo(fmt.Errorf("incorrect block time %d", b.PrevHeader.Time)) + } + } + } + + // check each transaction + txCounter := make(map[int64]int) + txHashes := make(map[string]struct{}) + for _, p := range b.Parsers { + hexHash := string(converter.BinToHex(p.TxHash)) + // check for duplicate transactions + if _, ok := txHashes[hexHash]; ok { + logger.WithFields(log.Fields{"tx_hash": hexHash, "type": consts.DuplicateObject}).Error("duplicate transaction") + return utils.ErrInfo(fmt.Errorf("duplicate transaction %s", hexHash)) + } + txHashes[hexHash] = struct{}{} + + // check for max transaction per user in one block + txCounter[p.TxKeyID]++ + if txCounter[p.TxKeyID] > syspar.GetMaxBlockUserTx() { + return utils.ErrInfo(fmt.Errorf("max_block_user_transactions")) + } + + if err := checkTransaction(p, b.Header.Time, false); err != nil { + return err + } + } + + result, err := b.CheckHash() + if err != nil { + return utils.ErrInfo(err) + } + if !result { + logger.WithFields(log.Fields{"type": consts.InvalidObject}).Error("incorrect signature") + return fmt.Errorf("incorrect signature / p.PrevBlock.BlockId: %d", b.PrevHeader.BlockID) + } + return nil +} + +// CheckHash is checking hash +func (b *Block) CheckHash() (bool, error) { + logger := b.GetLogger() + if b.Header.BlockID == 1 { + return true, nil + } + // check block signature + if b.PrevHeader != nil { + nodePublicKey, err := syspar.GetNodePublicKeyByPosition(b.Header.NodePosition) + if err != nil { + return false, utils.ErrInfo(err) + } + if len(nodePublicKey) == 0 { + logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("node public key is empty") + return false, utils.ErrInfo(fmt.Errorf("empty nodePublicKey")) + } + // check the signature + forSign := fmt.Sprintf("0,%d,%x,%d,%d,%d,%d,%s", b.Header.BlockID, b.PrevHeader.Hash, + b.Header.Time, b.Header.EcosystemID, b.Header.KeyID, b.Header.NodePosition, b.MrklRoot) + + resultCheckSign, err := utils.CheckSign([][]byte{nodePublicKey}, forSign, b.Header.Sign, true) + if err != nil { + logger.WithFields(log.Fields{"error": err, "type": consts.CryptoError}).Error("checking block header sign") + return false, utils.ErrInfo(fmt.Errorf("err: %v / block.PrevHeader.BlockID: %d / block.PrevHeader.Hash: %x / ", err, b.PrevHeader.BlockID, b.PrevHeader.Hash)) + } + + return resultCheckSign, nil + } + 
+ return true, nil +} + +// MarshallBlock is marshalling block +func MarshallBlock(header *utils.BlockData, trData [][]byte, prevHash []byte, key string) ([]byte, error) { + var mrklArray [][]byte + var blockDataTx []byte + var signed []byte + logger := log.WithFields(log.Fields{"block_id": header.BlockID, "block_hash": header.Hash, "block_time": header.Time, "block_version": header.Version, "block_wallet_id": header.KeyID, "block_state_id": header.EcosystemID}) + + for _, tr := range trData { + doubleHash, err := crypto.DoubleHash(tr) + if err != nil { + logger.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("double hashing transaction") + return nil, err + } + mrklArray = append(mrklArray, converter.BinToHex(doubleHash)) + blockDataTx = append(blockDataTx, converter.EncodeLengthPlusData(tr)...) + } + + if key != "" { + if len(mrklArray) == 0 { + mrklArray = append(mrklArray, []byte("0")) + } + mrklRoot := utils.MerkleTreeRoot(mrklArray) + + forSign := fmt.Sprintf("0,%d,%x,%d,%d,%d,%d,%s", + header.BlockID, prevHash, header.Time, header.EcosystemID, header.KeyID, header.NodePosition, mrklRoot) + + var err error + signed, err = crypto.Sign(key, forSign) + if err != nil { + logger.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("signing blocko") + return nil, err + } + } + + var buf bytes.Buffer + // fill header + buf.Write(converter.DecToBin(header.Version, 2)) + buf.Write(converter.DecToBin(header.BlockID, 4)) + buf.Write(converter.DecToBin(header.Time, 4)) + buf.Write(converter.DecToBin(header.EcosystemID, 4)) + buf.Write(converter.EncodeLenInt64InPlace(header.KeyID)) + buf.Write(converter.DecToBin(header.NodePosition, 1)) + buf.Write(converter.EncodeLengthPlusData(signed)) + // data + buf.Write(blockDataTx) + + return buf.Bytes(), nil +} + +type parserCache struct { + mutex sync.RWMutex + cache map[string]*Parser +} + +func (pc *parserCache) Get(hash string) (p *Parser, ok bool) { + pc.mutex.RLock() + defer pc.mutex.RUnlock() + + p, ok = pc.cache[hash] + return +} + +func (pc *parserCache) Set(p *Parser) { + pc.mutex.Lock() + defer pc.mutex.Unlock() + + pc.cache[string(p.TxHash)] = p +} + +func (pc *parserCache) Clean() { + pc.mutex.Lock() + defer pc.mutex.Unlock() + + pc.cache = make(map[string]*Parser) +} + +// CleanCache cleans cache of transaction parsers +func CleanCache() { + txParserCache.Clean() +} From 943e27f0fe9749b29cf35011e7a02fa6cdbe70ab Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Fri, 6 Jul 2018 07:15:27 +0300 Subject: [PATCH 140/169] add tx_hash to log --- packages/parser/common_parse_data_full.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/parser/common_parse_data_full.go b/packages/parser/common_parse_data_full.go index bad573b34..a61dec2a7 100644 --- a/packages/parser/common_parse_data_full.go +++ b/packages/parser/common_parse_data_full.go @@ -579,7 +579,7 @@ func checkTransaction(p *Parser, checkTime int64, checkForDupTr bool) error { if err != nil { return err } - logger := log.WithFields(log.Fields{"tx_type": p.dataType, "tx_time": p.TxTime, "tx_state_id": p.TxEcosystemID}) + logger := log.WithFields(log.Fields{"tx_hash": p.TxHash, "tx_type": p.dataType, "tx_time": p.TxTime, "tx_state_id": p.TxEcosystemID}) // time in the transaction cannot be more than MAX_TX_FORW seconds of block time if p.TxTime-consts.MAX_TX_FORW > checkTime { logger.WithFields(log.Fields{"tx_max_forw": consts.MAX_TX_FORW, "type": consts.ParameterExceeded}).Error("time in the tx cannot be more than MAX_TX_FORW 
seconds of block time ") From d657c915eecf642f82e7251c56b625d979d381cd Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Fri, 6 Jul 2018 08:39:51 +0300 Subject: [PATCH 141/169] delete check dup --- packages/parser/common_parse_data_full.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/parser/common_parse_data_full.go b/packages/parser/common_parse_data_full.go index a61dec2a7..70fea778d 100644 --- a/packages/parser/common_parse_data_full.go +++ b/packages/parser/common_parse_data_full.go @@ -605,7 +605,7 @@ func checkTransaction(p *Parser, checkTime int64, checkForDupTr bool) error { } func (p *Parser) CheckTransaction(checkTime int64) error { - return checkTransaction(p, checkTime, true) + return checkTransaction(p, checkTime, false) } // CheckTransaction is checking transaction From 0fb2ad2f2473cca376cd4d66f14919c7f31c9265 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Fri, 6 Jul 2018 13:02:40 +0300 Subject: [PATCH 142/169] merge with refactored logic --- packages/daemons/block_generator.go | 4 +- packages/parser/common_parse_data_full.go | 935 ---------------------- 2 files changed, 2 insertions(+), 937 deletions(-) delete mode 100644 packages/parser/common_parse_data_full.go diff --git a/packages/daemons/block_generator.go b/packages/daemons/block_generator.go index 42e3d65d8..94f82783b 100644 --- a/packages/daemons/block_generator.go +++ b/packages/daemons/block_generator.go @@ -182,8 +182,8 @@ func processTransactions(logger *log.Entry) ([]*model.Transaction, error) { continue } - if err := p.CheckTransaction(time.Now().Unix()); err != nil { - p.ProcessBadTransaction(err) + if err := p.Check(time.Now().Unix(), false); err != nil { + transaction.MarkTransactionBad(p.DbTransaction, p.TxHash, err.Error()) continue } diff --git a/packages/parser/common_parse_data_full.go b/packages/parser/common_parse_data_full.go deleted file mode 100644 index 70fea778d..000000000 --- a/packages/parser/common_parse_data_full.go +++ /dev/null @@ -1,935 +0,0 @@ -// Copyright 2016 The go-daylight Authors -// This file is part of the go-daylight library. -// -// The go-daylight library is free software: you can redistribute it and/or modify -// it under the terms of the GNU Lesser General Public License as published by -// the Free Software Foundation, either version 3 of the License, or -// (at your option) any later version. -// -// The go-daylight library is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Lesser General Public License for more details. -// -// You should have received a copy of the GNU Lesser General Public License -// along with the go-daylight library. If not, see . 
- -package parser - -import ( - "bytes" - "encoding/hex" - "fmt" - "strings" - "sync" - "time" - - "github.com/GenesisKernel/go-genesis/packages/conf/syspar" - "github.com/GenesisKernel/go-genesis/packages/consts" - "github.com/GenesisKernel/go-genesis/packages/converter" - "github.com/GenesisKernel/go-genesis/packages/crypto" - "github.com/GenesisKernel/go-genesis/packages/model" - "github.com/GenesisKernel/go-genesis/packages/script" - "github.com/GenesisKernel/go-genesis/packages/smart" - "github.com/GenesisKernel/go-genesis/packages/utils" - "github.com/GenesisKernel/go-genesis/packages/utils/tx" - - "github.com/shopspring/decimal" - log "github.com/sirupsen/logrus" - "gopkg.in/vmihailenco/msgpack.v2" -) - -var txParserCache = &parserCache{cache: make(map[string]*Parser)} - -// Block is storing block data -type Block struct { - Header utils.BlockData - PrevHeader *utils.BlockData - MrklRoot []byte - BinData []byte - Parsers []*Parser - SysUpdate bool - GenBlock bool // it equals true when we are generating a new block - StopCount int // The count of good tx in the block -} - -func (b Block) String() string { - return fmt.Sprintf("header: %s, prevHeader: %s", b.Header, b.PrevHeader) -} - -// GetLogger is returns logger -func (b Block) GetLogger() *log.Entry { - return log.WithFields(log.Fields{"block_id": b.Header.BlockID, "block_time": b.Header.Time, "block_wallet_id": b.Header.KeyID, - "block_state_id": b.Header.EcosystemID, "block_hash": b.Header.Hash, "block_version": b.Header.Version}) -} - -// InsertBlockWOForks is inserting blocks -func InsertBlockWOForks(data []byte, genBlock, firstBlock bool) error { - block, err := ProcessBlockWherePrevFromBlockchainTable(data, !firstBlock) - if err != nil { - return err - } - block.GenBlock = genBlock - if err := block.CheckBlock(); err != nil { - return err - } - - err = block.PlayBlockSafe() - if err != nil { - return err - } - - log.WithFields(log.Fields{"block_id": block.Header.BlockID}).Debug("block was inserted successfully") - return nil -} - -// PlayBlockSafe is inserting block safely -func (b *Block) PlayBlockSafe() error { - logger := b.GetLogger() - dbTransaction, err := model.StartTransaction() - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("starting db transaction") - return err - } - - err = b.playBlock(dbTransaction) - if b.GenBlock && b.StopCount > 0 { - doneTx := b.Parsers[:b.StopCount] - trData := make([][]byte, 0, b.StopCount) - for _, tr := range doneTx { - trData = append(trData, tr.TxFullData) - } - NodePrivateKey, _, err := utils.GetNodeKeys() - if err != nil || len(NodePrivateKey) < 1 { - log.WithFields(log.Fields{"type": consts.NodePrivateKeyFilename, "error": err}).Error("reading node private key") - return err - } - - newBlockData, err := MarshallBlock(&b.Header, trData, b.PrevHeader.Hash, NodePrivateKey) - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("marshalling new block") - return err - } - - isFirstBlock := b.Header.BlockID == 1 - nb, err := parseBlock(bytes.NewBuffer(newBlockData), isFirstBlock) - if err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("parsing new block") - return err - } - b.BinData = newBlockData - b.Parsers = nb.Parsers - b.MrklRoot = nb.MrklRoot - b.SysUpdate = nb.SysUpdate - err = nil - } else if err != nil { - dbTransaction.Rollback() - return err - } - - if err := UpdBlockInfo(dbTransaction, b); err != nil { - dbTransaction.Rollback() - return err - } - - if err 
:= InsertIntoBlockchain(dbTransaction, b); err != nil { - dbTransaction.Rollback() - return err - } - - dbTransaction.Commit() - if b.SysUpdate { - b.SysUpdate = false - if err = syspar.SysUpdate(nil); err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") - return err - } - } - return nil -} - -// ProcessBlockWherePrevFromMemory is processing block with in memory previous block -func ProcessBlockWherePrevFromMemory(data []byte) (*Block, error) { - if int64(len(data)) > syspar.GetMaxBlockSize() { - log.WithFields(log.Fields{"size": len(data), "max_size": syspar.GetMaxBlockSize(), "type": consts.ParameterExceeded}).Error("binary block size exceeds max block size") - return nil, utils.ErrInfo(fmt.Errorf(`len(binaryBlock) > variables.Int64["max_block_size"]`)) - } - - buf := bytes.NewBuffer(data) - if buf.Len() == 0 { - log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("block data is empty") - return nil, fmt.Errorf("empty buffer") - } - - block, err := parseBlock(buf, false) - if err != nil { - return nil, err - } - block.BinData = data - - if err := block.readPreviousBlockFromMemory(); err != nil { - return nil, err - } - return block, nil -} - -// ProcessBlockWherePrevFromBlockchainTable is processing block with in table previous block -func ProcessBlockWherePrevFromBlockchainTable(data []byte, checkSize bool) (*Block, error) { - if checkSize && int64(len(data)) > syspar.GetMaxBlockSize() { - log.WithFields(log.Fields{"check_size": checkSize, "size": len(data), "max_size": syspar.GetMaxBlockSize(), "type": consts.ParameterExceeded}).Error("binary block size exceeds max block size") - return nil, utils.ErrInfo(fmt.Errorf(`len(binaryBlock) > variables.Int64["max_block_size"]`)) - } - - buf := bytes.NewBuffer(data) - if buf.Len() == 0 { - log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("buffer is empty") - return nil, fmt.Errorf("empty buffer") - } - - block, err := parseBlock(buf, !checkSize) - if err != nil { - return nil, err - } - block.BinData = data - - if err := block.readPreviousBlockFromBlockchainTable(); err != nil { - return nil, err - } - - return block, nil -} - -func parseBlock(blockBuffer *bytes.Buffer, firstBlock bool) (*Block, error) { - header, err := ParseBlockHeader(blockBuffer, !firstBlock) - if err != nil { - return nil, err - } - - logger := log.WithFields(log.Fields{"block_id": header.BlockID, "block_time": header.Time, "block_wallet_id": header.KeyID, - "block_state_id": header.EcosystemID, "block_hash": header.Hash, "block_version": header.Version}) - parsers := make([]*Parser, 0) - - var mrklSlice [][]byte - - // parse transactions - for blockBuffer.Len() > 0 { - transactionSize, err := converter.DecodeLengthBuf(blockBuffer) - if err != nil { - logger.WithFields(log.Fields{"type": consts.UnmarshallingError, "error": err}).Error("transaction size is 0") - return nil, fmt.Errorf("bad block format (%s)", err) - } - if blockBuffer.Len() < int(transactionSize) { - logger.WithFields(log.Fields{"size": blockBuffer.Len(), "match_size": int(transactionSize), "type": consts.SizeDoesNotMatch}).Error("transaction size does not matches encoded length") - return nil, fmt.Errorf("bad block format (transaction len is too big: %d)", transactionSize) - } - - if transactionSize == 0 { - logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("transaction size is 0") - return nil, fmt.Errorf("transaction size is 0") - } - - bufTransaction := bytes.NewBuffer(blockBuffer.Next(int(transactionSize))) - p, err 
:= ParseTransaction(bufTransaction) - if err != nil { - if p != nil && p.TxHash != nil { - p.processBadTransaction(p.TxHash, err.Error()) - } - return nil, fmt.Errorf("parse transaction error(%s)", err) - } - p.BlockData = &header - - parsers = append(parsers, p) - - // build merkle tree - if len(p.TxFullData) > 0 { - dSha256Hash, err := crypto.DoubleHash(p.TxFullData) - if err != nil { - logger.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("double hashing tx full data") - return nil, err - } - dSha256Hash = converter.BinToHex(dSha256Hash) - mrklSlice = append(mrklSlice, dSha256Hash) - } - } - - if len(mrklSlice) == 0 { - mrklSlice = append(mrklSlice, []byte("0")) - } - - return &Block{ - Header: header, - Parsers: parsers, - MrklRoot: utils.MerkleTreeRoot(mrklSlice), - }, nil -} - -// ParseBlockHeader is parses block header -func ParseBlockHeader(binaryBlock *bytes.Buffer, checkMaxSize bool) (utils.BlockData, error) { - var block utils.BlockData - var err error - - if binaryBlock.Len() < 9 { - log.WithFields(log.Fields{"size": binaryBlock.Len(), "type": consts.SizeDoesNotMatch}).Error("binary block size is too small") - return utils.BlockData{}, fmt.Errorf("bad binary block length") - } - - blockVersion := int(converter.BinToDec(binaryBlock.Next(2))) - - if checkMaxSize && int64(binaryBlock.Len()) > syspar.GetMaxBlockSize() { - log.WithFields(log.Fields{"size": binaryBlock.Len(), "max_size": syspar.GetMaxBlockSize(), "type": consts.ParameterExceeded}).Error("binary block size exceeds max block size") - err = fmt.Errorf(`len(binaryBlock) > variables.Int64["max_block_size"] %v > %v`, - binaryBlock.Len(), syspar.GetMaxBlockSize()) - - return utils.BlockData{}, err - } - - block.BlockID = converter.BinToDec(binaryBlock.Next(4)) - block.Time = converter.BinToDec(binaryBlock.Next(4)) - block.Version = blockVersion - block.EcosystemID = converter.BinToDec(binaryBlock.Next(4)) - block.KeyID, err = converter.DecodeLenInt64Buf(binaryBlock) - if err != nil { - log.WithFields(log.Fields{"type": consts.UnmarshallingError, "block_id": block.BlockID, "block_time": block.Time, "block_version": block.Version, "error": err}).Error("decoding binary block walletID") - return utils.BlockData{}, err - } - block.NodePosition = converter.BinToDec(binaryBlock.Next(1)) - - if block.BlockID > 1 { - signSize, err := converter.DecodeLengthBuf(binaryBlock) - if err != nil { - log.WithFields(log.Fields{"type": consts.UnmarshallingError, "block_id": block.BlockID, "time": block.Time, "version": block.Version, "error": err}).Error("decoding binary sign size") - return utils.BlockData{}, err - } - if binaryBlock.Len() < signSize { - log.WithFields(log.Fields{"type": consts.UnmarshallingError, "block_id": block.BlockID, "time": block.Time, "version": block.Version, "error": err}).Error("decoding binary sign") - return utils.BlockData{}, fmt.Errorf("bad block format (no sign)") - } - block.Sign = binaryBlock.Next(int(signSize)) - } else { - binaryBlock.Next(1) - } - - return block, nil -} - -// ParseTransaction is parsing transaction -func ParseTransaction(buffer *bytes.Buffer) (*Parser, error) { - if buffer.Len() == 0 { - log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("empty transaction buffer") - return nil, fmt.Errorf("empty transaction buffer") - } - - hash, err := crypto.Hash(buffer.Bytes()) - // or DoubleHash ? 
- if err != nil { - log.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("hashing transaction") - return nil, err - } - - if p, ok := txParserCache.Get(string(hash)); ok { - return p, nil - } - - p := new(Parser) - p.TxHash = hash - p.TxUsedCost = decimal.New(0, 0) - p.TxFullData = buffer.Bytes() - - txType := int64(buffer.Bytes()[0]) - p.dataType = int(txType) - - // smart contract transaction - if IsContractTransaction(int(txType)) { - // skip byte with transaction type - buffer.Next(1) - p.TxBinaryData = buffer.Bytes() - if err := parseContractTransaction(p, buffer); err != nil { - return nil, err - } - - // TODO: check for what it was here: - /*if err := p.CallContract(smart.CallInit | smart.CallCondition); err != nil { - return nil, err - }*/ - - // struct transaction (only first block transaction for now) - } else if consts.IsStruct(int(txType)) { - p.TxBinaryData = buffer.Bytes() - if err := parseStructTransaction(p, buffer, txType); err != nil { - return p, err - } - - // all other transactions - } else { - // skip byte with transaction type - buffer.Next(1) - p.TxBinaryData = buffer.Bytes() - if err := parseRegularTransaction(p, buffer, txType); err != nil { - return p, err - } - } - - txParserCache.Set(p) - - return p, nil -} - -// IsContractTransaction checks txType -func IsContractTransaction(txType int) bool { - return txType > 127 -} - -func parseContractTransaction(p *Parser, buf *bytes.Buffer) error { - smartTx := tx.SmartContract{} - if err := msgpack.Unmarshal(buf.Bytes(), &smartTx); err != nil { - log.WithFields(log.Fields{"tx_type": p.dataType, "tx_hash": p.TxHash, "error": err, "type": consts.UnmarshallingError}).Error("unmarshalling smart tx msgpack") - return err - } - p.TxPtr = nil - p.TxSmart = &smartTx - p.TxTime = smartTx.Time - p.TxEcosystemID = (smartTx.EcosystemID) - p.TxKeyID = smartTx.KeyID - - contract := smart.GetContractByID(int32(smartTx.Type)) - if contract == nil { - log.WithFields(log.Fields{"contract_type": smartTx.Type, "type": consts.NotFound}).Error("unknown contract") - return fmt.Errorf(`unknown contract %d`, smartTx.Type) - } - forsign := []string{smartTx.ForSign()} - - p.TxContract = contract - p.TxHeader = &smartTx.Header - - input := smartTx.Data - p.TxData = make(map[string]interface{}) - - if contract.Block.Info.(*script.ContractInfo).Tx != nil { - for _, fitem := range *contract.Block.Info.(*script.ContractInfo).Tx { - var err error - var v interface{} - var forv string - var isforv bool - - if fitem.ContainsTag(script.TagFile) { - var ( - data []byte - file *tx.File - ) - if err := converter.BinUnmarshal(&input, &data); err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling file") - return err - } - if err := msgpack.Unmarshal(data, &file); err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("unmarshalling file msgpack") - return err - } - - p.TxData[fitem.Name] = file.Data - p.TxData[fitem.Name+"MimeType"] = file.MimeType - - forsign = append(forsign, file.MimeType, file.Hash) - continue - } - - switch fitem.Type.String() { - case `uint64`: - var val uint64 - converter.BinUnmarshal(&input, &val) - v = val - case `float64`: - var val float64 - converter.BinUnmarshal(&input, &val) - v = val - case `int64`: - v, err = converter.DecodeLenInt64(&input) - case script.Decimal: - var s string - if err := converter.BinUnmarshal(&input, &s); err != nil { - log.WithFields(log.Fields{"error": err, "type": 
consts.UnmarshallingError}).Error("bin unmarshalling script.Decimal") - return err - } - v, err = decimal.NewFromString(s) - case `string`: - var s string - if err := converter.BinUnmarshal(&input, &s); err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling string") - return err - } - v = s - case `[]uint8`: - var b []byte - if err := converter.BinUnmarshal(&input, &b); err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling string") - return err - } - v = hex.EncodeToString(b) - case `[]interface {}`: - count, err := converter.DecodeLength(&input) - if err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling []interface{}") - return err - } - isforv = true - list := make([]interface{}, 0) - for count > 0 { - length, err := converter.DecodeLength(&input) - if err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError}).Error("bin unmarshalling tx length") - return err - } - if len(input) < int(length) { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError, "length": int(length), "slice length": len(input)}).Error("incorrect tx size") - return fmt.Errorf(`input slice is short`) - } - list = append(list, string(input[:length])) - input = input[length:] - count-- - } - if len(list) > 0 { - slist := make([]string, len(list)) - for j, lval := range list { - slist[j] = lval.(string) - } - forv = strings.Join(slist, `,`) - } - v = list - } - if p.TxData[fitem.Name] == nil { - p.TxData[fitem.Name] = v - } - if err != nil { - return err - } - if strings.Index(fitem.Tags, `image`) >= 0 { - continue - } - if isforv { - v = forv - } - forsign = append(forsign, fmt.Sprintf("%v", v)) - } - } - p.TxData[`forsign`] = strings.Join(forsign, ",") - - return nil -} - -func parseStructTransaction(p *Parser, buf *bytes.Buffer, txType int64) error { - trParser, err := GetParser(p, consts.TxTypes[int(txType)]) - if err != nil { - return err - } - p.txParser = trParser - - p.TxPtr = consts.MakeStruct(consts.TxTypes[int(txType)]) - input := buf.Bytes() - if err := converter.BinUnmarshal(&input, p.TxPtr); err != nil { - log.WithFields(log.Fields{"error": err, "type": consts.UnmarshallingError, "tx_type": int(txType)}).Error("getting parser for tx type") - return err - } - - head := consts.Header(p.TxPtr) - p.TxKeyID = head.KeyID - p.TxTime = int64(head.Time) - p.TxType = txType - - err = trParser.Validate() - if err != nil { - return utils.ErrInfo(err) - } - - return nil -} - -func parseRegularTransaction(p *Parser, buf *bytes.Buffer, txType int64) error { - trParser, err := GetParser(p, consts.TxTypes[int(txType)]) - if err != nil { - return err - } - p.txParser = trParser - - err = trParser.Init() - if err != nil { - log.WithFields(log.Fields{"error": err, "tx_type": int(txType)}).Error("parser init") - return err - } - header := trParser.Header() - if header == nil { - log.WithFields(log.Fields{"error": err, "tx_type": int(txType)}).Error("parser get header") - return fmt.Errorf("tx header is nil") - } - - p.TxHeader = header - p.TxTime = header.Time - p.TxType = txType - p.TxEcosystemID = (header.EcosystemID) - p.TxKeyID = header.KeyID - - err = trParser.Validate() - if _, ok := err.(error); ok { - return utils.ErrInfo(err.(error)) - } - - return nil -} - -func checkTransaction(p *Parser, checkTime int64, checkForDupTr bool) error { - err := CheckLogTx(p.TxFullData, checkForDupTr, 
false) - if err != nil { - return err - } - logger := log.WithFields(log.Fields{"tx_hash": p.TxHash, "tx_type": p.dataType, "tx_time": p.TxTime, "tx_state_id": p.TxEcosystemID}) - // time in the transaction cannot be more than MAX_TX_FORW seconds of block time - if p.TxTime-consts.MAX_TX_FORW > checkTime { - logger.WithFields(log.Fields{"tx_max_forw": consts.MAX_TX_FORW, "type": consts.ParameterExceeded}).Error("time in the tx cannot be more than MAX_TX_FORW seconds of block time ") - return utils.ErrInfo(fmt.Errorf("transaction time is too big")) - } - - // time in transaction cannot be less than -24 of block time - if p.TxTime < checkTime-consts.MAX_TX_BACK { - logger.WithFields(log.Fields{"tx_max_back": consts.MAX_TX_BACK, "type": consts.ParameterExceeded}).Error("time in the tx cannot be less then -24 of block time") - return utils.ErrInfo(fmt.Errorf("incorrect transaction time")) - } - - if p.TxContract == nil { - if p.BlockData != nil && p.BlockData.BlockID != 1 { - if p.TxKeyID == 0 { - logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("Empty user id") - return utils.ErrInfo(fmt.Errorf("empty user id")) - } - } - } - - return nil -} - -func (p *Parser) CheckTransaction(checkTime int64) error { - return checkTransaction(p, checkTime, false) -} - -// CheckTransaction is checking transaction -func CheckTransaction(data []byte) (*tx.Header, error) { - trBuff := bytes.NewBuffer(data) - p, err := ParseTransaction(trBuff) - if err != nil { - return nil, err - } - - err = checkTransaction(p, time.Now().Unix(), true) - if err != nil { - p.ProcessBadTransaction(err) - return nil, err - } - - return p.TxHeader, nil -} - -func (b *Block) readPreviousBlockFromMemory() error { - return nil -} - -func (b *Block) readPreviousBlockFromBlockchainTable() error { - if b.Header.BlockID == 1 { - b.PrevHeader = &utils.BlockData{} - return nil - } - - var err error - b.PrevHeader, err = GetBlockDataFromBlockChain(b.Header.BlockID - 1) - if err != nil { - return utils.ErrInfo(fmt.Errorf("can't get block %d", b.Header.BlockID-1)) - } - return nil -} - -func playTransaction(p *Parser) (string, error) { - // smart-contract - if p.TxContract != nil { - // check that there are enough money in CallContract - return p.CallContract(smart.CallInit | smart.CallCondition | smart.CallAction) - } - - if p.txParser == nil { - return "", utils.ErrInfo(fmt.Errorf("can't find parser for %d", p.TxType)) - } - - err := p.txParser.Action() - if err != nil { - return "", err - } - - return "", nil -} - -func (b *Block) playBlock(dbTransaction *model.DbTransaction) error { - logger := b.GetLogger() - if _, err := model.DeleteUsedTransactions(dbTransaction); err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("delete used transactions") - return err - } - limits := NewLimits(b) - for curTx, p := range b.Parsers { - var ( - msg string - err error - ) - p.DbTransaction = dbTransaction - - err = dbTransaction.Savepoint(curTx) - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("using savepoint") - return err - } - msg, err = playTransaction(p) - if err == nil && p.TxSmart != nil { - err = limits.CheckLimit(p) - } - if err != nil { - if err == errNetworkStopping { - return err - } - - if b.GenBlock && err == ErrLimitStop { - b.StopCount = curTx - model.IncrementTxAttemptCount(p.DbTransaction, p.TxHash) - } - errRoll := dbTransaction.RollbackSavepoint(curTx) - if errRoll != nil { - logger.WithFields(log.Fields{"type": 
consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("rolling back to previous savepoint") - return errRoll - } - if b.GenBlock && err == ErrLimitStop { - break - } - // skip this transaction - model.MarkTransactionUsed(p.DbTransaction, p.TxHash) - p.processBadTransaction(p.TxHash, err.Error()) - if p.SysUpdate { - if err = syspar.SysUpdate(p.DbTransaction); err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("updating syspar") - } - p.SysUpdate = false - } - continue - } - err = dbTransaction.ReleaseSavepoint(curTx) - if err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("releasing savepoint") - } - if p.SysUpdate { - b.SysUpdate = true - p.SysUpdate = false - } - - if _, err := model.MarkTransactionUsed(p.DbTransaction, p.TxHash); err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("marking transaction used") - return err - } - - // update status - ts := &model.TransactionStatus{} - if err := ts.UpdateBlockMsg(p.DbTransaction, b.Header.BlockID, msg, p.TxHash); err != nil { - logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": p.TxHash}).Error("updating transaction status block id") - return err - } - if err := InsertInLogTx(p.DbTransaction, p.TxFullData, p.TxTime); err != nil { - return utils.ErrInfo(err) - } - } - return nil -} - -// CheckBlock is checking block -func (b *Block) CheckBlock() error { - logger := b.GetLogger() - // exclude blocks from future - if b.Header.Time > time.Now().Unix() { - logger.WithFields(log.Fields{"type": consts.ParameterExceeded}).Error("block time is larger than now") - return utils.ErrInfo(fmt.Errorf("incorrect block time - block.Header.Time > time.Now().Unix()")) - } - if b.PrevHeader == nil || b.PrevHeader.BlockID != b.Header.BlockID-1 { - if err := b.readPreviousBlockFromBlockchainTable(); err != nil { - logger.WithFields(log.Fields{"type": consts.InvalidObject}).Error("block id is larger then previous more than on 1") - return utils.ErrInfo(err) - } - } - - if b.Header.BlockID == 1 { - return nil - } - - // is this block too early? 
Allowable error = error_time - if b.PrevHeader != nil { - if b.Header.BlockID != b.PrevHeader.BlockID+1 { - logger.WithFields(log.Fields{"type": consts.InvalidObject}).Error("block id is larger then previous more than on 1") - return utils.ErrInfo(fmt.Errorf("incorrect block_id %d != %d +1", b.Header.BlockID, b.PrevHeader.BlockID)) - } - - // skip time validation for first block - if b.Header.BlockID > 1 { - blockTimeCalculator, err := utils.BuildBlockTimeCalculator(nil) - if err != nil { - logger.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("building block time calculator") - return err - } - - validBlockTime, err := blockTimeCalculator.ValidateBlock(b.Header.NodePosition, time.Unix(b.Header.Time, 0)) - if err != nil { - logger.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("calculating block time") - return err - } - - if !validBlockTime { - logger.WithFields(log.Fields{"type": consts.BlockError, "error": err}).Error("incorrect block time") - return utils.ErrInfo(fmt.Errorf("incorrect block time %d", b.PrevHeader.Time)) - } - } - } - - // check each transaction - txCounter := make(map[int64]int) - txHashes := make(map[string]struct{}) - for _, p := range b.Parsers { - hexHash := string(converter.BinToHex(p.TxHash)) - // check for duplicate transactions - if _, ok := txHashes[hexHash]; ok { - logger.WithFields(log.Fields{"tx_hash": hexHash, "type": consts.DuplicateObject}).Error("duplicate transaction") - return utils.ErrInfo(fmt.Errorf("duplicate transaction %s", hexHash)) - } - txHashes[hexHash] = struct{}{} - - // check for max transaction per user in one block - txCounter[p.TxKeyID]++ - if txCounter[p.TxKeyID] > syspar.GetMaxBlockUserTx() { - return utils.ErrInfo(fmt.Errorf("max_block_user_transactions")) - } - - if err := checkTransaction(p, b.Header.Time, false); err != nil { - return err - } - } - - result, err := b.CheckHash() - if err != nil { - return utils.ErrInfo(err) - } - if !result { - logger.WithFields(log.Fields{"type": consts.InvalidObject}).Error("incorrect signature") - return fmt.Errorf("incorrect signature / p.PrevBlock.BlockId: %d", b.PrevHeader.BlockID) - } - return nil -} - -// CheckHash is checking hash -func (b *Block) CheckHash() (bool, error) { - logger := b.GetLogger() - if b.Header.BlockID == 1 { - return true, nil - } - // check block signature - if b.PrevHeader != nil { - nodePublicKey, err := syspar.GetNodePublicKeyByPosition(b.Header.NodePosition) - if err != nil { - return false, utils.ErrInfo(err) - } - if len(nodePublicKey) == 0 { - logger.WithFields(log.Fields{"type": consts.EmptyObject}).Error("node public key is empty") - return false, utils.ErrInfo(fmt.Errorf("empty nodePublicKey")) - } - // check the signature - forSign := fmt.Sprintf("0,%d,%x,%d,%d,%d,%d,%s", b.Header.BlockID, b.PrevHeader.Hash, - b.Header.Time, b.Header.EcosystemID, b.Header.KeyID, b.Header.NodePosition, b.MrklRoot) - - resultCheckSign, err := utils.CheckSign([][]byte{nodePublicKey}, forSign, b.Header.Sign, true) - if err != nil { - logger.WithFields(log.Fields{"error": err, "type": consts.CryptoError}).Error("checking block header sign") - return false, utils.ErrInfo(fmt.Errorf("err: %v / block.PrevHeader.BlockID: %d / block.PrevHeader.Hash: %x / ", err, b.PrevHeader.BlockID, b.PrevHeader.Hash)) - } - - return resultCheckSign, nil - } - - return true, nil -} - -// MarshallBlock is marshalling block -func MarshallBlock(header *utils.BlockData, trData [][]byte, prevHash []byte, key string) ([]byte, error) { - var mrklArray [][]byte 
- var blockDataTx []byte - var signed []byte - logger := log.WithFields(log.Fields{"block_id": header.BlockID, "block_hash": header.Hash, "block_time": header.Time, "block_version": header.Version, "block_wallet_id": header.KeyID, "block_state_id": header.EcosystemID}) - - for _, tr := range trData { - doubleHash, err := crypto.DoubleHash(tr) - if err != nil { - logger.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("double hashing transaction") - return nil, err - } - mrklArray = append(mrklArray, converter.BinToHex(doubleHash)) - blockDataTx = append(blockDataTx, converter.EncodeLengthPlusData(tr)...) - } - - if key != "" { - if len(mrklArray) == 0 { - mrklArray = append(mrklArray, []byte("0")) - } - mrklRoot := utils.MerkleTreeRoot(mrklArray) - - forSign := fmt.Sprintf("0,%d,%x,%d,%d,%d,%d,%s", - header.BlockID, prevHash, header.Time, header.EcosystemID, header.KeyID, header.NodePosition, mrklRoot) - - var err error - signed, err = crypto.Sign(key, forSign) - if err != nil { - logger.WithFields(log.Fields{"type": consts.CryptoError, "error": err}).Error("signing blocko") - return nil, err - } - } - - var buf bytes.Buffer - // fill header - buf.Write(converter.DecToBin(header.Version, 2)) - buf.Write(converter.DecToBin(header.BlockID, 4)) - buf.Write(converter.DecToBin(header.Time, 4)) - buf.Write(converter.DecToBin(header.EcosystemID, 4)) - buf.Write(converter.EncodeLenInt64InPlace(header.KeyID)) - buf.Write(converter.DecToBin(header.NodePosition, 1)) - buf.Write(converter.EncodeLengthPlusData(signed)) - // data - buf.Write(blockDataTx) - - return buf.Bytes(), nil -} - -type parserCache struct { - mutex sync.RWMutex - cache map[string]*Parser -} - -func (pc *parserCache) Get(hash string) (p *Parser, ok bool) { - pc.mutex.RLock() - defer pc.mutex.RUnlock() - - p, ok = pc.cache[hash] - return -} - -func (pc *parserCache) Set(p *Parser) { - pc.mutex.Lock() - defer pc.mutex.Unlock() - - pc.cache[string(p.TxHash)] = p -} - -func (pc *parserCache) Clean() { - pc.mutex.Lock() - defer pc.mutex.Unlock() - - pc.cache = make(map[string]*Parser) -} - -// CleanCache cleans cache of transaction parsers -func CleanCache() { - txParserCache.Clean() -} From 1cecfc9f8cbb6b82439ff6e0222ec02745758ab2 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 9 Jul 2018 13:25:57 +0300 Subject: [PATCH 143/169] move incrementTxAtempt --- packages/block/block.go | 5 ++++- packages/consts/log_types.go | 1 + packages/model/transaction.go | 24 +++++++++++++++++++++++- packages/transaction/db.go | 4 ++++ 4 files changed, 32 insertions(+), 2 deletions(-) diff --git a/packages/block/block.go b/packages/block/block.go index 4a4ca4494..426fee657 100644 --- a/packages/block/block.go +++ b/packages/block/block.go @@ -136,6 +136,9 @@ func (b *Block) Play(dbTransaction *model.DbTransaction) error { logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": t.TxHash}).Error("using savepoint") return err } + + model.IncrementTxAttemptCount(nil, t.TxHash) + msg, err = t.Play() if err == nil && t.TxSmart != nil { err = limits.CheckLimit(t) @@ -147,8 +150,8 @@ func (b *Block) Play(dbTransaction *model.DbTransaction) error { if b.GenBlock && err == ErrLimitStop { b.StopCount = curTx - model.IncrementTxAttemptCount(t.DbTransaction, t.TxHash) } + errRoll := dbTransaction.RollbackSavepoint(curTx) if errRoll != nil { logger.WithFields(log.Fields{"type": consts.DBError, "error": err, "tx_hash": t.TxHash}).Error("rolling back to previous savepoint") diff --git a/packages/consts/log_types.go 
b/packages/consts/log_types.go index 5f421a00b..37422b9cd 100644 --- a/packages/consts/log_types.go +++ b/packages/consts/log_types.go @@ -56,4 +56,5 @@ const ( SyncProcess = "SyncProcess" WrongModeError = "WrongModeError" VDEManagerError = "VDEManagerError" + BadTxError = "BadTxError" ) diff --git a/packages/model/transaction.go b/packages/model/transaction.go index 0715be6f2..e4d5c75e7 100644 --- a/packages/model/transaction.go +++ b/packages/model/transaction.go @@ -1,6 +1,11 @@ package model -import "github.com/GenesisKernel/go-genesis/packages/consts" +import ( + "fmt" + + "github.com/GenesisKernel/go-genesis/packages/consts" + log "github.com/sirupsen/logrus" +) // This constants contains values of transactions priority const ( @@ -149,6 +154,10 @@ func (t *Transaction) Create() error { // IncrementTxAttemptCount increases attempt column func IncrementTxAttemptCount(transaction *DbTransaction, transactionHash []byte) (int64, error) { + defer func() { + go logTrBigAttemptCount(transaction, transactionHash) + }() + query := GetDB(transaction).Exec("update transactions set attempt=attempt+1, used = case when attempt>10 then 1 else 0 end where hash = ?", transactionHash) return query.RowsAffected, query.Error @@ -162,3 +171,16 @@ func getTxRateByTxType(txType int8) transactionRate { return 0 } } + +func logTrBigAttemptCount(tbtx *DbTransaction, txHash []byte) { + t := Transaction{} + if err := tbtx.conn.Where("hash = ?", txHash).First(&t).Error; err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on getting tx by hash") + return + } + + if t.Attempt >= 10 { + txString := fmt.Sprintf("tx_hash: %s, tx_data: %s, tx_attempt: %d", t.Hash, t.Data, t.Attempt) + log.WithFields(log.Fields{"type": consts.BadTxError, "tx_info": txString}).Error("logging tx attempt count") + } +} diff --git a/packages/transaction/db.go b/packages/transaction/db.go index 197a22be4..2fb3c27ad 100644 --- a/packages/transaction/db.go +++ b/packages/transaction/db.go @@ -102,6 +102,10 @@ func MarkTransactionBad(dbTransaction *model.DbTransaction, hash []byte, errText if len(errText) > 255 { errText = errText[:255] } + + // set loglevel as error because default level setups to "error" + log.WithFields(log.Fields{"type": consts.BadTxError, "tx_hash": string(hash), "error": errText}).Error("tx marked as bad") + // looks like there is not hash in queue_tx in this moment qtx := &model.QueueTx{} _, err := qtx.GetByHash(dbTransaction, hash) From 778ec335ef11fbae719b055570c5616573590d25 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 9 Jul 2018 17:10:37 +0300 Subject: [PATCH 144/169] requested changes --- packages/block/block.go | 20 +++++++++++++++++++- packages/consts/consts.go | 2 ++ packages/model/transaction.go | 26 +++++++++++--------------- 3 files changed, 32 insertions(+), 16 deletions(-) diff --git a/packages/block/block.go b/packages/block/block.go index 426fee657..8950d0eb0 100644 --- a/packages/block/block.go +++ b/packages/block/block.go @@ -123,7 +123,20 @@ func (b *Block) Play(dbTransaction *model.DbTransaction) error { logger.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("delete used transactions") return err } + limits := NewLimits(b) + + txHashes := make([][]byte, 0, len(b.Transactions)) + for _, btx := range b.Transactions { + txHashes = append(txHashes, btx.TxHash) + } + + storedTxes, err := model.GetTxesByHashlist() + if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on getting txes by hashlist") + 
return err + } + for curTx, t := range b.Transactions { var ( msg string @@ -138,6 +151,12 @@ func (b *Block) Play(dbTransaction *model.DbTransaction) error { } model.IncrementTxAttemptCount(nil, t.TxHash) + if stx, ok := storedTxes[string(t.TxHash)]; ok { + stx.Attempt >= consts.MaxtTXAttempt - 1 { + txString := fmt.Sprintf("tx_hash: %s, tx_data: %s, tx_attempt: %d", t.Hash, t.Data, t.Attempt) + log.WithFields(log.Fields{"type": consts.BadTxError, "tx_info": txString}).Error("tx attempts exceeded, transaction marked as bad") + } + } msg, err = t.Play() if err == nil && t.TxSmart != nil { @@ -161,7 +180,6 @@ func (b *Block) Play(dbTransaction *model.DbTransaction) error { break } // skip this transaction - model.MarkTransactionUsed(t.DbTransaction, t.TxHash) transaction.MarkTransactionBad(t.DbTransaction, t.TxHash, err.Error()) if t.SysUpdate { if err = syspar.SysUpdate(t.DbTransaction); err != nil { diff --git a/packages/consts/consts.go b/packages/consts/consts.go index 4e455e7a7..391791ba9 100644 --- a/packages/consts/consts.go +++ b/packages/consts/consts.go @@ -93,6 +93,8 @@ const AvailableBCGap = 4 const DefaultNodesConnectDelay = 6 +const MaxTXAttempt = 10 + const ( TxTypeFirstBlock = 1 TxTypeStopNetwork = 2 diff --git a/packages/model/transaction.go b/packages/model/transaction.go index e4d5c75e7..7612ef605 100644 --- a/packages/model/transaction.go +++ b/packages/model/transaction.go @@ -1,10 +1,7 @@ package model import ( - "fmt" - "github.com/GenesisKernel/go-genesis/packages/consts" - log "github.com/sirupsen/logrus" ) // This constants contains values of transactions priority @@ -154,10 +151,6 @@ func (t *Transaction) Create() error { // IncrementTxAttemptCount increases attempt column func IncrementTxAttemptCount(transaction *DbTransaction, transactionHash []byte) (int64, error) { - defer func() { - go logTrBigAttemptCount(transaction, transactionHash) - }() - query := GetDB(transaction).Exec("update transactions set attempt=attempt+1, used = case when attempt>10 then 1 else 0 end where hash = ?", transactionHash) return query.RowsAffected, query.Error @@ -172,15 +165,18 @@ func getTxRateByTxType(txType int8) transactionRate { } } -func logTrBigAttemptCount(tbtx *DbTransaction, txHash []byte) { - t := Transaction{} - if err := tbtx.conn.Where("hash = ?", txHash).First(&t).Error; err != nil { - log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on getting tx by hash") - return +// GetTxesByHashlist returns map of hash-*Transaction +func GetTxesByHashlist(dbtx *DbTransaction, hashes [][]byte) (map[string]*Transaction, error) { + txes := []Transaction{} + if err := GetDB(dbtx).Where("hash in (?)", hashes).Find(&txes).Error; err != nil { + return nil, err } - if t.Attempt >= 10 { - txString := fmt.Sprintf("tx_hash: %s, tx_data: %s, tx_attempt: %d", t.Hash, t.Data, t.Attempt) - log.WithFields(log.Fields{"type": consts.BadTxError, "tx_info": txString}).Error("logging tx attempt count") + txMap := make(map[string]*Transaction, len(txes)) + + for _, tx := range txes { + txMap[string(tx.Hash)] = &tx } + + return txMap, nil } From 3a7e0f3463f9f4d6abbe9c75b651359272744226 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Mon, 9 Jul 2018 17:14:29 +0300 Subject: [PATCH 145/169] req changes small fixes --- packages/block/block.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/block/block.go b/packages/block/block.go index 8950d0eb0..f26b58f8e 100644 --- a/packages/block/block.go +++ b/packages/block/block.go @@ -131,7 +131,7 @@ func 
(b *Block) Play(dbTransaction *model.DbTransaction) error { txHashes = append(txHashes, btx.TxHash) } - storedTxes, err := model.GetTxesByHashlist() + storedTxes, err := model.GetTxesByHashlist(dbTransaction, txHashes) if err != nil { log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("on getting txes by hashlist") return err @@ -152,8 +152,8 @@ func (b *Block) Play(dbTransaction *model.DbTransaction) error { model.IncrementTxAttemptCount(nil, t.TxHash) if stx, ok := storedTxes[string(t.TxHash)]; ok { - stx.Attempt >= consts.MaxtTXAttempt - 1 { - txString := fmt.Sprintf("tx_hash: %s, tx_data: %s, tx_attempt: %d", t.Hash, t.Data, t.Attempt) + if stx.Attempt >= consts.MaxTXAttempt-1 { + txString := fmt.Sprintf("tx_hash: %s, tx_data: %s, tx_attempt: %d", stx.Hash, stx.Data, stx.Attempt) log.WithFields(log.Fields{"type": consts.BadTxError, "tx_info": txString}).Error("tx attempts exceeded, transaction marked as bad") } } From 3464acd5d58351cc722c9f102fde60b5d9cb0127 Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Tue, 10 Jul 2018 19:53:52 +0500 Subject: [PATCH 146/169] feature/1002 history (#436) * Added GetHistoryRow func * Added RollbackId to template history --- packages/api/smart_test.go | 19 ++++++++++++ packages/smart/funcs.go | 63 ++++++++++++++++++++++++++++++++++---- packages/template/funcs.go | 19 ++++++++---- 3 files changed, 89 insertions(+), 12 deletions(-) diff --git a/packages/api/smart_test.go b/packages/api/smart_test.go index a300753a7..f0856a86d 100644 --- a/packages/api/smart_test.go +++ b/packages/api/smart_test.go @@ -935,6 +935,23 @@ func TestPageHistory(t *testing.T) { }`}, "ApplicationId": {`1`}, `Conditions`: {`true`}} assert.NoError(t, postTx(`NewContract`, &form)) + form = url.Values{`Value`: {`contract GetRow` + name + ` { + data { + IdPage int + } + action { + var ret array + var row got map + ret = GetPageHistory($IdPage) + row = ret[1] + got = GetPageHistoryRow($IdPage, Int(row["id"])) + if got["block_id"] != row["block_id"] { + error "GetPageHistory" + } + } + }`}, "ApplicationId": {`1`}, `Conditions`: {`true`}} + assert.NoError(t, postTx(`NewContract`, &form)) + _, msg, err := postTxResult(`Get`+name, &url.Values{"IdPage": {id}, "IdMenu": {idmenu}, "IdCont": {idCont}}) assert.NoError(t, err) @@ -950,6 +967,8 @@ func TestPageHistory(t *testing.T) { assert.EqualError(t, postTx(`Get`+name, &url.Values{"IdPage": {`1000000`}, "IdMenu": {idmenu}, "IdCont": {idCont}}), `{"type":"panic","error":"Record has not been found"}`) + assert.NoError(t, postTx(`GetRow`+name, &url.Values{"IdPage": {id}})) + var retTemp contentResult assert.NoError(t, sendPost(`content`, &url.Values{`template`: {fmt.Sprintf(`GetPageHistory(MySrc,%s)`, id)}}, &retTemp)) diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 2c7179d53..74a3ed9e7 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -256,6 +256,10 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { "GetBlockHistory": GetBlockHistory, "GetMenuHistory": GetMenuHistory, "GetContractHistory": GetContractHistory, + "GetPageHistoryRow": GetPageHistoryRow, + "GetBlockHistoryRow": GetBlockHistoryRow, + "GetMenuHistoryRow": GetMenuHistoryRow, + "GetContractHistoryRow": GetContractHistoryRow, } switch vt { @@ -1768,7 +1772,8 @@ func GetVDEList(sc *SmartContract) (map[string]string, error) { return vdemanager.Manager.ListProcess() } -func GetHistory(transaction *model.DbTransaction, ecosystem int64, tableName string, id int64) ([]interface{}, error) { +func GetHistory(transaction 
*model.DbTransaction, ecosystem int64, tableName string, + id, idRollback int64) ([]interface{}, error) { table := fmt.Sprintf(`%d_%s`, ecosystem, tableName) rows, err := model.GetDB(transaction).Table(table).Where("id=?", id).Rows() if err != nil { @@ -1815,7 +1820,19 @@ func GetHistory(transaction *model.DbTransaction, ecosystem int64, tableName str } for _, tx := range *txs { if len(rollbackList) > 0 { - rollbackList[len(rollbackList)-1].(map[string]string)[`block_id`] = converter.Int64ToStr(tx.BlockID) + prev := rollbackList[len(rollbackList)-1].(map[string]string) + prev[`block_id`] = converter.Int64ToStr(tx.BlockID) + prev[`id`] = converter.Int64ToStr(tx.ID) + block := model.Block{} + if ok, err := block.Get(tx.BlockID); ok { + prev[`block_time`] = time.Unix(block.Time, 0).Format(`2006-01-02 15:04:05`) + } else if err != nil { + log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("getting block time") + return nil, err + } + if idRollback == tx.ID { + return rollbackList[len(rollbackList)-1 : len(rollbackList)], nil + } } if tx.Data == "" { continue @@ -1831,21 +1848,55 @@ func GetHistory(transaction *model.DbTransaction, ecosystem int64, tableName str rollbackList = append(rollbackList, rollback) curVal = rollback } + if idRollback > 0 { + return []interface{}{}, nil + } return rollbackList, nil } func GetBlockHistory(sc *SmartContract, id int64) ([]interface{}, error) { - return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `blocks`, id) + return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `blocks`, id, 0) } func GetPageHistory(sc *SmartContract, id int64) ([]interface{}, error) { - return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `pages`, id) + return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `pages`, id, 0) } func GetMenuHistory(sc *SmartContract, id int64) ([]interface{}, error) { - return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `menu`, id) + return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `menu`, id, 0) } func GetContractHistory(sc *SmartContract, id int64) ([]interface{}, error) { - return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `contracts`, id) + return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `contracts`, id, 0) +} + +func GetHistoryRow(sc *SmartContract, tableName string, id, idRollback int64) (map[string]interface{}, + error) { + list, err := GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, tableName, id, idRollback) + if err != nil { + return nil, err + } + result := map[string]interface{}{} + if len(list) > 0 { + for key, val := range list[0].(map[string]string) { + result[key] = val + } + } + return result, nil +} + +func GetBlockHistoryRow(sc *SmartContract, id, idRollback int64) (map[string]interface{}, error) { + return GetHistoryRow(sc, `blocks`, id, idRollback) +} + +func GetPageHistoryRow(sc *SmartContract, id, idRollback int64) (map[string]interface{}, error) { + return GetHistoryRow(sc, `pages`, id, idRollback) +} + +func GetMenuHistoryRow(sc *SmartContract, id, idRollback int64) (map[string]interface{}, error) { + return GetHistoryRow(sc, `menu`, id, idRollback) +} + +func GetContractHistoryRow(sc *SmartContract, id, idRollback int64) (map[string]interface{}, error) { + return GetHistoryRow(sc, `contracts`, id, idRollback) } diff --git a/packages/template/funcs.go b/packages/template/funcs.go index 085d0522e..4f8624744 100644 --- a/packages/template/funcs.go +++ b/packages/template/funcs.go @@ -63,10 +63,14 @@ func init() { funcs[`EcosysParam`] = 
tplFunc{ecosysparTag, defaultTag, `ecosyspar`, `Name,Index,Source`} funcs[`Em`] = tplFunc{defaultTag, defaultTag, `em`, `Body,Class`} funcs[`GetVar`] = tplFunc{getvarTag, defaultTag, `getvar`, `Name`} - funcs[`GetContractHistory`] = tplFunc{getContractHistoryTag, defaultTag, `getcontracthistory`, `Source,Id`} - funcs[`GetMenuHistory`] = tplFunc{getMenuHistoryTag, defaultTag, `getmenuhistory`, `Source,Id`} - funcs[`GetBlockHistory`] = tplFunc{getBlockHistoryTag, defaultTag, `getblockhistory`, `Source,Id`} - funcs[`GetPageHistory`] = tplFunc{getPageHistoryTag, defaultTag, `getpagehistory`, `Source,Id`} + funcs[`GetContractHistory`] = tplFunc{getContractHistoryTag, defaultTag, `getcontracthistory`, + `Source,Id,RollbackId`} + funcs[`GetMenuHistory`] = tplFunc{getMenuHistoryTag, defaultTag, `getmenuhistory`, + `Source,Id,RollbackId`} + funcs[`GetBlockHistory`] = tplFunc{getBlockHistoryTag, defaultTag, `getblockhistory`, + `Source,Id,RollbackId`} + funcs[`GetPageHistory`] = tplFunc{getPageHistoryTag, defaultTag, `getpagehistory`, + `Source,Id,RollbackId`} funcs[`ImageInput`] = tplFunc{defaultTag, defaultTag, `imageinput`, `Name,Width,Ratio,Format`} funcs[`InputErr`] = tplFunc{defaultTag, defaultTag, `inputerr`, `*`} funcs[`JsonToSource`] = tplFunc{jsontosourceTag, defaultTag, `jsontosource`, `Source,Data`} @@ -1235,9 +1239,12 @@ func columntypeTag(par parFunc) string { func getHistoryTag(par parFunc, table string) string { setAllAttr(par) - + var rollID int64 + if len((*par.Pars)["RollbackId"]) > 0 { + rollID = converter.StrToInt64(macro((*par.Pars)[`RollbackId`], par.Workspace.Vars)) + } list, err := smart.GetHistory(nil, converter.StrToInt64((*par.Workspace.Vars)[`ecosystem_id`]), - table, converter.StrToInt64(macro((*par.Pars)[`Id`], par.Workspace.Vars))) + table, converter.StrToInt64(macro((*par.Pars)[`Id`], par.Workspace.Vars)), rollID) if err != nil { return err.Error() } From 075a8305effdf8f16328f27b7187feb28722c638 Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Tue, 10 Jul 2018 19:54:53 +0500 Subject: [PATCH 147/169] feature/990-shutdown (#432) * remove default_page from roles * Fixed loop conditions * Fixed loop in system funcs * Fixed loop conditions * Fixed loop in system funcs * Fixed ContractAccess --- packages/api/contract_test.go | 79 ++++++++++++++++++++++------------- packages/api/smart_test.go | 20 ++++----- packages/api/template_test.go | 22 ++++------ packages/script/vm.go | 15 +++++++ packages/script/vminit.go | 6 ++- packages/smart/funcs.go | 31 ++++++++------ 6 files changed, 105 insertions(+), 68 deletions(-) diff --git a/packages/api/contract_test.go b/packages/api/contract_test.go index 6a9bef2f3..f2ac1c0c9 100644 --- a/packages/api/contract_test.go +++ b/packages/api/contract_test.go @@ -98,7 +98,7 @@ var contracts = []smartContract{ CallContract("RecCall", par) } }`, []smartParams{ - {nil, map[string]string{`error`: `{"type":"panic","error":"there is loop in @1RecCall contract"}`}}, + {nil, map[string]string{`error`: `{"type":"panic","error":"There is loop in @1RecCall contract"}`}}, }}, {`Recursion`, `contract Recursion { data { } @@ -445,7 +445,7 @@ func TestNewTableWithEmptyName(t *testing.T) { } if err := postTx("NewTable", &form); err == nil || err.Error() != - `{"type":"error","error":"Table name cannot be empty"}` { + `400 {"error": "E_SERVER", "msg": "Name is empty" }` { t.Error(`wrong error`, err) } @@ -484,7 +484,7 @@ func TestActivateContracts(t *testing.T) { data { Par string } - action { Test("active", $Par)}}`}, `Conditions`: {`true`}} + action 
{ Test("active", $Par)}}`}, "ApplicationId": {"1"}, `Conditions`: {`true`}} if err := postTx(`NewContract`, &form); err != nil { t.Error(err) return @@ -540,7 +540,7 @@ func TestDeactivateContracts(t *testing.T) { data { Par string } - action { Test("active", $Par)}}`}, `Conditions`: {`true`}} + action { Test("active", $Par)}}`}, "ApplicationId": {"1"}, `Conditions`: {`true`}} assert.NoError(t, postTx(`NewContract`, &form)) var ret getContractResult @@ -599,7 +599,7 @@ func TestSignature(t *testing.T) { } action { $result = "OK " + Str($Amount) - }}`}, `Conditions`: {`true`}} + }}`}, "ApplicationId": {"1"}, `Conditions`: {`true`}} if err := postTx(`NewContract`, &form); err != nil { t.Error(err) return @@ -615,7 +615,7 @@ func TestSignature(t *testing.T) { $result = "OOOPS " + Str($Amount) } } - `}, `Conditions`: {`true`}} + `}, `Conditions`: {`true`}, "ApplicationId": {"1"}} if err := postTx(`NewContract`, &form); err != nil { t.Error(err) return @@ -808,7 +808,7 @@ func TestUpdateFunc(t *testing.T) { } func action { $result = Sprintf("X=%s %s %s", $par, $original_contract, $this_contract) - }}`}, `Conditions`: {`true`}} + }}`}, "ApplicationId": {"1"}, `Conditions`: {`true`}} _, id, err := postTxResult(`NewContract`, &form) assert.NoError(t, err) @@ -818,7 +818,7 @@ func TestUpdateFunc(t *testing.T) { var ret map ret = DBFind("contracts").Columns("id,value").WhereId(10).Row() $result = ret["id"] - }}`}, `Conditions`: {`true`}} + }}`}, "ApplicationId": {"1"}, `Conditions`: {`true`}} assert.NoError(t, postTx(`NewContract`, &form)) form = url.Values{`Value`: {`contract row` + rnd + ` { @@ -828,7 +828,7 @@ func TestUpdateFunc(t *testing.T) { $result = ret }} - `}, `Conditions`: {`true`}} + `}, "ApplicationId": {"1"}, `Conditions`: {`true`}} assert.NoError(t, postTx(`NewContract`, &form)) _, msg, err := postTxResult(`one`+rnd, &url.Values{}) @@ -847,7 +847,7 @@ func TestUpdateFunc(t *testing.T) { action { $result = f` + rnd + `("par",$Par) + " " + $this_contract }} - `}, `Conditions`: {`true`}} + `}, "ApplicationId": {"1"}, `Conditions`: {`true`}} _, idcnt, err := postTxResult(`NewContract`, &form) if err != nil { t.Error(err) @@ -862,7 +862,7 @@ func TestUpdateFunc(t *testing.T) { return "Y="+input }`}, `Conditions`: {`true`}} err = postTx(`EditContract`, &form) - assert.EqualError(t, postTx(`EditContract`, &form), `{"type":"error","error":"Contracts or functions names cannot be changed"}`) + assert.EqualError(t, postTx(`EditContract`, &form), `{"type":"panic","error":"Contracts or functions names cannot be changed"}`) form = url.Values{`Id`: {id}, `Value`: {`contract f` + rnd + `{ data { @@ -911,7 +911,7 @@ func TestGlobalVars(t *testing.T) { $key_id = 1234 $result = Str($key_id) + $Par }} - `}, `Conditions`: {`true`}} + `}, "ApplicationId": {"1"}, `Conditions`: {`true`}} err := postTx(`NewContract`, &form) if err == nil { t.Errorf(`must be error`) @@ -925,7 +925,7 @@ func TestGlobalVars(t *testing.T) { action { $result = $Test + Str($ecosystem_id) } - }`}, `Conditions`: {`true`}} + }`}, "ApplicationId": {"1"}, `Conditions`: {`true`}} err = postTx(`NewContract`, &form) if err != nil { t.Error(err) @@ -946,7 +946,7 @@ func TestGlobalVars(t *testing.T) { $result = CallContract("c_` + rnd + `", params) + c_` + rnd + `("Test","OK") } } - }`}, `Conditions`: {`true`}} + }`}, "ApplicationId": {"1"}, `Conditions`: {`true`}} err = postTx(`NewContract`, &form) if err != nil { t.Error(err) @@ -957,7 +957,7 @@ func TestGlobalVars(t *testing.T) { action { $result = $Test + $aaa } - }`}, `Conditions`: 
{`true`}} + }`}, "ApplicationId": {"1"}, `Conditions`: {`true`}} err = postTx(`NewContract`, &form) if err != nil { t.Error(err) @@ -990,7 +990,7 @@ func TestContractChain(t *testing.T) { } rnd := `rnd` + crypto.RandSeq(4) - form := url.Values{"Name": {rnd}, "Columns": {`[{"name":"value","type":"varchar", "index": "0", + form := url.Values{"Name": {rnd}, "ApplicationId": {"1"}, "Columns": {`[{"name":"value","type":"varchar", "index": "0", "conditions":"true"}, {"name":"amount", "type":"number","index": "0", "conditions":"true"}]`}, "Permissions": {`{"insert": "true", "update" : "true", "new_column": "true"}`}} @@ -1012,7 +1012,7 @@ func TestContractChain(t *testing.T) { $new = $record["value"] DBUpdate("` + rnd + `", $Id, "value", $new+"="+$new ) } - }`}, `Conditions`: {`true`}} + }`}, "ApplicationId": {"1"}, `Conditions`: {`true`}} err = postTx(`NewContract`, &form) if err != nil { t.Error(err) @@ -1034,7 +1034,7 @@ func TestContractChain(t *testing.T) { $result = $record["value"] } } - `}, `Conditions`: {`true`}} + `}, "ApplicationId": {"1"}, `Conditions`: {`true`}} err = postTx(`NewContract`, &form) if err != nil { t.Error(err) @@ -1068,10 +1068,10 @@ func TestLoopCond(t *testing.T) { return } form = url.Values{`Value`: {`contract ` + rnd + `2 { - conditions { - ContractConditions("` + rnd + `1") - } - }`}, `Conditions`: {`true`}, `ApplicationId`: {`1`}} + conditions { + ContractConditions("` + rnd + `1") + } + }`}, `Conditions`: {`true`}, `ApplicationId`: {`1`}} err = postTx(`NewContract`, &form) if err != nil { t.Error(err) @@ -1085,18 +1085,37 @@ func TestLoopCond(t *testing.T) { } sid := ret.TableID form = url.Values{`Value`: {`contract ` + rnd + `1 { - conditions { - ContractConditions("` + rnd + `2") - } - }`}, `Id`: {sid}, `Conditions`: {`true`}, `ApplicationId`: {`1`}} + conditions { + ContractConditions("` + rnd + `2") + } + }`}, `Id`: {sid}, `Conditions`: {`true`}, `ApplicationId`: {`1`}} err = postTx(`EditContract`, &form) if err != nil { t.Error(err) return } - err = postTx(rnd+`2`, &url.Values{}) - if err != nil { - t.Error(err) - return + assert.EqualError(t, postTx(rnd+`2`, &url.Values{}), `{"type":"panic","error":"There is loop in `+rnd+`1 contract"}`) + + form = url.Values{"Name": {`ecosystems`}, "InsertPerm": {`ContractConditions("MainCondition")`}, + "UpdatePerm": {`EditEcosysName(1, "HANG")`}, + "NewColumnPerm": {`ContractConditions("MainCondition")`}} + assert.NoError(t, postTx(`EditTable`, &form)) + assert.EqualError(t, postTx(`EditEcosystemName`, &url.Values{"EcosystemID": {`1`}, + "NewName": {`Hang`}}), `{"type":"panic","error":"There is loop in EditEcosysName contract"}`) + + form = url.Values{`Value`: {`contract ` + rnd + `shutdown { + action + { DBInsert("` + rnd + `table", "test", "SHUTDOWN") } + }`}, `Conditions`: {`true`}, `ApplicationId`: {`1`}} + assert.NoError(t, postTx(`NewContract`, &form)) + + form = url.Values{ + "Name": {rnd + `table`}, + "Columns": {`[{"name":"test","type":"varchar", "index": "0", "conditions":"true"}]`}, + "ApplicationId": {"1"}, + "Permissions": {`{"insert": "` + rnd + `shutdown()", "update" : "true", "new_column": "true"}`}, } + require.NoError(t, postTx("NewTable", &form)) + + assert.EqualError(t, postTx(rnd+`shutdown`, &url.Values{}), `{"type":"panic","error":"There is loop in @1`+rnd+`shutdown contract"}`) } diff --git a/packages/api/smart_test.go b/packages/api/smart_test.go index f0856a86d..ddc572e5a 100644 --- a/packages/api/smart_test.go +++ b/packages/api/smart_test.go @@ -49,7 +49,7 @@ func TestUpperName(t *testing.T) { 
return } rnd := crypto.RandSeq(4) - form := url.Values{"Name": {"testTable" + rnd}, "Columns": {`[{"name":"num","type":"text", "conditions":"true"}, + form := url.Values{"Name": {"testTable" + rnd}, "ApplicationId": {"1"}, "Columns": {`[{"name":"num","type":"text", "conditions":"true"}, {"name":"text", "type":"text","conditions":"true"}]`}, "Permissions": {`{"insert": "true", "update" : "true", "new_column": "true"}`}} err := postTx(`NewTable`, &form) @@ -65,7 +65,7 @@ func TestUpperName(t *testing.T) { action { DBInsert("testTable` + rnd + `", "num, text", "fgdgf", "124234") } - }`}, `Conditions`: {`true`}} + }`}, "ApplicationId": {"1"}, `Conditions`: {`true`}} if err := postTx(`NewContract`, &form); err != nil { t.Error(err) return @@ -393,7 +393,7 @@ func TestUpdateSysParam(t *testing.T) { } DBUpdateSysParam("max_indexes", "4", "false" ) } - }`}, + }`}, "ApplicationId": {"1"}, "Conditions": {`ContractConditions("MainCondition")`}} assert.NoError(t, postTx("NewContract", &form)) @@ -573,7 +573,7 @@ func TestPartitialEdit(t *testing.T) { name := randName(`part`) form := url.Values{"Name": {name}, "Value": {"Span(Original text)"}, - "Menu": {"original_menu"}, "Conditions": {"ContractConditions(`MainCondition`)"}} + "Menu": {"original_menu"}, "ApplicationId": {"1"}, "Conditions": {"ContractConditions(`MainCondition`)"}} assert.NoError(t, postTx(`NewPage`, &form)) var retList listResult @@ -604,7 +604,7 @@ func TestPartitialEdit(t *testing.T) { assert.Equal(t, menu, ret.Value["menu"]) form = url.Values{"Name": {name}, "Value": {`MenuItem(One)`}, "Title": {`My Menu`}, - "Conditions": {"ContractConditions(`MainCondition`)"}} + "ApplicationId": {"1"}, "Conditions": {"ContractConditions(`MainCondition`)"}} assert.NoError(t, postTx(`NewMenu`, &form)) assert.NoError(t, sendGet(`list/menu`, nil, &retList)) idItem = retList.Count @@ -616,7 +616,7 @@ func TestPartitialEdit(t *testing.T) { assert.Equal(t, conditions, ret.Value["conditions"]) form = url.Values{"Name": {name}, "Value": {`Span(Block)`}, - "Conditions": {"ContractConditions(`MainCondition`)"}} + "ApplicationId": {"1"}, "Conditions": {"ContractConditions(`MainCondition`)"}} assert.NoError(t, postTx(`NewBlock`, &form)) assert.NoError(t, sendGet(`list/blocks`, nil, &retList)) idItem = retList.Count @@ -638,7 +638,7 @@ func TestContractEdit(t *testing.T) { action { $result = "before" } - }`}, + }`}, "ApplicationId": {"1"}, "Conditions": {"ContractConditions(`MainCondition`)"}} err := postTx(`NewContract`, &form) if err != nil { @@ -740,7 +740,7 @@ func TestJSON(t *testing.T) { info JSONEncode(a) } - }`}, + }`}, "ApplicationId": {"1"}, "Conditions": {"true"}, })) assert.EqualError(t, postTx(contract, &url.Values{}), `{"type":"info","error":"[{\"k1\":1,\"k2\":2},{\"k1\":1,\"k2\":2}]"}`) @@ -754,7 +754,7 @@ func TestJSON(t *testing.T) { action { info Sprintf("%#v", JSONDecode($Input)) } - }`}, + }`}, "ApplicationId": {"1"}, "Conditions": {"true"}, })) @@ -786,7 +786,7 @@ func TestBytesToString(t *testing.T) { action { $result = BytesToString($File) } - }`}, + }`}, "ApplicationId": {"1"}, "Conditions": {"true"}, })) diff --git a/packages/api/template_test.go b/packages/api/template_test.go index bf2ba6216..8578e0706 100644 --- a/packages/api/template_test.go +++ b/packages/api/template_test.go @@ -22,7 +22,6 @@ import ( "fmt" "math/rand" "net/url" - "strings" "testing" "time" @@ -358,28 +357,25 @@ func TestStringToBinary(t *testing.T) { } conditions {} action { - UploadBinary("Name,AppID,Data,DataMimeType", "test", 1, StringToBytes($Content), 
"text/plain") + UploadBinary("Name,ApplicationId,Data,DataMimeType", "test", 1, StringToBytes($Content), "text/plain") + $result = $key_id } } - `}, - "Conditions": {"true"}, + `}, "ApplicationId": {`1`}, "Conditions": {"true"}, } assert.NoError(t, postTx("NewContract", &form)) form = url.Values{"Content": {content}} - assert.NoError(t, postTx(contract, &form)) + _, msg, err := postTxResult(contract, &form) + assert.NoError(t, err) form = url.Values{ - "template": {`SetVar(link, Binary(Name: test, AppID: 1)) #link#`}, - } - var ret struct { - Tree []struct { - Link string `json:"text"` - } `json:"tree"` + "template": {`SetVar(link, Binary(Name: test, AppID: 1, MemberID: ` + msg + `))#link#`}, } + var ret contentResult assert.NoError(t, sendPost(`content`, &form, &ret)) - - data, err := sendRawRequest("GET", strings.TrimSpace(ret.Tree[0].Link), nil) + link := RawToString(ret.Tree) + data, err := sendRawRequest("GET", link[23:len(link)-3], nil) assert.NoError(t, err) assert.Equal(t, content, string(data)) } diff --git a/packages/script/vm.go b/packages/script/vm.go index 65f8d81c2..077a81c18 100644 --- a/packages/script/vm.go +++ b/packages/script/vm.go @@ -187,6 +187,18 @@ func (rt *RunTime) callFunc(cmd uint16, obj *ObjInfo) (err error) { var result []reflect.Value pars := make([]reflect.Value, in) limit := 0 + var ( + stack Stacker + ok bool + ) + if finfo.Name != `ContractConditions` && finfo.Name != `ExecContract` && + finfo.Name != `ContractAccess` { + if stack, ok = (*rt.extend)["sc"].(Stacker); ok { + if err := stack.AppendStack(finfo.Name); err != nil { + return err + } + } + } (*rt.extend)[`rt`] = rt auto := 0 for k := 0; k < in; k++ { @@ -224,6 +236,9 @@ func (rt *RunTime) callFunc(cmd uint16, obj *ObjInfo) (err error) { result = foo.Call(pars) } rt.stack = rt.stack[:shift] + if stack != nil { + stack.AppendStack("") + } for i, iret := range result { // first return value of every extend function that makes queries to DB is cost diff --git a/packages/script/vminit.go b/packages/script/vminit.go index a82309641..23128c73d 100644 --- a/packages/script/vminit.go +++ b/packages/script/vminit.go @@ -192,7 +192,7 @@ type ExtendData struct { // Stacker represents interface for working with call stack type Stacker interface { - AppendStack(contract string) + AppendStack(contract string) error } // ParseContract gets a state identifier and the name of the contract from the full name like @[id]name @@ -290,7 +290,9 @@ func ExecContract(rt *RunTime, name, txs string, params ...interface{}) (interfa var stack Stacker if stack, ok = (*rt.extend)["sc"].(Stacker); ok { - stack.AppendStack(name) + if err := stack.AppendStack(name); err != nil { + return nil, err + } } if (*rt.extend)[`sc`] != nil && isSignature { obj := rt.vm.Objects[`check_signature`] diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 74a3ed9e7..7b5486736 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -88,21 +88,26 @@ type SmartContract struct { TxCost int64 // Maximum cost of executing contract TxUsedCost decimal.Decimal // Used cost of CPU resources BlockData *utils.BlockData - Loop map[string]bool TxHash []byte PublicKeys [][]byte DbTransaction *model.DbTransaction } // AppendStack adds an element to the stack of contract call or removes the top element when name is empty -func (sc *SmartContract) AppendStack(contract string) { +func (sc *SmartContract) AppendStack(contract string) error { cont := sc.TxContract if len(contract) > 0 { + for _, item := range cont.StackCont { + if item == 
contract { + return fmt.Errorf(eContractLoop, contract) + } + } cont.StackCont = append(cont.StackCont, contract) } else { cont.StackCont = cont.StackCont[:len(cont.StackCont)-1] } (*sc.TxContract.Extend)["stack"] = cont.StackCont + return nil } var ( @@ -345,8 +350,14 @@ func ContractAccess(sc *SmartContract, names ...interface{}) bool { if name[0] != '@' { name = fmt.Sprintf(`@%d`, sc.TxSmart.EcosystemID) + name } - if sc.TxContract.StackCont[len(sc.TxContract.StackCont)-1] == name { - return true + for i := len(sc.TxContract.StackCont) - 1; i >= 0; i-- { + contName := sc.TxContract.StackCont[i] + if strings.HasPrefix(contName, `@`) { + if contName == name { + return true + } + break + } } } } @@ -374,20 +385,14 @@ func ContractConditions(sc *SmartContract, names ...interface{}) (bool, error) { } vars := map[string]interface{}{`ecosystem_id`: int64(sc.TxSmart.EcosystemID), `key_id`: sc.TxSmart.KeyID, `sc`: sc, `original_contract`: ``, `this_contract`: ``, `role_id`: sc.TxSmart.RoleID} - - if sc.Loop == nil { - sc.Loop = make(map[string]bool) - } - if _, ok := sc.Loop[`loop_`+name]; ok { - log.WithFields(log.Fields{"type": consts.ContractError, "contract_name": name}).Error("there is loop in contract") - return false, fmt.Errorf(eContractLoop, name) + if err := sc.AppendStack(name); err != nil { + return false, err } - sc.Loop[`loop_`+name] = true _, err := VMRun(sc.VM, block, []interface{}{}, &vars) if err != nil { return false, err } - delete(sc.Loop, `loop_`+name) + sc.AppendStack(``) } else { log.WithFields(log.Fields{"type": consts.EmptyObject}).Error("empty contract name in ContractConditions") return false, fmt.Errorf(`empty contract name in ContractConditions`) From 382e690ce3097037a16cace7a96ec7833612130f Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Tue, 10 Jul 2018 19:55:38 +0500 Subject: [PATCH 148/169] feature/1004-desc (#437) * Fixed predefined column names * Fixed PrepareColumns * Fixed DelayedContract --- packages/api/tables_test.go | 55 ++++++++++++++++--- .../migration/first_ecosys_contracts_data.go | 4 +- packages/smart/funcs.go | 4 +- packages/smart/selective.go | 6 +- packages/template/funcs.go | 7 ++- 5 files changed, 62 insertions(+), 14 deletions(-) diff --git a/packages/api/tables_test.go b/packages/api/tables_test.go index bb69611c8..c57465d4d 100644 --- a/packages/api/tables_test.go +++ b/packages/api/tables_test.go @@ -148,7 +148,7 @@ func TestJSONTable(t *testing.T) { name := randName(`json`) form := url.Values{"Name": {name}, "Columns": {`[{"name":"MyName","type":"varchar", "index": "0", "conditions":"true"}, {"name":"Doc", "type":"json","index": "0", "conditions":"true"}]`}, - "Permissions": {`{"insert": "true", "update" : "true", "new_column": "true"}`}} + "ApplicationId": {`1`}, "Permissions": {`{"insert": "true", "update" : "true", "new_column": "true"}`}} assert.NoError(t, postTx(`NewTable`, &form)) checkGet := func(want string) { @@ -170,7 +170,7 @@ func TestJSONTable(t *testing.T) { DBInsert("` + name + `", "MyName,Doc", "test3", "{\"title\": {\"name\":\"Test att\",\"text\":\"low\"}}") DBInsert("` + name + `", "MyName,doc", "test4", "{\"languages\": {\"arr_id\":{\"1\":\"0\",\"2\":\"0\",\"3\":\"0\"}}}") DBInsert("` + name + `", "MyName,doc", "test5", "{\"app_id\": \"33\"}") - }}`}, + }}`}, "ApplicationId": {`1`}, "Conditions": {`ContractConditions("MainCondition")`}} assert.NoError(t, postTx("NewContract", &form)) @@ -193,7 +193,7 @@ func TestJSONTable(t *testing.T) { empty = DBFind("` + name + `").WhereId(4).One("doc->languages->arr_id->2") 
$result = out + Str(DBFind("` + name + `").WhereId($Id).One("doc->check")) + tmp + where +one + empty } - }`}, + }`}, "ApplicationId": {`1`}, "Conditions": {`ContractConditions("MainCondition")`}} assert.NoError(t, postTx("NewContract", &form)) @@ -204,7 +204,7 @@ func TestJSONTable(t *testing.T) { mydoc["type"] = "doc" mydoc["doc"] = "Some test text." DBUpdate("` + name + `", 2, "myname,Doc", "test3", mydoc) - }}`}, + }}`}, "ApplicationId": {`1`}, "Conditions": {`ContractConditions("MainCondition")`}} assert.NoError(t, postTx("NewContract", &form)) @@ -220,7 +220,7 @@ func TestJSONTable(t *testing.T) { DBUpdate("` + name + `", 3, "doc->flag,doc->sub", "Flag", 100) DBUpdate("` + name + `", 3, "doc->temp", "Temp") }} - `}, + `}, "ApplicationId": {`1`}, "Conditions": {`ContractConditions("MainCondition")`}} assert.NoError(t, postTx("NewContract", &form)) assert.NoError(t, postTx(name, &url.Values{})) @@ -239,7 +239,7 @@ func TestJSONTable(t *testing.T) { } action { $result = DBFind("contracts").WhereId($Id).Row() - }}`}, + }}`}, "ApplicationId": {`1`}, "Conditions": {`ContractConditions("MainCondition")`}} assert.NoError(t, postTx("NewContract", &form)) @@ -247,7 +247,7 @@ func TestJSONTable(t *testing.T) { action { $temp = res` + name + `("Id",10) $result = $temp["id"] - }}`}, + }}`}, "ApplicationId": {`1`}, "Conditions": {`ContractConditions("MainCondition")`}} assert.NoError(t, postTx("NewContract", &form)) @@ -284,3 +284,44 @@ func TestJSONTable(t *testing.T) { assert.Equal(t, item.want, RawToString(ret.Tree)) } } + +func TestTableDesc(t *testing.T) { + if err := keyLogin(1); err != nil { + t.Error(err) + return + } + name := randName(`tbl`) + form := url.Values{"Name": {name}, "Columns": {`[{"name":"desc","type":"varchar", "index": "0", + "conditions":{"update":"true", "read":"true"}}]`}, "ApplicationId": {"1"}, + "Permissions": {`{"insert": "true", "update" : "true", "new_column": "true"}`}} + assert.NoError(t, postTx(`NewTable`, &form)) + + form = url.Values{"Name": {name}, "Value": {`contract ` + name + ` { + action { + DBInsert("` + name + `", "desc", "test") + DBUpdate("` + name + `", 1, "desc", "new test") + $result = DBFind("` + name + `").Columns("desc").WhereId(1).One("desc") + var vals map + vals = DBRow("pages").Columns("NAME, menu").Where("id = ?", 1) + $result = $result + vals["name"] + }}`}, "ApplicationId": {"1"}, + "Conditions": {`ContractConditions("MainCondition")`}} + assert.NoError(t, postTx("NewContract", &form)) + + _, msg, err := postTxResult(name, &url.Values{}) + assert.NoError(t, err) + if msg != `new testdefault_page` { + t.Errorf(`wrong msg %s`, msg) + } + + form = url.Values{ + "template": {`DBFind("` + name + `", src1)`}, + } + var ret contentResult + assert.NoError(t, sendPost(`content`, &form, &ret)) + + if RawToString(ret.Tree) != `[{"tag":"dbfind","attr":{"columns":["id","desc"],"data":[["1","new test"]],"name":"`+name+`","source":"src1","types":["text","text"]}}]` { + t.Error(fmt.Errorf(`wrong tree %s`, RawToString(ret.Tree))) + return + } +} diff --git a/packages/migration/first_ecosys_contracts_data.go b/packages/migration/first_ecosys_contracts_data.go index e2b505fce..4d5c85b40 100644 --- a/packages/migration/first_ecosys_contracts_data.go +++ b/packages/migration/first_ecosys_contracts_data.go @@ -1333,7 +1333,7 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { } } action { - DBInsert("delayed_contracts", "contract,key_id,block_id,every_block,\"limit\",conditions", $Contract, $key_id, $BlockID, $EveryBlock, $Limit, $Conditions) + 
DBInsert("delayed_contracts", "contract,key_id,block_id,every_block,limit,conditions", $Contract, $key_id, $BlockID, $EveryBlock, $Limit, $Conditions) } }', %[1]d, 'ContractConditions("MainCondition")', 1), ('38', 'EditDelayedContract','contract EditDelayedContract { @@ -1366,7 +1366,7 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { } } action { - DBUpdate("delayed_contracts", $Id, "contract,key_id,block_id,every_block,counter,\"limit\",deleted,conditions", $Contract, $key_id, $BlockID, $EveryBlock, 0, $Limit, $Deleted, $Conditions) + DBUpdate("delayed_contracts", $Id, "contract,key_id,block_id,every_block,counter,limit,deleted,conditions", $Contract, $key_id, $BlockID, $EveryBlock, 0, $Limit, $Deleted, $Conditions) } }', %[1]d, 'ContractConditions("MainCondition")', 1), ('39', 'CallDelayedContract','contract CallDelayedContract { diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 7b5486736..3598509dd 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -726,6 +726,7 @@ func DBInsert(sc *SmartContract, tblname string, params string, val ...interface func PrepareColumns(columns string) string { colList := make([]string, 0) for _, icol := range strings.Split(columns, `,`) { + icol = strings.TrimSpace(icol) if strings.Contains(icol, `->`) { colfield := strings.Split(icol, `->`) if len(colfield) == 2 { @@ -734,6 +735,8 @@ func PrepareColumns(columns string) string { icol = fmt.Sprintf(`%s::jsonb#>>'{%s}' as "%[1]s.%[3]s"`, colfield[0], strings.Join(colfield[1:], `,`), strings.Join(colfield[1:], `.`)) } + } else if !strings.ContainsAny(icol, `:*>"`) { + icol = `"` + icol + `"` } colList = append(colList, icol) } @@ -816,7 +819,6 @@ func DBSelect(sc *SmartContract, tblname string, columns string, id int64, order columns = strings.Join(cols, `,`) } columns = PrepareColumns(columns) - rows, err = model.GetDB(sc.DbTransaction).Table(tblname).Select(columns).Where(where, params...).Order(order). 
Offset(offset).Limit(limit).Rows() if err != nil { diff --git a/packages/smart/selective.go b/packages/smart/selective.go index 80677c43f..366ba6b69 100644 --- a/packages/smart/selective.go +++ b/packages/smart/selective.go @@ -78,7 +78,7 @@ func (sc *SmartContract) selectiveLoggingAndUpd(fields []string, ivalues []inter } else if strings.Contains(field, `->`) { addSQLFields += field[:strings.Index(field, `->`)] + `,` } else { - addSQLFields += field + "," + addSQLFields += `"` + field + `",` } } @@ -161,7 +161,7 @@ func (sc *SmartContract) selectiveLoggingAndUpd(fields []string, ivalues []inter jsonFields[colfield[0]][colfield[1]] = values[i] } } else { - addSQLUpdate += fields[i] + `='` + escapeSingleQuotes(values[i]) + `',` + addSQLUpdate += `"` + fields[i] + `"='` + escapeSingleQuotes(values[i]) + `',` } } for colname, colvals := range jsonFields { @@ -220,7 +220,7 @@ func (sc *SmartContract) selectiveLoggingAndUpd(fields []string, ivalues []inter } else if strings.HasPrefix(fields[i], `timestamp `) { addSQLIns0 = append(addSQLIns0, fields[i][len(`timestamp `):]) } else { - addSQLIns0 = append(addSQLIns0, fields[i]) + addSQLIns0 = append(addSQLIns0, `"`+fields[i]+`"`) } if converter.IsByteColumn(table, fields[i]) && len(values[i]) != 0 { addSQLIns1 = append(addSQLIns1, `decode('`+hex.EncodeToString([]byte(values[i]))+`','HEX')`) diff --git a/packages/template/funcs.go b/packages/template/funcs.go index 4f8624744..1a8b5f52d 100644 --- a/packages/template/funcs.go +++ b/packages/template/funcs.go @@ -627,7 +627,12 @@ func dbfindTag(par parFunc) string { break } } - fields = strings.Join(queryColumns, ", ") + for i, field := range queryColumns { + if !strings.ContainsAny(field, `:.>"`) { + queryColumns[i] = `"` + field + `"` + } + } + fields = strings.Join(queryColumns, `, `) for i, key := range columnNames { if strings.Contains(key, `->`) { columnNames[i] = strings.Replace(key, `->`, `.`, -1) From 3168353e8135272fcd7ae11401e93c664bf35a94 Mon Sep 17 00:00:00 2001 From: gentee Date: Wed, 11 Jul 2018 12:48:04 +0500 Subject: [PATCH 149/169] Fixed forsign bug when data is empty --- packages/transaction/transaction.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/transaction/transaction.go b/packages/transaction/transaction.go index 18b68aa7b..9df7b9608 100644 --- a/packages/transaction/transaction.go +++ b/packages/transaction/transaction.go @@ -280,6 +280,8 @@ func (t *Transaction) parseFromContract(buf *bytes.Buffer) error { if err := t.fillTxData(*txInfo, input, forsign); err != nil { return err } + } else { + t.TxData[`forsign`] = strings.Join(forsign, ",") } return nil From b25f295ca7ba02a869717d1c6a53907f02dbba9b Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Wed, 11 Jul 2018 17:28:00 +0500 Subject: [PATCH 150/169] Added maxpay to keys (#441) --- packages/migration/ecosystem.go | 1 + packages/migration/tables_data.go | 1 + packages/model/keys.go | 1 + packages/smart/smart.go | 33 +++++++++++++++++++++++-------- 4 files changed, 28 insertions(+), 8 deletions(-) diff --git a/packages/migration/ecosystem.go b/packages/migration/ecosystem.go index 9f7afffb4..588e6c625 100644 --- a/packages/migration/ecosystem.go +++ b/packages/migration/ecosystem.go @@ -41,6 +41,7 @@ var schemaEcosystem = `DROP TABLE IF EXISTS "%[1]d_keys"; CREATE TABLE "%[1]d_ke "id" bigint NOT NULL DEFAULT '0', "pub" bytea NOT NULL DEFAULT '', "amount" decimal(30) NOT NULL DEFAULT '0' CHECK (amount >= 0), + "maxpay" decimal(30) NOT NULL DEFAULT '0' CHECK (maxpay >= 0), "multi" bigint NOT NULL 
DEFAULT '0', "deleted" bigint NOT NULL DEFAULT '0', "blocked" bigint NOT NULL DEFAULT '0' diff --git a/packages/migration/tables_data.go b/packages/migration/tables_data.go index be8177b61..fc40b4a70 100644 --- a/packages/migration/tables_data.go +++ b/packages/migration/tables_data.go @@ -13,6 +13,7 @@ var tablesDataSQL = `INSERT INTO "%[1]d_tables" ("id", "name", "permissions","co "new_column": "ContractConditions(\"MainCondition\")"}', '{"pub": "ContractConditions(\"MainCondition\")", "amount": "ContractConditions(\"MainCondition\")", + "maxpay": "ContractConditions(\"MainCondition\")", "deleted": "ContractConditions(\"MainCondition\")", "blocked": "ContractConditions(\"MainCondition\")", "multi": "ContractConditions(\"MainCondition\")"}', diff --git a/packages/model/keys.go b/packages/model/keys.go index c6d46403f..00e19be93 100644 --- a/packages/model/keys.go +++ b/packages/model/keys.go @@ -10,6 +10,7 @@ type Key struct { ID int64 `gorm:"primary_key;not null"` PublicKey []byte `gorm:"column:pub;not null"` Amount string `gorm:"not null"` + Maxpay string `gorm:"not null"` Deleted int64 `gorm:"not null"` Blocked int64 `gorm:"not null"` } diff --git a/packages/smart/smart.go b/packages/smart/smart.go index f529a7779..1a597d81a 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -42,7 +42,6 @@ type Contract struct { Name string Called uint32 FreeRequest bool - TxPrice int64 // custom price for citizens TxGovAccount int64 // state wallet EGSRate float64 // money/EGS rate TableAccounts string @@ -922,12 +921,21 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { if !isActive && !bytes.Equal(wallet.PublicKey, payWallet.PublicKey) && !bytes.Equal(sc.TxSmart.PublicKey, payWallet.PublicKey) && sc.TxSmart.SignedBy == 0 { return retError(ErrDiffKeys) } - var amount decimal.Decimal + var amount, maxpay decimal.Decimal amount, err = decimal.NewFromString(payWallet.Amount) if err != nil { logger.WithFields(log.Fields{"type": consts.ConversionError, "error": err, "value": payWallet.Amount}).Error("converting pay wallet amount from string to decimal") return retError(err) } + maxpay, err = decimal.NewFromString(payWallet.Maxpay) + if err != nil { + logger.WithFields(log.Fields{"type": consts.ConversionError, "error": err, "value": payWallet.Maxpay}).Error("converting pay wallet maxpay from string to decimal") + return retError(err) + } + if maxpay.GreaterThan(decimal.New(0, 0)) && maxpay.LessThan(amount) { + amount = maxpay + } + if cprice := sc.TxContract.GetFunc(`price`); cprice != nil { var ret []interface{} if ret, err = VMRun(sc.VM, cprice, nil, sc.TxContract.Extend); err != nil { @@ -960,16 +968,26 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { } } sizeFuel = syspar.GetSizeFuel() * int64(len(sc.TxSmart.Data)) / 1024 - if amount.Cmp(decimal.New(sizeFuel+price, 0).Mul(fuelRate)) <= 0 { + priceCost := decimal.New(price, 0) + if amount.LessThanOrEqual(priceCost.Mul(fuelRate)) { logger.WithFields(log.Fields{"type": consts.NoFunds}).Error("current balance is not enough") return retError(ErrCurrentBalance) } + maxCost := amount.Div(fuelRate).Floor() + fullCost := decimal.New((*sc.TxContract.Extend)[`txcost`].(int64), 0).Add(priceCost) + if maxCost.LessThan(fullCost) { + (*sc.TxContract.Extend)[`txcost`] = converter.StrToInt64(maxCost.String()) - price + } } } - before := (*sc.TxContract.Extend)[`txcost`].(int64) + price + before := (*sc.TxContract.Extend)[`txcost`].(int64) // Payment for the size (*sc.TxContract.Extend)[`txcost`] = 
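The `maxpay` column added to `keys` above caps how much a single transaction may charge an account: CallContract now lowers the chargeable amount to `maxpay` when it is set and smaller than the balance, and when the allotted cost would exceed `amount / fuel_rate` the transaction cost budget is clamped to that ceiling minus the contract price. A simplified numeric sketch using the same `shopspring/decimal` calls (the figures are made up, not taken from the patch):

```go
package main

import (
	"fmt"

	"github.com/shopspring/decimal"
)

func main() {
	// Hypothetical figures: balance 1000, maxpay 50, fuel_rate 2, price 10.
	amount := decimal.New(1000, 0)
	maxpay := decimal.New(50, 0)
	fuelRate := decimal.New(2, 0)
	price := decimal.New(10, 0)

	// The patched CallContract caps the chargeable amount by keys.maxpay...
	if maxpay.GreaterThan(decimal.New(0, 0)) && maxpay.LessThan(amount) {
		amount = maxpay
	}
	// ...and derives the execution budget the transaction cost is clamped to.
	maxCost := amount.Div(fuelRate).Floor()
	budget := maxCost.Sub(price)
	fmt.Println(amount, maxCost, budget) // 50 25 15
}
```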
(*sc.TxContract.Extend)[`txcost`].(int64) - sizeFuel + if (*sc.TxContract.Extend)[`txcost`].(int64) <= 0 { + logger.WithFields(log.Fields{"type": consts.NoFunds}).Error("current balance is not enough for payment") + return retError(ErrCurrentBalance) + } _, nameContract := script.ParseContract(sc.TxContract.Name) (*sc.TxContract.Extend)[`original_contract`] = nameContract @@ -985,14 +1003,13 @@ func (sc *SmartContract) CallContract(flags int) (string, error) { sc.TxContract.Called = 1 << i _, err = VMRun(sc.VM, cfunc, nil, sc.TxContract.Extend) if err != nil { - before -= price + price = 0 break } } } - sc.TxFuel = before - (*sc.TxContract.Extend)[`txcost`].(int64) - price - sc.TxUsedCost = decimal.New(before-(*sc.TxContract.Extend)[`txcost`].(int64), 0) - sc.TxContract.TxPrice = price + sc.TxFuel = before - (*sc.TxContract.Extend)[`txcost`].(int64) + sc.TxUsedCost = decimal.New(sc.TxFuel+price, 0) if (*sc.TxContract.Extend)[`result`] != nil { result = fmt.Sprint((*sc.TxContract.Extend)[`result`]) if len(result) > 255 { From 30f27f4e2163fe836fb01e121ca8a94773b57eda Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Wed, 11 Jul 2018 18:39:39 +0500 Subject: [PATCH 151/169] Added Popup for AddToolButton (#444) --- packages/template/funcs.go | 5 ++++- packages/template/template_test.go | 2 ++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/template/funcs.go b/packages/template/funcs.go index 1a8b5f52d..8e1966278 100644 --- a/packages/template/funcs.go +++ b/packages/template/funcs.go @@ -52,7 +52,7 @@ var ( func init() { funcs[`Lower`] = tplFunc{lowerTag, defaultTag, `lower`, `Text`} - funcs[`AddToolButton`] = tplFunc{defaultTag, defaultTag, `addtoolbutton`, `Title,Icon,Page,PageParams`} + funcs[`AddToolButton`] = tplFunc{defaultTailTag, defaultTailTag, `addtoolbutton`, `Title,Icon,Page,PageParams`} funcs[`Address`] = tplFunc{addressTag, defaultTag, `address`, `Wallet`} funcs[`AppParam`] = tplFunc{appparTag, defaultTag, `apppar`, `Name,App,Index,Source`} funcs[`Calculate`] = tplFunc{calculateTag, defaultTag, `calculate`, `Exp,Type,Prec`} @@ -111,6 +111,9 @@ func init() { funcs[`Binary`] = tplFunc{binaryTag, defaultTag, "binary", "AppID,Name,MemberID"} funcs[`GetColumnType`] = tplFunc{columntypeTag, defaultTag, `columntype`, `Table,Column`} + tails[`addtoolbutton`] = forTails{map[string]tailInfo{ + `Popup`: {tplFunc{popupTag, defaultTailFull, `popup`, `Width,Header`}, true}, + }} tails[`button`] = forTails{map[string]tailInfo{ `Alert`: {tplFunc{alertTag, defaultTailFull, `alert`, `Text,ConfirmButton,CancelButton,Icon`}, true}, `Popup`: {tplFunc{popupTag, defaultTailFull, `popup`, `Width,Header`}, true}, diff --git a/packages/template/template_test.go b/packages/template/template_test.go index 0e8344485..60d14e153 100644 --- a/packages/template/template_test.go +++ b/packages/template/template_test.go @@ -42,6 +42,8 @@ func TestJSON(t *testing.T) { } var forTest = tplList{ + {`AddToolButton(Title: Open, Page: default).Popup(Width: 50, Header: Test)`, + `[{"tag":"addtoolbutton","attr":{"page":"default","popup":{"header":"Test","width":"50"},"title":"Open"}}]`}, {`SetVar(ok, OK)Input(Type: text, Value: #ok# Now(YY))Input(Type:text, Value: #ok# Some text)`, `[{"tag":"input","attr":{"type":"text","value":"OK Now(YY)"}},{"tag":"input","attr":{"type":"text","value":"OK Some text"}}]`}, {`SetVar(format, MMYY)Now(#format#,1 day)Now()`, `[{"tag":"now","attr":{"format":"MMYY","interval":"1 day"}},{"tag":"now"}]`}, From 1945ece35bee033f634dfddb33885805797bf410 Mon Sep 17 00:00:00 
2001 From: Alexey Krivonogov Date: Thu, 12 Jul 2018 09:34:35 +0500 Subject: [PATCH 152/169] Added Ecosystem par to Binary (#442) --- packages/api/template_test.go | 23 ++++++++++++++--------- packages/template/funcs.go | 8 ++++---- 2 files changed, 18 insertions(+), 13 deletions(-) diff --git a/packages/api/template_test.go b/packages/api/template_test.go index 8578e0706..8ef880f23 100644 --- a/packages/api/template_test.go +++ b/packages/api/template_test.go @@ -288,9 +288,10 @@ func TestBinary(t *testing.T) { assert.NoError(t, keyLogin(1)) params := map[string]string{ - "AppID": "1", - "MemberID": "1", - "Name": "file", + "ApplicationId": "1", + "AppID": "1", + "MemberID": "1", + "Name": "file", } data, err := base64.StdEncoding.DecodeString(imageData) @@ -310,7 +311,7 @@ func TestBinary(t *testing.T) { result string }{ { - `Image(Src: Binary(Name: file, AppID: 1, MemberID: 1))`, + `Image(Src: Binary(Name: file, AppID: 1, MemberID: #key_id#))`, `\[{"tag":"image","attr":{"src":"/data/1_binaries/\d+/data/` + hashImage + `"}}\]`, }, { @@ -318,7 +319,11 @@ func TestBinary(t *testing.T) { `\[{"tag":"image","attr":{"src":"/data/1_binaries/\d+/data/` + hashImage + `"}}\]`, }, { - `SetVar(name, file)SetVar(app_id, 1)SetVar(member_id, 1)Image(Src: Binary(Name: #name#, AppID: #app_id#, MemberID: #member_id#))`, + `SetVar(eco, 1)Image(Src: Binary().ById(` + id + `).Ecosystem(#eco#)`, + `\[{"tag":"image","attr":{"src":"/data/1_binaries/\d+/data/` + hashImage + `"}}\]`, + }, + { + `SetVar(name, file)SetVar(app_id, 1)SetVar(member_id, #key_id#)Image(Src: Binary(Name: #name#, AppID: #app_id#, MemberID: #member_id#))`, `\[{"tag":"image","attr":{"src":"/data/1_binaries/\d+/data/` + hashImage + `"}}\]`, }, { @@ -326,12 +331,12 @@ func TestBinary(t *testing.T) { `\[{"tag":"image","attr":{"src":"/data/1_binaries/\d+/data/` + hashImage + `"}}\]`, }, { - `DBFind(Name: binaries, Src: mysrc).Where("app_id=1 AND member_id = 1 AND name = 'file'").Custom(img){Image(Src: #data#)}Table(mysrc, "Image=img")`, - `\[{"tag":"dbfind","attr":{"columns":\["id","app_id","member_id","name","data","hash","mime_type","img"\],"data":\[\["\d+","1","1","file","{\\"link\\":\\"/data/1_binaries/\d+/data/` + hashImage + `\\",\\"title\\":\\"` + hashImage + `\\"}","` + hashImage + `","application/octet-stream","\[{\\"tag\\":\\"image\\",\\"attr\\":{\\"src\\":\\"/data/1_binaries/\d+/data/` + hashImage + `\\"}}\]"\]\],"name":"binaries","source":"Src: mysrc","types":\["text","text","text","text","blob","text","text","tags"\],"where":"app_id=1 AND member_id = 1 AND name = 'file'"}},{"tag":"table","attr":{"columns":\[{"Name":"img","Title":"Image"}\],"source":"mysrc"}}\]`, + `DBFind(Name: binaries, Src: mysrc).Where("app_id=1 AND member_id = #key_id# AND name = 'file'").Custom(img){Image(Src: #data#)}Table(mysrc, "Image=img")`, + `\[{"tag":"dbfind","attr":{"columns":\["id","app_id","member_id","name","data","hash","mime_type","img"\],"data":\[\["\d+","1","\d+","file","{\\"link\\":\\"/data/1_binaries/\d+/data/` + hashImage + `\\",\\"title\\":\\"` + hashImage + `\\"}","` + hashImage + `","application/octet-stream","\[{\\"tag\\":\\"image\\",\\"attr\\":{\\"src\\":\\"/data/1_binaries/\d+/data/` + hashImage + `\\"}}\]"\]\],"name":"binaries","source":"Src: mysrc","types":\["text","text","text","text","blob","text","text","tags"\],"where":"app_id=1 AND member_id = \d+ AND name = 'file'"}},{"tag":"table","attr":{"columns":\[{"Name":"img","Title":"Image"}\],"source":"mysrc"}}\]`, }, { - `DBFind(Name: binaries, Src: mysrc).Where("app_id=1 AND member_id = 
1 AND name = 'file'").Vars(prefix)Image(Src: "#prefix_data#")`, - `\[{"tag":"dbfind","attr":{"columns":\["id","app_id","member_id","name","data","hash","mime_type"\],"data":\[\["\d+","1","1","file","{\\"link\\":\\"/data/1_binaries/\d+/data/` + hashImage + `\\",\\"title\\":\\"` + hashImage + `\\"}","` + hashImage + `","application/octet-stream"\]\],"name":"binaries","source":"Src: mysrc","types":\["text","text","text","text","blob","text","text"\],"where":"app_id=1 AND member_id = 1 AND name = 'file'"}},{"tag":"image","attr":{"src":"{\\"link\\":\\"/data/1_binaries/\d+/data/` + hashImage + `\\",\\"title\\":\\"` + hashImage + `\\"}"}}\]`, + `DBFind(Name: binaries, Src: mysrc).Where("app_id=1 AND member_id = #key_id# AND name = 'file'").Vars(prefix)Image(Src: "#prefix_data#")`, + `\[{"tag":"dbfind","attr":{"columns":\["id","app_id","member_id","name","data","hash","mime_type"\],"data":\[\["\d+","1","\d+","file","{\\"link\\":\\"/data/1_binaries/\d+/data/` + hashImage + `\\",\\"title\\":\\"` + hashImage + `\\"}","` + hashImage + `","application/octet-stream"\]\],"name":"binaries","source":"Src: mysrc","types":\["text","text","text","text","blob","text","text"\],"where":"app_id=1 AND member_id = \d+ AND name = 'file'"}},{"tag":"image","attr":{"src":"{\\"link\\":\\"/data/1_binaries/\d+/data/` + hashImage + `\\",\\"title\\":\\"` + hashImage + `\\"}"}}\]`, }, } diff --git a/packages/template/funcs.go b/packages/template/funcs.go index 8e1966278..25e8f40a6 100644 --- a/packages/template/funcs.go +++ b/packages/template/funcs.go @@ -180,7 +180,8 @@ func init() { `Validate`: {tplFunc{validateTag, validateFull, `validate`, `*`}, false}, }} tails[`binary`] = forTails{map[string]tailInfo{ - `ById`: {tplFunc{tailTag, defaultTailFull, `id`, `id`}, false}, + `ById`: {tplFunc{tailTag, defaultTailFull, `id`, `id`}, false}, + `Ecosystem`: {tplFunc{tailTag, defaultTailFull, `ecosystem`, `ecosystem`}, false}, }} } @@ -1191,14 +1192,13 @@ func imageTag(par parFunc) string { func binaryTag(par parFunc) string { var ecosystemID string + + defaultTail(par, `binary`) if par.Node.Attr[`ecosystem`] != nil { ecosystemID = par.Node.Attr[`ecosystem`].(string) } else { ecosystemID = (*par.Workspace.Vars)[`ecosystem_id`] } - - defaultTail(par, `binary`) - binary := &model.Binary{} binary.SetTablePrefix(ecosystemID) From 1978126117371ebb0a4aa7da437b428cd3c37f5f Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Thu, 12 Jul 2018 09:36:23 +0500 Subject: [PATCH 153/169] Feature/975 read (#428) * Delete unused functions from parsers/common.go * Delete unused fields from parser * Move rollback to separate package * Move entities to separate files, phase 1 * Eliminate parser/common_tx_parser, move all to parser/db.go * eliminate common_ stuff in parsers, move all functions to block.go and transaction.go, move all to object-oriented style, rename parser to transaction * divide block and transaction to separate packages * Fix bug with passing nil pointer, while parsing StructTransaction, regularTransactions doest not needed anymore * Delete unecessary fields in tx struct * Fix transaction GetLogger method * some renaming and move filling txData to separate method * Added read checking * Added ReadPerm * Removed Println * Fixed ContactConditions loop (#429) * add content of default page to system_parameters * remove default_page from roles * move updating system parameters before creating default_page * Added read checking * Added ReadPerm * Removed Println * Merged develop * Fixed AccessColumns * Fixed 1_tables permission * 
feature/965-money (#424) * Fixed CreateEcosystem * Fixed Money template func * feature/958-column (#414) * Fixed checking column name * Fixed checkColumnName * Fixed recursion * Fixed ContactConditions loop (#429) * remove default_page from roles * Fixed Money template func * Changed insert pernission (#435) * feature/964 bigsize (#423) * add batch insert * test and small fixes * add batch insert * test and small fixes * requested changes * change platform founder to user wallet * Fixed CreateEcosystem * add batch insert * test and small fixes * requested changes * Added test * Added checking size limit in prepare * feature/940 history (#412) * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * move changes * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * fix rebase errors * vendoring supervisord * change update permissions for notifications table * Fixed changing schema of system_parameters table * Added GetPageHistory * add reles_access for 'Apla Consensus asbl' * Added GetMenuHistory * Added GetContractHistory * Added history template * Added block history * Added Source to template funcs * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * move changes * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * move changes * separate routes by vde * separate vde migration to own package * temporary commit * temporary commit * fix login * move changes * temporary commit * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * fix rebase errors * vendoring supervisord * change founder account to user account on adding role_participant * Revert "change founder account to user account on adding role_participant" This reverts commit c13fd44ec482ca2a789983b78c42df83500a1ddc. * Added BOM checking (#406) * feature/887-doublecontract (#407) * Fixed redefining contracts * change update permissions for notifications table * Fixed changing schema of system_parameters table * add reles_access for 'Apla Consensus asbl' * change founder account to user account on adding role_participant * Revert "change founder account to user account on adding role_participant" This reverts commit c13fd44ec482ca2a789983b78c42df83500a1ddc. 
* Fixed redefining contracts * Fixed CreateEcosystem (#419) * Merge develop * Merge develop * Merge develop * Added GetContractHistory * Merge develop * Added block history * Added Source to template funcs * change platform founder to user wallet * feature/919 include (#405) * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * Added macro to include * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * move changes * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * fix rebase errors * vendoring supervisord * Fixed query * change update permissions for notifications table * Fixed changing schema of system_parameters table * add reles_access for 'Apla Consensus asbl' * move changes * setup vde mode for vm in default handler * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * move changes * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * move changes * separate routes by vde * separate vde migration to own package * temporary commit * temporary commit * fix login * move changes * temporary commit * separate routes by vde * separate vde migration to own package * temp commit * temporary commit * temporary commit * fix login * temporary commit * temp commit * remove fmt from login api handlers * add drop db function * fix manager * fix rebase errors * vendoring supervisord * change founder account to user account on adding role_participant * Revert "change founder account to user account on adding role_participant" This reverts commit c13fd44ec482ca2a789983b78c42df83500a1ddc. * Added macro to include * Fixed query * Added BOM checking (#406) * feature/887-doublecontract (#407) * Fixed redefining contracts * change update permissions for notifications table * Fixed changing schema of system_parameters table * add reles_access for 'Apla Consensus asbl' * change founder account to user account on adding role_participant * Revert "change founder account to user account on adding role_participant" This reverts commit c13fd44ec482ca2a789983b78c42df83500a1ddc. 
* Fixed redefining contracts * Fixed CreateEcosystem (#419) * Added macro to include * Fixed query * Fixed db query in include * Fixed dbquery in include * feature/958-column (#414) * Fixed checking column name * Fixed checkColumnName * Fixed recursion * Fixed ContactConditions loop (#429) * remove default_page from roles * Added test * Added checking size limit in prepare * Fixed dot in money value (#434) * Fixed not-latin in table and column names (#433) * delete bad txes * add tx_hash to log * delete check dup * add content of default page to system_parameters * move updating system parameters before creating default_page * delete bad txes * add tx_hash to log * delete check dup * merge with refactored logic * feature/1002 history (#436) * Added GetHistoryRow func * Added RollbackId to template history * feature/990-shutdown (#432) * remove default_page from roles * Fixed loop conditions * Fixed loop in system funcs * Fixed loop conditions * Fixed loop in system funcs * Fixed ContractAccess * feature/1004-desc (#437) * Fixed predefined column names * Fixed PrepareColumns * Fixed DelayedContract * Fixed forsign bug when data is empty * Added read checking * Added ReadPerm * Removed Println * Merged develop * Added ReadPerm * Removed Println * Fixed AccessColumns * Fixed 1_tables permission --- packages/api/read_test.go | 90 ++++++++--------- .../migration/first_ecosys_contracts_data.go | 4 + packages/smart/funcs.go | 22 ++--- packages/smart/smart.go | 97 +++++++++++-------- packages/template/funcs.go | 16 +-- 5 files changed, 121 insertions(+), 108 deletions(-) diff --git a/packages/api/read_test.go b/packages/api/read_test.go index d2f577d31..959983aac 100644 --- a/packages/api/read_test.go +++ b/packages/api/read_test.go @@ -27,27 +27,21 @@ import ( func TestRead(t *testing.T) { var ( - err error - ret vdeCreateResult retCont contentResult ) assert.NoError(t, keyLogin(1)) - if err = sendPost(`vde/create`, nil, &ret); err != nil && - err.Error() != `400 {"error": "E_VDECREATED", "msg": "Virtual Dedicated Ecosystem is already created" }` { - t.Error(err) - return - } name := randName(`tbl`) - form := url.Values{"vde": {`true`}, "Name": {name}, "Columns": {`[{"name":"my","type":"varchar", "index": "1", + form := url.Values{"Name": {name}, "ApplicationId": {`1`}, + "Columns": {`[{"name":"my","type":"varchar", "index": "1", "conditions":"true"}, {"name":"amount", "type":"number","index": "0", "conditions":"{\"update\":\"true\", \"read\":\"true\"}"}, {"name":"active", "type":"character","index": "0", "conditions":"{\"update\":\"true\", \"read\":\"false\"}"}]`}, "Permissions": {`{"insert": "true", "update" : "true", "read": "true", "new_column": "true"}`}} assert.NoError(t, postTx(`NewTable`, &form)) - contFill := fmt.Sprintf(`contract %s { + contList := []string{`contract %s { action { DBInsert("%[1]s", "my,amount", "Alex", 100 ) DBInsert("%[1]s", "my,amount", "Alex 2", 13300 ) @@ -56,51 +50,48 @@ func TestRead(t *testing.T) { DBInsert("%[1]s", "my,amount", "John Mike", 0 ) DBInsert("%[1]s", "my,amount", "Serena Martin", 777 ) } - } - - contract Get%[1]s { + }`, + `contract Get%s { action { var row array row = DBFind("%[1]s").Where("id>= ? and id<= ?", 2, 5) } - } - - contract GetOK%[1]s { + }`, + `contract GetOK%s { action { var row array row = DBFind("%[1]s").Columns("my,amount").Where("id>= ? and id<= ?", 2, 5) } - } - - contract GetData%[1]s { + }`, + `contract GetData%s { action { var row array row = DBFind("%[1]s").Columns("active").Where("id>= ? 
and id<= ?", 2, 5) } + }`, + `func ReadFilter%s bool { + var i int + var row map + while i < Len($data) { + row = $data[i] + if i == 1 || i == 3 { + row["my"] = "No name" + $data[i] = row + } + i = i+ 1 + } + return true + }`, } - - func ReadFilter%[1]s bool { - var i int - var row map - while i < Len($data) { - row = $data[i] - if i == 1 || i == 3 { - row["my"] = "No name" - $data[i] = row - } - i = i+ 1 - } - return true + for _, contract := range contList { + form = url.Values{"Value": {fmt.Sprintf(contract, name)}, "ApplicationId": {`1`}, + "Conditions": {`true`}} + assert.NoError(t, postTx(`NewContract`, &form)) } - `, name) - form = url.Values{"Value": {contFill}, - "Conditions": {`true`}, "vde": {`true`}} - assert.NoError(t, postTx(`NewContract`, &form)) - assert.NoError(t, postTx(name, &url.Values{"vde": {`true`}})) + assert.NoError(t, postTx(name, &url.Values{})) - assert.EqualError(t, postTx(`GetData`+name, &url.Values{"vde": {`true`}}), `500 {"error": "E_SERVER", "msg": "{\"type\":\"panic\",\"error\":\"Access denied\"}" }`) - - assert.NoError(t, sendPost(`content`, &url.Values{`vde`: {`true`}, `template`: { + assert.EqualError(t, postTx(`GetData`+name, &url.Values{}), `{"type":"panic","error":"Access denied"}`) + assert.NoError(t, sendPost(`content`, &url.Values{`template`: { `DBFind(` + name + `, src).Limit(2)`}}, &retCont)) if strings.Contains(RawToString(retCont.Tree), `active`) { @@ -108,23 +99,28 @@ func TestRead(t *testing.T) { return } - assert.NoError(t, postTx(`GetOK`+name, &url.Values{"vde": {`true`}})) + assert.NoError(t, postTx(`GetOK`+name, &url.Values{})) - assert.NoError(t, postTx(`EditColumn`, &url.Values{"vde": {`true`}, `TableName`: {name}, `Name`: {`active`}, + assert.NoError(t, postTx(`EditColumn`, &url.Values{`TableName`: {name}, `Name`: {`active`}, `Permissions`: {`{"update":"true", "read":"ContractConditions(\"MainCondition\")"}`}})) - assert.NoError(t, postTx(`Get`+name, &url.Values{"vde": {`true`}})) + assert.NoError(t, postTx(`Get`+name, &url.Values{})) + + form = url.Values{"Name": {name}, "InsertPerm": {`ContractConditions("MainCondition")`}, + "UpdatePerm": {"true"}, "ReadPerm": {`false`}, "NewColumnPerm": {`true`}} + assert.NoError(t, postTx(`EditTable`, &form)) + assert.EqualError(t, postTx(`GetOK`+name, &url.Values{}), `{"type":"panic","error":"Access denied"}`) - form = url.Values{"Name": {name}, "vde": {`true`}, - "Permissions": {`{"insert": "ContractConditions(\"MainCondition\")", - "update" : "true", "filter": "ReadFilter` + name + `()", "new_column": "ContractConditions(\"MainCondition\")"}`}} + form = url.Values{"Name": {name}, "InsertPerm": {`ContractConditions("MainCondition")`}, + "UpdatePerm": {"true"}, "FilterPerm": {`ReadFilter` + name + `()`}, + "NewColumnPerm": {`ContractConditions("MainCondition")`}} assert.NoError(t, postTx(`EditTable`, &form)) var tableInfo tableResult - assert.NoError(t, sendGet(`table/`+name+`?vde=true`, nil, &tableInfo)) + assert.NoError(t, sendGet(`table/`+name, nil, &tableInfo)) assert.Equal(t, `ReadFilter`+name+`()`, tableInfo.Filter) - assert.NoError(t, sendPost(`content`, &url.Values{`vde`: {`true`}, `template`: { + assert.NoError(t, sendPost(`content`, &url.Values{`template`: { `DBFind(` + name + `, src).Limit(2)`}}, &retCont)) if !strings.Contains(RawToString(retCont.Tree), `No name`) { t.Errorf(`wrong tree %s`, RawToString(retCont.Tree)) diff --git a/packages/migration/first_ecosys_contracts_data.go b/packages/migration/first_ecosys_contracts_data.go index 4d5c85b40..2862bdbd0 100644 --- 
a/packages/migration/first_ecosys_contracts_data.go +++ b/packages/migration/first_ecosys_contracts_data.go @@ -337,6 +337,7 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { InsertPerm string UpdatePerm string NewColumnPerm string + ReadPerm string "optional" } conditions { @@ -354,6 +355,9 @@ VALUES ('2', 'DelApplication', 'contract DelApplication { permissions["insert"] = $InsertPerm permissions["update"] = $UpdatePerm permissions["new_column"] = $NewColumnPerm + if $ReadPerm { + permissions["read"] = $ReadPerm + } $Permissions = permissions TableConditions($Name, "", JSONEncode($Permissions)) } diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 3598509dd..ec16b145c 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -807,17 +807,17 @@ func DBSelect(sc *SmartContract, tblname string, columns string, id int64, order ecosystem = sc.TxSmart.EcosystemID } tblname = GetTableName(sc, tblname, ecosystem) - if sc.VDE { - perm, err = sc.AccessTablePerm(tblname, `read`) - if err != nil { - return 0, nil, err - } - cols := strings.Split(columns, `,`) - if err = sc.AccessColumns(tblname, &cols, false); err != nil { - return 0, nil, err - } - columns = strings.Join(cols, `,`) + + perm, err = sc.AccessTablePerm(tblname, `read`) + if err != nil { + return 0, nil, err } + colsList := strings.Split(columns, `,`) + if err = sc.AccessColumns(tblname, &colsList, false); err != nil { + return 0, nil, err + } + columns = strings.Join(colsList, `,`) + columns = PrepareColumns(columns) rows, err = model.GetDB(sc.DbTransaction).Table(tblname).Select(columns).Where(where, params...).Order(order). Offset(offset).Limit(limit).Rows() @@ -854,7 +854,7 @@ func DBSelect(sc *SmartContract, tblname string, columns string, id int64, order } result = append(result, reflect.ValueOf(row).Interface()) } - if sc.VDE && perm != nil && len(perm[`filter`]) > 0 { + if perm != nil && len(perm[`filter`]) > 0 { fltResult, err := VMEvalIf(sc.VM, perm[`filter`], uint32(sc.TxSmart.EcosystemID), &map[string]interface{}{ `data`: result, `original_contract`: ``, `this_contract`: ``, diff --git a/packages/smart/smart.go b/packages/smart/smart.go index 1a597d81a..d20ac1515 100644 --- a/packages/smart/smart.go +++ b/packages/smart/smart.go @@ -586,9 +586,9 @@ func (sc *SmartContract) AccessTablePerm(table, action string) (map[string]strin tablePermission map[string]string ) logger := sc.GetLogger() - + isRead := action == `read` if table == getDefTableName(sc, `parameters`) || table == getDefTableName(sc, `app_params`) { - if sc.TxSmart.KeyID == converter.StrToInt64(EcosysParam(sc, `founder_account`)) { + if isRead || sc.TxSmart.KeyID == converter.StrToInt64(EcosysParam(sc, `founder_account`)) { return tablePermission, nil } logger.WithFields(log.Fields{"type": consts.AccessDenied}).Error("Access denied") @@ -599,6 +599,9 @@ func (sc *SmartContract) AccessTablePerm(table, action string) (map[string]strin logger.WithFields(log.Fields{"table": table, "error": err, "type": consts.DBError}).Error("checking custom table") return tablePermission, err } else if !isCustom { + if isRead { + return tablePermission, nil + } return tablePermission, fmt.Errorf(table + ` is not a custom table`) } @@ -643,11 +646,6 @@ func getPermColumns(input string) (perm permColumn, err error) { return } -type colAccess struct { - ok bool - original string -} - // AccessColumns checks access rights to the columns func (sc *SmartContract) AccessColumns(table string, columns *[]string, update bool) error { logger := 
sc.GetLogger() @@ -672,65 +670,80 @@ func (sc *SmartContract) AccessColumns(table string, columns *[]string, update b return err } if !found { + if !update { + return nil + } return fmt.Errorf(eTableNotFound, table) } var cols map[string]string - // Every item of checkColumns has 'ok' boolean value. If it equals false then the key-column - // doesn't have read/update access rights. - checkColumns := make(map[string]colAccess) err = json.Unmarshal([]byte(tables.Columns), &cols) if err != nil { logger.WithFields(log.Fields{"type": consts.JSONUnmarshallError, "error": err}).Error("getting table columns") return err } + colNames := make([]string, 0, len(*columns)) for _, col := range *columns { - colname := converter.Sanitize(col, `*->`) - if strings.Contains(colname, `->`) { - colname = colname[:strings.Index(colname, `->`)] - } - if !update && colname == `*` { + if col == `*` { for column := range cols { - checkColumns[column] = colAccess{true, column} + colNames = append(colNames, column) } - break - } - checkColumns[colname] = colAccess{true, colname} - } - _, isall := checkColumns[`*`] - for column, cond := range cols { - if ca, ok := checkColumns[column]; (!ok || !ca.ok) && !isall { continue } - perm, err := getPermColumns(cond) - if err != nil { - logger.WithFields(log.Fields{"type": consts.InvalidObject, "error": err}).Error("getting access columns") - return err + colNames = append(colNames, col) + } + + colList := make([]string, len(colNames)) + for i, col := range colNames { + colname := converter.Sanitize(col, `->`) + if strings.Contains(colname, `->`) { + colname = colname[:strings.Index(colname, `->`)] } - if update { - cond = perm.Update - } else { - cond = perm.Read + colList[i] = colname + } + checked := make(map[string]bool) + var notaccess bool + for i, name := range colList { + if status, ok := checked[name]; ok { + if !status { + colList[i] = `` + } + continue } + cond := cols[name] if len(cond) > 0 { - ret, err := sc.EvalIf(cond) + perm, err := getPermColumns(cond) if err != nil { - logger.WithFields(log.Fields{"condition": cond, "column": column, - "type": consts.EvalError}).Error("evaluating condition") + logger.WithFields(log.Fields{"type": consts.InvalidObject, "error": err}).Error("getting access columns") return err } - if !ret { - if update { - return errAccessDenied + if update { + cond = perm.Update + } else { + cond = perm.Read + } + if len(cond) > 0 { + ret, err := sc.EvalIf(cond) + if err != nil { + logger.WithFields(log.Fields{"condition": cond, "column": name, + "type": consts.EvalError}).Error("evaluating condition") + return err + } + checked[name] = ret + if !ret { + if update { + return errAccessDenied + } + colList[i] = `` + notaccess = true } - checkColumns[column] = colAccess{false, ``} } } } - if !update { + if !update && notaccess { retColumn := make([]string, 0) - for key, val := range checkColumns { - if val.ok && key != `*` { - retColumn = append(retColumn, val.original) + for i, val := range colList { + if val != `` { + retColumn = append(retColumn, colNames[i]) } } if len(retColumn) == 0 { diff --git a/packages/template/funcs.go b/packages/template/funcs.go index 25e8f40a6..3bd679113 100644 --- a/packages/template/funcs.go +++ b/packages/template/funcs.go @@ -596,6 +596,13 @@ func dbfindTag(par parFunc) string { } columnNames := make([]string, 0) + fieldsList := strings.Split(fields, ",") + perm, err = sc.AccessTablePerm(tblname, `read`) + if err != nil || sc.AccessColumns(tblname, &fieldsList, false) != nil { + return `Access denied` + } + 
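With the AccessColumns rewrite above, read access and update access diverge: on update, a single column whose condition evaluates to false aborts the whole operation with `errAccessDenied`, while on read the forbidden columns are simply dropped from the selected list. A standalone sketch of that drop-versus-deny behavior, assuming a plain map of pre-evaluated column permissions instead of the contract-condition evaluation the real code performs:

```go
package main

import (
	"errors"
	"fmt"
)

var errAccessDenied = errors.New("Access denied")

// filterColumns models the new AccessColumns semantics: on update a single
// forbidden column denies the whole operation; on read forbidden columns are
// silently removed.
func filterColumns(cols []string, allowed map[string]bool, update bool) ([]string, error) {
	kept := make([]string, 0, len(cols))
	for _, c := range cols {
		if allowed[c] {
			kept = append(kept, c)
			continue
		}
		if update {
			return nil, errAccessDenied
		}
	}
	// Assumption, suggested by the len(retColumn) == 0 check above: a read
	// that filters out every requested column is still treated as denial.
	if !update && len(kept) == 0 {
		return nil, errAccessDenied
	}
	return kept, nil
}

func main() {
	perms := map[string]bool{"id": true, "amount": true, "active": false}
	fmt.Println(filterColumns([]string{"id", "amount", "active"}, perms, false)) // [id amount] <nil>
	fmt.Println(filterColumns([]string{"active"}, perms, true))                  // [] Access denied
}
```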
fields = strings.Join(fieldsList, `,`) + if fields != "*" { if !strings.Contains(fields, "id") { fields += ",id" @@ -610,13 +617,6 @@ func dbfindTag(par parFunc) string { } } - if sc.VDE { - perm, err = sc.AccessTablePerm(tblname, `read`) - if err != nil || sc.AccessColumns(tblname, &queryColumns, false) != nil { - return `Access denied` - } - } - for i, col := range queryColumns { switch columnTypes[col] { case "bytea": @@ -736,7 +736,7 @@ func dbfindTag(par parFunc) string { } data = append(data, row) } - if sc.VDE && perm != nil && len(perm[`filter`]) > 0 { + if perm != nil && len(perm[`filter`]) > 0 { result := make([]interface{}, len(data)) for i, item := range data { row := make(map[string]string) From 52f6fa782ed246b385b37094733c7da93ea060ae Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Thu, 12 Jul 2018 11:50:27 +0500 Subject: [PATCH 154/169] feature/1011 hash (#440) * Fixed initVars * Added params * Fixed content/hash * Fixed test * Added multipartBuf --- packages/api/api.go | 6 +++-- packages/api/content.go | 34 ++++++++++++++++++++++++----- packages/api/template_test.go | 41 ++++++++++++++++++++++++++++++----- 3 files changed, 68 insertions(+), 13 deletions(-) diff --git a/packages/api/api.go b/packages/api/api.go index 9e55102aa..7ae5629a3 100644 --- a/packages/api/api.go +++ b/packages/api/api.go @@ -42,8 +42,9 @@ import ( ) const ( - jwtPrefix = "Bearer " - jwtExpire = 36000 // By default, seconds + jwtPrefix = "Bearer " + jwtExpire = 36000 // By default, seconds + multipartBuf = 100000 // the buffer size for ParseMultipartForm ) type apiData struct { @@ -163,6 +164,7 @@ func getHeader(txName string, data *apiData) (tx.Header, error) { // DefaultHandler is a common handle function for api requests func DefaultHandler(method, pattern string, params map[string]int, handlers ...apiHandle) hr.Handle { return hr.Handle(func(w http.ResponseWriter, r *http.Request, ps hr.Params) { + r.ParseMultipartForm(multipartBuf) counterName := statsd.APIRouteCounterName(method, pattern) statsd.Client.Inc(counterName+statsd.Count, 1, 1.0) startTime := time.Now() diff --git a/packages/api/content.go b/packages/api/content.go index 02b1d9198..1bf53c5ae 100644 --- a/packages/api/content.go +++ b/packages/api/content.go @@ -56,12 +56,34 @@ func initVars(r *http.Request, data *apiData) *map[string]string { vars[name] = r.FormValue(name) } vars[`_full`] = `0` - vars[`ecosystem_id`] = converter.Int64ToStr(data.ecosystemId) - vars[`key_id`] = converter.Int64ToStr(data.keyId) - vars[`isMobile`] = data.isMobile - vars[`role_id`] = converter.Int64ToStr(data.roleId) - vars[`ecosystem_name`] = data.ecosystemName - + if data.keyId != 0 { + vars[`ecosystem_id`] = converter.Int64ToStr(data.ecosystemId) + vars[`key_id`] = converter.Int64ToStr(data.keyId) + vars[`isMobile`] = data.isMobile + vars[`role_id`] = converter.Int64ToStr(data.roleId) + vars[`ecosystem_name`] = data.ecosystemName + } else { + vars[`ecosystem_id`] = vars[`ecosystem`] + if len(vars[`keyID`]) > 0 { + vars[`key_id`] = vars[`keyID`] + } else { + vars[`key_id`] = `0` + } + if len(vars[`roleID`]) > 0 { + vars[`role_id`] = vars[`roleID`] + } else { + vars[`role_id`] = `0` + } + if len(vars[`isMobile`]) == 0 { + vars[`isMobile`] = `0` + } + if len(vars[`ecosystem_id`]) != 0 { + ecosystems := model.Ecosystem{} + if found, _ := ecosystems.Get(converter.StrToInt64(vars[`ecosystem_id`])); found { + vars[`ecosystem_name`] = ecosystems.Name + } + } + } if _, ok := vars[`lang`]; !ok { vars[`lang`] = r.Header.Get(`Accept-Language`) } diff --git 
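The initVars change above lets `content/hash` render a page without a session: when there is no authorized key behind the request, `ecosystem_id`, `key_id`, `role_id` and `isMobile` are taken from the `ecosystem`, `keyID`, `roleID` and `isMobile` request parameters, and the ecosystem name is looked up from the database. A hedged usage sketch with Go's standard HTTP client; the host, port and `/api/v2` prefix are assumptions about the deployment, not taken from the patch:

```go
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

func main() {
	// Ask for the hash of a page as an anonymous caller, passing the
	// identity explicitly; the handler falls back to these form values
	// because the request carries no session.
	form := url.Values{
		"ecosystem": {"1"},
		"keyID":     {"-1186131207746635133"}, // hypothetical key id
		"roleID":    {"0"},
	}
	resp, err := http.PostForm("http://localhost:7079/api/v2/content/hash/default_page", form)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := ioutil.ReadAll(resp.Body)
	fmt.Println(string(body))
}
```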
a/packages/api/template_test.go b/packages/api/template_test.go index 8ef880f23..af0b74e7e 100644 --- a/packages/api/template_test.go +++ b/packages/api/template_test.go @@ -37,19 +37,50 @@ type tplItem struct { type tplList []tplItem func TestAPI(t *testing.T) { - var ret contentResult - var retHash hashResult - err := sendPost(`content/hash/default_page`, &url.Values{}, &retHash) - if err != nil { + var ( + ret contentResult + retHash, retHash2 hashResult + err error + msg string + ) + + if err := keyLogin(1); err != nil { t.Error(err) return } + name := randName(`page`) + value := `Div(,#ecosystem_id#) + Div(,#key_id#) + Div(,#role_id#) + Div(,#isMobile#)` + form := url.Values{"Name": {name}, "Value": {value}, "ApplicationId": {`1`}, + "Menu": {`default_menu`}, "Conditions": {"ContractConditions(`MainCondition`)"}} + assert.NoError(t, postTx(`NewPage`, &form)) + + assert.NoError(t, sendPost(`content/hash/`+name, &url.Values{}, &retHash)) if len(retHash.Hash) != 64 { t.Error(`wrong hash ` + retHash.Hash) return } + form = url.Values{"Name": {name}, "Value": {`contract ` + name + ` { + action { + $result = $key_id + }}`}, "ApplicationId": {`1`}, "Conditions": {`ContractConditions("MainCondition")`}} + assert.NoError(t, postTx("NewContract", &form)) + _, msg, err = postTxResult(name, &url.Values{}) + assert.NoError(t, err) - if err = keyLogin(1); err != nil { + gAddress = `` + gPrivate = `` + gPublic = `` + gAuth = `` + assert.NoError(t, sendPost(`content/hash/`+name, &url.Values{`ecosystem`: {`1`}, `keyID`: {msg}, `roleID`: {`0`}}, + &retHash2)) + if retHash.Hash != retHash2.Hash { + t.Error(`Wrong hash`) + return + } + if err := keyLogin(1); err != nil { t.Error(err) return } From aa0b192dc71fdfbb0f9596cbf2fc60c0e4e6d552 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 12 Jul 2018 15:21:14 +0300 Subject: [PATCH 155/169] add func MemoryLeak that allow drop node --- packages/smart/funcs.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 2c7179d53..736797a13 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -256,6 +256,7 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { "GetBlockHistory": GetBlockHistory, "GetMenuHistory": GetMenuHistory, "GetContractHistory": GetContractHistory, + "MemoryLeak": MemoryLeak, } switch vt { @@ -1842,6 +1843,10 @@ func GetPageHistory(sc *SmartContract, id int64) ([]interface{}, error) { return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `pages`, id) } +func MemoryLeak(sc *SmartContract) error { + MemoryLeak(sc) + return nil +} func GetMenuHistory(sc *SmartContract, id int64) ([]interface{}, error) { return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `menu`, id) } From 98b3d0fbb144855a46746e1459b0fc829a8773be Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 12 Jul 2018 15:22:07 +0300 Subject: [PATCH 156/169] fix attempts count to mark as used, add constant for attempt tx count --- packages/model/transaction.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/packages/model/transaction.go b/packages/model/transaction.go index 7612ef605..e040e439e 100644 --- a/packages/model/transaction.go +++ b/packages/model/transaction.go @@ -151,8 +151,7 @@ func (t *Transaction) Create() error { // IncrementTxAttemptCount increases attempt column func IncrementTxAttemptCount(transaction *DbTransaction, transactionHash []byte) (int64, error) { - query := GetDB(transaction).Exec("update transactions set attempt=attempt+1, used = case when 
attempt>10 then 1 else 0 end where hash = ?", - transactionHash) + query := GetDB(transaction).Exec("update transactions set attempt=attempt+1, used = case when attempt >= ? then 1 else 0 end where hash = ?", consts.MaxTXAttempt-1, transactionHash) return query.RowsAffected, query.Error } From 8e18ccf792d220d9b64349be94d3f8ec6e526cef Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 12 Jul 2018 17:28:49 +0300 Subject: [PATCH 157/169] 1016 xlsm funcs (#445) * add excelize to govendor * add funcs for work with excel files * add deepcopy to govendor * Added GetJSONFromExcel GetRowsCountExcel * Renamed GetJSONFromExcel to GetDataFromExcel * Renamed Excel to XLSX * Added checking endLine --- packages/smart/builtin_excel.go | 63 + packages/smart/funcs.go | 2 + .../excelize/CODE_OF_CONDUCT.md | 46 + .../excelize/CONTRIBUTING.md | 375 +++ .../360EntSecGroup-Skylar/excelize/LICENSE | 29 + .../360EntSecGroup-Skylar/excelize/README.md | 175 ++ .../excelize/README_zh.md | 175 ++ .../360EntSecGroup-Skylar/excelize/cell.go | 589 ++++ .../360EntSecGroup-Skylar/excelize/chart.go | 1165 +++++++ .../360EntSecGroup-Skylar/excelize/col.go | 366 +++ .../360EntSecGroup-Skylar/excelize/comment.go | 251 ++ .../360EntSecGroup-Skylar/excelize/date.go | 141 + .../excelize/excelize.go | 403 +++ .../excelize/excelize.png | Bin 0 -> 54188 bytes .../360EntSecGroup-Skylar/excelize/file.go | 93 + .../360EntSecGroup-Skylar/excelize/hsl.go | 141 + .../360EntSecGroup-Skylar/excelize/lib.go | 166 + .../360EntSecGroup-Skylar/excelize/logo.png | Bin 0 -> 4208 bytes .../360EntSecGroup-Skylar/excelize/picture.go | 481 +++ .../360EntSecGroup-Skylar/excelize/rows.go | 461 +++ .../360EntSecGroup-Skylar/excelize/shape.go | 419 +++ .../360EntSecGroup-Skylar/excelize/sheet.go | 661 ++++ .../360EntSecGroup-Skylar/excelize/sheetpr.go | 139 + .../excelize/sheetview.go | 152 + .../360EntSecGroup-Skylar/excelize/styles.go | 2751 +++++++++++++++++ .../360EntSecGroup-Skylar/excelize/table.go | 451 +++ .../excelize/templates.go | 31 + .../excelize/vmlDrawing.go | 135 + .../excelize/xmlChart.go | 612 ++++ .../excelize/xmlComments.go | 55 + .../excelize/xmlContentTypes.go | 26 + .../excelize/xmlDecodeDrawing.go | 187 ++ .../excelize/xmlDrawing.go | 388 +++ .../excelize/xmlSharedStrings.go | 46 + .../excelize/xmlStyles.go | 356 +++ .../excelize/xmlTable.go | 205 ++ .../excelize/xmlTheme.go | 140 + .../excelize/xmlWorkbook.go | 282 ++ .../excelize/xmlWorksheet.go | 573 ++++ vendor/github.com/kardianos/osext/LICENSE | 27 + vendor/github.com/kardianos/osext/README.md | 21 + vendor/github.com/kardianos/osext/osext.go | 33 + .../github.com/kardianos/osext/osext_go18.go | 9 + .../github.com/kardianos/osext/osext_plan9.go | 22 + .../kardianos/osext/osext_procfs.go | 36 + .../kardianos/osext/osext_sysctl.go | 126 + .../kardianos/osext/osext_windows.go | 36 + vendor/github.com/mohae/deepcopy/LICENSE | 21 + vendor/github.com/mohae/deepcopy/README.md | 8 + vendor/github.com/mohae/deepcopy/deepcopy.go | 125 + .../rpoletaev/supervisord/types/comm-types.go | 33 + .../supervisord/xmlrpcclient/xml_processor.go | 109 + .../supervisord/xmlrpcclient/xmlrpc-client.go | 191 ++ vendor/github.com/sevlyar/go-daemon/LICENSE | 7 + vendor/github.com/sevlyar/go-daemon/README.md | 63 + .../github.com/sevlyar/go-daemon/command.go | 99 + vendor/github.com/sevlyar/go-daemon/daemon.go | 44 + .../sevlyar/go-daemon/daemon_stub.go | 52 + .../sevlyar/go-daemon/daemon_unix.go | 264 ++ .../github.com/sevlyar/go-daemon/lock_file.go | 109 + .../sevlyar/go-daemon/lock_file_stub.go 
| 11 + .../sevlyar/go-daemon/lock_file_unix.go | 23 + vendor/github.com/sevlyar/go-daemon/signal.go | 59 + .../sevlyar/go-daemon/syscall_dup.go | 12 + .../sevlyar/go-daemon/syscall_dup_arm64.go | 11 + vendor/vendor.json | 36 + 66 files changed, 14318 insertions(+) create mode 100644 packages/smart/builtin_excel.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/CODE_OF_CONDUCT.md create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/CONTRIBUTING.md create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/LICENSE create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/README.md create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/README_zh.md create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/cell.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/chart.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/col.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/comment.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/date.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/excelize.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/excelize.png create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/file.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/hsl.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/lib.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/logo.png create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/picture.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/rows.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/shape.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/sheet.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/sheetpr.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/sheetview.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/styles.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/table.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/templates.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/vmlDrawing.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/xmlChart.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/xmlComments.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/xmlContentTypes.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/xmlDecodeDrawing.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/xmlDrawing.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/xmlSharedStrings.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/xmlStyles.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/xmlTable.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/xmlTheme.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/xmlWorkbook.go create mode 100644 vendor/github.com/360EntSecGroup-Skylar/excelize/xmlWorksheet.go create mode 100644 vendor/github.com/kardianos/osext/LICENSE create mode 100644 vendor/github.com/kardianos/osext/README.md create mode 100644 vendor/github.com/kardianos/osext/osext.go create mode 100644 vendor/github.com/kardianos/osext/osext_go18.go create mode 100644 
vendor/github.com/kardianos/osext/osext_plan9.go create mode 100644 vendor/github.com/kardianos/osext/osext_procfs.go create mode 100644 vendor/github.com/kardianos/osext/osext_sysctl.go create mode 100644 vendor/github.com/kardianos/osext/osext_windows.go create mode 100644 vendor/github.com/mohae/deepcopy/LICENSE create mode 100644 vendor/github.com/mohae/deepcopy/README.md create mode 100644 vendor/github.com/mohae/deepcopy/deepcopy.go create mode 100644 vendor/github.com/rpoletaev/supervisord/types/comm-types.go create mode 100644 vendor/github.com/rpoletaev/supervisord/xmlrpcclient/xml_processor.go create mode 100644 vendor/github.com/rpoletaev/supervisord/xmlrpcclient/xmlrpc-client.go create mode 100644 vendor/github.com/sevlyar/go-daemon/LICENSE create mode 100644 vendor/github.com/sevlyar/go-daemon/README.md create mode 100644 vendor/github.com/sevlyar/go-daemon/command.go create mode 100644 vendor/github.com/sevlyar/go-daemon/daemon.go create mode 100644 vendor/github.com/sevlyar/go-daemon/daemon_stub.go create mode 100644 vendor/github.com/sevlyar/go-daemon/daemon_unix.go create mode 100644 vendor/github.com/sevlyar/go-daemon/lock_file.go create mode 100644 vendor/github.com/sevlyar/go-daemon/lock_file_stub.go create mode 100644 vendor/github.com/sevlyar/go-daemon/lock_file_unix.go create mode 100644 vendor/github.com/sevlyar/go-daemon/signal.go create mode 100644 vendor/github.com/sevlyar/go-daemon/syscall_dup.go create mode 100644 vendor/github.com/sevlyar/go-daemon/syscall_dup_arm64.go diff --git a/packages/smart/builtin_excel.go b/packages/smart/builtin_excel.go new file mode 100644 index 000000000..588deae24 --- /dev/null +++ b/packages/smart/builtin_excel.go @@ -0,0 +1,63 @@ +package smart + +import ( + "bytes" + + "github.com/GenesisKernel/go-genesis/packages/converter" + + xl "github.com/360EntSecGroup-Skylar/excelize" + "github.com/GenesisKernel/go-genesis/packages/model" + log "github.com/sirupsen/logrus" +) + +// GetDataFromXLSX returns json by parameters range +func GetDataFromXLSX(sc *SmartContract, binaryID, startLine, linesCount, sheetNum int64) (data []interface{}, err error) { + book, err := excelBookFromStoredBinary(sc, binaryID) + if err != nil || book == nil { + return nil, err + } + + sheetName := book.GetSheetName(int(sheetNum)) + rows := book.GetRows(sheetName) + endLine := startLine + linesCount + if endLine > int64(len(rows)) { + endLine = int64(len(rows)) + } + processedRows := []interface{}{} + for ; startLine < endLine; startLine++ { + var row []interface{} + for _, item := range rows[startLine] { + row = append(row, item) + } + processedRows = append(processedRows, row) + } + return processedRows, nil +} + +// GetRowsCountXLSX returns count of rows from excel file +func GetRowsCountXLSX(sc *SmartContract, binaryID, sheetNum int64) (int, error) { + book, err := excelBookFromStoredBinary(sc, binaryID) + if err != nil { + return -1, err + } + + sheetName := book.GetSheetName(int(sheetNum)) + rows := book.GetRows(sheetName) + return len(rows), nil +} + +func excelBookFromStoredBinary(sc *SmartContract, binaryID int64) (*xl.File, error) { + bin := &model.Binary{} + bin.SetTablePrefix(converter.Int64ToStr(sc.TxSmart.EcosystemID)) + found, err := bin.GetByID(binaryID) + if err != nil { + return nil, err + } + + if !found { + log.WithFields(log.Fields{"binary_id": binaryID}).Error("binary_id not found") + return nil, nil + } + + return xl.OpenReader(bytes.NewReader(bin.Data)) +} diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 
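The `builtin_excel.go` helpers above wrap the vendored excelize package: `GetRowsCountXLSX` reports how many rows a sheet holds, and `GetDataFromXLSX` returns the rows in a `[startLine, startLine+linesCount)` window clamped to the end of the sheet, with the workbook loaded from the ecosystem's `binaries` table. A standalone sketch of the same windowing over a workbook opened from a local file (the file name is hypothetical):

```go
package main

import (
	"fmt"

	xl "github.com/360EntSecGroup-Skylar/excelize"
)

// rowsWindow returns rows [start, start+count) of the sheet with the given
// index, clamped to the rows actually present, mirroring what GetDataFromXLSX
// does once the workbook has been loaded.
func rowsWindow(book *xl.File, sheetNum, start, count int) [][]string {
	rows := book.GetRows(book.GetSheetName(sheetNum))
	end := start + count
	if end > len(rows) {
		end = len(rows)
	}
	if start > end {
		start = end
	}
	return rows[start:end]
}

func main() {
	book, err := xl.OpenFile("report.xlsx") // hypothetical local file
	if err != nil {
		panic(err)
	}
	for _, row := range rowsWindow(book, 1, 0, 10) {
		fmt.Println(row)
	}
}
```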
ec16b145c..698de7d50 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -265,6 +265,8 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { "GetBlockHistoryRow": GetBlockHistoryRow, "GetMenuHistoryRow": GetMenuHistoryRow, "GetContractHistoryRow": GetContractHistoryRow, + "GetDataFromXLSX": GetDataFromXLSX, + "GetRowsCountXLSX": GetRowsCountXLSX, } switch vt { diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/CODE_OF_CONDUCT.md b/vendor/github.com/360EntSecGroup-Skylar/excelize/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..a84b47ff9 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/CODE_OF_CONDUCT.md @@ -0,0 +1,46 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at [xuri.me](https://xuri.me). The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. 
+ +Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/CONTRIBUTING.md b/vendor/github.com/360EntSecGroup-Skylar/excelize/CONTRIBUTING.md new file mode 100644 index 000000000..5239a9470 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/CONTRIBUTING.md @@ -0,0 +1,375 @@ +# Contributing to excelize + +Want to hack on excelize? Awesome! This page contains information about reporting issues as well as some tips and +guidelines useful to experienced open source contributors. Finally, make sure +you read our [community guidelines](#community-guidelines) before you +start participating. + +## Topics + +* [Reporting Security Issues](#reporting-security-issues) +* [Design and Cleanup Proposals](#design-and-cleanup-proposals) +* [Reporting Issues](#reporting-other-issues) +* [Quick Contribution Tips and Guidelines](#quick-contribution-tips-and-guidelines) +* [Community Guidelines](#community-guidelines) + +## Reporting security issues + +The excelize maintainers take security seriously. If you discover a security +issue, please bring it to their attention right away! + +Please **DO NOT** file a public issue, instead send your report privately to +[xuri.me](https://xuri.me). + +Security reports are greatly appreciated and we will publicly thank you for it. +We currently do not offer a paid security bounty program, but are not +ruling it out in the future. + +## Reporting other issues + +A great way to contribute to the project is to send a detailed report when you +encounter an issue. We always appreciate a well-written, thorough bug report, +and will thank you for it! + +Check that [our issue database](https://github.com/360EntSecGroup-Skylar/excelize/issues) +doesn't already include that problem or suggestion before submitting an issue. +If you find a match, you can use the "subscribe" button to get notified on +updates. Do *not* leave random "+1" or "I have this too" comments, as they +only clutter the discussion, and don't help resolving it. However, if you +have ways to reproduce the issue or have additional information that may help +resolving the issue, please leave a comment. + +When reporting issues, always include the output of `go env`. + +Also include the steps required to reproduce the problem if possible and +applicable. This information will help us review and fix your issue faster. +When sending lengthy log-files, consider posting them as a gist [https://gist.github.com](https://gist.github.com). +Don't forget to remove sensitive data from your logfiles before posting (you can +replace those parts with "REDACTED"). + +## Quick contribution tips and guidelines + +This section gives the experienced contributor some tips and guidelines. + +### Pull requests are always welcome + +Not sure if that typo is worth a pull request? Found a bug and know how to fix +it? Do it! We will appreciate it. Any significant improvement should be +documented as [a GitHub issue](https://github.com/360EntSecGroup-Skylar/excelize/issues) before +anybody starts working on it. 
+ +We are always thrilled to receive pull requests. We do our best to process them +quickly. If your pull request is not accepted on the first try, +don't get discouraged! + +### Design and cleanup proposals + +You can propose new designs for existing excelize features. You can also design +entirely new features. We really appreciate contributors who want to refactor or +otherwise cleanup our project. + +We try hard to keep excelize lean and focused. Excelize can't do everything for +everybody. This means that we might decide against incorporating a new feature. +However, there might be a way to implement that feature *on top of* excelize. + +### Conventions + +Fork the repository and make changes on your fork in a feature branch: + +* If it's a bug fix branch, name it XXXX-something where XXXX is the number of + the issue. +* If it's a feature branch, create an enhancement issue to announce + your intentions, and name it XXXX-something where XXXX is the number of the + issue. + +Submit unit tests for your changes. Go has a great test framework built in; use +it! Take a look at existing tests for inspiration. Run the full test on your branch before +submitting a pull request. + +Update the documentation when creating or modifying features. Test your +documentation changes for clarity, concision, and correctness, as well as a +clean documentation build. + +Write clean code. Universally formatted code promotes ease of writing, reading, +and maintenance. Always run `gofmt -s -w file.go` on each changed file before +committing your changes. Most editors have plug-ins that do this automatically. + +Pull request descriptions should be as clear as possible and include a reference +to all the issues that they address. + +### Successful Changes + +Before contributing large or high impact changes, make the effort to coordinate +with the maintainers of the project before submitting a pull request. This +prevents you from doing extra work that may or may not be merged. + +Large PRs that are just submitted without any prior communication are unlikely +to be successful. + +While pull requests are the methodology for submitting changes to code, changes +are much more likely to be accepted if they are accompanied by additional +engineering work. While we don't define this explicitly, most of these goals +are accomplished through communication of the design goals and subsequent +solutions. Often times, it helps to first state the problem before presenting +solutions. + +Typically, the best methods of accomplishing this are to submit an issue, +stating the problem. This issue can include a problem statement and a +checklist with requirements. If solutions are proposed, alternatives should be +listed and eliminated. Even if the criteria for elimination of a solution is +frivolous, say so. + +Larger changes typically work best with design documents. These are focused on +providing context to the design at the time the feature was conceived and can +inform future documentation contributions. + +### Commit Messages + +Commit messages must start with a capitalized and short summary +written in the imperative, followed by an optional, more detailed explanatory +text which is separated from the summary by an empty line. + +Commit messages should follow best practices, including explaining the context +of the problem and how it was solved, including in caveats or follow up changes +required. They should tell the story of the change and provide readers +understanding of what led to it. 
+ +In practice, the best approach to maintaining a nice commit message is to +leverage a `git add -p` and `git commit --amend` to formulate a solid +changeset. This allows one to piece together a change, as information becomes +available. + +If you squash a series of commits, don't just submit that. Re-write the commit +message, as if the series of commits was a single stroke of brilliance. + +That said, there is no requirement to have a single commit for a PR, as long as +each commit tells the story. For example, if there is a feature that requires a +package, it might make sense to have the package in a separate commit then have +a subsequent commit that uses it. + +Remember, you're telling part of the story with the commit message. Don't make +your chapter weird. + +### Review + +Code review comments may be added to your pull request. Discuss, then make the +suggested modifications and push additional commits to your feature branch. Post +a comment after pushing. New commits show up in the pull request automatically, +but the reviewers are notified only when you comment. + +Pull requests must be cleanly rebased on top of master without multiple branches +mixed into the PR. + +**Git tip**: If your PR no longer merges cleanly, use `rebase master` in your +feature branch to update your pull request rather than `merge master`. + +Before you make a pull request, squash your commits into logical units of work +using `git rebase -i` and `git push -f`. A logical unit of work is a consistent +set of patches that should be reviewed together: for example, upgrading the +version of a vendored dependency and taking advantage of its now available new +feature constitute two separate units of work. Implementing a new function and +calling it in another file constitute a single logical unit of work. The very +high majority of submissions should have a single commit, so if in doubt: squash +down to one. + +After every commit, make sure the test passes. Include documentation +changes in the same pull request so that a revert would remove all traces of +the feature or fix. + +Include an issue reference like `Closes #XXXX` or `Fixes #XXXX` in commits that +close an issue. Including references automatically closes the issue on a merge. + +Please see the [Coding Style](#coding-style) for further guidelines. + +### Merge approval + +The excelize maintainers use LGTM (Looks Good To Me) in comments on the code review to +indicate acceptance. + +### Sign your work + +The sign-off is a simple line at the end of the explanation for the patch. Your +signature certifies that you wrote the patch or otherwise have the right to pass +it on as an open-source patch. The rules are pretty simple: if you can certify +the below (from [developercertificate.org](http://developercertificate.org/)): + +```text +Developer Certificate of Origin +Version 1.1 + +Copyright (C) 2004, 2006 The Linux Foundation and its contributors. +1 Letterman Drive +Suite D4700 +San Francisco, CA, 94129 + +Everyone is permitted to copy and distribute verbatim copies of this +license document, but changing it is not allowed. 
+ +Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +(a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +(b) The contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +(c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +(d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. +``` + +Then you just add a line to every git commit message: + + Signed-off-by: Ri Xu https://xuri.me + +Use your real name (sorry, no pseudonyms or anonymous contributions.) + +If you set your `user.name` and `user.email` git configs, you can sign your +commit automatically with `git commit -s`. + +### How can I become a maintainer + +First, all maintainers have 3 things + +* They share responsibility in the project's success. +* They have made a long-term, recurring time investment to improve the project. +* They spend that time doing whatever needs to be done, not necessarily what + is the most interesting or fun. + +Maintainers are often under-appreciated, because their work is harder to appreciate. +It's easy to appreciate a really cool and technically advanced feature. It's harder +to appreciate the absence of bugs, the slow but steady improvement in stability, +or the reliability of a release process. But those things distinguish a good +project from a great one. + +Don't forget: being a maintainer is a time investment. Make sure you +will have time to make yourself available. You don't have to be a +maintainer to make a difference on the project! + +If you want to become a meintainer, contact [xuri.me](https://xuri.me) and given a introduction of you. + +## Community guidelines + +We want to keep the community awesome, growing and collaborative. We need +your help to keep it that way. To help with this we've come up with some general +guidelines for the community as a whole: + +* Be nice: Be courteous, respectful and polite to fellow community members: + no regional, racial, gender, or other abuse will be tolerated. We like + nice people way better than mean ones! + +* Encourage diversity and participation: Make everyone in our community feel + welcome, regardless of their background and the extent of their + contributions, and do everything possible to encourage participation in + our community. + +* Keep it legal: Basically, don't get us in trouble. Share only content that + you own, do not share private or sensitive information, and don't break + the law. + +* Stay on topic: Make sure that you are posting to the correct channel and + avoid off-topic discussions. Remember when you update an issue or respond + to an email you are potentially sending to a large number of people. Please + consider this before you update. Also remember that nobody likes spam. 
+ +* Don't send email to the maintainers: There's no need to send email to the + maintainers to ask them to investigate an issue or to take a look at a + pull request. Instead of sending an email, GitHub mentions should be + used to ping maintainers to review a pull request, a proposal or an + issue. + +### Guideline violations — 3 strikes method + +The point of this section is not to find opportunities to punish people, but we +do need a fair way to deal with people who are making our community suck. + +1. First occurrence: We'll give you a friendly, but public reminder that the + behavior is inappropriate according to our guidelines. + +2. Second occurrence: We will send you a private message with a warning that + any additional violations will result in removal from the community. + +3. Third occurrence: Depending on the violation, we may need to delete or ban + your account. + +**Notes:** + +* Obvious spammers are banned on first occurrence. If we don't do this, we'll + have spam all over the place. + +* Violations are forgiven after 6 months of good behavior, and we won't hold a + grudge. + +* People who commit minor infractions will get some education, rather than + hammering them in the 3 strikes process. + +* The rules apply equally to everyone in the community, no matter how much + you've contributed. + +* Extreme violations of a threatening, abusive, destructive or illegal nature + will be addressed immediately and are not subject to 3 strikes or forgiveness. + +* Contact [xuri.me](https://xuri.me) to report abuse or appeal violations. In the case of + appeals, we know that mistakes happen, and we'll work with you to come up with a + fair solution if there has been a misunderstanding. + +## Coding Style + +Unless explicitly stated, we follow all coding guidelines from the Go +community. While some of these standards may seem arbitrary, they somehow seem +to result in a solid, consistent codebase. + +It is possible that the code base does not currently comply with these +guidelines. We are not looking for a massive PR that fixes this, since that +goes against the spirit of the guidelines. All new contributions should make a +best effort to clean up and make the code base better than they left it. +Obviously, apply your best judgement. Remember, the goal here is to make the +code base easier for humans to navigate and understand. Always keep that in +mind when nudging others to comply. + +The rules: + +1. All code should be formatted with `gofmt -s`. +2. All code should pass the default levels of + [`golint`](https://github.com/golang/lint). +3. All code should follow the guidelines covered in [Effective + Go](http://golang.org/doc/effective_go.html) and [Go Code Review + Comments](https://github.com/golang/go/wiki/CodeReviewComments). +4. Comment the code. Tell us the why, the history and the context. +5. Document _all_ declarations and methods, even private ones. Declare + expectations, caveats and anything else that may be important. If a type + gets exported, having the comments already there will ensure it's ready. +6. Variable name length should be proportional to its context and no longer. + `noCommaALongVariableNameLikeThisIsNotMoreClearWhenASimpleCommentWouldDo`. + In practice, short methods will have short variable names and globals will + have longer names. +7. No underscores in package names. If you need a compound name, step back, + and re-examine why you need a compound name. If you still think you need a + compound name, lose the underscore. +8. 
No utils or helpers packages. If a function is not general enough to + warrant its own package, it has not been written generally enough to be a + part of a util package. Just leave it unexported and well-documented. +9. All tests should run with `go test` and outside tooling should not be + required. No, we don't need another unit testing framework. Assertion + packages are acceptable if they provide _real_ incremental value. +10. Even though we call these "rules" above, they are actually just + guidelines. Since you've read all the rules, you now know that. + +If you are having trouble getting into the mood of idiomatic Go, we recommend +reading through [Effective Go](https://golang.org/doc/effective_go.html). The +[Go Blog](https://blog.golang.org) is also a great resource. Drinking the +kool-aid is a lot easier than going thirsty. diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/LICENSE b/vendor/github.com/360EntSecGroup-Skylar/excelize/LICENSE new file mode 100644 index 000000000..4ca04b8f6 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2016 - 2018 360 Enterprise Security Group, Endpoint Security, +inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of Excelize nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
\ No newline at end of file diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/README.md b/vendor/github.com/360EntSecGroup-Skylar/excelize/README.md new file mode 100644 index 000000000..88a67f3ba --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/README.md @@ -0,0 +1,175 @@ +![Excelize](./excelize.png "Excelize") + +# Excelize + +[![Build Status](https://travis-ci.org/360EntSecGroup-Skylar/excelize.svg?branch=master)](https://travis-ci.org/360EntSecGroup-Skylar/excelize) +[![Code Coverage](https://codecov.io/gh/360EntSecGroup-Skylar/excelize/branch/master/graph/badge.svg)](https://codecov.io/gh/360EntSecGroup-Skylar/excelize) +[![Go Report Card](https://goreportcard.com/badge/github.com/360EntSecGroup-Skylar/excelize)](https://goreportcard.com/report/github.com/360EntSecGroup-Skylar/excelize) +[![GoDoc](https://godoc.org/github.com/360EntSecGroup-Skylar/excelize?status.svg)](https://godoc.org/github.com/360EntSecGroup-Skylar/excelize) +[![Licenses](https://img.shields.io/badge/license-bsd-orange.svg)](https://opensource.org/licenses/BSD-3-Clause) +[![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.me/xuri) + +## Introduction + +Excelize is a library written in pure Go and providing a set of functions that allow you to write to and read from XLSX files. Support reads and writes XLSX file generated by Microsoft Excel™ 2007 and later. Support save file without losing original charts of XLSX. This library needs Go version 1.8 or later. The full API docs can be seen using go's built-in documentation tool, or online at [godoc.org](https://godoc.org/github.com/360EntSecGroup-Skylar/excelize) and [Chinese translation](https://xuri.me/excelize/zh_cn). + +## Basic Usage + +### Installation + +```go +go get github.com/360EntSecGroup-Skylar/excelize +``` + +### Create XLSX file + +Here is a minimal example usage that will create XLSX file. + +```go +package main + +import ( + "fmt" + + "github.com/360EntSecGroup-Skylar/excelize" +) + +func main() { + xlsx := excelize.NewFile() + // Create a new sheet. + index := xlsx.NewSheet("Sheet2") + // Set value of a cell. + xlsx.SetCellValue("Sheet2", "A2", "Hello world.") + xlsx.SetCellValue("Sheet1", "B2", 100) + // Set active sheet of the workbook. + xlsx.SetActiveSheet(index) + // Save xlsx file by the given path. + err := xlsx.SaveAs("./Book1.xlsx") + if err != nil { + fmt.Println(err) + } +} +``` + +### Reading XLSX file + +The following constitutes the bare to read a XLSX document. + +```go +package main + +import ( + "fmt" + + "github.com/360EntSecGroup-Skylar/excelize" +) + +func main() { + xlsx, err := excelize.OpenFile("./Book1.xlsx") + if err != nil { + fmt.Println(err) + return + } + // Get value from cell by given worksheet name and axis. + cell := xlsx.GetCellValue("Sheet1", "B2") + fmt.Println(cell) + // Get all the rows in the Sheet1. + rows := xlsx.GetRows("Sheet1") + for _, row := range rows { + for _, colCell := range row { + fmt.Print(colCell, "\t") + } + fmt.Println() + } +} +``` + +### Add chart to XLSX file + +With Excelize chart generation and management is as easy as a few lines of code. You can build charts based off data in your worksheet or generate charts without any data in your worksheet at all. 
+ +![Excelize](./test/images/chart.png "Excelize") + +```go +package main + +import ( + "fmt" + + "github.com/360EntSecGroup-Skylar/excelize" +) + +func main() { + categories := map[string]string{"A2": "Small", "A3": "Normal", "A4": "Large", "B1": "Apple", "C1": "Orange", "D1": "Pear"} + values := map[string]int{"B2": 2, "C2": 3, "D2": 3, "B3": 5, "C3": 2, "D3": 4, "B4": 6, "C4": 7, "D4": 8} + xlsx := excelize.NewFile() + for k, v := range categories { + xlsx.SetCellValue("Sheet1", k, v) + } + for k, v := range values { + xlsx.SetCellValue("Sheet1", k, v) + } + xlsx.AddChart("Sheet1", "E1", `{"type":"col3DClustered","series":[{"name":"Sheet1!$A$2","categories":"Sheet1!$B$1:$D$1","values":"Sheet1!$B$2:$D$2"},{"name":"Sheet1!$A$3","categories":"Sheet1!$B$1:$D$1","values":"Sheet1!$B$3:$D$3"},{"name":"Sheet1!$A$4","categories":"Sheet1!$B$1:$D$1","values":"Sheet1!$B$4:$D$4"}],"title":{"name":"Fruit 3D Clustered Column Chart"}}`) + // Save xlsx file by the given path. + err := xlsx.SaveAs("./Book1.xlsx") + if err != nil { + fmt.Println(err) + } +} + +``` + +### Add picture to XLSX file + +```go +package main + +import ( + "fmt" + _ "image/gif" + _ "image/jpeg" + _ "image/png" + + "github.com/360EntSecGroup-Skylar/excelize" +) + +func main() { + xlsx, err := excelize.OpenFile("./Book1.xlsx") + if err != nil { + fmt.Println(err) + return + } + // Insert a picture. + err = xlsx.AddPicture("Sheet1", "A2", "./image1.png", "") + if err != nil { + fmt.Println(err) + } + // Insert a picture to worksheet with scaling. + err = xlsx.AddPicture("Sheet1", "D2", "./image2.jpg", `{"x_scale": 0.5, "y_scale": 0.5}`) + if err != nil { + fmt.Println(err) + } + // Insert a picture offset in the cell with printing support. + err = xlsx.AddPicture("Sheet1", "H2", "./image3.gif", `{"x_offset": 15, "y_offset": 10, "print_obj": true, "lock_aspect_ratio": false, "locked": false}`) + if err != nil { + fmt.Println(err) + } + // Save the xlsx file with the origin path. + err = xlsx.Save() + if err != nil { + fmt.Println(err) + } +} +``` + +## Contributing + +Contributions are welcome! Open a pull request to fix a bug, or open an issue to discuss a new feature or change. XML is compliant with [part 1 of the 5th edition of the ECMA-376 Standard for Office Open XML](http://www.ecma-international.org/publications/standards/Ecma-376.htm). + +## Credits + +Some struct of XML originally by [tealeg/xlsx](https://github.com/tealeg/xlsx). + +## Licenses + +This program is under the terms of the BSD 3-Clause License. See [https://opensource.org/licenses/BSD-3-Clause](https://opensource.org/licenses/BSD-3-Clause). 
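The vendored README above covers standalone usage of excelize; in this patch the same API is driven from the new smart-contract builtins in packages/smart/builtin_excel.go, which open a workbook from a stored binary rather than from disk. Below is a minimal sketch of that flow, assuming only the excelize calls already shown in this diff (OpenReader, GetSheetName, GetRows); the helper name, the 1-based sheet index and the input file path are hypothetical.

```go
package main

import (
	"bytes"
	"fmt"
	"io/ioutil"

	xl "github.com/360EntSecGroup-Skylar/excelize"
)

// readSheet is a hypothetical helper that mirrors what GetDataFromXLSX does with
// a stored binary: open the workbook from a byte slice and return the rows of
// one sheet as strings.
func readSheet(data []byte, sheetNum int) ([][]string, error) {
	book, err := xl.OpenReader(bytes.NewReader(data))
	if err != nil {
		return nil, err
	}
	// GetSheetName and GetRows are the same vendored excelize calls used by
	// packages/smart/builtin_excel.go above.
	sheetName := book.GetSheetName(sheetNum)
	return book.GetRows(sheetName), nil
}

func main() {
	data, err := ioutil.ReadFile("./Book1.xlsx") // hypothetical input file
	if err != nil {
		fmt.Println(err)
		return
	}
	rows, err := readSheet(data, 1)
	if err != nil {
		fmt.Println(err)
		return
	}
	for _, row := range rows {
		fmt.Println(row)
	}
}
```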
diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/README_zh.md b/vendor/github.com/360EntSecGroup-Skylar/excelize/README_zh.md new file mode 100644 index 000000000..ef149b087 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/README_zh.md @@ -0,0 +1,175 @@ +![Excelize](./excelize.png "Excelize") + +# Excelize + +[![Build Status](https://travis-ci.org/360EntSecGroup-Skylar/excelize.svg?branch=master)](https://travis-ci.org/360EntSecGroup-Skylar/excelize) +[![Code Coverage](https://codecov.io/gh/360EntSecGroup-Skylar/excelize/branch/master/graph/badge.svg)](https://codecov.io/gh/360EntSecGroup-Skylar/excelize) +[![Go Report Card](https://goreportcard.com/badge/github.com/360EntSecGroup-Skylar/excelize)](https://goreportcard.com/report/github.com/360EntSecGroup-Skylar/excelize) +[![GoDoc](https://godoc.org/github.com/360EntSecGroup-Skylar/excelize?status.svg)](https://godoc.org/github.com/360EntSecGroup-Skylar/excelize) +[![Licenses](https://img.shields.io/badge/license-bsd-orange.svg)](https://opensource.org/licenses/BSD-3-Clause) +[![Donate](https://img.shields.io/badge/Donate-PayPal-green.svg)](https://www.paypal.me/xuri) + +## 简介 + +Excelize 是 Go 语言编写的用于操作 Office Excel 文档类库,基于 ECMA-376 Office OpenXML 标准。可以使用它来读取、写入由 Microsoft Excel™ 2007 及以上版本创建的 XLSX 文档。相比较其他的开源类库,Excelize 支持写入原本带有图片(表)、透视表和切片器等复杂样式的文档,还支持向 Excel 文档中插入图片与图表,并且在保存后不会丢失文档原有样式,可以应用于各类报表系统中。使用本类库要求使用的 Go 语言为 1.8 或更高版本,完整的 API 使用文档请访问 [godoc.org](https://godoc.org/github.com/360EntSecGroup-Skylar/excelize) 或查看 [中文翻译](https://xuri.me/excelize/zh_cn)。 + +## 快速上手 + +### 安装 + +```go +go get github.com/360EntSecGroup-Skylar/excelize +``` + +### 创建 Excel 文档 + +下面是一个创建 Excel 文档的简单例子: + +```go +package main + +import ( + "fmt" + + "github.com/360EntSecGroup-Skylar/excelize" +) + +func main() { + xlsx := excelize.NewFile() + // 创建一个工作表 + index := xlsx.NewSheet("Sheet2") + // 设置单元格的值 + xlsx.SetCellValue("Sheet2", "A2", "Hello world.") + xlsx.SetCellValue("Sheet1", "B2", 100) + // 设置工作簿的默认工作表 + xlsx.SetActiveSheet(index) + // 根据指定路径保存文件 + err := xlsx.SaveAs("./Book1.xlsx") + if err != nil { + fmt.Println(err) + } +} +``` + +### 读取 Excel 文档 + +下面是读取 Excel 文档的例子: + +```go +package main + +import ( + "fmt" + + "github.com/360EntSecGroup-Skylar/excelize" +) + +func main() { + xlsx, err := excelize.OpenFile("./Book1.xlsx") + if err != nil { + fmt.Println(err) + return + } + // 获取工作表中指定单元格的值 + cell := xlsx.GetCellValue("Sheet1", "B2") + fmt.Println(cell) + // 获取 Sheet1 上所有单元格 + rows := xlsx.GetRows("Sheet1") + for _, row := range rows { + for _, colCell := range row { + fmt.Print(colCell, "\t") + } + fmt.Println() + } +} +``` + +### 在 Excel 文档中创建图表 + +使用 Excelize 生成图表十分简单,仅需几行代码。您可以根据工作表中的已有数据构建图表,或向工作表中添加数据并创建图表。 + +![Excelize](./test/images/chart.png "Excelize") + +```go +package main + +import ( + "fmt" + + "github.com/360EntSecGroup-Skylar/excelize" +) + +func main() { + categories := map[string]string{"A2": "Small", "A3": "Normal", "A4": "Large", "B1": "Apple", "C1": "Orange", "D1": "Pear"} + values := map[string]int{"B2": 2, "C2": 3, "D2": 3, "B3": 5, "C3": 2, "D3": 4, "B4": 6, "C4": 7, "D4": 8} + xlsx := excelize.NewFile() + for k, v := range categories { + xlsx.SetCellValue("Sheet1", k, v) + } + for k, v := range values { + xlsx.SetCellValue("Sheet1", k, v) + } + xlsx.AddChart("Sheet1", "E1", 
`{"type":"col3DClustered","series":[{"name":"Sheet1!$A$2","categories":"Sheet1!$B$1:$D$1","values":"Sheet1!$B$2:$D$2"},{"name":"Sheet1!$A$3","categories":"Sheet1!$B$1:$D$1","values":"Sheet1!$B$3:$D$3"},{"name":"Sheet1!$A$4","categories":"Sheet1!$B$1:$D$1","values":"Sheet1!$B$4:$D$4"}],"title":{"name":"Fruit 3D Clustered Column Chart"}}`) + // 根据指定路径保存文件 + err := xlsx.SaveAs("./Book1.xlsx") + if err != nil { + fmt.Println(err) + } +} + +``` + +### 向 Excel 文档中插入图片 + +```go +package main + +import ( + "fmt" + _ "image/gif" + _ "image/jpeg" + _ "image/png" + + "github.com/360EntSecGroup-Skylar/excelize" +) + +func main() { + xlsx, err := excelize.OpenFile("./Book1.xlsx") + if err != nil { + fmt.Println(err) + return + } + // 插入图片 + err = xlsx.AddPicture("Sheet1", "A2", "./image1.png", "") + if err != nil { + fmt.Println(err) + } + // 在工作表中插入图片,并设置图片的缩放比例 + err = xlsx.AddPicture("Sheet1", "D2", "./image2.jpg", `{"x_scale": 0.5, "y_scale": 0.5}`) + if err != nil { + fmt.Println(err) + } + // 在工作表中插入图片,并设置图片的打印属性 + err = xlsx.AddPicture("Sheet1", "H2", "./image3.gif", `{"x_offset": 15, "y_offset": 10, "print_obj": true, "lock_aspect_ratio": false, "locked": false}`) + if err != nil { + fmt.Println(err) + } + // 保存文件 + err = xlsx.Save() + if err != nil { + fmt.Println(err) + } +} +``` + +## 社区合作 + +欢迎您为此项目贡献代码,提出建议或问题、修复 Bug 以及参与讨论对新功能的想法。 XML 符合标准: [part 1 of the 5th edition of the ECMA-376 Standard for Office Open XML](http://www.ecma-international.org/publications/standards/Ecma-376.htm)。 + +## 致谢 + +本类库中部分 XML 结构体的定义参考了开源项目:[tealeg/xlsx](https://github.com/tealeg/xlsx). + +## 开源许可 + +本项目遵循 BSD 3-Clause 开源许可协议,访问 [https://opensource.org/licenses/BSD-3-Clause](https://opensource.org/licenses/BSD-3-Clause) 查看许可协议文件。 diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/cell.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/cell.go new file mode 100644 index 000000000..eb265cc68 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/cell.go @@ -0,0 +1,589 @@ +package excelize + +import ( + "encoding/xml" + "fmt" + "reflect" + "strconv" + "strings" + "time" +) + +const ( + // STCellFormulaTypeArray defined the formula is an array formula. + STCellFormulaTypeArray = "array" + // STCellFormulaTypeDataTable defined the formula is a data table formula. + STCellFormulaTypeDataTable = "dataTable" + // STCellFormulaTypeNormal defined the formula is a regular cell formula. + STCellFormulaTypeNormal = "normal" + // STCellFormulaTypeShared defined the formula is part of a shared formula. + STCellFormulaTypeShared = "shared" +) + +// mergeCellsParser provides function to check merged cells in worksheet by +// given axis. +func (f *File) mergeCellsParser(xlsx *xlsxWorksheet, axis string) string { + axis = strings.ToUpper(axis) + if xlsx.MergeCells != nil { + for i := 0; i < len(xlsx.MergeCells.Cells); i++ { + if checkCellInArea(axis, xlsx.MergeCells.Cells[i].Ref) { + axis = strings.Split(xlsx.MergeCells.Cells[i].Ref, ":")[0] + } + } + } + return axis +} + +// SetCellValue provides function to set value of a cell. The following shows +// the supported data types: +// +// int +// int8 +// int16 +// int32 +// int64 +// uint +// uint8 +// uint16 +// uint32 +// uint64 +// float32 +// float64 +// string +// []byte +// time.Duration +// time.Time +// bool +// nil +// +// Note that default date format is m/d/yy h:mm of time.Time type value. You can +// set numbers format by SetCellStyle() method. 
+func (f *File) SetCellValue(sheet, axis string, value interface{}) { + switch t := value.(type) { + case float32: + f.SetCellDefault(sheet, axis, strconv.FormatFloat(float64(value.(float32)), 'f', -1, 32)) + case float64: + f.SetCellDefault(sheet, axis, strconv.FormatFloat(float64(value.(float64)), 'f', -1, 64)) + case string: + f.SetCellStr(sheet, axis, t) + case []byte: + f.SetCellStr(sheet, axis, string(t)) + case time.Duration: + f.SetCellDefault(sheet, axis, strconv.FormatFloat(float64(value.(time.Duration).Seconds()/86400), 'f', -1, 32)) + f.setDefaultTimeStyle(sheet, axis, 21) + case time.Time: + f.SetCellDefault(sheet, axis, strconv.FormatFloat(float64(timeToExcelTime(timeToUTCTime(value.(time.Time)))), 'f', -1, 64)) + f.setDefaultTimeStyle(sheet, axis, 22) + case nil: + f.SetCellStr(sheet, axis, "") + case bool: + f.SetCellBool(sheet, axis, bool(value.(bool))) + default: + f.setCellIntValue(sheet, axis, value) + } +} + +// setCellIntValue provides function to set int value of a cell. +func (f *File) setCellIntValue(sheet, axis string, value interface{}) { + switch value.(type) { + case int: + f.SetCellInt(sheet, axis, value.(int)) + case int8: + f.SetCellInt(sheet, axis, int(value.(int8))) + case int16: + f.SetCellInt(sheet, axis, int(value.(int16))) + case int32: + f.SetCellInt(sheet, axis, int(value.(int32))) + case int64: + f.SetCellInt(sheet, axis, int(value.(int64))) + case uint: + f.SetCellInt(sheet, axis, int(value.(uint))) + case uint8: + f.SetCellInt(sheet, axis, int(value.(uint8))) + case uint16: + f.SetCellInt(sheet, axis, int(value.(uint16))) + case uint32: + f.SetCellInt(sheet, axis, int(value.(uint32))) + case uint64: + f.SetCellInt(sheet, axis, int(value.(uint64))) + default: + f.SetCellStr(sheet, axis, fmt.Sprintf("%v", value)) + } +} + +// SetCellBool provides function to set bool type value of a cell by given +// worksheet name, cell coordinates and cell value. +func (f *File) SetCellBool(sheet, axis string, value bool) { + xlsx := f.workSheetReader(sheet) + axis = f.mergeCellsParser(xlsx, axis) + col := string(strings.Map(letterOnlyMapF, axis)) + row, err := strconv.Atoi(strings.Map(intOnlyMapF, axis)) + if err != nil { + return + } + xAxis := row - 1 + yAxis := TitleToNumber(col) + + rows := xAxis + 1 + cell := yAxis + 1 + + completeRow(xlsx, rows, cell) + completeCol(xlsx, rows, cell) + + xlsx.SheetData.Row[xAxis].C[yAxis].S = f.prepareCellStyle(xlsx, cell, xlsx.SheetData.Row[xAxis].C[yAxis].S) + xlsx.SheetData.Row[xAxis].C[yAxis].T = "b" + if value { + xlsx.SheetData.Row[xAxis].C[yAxis].V = "1" + } else { + xlsx.SheetData.Row[xAxis].C[yAxis].V = "0" + } +} + +// GetCellValue provides function to get formatted value from cell by given +// worksheet name and axis in XLSX file. If it is possible to apply a format to +// the cell value, it will do so, if not then an error will be returned, along +// with the raw value of the cell. 
+func (f *File) GetCellValue(sheet, axis string) string { + xlsx := f.workSheetReader(sheet) + axis = f.mergeCellsParser(xlsx, axis) + row, err := strconv.Atoi(strings.Map(intOnlyMapF, axis)) + if err != nil { + return "" + } + xAxis := row - 1 + rows := len(xlsx.SheetData.Row) + if rows > 1 { + lastRow := xlsx.SheetData.Row[rows-1].R + if lastRow >= rows { + rows = lastRow + } + } + if rows < xAxis { + return "" + } + for k := range xlsx.SheetData.Row { + if xlsx.SheetData.Row[k].R == row { + for i := range xlsx.SheetData.Row[k].C { + if axis == xlsx.SheetData.Row[k].C[i].R { + val, _ := xlsx.SheetData.Row[k].C[i].getValueFrom(f, f.sharedStringsReader()) + return val + } + } + } + } + return "" +} + +// formattedValue provides function to returns a value after formatted. If it is +// possible to apply a format to the cell value, it will do so, if not then an +// error will be returned, along with the raw value of the cell. +func (f *File) formattedValue(s int, v string) string { + if s == 0 { + return v + } + styleSheet := f.stylesReader() + ok := builtInNumFmtFunc[styleSheet.CellXfs.Xf[s].NumFmtID] + if ok != nil { + return ok(styleSheet.CellXfs.Xf[s].NumFmtID, v) + } + return v +} + +// GetCellStyle provides function to get cell style index by given worksheet +// name and cell coordinates. +func (f *File) GetCellStyle(sheet, axis string) int { + xlsx := f.workSheetReader(sheet) + axis = f.mergeCellsParser(xlsx, axis) + col := string(strings.Map(letterOnlyMapF, axis)) + row, err := strconv.Atoi(strings.Map(intOnlyMapF, axis)) + if err != nil { + return 0 + } + xAxis := row - 1 + yAxis := TitleToNumber(col) + + rows := xAxis + 1 + cell := yAxis + 1 + + completeRow(xlsx, rows, cell) + completeCol(xlsx, rows, cell) + + return f.prepareCellStyle(xlsx, cell, xlsx.SheetData.Row[xAxis].C[yAxis].S) +} + +// GetCellFormula provides function to get formula from cell by given worksheet +// name and axis in XLSX file. +func (f *File) GetCellFormula(sheet, axis string) string { + xlsx := f.workSheetReader(sheet) + axis = f.mergeCellsParser(xlsx, axis) + row, err := strconv.Atoi(strings.Map(intOnlyMapF, axis)) + if err != nil { + return "" + } + xAxis := row - 1 + rows := len(xlsx.SheetData.Row) + if rows > 1 { + lastRow := xlsx.SheetData.Row[rows-1].R + if lastRow >= rows { + rows = lastRow + } + } + if rows < xAxis { + return "" + } + for k := range xlsx.SheetData.Row { + if xlsx.SheetData.Row[k].R == row { + for i := range xlsx.SheetData.Row[k].C { + if axis == xlsx.SheetData.Row[k].C[i].R { + if xlsx.SheetData.Row[k].C[i].F.T == STCellFormulaTypeShared { + return getSharedForumula(xlsx, xlsx.SheetData.Row[k].C[i].F.Si) + } + if xlsx.SheetData.Row[k].C[i].F != nil { + return xlsx.SheetData.Row[k].C[i].F.Content + } + } + } + } + } + return "" +} + +// getSharedForumula find a cell contains the same formula as another cell, +// the "shared" value can be used for the t attribute and the si attribute can +// be used to refer to the cell containing the formula. Two formulas are +// considered to be the same when their respective representations in +// R1C1-reference notation, are the same. +// +// Note that this function not validate ref tag to check the cell if or not in +// allow area, and always return origin shared formula. 
+func getSharedForumula(xlsx *xlsxWorksheet, si string) string { + for k := range xlsx.SheetData.Row { + for i := range xlsx.SheetData.Row[k].C { + if xlsx.SheetData.Row[k].C[i].F == nil { + continue + } + if xlsx.SheetData.Row[k].C[i].F.T != STCellFormulaTypeShared { + continue + } + if xlsx.SheetData.Row[k].C[i].F.Si != si { + continue + } + if xlsx.SheetData.Row[k].C[i].F.Ref != "" { + return xlsx.SheetData.Row[k].C[i].F.Content + } + } + } + return "" +} + +// SetCellFormula provides function to set cell formula by given string and +// worksheet name. +func (f *File) SetCellFormula(sheet, axis, formula string) { + xlsx := f.workSheetReader(sheet) + axis = f.mergeCellsParser(xlsx, axis) + col := string(strings.Map(letterOnlyMapF, axis)) + row, err := strconv.Atoi(strings.Map(intOnlyMapF, axis)) + if err != nil { + return + } + xAxis := row - 1 + yAxis := TitleToNumber(col) + + rows := xAxis + 1 + cell := yAxis + 1 + + completeRow(xlsx, rows, cell) + completeCol(xlsx, rows, cell) + + if xlsx.SheetData.Row[xAxis].C[yAxis].F != nil { + xlsx.SheetData.Row[xAxis].C[yAxis].F.Content = formula + } else { + f := xlsxF{ + Content: formula, + } + xlsx.SheetData.Row[xAxis].C[yAxis].F = &f + } +} + +// SetCellHyperLink provides function to set cell hyperlink by given worksheet +// name and link URL address. LinkType defines two types of hyperlink "External" +// for web site or "Location" for moving to one of cell in this workbook. The +// below is example for external link. +// +// xlsx.SetCellHyperLink("Sheet1", "A3", "https://github.com/360EntSecGroup-Skylar/excelize", "External") +// // Set underline and font color style for the cell. +// style, _ := xlsx.NewStyle(`{"font":{"color":"#1265BE","underline":"single"}}`) +// xlsx.SetCellStyle("Sheet1", "A3", "A3", style) +// +// A this is another example for "Location": +// +// xlsx.SetCellHyperLink("Sheet1", "A3", "Sheet1!A40", "Location") +// +func (f *File) SetCellHyperLink(sheet, axis, link, linkType string) { + xlsx := f.workSheetReader(sheet) + axis = f.mergeCellsParser(xlsx, axis) + linkTypes := map[string]xlsxHyperlink{ + "External": {}, + "Location": {Location: link}, + } + hyperlink, ok := linkTypes[linkType] + if !ok || axis == "" { + return + } + hyperlink.Ref = axis + if linkType == "External" { + rID := f.addSheetRelationships(sheet, SourceRelationshipHyperLink, link, linkType) + hyperlink.RID = "rId" + strconv.Itoa(rID) + } + if xlsx.Hyperlinks == nil { + xlsx.Hyperlinks = &xlsxHyperlinks{} + } + xlsx.Hyperlinks.Hyperlink = append(xlsx.Hyperlinks.Hyperlink, hyperlink) +} + +// GetCellHyperLink provides function to get cell hyperlink by given worksheet +// name and axis. Boolean type value link will be ture if the cell has a +// hyperlink and the target is the address of the hyperlink. Otherwise, the +// value of link will be false and the value of the target will be a blank +// string. 
For example get hyperlink of Sheet1!H6: +// +// link, target := xlsx.GetCellHyperLink("Sheet1", "H6") +// +func (f *File) GetCellHyperLink(sheet, axis string) (bool, string) { + var link bool + var target string + xlsx := f.workSheetReader(sheet) + axis = f.mergeCellsParser(xlsx, axis) + if xlsx.Hyperlinks == nil || axis == "" { + return link, target + } + for h := range xlsx.Hyperlinks.Hyperlink { + if xlsx.Hyperlinks.Hyperlink[h].Ref == axis { + link = true + target = xlsx.Hyperlinks.Hyperlink[h].Location + if xlsx.Hyperlinks.Hyperlink[h].RID != "" { + target = f.getSheetRelationshipsTargetByID(sheet, xlsx.Hyperlinks.Hyperlink[h].RID) + } + } + } + return link, target +} + +// MergeCell provides function to merge cells by given coordinate area and sheet +// name. For example create a merged cell of D3:E9 on Sheet1: +// +// xlsx.MergeCell("Sheet1", "D3", "E9") +// +// If you create a merged cell that overlaps with another existing merged cell, +// those merged cells that already exist will be removed. +func (f *File) MergeCell(sheet, hcell, vcell string) { + if hcell == vcell { + return + } + + hcell = strings.ToUpper(hcell) + vcell = strings.ToUpper(vcell) + + // Coordinate conversion, convert C1:B3 to 2,0,1,2. + hcol := string(strings.Map(letterOnlyMapF, hcell)) + hrow, _ := strconv.Atoi(strings.Map(intOnlyMapF, hcell)) + hyAxis := hrow - 1 + hxAxis := TitleToNumber(hcol) + + vcol := string(strings.Map(letterOnlyMapF, vcell)) + vrow, _ := strconv.Atoi(strings.Map(intOnlyMapF, vcell)) + vyAxis := vrow - 1 + vxAxis := TitleToNumber(vcol) + + if vxAxis < hxAxis { + hcell, vcell = vcell, hcell + vxAxis, hxAxis = hxAxis, vxAxis + } + + if vyAxis < hyAxis { + hcell, vcell = vcell, hcell + vyAxis, hyAxis = hyAxis, vyAxis + } + + xlsx := f.workSheetReader(sheet) + if xlsx.MergeCells != nil { + mergeCell := xlsxMergeCell{} + // Correct the coordinate area, such correct C1:B3 to B1:C3. + mergeCell.Ref = ToAlphaString(hxAxis) + strconv.Itoa(hyAxis+1) + ":" + ToAlphaString(vxAxis) + strconv.Itoa(vyAxis+1) + // Delete the merged cells of the overlapping area. + for i := 0; i < len(xlsx.MergeCells.Cells); i++ { + if checkCellInArea(hcell, xlsx.MergeCells.Cells[i].Ref) || checkCellInArea(strings.Split(xlsx.MergeCells.Cells[i].Ref, ":")[0], mergeCell.Ref) { + xlsx.MergeCells.Cells = append(xlsx.MergeCells.Cells[:i], xlsx.MergeCells.Cells[i+1:]...) + } else if checkCellInArea(vcell, xlsx.MergeCells.Cells[i].Ref) || checkCellInArea(strings.Split(xlsx.MergeCells.Cells[i].Ref, ":")[1], mergeCell.Ref) { + xlsx.MergeCells.Cells = append(xlsx.MergeCells.Cells[:i], xlsx.MergeCells.Cells[i+1:]...) + } + } + xlsx.MergeCells.Cells = append(xlsx.MergeCells.Cells, &mergeCell) + } else { + mergeCell := xlsxMergeCell{} + // Correct the coordinate area, such correct C1:B3 to B1:C3. + mergeCell.Ref = ToAlphaString(hxAxis) + strconv.Itoa(hyAxis+1) + ":" + ToAlphaString(vxAxis) + strconv.Itoa(vyAxis+1) + mergeCells := xlsxMergeCells{} + mergeCells.Cells = append(mergeCells.Cells, &mergeCell) + xlsx.MergeCells = &mergeCells + } +} + +// SetCellInt provides function to set int type value of a cell by given +// worksheet name, cell coordinates and cell value. 
+func (f *File) SetCellInt(sheet, axis string, value int) { + xlsx := f.workSheetReader(sheet) + axis = f.mergeCellsParser(xlsx, axis) + col := string(strings.Map(letterOnlyMapF, axis)) + row, err := strconv.Atoi(strings.Map(intOnlyMapF, axis)) + if err != nil { + return + } + xAxis := row - 1 + yAxis := TitleToNumber(col) + + rows := xAxis + 1 + cell := yAxis + 1 + + completeRow(xlsx, rows, cell) + completeCol(xlsx, rows, cell) + + xlsx.SheetData.Row[xAxis].C[yAxis].S = f.prepareCellStyle(xlsx, cell, xlsx.SheetData.Row[xAxis].C[yAxis].S) + xlsx.SheetData.Row[xAxis].C[yAxis].T = "" + xlsx.SheetData.Row[xAxis].C[yAxis].V = strconv.Itoa(value) +} + +// prepareCellStyle provides function to prepare style index of cell in +// worksheet by given column index and style index. +func (f *File) prepareCellStyle(xlsx *xlsxWorksheet, col, style int) int { + if xlsx.Cols != nil && style == 0 { + for _, v := range xlsx.Cols.Col { + if v.Min <= col && col <= v.Max { + style = v.Style + } + } + } + return style +} + +// SetCellStr provides function to set string type value of a cell. Total number +// of characters that a cell can contain 32767 characters. +func (f *File) SetCellStr(sheet, axis, value string) { + xlsx := f.workSheetReader(sheet) + axis = f.mergeCellsParser(xlsx, axis) + if len(value) > 32767 { + value = value[0:32767] + } + col := string(strings.Map(letterOnlyMapF, axis)) + row, err := strconv.Atoi(strings.Map(intOnlyMapF, axis)) + if err != nil { + return + } + xAxis := row - 1 + yAxis := TitleToNumber(col) + + rows := xAxis + 1 + cell := yAxis + 1 + + completeRow(xlsx, rows, cell) + completeCol(xlsx, rows, cell) + + // Leading space(s) character detection. + if len(value) > 0 { + if value[0] == 32 { + xlsx.SheetData.Row[xAxis].C[yAxis].XMLSpace = xml.Attr{ + Name: xml.Name{Space: NameSpaceXML, Local: "space"}, + Value: "preserve", + } + } + } + xlsx.SheetData.Row[xAxis].C[yAxis].S = f.prepareCellStyle(xlsx, cell, xlsx.SheetData.Row[xAxis].C[yAxis].S) + xlsx.SheetData.Row[xAxis].C[yAxis].T = "str" + xlsx.SheetData.Row[xAxis].C[yAxis].V = value +} + +// SetCellDefault provides function to set string type value of a cell as +// default format without escaping the cell. +func (f *File) SetCellDefault(sheet, axis, value string) { + xlsx := f.workSheetReader(sheet) + axis = f.mergeCellsParser(xlsx, axis) + col := string(strings.Map(letterOnlyMapF, axis)) + row, err := strconv.Atoi(strings.Map(intOnlyMapF, axis)) + if err != nil { + return + } + xAxis := row - 1 + yAxis := TitleToNumber(col) + + rows := xAxis + 1 + cell := yAxis + 1 + + completeRow(xlsx, rows, cell) + completeCol(xlsx, rows, cell) + + xlsx.SheetData.Row[xAxis].C[yAxis].S = f.prepareCellStyle(xlsx, cell, xlsx.SheetData.Row[xAxis].C[yAxis].S) + xlsx.SheetData.Row[xAxis].C[yAxis].T = "" + xlsx.SheetData.Row[xAxis].C[yAxis].V = value +} + +// SetSheetRow writes an array to row by given worksheet name, starting +// coordinate and a pointer to array type 'slice'. 
For example, writes an +// array to row 6 start with the cell B6 on Sheet1: +// +// xlsx.SetSheetRow("Sheet1", "B6", &[]interface{}{"1", nil, 2}) +// +func (f *File) SetSheetRow(sheet, axis string, slice interface{}) { + xlsx := f.workSheetReader(sheet) + axis = f.mergeCellsParser(xlsx, axis) + col := string(strings.Map(letterOnlyMapF, axis)) + row, err := strconv.Atoi(strings.Map(intOnlyMapF, axis)) + if err != nil { + return + } + // Make sure 'slice' is a Ptr to Slice + v := reflect.ValueOf(slice) + if v.Kind() != reflect.Ptr { + return + } + v = v.Elem() + if v.Kind() != reflect.Slice { + return + } + + xAxis := row - 1 + yAxis := TitleToNumber(col) + + rows := xAxis + 1 + cell := yAxis + 1 + + completeRow(xlsx, rows, cell) + completeCol(xlsx, rows, cell) + + idx := 0 + for i := cell - 1; i < v.Len()+cell-1; i++ { + c := ToAlphaString(i) + strconv.Itoa(row) + f.SetCellValue(sheet, c, v.Index(idx).Interface()) + idx++ + } +} + +// checkCellInArea provides function to determine if a given coordinate is +// within an area. +func checkCellInArea(cell, area string) bool { + cell = strings.ToUpper(cell) + area = strings.ToUpper(area) + + ref := strings.Split(area, ":") + if len(ref) < 2 { + return false + } + + from := ref[0] + to := ref[1] + + col, row := getCellColRow(cell) + fromCol, fromRow := getCellColRow(from) + toCol, toRow := getCellColRow(to) + + return axisLowerOrEqualThan(fromCol, col) && axisLowerOrEqualThan(col, toCol) && axisLowerOrEqualThan(fromRow, row) && axisLowerOrEqualThan(row, toRow) +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/chart.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/chart.go new file mode 100644 index 000000000..7ba1d91e3 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/chart.go @@ -0,0 +1,1165 @@ +package excelize + +import ( + "encoding/json" + "encoding/xml" + "strconv" + "strings" +) + +// This section defines the currently supported chart types. +const ( + Bar = "bar" + BarStacked = "barStacked" + BarPercentStacked = "barPercentStacked" + Bar3DClustered = "bar3DClustered" + Bar3DStacked = "bar3DStacked" + Bar3DPercentStacked = "bar3DPercentStacked" + Col = "col" + ColStacked = "colStacked" + ColPercentStacked = "colPercentStacked" + Col3DClustered = "col3DClustered" + Col3D = "col3D" + Col3DStacked = "col3DStacked" + Col3DPercentStacked = "col3DPercentStacked" + Doughnut = "doughnut" + Line = "line" + Pie = "pie" + Pie3D = "pie3D" + Radar = "radar" + Scatter = "scatter" +) + +// This section defines the default value of chart properties. 
+var ( + chartView3DRotX = map[string]int{ + Bar: 0, + BarStacked: 0, + BarPercentStacked: 0, + Bar3DClustered: 15, + Bar3DStacked: 15, + Bar3DPercentStacked: 15, + Col: 0, + ColStacked: 0, + ColPercentStacked: 0, + Col3DClustered: 15, + Col3D: 15, + Col3DStacked: 15, + Col3DPercentStacked: 15, + Doughnut: 0, + Line: 0, + Pie: 0, + Pie3D: 30, + Radar: 0, + Scatter: 0, + } + chartView3DRotY = map[string]int{ + Bar: 0, + BarStacked: 0, + BarPercentStacked: 0, + Bar3DClustered: 20, + Bar3DStacked: 20, + Bar3DPercentStacked: 20, + Col: 0, + ColStacked: 0, + ColPercentStacked: 0, + Col3DClustered: 20, + Col3D: 20, + Col3DStacked: 20, + Col3DPercentStacked: 20, + Doughnut: 0, + Line: 0, + Pie: 0, + Pie3D: 0, + Radar: 0, + Scatter: 0, + } + chartView3DDepthPercent = map[string]int{ + Bar: 100, + BarStacked: 100, + BarPercentStacked: 100, + Bar3DClustered: 100, + Bar3DStacked: 100, + Bar3DPercentStacked: 100, + Col: 100, + ColStacked: 100, + ColPercentStacked: 100, + Col3DClustered: 100, + Col3D: 100, + Col3DStacked: 100, + Col3DPercentStacked: 100, + Doughnut: 100, + Line: 100, + Pie: 100, + Pie3D: 100, + Radar: 100, + Scatter: 100, + } + chartView3DRAngAx = map[string]int{ + Bar: 0, + BarStacked: 0, + BarPercentStacked: 0, + Bar3DClustered: 1, + Bar3DStacked: 1, + Bar3DPercentStacked: 1, + Col: 0, + ColStacked: 0, + ColPercentStacked: 0, + Col3DClustered: 1, + Col3D: 1, + Col3DStacked: 1, + Col3DPercentStacked: 1, + Doughnut: 0, + Line: 0, + Pie: 0, + Pie3D: 0, + Radar: 0, + Scatter: 0, + } + chartLegendPosition = map[string]string{ + "bottom": "b", + "left": "l", + "right": "r", + "top": "t", + "top_right": "tr", + } + chartValAxNumFmtFormatCode = map[string]string{ + Bar: "General", + BarStacked: "General", + BarPercentStacked: "0%", + Bar3DClustered: "General", + Bar3DStacked: "General", + Bar3DPercentStacked: "0%", + Col: "General", + ColStacked: "General", + ColPercentStacked: "0%", + Col3DClustered: "General", + Col3D: "General", + Col3DStacked: "General", + Col3DPercentStacked: "0%", + Doughnut: "General", + Line: "General", + Pie: "General", + Pie3D: "General", + Radar: "General", + Scatter: "General", + } + plotAreaChartGrouping = map[string]string{ + Bar: "clustered", + BarStacked: "stacked", + BarPercentStacked: "percentStacked", + Bar3DClustered: "clustered", + Bar3DStacked: "stacked", + Bar3DPercentStacked: "percentStacked", + Col: "clustered", + ColStacked: "stacked", + ColPercentStacked: "percentStacked", + Col3DClustered: "clustered", + Col3D: "standard", + Col3DStacked: "stacked", + Col3DPercentStacked: "percentStacked", + Line: "standard", + } + plotAreaChartBarDir = map[string]string{ + Bar: "bar", + BarStacked: "bar", + BarPercentStacked: "bar", + Bar3DClustered: "bar", + Bar3DStacked: "bar", + Bar3DPercentStacked: "bar", + Col: "col", + ColStacked: "col", + ColPercentStacked: "col", + Col3DClustered: "col", + Col3D: "col", + Col3DStacked: "col", + Col3DPercentStacked: "col", + Line: "standard", + } + orientation = map[bool]string{ + true: "maxMin", + false: "minMax", + } + catAxPos = map[bool]string{ + true: "t", + false: "b", + } + valAxPos = map[bool]string{ + true: "r", + false: "l", + } +) + +// parseFormatChartSet provides function to parse the format settings of the +// chart with default value. 
+func parseFormatChartSet(formatSet string) (*formatChart, error) { + format := formatChart{ + Dimension: formatChartDimension{ + Width: 480, + Height: 290, + }, + Format: formatPicture{ + FPrintsWithSheet: true, + FLocksWithSheet: false, + NoChangeAspect: false, + OffsetX: 0, + OffsetY: 0, + XScale: 1.0, + YScale: 1.0, + }, + Legend: formatChartLegend{ + Position: "bottom", + ShowLegendKey: false, + }, + Title: formatChartTitle{ + Name: " ", + }, + ShowBlanksAs: "gap", + } + err := json.Unmarshal([]byte(formatSet), &format) + return &format, err +} + +// AddChart provides the method to add chart in a sheet by given chart format +// set (such as offset, scale, aspect ratio setting and print settings) and +// properties set. For example, create 3D clustered column chart with data +// Sheet1!$A$29:$D$32: +// +// package main +// +// import ( +// "fmt" +// +// "github.com/360EntSecGroup-Skylar/excelize" +// ) +// +// func main() { +// categories := map[string]string{"A2": "Small", "A3": "Normal", "A4": "Large", "B1": "Apple", "C1": "Orange", "D1": "Pear"} +// values := map[string]int{"B2": 2, "C2": 3, "D2": 3, "B3": 5, "C3": 2, "D3": 4, "B4": 6, "C4": 7, "D4": 8} +// xlsx := excelize.NewFile() +// for k, v := range categories { +// xlsx.SetCellValue("Sheet1", k, v) +// } +// for k, v := range values { +// xlsx.SetCellValue("Sheet1", k, v) +// } +// xlsx.AddChart("Sheet1", "E1", `{"type":"col3DClustered","dimension":{"width":640,"height":480},"series":[{"name":"Sheet1!$A$2","categories":"Sheet1!$B$1:$D$1","values":"Sheet1!$B$2:$D$2"},{"name":"Sheet1!$A$3","categories":"Sheet1!$B$1:$D$1","values":"Sheet1!$B$3:$D$3"},{"name":"Sheet1!$A$4","categories":"Sheet1!$B$1:$D$1","values":"Sheet1!$B$4:$D$4"}],"format":{"x_scale":1.0,"y_scale":1.0,"x_offset":15,"y_offset":10,"print_obj":true,"lock_aspect_ratio":false,"locked":false},"legend":{"position":"bottom","show_legend_key":false},"title":{"name":"Fruit 3D Clustered Column Chart"},"plotarea":{"show_bubble_size":true,"show_cat_name":false,"show_leader_lines":false,"show_percent":true,"show_series_name":true,"show_val":true},"show_blanks_as":"zero","x_axis":{"reverse_order":true},"y_axis":{"maximum":7.5,"minimum":0.5}}`) +// // Save xlsx file by the given path. +// err := xlsx.SaveAs("./Book1.xlsx") +// if err != nil { +// fmt.Println(err) +// } +// } +// +// The following shows the type of chart supported by excelize: +// +// Type | Chart +// ---------------------+------------------------------ +// bar | 2D clustered bar chart +// barStacked | 2D stacked bar chart +// barPercentStacked | 2D 100% stacked bar chart +// bar3DClustered | 3D clustered bar chart +// bar3DStacked | 3D stacked bar chart +// bar3DPercentStacked | 3D 100% stacked bar chart +// col | 2D clustered column chart +// colStacked | 2D stacked column chart +// colPercentStacked | 2D 100% stacked column chart +// col3DClustered | 3D clustered column chart +// col3D | 3D column chart +// col3DStacked | 3D stacked column chart +// col3DPercentStacked | 3D 100% stacked column chart +// doughnut | doughnut chart +// line | line chart +// pie | pie chart +// pie3D | 3D pie chart +// radar | radar chart +// scatter | scatter chart +// +// In Excel a chart series is a collection of information that defines which data is plotted such as values, axis labels and formatting. +// +// The series options that can be set are: +// +// name +// categories +// values +// +// name: Set the name for the series. The name is displayed in the chart legend and in the formula bar. 
The name property is optional and if it isn't supplied it will default to Series 1..n. The name can also be a formula such as Sheet1!$A$1 +// +// categories: This sets the chart category labels. The category is more or less the same as the X axis. In most chart types the categories property is optional and the chart will just assume a sequential series from 1..n. +// +// values: This is the most important property of a series and is the only mandatory option for every chart object. This option links the chart with the worksheet data that it displays. +// +// Set properties of the chart legend. The options that can be set are: +// +// position +// show_legend_key +// +// position: Set the position of the chart legend. The default legend position is right. The available positions are: +// +// top +// bottom +// left +// right +// top_right +// +// show_legend_key: Set the legend keys shall be shown in data labels. The default value is false. +// +// Set properties of the chart title. The properties that can be set are: +// +// title +// +// name: Set the name (title) for the chart. The name is displayed above the chart. The name can also be a formula such as Sheet1!$A$1 or a list with a sheetname. The name property is optional. The default is to have no chart title. +// +// Specifies how blank cells are plotted on the chart by show_blanks_as. The default value is gap. The options that can be set are: +// +// gap +// span +// zero +// +// gap: Specifies that blank values shall be left as a gap. +// +// sapn: Specifies that blank values shall be spanned with a line. +// +// zero: Specifies that blank values shall be treated as zero. +// +// Set chart offset, scale, aspect ratio setting and print settings by format, same as function AddPicture. +// +// Set the position of the chart plot area by plotarea. The properties that can be set are: +// +// show_bubble_size +// show_cat_name +// show_leader_lines +// show_percent +// show_series_name +// show_val +// +// show_bubble_size: Specifies the bubble size shall be shown in a data label. The show_bubble_size property is optional. The default value is false. +// +// show_cat_name: Specifies that the category name shall be shown in the data label. The show_cat_name property is optional. The default value is true. +// +// show_leader_lines: Specifies leader lines shall be shown for data labels. The show_leader_lines property is optional. The default value is false. +// +// show_percent: Specifies that the percentage shall be shown in a data label. The show_percent property is optional. The default value is false. +// +// show_series_name: Specifies that the series name shall be shown in a data label. The show_series_name property is optional. The default value is false. +// +// show_val: Specifies that the value shall be shown in a data label. The show_val property is optional. The default value is false. +// +// Set the primary horizontal and vertical axis options by x_axis and y_axis. The properties that can be set are: +// +// reverse_order +// maximum +// minimum +// +// reverse_order: Specifies that the categories or values on reverse order (orientation of the chart). The reverse_order property is optional. The default value is false. +// maximum: Specifies that the fixed maximum, 0 is auto. The maximum property is optional. The default value is auto. +// minimum: Specifies that the fixed minimum, 0 is auto. The minimum property is optional. The default value is auto. +// +// Set chart size by dimension property. 
The dimension property is optional. The default width is 480, and height is 290. +// +func (f *File) AddChart(sheet, cell, format string) error { + formatSet, err := parseFormatChartSet(format) + if err != nil { + return err + } + // Read sheet data. + xlsx := f.workSheetReader(sheet) + // Add first picture for given sheet, create xl/drawings/ and xl/drawings/_rels/ folder. + drawingID := f.countDrawings() + 1 + chartID := f.countCharts() + 1 + drawingXML := "xl/drawings/drawing" + strconv.Itoa(drawingID) + ".xml" + drawingID, drawingXML = f.prepareDrawing(xlsx, drawingID, sheet, drawingXML) + drawingRID := f.addDrawingRelationships(drawingID, SourceRelationshipChart, "../charts/chart"+strconv.Itoa(chartID)+".xml", "") + f.addDrawingChart(sheet, drawingXML, cell, formatSet.Dimension.Width, formatSet.Dimension.Height, drawingRID, &formatSet.Format) + f.addChart(formatSet) + f.addContentTypePart(chartID, "chart") + f.addContentTypePart(drawingID, "drawings") + return err +} + +// countCharts provides function to get chart files count storage in the +// folder xl/charts. +func (f *File) countCharts() int { + count := 0 + for k := range f.XLSX { + if strings.Contains(k, "xl/charts/chart") { + count++ + } + } + return count +} + +// prepareDrawing provides function to prepare drawing ID and XML by given +// drawingID, worksheet name and default drawingXML. +func (f *File) prepareDrawing(xlsx *xlsxWorksheet, drawingID int, sheet, drawingXML string) (int, string) { + sheetRelationshipsDrawingXML := "../drawings/drawing" + strconv.Itoa(drawingID) + ".xml" + if xlsx.Drawing != nil { + // The worksheet already has a picture or chart relationships, use the relationships drawing ../drawings/drawing%d.xml. + sheetRelationshipsDrawingXML = f.getSheetRelationshipsTargetByID(sheet, xlsx.Drawing.RID) + drawingID, _ = strconv.Atoi(strings.TrimSuffix(strings.TrimPrefix(sheetRelationshipsDrawingXML, "../drawings/drawing"), ".xml")) + drawingXML = strings.Replace(sheetRelationshipsDrawingXML, "..", "xl", -1) + } else { + // Add first picture for given sheet. + rID := f.addSheetRelationships(sheet, SourceRelationshipDrawingML, sheetRelationshipsDrawingXML, "") + f.addSheetDrawing(sheet, rID) + } + return drawingID, drawingXML +} + +// addChart provides function to create chart as xl/charts/chart%d.xml by given +// format sets. 
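+// The plot area is produced by a per-type draw function (drawBaseChart,
+// drawPieChart, drawLineChart, ...), and the resulting chart space is
+// marshalled and stored as the next numbered file under xl/charts/.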
+func (f *File) addChart(formatSet *formatChart) { + count := f.countCharts() + xlsxChartSpace := xlsxChartSpace{ + XMLNSc: NameSpaceDrawingMLChart, + XMLNSa: NameSpaceDrawingML, + XMLNSr: SourceRelationship, + XMLNSc16r2: SourceRelationshipChart201506, + Date1904: &attrValBool{Val: false}, + Lang: &attrValString{Val: "en-US"}, + RoundedCorners: &attrValBool{Val: false}, + Chart: cChart{ + Title: &cTitle{ + Tx: cTx{ + Rich: &cRich{ + P: aP{ + PPr: &aPPr{ + DefRPr: aRPr{ + Kern: 1200, + Strike: "noStrike", + U: "none", + Sz: 1400, + SolidFill: &aSolidFill{ + SchemeClr: &aSchemeClr{ + Val: "tx1", + LumMod: &attrValInt{ + Val: 65000, + }, + LumOff: &attrValInt{ + Val: 35000, + }, + }, + }, + Ea: &aEa{ + Typeface: "+mn-ea", + }, + Cs: &aCs{ + Typeface: "+mn-cs", + }, + Latin: &aLatin{ + Typeface: "+mn-lt", + }, + }, + }, + R: &aR{ + RPr: aRPr{ + Lang: "en-US", + AltLang: "en-US", + }, + T: formatSet.Title.Name, + }, + }, + }, + }, + TxPr: cTxPr{ + P: aP{ + PPr: &aPPr{ + DefRPr: aRPr{ + Kern: 1200, + U: "none", + Sz: 14000, + Strike: "noStrike", + }, + }, + EndParaRPr: &aEndParaRPr{ + Lang: "en-US", + }, + }, + }, + }, + View3D: &cView3D{ + RotX: &attrValInt{Val: chartView3DRotX[formatSet.Type]}, + RotY: &attrValInt{Val: chartView3DRotY[formatSet.Type]}, + DepthPercent: &attrValInt{Val: chartView3DDepthPercent[formatSet.Type]}, + RAngAx: &attrValInt{Val: chartView3DRAngAx[formatSet.Type]}, + }, + Floor: &cThicknessSpPr{ + Thickness: &attrValInt{Val: 0}, + }, + SideWall: &cThicknessSpPr{ + Thickness: &attrValInt{Val: 0}, + }, + BackWall: &cThicknessSpPr{ + Thickness: &attrValInt{Val: 0}, + }, + PlotArea: &cPlotArea{}, + Legend: &cLegend{ + LegendPos: &attrValString{Val: chartLegendPosition[formatSet.Legend.Position]}, + Overlay: &attrValBool{Val: false}, + }, + + PlotVisOnly: &attrValBool{Val: false}, + DispBlanksAs: &attrValString{Val: formatSet.ShowBlanksAs}, + ShowDLblsOverMax: &attrValBool{Val: false}, + }, + SpPr: &cSpPr{ + SolidFill: &aSolidFill{ + SchemeClr: &aSchemeClr{Val: "bg1"}, + }, + Ln: &aLn{ + W: 9525, + Cap: "flat", + Cmpd: "sng", + Algn: "ctr", + SolidFill: &aSolidFill{ + SchemeClr: &aSchemeClr{Val: "tx1", + LumMod: &attrValInt{ + Val: 15000, + }, + LumOff: &attrValInt{ + Val: 85000, + }, + }, + }, + }, + }, + PrintSettings: &cPrintSettings{ + PageMargins: &cPageMargins{ + B: 0.75, + L: 0.7, + R: 0.7, + T: 0.7, + Header: 0.3, + Footer: 0.3, + }, + }, + } + plotAreaFunc := map[string]func(*formatChart) *cPlotArea{ + Bar: f.drawBaseChart, + BarStacked: f.drawBaseChart, + BarPercentStacked: f.drawBaseChart, + Bar3DClustered: f.drawBaseChart, + Bar3DStacked: f.drawBaseChart, + Bar3DPercentStacked: f.drawBaseChart, + Col: f.drawBaseChart, + ColStacked: f.drawBaseChart, + ColPercentStacked: f.drawBaseChart, + Col3DClustered: f.drawBaseChart, + Col3D: f.drawBaseChart, + Col3DStacked: f.drawBaseChart, + Col3DPercentStacked: f.drawBaseChart, + Doughnut: f.drawDoughnutChart, + Line: f.drawLineChart, + Pie3D: f.drawPie3DChart, + Pie: f.drawPieChart, + Radar: f.drawRadarChart, + Scatter: f.drawScatterChart, + } + xlsxChartSpace.Chart.PlotArea = plotAreaFunc[formatSet.Type](formatSet) + + chart, _ := xml.Marshal(xlsxChartSpace) + media := "xl/charts/chart" + strconv.Itoa(count+1) + ".xml" + f.saveFileList(media, chart) +} + +// drawBaseChart provides function to draw the c:plotArea element for bar, +// and column series charts by given format sets. 
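+// The bar direction and grouping are looked up from plotAreaChartBarDir and
+// plotAreaChartGrouping for the requested chart type; the stacked and
+// percent-stacked variants additionally receive a 100% series overlap.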
+func (f *File) drawBaseChart(formatSet *formatChart) *cPlotArea { + c := cCharts{ + BarDir: &attrValString{ + Val: "col", + }, + Grouping: &attrValString{ + Val: "clustered", + }, + VaryColors: &attrValBool{ + Val: true, + }, + Ser: f.drawChartSeries(formatSet), + DLbls: f.drawChartDLbls(formatSet), + AxID: []*attrValInt{ + {Val: 754001152}, + {Val: 753999904}, + }, + } + c.BarDir.Val = plotAreaChartBarDir[formatSet.Type] + c.Grouping.Val = plotAreaChartGrouping[formatSet.Type] + if formatSet.Type == "colStacked" || formatSet.Type == "barStacked" || formatSet.Type == "barPercentStacked" || formatSet.Type == "colPercentStacked" { + c.Overlap = &attrValInt{Val: 100} + } + catAx := f.drawPlotAreaCatAx(formatSet) + valAx := f.drawPlotAreaValAx(formatSet) + charts := map[string]*cPlotArea{ + "bar": { + BarChart: &c, + CatAx: catAx, + ValAx: valAx, + }, + "barStacked": { + BarChart: &c, + CatAx: catAx, + ValAx: valAx, + }, + "barPercentStacked": { + BarChart: &c, + CatAx: catAx, + ValAx: valAx, + }, + "bar3DClustered": { + Bar3DChart: &c, + CatAx: catAx, + ValAx: valAx, + }, + "bar3DStacked": { + Bar3DChart: &c, + CatAx: catAx, + ValAx: valAx, + }, + "bar3DPercentStacked": { + Bar3DChart: &c, + CatAx: catAx, + ValAx: valAx, + }, + "col": { + BarChart: &c, + CatAx: catAx, + ValAx: valAx, + }, + "colStacked": { + BarChart: &c, + CatAx: catAx, + ValAx: valAx, + }, + "colPercentStacked": { + BarChart: &c, + CatAx: catAx, + ValAx: valAx, + }, + "col3DClustered": { + Bar3DChart: &c, + CatAx: catAx, + ValAx: valAx, + }, + "col3D": { + Bar3DChart: &c, + CatAx: catAx, + ValAx: valAx, + }, + "col3DStacked": { + Bar3DChart: &c, + CatAx: catAx, + ValAx: valAx, + }, + "col3DPercentStacked": { + Bar3DChart: &c, + CatAx: catAx, + ValAx: valAx, + }, + } + return charts[formatSet.Type] +} + +// drawDoughnutChart provides function to draw the c:plotArea element for +// doughnut chart by given format sets. +func (f *File) drawDoughnutChart(formatSet *formatChart) *cPlotArea { + return &cPlotArea{ + DoughnutChart: &cCharts{ + VaryColors: &attrValBool{ + Val: true, + }, + Ser: f.drawChartSeries(formatSet), + HoleSize: &attrValInt{Val: 75}, + }, + } +} + +// drawLineChart provides function to draw the c:plotArea element for line chart +// by given format sets. +func (f *File) drawLineChart(formatSet *formatChart) *cPlotArea { + return &cPlotArea{ + LineChart: &cCharts{ + Grouping: &attrValString{ + Val: plotAreaChartGrouping[formatSet.Type], + }, + VaryColors: &attrValBool{ + Val: false, + }, + Ser: f.drawChartSeries(formatSet), + DLbls: f.drawChartDLbls(formatSet), + Smooth: &attrValBool{ + Val: false, + }, + AxID: []*attrValInt{ + {Val: 754001152}, + {Val: 753999904}, + }, + }, + CatAx: f.drawPlotAreaCatAx(formatSet), + ValAx: f.drawPlotAreaValAx(formatSet), + } +} + +// drawPieChart provides function to draw the c:plotArea element for pie chart +// by given format sets. +func (f *File) drawPieChart(formatSet *formatChart) *cPlotArea { + return &cPlotArea{ + PieChart: &cCharts{ + VaryColors: &attrValBool{ + Val: true, + }, + Ser: f.drawChartSeries(formatSet), + }, + } +} + +// drawPie3DChart provides function to draw the c:plotArea element for 3D pie +// chart by given format sets. +func (f *File) drawPie3DChart(formatSet *formatChart) *cPlotArea { + return &cPlotArea{ + Pie3DChart: &cCharts{ + VaryColors: &attrValBool{ + Val: true, + }, + Ser: f.drawChartSeries(formatSet), + }, + } +} + +// drawRadarChart provides function to draw the c:plotArea element for radar +// chart by given format sets. 
+func (f *File) drawRadarChart(formatSet *formatChart) *cPlotArea { + return &cPlotArea{ + RadarChart: &cCharts{ + RadarStyle: &attrValString{ + Val: "marker", + }, + VaryColors: &attrValBool{ + Val: false, + }, + Ser: f.drawChartSeries(formatSet), + DLbls: f.drawChartDLbls(formatSet), + AxID: []*attrValInt{ + {Val: 754001152}, + {Val: 753999904}, + }, + }, + CatAx: f.drawPlotAreaCatAx(formatSet), + ValAx: f.drawPlotAreaValAx(formatSet), + } +} + +// drawScatterChart provides function to draw the c:plotArea element for scatter +// chart by given format sets. +func (f *File) drawScatterChart(formatSet *formatChart) *cPlotArea { + return &cPlotArea{ + ScatterChart: &cCharts{ + ScatterStyle: &attrValString{ + Val: "smoothMarker", // line,lineMarker,marker,none,smooth,smoothMarker + }, + VaryColors: &attrValBool{ + Val: false, + }, + Ser: f.drawChartSeries(formatSet), + DLbls: f.drawChartDLbls(formatSet), + AxID: []*attrValInt{ + {Val: 754001152}, + {Val: 753999904}, + }, + }, + CatAx: f.drawPlotAreaCatAx(formatSet), + ValAx: f.drawPlotAreaValAx(formatSet), + } +} + +// drawChartSeries provides function to draw the c:ser element by given format +// sets. +func (f *File) drawChartSeries(formatSet *formatChart) *[]cSer { + ser := []cSer{} + for k := range formatSet.Series { + ser = append(ser, cSer{ + IDx: &attrValInt{Val: k}, + Order: &attrValInt{Val: k}, + Tx: &cTx{ + StrRef: &cStrRef{ + F: formatSet.Series[k].Name, + }, + }, + SpPr: f.drawChartSeriesSpPr(k, formatSet), + Marker: f.drawChartSeriesMarker(k, formatSet), + DPt: f.drawChartSeriesDPt(k, formatSet), + DLbls: f.drawChartSeriesDLbls(formatSet), + Cat: f.drawChartSeriesCat(formatSet.Series[k], formatSet), + Val: f.drawChartSeriesVal(formatSet.Series[k], formatSet), + XVal: f.drawChartSeriesXVal(formatSet.Series[k], formatSet), + YVal: f.drawChartSeriesYVal(formatSet.Series[k], formatSet), + }) + } + return &ser +} + +// drawChartSeriesSpPr provides function to draw the c:spPr element by given +// format sets. +func (f *File) drawChartSeriesSpPr(i int, formatSet *formatChart) *cSpPr { + spPrScatter := &cSpPr{ + Ln: &aLn{ + W: 25400, + NoFill: " ", + }, + } + spPrLine := &cSpPr{ + Ln: &aLn{ + W: 25400, + Cap: "rnd", // rnd, sq, flat + SolidFill: &aSolidFill{ + SchemeClr: &aSchemeClr{Val: "accent" + strconv.Itoa(i+1)}, + }, + }, + } + chartSeriesSpPr := map[string]*cSpPr{Bar: nil, BarStacked: nil, BarPercentStacked: nil, Bar3DClustered: nil, Bar3DStacked: nil, Bar3DPercentStacked: nil, Col: nil, ColStacked: nil, ColPercentStacked: nil, Col3DClustered: nil, Col3D: nil, Col3DStacked: nil, Col3DPercentStacked: nil, Doughnut: nil, Line: spPrLine, Pie: nil, Pie3D: nil, Radar: nil, Scatter: spPrScatter} + return chartSeriesSpPr[formatSet.Type] +} + +// drawChartSeriesDPt provides function to draw the c:dPt element by given data +// index and format sets. 
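+// Note that per the lookup table below, individual data point (c:dPt)
+// formatting is only emitted for pie and pie3D charts; every other chart type
+// returns nil here.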
+func (f *File) drawChartSeriesDPt(i int, formatSet *formatChart) []*cDPt { + dpt := []*cDPt{{ + IDx: &attrValInt{Val: i}, + Bubble3D: &attrValBool{Val: false}, + SpPr: &cSpPr{ + SolidFill: &aSolidFill{ + SchemeClr: &aSchemeClr{Val: "accent" + strconv.Itoa(i+1)}, + }, + Ln: &aLn{ + W: 25400, + Cap: "rnd", + SolidFill: &aSolidFill{ + SchemeClr: &aSchemeClr{Val: "lt" + strconv.Itoa(i+1)}, + }, + }, + Sp3D: &aSp3D{ + ContourW: 25400, + ContourClr: &aContourClr{ + SchemeClr: &aSchemeClr{Val: "lt" + strconv.Itoa(i+1)}, + }, + }, + }, + }} + chartSeriesDPt := map[string][]*cDPt{Bar: nil, BarStacked: nil, BarPercentStacked: nil, Bar3DClustered: nil, Bar3DStacked: nil, Bar3DPercentStacked: nil, Col: nil, ColStacked: nil, ColPercentStacked: nil, Col3DClustered: nil, Col3D: nil, Col3DStacked: nil, Col3DPercentStacked: nil, Doughnut: nil, Line: nil, Pie: dpt, Pie3D: dpt, Radar: nil, Scatter: nil} + return chartSeriesDPt[formatSet.Type] +} + +// drawChartSeriesCat provides function to draw the c:cat element by given chart +// series and format sets. +func (f *File) drawChartSeriesCat(v formatChartSeries, formatSet *formatChart) *cCat { + cat := &cCat{ + StrRef: &cStrRef{ + F: v.Categories, + }, + } + chartSeriesCat := map[string]*cCat{Bar: cat, BarStacked: cat, BarPercentStacked: cat, Bar3DClustered: cat, Bar3DStacked: cat, Bar3DPercentStacked: cat, Col: cat, ColStacked: cat, ColPercentStacked: cat, Col3DClustered: cat, Col3D: cat, Col3DStacked: cat, Col3DPercentStacked: cat, Doughnut: cat, Line: cat, Pie: cat, Pie3D: cat, Radar: cat, Scatter: nil} + return chartSeriesCat[formatSet.Type] +} + +// drawChartSeriesVal provides function to draw the c:val element by given chart +// series and format sets. +func (f *File) drawChartSeriesVal(v formatChartSeries, formatSet *formatChart) *cVal { + val := &cVal{ + NumRef: &cNumRef{ + F: v.Values, + }, + } + chartSeriesVal := map[string]*cVal{Bar: val, BarStacked: val, BarPercentStacked: val, Bar3DClustered: val, Bar3DStacked: val, Bar3DPercentStacked: val, Col: val, ColStacked: val, ColPercentStacked: val, Col3DClustered: val, Col3D: val, Col3DStacked: val, Col3DPercentStacked: val, Doughnut: val, Line: val, Pie: val, Pie3D: val, Radar: val, Scatter: nil} + return chartSeriesVal[formatSet.Type] +} + +// drawChartSeriesMarker provides function to draw the c:marker element by given +// data index and format sets. +func (f *File) drawChartSeriesMarker(i int, formatSet *formatChart) *cMarker { + marker := &cMarker{ + Symbol: &attrValString{Val: "circle"}, + Size: &attrValInt{Val: 5}, + SpPr: &cSpPr{ + SolidFill: &aSolidFill{ + SchemeClr: &aSchemeClr{ + Val: "accent" + strconv.Itoa(i+1), + }, + }, + Ln: &aLn{ + W: 9252, + SolidFill: &aSolidFill{ + SchemeClr: &aSchemeClr{ + Val: "accent" + strconv.Itoa(i+1), + }, + }, + }, + }, + } + chartSeriesMarker := map[string]*cMarker{Bar: nil, BarStacked: nil, BarPercentStacked: nil, Bar3DClustered: nil, Bar3DStacked: nil, Bar3DPercentStacked: nil, Col: nil, ColStacked: nil, ColPercentStacked: nil, Col3DClustered: nil, Col3D: nil, Col3DStacked: nil, Col3DPercentStacked: nil, Doughnut: nil, Line: nil, Pie: nil, Pie3D: nil, Radar: nil, Scatter: marker} + return chartSeriesMarker[formatSet.Type] +} + +// drawChartSeriesXVal provides function to draw the c:xVal element by given +// chart series and format sets. 
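+// The c:xVal and c:yVal references are only emitted for scatter series; the
+// category-based chart types use c:cat and c:val instead (see
+// drawChartSeriesCat and drawChartSeriesVal above).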
+func (f *File) drawChartSeriesXVal(v formatChartSeries, formatSet *formatChart) *cCat { + cat := &cCat{ + StrRef: &cStrRef{ + F: v.Categories, + }, + } + chartSeriesXVal := map[string]*cCat{Bar: nil, BarStacked: nil, BarPercentStacked: nil, Bar3DClustered: nil, Bar3DStacked: nil, Bar3DPercentStacked: nil, Col: nil, ColStacked: nil, ColPercentStacked: nil, Col3DClustered: nil, Col3D: nil, Col3DStacked: nil, Col3DPercentStacked: nil, Doughnut: nil, Line: nil, Pie: nil, Pie3D: nil, Radar: nil, Scatter: cat} + return chartSeriesXVal[formatSet.Type] +} + +// drawChartSeriesYVal provides function to draw the c:yVal element by given +// chart series and format sets. +func (f *File) drawChartSeriesYVal(v formatChartSeries, formatSet *formatChart) *cVal { + val := &cVal{ + NumRef: &cNumRef{ + F: v.Values, + }, + } + chartSeriesYVal := map[string]*cVal{Bar: nil, BarStacked: nil, BarPercentStacked: nil, Bar3DClustered: nil, Bar3DStacked: nil, Bar3DPercentStacked: nil, Col: nil, ColStacked: nil, ColPercentStacked: nil, Col3DClustered: nil, Col3D: nil, Col3DStacked: nil, Col3DPercentStacked: nil, Doughnut: nil, Line: nil, Pie: nil, Pie3D: nil, Radar: nil, Scatter: val} + return chartSeriesYVal[formatSet.Type] +} + +// drawChartDLbls provides function to draw the c:dLbls element by given format +// sets. +func (f *File) drawChartDLbls(formatSet *formatChart) *cDLbls { + return &cDLbls{ + ShowLegendKey: &attrValBool{Val: formatSet.Legend.ShowLegendKey}, + ShowVal: &attrValBool{Val: formatSet.Plotarea.ShowVal}, + ShowCatName: &attrValBool{Val: formatSet.Plotarea.ShowCatName}, + ShowSerName: &attrValBool{Val: formatSet.Plotarea.ShowSerName}, + ShowBubbleSize: &attrValBool{Val: formatSet.Plotarea.ShowBubbleSize}, + ShowPercent: &attrValBool{Val: formatSet.Plotarea.ShowPercent}, + ShowLeaderLines: &attrValBool{Val: formatSet.Plotarea.ShowLeaderLines}, + } +} + +// drawChartSeriesDLbls provides function to draw the c:dLbls element by given +// format sets. +func (f *File) drawChartSeriesDLbls(formatSet *formatChart) *cDLbls { + dLbls := f.drawChartDLbls(formatSet) + chartSeriesDLbls := map[string]*cDLbls{Bar: dLbls, BarStacked: dLbls, BarPercentStacked: dLbls, Bar3DClustered: dLbls, Bar3DStacked: dLbls, Bar3DPercentStacked: dLbls, Col: dLbls, ColStacked: dLbls, ColPercentStacked: dLbls, Col3DClustered: dLbls, Col3D: dLbls, Col3DStacked: dLbls, Col3DPercentStacked: dLbls, Doughnut: dLbls, Line: dLbls, Pie: dLbls, Pie3D: dLbls, Radar: dLbls, Scatter: nil} + return chartSeriesDLbls[formatSet.Type] +} + +// drawPlotAreaCatAx provides function to draw the c:catAx element. 
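+// A minimum or maximum of 0 in the axis settings is treated as "auto": the
+// corresponding c:min or c:max element is simply omitted from the scaling
+// block.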
+func (f *File) drawPlotAreaCatAx(formatSet *formatChart) []*cAxs { + min := &attrValFloat{Val: formatSet.XAxis.Minimum} + max := &attrValFloat{Val: formatSet.XAxis.Maximum} + if formatSet.XAxis.Minimum == 0 { + min = nil + } + if formatSet.XAxis.Maximum == 0 { + max = nil + } + return []*cAxs{ + { + AxID: &attrValInt{Val: 754001152}, + Scaling: &cScaling{ + Orientation: &attrValString{Val: orientation[formatSet.XAxis.ReverseOrder]}, + Max: max, + Min: min, + }, + Delete: &attrValBool{Val: false}, + AxPos: &attrValString{Val: catAxPos[formatSet.XAxis.ReverseOrder]}, + NumFmt: &cNumFmt{ + FormatCode: "General", + SourceLinked: true, + }, + MajorTickMark: &attrValString{Val: "none"}, + MinorTickMark: &attrValString{Val: "none"}, + TickLblPos: &attrValString{Val: "nextTo"}, + SpPr: f.drawPlotAreaSpPr(), + TxPr: f.drawPlotAreaTxPr(), + CrossAx: &attrValInt{Val: 753999904}, + Crosses: &attrValString{Val: "autoZero"}, + Auto: &attrValBool{Val: true}, + LblAlgn: &attrValString{Val: "ctr"}, + LblOffset: &attrValInt{Val: 100}, + NoMultiLvlLbl: &attrValBool{Val: false}, + }, + } +} + +// drawPlotAreaValAx provides function to draw the c:valAx element. +func (f *File) drawPlotAreaValAx(formatSet *formatChart) []*cAxs { + min := &attrValFloat{Val: formatSet.YAxis.Minimum} + max := &attrValFloat{Val: formatSet.YAxis.Maximum} + if formatSet.YAxis.Minimum == 0 { + min = nil + } + if formatSet.YAxis.Maximum == 0 { + max = nil + } + return []*cAxs{ + { + AxID: &attrValInt{Val: 753999904}, + Scaling: &cScaling{ + Orientation: &attrValString{Val: orientation[formatSet.YAxis.ReverseOrder]}, + Max: max, + Min: min, + }, + Delete: &attrValBool{Val: false}, + AxPos: &attrValString{Val: valAxPos[formatSet.YAxis.ReverseOrder]}, + NumFmt: &cNumFmt{ + FormatCode: chartValAxNumFmtFormatCode[formatSet.Type], + SourceLinked: true, + }, + MajorTickMark: &attrValString{Val: "none"}, + MinorTickMark: &attrValString{Val: "none"}, + TickLblPos: &attrValString{Val: "nextTo"}, + SpPr: f.drawPlotAreaSpPr(), + TxPr: f.drawPlotAreaTxPr(), + CrossAx: &attrValInt{Val: 754001152}, + Crosses: &attrValString{Val: "autoZero"}, + CrossBetween: &attrValString{Val: "between"}, + }, + } +} + +// drawPlotAreaSpPr provides function to draw the c:spPr element. +func (f *File) drawPlotAreaSpPr() *cSpPr { + return &cSpPr{ + Ln: &aLn{ + W: 9525, + Cap: "flat", + Cmpd: "sng", + Algn: "ctr", + SolidFill: &aSolidFill{ + SchemeClr: &aSchemeClr{ + Val: "tx1", + LumMod: &attrValInt{Val: 15000}, + LumOff: &attrValInt{Val: 85000}, + }, + }, + }, + } +} + +// drawPlotAreaTxPr provides function to draw the c:txPr element. +func (f *File) drawPlotAreaTxPr() *cTxPr { + return &cTxPr{ + BodyPr: aBodyPr{ + Rot: -60000000, + SpcFirstLastPara: true, + VertOverflow: "ellipsis", + Vert: "horz", + Wrap: "square", + Anchor: "ctr", + AnchorCtr: true, + }, + P: aP{ + PPr: &aPPr{ + DefRPr: aRPr{ + Sz: 900, + B: false, + I: false, + U: "none", + Strike: "noStrike", + Kern: 1200, + Baseline: 0, + SolidFill: &aSolidFill{ + SchemeClr: &aSchemeClr{ + Val: "tx1", + LumMod: &attrValInt{Val: 15000}, + LumOff: &attrValInt{Val: 85000}, + }, + }, + Latin: &aLatin{Typeface: "+mn-lt"}, + Ea: &aEa{Typeface: "+mn-ea"}, + Cs: &aCs{Typeface: "+mn-cs"}, + }, + }, + EndParaRPr: &aEndParaRPr{Lang: "en-US"}, + }, + } +} + +// drawingParser provides function to parse drawingXML. In order to solve the +// problem that the label structure is changed after serialization and +// deserialization, two different structures: decodeWsDr and encodeWsDr are +// defined. 
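+// The return value is the next available cNvPr ID, i.e. the number of existing
+// one-cell and two-cell anchors in the drawing plus one (or 1 for a drawing
+// that does not exist yet).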
+func (f *File) drawingParser(drawingXML string, content *xlsxWsDr) int { + cNvPrID := 1 + _, ok := f.XLSX[drawingXML] + if ok { // Append Model + decodeWsDr := decodeWsDr{} + _ = xml.Unmarshal([]byte(f.readXML(drawingXML)), &decodeWsDr) + content.R = decodeWsDr.R + cNvPrID = len(decodeWsDr.OneCellAnchor) + len(decodeWsDr.TwoCellAnchor) + 1 + for _, v := range decodeWsDr.OneCellAnchor { + content.OneCellAnchor = append(content.OneCellAnchor, &xdrCellAnchor{ + EditAs: v.EditAs, + GraphicFrame: v.Content, + }) + } + for _, v := range decodeWsDr.TwoCellAnchor { + content.TwoCellAnchor = append(content.TwoCellAnchor, &xdrCellAnchor{ + EditAs: v.EditAs, + GraphicFrame: v.Content, + }) + } + } + return cNvPrID +} + +// addDrawingChart provides function to add chart graphic frame by given sheet, +// drawingXML, cell, width, height, relationship index and format sets. +func (f *File) addDrawingChart(sheet, drawingXML, cell string, width, height, rID int, formatSet *formatPicture) { + cell = strings.ToUpper(cell) + fromCol := string(strings.Map(letterOnlyMapF, cell)) + fromRow, _ := strconv.Atoi(strings.Map(intOnlyMapF, cell)) + row := fromRow - 1 + col := TitleToNumber(fromCol) + width = int(float64(width) * formatSet.XScale) + height = int(float64(height) * formatSet.YScale) + colStart, rowStart, _, _, colEnd, rowEnd, x2, y2 := f.positionObjectPixels(sheet, col, row, formatSet.OffsetX, formatSet.OffsetY, width, height) + content := xlsxWsDr{} + content.A = NameSpaceDrawingML + content.Xdr = NameSpaceDrawingMLSpreadSheet + cNvPrID := f.drawingParser(drawingXML, &content) + twoCellAnchor := xdrCellAnchor{} + twoCellAnchor.EditAs = formatSet.Positioning + from := xlsxFrom{} + from.Col = colStart + from.ColOff = formatSet.OffsetX * EMU + from.Row = rowStart + from.RowOff = formatSet.OffsetY * EMU + to := xlsxTo{} + to.Col = colEnd + to.ColOff = x2 * EMU + to.Row = rowEnd + to.RowOff = y2 * EMU + twoCellAnchor.From = &from + twoCellAnchor.To = &to + + graphicFrame := xlsxGraphicFrame{ + NvGraphicFramePr: xlsxNvGraphicFramePr{ + CNvPr: &xlsxCNvPr{ + ID: f.countCharts() + f.countMedia() + 1, + Name: "Chart " + strconv.Itoa(cNvPrID), + }, + }, + Graphic: &xlsxGraphic{ + GraphicData: &xlsxGraphicData{ + URI: NameSpaceDrawingMLChart, + Chart: &xlsxChart{ + C: NameSpaceDrawingMLChart, + R: SourceRelationship, + RID: "rId" + strconv.Itoa(rID), + }, + }, + }, + } + graphic, _ := xml.Marshal(graphicFrame) + twoCellAnchor.GraphicFrame = string(graphic) + twoCellAnchor.ClientData = &xdrClientData{ + FLocksWithSheet: formatSet.FLocksWithSheet, + FPrintsWithSheet: formatSet.FPrintsWithSheet, + } + content.TwoCellAnchor = append(content.TwoCellAnchor, &twoCellAnchor) + output, _ := xml.Marshal(content) + f.saveFileList(drawingXML, output) +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/col.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/col.go new file mode 100644 index 000000000..05ad0cceb --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/col.go @@ -0,0 +1,366 @@ +package excelize + +import ( + "bytes" + "math" + "strconv" + "strings" +) + +// Define the default cell size and EMU unit of measurement. +const ( + defaultColWidthPixels float64 = 64 + defaultRowHeightPixels float64 = 20 + EMU int = 9525 +) + +// GetColVisible provides a function to get visible of a single column by given +// worksheet name and column name. 
For example, get visible state of column D +// in Sheet1: +// +// xlsx.GetColVisible("Sheet1", "D") +// +func (f *File) GetColVisible(sheet, column string) bool { + xlsx := f.workSheetReader(sheet) + col := TitleToNumber(strings.ToUpper(column)) + 1 + visible := true + if xlsx.Cols == nil { + return visible + } + for c := range xlsx.Cols.Col { + if xlsx.Cols.Col[c].Min <= col && col <= xlsx.Cols.Col[c].Max { + visible = !xlsx.Cols.Col[c].Hidden + } + } + return visible +} + +// SetColVisible provides a function to set visible of a single column by given +// worksheet name and column name. For example, hide column D in Sheet1: +// +// xlsx.SetColVisible("Sheet1", "D", false) +// +func (f *File) SetColVisible(sheet, column string, visible bool) { + xlsx := f.workSheetReader(sheet) + c := TitleToNumber(strings.ToUpper(column)) + 1 + col := xlsxCol{ + Min: c, + Max: c, + Hidden: !visible, + CustomWidth: true, + } + if xlsx.Cols == nil { + cols := xlsxCols{} + cols.Col = append(cols.Col, col) + xlsx.Cols = &cols + return + } + for v := range xlsx.Cols.Col { + if xlsx.Cols.Col[v].Min <= c && c <= xlsx.Cols.Col[v].Max { + col = xlsx.Cols.Col[v] + } + } + col.Min = c + col.Max = c + col.Hidden = !visible + col.CustomWidth = true + xlsx.Cols.Col = append(xlsx.Cols.Col, col) +} + +// GetColOutlineLevel provides a function to get outline level of a single +// column by given worksheet name and column name. For example, get outline +// level of column D in Sheet1: +// +// xlsx.GetColOutlineLevel("Sheet1", "D") +// +func (f *File) GetColOutlineLevel(sheet, column string) uint8 { + xlsx := f.workSheetReader(sheet) + col := TitleToNumber(strings.ToUpper(column)) + 1 + level := uint8(0) + if xlsx.Cols == nil { + return level + } + for c := range xlsx.Cols.Col { + if xlsx.Cols.Col[c].Min <= col && col <= xlsx.Cols.Col[c].Max { + level = xlsx.Cols.Col[c].OutlineLevel + } + } + return level +} + +// SetColOutlineLevel provides a function to set outline level of a single +// column by given worksheet name and column name. For example, set outline +// level of column D in Sheet1 to 2: +// +// xlsx.SetColOutlineLevel("Sheet1", "D", 2) +// +func (f *File) SetColOutlineLevel(sheet, column string, level uint8) { + xlsx := f.workSheetReader(sheet) + c := TitleToNumber(strings.ToUpper(column)) + 1 + col := xlsxCol{ + Min: c, + Max: c, + OutlineLevel: level, + CustomWidth: true, + } + if xlsx.Cols == nil { + cols := xlsxCols{} + cols.Col = append(cols.Col, col) + xlsx.Cols = &cols + return + } + for v := range xlsx.Cols.Col { + if xlsx.Cols.Col[v].Min <= c && c <= xlsx.Cols.Col[v].Max { + col = xlsx.Cols.Col[v] + } + } + col.Min = c + col.Max = c + col.OutlineLevel = level + col.CustomWidth = true + xlsx.Cols.Col = append(xlsx.Cols.Col, col) +} + +// SetColWidth provides function to set the width of a single column or multiple +// columns. 
For example: +// +// xlsx := excelize.NewFile() +// xlsx.SetColWidth("Sheet1", "A", "H", 20) +// err := xlsx.Save() +// if err != nil { +// fmt.Println(err) +// } +// +func (f *File) SetColWidth(sheet, startcol, endcol string, width float64) { + min := TitleToNumber(strings.ToUpper(startcol)) + 1 + max := TitleToNumber(strings.ToUpper(endcol)) + 1 + if min > max { + min, max = max, min + } + xlsx := f.workSheetReader(sheet) + col := xlsxCol{ + Min: min, + Max: max, + Width: width, + CustomWidth: true, + } + if xlsx.Cols != nil { + xlsx.Cols.Col = append(xlsx.Cols.Col, col) + } else { + cols := xlsxCols{} + cols.Col = append(cols.Col, col) + xlsx.Cols = &cols + } +} + +// positionObjectPixels calculate the vertices that define the position of a +// graphical object within the worksheet in pixels. +// +// +------------+------------+ +// | A | B | +// +-----+------------+------------+ +// | |(x1,y1) | | +// | 1 |(A1)._______|______ | +// | | | | | +// | | | | | +// +-----+----| OBJECT |-----+ +// | | | | | +// | 2 | |______________. | +// | | | (B2)| +// | | | (x2,y2)| +// +-----+------------+------------+ +// +// Example of an object that covers some of the area from cell A1 to B2. +// +// Based on the width and height of the object we need to calculate 8 vars: +// +// colStart, rowStart, colEnd, rowEnd, x1, y1, x2, y2. +// +// We also calculate the absolute x and y position of the top left vertex of +// the object. This is required for images. +// +// The width and height of the cells that the object occupies can be +// variable and have to be taken into account. +// +// The values of col_start and row_start are passed in from the calling +// function. The values of col_end and row_end are calculated by +// subtracting the width and height of the object from the width and +// height of the underlying cells. +// +// colStart # Col containing upper left corner of object. +// x1 # Distance to left side of object. +// +// rowStart # Row containing top left corner of object. +// y1 # Distance to top of object. +// +// colEnd # Col containing lower right corner of object. +// x2 # Distance to right side of object. +// +// rowEnd # Row containing bottom right corner of object. +// y2 # Distance to bottom of object. +// +// width # Width of object frame. +// height # Height of object frame. +// +// xAbs # Absolute distance to left side of object. +// yAbs # Absolute distance to top side of object. +// +func (f *File) positionObjectPixels(sheet string, colStart, rowStart, x1, y1, width, height int) (int, int, int, int, int, int, int, int) { + xAbs := 0 + yAbs := 0 + + // Calculate the absolute x offset of the top-left vertex. + for colID := 1; colID <= colStart; colID++ { + xAbs += f.getColWidth(sheet, colID) + } + xAbs += x1 + + // Calculate the absolute y offset of the top-left vertex. + // Store the column change to allow optimisations. + for rowID := 1; rowID <= rowStart; rowID++ { + yAbs += f.getRowHeight(sheet, rowID) + } + yAbs += y1 + + // Adjust start column for offsets that are greater than the col width. + for x1 >= f.getColWidth(sheet, colStart) { + x1 -= f.getColWidth(sheet, colStart) + colStart++ + } + + // Adjust start row for offsets that are greater than the row height. + for y1 >= f.getRowHeight(sheet, rowStart) { + y1 -= f.getRowHeight(sheet, rowStart) + rowStart++ + } + + // Initialise end cell to the same as the start cell. + colEnd := colStart + rowEnd := rowStart + + width += x1 + height += y1 + + // Subtract the underlying cell widths to find end cell of the object. 
+ for width >= f.getColWidth(sheet, colEnd) { + colEnd++ + width -= f.getColWidth(sheet, colEnd) + } + + // Subtract the underlying cell heights to find end cell of the object. + for height >= f.getRowHeight(sheet, rowEnd) { + rowEnd++ + height -= f.getRowHeight(sheet, rowEnd) + } + + // The end vertices are whatever is left from the width and height. + x2 := width + y2 := height + return colStart, rowStart, xAbs, yAbs, colEnd, rowEnd, x2, y2 +} + +// getColWidth provides function to get column width in pixels by given sheet +// name and column index. +func (f *File) getColWidth(sheet string, col int) int { + xlsx := f.workSheetReader(sheet) + if xlsx.Cols != nil { + var width float64 + for _, v := range xlsx.Cols.Col { + if v.Min <= col && col <= v.Max { + width = v.Width + } + } + if width != 0 { + return int(convertColWidthToPixels(width)) + } + } + // Optimisation for when the column widths haven't changed. + return int(defaultColWidthPixels) +} + +// GetColWidth provides function to get column width by given worksheet name and +// column index. +func (f *File) GetColWidth(sheet, column string) float64 { + col := TitleToNumber(strings.ToUpper(column)) + 1 + xlsx := f.workSheetReader(sheet) + if xlsx.Cols != nil { + var width float64 + for _, v := range xlsx.Cols.Col { + if v.Min <= col && col <= v.Max { + width = v.Width + } + } + if width != 0 { + return width + } + } + // Optimisation for when the column widths haven't changed. + return defaultColWidthPixels +} + +// InsertCol provides function to insert a new column before given column index. +// For example, create a new column before column C in Sheet1: +// +// xlsx.InsertCol("Sheet1", "C") +// +func (f *File) InsertCol(sheet, column string) { + col := TitleToNumber(strings.ToUpper(column)) + f.adjustHelper(sheet, col, -1, 1) +} + +// RemoveCol provides function to remove single column by given worksheet name +// and column index. For example, remove column C in Sheet1: +// +// xlsx.RemoveCol("Sheet1", "C") +// +func (f *File) RemoveCol(sheet, column string) { + xlsx := f.workSheetReader(sheet) + for r := range xlsx.SheetData.Row { + for k, v := range xlsx.SheetData.Row[r].C { + axis := v.R + col := string(strings.Map(letterOnlyMapF, axis)) + if col == column { + xlsx.SheetData.Row[r].C = append(xlsx.SheetData.Row[r].C[:k], xlsx.SheetData.Row[r].C[k+1:]...) + } + } + } + col := TitleToNumber(strings.ToUpper(column)) + f.adjustHelper(sheet, col, -1, -1) +} + +// Completion column element tags of XML in a sheet. +func completeCol(xlsx *xlsxWorksheet, row, cell int) { + buffer := bytes.Buffer{} + for r := range xlsx.SheetData.Row { + if len(xlsx.SheetData.Row[r].C) < cell { + start := len(xlsx.SheetData.Row[r].C) + for iii := start; iii < cell; iii++ { + buffer.WriteString(ToAlphaString(iii)) + buffer.WriteString(strconv.Itoa(r + 1)) + xlsx.SheetData.Row[r].C = append(xlsx.SheetData.Row[r].C, xlsxC{ + R: buffer.String(), + }) + buffer.Reset() + } + } + } +} + +// convertColWidthToPixels provieds function to convert the width of a cell from +// user's units to pixels. Excel rounds the column width to the nearest pixel. +// If the width hasn't been set by the user we use the default value. If the +// column is hidden it has a value of zero. 
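+// As a rough illustration of the formula below, a user width of 10 maps to
+// ceil(10*7 + 0.5 + 5) = 76 pixels, and a width below 1 such as 0.5 maps to
+// ceil(0.5*12 + 0.5) = 7 pixels.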
+func convertColWidthToPixels(width float64) float64 { + var padding float64 = 5 + var pixels float64 + var maxDigitWidth float64 = 7 + if width == 0 { + return pixels + } + if width < 1 { + pixels = (width * 12) + 0.5 + return math.Ceil(pixels) + } + pixels = (width*maxDigitWidth + 0.5) + padding + return math.Ceil(pixels) +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/comment.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/comment.go new file mode 100644 index 000000000..bab737068 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/comment.go @@ -0,0 +1,251 @@ +package excelize + +import ( + "encoding/json" + "encoding/xml" + "fmt" + "strconv" + "strings" +) + +// parseFormatCommentsSet provides function to parse the format settings of the +// comment with default value. +func parseFormatCommentsSet(formatSet string) (*formatComment, error) { + format := formatComment{ + Author: "Author:", + Text: " ", + } + err := json.Unmarshal([]byte(formatSet), &format) + return &format, err +} + +// GetComments retrievs all comments and returns a map +// of worksheet name to the worksheet comments. +func (f *File) GetComments() (comments map[string]*xlsxComments) { + comments = map[string]*xlsxComments{} + for n := range f.sheetMap { + commentID := f.GetSheetIndex(n) + commentsXML := "xl/comments" + strconv.Itoa(commentID) + ".xml" + c, ok := f.XLSX[commentsXML] + if ok { + d := xlsxComments{} + xml.Unmarshal([]byte(c), &d) + comments[n] = &d + } + } + return +} + +// AddComment provides the method to add comment in a sheet by given worksheet +// index, cell and format set (such as author and text). Note that the max +// author length is 255 and the max text length is 32512. For example, add a +// comment in Sheet1!$A$30: +// +// xlsx.AddComment("Sheet1", "A30", `{"author":"Excelize: ","text":"This is a comment."}`) +// +func (f *File) AddComment(sheet, cell, format string) error { + formatSet, err := parseFormatCommentsSet(format) + if err != nil { + return err + } + // Read sheet data. + xlsx := f.workSheetReader(sheet) + commentID := f.countComments() + 1 + drawingVML := "xl/drawings/vmlDrawing" + strconv.Itoa(commentID) + ".vml" + sheetRelationshipsComments := "../comments" + strconv.Itoa(commentID) + ".xml" + sheetRelationshipsDrawingVML := "../drawings/vmlDrawing" + strconv.Itoa(commentID) + ".vml" + if xlsx.LegacyDrawing != nil { + // The worksheet already has a comments relationships, use the relationships drawing ../drawings/vmlDrawing%d.vml. + sheetRelationshipsDrawingVML = f.getSheetRelationshipsTargetByID(sheet, xlsx.LegacyDrawing.RID) + commentID, _ = strconv.Atoi(strings.TrimSuffix(strings.TrimPrefix(sheetRelationshipsDrawingVML, "../drawings/vmlDrawing"), ".vml")) + drawingVML = strings.Replace(sheetRelationshipsDrawingVML, "..", "xl", -1) + } else { + // Add first comment for given sheet. 
+ rID := f.addSheetRelationships(sheet, SourceRelationshipDrawingVML, sheetRelationshipsDrawingVML, "") + f.addSheetRelationships(sheet, SourceRelationshipComments, sheetRelationshipsComments, "") + f.addSheetLegacyDrawing(sheet, rID) + } + commentsXML := "xl/comments" + strconv.Itoa(commentID) + ".xml" + f.addComment(commentsXML, cell, formatSet) + var colCount int + for i, l := range strings.Split(formatSet.Text, "\n") { + if ll := len(l); ll > colCount { + if i == 0 { + ll += len(formatSet.Author) + } + colCount = ll + } + } + f.addDrawingVML(commentID, drawingVML, cell, strings.Count(formatSet.Text, "\n")+1, colCount) + f.addContentTypePart(commentID, "comments") + return err +} + +// addDrawingVML provides function to create comment as +// xl/drawings/vmlDrawing%d.vml by given commit ID and cell. +func (f *File) addDrawingVML(commentID int, drawingVML, cell string, lineCount, colCount int) { + col := string(strings.Map(letterOnlyMapF, cell)) + row, _ := strconv.Atoi(strings.Map(intOnlyMapF, cell)) + xAxis := row - 1 + yAxis := TitleToNumber(col) + vml := vmlDrawing{ + XMLNSv: "urn:schemas-microsoft-com:vml", + XMLNSo: "urn:schemas-microsoft-com:office:office", + XMLNSx: "urn:schemas-microsoft-com:office:excel", + XMLNSmv: "http://macVmlSchemaUri", + Shapelayout: &xlsxShapelayout{ + Ext: "edit", + IDmap: &xlsxIDmap{ + Ext: "edit", + Data: commentID, + }, + }, + Shapetype: &xlsxShapetype{ + ID: "_x0000_t202", + Coordsize: "21600,21600", + Spt: 202, + Path: "m0,0l0,21600,21600,21600,21600,0xe", + Stroke: &xlsxStroke{ + Joinstyle: "miter", + }, + VPath: &vPath{ + Gradientshapeok: "t", + Connecttype: "miter", + }, + }, + } + sp := encodeShape{ + Fill: &vFill{ + Color2: "#fbfe82", + Angle: -180, + Type: "gradient", + Fill: &oFill{ + Ext: "view", + Type: "gradientUnscaled", + }, + }, + Shadow: &vShadow{ + On: "t", + Color: "black", + Obscured: "t", + }, + Path: &vPath{ + Connecttype: "none", + }, + Textbox: &vTextbox{ + Style: "mso-direction-alt:auto", + Div: &xlsxDiv{ + Style: "text-align:left", + }, + }, + ClientData: &xClientData{ + ObjectType: "Note", + Anchor: fmt.Sprintf( + "%d, 23, %d, 0, %d, %d, %d, 5", + 1+yAxis, 1+xAxis, 2+yAxis+lineCount, colCount+yAxis, 2+xAxis+lineCount), + AutoFill: "True", + Row: xAxis, + Column: yAxis, + }, + } + s, _ := xml.Marshal(sp) + shape := xlsxShape{ + ID: "_x0000_s1025", + Type: "#_x0000_t202", + Style: "position:absolute;73.5pt;width:108pt;height:59.25pt;z-index:1;visibility:hidden", + Fillcolor: "#fbf6d6", + Strokecolor: "#edeaa1", + Val: string(s[13 : len(s)-14]), + } + c, ok := f.XLSX[drawingVML] + if ok { + d := decodeVmlDrawing{} + _ = xml.Unmarshal([]byte(c), &d) + for _, v := range d.Shape { + s := xlsxShape{ + ID: "_x0000_s1025", + Type: "#_x0000_t202", + Style: "position:absolute;73.5pt;width:108pt;height:59.25pt;z-index:1;visibility:hidden", + Fillcolor: "#fbf6d6", + Strokecolor: "#edeaa1", + Val: v.Val, + } + vml.Shape = append(vml.Shape, s) + } + } + vml.Shape = append(vml.Shape, shape) + v, _ := xml.Marshal(vml) + f.XLSX[drawingVML] = v +} + +// addComment provides function to create chart as xl/comments%d.xml by given +// cell and format sets. 
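+// The author string is truncated to 255 characters and the comment text to
+// 32512 characters before the rich text runs are built.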
+func (f *File) addComment(commentsXML, cell string, formatSet *formatComment) { + a := formatSet.Author + t := formatSet.Text + if len(a) > 255 { + a = a[0:255] + } + if len(t) > 32512 { + t = t[0:32512] + } + comments := xlsxComments{ + Authors: []xlsxAuthor{ + { + Author: formatSet.Author, + }, + }, + } + cmt := xlsxComment{ + Ref: cell, + AuthorID: 0, + Text: xlsxText{ + R: []xlsxR{ + { + RPr: &xlsxRPr{ + B: " ", + Sz: &attrValFloat{Val: 9}, + Color: &xlsxColor{ + Indexed: 81, + }, + RFont: &attrValString{Val: "Calibri"}, + Family: &attrValInt{Val: 2}, + }, + T: a, + }, + { + RPr: &xlsxRPr{ + Sz: &attrValFloat{Val: 9}, + Color: &xlsxColor{ + Indexed: 81, + }, + RFont: &attrValString{Val: "Calibri"}, + Family: &attrValInt{Val: 2}, + }, + T: t, + }, + }, + }, + } + c, ok := f.XLSX[commentsXML] + if ok { + d := xlsxComments{} + _ = xml.Unmarshal([]byte(c), &d) + comments.CommentList.Comment = append(comments.CommentList.Comment, d.CommentList.Comment...) + } + comments.CommentList.Comment = append(comments.CommentList.Comment, cmt) + v, _ := xml.Marshal(comments) + f.saveFileList(commentsXML, v) +} + +// countComments provides function to get comments files count storage in the +// folder xl. +func (f *File) countComments() int { + count := 0 + for k := range f.XLSX { + if strings.Contains(k, "xl/comments") { + count++ + } + } + return count +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/date.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/date.go new file mode 100644 index 000000000..f3db0ee82 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/date.go @@ -0,0 +1,141 @@ +package excelize + +import ( + "math" + "time" +) + +// timeLocationUTC defined the UTC time location. +var timeLocationUTC, _ = time.LoadLocation("UTC") + +// timeToUTCTime provides function to convert time to UTC time. +func timeToUTCTime(t time.Time) time.Time { + return time.Date(t.Year(), t.Month(), t.Day(), t.Hour(), t.Minute(), t.Second(), t.Nanosecond(), timeLocationUTC) +} + +// timeToExcelTime provides function to convert time to Excel time. +func timeToExcelTime(t time.Time) float64 { + // TODO in future this should probably also handle date1904 and like TimeFromExcelTime + var excelTime float64 + var deltaDays int64 + excelTime = 0 + deltaDays = 290 * 364 + // check if UnixNano would be out of int64 range + for t.Unix() > deltaDays*24*60*60 { + // reduce by aprox. 290 years, which is max for int64 nanoseconds + delta := time.Duration(deltaDays) * 24 * time.Hour + excelTime = excelTime + float64(deltaDays) + t = t.Add(-delta) + } + // finally add remainder of UnixNano to keep nano precision + // and 25569 which is days between 1900 and 1970 + return excelTime + float64(t.UnixNano())/8.64e13 + 25569.0 +} + +// shiftJulianToNoon provides function to process julian date to noon. +func shiftJulianToNoon(julianDays, julianFraction float64) (float64, float64) { + switch { + case -0.5 < julianFraction && julianFraction < 0.5: + julianFraction += 0.5 + case julianFraction >= 0.5: + julianDays++ + julianFraction -= 0.5 + case julianFraction <= -0.5: + julianDays-- + julianFraction += 1.5 + } + return julianDays, julianFraction +} + +// fractionOfADay provides function to return the integer values for hour, +// minutes, seconds and nanoseconds that comprised a given fraction of a day. +// values would round to 1 us. 
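+// For instance, a fraction of 0.75 yields 18 hours, 0 minutes, 0 seconds and
+// 0 nanoseconds.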
+func fractionOfADay(fraction float64) (hours, minutes, seconds, nanoseconds int) { + + const ( + c1us = 1e3 + c1s = 1e9 + c1day = 24 * 60 * 60 * c1s + ) + + frac := int64(c1day*fraction + c1us/2) + nanoseconds = int((frac%c1s)/c1us) * c1us + frac /= c1s + seconds = int(frac % 60) + frac /= 60 + minutes = int(frac % 60) + hours = int(frac / 60) + return +} + +// julianDateToGregorianTime provides function to convert julian date to +// gregorian time. +func julianDateToGregorianTime(part1, part2 float64) time.Time { + part1I, part1F := math.Modf(part1) + part2I, part2F := math.Modf(part2) + julianDays := part1I + part2I + julianFraction := part1F + part2F + julianDays, julianFraction = shiftJulianToNoon(julianDays, julianFraction) + day, month, year := doTheFliegelAndVanFlandernAlgorithm(int(julianDays)) + hours, minutes, seconds, nanoseconds := fractionOfADay(julianFraction) + return time.Date(year, time.Month(month), day, hours, minutes, seconds, nanoseconds, time.UTC) +} + +// By this point generations of programmers have repeated the algorithm sent to +// the editor of "Communications of the ACM" in 1968 (published in CACM, volume +// 11, number 10, October 1968, p.657). None of those programmers seems to have +// found it necessary to explain the constants or variable names set out by +// Henry F. Fliegel and Thomas C. Van Flandern. Maybe one day I'll buy that +// jounal and expand an explanation here - that day is not today. +func doTheFliegelAndVanFlandernAlgorithm(jd int) (day, month, year int) { + l := jd + 68569 + n := (4 * l) / 146097 + l = l - (146097*n+3)/4 + i := (4000 * (l + 1)) / 1461001 + l = l - (1461*i)/4 + 31 + j := (80 * l) / 2447 + d := l - (2447*j)/80 + l = j / 11 + m := j + 2 - (12 * l) + y := 100*(n-49) + i + l + return d, m, y +} + +// timeFromExcelTime provides function to convert an excelTime representation +// (stored as a floating point number) to a time.Time. +func timeFromExcelTime(excelTime float64, date1904 bool) time.Time { + const MDD int64 = 106750 // Max time.Duration Days, aprox. 290 years + var date time.Time + var intPart = int64(excelTime) + // Excel uses Julian dates prior to March 1st 1900, and Gregorian + // thereafter. + if intPart <= 61 { + const OFFSET1900 = 15018.0 + const OFFSET1904 = 16480.0 + const MJD0 float64 = 2400000.5 + var date time.Time + if date1904 { + date = julianDateToGregorianTime(MJD0, excelTime+OFFSET1904) + } else { + date = julianDateToGregorianTime(MJD0, excelTime+OFFSET1900) + } + return date + } + var floatPart = excelTime - float64(intPart) + var dayNanoSeconds float64 = 24 * 60 * 60 * 1000 * 1000 * 1000 + if date1904 { + date = time.Date(1904, 1, 1, 0, 0, 0, 0, time.UTC) + } else { + date = time.Date(1899, 12, 30, 0, 0, 0, 0, time.UTC) + } + + // Duration is limited to aprox. 
290 years + for intPart > MDD { + durationDays := time.Duration(MDD) * time.Hour * 24 + date = date.Add(durationDays) + intPart = intPart - MDD + } + durationDays := time.Duration(intPart) * time.Hour * 24 + durationPart := time.Duration(dayNanoSeconds * floatPart) + return date.Add(durationDays).Add(durationPart) +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/excelize.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/excelize.go new file mode 100644 index 000000000..99243a7ca --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/excelize.go @@ -0,0 +1,403 @@ +package excelize + +import ( + "archive/zip" + "bytes" + "encoding/xml" + "io" + "io/ioutil" + "os" + "strconv" + "strings" +) + +// File define a populated XLSX file struct. +type File struct { + checked map[string]bool + sheetMap map[string]string + ContentTypes *xlsxTypes + Path string + SharedStrings *xlsxSST + Sheet map[string]*xlsxWorksheet + SheetCount int + Styles *xlsxStyleSheet + Theme *xlsxTheme + WorkBook *xlsxWorkbook + WorkBookRels *xlsxWorkbookRels + XLSX map[string][]byte +} + +// OpenFile take the name of an XLSX file and returns a populated XLSX file +// struct for it. +func OpenFile(filename string) (*File, error) { + file, err := os.Open(filename) + if err != nil { + return nil, err + } + defer file.Close() + f, err := OpenReader(file) + if err != nil { + return nil, err + } + f.Path = filename + return f, nil +} + +// OpenReader take an io.Reader and return a populated XLSX file. +func OpenReader(r io.Reader) (*File, error) { + b, err := ioutil.ReadAll(r) + if err != nil { + return nil, err + } + + zr, err := zip.NewReader(bytes.NewReader(b), int64(len(b))) + if err != nil { + return nil, err + } + + file, sheetCount, err := ReadZipReader(zr) + if err != nil { + return nil, err + } + f := &File{ + checked: make(map[string]bool), + Sheet: make(map[string]*xlsxWorksheet), + SheetCount: sheetCount, + XLSX: file, + } + f.sheetMap = f.getSheetMap() + f.Styles = f.stylesReader() + f.Theme = f.themeReader() + return f, nil +} + +// setDefaultTimeStyle provides function to set default numbers format for +// time.Time type cell value by given worksheet name, cell coordinates and +// number format code. +func (f *File) setDefaultTimeStyle(sheet, axis string, format int) { + if f.GetCellStyle(sheet, axis) == 0 { + style, _ := f.NewStyle(`{"number_format": ` + strconv.Itoa(format) + `}`) + f.SetCellStyle(sheet, axis, axis, style) + } +} + +// workSheetReader provides function to get the pointer to the structure after +// deserialization by given worksheet name. +func (f *File) workSheetReader(sheet string) *xlsxWorksheet { + name, ok := f.sheetMap[trimSheetName(sheet)] + if !ok { + name = "xl/worksheets/" + strings.ToLower(sheet) + ".xml" + } + if f.Sheet[name] == nil { + var xlsx xlsxWorksheet + _ = xml.Unmarshal(f.readXML(name), &xlsx) + if f.checked == nil { + f.checked = make(map[string]bool) + } + ok := f.checked[name] + if !ok { + checkSheet(&xlsx) + checkRow(&xlsx) + f.checked[name] = true + } + f.Sheet[name] = &xlsx + } + return f.Sheet[name] +} + +// checkSheet provides function to fill each row element and make that is +// continuous in a worksheet of XML. 
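+// Row elements missing between row 1 and the highest row index are inserted as
+// empty rows so that the row indices in SheetData are contiguous.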
+func checkSheet(xlsx *xlsxWorksheet) { + row := len(xlsx.SheetData.Row) + if row >= 1 { + lastRow := xlsx.SheetData.Row[row-1].R + if lastRow >= row { + row = lastRow + } + } + sheetData := xlsxSheetData{} + existsRows := map[int]int{} + for k := range xlsx.SheetData.Row { + existsRows[xlsx.SheetData.Row[k].R] = k + } + for i := 0; i < row; i++ { + _, ok := existsRows[i+1] + if ok { + sheetData.Row = append(sheetData.Row, xlsx.SheetData.Row[existsRows[i+1]]) + } else { + sheetData.Row = append(sheetData.Row, xlsxRow{ + R: i + 1, + }) + } + } + xlsx.SheetData = sheetData +} + +// replaceWorkSheetsRelationshipsNameSpaceBytes provides function to replace +// xl/worksheets/sheet%d.xml XML tags to self-closing for compatible Microsoft +// Office Excel 2007. +func replaceWorkSheetsRelationshipsNameSpaceBytes(workbookMarshal []byte) []byte { + var oldXmlns = []byte(``) + var newXmlns = []byte(``) + workbookMarshal = bytes.Replace(workbookMarshal, oldXmlns, newXmlns, -1) + return workbookMarshal +} + +// UpdateLinkedValue fix linked values within a spreadsheet are not updating in +// Office Excel 2007 and 2010. This function will be remove value tag when met a +// cell have a linked value. Reference +// https://social.technet.microsoft.com/Forums/office/en-US/e16bae1f-6a2c-4325-8013-e989a3479066/excel-2010-linked-cells-not-updating?forum=excel +// +// Notice: after open XLSX file Excel will be update linked value and generate +// new value and will prompt save file or not. +// +// For example: +// +// +// +// SUM(Sheet2!D2,Sheet2!D11) +// 100 +// +// +// +// to +// +// +// +// SUM(Sheet2!D2,Sheet2!D11) +// +// +// +func (f *File) UpdateLinkedValue() { + for _, name := range f.GetSheetMap() { + xlsx := f.workSheetReader(name) + for indexR := range xlsx.SheetData.Row { + for indexC, col := range xlsx.SheetData.Row[indexR].C { + if col.F != nil && col.V != "" { + xlsx.SheetData.Row[indexR].C[indexC].V = "" + xlsx.SheetData.Row[indexR].C[indexC].T = "" + } + } + } + } +} + +// adjustHelper provides function to adjust rows and columns dimensions, +// hyperlinks, merged cells and auto filter when inserting or deleting rows or +// columns. +// +// sheet: Worksheet name that we're editing +// column: Index number of the column we're inserting/deleting before +// row: Index number of the row we're inserting/deleting before +// offset: Number of rows/column to insert/delete negative values indicate deletion +// +// TODO: adjustPageBreaks, adjustComments, adjustDataValidations, adjustProtectedCells +// +func (f *File) adjustHelper(sheet string, column, row, offset int) { + xlsx := f.workSheetReader(sheet) + f.adjustRowDimensions(xlsx, row, offset) + f.adjustColDimensions(xlsx, column, offset) + f.adjustHyperlinks(sheet, column, row, offset) + f.adjustMergeCells(xlsx, column, row, offset) + f.adjustAutoFilter(xlsx, column, row, offset) + checkSheet(xlsx) + checkRow(xlsx) +} + +// adjustColDimensions provides function to update column dimensions when +// inserting or deleting rows or columns. 
+func (f *File) adjustColDimensions(xlsx *xlsxWorksheet, column, offset int) { + for i, r := range xlsx.SheetData.Row { + for k, v := range r.C { + axis := v.R + col := string(strings.Map(letterOnlyMapF, axis)) + row, _ := strconv.Atoi(strings.Map(intOnlyMapF, axis)) + yAxis := TitleToNumber(col) + if yAxis >= column && column != -1 { + xlsx.SheetData.Row[i].C[k].R = ToAlphaString(yAxis+offset) + strconv.Itoa(row) + } + } + } +} + +// adjustRowDimensions provides function to update row dimensions when inserting +// or deleting rows or columns. +func (f *File) adjustRowDimensions(xlsx *xlsxWorksheet, rowIndex, offset int) { + if rowIndex == -1 { + return + } + for i, r := range xlsx.SheetData.Row { + if r.R >= rowIndex { + xlsx.SheetData.Row[i].R += offset + for k, v := range xlsx.SheetData.Row[i].C { + axis := v.R + col := string(strings.Map(letterOnlyMapF, axis)) + row, _ := strconv.Atoi(strings.Map(intOnlyMapF, axis)) + xAxis := row + offset + xlsx.SheetData.Row[i].C[k].R = col + strconv.Itoa(xAxis) + } + } + } +} + +// adjustHyperlinks provides function to update hyperlinks when inserting or +// deleting rows or columns. +func (f *File) adjustHyperlinks(sheet string, column, rowIndex, offset int) { + xlsx := f.workSheetReader(sheet) + + // order is important + if xlsx.Hyperlinks != nil && offset < 0 { + for i, v := range xlsx.Hyperlinks.Hyperlink { + axis := v.Ref + col := string(strings.Map(letterOnlyMapF, axis)) + row, _ := strconv.Atoi(strings.Map(intOnlyMapF, axis)) + yAxis := TitleToNumber(col) + if row == rowIndex || yAxis == column { + f.deleteSheetRelationships(sheet, v.RID) + if len(xlsx.Hyperlinks.Hyperlink) > 1 { + xlsx.Hyperlinks.Hyperlink = append(xlsx.Hyperlinks.Hyperlink[:i], xlsx.Hyperlinks.Hyperlink[i+1:]...) + } else { + xlsx.Hyperlinks = nil + } + } + } + } + + if xlsx.Hyperlinks != nil { + for i, v := range xlsx.Hyperlinks.Hyperlink { + axis := v.Ref + col := string(strings.Map(letterOnlyMapF, axis)) + row, _ := strconv.Atoi(strings.Map(intOnlyMapF, axis)) + xAxis := row + offset + yAxis := TitleToNumber(col) + if rowIndex != -1 && row >= rowIndex { + xlsx.Hyperlinks.Hyperlink[i].Ref = col + strconv.Itoa(xAxis) + } + if column != -1 && yAxis >= column { + xlsx.Hyperlinks.Hyperlink[i].Ref = ToAlphaString(yAxis+offset) + strconv.Itoa(row) + } + } + } +} + +// adjustMergeCellsHelper provides function to update merged cells when inserting or +// deleting rows or columns. 
+func (f *File) adjustMergeCellsHelper(xlsx *xlsxWorksheet, column, rowIndex, offset int) { + if xlsx.MergeCells != nil { + for k, v := range xlsx.MergeCells.Cells { + beg := strings.Split(v.Ref, ":")[0] + end := strings.Split(v.Ref, ":")[1] + + begcol := string(strings.Map(letterOnlyMapF, beg)) + begrow, _ := strconv.Atoi(strings.Map(intOnlyMapF, beg)) + begxAxis := begrow + offset + begyAxis := TitleToNumber(begcol) + + endcol := string(strings.Map(letterOnlyMapF, end)) + endrow, _ := strconv.Atoi(strings.Map(intOnlyMapF, end)) + endxAxis := endrow + offset + endyAxis := TitleToNumber(endcol) + + if rowIndex != -1 { + if begrow > 1 && begrow >= rowIndex { + beg = begcol + strconv.Itoa(begxAxis) + } + if endrow > 1 && endrow >= rowIndex { + end = endcol + strconv.Itoa(endxAxis) + } + } + + if column != -1 { + if begyAxis >= column { + beg = ToAlphaString(begyAxis+offset) + strconv.Itoa(endrow) + } + if endyAxis >= column { + end = ToAlphaString(endyAxis+offset) + strconv.Itoa(endrow) + } + } + + xlsx.MergeCells.Cells[k].Ref = beg + ":" + end + } + } +} + +// adjustMergeCells provides function to update merged cells when inserting or +// deleting rows or columns. +func (f *File) adjustMergeCells(xlsx *xlsxWorksheet, column, rowIndex, offset int) { + f.adjustMergeCellsHelper(xlsx, column, rowIndex, offset) + + if xlsx.MergeCells != nil && offset < 0 { + for k, v := range xlsx.MergeCells.Cells { + beg := strings.Split(v.Ref, ":")[0] + end := strings.Split(v.Ref, ":")[1] + if beg == end { + xlsx.MergeCells.Count += offset + if len(xlsx.MergeCells.Cells) > 1 { + xlsx.MergeCells.Cells = append(xlsx.MergeCells.Cells[:k], xlsx.MergeCells.Cells[k+1:]...) + } else { + xlsx.MergeCells = nil + } + } + } + } +} + +// adjustAutoFilter provides function to update the auto filter when inserting +// or deleting rows or columns. +func (f *File) adjustAutoFilter(xlsx *xlsxWorksheet, column, rowIndex, offset int) { + f.adjustAutoFilterHelper(xlsx, column, rowIndex, offset) + + if xlsx.AutoFilter != nil { + beg := strings.Split(xlsx.AutoFilter.Ref, ":")[0] + end := strings.Split(xlsx.AutoFilter.Ref, ":")[1] + + begcol := string(strings.Map(letterOnlyMapF, beg)) + begrow, _ := strconv.Atoi(strings.Map(intOnlyMapF, beg)) + begxAxis := begrow + offset + + endcol := string(strings.Map(letterOnlyMapF, end)) + endrow, _ := strconv.Atoi(strings.Map(intOnlyMapF, end)) + endxAxis := endrow + offset + endyAxis := TitleToNumber(endcol) + + if rowIndex != -1 { + if begrow >= rowIndex { + beg = begcol + strconv.Itoa(begxAxis) + } + if endrow >= rowIndex { + end = endcol + strconv.Itoa(endxAxis) + } + } + + if column != -1 && endyAxis >= column { + end = ToAlphaString(endyAxis+offset) + strconv.Itoa(endrow) + } + xlsx.AutoFilter.Ref = beg + ":" + end + } +} + +// adjustAutoFilterHelper provides function to update the auto filter when +// inserting or deleting rows or columns. 
+func (f *File) adjustAutoFilterHelper(xlsx *xlsxWorksheet, column, rowIndex, offset int) { + if xlsx.AutoFilter != nil { + beg := strings.Split(xlsx.AutoFilter.Ref, ":")[0] + end := strings.Split(xlsx.AutoFilter.Ref, ":")[1] + + begcol := string(strings.Map(letterOnlyMapF, beg)) + begrow, _ := strconv.Atoi(strings.Map(intOnlyMapF, beg)) + begyAxis := TitleToNumber(begcol) + + endcol := string(strings.Map(letterOnlyMapF, end)) + endyAxis := TitleToNumber(endcol) + endrow, _ := strconv.Atoi(strings.Map(intOnlyMapF, end)) + + if (begrow == rowIndex && offset < 0) || (column == begyAxis && column == endyAxis) { + xlsx.AutoFilter = nil + for i, r := range xlsx.SheetData.Row { + if begrow < r.R && r.R <= endrow { + xlsx.SheetData.Row[i].Hidden = false + } + } + } + } +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/excelize.png b/vendor/github.com/360EntSecGroup-Skylar/excelize/excelize.png new file mode 100644 index 0000000000000000000000000000000000000000..9f220b514f2d14eb9a5a39c833468b13c64621d6 GIT binary patch literal 54188 [binary image data omitted]
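The adjust helpers above all work on the same decomposition: a cell axis such as "C10" is split into its column letters and its row digits, the affected part is shifted by the offset, and the pieces are rejoined. Below is a minimal standalone sketch of that idea; the splitAxis helper is hypothetical, a simplified stand-in for the strings.Map calls with letterOnlyMapF and intOnlyMapF used in the patch.

package main

import (
	"fmt"
	"strconv"
	"strings"
	"unicode"
)

// splitAxis splits a cell reference such as "C10" into its column letters and
// its row number (a simplified stand-in for letterOnlyMapF/intOnlyMapF).
func splitAxis(axis string) (col string, row int) {
	col = strings.TrimRightFunc(axis, unicode.IsDigit)
	row, _ = strconv.Atoi(axis[len(col):])
	return col, row
}

func main() {
	// Inserting one row before row 3 shifts every reference on or below row 3.
	const insertBefore, offset = 3, 1
	for _, ref := range []string{"A1", "B3", "C10"} {
		col, row := splitAxis(ref)
		if row >= insertBefore {
			row += offset
		}
		fmt.Printf("%s -> %s%d\n", ref, col, row)
	}
	// Prints: A1 -> A1, B3 -> B4, C10 -> C11
}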
>8), uint8(g>>8), uint8(b>>8)) + return HSL{h, s, l} +} + +// RGBToHSL converts an RGB triple to a HSL triple. +func RGBToHSL(r, g, b uint8) (h, s, l float64) { + fR := float64(r) / 255 + fG := float64(g) / 255 + fB := float64(b) / 255 + max := math.Max(math.Max(fR, fG), fB) + min := math.Min(math.Min(fR, fG), fB) + l = (max + min) / 2 + if max == min { + // Achromatic. + h, s = 0, 0 + } else { + // Chromatic. + d := max - min + if l > 0.5 { + s = d / (2.0 - max - min) + } else { + s = d / (max + min) + } + switch max { + case fR: + h = (fG - fB) / d + if fG < fB { + h += 6 + } + case fG: + h = (fB-fR)/d + 2 + case fB: + h = (fR-fG)/d + 4 + } + h /= 6 + } + return +} + +// HSLToRGB converts an HSL triple to a RGB triple. +func HSLToRGB(h, s, l float64) (r, g, b uint8) { + var fR, fG, fB float64 + if s == 0 { + fR, fG, fB = l, l, l + } else { + var q float64 + if l < 0.5 { + q = l * (1 + s) + } else { + q = l + s - s*l + } + p := 2*l - q + fR = hueToRGB(p, q, h+1.0/3) + fG = hueToRGB(p, q, h) + fB = hueToRGB(p, q, h-1.0/3) + } + r = uint8((fR * 255) + 0.5) + g = uint8((fG * 255) + 0.5) + b = uint8((fB * 255) + 0.5) + return +} + +// hueToRGB is a helper function for HSLToRGB. 
+func hueToRGB(p, q, t float64) float64 { + if t < 0 { + t++ + } + if t > 1 { + t-- + } + if t < 1.0/6 { + return p + (q-p)*6*t + } + if t < 0.5 { + return q + } + if t < 2.0/3 { + return p + (q-p)*(2.0/3-t)*6 + } + return p +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/lib.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/lib.go new file mode 100644 index 000000000..4379be4d4 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/lib.go @@ -0,0 +1,166 @@ +package excelize + +import ( + "archive/zip" + "bytes" + "io" + "log" + "math" + "unicode" +) + +// ReadZipReader can be used to read an XLSX in memory without touching the +// filesystem. +func ReadZipReader(r *zip.Reader) (map[string][]byte, int, error) { + fileList := make(map[string][]byte) + worksheets := 0 + for _, v := range r.File { + fileList[v.Name] = readFile(v) + if len(v.Name) > 18 { + if v.Name[0:19] == "xl/worksheets/sheet" { + worksheets++ + } + } + } + return fileList, worksheets, nil +} + +// readXML provides function to read XML content as string. +func (f *File) readXML(name string) []byte { + if content, ok := f.XLSX[name]; ok { + return content + } + return []byte{} +} + +// saveFileList provides function to update given file content in file list of +// XLSX. +func (f *File) saveFileList(name string, content []byte) { + newContent := make([]byte, 0, len(XMLHeader)+len(content)) + newContent = append(newContent, []byte(XMLHeader)...) + newContent = append(newContent, content...) + f.XLSX[name] = newContent +} + +// Read file content as string in a archive file. +func readFile(file *zip.File) []byte { + rc, err := file.Open() + if err != nil { + log.Fatal(err) + } + buff := bytes.NewBuffer(nil) + _, _ = io.Copy(buff, rc) + rc.Close() + return buff.Bytes() +} + +// ToAlphaString provides function to convert integer to Excel sheet column +// title. For example convert 36 to column title AK: +// +// excelize.ToAlphaString(36) +// +func ToAlphaString(value int) string { + if value < 0 { + return "" + } + var ans string + i := value + 1 + for i > 0 { + ans = string((i-1)%26+65) + ans + i = (i - 1) / 26 + } + return ans +} + +// TitleToNumber provides function to convert Excel sheet column title to int +// (this function doesn't do value check currently). For example convert AK +// and ak to column title 36: +// +// excelize.TitleToNumber("AK") +// excelize.TitleToNumber("ak") +// +func TitleToNumber(s string) int { + weight := 0.0 + sum := 0 + for i := len(s) - 1; i >= 0; i-- { + ch := int(s[i]) + if int(s[i]) >= int('a') && int(s[i]) <= int('z') { + ch = int(s[i]) - 32 + } + sum = sum + (ch-int('A')+1)*int(math.Pow(26, weight)) + weight++ + } + return sum - 1 +} + +// letterOnlyMapF is used in conjunction with strings.Map to return only the +// characters A-Z and a-z in a string. +func letterOnlyMapF(rune rune) rune { + switch { + case 'A' <= rune && rune <= 'Z': + return rune + case 'a' <= rune && rune <= 'z': + return rune - 32 + } + return -1 +} + +// intOnlyMapF is used in conjunction with strings.Map to return only the +// numeric portions of a string. +func intOnlyMapF(rune rune) rune { + if rune >= 48 && rune < 58 { + return rune + } + return -1 +} + +// boolPtr returns a pointer to a bool with the given value. +func boolPtr(b bool) *bool { return &b } + +// defaultTrue returns true if b is nil, or the pointed value. 
+func defaultTrue(b *bool) bool { + if b == nil { + return true + } + return *b +} + +// axisLowerOrEqualThan returns true if axis1 <= axis2 +// axis1/axis2 can be either a column or a row axis, e.g. "A", "AAE", "42", "1", etc. +// +// For instance, the following comparisons are all true: +// +// "A" <= "B" +// "A" <= "AA" +// "B" <= "AA" +// "BC" <= "ABCD" (in a XLSX sheet, the BC col comes before the ABCD col) +// "1" <= "2" +// "2" <= "11" (in a XLSX sheet, the row 2 comes before the row 11) +// and so on +func axisLowerOrEqualThan(axis1, axis2 string) bool { + if len(axis1) < len(axis2) { + return true + } else if len(axis1) > len(axis2) { + return false + } else { + return axis1 <= axis2 + } +} + +// getCellColRow returns the two parts of a cell identifier (its col and row) as strings +// +// For instance: +// +// "C220" => "C", "220" +// "aaef42" => "aaef", "42" +// "" => "", "" +func getCellColRow(cell string) (col, row string) { + for index, rune := range cell { + if unicode.IsDigit(rune) { + return cell[:index], cell[index:] + } + + } + + return cell, "" +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/logo.png b/vendor/github.com/360EntSecGroup-Skylar/excelize/logo.png new file mode 100644 index 0000000000000000000000000000000000000000..48536983d93e65a84579e33c6ebb8fd9639b902c GIT binary patch literal 4208 [binary image data omitted]
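ToAlphaString and TitleToNumber defined in lib.go above are inverses of each other around a zero-based column index, which is what the column-shifting helpers in excelize.go rely on. A small usage sketch, assuming the package is imported under the path shown in the diff headers:

package main

import (
	"fmt"

	"github.com/360EntSecGroup-Skylar/excelize"
)

func main() {
	// Column title <-> zero-based column index, per the doc comments above.
	fmt.Println(excelize.ToAlphaString(36))   // AK
	fmt.Println(excelize.TitleToNumber("ak")) // 36

	// Round trip: shifting a column by one, the way the adjust helpers do it.
	col := excelize.TitleToNumber("BC")
	fmt.Println(excelize.ToAlphaString(col + 1)) // BD
}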
+		_ = xml.Unmarshal([]byte("<decodeTwoCellAnchor>"+anchor.Content+"</decodeTwoCellAnchor>"), &decodeTwoCellAnchor)
+		if decodeTwoCellAnchor.From != nil && decodeTwoCellAnchor.Pic != nil {
+			if decodeTwoCellAnchor.From.Col == col && decodeTwoCellAnchor.From.Row == row {
+				xlsxWorkbookRelation := f.getDrawingRelationships(drawingRelationships, decodeTwoCellAnchor.Pic.BlipFill.Blip.Embed)
+				_, ok := supportImageTypes[filepath.Ext(xlsxWorkbookRelation.Target)]
+				if ok {
+					return filepath.Base(xlsxWorkbookRelation.Target), []byte(f.XLSX[strings.Replace(xlsxWorkbookRelation.Target, "..", "xl", -1)])
+				}
+			}
+		}
+	}
+	return "", []byte{}
+}
+
+// getDrawingRelationships provides function to get drawing relationships from
+// xl/drawings/_rels/drawing%s.xml.rels by given file name and relationship ID.
+func (f *File) getDrawingRelationships(rels, rID string) *xlsxWorkbookRelation {
+	_, ok := f.XLSX[rels]
+	if !ok {
+		return nil
+	}
+	var drawingRels xlsxWorkbookRels
+	_ = xml.Unmarshal([]byte(f.readXML(rels)), &drawingRels)
+	for _, v := range drawingRels.Relationships {
+		if v.ID == rID {
+			return &v
+		}
+	}
+	return nil
+}
diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/rows.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/rows.go
new file mode 100644
index 000000000..ba569ad2c
--- /dev/null
+++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/rows.go
@@ -0,0 +1,461 @@
+package excelize
+
+import (
+	"bytes"
+	"encoding/xml"
+	"fmt"
+	"io"
+	"math"
+	"strconv"
+	"strings"
+)
+
+// GetRows return all the rows in a sheet by given worksheet name (case
+// sensitive).
For example: +// +// for _, row := range xlsx.GetRows("Sheet1") { +// for _, colCell := range row { +// fmt.Print(colCell, "\t") +// } +// fmt.Println() +// } +// +func (f *File) GetRows(sheet string) [][]string { + xlsx := f.workSheetReader(sheet) + rows := [][]string{} + name, ok := f.sheetMap[trimSheetName(sheet)] + if !ok { + return rows + } + if xlsx != nil { + output, _ := xml.Marshal(f.Sheet[name]) + f.saveFileList(name, replaceWorkSheetsRelationshipsNameSpaceBytes(output)) + } + xml.NewDecoder(bytes.NewReader(f.readXML(name))) + d := f.sharedStringsReader() + var inElement string + var r xlsxRow + var row []string + tr, tc := f.getTotalRowsCols(name) + for i := 0; i < tr; i++ { + row = []string{} + for j := 0; j <= tc; j++ { + row = append(row, "") + } + rows = append(rows, row) + } + decoder := xml.NewDecoder(bytes.NewReader(f.readXML(name))) + for { + token, _ := decoder.Token() + if token == nil { + break + } + switch startElement := token.(type) { + case xml.StartElement: + inElement = startElement.Name.Local + if inElement == "row" { + r = xlsxRow{} + _ = decoder.DecodeElement(&r, &startElement) + cr := r.R - 1 + for _, colCell := range r.C { + c := TitleToNumber(strings.Map(letterOnlyMapF, colCell.R)) + val, _ := colCell.getValueFrom(f, d) + rows[cr][c] = val + } + } + default: + } + } + return rows +} + +// Rows defines an iterator to a sheet +type Rows struct { + decoder *xml.Decoder + token xml.Token + err error + f *File +} + +// Next will return true if find the next row element. +func (rows *Rows) Next() bool { + for { + rows.token, rows.err = rows.decoder.Token() + if rows.err == io.EOF { + rows.err = nil + } + if rows.token == nil { + return false + } + + switch startElement := rows.token.(type) { + case xml.StartElement: + inElement := startElement.Name.Local + if inElement == "row" { + return true + } + } + } +} + +// Error will return the error when the find next row element +func (rows *Rows) Error() error { + return rows.err +} + +// Columns return the current row's column values +func (rows *Rows) Columns() []string { + if rows.token == nil { + return []string{} + } + startElement := rows.token.(xml.StartElement) + r := xlsxRow{} + _ = rows.decoder.DecodeElement(&r, &startElement) + d := rows.f.sharedStringsReader() + row := make([]string, len(r.C)) + for _, colCell := range r.C { + c := TitleToNumber(strings.Map(letterOnlyMapF, colCell.R)) + val, _ := colCell.getValueFrom(rows.f, d) + row[c] = val + } + return row +} + +// ErrSheetNotExist defines an error of sheet is not exist +type ErrSheetNotExist struct { + SheetName string +} + +func (err ErrSheetNotExist) Error() string { + return fmt.Sprintf("Sheet %s is not exist", string(err.SheetName)) +} + +// Rows return a rows iterator. For example: +// +// rows, err := xlsx.GetRows("Sheet1") +// for rows.Next() { +// for _, colCell := range rows.Columns() { +// fmt.Print(colCell, "\t") +// } +// fmt.Println() +// } +// +func (f *File) Rows(sheet string) (*Rows, error) { + xlsx := f.workSheetReader(sheet) + name, ok := f.sheetMap[trimSheetName(sheet)] + if !ok { + return nil, ErrSheetNotExist{sheet} + } + if xlsx != nil { + output, _ := xml.Marshal(f.Sheet[name]) + f.saveFileList(name, replaceWorkSheetsRelationshipsNameSpaceBytes(output)) + } + return &Rows{ + f: f, + decoder: xml.NewDecoder(bytes.NewReader(f.readXML(name))), + }, nil +} + +// getTotalRowsCols provides a function to get total columns and rows in a +// worksheet. 
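+// As an illustrative sketch, GetRows uses the result to pre-size its output: a
+// sheet whose last row element is <row r="5"> and whose right-most cell lives
+// in column "C" yields (5, 2), i.e. five rows and a zero-based last column of 2.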
+func (f *File) getTotalRowsCols(name string) (int, int) { + decoder := xml.NewDecoder(bytes.NewReader(f.readXML(name))) + var inElement string + var r xlsxRow + var tr, tc int + for { + token, _ := decoder.Token() + if token == nil { + break + } + switch startElement := token.(type) { + case xml.StartElement: + inElement = startElement.Name.Local + if inElement == "row" { + r = xlsxRow{} + _ = decoder.DecodeElement(&r, &startElement) + tr = r.R + for _, colCell := range r.C { + col := TitleToNumber(strings.Map(letterOnlyMapF, colCell.R)) + if col > tc { + tc = col + } + } + } + default: + } + } + return tr, tc +} + +// SetRowHeight provides a function to set the height of a single row. For +// example, set the height of the first row in Sheet1: +// +// xlsx.SetRowHeight("Sheet1", 1, 50) +// +func (f *File) SetRowHeight(sheet string, row int, height float64) { + xlsx := f.workSheetReader(sheet) + cells := 0 + rowIdx := row - 1 + completeRow(xlsx, row, cells) + xlsx.SheetData.Row[rowIdx].Ht = height + xlsx.SheetData.Row[rowIdx].CustomHeight = true +} + +// getRowHeight provides function to get row height in pixels by given sheet +// name and row index. +func (f *File) getRowHeight(sheet string, row int) int { + xlsx := f.workSheetReader(sheet) + for _, v := range xlsx.SheetData.Row { + if v.R == row+1 && v.Ht != 0 { + return int(convertRowHeightToPixels(v.Ht)) + } + } + // Optimisation for when the row heights haven't changed. + return int(defaultRowHeightPixels) +} + +// GetRowHeight provides function to get row height by given worksheet name +// and row index. For example, get the height of the first row in Sheet1: +// +// xlsx.GetRowHeight("Sheet1", 1) +// +func (f *File) GetRowHeight(sheet string, row int) float64 { + xlsx := f.workSheetReader(sheet) + for _, v := range xlsx.SheetData.Row { + if v.R == row && v.Ht != 0 { + return v.Ht + } + } + // Optimisation for when the row heights haven't changed. + return defaultRowHeightPixels +} + +// sharedStringsReader provides function to get the pointer to the structure +// after deserialization of xl/sharedStrings.xml. +func (f *File) sharedStringsReader() *xlsxSST { + if f.SharedStrings == nil { + var sharedStrings xlsxSST + ss := f.readXML("xl/sharedStrings.xml") + if len(ss) == 0 { + ss = f.readXML("xl/SharedStrings.xml") + } + _ = xml.Unmarshal([]byte(ss), &sharedStrings) + f.SharedStrings = &sharedStrings + } + return f.SharedStrings +} + +// getValueFrom return a value from a column/row cell, this function is inteded +// to be used with for range on rows an argument with the xlsx opened file. +func (xlsx *xlsxC) getValueFrom(f *File, d *xlsxSST) (string, error) { + switch xlsx.T { + case "s": + xlsxSI := 0 + xlsxSI, _ = strconv.Atoi(xlsx.V) + if len(d.SI[xlsxSI].R) > 0 { + value := "" + for _, v := range d.SI[xlsxSI].R { + value += v.T + } + return value, nil + } + return f.formattedValue(xlsx.S, d.SI[xlsxSI].T), nil + case "str": + return f.formattedValue(xlsx.S, xlsx.V), nil + case "inlineStr": + return f.formattedValue(xlsx.S, xlsx.IS.T), nil + default: + return f.formattedValue(xlsx.S, xlsx.V), nil + } +} + +// SetRowVisible provides a function to set visible of a single row by given +// worksheet name and row index. 
For example, hide row 2 in Sheet1: +// +// xlsx.SetRowVisible("Sheet1", 2, false) +// +func (f *File) SetRowVisible(sheet string, rowIndex int, visible bool) { + xlsx := f.workSheetReader(sheet) + rows := rowIndex + 1 + cells := 0 + completeRow(xlsx, rows, cells) + if visible { + xlsx.SheetData.Row[rowIndex].Hidden = false + return + } + xlsx.SheetData.Row[rowIndex].Hidden = true +} + +// GetRowVisible provides a function to get visible of a single row by given +// worksheet name and row index. For example, get visible state of row 2 in +// Sheet1: +// +// xlsx.GetRowVisible("Sheet1", 2) +// +func (f *File) GetRowVisible(sheet string, rowIndex int) bool { + xlsx := f.workSheetReader(sheet) + rows := rowIndex + 1 + cells := 0 + completeRow(xlsx, rows, cells) + return !xlsx.SheetData.Row[rowIndex].Hidden +} + +// SetRowOutlineLevel provides a function to set outline level number of a +// single row by given worksheet name and row index. For example, outline row +// 2 in Sheet1 to level 1: +// +// xlsx.SetRowOutlineLevel("Sheet1", 2, 1) +// +func (f *File) SetRowOutlineLevel(sheet string, rowIndex int, level uint8) { + xlsx := f.workSheetReader(sheet) + rows := rowIndex + 1 + cells := 0 + completeRow(xlsx, rows, cells) + xlsx.SheetData.Row[rowIndex].OutlineLevel = level +} + +// GetRowOutlineLevel provides a function to get outline level number of a single row by given +// worksheet name and row index. For example, get outline number of row 2 in +// Sheet1: +// +// xlsx.GetRowOutlineLevel("Sheet1", 2) +// +func (f *File) GetRowOutlineLevel(sheet string, rowIndex int) uint8 { + xlsx := f.workSheetReader(sheet) + rows := rowIndex + 1 + cells := 0 + completeRow(xlsx, rows, cells) + return xlsx.SheetData.Row[rowIndex].OutlineLevel +} + +// RemoveRow provides function to remove single row by given worksheet name and +// row index. For example, remove row 3 in Sheet1: +// +// xlsx.RemoveRow("Sheet1", 2) +// +func (f *File) RemoveRow(sheet string, row int) { + if row < 0 { + return + } + xlsx := f.workSheetReader(sheet) + row++ + for i, r := range xlsx.SheetData.Row { + if r.R == row { + xlsx.SheetData.Row = append(xlsx.SheetData.Row[:i], xlsx.SheetData.Row[i+1:]...) + f.adjustHelper(sheet, -1, row, -1) + return + } + } +} + +// InsertRow provides function to insert a new row before given row index. For +// example, create a new row before row 3 in Sheet1: +// +// xlsx.InsertRow("Sheet1", 2) +// +func (f *File) InsertRow(sheet string, row int) { + if row < 0 { + return + } + row++ + f.adjustHelper(sheet, -1, row, 1) +} + +// checkRow provides function to check and fill each column element for all rows +// and make that is continuous in a worksheet of XML. For example: +// +// +// +// +// +// +// +// +// in this case, we should to change it to +// +// +// +// +// +// +// +// +// +// +// +// Noteice: this method could be very slow for large spreadsheets (more than +// 3000 rows one sheet). 
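+//
+// As an illustrative sketch of the transformation, a sparse row such as
+//
+//    <row r="3"><c r="A3"/><c r="D3"/></row>
+//
+// is filled out so that every column up to the last referenced cell exists:
+//
+//    <row r="3"><c r="A3"/><c r="B3"/><c r="C3"/><c r="D3"/></row>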
+func checkRow(xlsx *xlsxWorksheet) { + buffer := bytes.Buffer{} + for k := range xlsx.SheetData.Row { + lenCol := len(xlsx.SheetData.Row[k].C) + if lenCol > 0 { + endR := string(strings.Map(letterOnlyMapF, xlsx.SheetData.Row[k].C[lenCol-1].R)) + endRow, _ := strconv.Atoi(strings.Map(intOnlyMapF, xlsx.SheetData.Row[k].C[lenCol-1].R)) + endCol := TitleToNumber(endR) + 1 + if lenCol < endCol { + oldRow := xlsx.SheetData.Row[k].C + xlsx.SheetData.Row[k].C = xlsx.SheetData.Row[k].C[:0] + tmp := []xlsxC{} + for i := 0; i < endCol; i++ { + buffer.WriteString(ToAlphaString(i)) + buffer.WriteString(strconv.Itoa(endRow)) + tmp = append(tmp, xlsxC{ + R: buffer.String(), + }) + buffer.Reset() + } + xlsx.SheetData.Row[k].C = tmp + for _, y := range oldRow { + colAxis := TitleToNumber(string(strings.Map(letterOnlyMapF, y.R))) + xlsx.SheetData.Row[k].C[colAxis] = y + } + } + } + } +} + +// completeRow provides function to check and fill each column element for a +// single row and make that is continuous in a worksheet of XML by given row +// index and axis. +func completeRow(xlsx *xlsxWorksheet, row, cell int) { + currentRows := len(xlsx.SheetData.Row) + if currentRows > 1 { + lastRow := xlsx.SheetData.Row[currentRows-1].R + if lastRow >= row { + row = lastRow + } + } + for i := currentRows; i < row; i++ { + xlsx.SheetData.Row = append(xlsx.SheetData.Row, xlsxRow{ + R: i + 1, + }) + } + buffer := bytes.Buffer{} + for ii := currentRows; ii < row; ii++ { + start := len(xlsx.SheetData.Row[ii].C) + if start == 0 { + for iii := start; iii < cell; iii++ { + buffer.WriteString(ToAlphaString(iii)) + buffer.WriteString(strconv.Itoa(ii + 1)) + xlsx.SheetData.Row[ii].C = append(xlsx.SheetData.Row[ii].C, xlsxC{ + R: buffer.String(), + }) + buffer.Reset() + } + } + } +} + +// convertRowHeightToPixels provides function to convert the height of a cell +// from user's units to pixels. If the height hasn't been set by the user we use +// the default value. If the row is hidden it has a value of zero. +func convertRowHeightToPixels(height float64) float64 { + var pixels float64 + if height == 0 { + return pixels + } + pixels = math.Ceil(4.0 / 3.0 * height) + return pixels +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/shape.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/shape.go new file mode 100644 index 000000000..96cedb494 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/shape.go @@ -0,0 +1,419 @@ +package excelize + +import ( + "encoding/json" + "encoding/xml" + "strconv" + "strings" +) + +// parseFormatShapeSet provides function to parse the format settings of the +// shape with default value. +func parseFormatShapeSet(formatSet string) (*formatShape, error) { + format := formatShape{ + Width: 160, + Height: 160, + Format: formatPicture{ + FPrintsWithSheet: true, + FLocksWithSheet: false, + NoChangeAspect: false, + OffsetX: 0, + OffsetY: 0, + XScale: 1.0, + YScale: 1.0, + }, + } + err := json.Unmarshal([]byte(formatSet), &format) + return &format, err +} + +// AddShape provides the method to add shape in a sheet by given worksheet +// index, shape format set (such as offset, scale, aspect ratio setting and +// print settings) and properties set. 
For example, add text box (rect shape) in +// Sheet1: +// +// xlsx.AddShape("Sheet1", "G6", `{"type":"rect","color":{"line":"#4286F4","fill":"#8eb9ff"},"paragraph":[{"text":"Rectangle Shape","font":{"bold":true,"italic":true,"family":"Berlin Sans FB Demi","size":36,"color":"#777777","underline":"sng"}}],"width":180,"height": 90}`) +// +// The following shows the type of shape supported by excelize: +// +// accentBorderCallout1 (Callout 1 with Border and Accent Shape) +// accentBorderCallout2 (Callout 2 with Border and Accent Shape) +// accentBorderCallout3 (Callout 3 with Border and Accent Shape) +// accentCallout1 (Callout 1 Shape) +// accentCallout2 (Callout 2 Shape) +// accentCallout3 (Callout 3 Shape) +// actionButtonBackPrevious (Back or Previous Button Shape) +// actionButtonBeginning (Beginning Button Shape) +// actionButtonBlank (Blank Button Shape) +// actionButtonDocument (Document Button Shape) +// actionButtonEnd (End Button Shape) +// actionButtonForwardNext (Forward or Next Button Shape) +// actionButtonHelp (Help Button Shape) +// actionButtonHome (Home Button Shape) +// actionButtonInformation (Information Button Shape) +// actionButtonMovie (Movie Button Shape) +// actionButtonReturn (Return Button Shape) +// actionButtonSound (Sound Button Shape) +// arc (Curved Arc Shape) +// bentArrow (Bent Arrow Shape) +// bentConnector2 (Bent Connector 2 Shape) +// bentConnector3 (Bent Connector 3 Shape) +// bentConnector4 (Bent Connector 4 Shape) +// bentConnector5 (Bent Connector 5 Shape) +// bentUpArrow (Bent Up Arrow Shape) +// bevel (Bevel Shape) +// blockArc (Block Arc Shape) +// borderCallout1 (Callout 1 with Border Shape) +// borderCallout2 (Callout 2 with Border Shape) +// borderCallout3 (Callout 3 with Border Shape) +// bracePair (Brace Pair Shape) +// bracketPair (Bracket Pair Shape) +// callout1 (Callout 1 Shape) +// callout2 (Callout 2 Shape) +// callout3 (Callout 3 Shape) +// can (Can Shape) +// chartPlus (Chart Plus Shape) +// chartStar (Chart Star Shape) +// chartX (Chart X Shape) +// chevron (Chevron Shape) +// chord (Chord Shape) +// circularArrow (Circular Arrow Shape) +// cloud (Cloud Shape) +// cloudCallout (Callout Cloud Shape) +// corner (Corner Shape) +// cornerTabs (Corner Tabs Shape) +// cube (Cube Shape) +// curvedConnector2 (Curved Connector 2 Shape) +// curvedConnector3 (Curved Connector 3 Shape) +// curvedConnector4 (Curved Connector 4 Shape) +// curvedConnector5 (Curved Connector 5 Shape) +// curvedDownArrow (Curved Down Arrow Shape) +// curvedLeftArrow (Curved Left Arrow Shape) +// curvedRightArrow (Curved Right Arrow Shape) +// curvedUpArrow (Curved Up Arrow Shape) +// decagon (Decagon Shape) +// diagStripe (Diagonal Stripe Shape) +// diamond (Diamond Shape) +// dodecagon (Dodecagon Shape) +// donut (Donut Shape) +// doubleWave (Double Wave Shape) +// downArrow (Down Arrow Shape) +// downArrowCallout (Callout Down Arrow Shape) +// ellipse (Ellipse Shape) +// ellipseRibbon (Ellipse Ribbon Shape) +// ellipseRibbon2 (Ellipse Ribbon 2 Shape) +// flowChartAlternateProcess (Alternate Process Flow Shape) +// flowChartCollate (Collate Flow Shape) +// flowChartConnector (Connector Flow Shape) +// flowChartDecision (Decision Flow Shape) +// flowChartDelay (Delay Flow Shape) +// flowChartDisplay (Display Flow Shape) +// flowChartDocument (Document Flow Shape) +// flowChartExtract (Extract Flow Shape) +// flowChartInputOutput (Input Output Flow Shape) +// flowChartInternalStorage (Internal Storage Flow Shape) +// flowChartMagneticDisk (Magnetic Disk Flow Shape) 
+// flowChartMagneticDrum (Magnetic Drum Flow Shape) +// flowChartMagneticTape (Magnetic Tape Flow Shape) +// flowChartManualInput (Manual Input Flow Shape) +// flowChartManualOperation (Manual Operation Flow Shape) +// flowChartMerge (Merge Flow Shape) +// flowChartMultidocument (Multi-Document Flow Shape) +// flowChartOfflineStorage (Offline Storage Flow Shape) +// flowChartOffpageConnector (Off-Page Connector Flow Shape) +// flowChartOnlineStorage (Online Storage Flow Shape) +// flowChartOr (Or Flow Shape) +// flowChartPredefinedProcess (Predefined Process Flow Shape) +// flowChartPreparation (Preparation Flow Shape) +// flowChartProcess (Process Flow Shape) +// flowChartPunchedCard (Punched Card Flow Shape) +// flowChartPunchedTape (Punched Tape Flow Shape) +// flowChartSort (Sort Flow Shape) +// flowChartSummingJunction (Summing Junction Flow Shape) +// flowChartTerminator (Terminator Flow Shape) +// foldedCorner (Folded Corner Shape) +// frame (Frame Shape) +// funnel (Funnel Shape) +// gear6 (Gear 6 Shape) +// gear9 (Gear 9 Shape) +// halfFrame (Half Frame Shape) +// heart (Heart Shape) +// heptagon (Heptagon Shape) +// hexagon (Hexagon Shape) +// homePlate (Home Plate Shape) +// horizontalScroll (Horizontal Scroll Shape) +// irregularSeal1 (Irregular Seal 1 Shape) +// irregularSeal2 (Irregular Seal 2 Shape) +// leftArrow (Left Arrow Shape) +// leftArrowCallout (Callout Left Arrow Shape) +// leftBrace (Left Brace Shape) +// leftBracket (Left Bracket Shape) +// leftCircularArrow (Left Circular Arrow Shape) +// leftRightArrow (Left Right Arrow Shape) +// leftRightArrowCallout (Callout Left Right Arrow Shape) +// leftRightCircularArrow (Left Right Circular Arrow Shape) +// leftRightRibbon (Left Right Ribbon Shape) +// leftRightUpArrow (Left Right Up Arrow Shape) +// leftUpArrow (Left Up Arrow Shape) +// lightningBolt (Lightning Bolt Shape) +// line (Line Shape) +// lineInv (Line Inverse Shape) +// mathDivide (Divide Math Shape) +// mathEqual (Equal Math Shape) +// mathMinus (Minus Math Shape) +// mathMultiply (Multiply Math Shape) +// mathNotEqual (Not Equal Math Shape) +// mathPlus (Plus Math Shape) +// moon (Moon Shape) +// nonIsoscelesTrapezoid (Non-Isosceles Trapezoid Shape) +// noSmoking (No Smoking Shape) +// notchedRightArrow (Notched Right Arrow Shape) +// octagon (Octagon Shape) +// parallelogram (Parallelogram Shape) +// pentagon (Pentagon Shape) +// pie (Pie Shape) +// pieWedge (Pie Wedge Shape) +// plaque (Plaque Shape) +// plaqueTabs (Plaque Tabs Shape) +// plus (Plus Shape) +// quadArrow (Quad-Arrow Shape) +// quadArrowCallout (Callout Quad-Arrow Shape) +// rect (Rectangle Shape) +// ribbon (Ribbon Shape) +// ribbon2 (Ribbon 2 Shape) +// rightArrow (Right Arrow Shape) +// rightArrowCallout (Callout Right Arrow Shape) +// rightBrace (Right Brace Shape) +// rightBracket (Right Bracket Shape) +// round1Rect (One Round Corner Rectangle Shape) +// round2DiagRect (Two Diagonal Round Corner Rectangle Shape) +// round2SameRect (Two Same-side Round Corner Rectangle Shape) +// roundRect (Round Corner Rectangle Shape) +// rtTriangle (Right Triangle Shape) +// smileyFace (Smiley Face Shape) +// snip1Rect (One Snip Corner Rectangle Shape) +// snip2DiagRect (Two Diagonal Snip Corner Rectangle Shape) +// snip2SameRect (Two Same-side Snip Corner Rectangle Shape) +// snipRoundRect (One Snip One Round Corner Rectangle Shape) +// squareTabs (Square Tabs Shape) +// star10 (Ten Pointed Star Shape) +// star12 (Twelve Pointed Star Shape) +// star16 (Sixteen Pointed Star Shape) +// star24 
(Twenty Four Pointed Star Shape) +// star32 (Thirty Two Pointed Star Shape) +// star4 (Four Pointed Star Shape) +// star5 (Five Pointed Star Shape) +// star6 (Six Pointed Star Shape) +// star7 (Seven Pointed Star Shape) +// star8 (Eight Pointed Star Shape) +// straightConnector1 (Straight Connector 1 Shape) +// stripedRightArrow (Striped Right Arrow Shape) +// sun (Sun Shape) +// swooshArrow (Swoosh Arrow Shape) +// teardrop (Teardrop Shape) +// trapezoid (Trapezoid Shape) +// triangle (Triangle Shape) +// upArrow (Up Arrow Shape) +// upArrowCallout (Callout Up Arrow Shape) +// upDownArrow (Up Down Arrow Shape) +// upDownArrowCallout (Callout Up Down Arrow Shape) +// uturnArrow (U-Turn Arrow Shape) +// verticalScroll (Vertical Scroll Shape) +// wave (Wave Shape) +// wedgeEllipseCallout (Callout Wedge Ellipse Shape) +// wedgeRectCallout (Callout Wedge Rectangle Shape) +// wedgeRoundRectCallout (Callout Wedge Round Rectangle Shape) +// +// The following shows the type of text underline supported by excelize: +// +// none +// words +// sng +// dbl +// heavy +// dotted +// dottedHeavy +// dash +// dashHeavy +// dashLong +// dashLongHeavy +// dotDash +// dotDashHeavy +// dotDotDash +// dotDotDashHeavy +// wavy +// wavyHeavy +// wavyDbl +// +func (f *File) AddShape(sheet, cell, format string) error { + formatSet, err := parseFormatShapeSet(format) + if err != nil { + return err + } + // Read sheet data. + xlsx := f.workSheetReader(sheet) + // Add first shape for given sheet, create xl/drawings/ and xl/drawings/_rels/ folder. + drawingID := f.countDrawings() + 1 + drawingXML := "xl/drawings/drawing" + strconv.Itoa(drawingID) + ".xml" + sheetRelationshipsDrawingXML := "../drawings/drawing" + strconv.Itoa(drawingID) + ".xml" + + if xlsx.Drawing != nil { + // The worksheet already has a shape or chart relationships, use the relationships drawing ../drawings/drawing%d.xml. + sheetRelationshipsDrawingXML = f.getSheetRelationshipsTargetByID(sheet, xlsx.Drawing.RID) + drawingID, _ = strconv.Atoi(strings.TrimSuffix(strings.TrimPrefix(sheetRelationshipsDrawingXML, "../drawings/drawing"), ".xml")) + drawingXML = strings.Replace(sheetRelationshipsDrawingXML, "..", "xl", -1) + } else { + // Add first shape for given sheet. + rID := f.addSheetRelationships(sheet, SourceRelationshipDrawingML, sheetRelationshipsDrawingXML, "") + f.addSheetDrawing(sheet, rID) + } + f.addDrawingShape(sheet, drawingXML, cell, formatSet) + f.addContentTypePart(drawingID, "drawings") + return err +} + +// addDrawingShape provides function to add preset geometry by given sheet, +// drawingXMLand format sets. 
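+// As an illustrative sketch, AddShape hands this function a format set parsed
+// from JSON such as
+//
+//    {"type":"ellipse","color":{"line":"#4286F4","fill":"#8eb9ff"},"width":120,"height":60}
+//
+// and the cell anchor plus the width/height (scaled by XScale/YScale) are
+// converted below into the EMU-based two-cell anchor written to the drawing XML.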
+func (f *File) addDrawingShape(sheet, drawingXML, cell string, formatSet *formatShape) { + textUnderlineType := map[string]bool{"none": true, "words": true, "sng": true, "dbl": true, "heavy": true, "dotted": true, "dottedHeavy": true, "dash": true, "dashHeavy": true, "dashLong": true, "dashLongHeavy": true, "dotDash": true, "dotDashHeavy": true, "dotDotDash": true, "dotDotDashHeavy": true, "wavy": true, "wavyHeavy": true, "wavyDbl": true} + cell = strings.ToUpper(cell) + fromCol := string(strings.Map(letterOnlyMapF, cell)) + fromRow, _ := strconv.Atoi(strings.Map(intOnlyMapF, cell)) + row := fromRow - 1 + col := TitleToNumber(fromCol) + width := int(float64(formatSet.Width) * formatSet.Format.XScale) + height := int(float64(formatSet.Height) * formatSet.Format.YScale) + colStart, rowStart, _, _, colEnd, rowEnd, x2, y2 := f.positionObjectPixels(sheet, col, row, formatSet.Format.OffsetX, formatSet.Format.OffsetY, width, height) + content := xlsxWsDr{} + content.A = NameSpaceDrawingML + content.Xdr = NameSpaceDrawingMLSpreadSheet + cNvPrID := f.drawingParser(drawingXML, &content) + twoCellAnchor := xdrCellAnchor{} + twoCellAnchor.EditAs = formatSet.Format.Positioning + from := xlsxFrom{} + from.Col = colStart + from.ColOff = formatSet.Format.OffsetX * EMU + from.Row = rowStart + from.RowOff = formatSet.Format.OffsetY * EMU + to := xlsxTo{} + to.Col = colEnd + to.ColOff = x2 * EMU + to.Row = rowEnd + to.RowOff = y2 * EMU + twoCellAnchor.From = &from + twoCellAnchor.To = &to + shape := xdrSp{ + NvSpPr: &xdrNvSpPr{ + CNvPr: &xlsxCNvPr{ + ID: cNvPrID, + Name: "Shape " + strconv.Itoa(cNvPrID), + }, + CNvSpPr: &xdrCNvSpPr{ + TxBox: true, + }, + }, + SpPr: &xlsxSpPr{ + PrstGeom: xlsxPrstGeom{ + Prst: formatSet.Type, + }, + }, + Style: &xdrStyle{ + LnRef: setShapeRef(formatSet.Color.Line, 2), + FillRef: setShapeRef(formatSet.Color.Fill, 1), + EffectRef: setShapeRef(formatSet.Color.Effect, 0), + FontRef: &aFontRef{ + Idx: "minor", + SchemeClr: &attrValString{ + Val: "tx1", + }, + }, + }, + TxBody: &xdrTxBody{ + BodyPr: &aBodyPr{ + VertOverflow: "clip", + HorzOverflow: "clip", + Wrap: "none", + RtlCol: false, + Anchor: "t", + }, + }, + } + if len(formatSet.Paragraph) < 1 { + formatSet.Paragraph = []formatShapeParagraph{ + { + Font: formatFont{ + Bold: false, + Italic: false, + Underline: "none", + Family: "Calibri", + Size: 11, + Color: "#000000", + }, + Text: " ", + }, + } + } + for _, p := range formatSet.Paragraph { + u := p.Font.Underline + _, ok := textUnderlineType[u] + if !ok { + u = "none" + } + text := p.Text + if text == "" { + text = " " + } + paragraph := &aP{ + R: &aR{ + RPr: aRPr{ + I: p.Font.Italic, + B: p.Font.Bold, + Lang: "en-US", + AltLang: "en-US", + U: u, + Sz: p.Font.Size * 100, + Latin: &aLatin{Typeface: p.Font.Family}, + SolidFill: &aSolidFill{ + SrgbClr: &attrValString{ + Val: strings.Replace(strings.ToUpper(p.Font.Color), "#", "", -1), + }, + }, + }, + T: text, + }, + EndParaRPr: &aEndParaRPr{ + Lang: "en-US", + }, + } + shape.TxBody.P = append(shape.TxBody.P, paragraph) + } + twoCellAnchor.Sp = &shape + twoCellAnchor.ClientData = &xdrClientData{ + FLocksWithSheet: formatSet.Format.FLocksWithSheet, + FPrintsWithSheet: formatSet.Format.FPrintsWithSheet, + } + content.TwoCellAnchor = append(content.TwoCellAnchor, &twoCellAnchor) + output, _ := xml.Marshal(content) + f.saveFileList(drawingXML, output) +} + +// setShapeRef provides function to set color with hex model by given actual +// color value. 
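+// For instance (illustrative), setShapeRef("#4286F4", 2) yields a reference
+// with Idx 2 and an srgbClr value of "4286F4", while an empty color string
+// falls back to Idx 0 with a black scrgbClr.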
+func setShapeRef(color string, i int) *aRef { + if color == "" { + return &aRef{ + Idx: 0, + ScrgbClr: &aScrgbClr{ + R: 0, + G: 0, + B: 0, + }, + } + } + return &aRef{ + Idx: i, + SrgbClr: &attrValString{ + Val: strings.Replace(strings.ToUpper(color), "#", "", -1), + }, + } +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/sheet.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/sheet.go new file mode 100644 index 000000000..535023546 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/sheet.go @@ -0,0 +1,661 @@ +package excelize + +import ( + "bytes" + "encoding/json" + "encoding/xml" + "errors" + "os" + "path" + "strconv" + "strings" + "unicode/utf8" + + "github.com/mohae/deepcopy" +) + +// NewSheet provides function to create a new sheet by given worksheet name, +// when creating a new XLSX file, the default sheet will be create, when you +// create a new file. +func (f *File) NewSheet(name string) int { + // Check if the worksheet already exists + if f.GetSheetIndex(name) != 0 { + return f.SheetCount + } + f.SheetCount++ + // Update docProps/app.xml + f.setAppXML() + // Update [Content_Types].xml + f.setContentTypes(f.SheetCount) + // Create new sheet /xl/worksheets/sheet%d.xml + f.setSheet(f.SheetCount, name) + // Update xl/_rels/workbook.xml.rels + rID := f.addXlsxWorkbookRels(f.SheetCount) + // Update xl/workbook.xml + f.setWorkbook(name, rID) + return f.SheetCount +} + +// contentTypesReader provides function to get the pointer to the +// [Content_Types].xml structure after deserialization. +func (f *File) contentTypesReader() *xlsxTypes { + if f.ContentTypes == nil { + var content xlsxTypes + _ = xml.Unmarshal([]byte(f.readXML("[Content_Types].xml")), &content) + f.ContentTypes = &content + } + return f.ContentTypes +} + +// contentTypesWriter provides function to save [Content_Types].xml after +// serialize structure. +func (f *File) contentTypesWriter() { + if f.ContentTypes != nil { + output, _ := xml.Marshal(f.ContentTypes) + f.saveFileList("[Content_Types].xml", output) + } +} + +// workbookReader provides function to get the pointer to the xl/workbook.xml +// structure after deserialization. +func (f *File) workbookReader() *xlsxWorkbook { + if f.WorkBook == nil { + var content xlsxWorkbook + _ = xml.Unmarshal([]byte(f.readXML("xl/workbook.xml")), &content) + f.WorkBook = &content + } + return f.WorkBook +} + +// workbookWriter provides function to save xl/workbook.xml after serialize +// structure. +func (f *File) workbookWriter() { + if f.WorkBook != nil { + output, _ := xml.Marshal(f.WorkBook) + f.saveFileList("xl/workbook.xml", replaceRelationshipsNameSpaceBytes(output)) + } +} + +// worksheetWriter provides function to save xl/worksheets/sheet%d.xml after +// serialize structure. +func (f *File) worksheetWriter() { + for path, sheet := range f.Sheet { + if sheet != nil { + for k, v := range sheet.SheetData.Row { + f.Sheet[path].SheetData.Row[k].C = trimCell(v.C) + } + output, _ := xml.Marshal(sheet) + f.saveFileList(path, replaceWorkSheetsRelationshipsNameSpaceBytes(output)) + ok := f.checked[path] + if ok { + f.checked[path] = false + } + } + } +} + +// trimCell provides function to trim blank cells which created by completeCol. +func trimCell(column []xlsxC) []xlsxC { + col := make([]xlsxC, len(column)) + i := 0 + for _, c := range column { + if c.S != 0 || c.V != "" || c.F != nil || c.T != "" { + col[i] = c + i++ + } + } + return col[0:i] +} + +// Read and update property of contents type of XLSX. 
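+// As an illustrative example, adding the second worksheet registers an Override
+// for /xl/worksheets/sheet2.xml with the spreadsheetml worksheet content type.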
+func (f *File) setContentTypes(index int) { + content := f.contentTypesReader() + content.Overrides = append(content.Overrides, xlsxOverride{ + PartName: "/xl/worksheets/sheet" + strconv.Itoa(index) + ".xml", + ContentType: "application/vnd.openxmlformats-officedocument.spreadsheetml.worksheet+xml", + }) +} + +// Update sheet property by given index. +func (f *File) setSheet(index int, name string) { + var xlsx xlsxWorksheet + xlsx.Dimension.Ref = "A1" + xlsx.SheetViews.SheetView = append(xlsx.SheetViews.SheetView, xlsxSheetView{ + WorkbookViewID: 0, + }) + path := "xl/worksheets/sheet" + strconv.Itoa(index) + ".xml" + f.sheetMap[trimSheetName(name)] = path + f.Sheet[path] = &xlsx +} + +// setWorkbook update workbook property of XLSX. Maximum 31 characters are +// allowed in sheet title. +func (f *File) setWorkbook(name string, rid int) { + content := f.workbookReader() + content.Sheets.Sheet = append(content.Sheets.Sheet, xlsxSheet{ + Name: trimSheetName(name), + SheetID: strconv.Itoa(rid), + ID: "rId" + strconv.Itoa(rid), + }) +} + +// workbookRelsReader provides function to read and unmarshal workbook +// relationships of XLSX file. +func (f *File) workbookRelsReader() *xlsxWorkbookRels { + if f.WorkBookRels == nil { + var content xlsxWorkbookRels + _ = xml.Unmarshal([]byte(f.readXML("xl/_rels/workbook.xml.rels")), &content) + f.WorkBookRels = &content + } + return f.WorkBookRels +} + +// workbookRelsWriter provides function to save xl/_rels/workbook.xml.rels after +// serialize structure. +func (f *File) workbookRelsWriter() { + if f.WorkBookRels != nil { + output, _ := xml.Marshal(f.WorkBookRels) + f.saveFileList("xl/_rels/workbook.xml.rels", output) + } +} + +// addXlsxWorkbookRels update workbook relationships property of XLSX. +func (f *File) addXlsxWorkbookRels(sheet int) int { + content := f.workbookRelsReader() + rID := 0 + for _, v := range content.Relationships { + t, _ := strconv.Atoi(strings.TrimPrefix(v.ID, "rId")) + if t > rID { + rID = t + } + } + rID++ + ID := bytes.Buffer{} + ID.WriteString("rId") + ID.WriteString(strconv.Itoa(rID)) + target := bytes.Buffer{} + target.WriteString("worksheets/sheet") + target.WriteString(strconv.Itoa(sheet)) + target.WriteString(".xml") + content.Relationships = append(content.Relationships, xlsxWorkbookRelation{ + ID: ID.String(), + Target: target.String(), + Type: SourceRelationshipWorkSheet, + }) + return rID +} + +// setAppXML update docProps/app.xml file of XML. +func (f *File) setAppXML() { + f.saveFileList("docProps/app.xml", []byte(templateDocpropsApp)) +} + +// Some tools that read XLSX files have very strict requirements about the +// structure of the input XML. In particular both Numbers on the Mac and SAS +// dislike inline XML namespace declarations, or namespace prefixes that don't +// match the ones that Excel itself uses. This is a problem because the Go XML +// library doesn't multiple namespace declarations in a single element of a +// document. This function is a horrible hack to fix that after the XML +// marshalling is completed. +func replaceRelationshipsNameSpaceBytes(workbookMarshal []byte) []byte { + oldXmlns := []byte(``) + newXmlns := []byte(``) + return bytes.Replace(workbookMarshal, oldXmlns, newXmlns, -1) +} + +// SetActiveSheet provides function to set default active worksheet of XLSX by +// given index. Note that active index is different with the index that got by +// function GetSheetMap, and it should be greater than 0 and less than total +// worksheet numbers. 
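+// For example, make the second worksheet the active one (a usage sketch that
+// assumes the workbook has at least two sheets):
+//
+//    xlsx.SetActiveSheet(2)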
+func (f *File) SetActiveSheet(index int) { + if index < 1 { + index = 1 + } + index-- + content := f.workbookReader() + if len(content.BookViews.WorkBookView) > 0 { + content.BookViews.WorkBookView[0].ActiveTab = index + } else { + content.BookViews.WorkBookView = append(content.BookViews.WorkBookView, xlsxWorkBookView{ + ActiveTab: index, + }) + } + index++ + for idx, name := range f.GetSheetMap() { + xlsx := f.workSheetReader(name) + if index == idx { + if len(xlsx.SheetViews.SheetView) > 0 { + xlsx.SheetViews.SheetView[0].TabSelected = true + } else { + xlsx.SheetViews.SheetView = append(xlsx.SheetViews.SheetView, xlsxSheetView{ + TabSelected: true, + }) + } + } else { + if len(xlsx.SheetViews.SheetView) > 0 { + xlsx.SheetViews.SheetView[0].TabSelected = false + } + } + } +} + +// GetActiveSheetIndex provides function to get active sheet of XLSX. If not +// found the active sheet will be return integer 0. +func (f *File) GetActiveSheetIndex() int { + buffer := bytes.Buffer{} + content := f.workbookReader() + for _, v := range content.Sheets.Sheet { + xlsx := xlsxWorksheet{} + buffer.WriteString("xl/worksheets/sheet") + buffer.WriteString(strings.TrimPrefix(v.ID, "rId")) + buffer.WriteString(".xml") + _ = xml.Unmarshal([]byte(f.readXML(buffer.String())), &xlsx) + for _, sheetView := range xlsx.SheetViews.SheetView { + if sheetView.TabSelected { + ID, _ := strconv.Atoi(strings.TrimPrefix(v.ID, "rId")) + return ID + } + } + buffer.Reset() + } + return 0 +} + +// SetSheetName provides function to set the worksheet name be given old and new +// worksheet name. Maximum 31 characters are allowed in sheet title and this +// function only changes the name of the sheet and will not update the sheet +// name in the formula or reference associated with the cell. So there may be +// problem formula error or reference missing. +func (f *File) SetSheetName(oldName, newName string) { + oldName = trimSheetName(oldName) + newName = trimSheetName(newName) + content := f.workbookReader() + for k, v := range content.Sheets.Sheet { + if v.Name == oldName { + content.Sheets.Sheet[k].Name = newName + f.sheetMap[newName] = f.sheetMap[oldName] + delete(f.sheetMap, oldName) + } + } +} + +// GetSheetName provides function to get worksheet name of XLSX by given +// worksheet index. If given sheet index is invalid, will return an empty +// string. +func (f *File) GetSheetName(index int) string { + content := f.workbookReader() + rels := f.workbookRelsReader() + for _, rel := range rels.Relationships { + rID, _ := strconv.Atoi(strings.TrimSuffix(strings.TrimPrefix(rel.Target, "worksheets/sheet"), ".xml")) + if rID == index { + for _, v := range content.Sheets.Sheet { + if v.ID == rel.ID { + return v.Name + } + } + } + } + return "" +} + +// GetSheetIndex provides function to get worksheet index of XLSX by given sheet +// name. If given worksheet name is invalid, will return an integer type value +// 0. +func (f *File) GetSheetIndex(name string) int { + content := f.workbookReader() + rels := f.workbookRelsReader() + for _, v := range content.Sheets.Sheet { + if v.Name == name { + for _, rel := range rels.Relationships { + if v.ID == rel.ID { + rID, _ := strconv.Atoi(strings.TrimSuffix(strings.TrimPrefix(rel.Target, "worksheets/sheet"), ".xml")) + return rID + } + } + } + } + return 0 +} + +// GetSheetMap provides function to get worksheet name and index map of XLSX. 
+// For example: +// +// xlsx, err := excelize.OpenFile("./Book1.xlsx") +// if err != nil { +// return +// } +// for index, name := range xlsx.GetSheetMap() { +// fmt.Println(index, name) +// } +// +func (f *File) GetSheetMap() map[int]string { + content := f.workbookReader() + rels := f.workbookRelsReader() + sheetMap := map[int]string{} + for _, v := range content.Sheets.Sheet { + for _, rel := range rels.Relationships { + if rel.ID == v.ID { + rID, _ := strconv.Atoi(strings.TrimSuffix(strings.TrimPrefix(rel.Target, "worksheets/sheet"), ".xml")) + sheetMap[rID] = v.Name + } + } + } + return sheetMap +} + +// getSheetMap provides function to get worksheet name and XML file path map of +// XLSX. +func (f *File) getSheetMap() map[string]string { + maps := make(map[string]string) + for idx, name := range f.GetSheetMap() { + maps[name] = "xl/worksheets/sheet" + strconv.Itoa(idx) + ".xml" + } + return maps +} + +// SetSheetBackground provides function to set background picture by given +// worksheet name. +func (f *File) SetSheetBackground(sheet, picture string) error { + var err error + // Check picture exists first. + if _, err = os.Stat(picture); os.IsNotExist(err) { + return err + } + ext, ok := supportImageTypes[path.Ext(picture)] + if !ok { + return errors.New("Unsupported image extension") + } + pictureID := f.countMedia() + 1 + rID := f.addSheetRelationships(sheet, SourceRelationshipImage, "../media/image"+strconv.Itoa(pictureID)+ext, "") + f.addSheetPicture(sheet, rID) + f.addMedia(picture, ext) + f.setContentTypePartImageExtensions() + return err +} + +// DeleteSheet provides function to delete worksheet in a workbook by given +// worksheet name. Use this method with caution, which will affect changes in +// references such as formulas, charts, and so on. If there is any referenced +// value of the deleted worksheet, it will cause a file error when you open it. +// This function will be invalid when only the one worksheet is left. +func (f *File) DeleteSheet(name string) { + content := f.workbookReader() + for k, v := range content.Sheets.Sheet { + if v.Name == trimSheetName(name) && len(content.Sheets.Sheet) > 1 { + content.Sheets.Sheet = append(content.Sheets.Sheet[:k], content.Sheets.Sheet[k+1:]...) + sheet := "xl/worksheets/sheet" + strings.TrimPrefix(v.ID, "rId") + ".xml" + rels := "xl/worksheets/_rels/sheet" + strings.TrimPrefix(v.ID, "rId") + ".xml.rels" + target := f.deleteSheetFromWorkbookRels(v.ID) + f.deleteSheetFromContentTypes(target) + delete(f.sheetMap, name) + delete(f.XLSX, sheet) + delete(f.XLSX, rels) + delete(f.Sheet, sheet) + f.SheetCount-- + } + } + f.SetActiveSheet(len(f.GetSheetMap())) +} + +// deleteSheetFromWorkbookRels provides function to remove worksheet +// relationships by given relationships ID in the file +// xl/_rels/workbook.xml.rels. +func (f *File) deleteSheetFromWorkbookRels(rID string) string { + content := f.workbookRelsReader() + for k, v := range content.Relationships { + if v.ID == rID { + content.Relationships = append(content.Relationships[:k], content.Relationships[k+1:]...) + return v.Target + } + } + return "" +} + +// deleteSheetFromContentTypes provides function to remove worksheet +// relationships by given target name in the file [Content_Types].xml. +func (f *File) deleteSheetFromContentTypes(target string) { + content := f.contentTypesReader() + for k, v := range content.Overrides { + if v.PartName == "/xl/"+target { + content.Overrides = append(content.Overrides[:k], content.Overrides[k+1:]...) 
+ } + } +} + +// CopySheet provides function to duplicate a worksheet by gave source and +// target worksheet index. Note that currently doesn't support duplicate +// workbooks that contain tables, charts or pictures. For Example: +// +// // Sheet1 already exists... +// index := xlsx.NewSheet("Sheet2") +// err := xlsx.CopySheet(1, index) +// return err +// +func (f *File) CopySheet(from, to int) error { + if from < 1 || to < 1 || from == to || f.GetSheetName(from) == "" || f.GetSheetName(to) == "" { + return errors.New("Invalid worksheet index") + } + f.copySheet(from, to) + return nil +} + +// copySheet provides function to duplicate a worksheet by gave source and +// target worksheet name. +func (f *File) copySheet(from, to int) { + sheet := f.workSheetReader("sheet" + strconv.Itoa(from)) + worksheet := deepcopy.Copy(sheet).(*xlsxWorksheet) + path := "xl/worksheets/sheet" + strconv.Itoa(to) + ".xml" + if len(worksheet.SheetViews.SheetView) > 0 { + worksheet.SheetViews.SheetView[0].TabSelected = false + } + worksheet.Drawing = nil + worksheet.TableParts = nil + worksheet.PageSetUp = nil + f.Sheet[path] = worksheet + toRels := "xl/worksheets/_rels/sheet" + strconv.Itoa(to) + ".xml.rels" + fromRels := "xl/worksheets/_rels/sheet" + strconv.Itoa(from) + ".xml.rels" + _, ok := f.XLSX[fromRels] + if ok { + f.XLSX[toRels] = f.XLSX[fromRels] + } +} + +// SetSheetVisible provides function to set worksheet visible by given worksheet +// name. A workbook must contain at least one visible worksheet. If the given +// worksheet has been activated, this setting will be invalidated. Sheet state +// values as defined by http://msdn.microsoft.com/en-us/library/office/documentformat.openxml.spreadsheet.sheetstatevalues.aspx +// +// visible +// hidden +// veryHidden +// +// For example, hide Sheet1: +// +// xlsx.SetSheetVisible("Sheet1", false) +// +func (f *File) SetSheetVisible(name string, visible bool) { + name = trimSheetName(name) + content := f.workbookReader() + if visible { + for k, v := range content.Sheets.Sheet { + if v.Name == name { + content.Sheets.Sheet[k].State = "" + } + } + return + } + count := 0 + for _, v := range content.Sheets.Sheet { + if v.State != "hidden" { + count++ + } + } + for k, v := range content.Sheets.Sheet { + xlsx := f.workSheetReader(f.GetSheetMap()[k]) + tabSelected := false + if len(xlsx.SheetViews.SheetView) > 0 { + tabSelected = xlsx.SheetViews.SheetView[0].TabSelected + } + if v.Name == name && count > 1 && !tabSelected { + content.Sheets.Sheet[k].State = "hidden" + } + } +} + +// parseFormatPanesSet provides function to parse the panes settings. +func parseFormatPanesSet(formatSet string) (*formatPanes, error) { + format := formatPanes{} + err := json.Unmarshal([]byte(formatSet), &format) + return &format, err +} + +// SetPanes provides function to create and remove freeze panes and split panes +// by given worksheet name and panes format set. +// +// activePane defines the pane that is active. The possible values for this +// attribute are defined in the following table: +// +// Enumeration Value | Description +// --------------------------------+------------------------------------------------------------- +// bottomLeft (Bottom Left Pane) | Bottom left pane, when both vertical and horizontal +// | splits are applied. +// | +// | This value is also used when only a horizontal split has +// | been applied, dividing the pane into upper and lower +// | regions. In that case, this value specifies the bottom +// | pane. 
+// | +// bottomRight (Bottom Right Pane) | Bottom right pane, when both vertical and horizontal +// | splits are applied. +// | +// topLeft (Top Left Pane) | Top left pane, when both vertical and horizontal splits +// | are applied. +// | +// | This value is also used when only a horizontal split has +// | been applied, dividing the pane into upper and lower +// | regions. In that case, this value specifies the top pane. +// | +// | This value is also used when only a vertical split has +// | been applied, dividing the pane into right and left +// | regions. In that case, this value specifies the left pane +// | +// topRight (Top Right Pane) | Top right pane, when both vertical and horizontal +// | splits are applied. +// | +// | This value is also used when only a vertical split has +// | been applied, dividing the pane into right and left +// | regions. In that case, this value specifies the right +// | pane. +// +// Pane state type is restricted to the values supported currently listed in the following table: +// +// Enumeration Value | Description +// --------------------------------+------------------------------------------------------------- +// frozen (Frozen) | Panes are frozen, but were not split being frozen. In +// | this state, when the panes are unfrozen again, a single +// | pane results, with no split. +// | +// | In this state, the split bars are not adjustable. +// | +// split (Split) | Panes are split, but not frozen. In this state, the split +// | bars are adjustable by the user. +// +// x_split (Horizontal Split Position): Horizontal position of the split, in +// 1/20th of a point; 0 (zero) if none. If the pane is frozen, this value +// indicates the number of columns visible in the top pane. +// +// y_split (Vertical Split Position): Vertical position of the split, in 1/20th +// of a point; 0 (zero) if none. If the pane is frozen, this value indicates the +// number of rows visible in the left pane. The possible values for this +// attribute are defined by the W3C XML Schema double datatype. +// +// top_left_cell: Location of the top left visible cell in the bottom right pane +// (when in Left-To-Right mode). +// +// sqref (Sequence of References): Range of the selection. Can be non-contiguous +// set of ranges. 
+// +// An example of how to freeze column A in the Sheet1 and set the active cell on +// Sheet1!K16: +// +// xlsx.SetPanes("Sheet1", `{"freeze":true,"split":false,"x_split":1,"y_split":0,"top_left_cell":"B1","active_pane":"topRight","panes":[{"sqref":"K16","active_cell":"K16","pane":"topRight"}]}`) +// +// An example of how to freeze rows 1 to 9 in the Sheet1 and set the active cell +// ranges on Sheet1!A11:XFD11: +// +// xlsx.SetPanes("Sheet1", `{"freeze":true,"split":false,"x_split":0,"y_split":9,"top_left_cell":"A34","active_pane":"bottomLeft","panes":[{"sqref":"A11:XFD11","active_cell":"A11","pane":"bottomLeft"}]}`) +// +// An example of how to create split panes in the Sheet1 and set the active cell +// on Sheet1!J60: +// +// xlsx.SetPanes("Sheet1", `{"freeze":false,"split":true,"x_split":3270,"y_split":1800,"top_left_cell":"N57","active_pane":"bottomLeft","panes":[{"sqref":"I36","active_cell":"I36"},{"sqref":"G33","active_cell":"G33","pane":"topRight"},{"sqref":"J60","active_cell":"J60","pane":"bottomLeft"},{"sqref":"O60","active_cell":"O60","pane":"bottomRight"}]}`) +// +// An example of how to unfreeze and remove all panes on Sheet1: +// +// xlsx.SetPanes("Sheet1", `{"freeze":false,"split":false}`) +// +func (f *File) SetPanes(sheet, panes string) { + fs, _ := parseFormatPanesSet(panes) + xlsx := f.workSheetReader(sheet) + p := &xlsxPane{ + ActivePane: fs.ActivePane, + TopLeftCell: fs.TopLeftCell, + XSplit: float64(fs.XSplit), + YSplit: float64(fs.YSplit), + } + if fs.Freeze { + p.State = "frozen" + } + xlsx.SheetViews.SheetView[len(xlsx.SheetViews.SheetView)-1].Pane = p + if !(fs.Freeze) && !(fs.Split) { + if len(xlsx.SheetViews.SheetView) > 0 { + xlsx.SheetViews.SheetView[len(xlsx.SheetViews.SheetView)-1].Pane = nil + } + } + s := []*xlsxSelection{} + for _, p := range fs.Panes { + s = append(s, &xlsxSelection{ + ActiveCell: p.ActiveCell, + Pane: p.Pane, + SQRef: p.SQRef, + }) + } + xlsx.SheetViews.SheetView[len(xlsx.SheetViews.SheetView)-1].Selection = s +} + +// GetSheetVisible provides function to get worksheet visible by given worksheet +// name. For example, get visible state of Sheet1: +// +// xlsx.GetSheetVisible("Sheet1") +// +func (f *File) GetSheetVisible(name string) bool { + content := f.workbookReader() + visible := false + for k, v := range content.Sheets.Sheet { + if v.Name == trimSheetName(name) { + if content.Sheets.Sheet[k].State == "" || content.Sheets.Sheet[k].State == "visible" { + visible = true + } + } + } + return visible +} + +// trimSheetName provides function to trim invaild characters by given worksheet +// name. +func trimSheetName(name string) string { + r := []rune{} + for _, v := range name { + switch v { + case 58, 92, 47, 63, 42, 91, 93: // replace :\/?*[] + continue + default: + r = append(r, v) + } + } + name = string(r) + if utf8.RuneCountInString(name) > 31 { + name = string([]rune(name)[0:31]) + } + return name +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/sheetpr.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/sheetpr.go new file mode 100644 index 000000000..7b8df5483 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/sheetpr.go @@ -0,0 +1,139 @@ +package excelize + +// SheetPrOption is an option of a view of a worksheet. See SetSheetPrOptions(). +type SheetPrOption interface { + setSheetPrOption(view *xlsxSheetPr) +} + +// SheetPrOptionPtr is a writable SheetPrOption. See GetSheetPrOptions(). 
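+// A minimal usage sketch (assuming an open *excelize.File named xlsx with a
+// sheet named Sheet1):
+//
+//    err := xlsx.SetSheetPrOptions("Sheet1", excelize.FitToPage(true))
+//    var fit excelize.FitToPage
+//    err = xlsx.GetSheetPrOptions("Sheet1", &fit)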
+type SheetPrOptionPtr interface { + SheetPrOption + getSheetPrOption(view *xlsxSheetPr) +} + +type ( + // CodeName is a SheetPrOption + CodeName string + // EnableFormatConditionsCalculation is a SheetPrOption + EnableFormatConditionsCalculation bool + // Published is a SheetPrOption + Published bool + // FitToPage is a SheetPrOption + FitToPage bool + // AutoPageBreaks is a SheetPrOption + AutoPageBreaks bool +) + +func (o CodeName) setSheetPrOption(pr *xlsxSheetPr) { + pr.CodeName = string(o) +} + +func (o *CodeName) getSheetPrOption(pr *xlsxSheetPr) { + if pr == nil { + *o = "" + return + } + *o = CodeName(pr.CodeName) +} + +func (o EnableFormatConditionsCalculation) setSheetPrOption(pr *xlsxSheetPr) { + pr.EnableFormatConditionsCalculation = boolPtr(bool(o)) +} + +func (o *EnableFormatConditionsCalculation) getSheetPrOption(pr *xlsxSheetPr) { + if pr == nil { + *o = true + return + } + *o = EnableFormatConditionsCalculation(defaultTrue(pr.EnableFormatConditionsCalculation)) +} + +func (o Published) setSheetPrOption(pr *xlsxSheetPr) { + pr.Published = boolPtr(bool(o)) +} + +func (o *Published) getSheetPrOption(pr *xlsxSheetPr) { + if pr == nil { + *o = true + return + } + *o = Published(defaultTrue(pr.Published)) +} + +func (o FitToPage) setSheetPrOption(pr *xlsxSheetPr) { + if pr.PageSetUpPr == nil { + if !o { + return + } + pr.PageSetUpPr = new(xlsxPageSetUpPr) + } + pr.PageSetUpPr.FitToPage = bool(o) +} + +func (o *FitToPage) getSheetPrOption(pr *xlsxSheetPr) { + // Excel default: false + if pr == nil || pr.PageSetUpPr == nil { + *o = false + return + } + *o = FitToPage(pr.PageSetUpPr.FitToPage) +} + +func (o AutoPageBreaks) setSheetPrOption(pr *xlsxSheetPr) { + if pr.PageSetUpPr == nil { + if !o { + return + } + pr.PageSetUpPr = new(xlsxPageSetUpPr) + } + pr.PageSetUpPr.AutoPageBreaks = bool(o) +} + +func (o *AutoPageBreaks) getSheetPrOption(pr *xlsxSheetPr) { + // Excel default: false + if pr == nil || pr.PageSetUpPr == nil { + *o = false + return + } + *o = AutoPageBreaks(pr.PageSetUpPr.AutoPageBreaks) +} + +// SetSheetPrOptions provides function to sets worksheet properties. +// +// Available options: +// CodeName(string) +// EnableFormatConditionsCalculation(bool) +// Published(bool) +// FitToPage(bool) +// AutoPageBreaks(bool) +func (f *File) SetSheetPrOptions(name string, opts ...SheetPrOption) error { + sheet := f.workSheetReader(name) + pr := sheet.SheetPr + if pr == nil { + pr = new(xlsxSheetPr) + sheet.SheetPr = pr + } + + for _, opt := range opts { + opt.setSheetPrOption(pr) + } + return nil +} + +// GetSheetPrOptions provides function to gets worksheet properties. +// +// Available options: +// CodeName(string) +// EnableFormatConditionsCalculation(bool) +// Published(bool) +// FitToPage(bool) +// AutoPageBreaks(bool) +func (f *File) GetSheetPrOptions(name string, opts ...SheetPrOptionPtr) error { + sheet := f.workSheetReader(name) + pr := sheet.SheetPr + + for _, opt := range opts { + opt.getSheetPrOption(pr) + } + return nil +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/sheetview.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/sheetview.go new file mode 100644 index 000000000..679e91539 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/sheetview.go @@ -0,0 +1,152 @@ +package excelize + +import "fmt" + +// SheetViewOption is an option of a view of a worksheet. See SetSheetViewOptions(). +type SheetViewOption interface { + setSheetViewOption(view *xlsxSheetView) +} + +// SheetViewOptionPtr is a writable SheetViewOption. 
See GetSheetViewOptions(). +type SheetViewOptionPtr interface { + SheetViewOption + getSheetViewOption(view *xlsxSheetView) +} + +type ( + // DefaultGridColor is a SheetViewOption. + DefaultGridColor bool + // RightToLeft is a SheetViewOption. + RightToLeft bool + // ShowFormulas is a SheetViewOption. + ShowFormulas bool + // ShowGridLines is a SheetViewOption. + ShowGridLines bool + // ShowRowColHeaders is a SheetViewOption. + ShowRowColHeaders bool + // ZoomScale is a SheetViewOption. + ZoomScale float64 + /* TODO + // ShowWhiteSpace is a SheetViewOption. + ShowWhiteSpace bool + // ShowZeros is a SheetViewOption. + ShowZeros bool + // WindowProtection is a SheetViewOption. + WindowProtection bool + */ +) + +// Defaults for each option are described in XML schema for CT_SheetView + +func (o DefaultGridColor) setSheetViewOption(view *xlsxSheetView) { + view.DefaultGridColor = boolPtr(bool(o)) +} + +func (o *DefaultGridColor) getSheetViewOption(view *xlsxSheetView) { + *o = DefaultGridColor(defaultTrue(view.DefaultGridColor)) // Excel default: true +} + +func (o RightToLeft) setSheetViewOption(view *xlsxSheetView) { + view.RightToLeft = bool(o) // Excel default: false +} + +func (o *RightToLeft) getSheetViewOption(view *xlsxSheetView) { + *o = RightToLeft(view.RightToLeft) +} + +func (o ShowFormulas) setSheetViewOption(view *xlsxSheetView) { + view.ShowFormulas = bool(o) // Excel default: false +} + +func (o *ShowFormulas) getSheetViewOption(view *xlsxSheetView) { + *o = ShowFormulas(view.ShowFormulas) // Excel default: false +} + +func (o ShowGridLines) setSheetViewOption(view *xlsxSheetView) { + view.ShowGridLines = boolPtr(bool(o)) +} + +func (o *ShowGridLines) getSheetViewOption(view *xlsxSheetView) { + *o = ShowGridLines(defaultTrue(view.ShowGridLines)) // Excel default: true +} + +func (o ShowRowColHeaders) setSheetViewOption(view *xlsxSheetView) { + view.ShowRowColHeaders = boolPtr(bool(o)) +} + +func (o *ShowRowColHeaders) getSheetViewOption(view *xlsxSheetView) { + *o = ShowRowColHeaders(defaultTrue(view.ShowRowColHeaders)) // Excel default: true +} + +func (o ZoomScale) setSheetViewOption(view *xlsxSheetView) { + //This attribute is restricted to values ranging from 10 to 400. + if float64(o) >= 10 && float64(o) <= 400 { + view.ZoomScale = float64(o) + } +} + +func (o *ZoomScale) getSheetViewOption(view *xlsxSheetView) { + *o = ZoomScale(view.ZoomScale) +} + +// getSheetView returns the SheetView object +func (f *File) getSheetView(sheetName string, viewIndex int) (*xlsxSheetView, error) { + xlsx := f.workSheetReader(sheetName) + if viewIndex < 0 { + if viewIndex < -len(xlsx.SheetViews.SheetView) { + return nil, fmt.Errorf("view index %d out of range", viewIndex) + } + viewIndex = len(xlsx.SheetViews.SheetView) + viewIndex + } else if viewIndex >= len(xlsx.SheetViews.SheetView) { + return nil, fmt.Errorf("view index %d out of range", viewIndex) + } + + return &(xlsx.SheetViews.SheetView[viewIndex]), nil +} + +// SetSheetViewOptions sets sheet view options. +// The viewIndex may be negative and if so is counted backward (-1 is the last view). 
+// +// Available options: +// DefaultGridColor(bool) +// RightToLeft(bool) +// ShowFormulas(bool) +// ShowGridLines(bool) +// ShowRowColHeaders(bool) +// Example: +// err = f.SetSheetViewOptions("Sheet1", -1, ShowGridLines(false)) +func (f *File) SetSheetViewOptions(name string, viewIndex int, opts ...SheetViewOption) error { + view, err := f.getSheetView(name, viewIndex) + if err != nil { + return err + } + + for _, opt := range opts { + opt.setSheetViewOption(view) + } + return nil +} + +// GetSheetViewOptions gets the value of sheet view options. +// The viewIndex may be negative and if so is counted backward (-1 is the last view). +// +// Available options: +// DefaultGridColor(bool) +// RightToLeft(bool) +// ShowFormulas(bool) +// ShowGridLines(bool) +// ShowRowColHeaders(bool) +// Example: +// var showGridLines excelize.ShowGridLines +// err = f.GetSheetViewOptions("Sheet1", -1, &showGridLines) +func (f *File) GetSheetViewOptions(name string, viewIndex int, opts ...SheetViewOptionPtr) error { + view, err := f.getSheetView(name, viewIndex) + if err != nil { + return err + } + + for _, opt := range opts { + opt.getSheetViewOption(view) + } + return nil +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/styles.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/styles.go new file mode 100644 index 000000000..e2a1ae618 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/styles.go @@ -0,0 +1,2751 @@ +package excelize + +import ( + "encoding/json" + "encoding/xml" + "fmt" + "math" + "strconv" + "strings" +) + +// Excel styles can reference number formats that are built-in, all of which +// have an id less than 164. This is a possibly incomplete list comprised of as +// many of them as I could find. +var builtInNumFmt = map[int]string{ + 0: "general", + 1: "0", + 2: "0.00", + 3: "#,##0", + 4: "#,##0.00", + 9: "0%", + 10: "0.00%", + 11: "0.00e+00", + 12: "# ?/?", + 13: "# ??/??", + 14: "mm-dd-yy", + 15: "d-mmm-yy", + 16: "d-mmm", + 17: "mmm-yy", + 18: "h:mm am/pm", + 19: "h:mm:ss am/pm", + 20: "h:mm", + 21: "h:mm:ss", + 22: "m/d/yy h:mm", + 37: "#,##0 ;(#,##0)", + 38: "#,##0 ;[red](#,##0)", + 39: "#,##0.00;(#,##0.00)", + 40: "#,##0.00;[red](#,##0.00)", + 41: `_(* #,##0_);_(* \(#,##0\);_(* "-"_);_(@_)`, + 42: `_("$"* #,##0_);_("$* \(#,##0\);_("$"* "-"_);_(@_)`, + 43: `_(* #,##0.00_);_(* \(#,##0.00\);_(* "-"??_);_(@_)`, + 44: `_("$"* #,##0.00_);_("$"* \(#,##0.00\);_("$"* "-"??_);_(@_)`, + 45: "mm:ss", + 46: "[h]:mm:ss", + 47: "mmss.0", + 48: "##0.0e+0", + 49: "@", +} + +// langNumFmt defined number format code (with unicode values provided for +// language glyphs where they occur) in different language. 
+var langNumFmt = map[string]map[int]string{ + "zh-tw": { + 27: "[$-404]e/m/d", + 28: `[$-404]e"年"m"月"d"日"`, + 29: `[$-404]e"年"m"月"d"日"`, + 30: "m/d/yy", + 31: `yyyy"年"m"月"d"日"`, + 32: `hh"時"mm"分"`, + 33: `hh"時"mm"分"ss"秒"`, + 34: `上午/下午 hh"時"mm"分"`, + 35: `上午/下午 hh"時"mm"分"ss"秒"`, + 36: "[$-404]e/m/d", + 50: "[$-404]e/m/d", + 51: `[$-404]e"年"m"月"d"日"`, + 52: `上午/下午 hh"時"mm"分"`, + 53: `上午/下午 hh"時"mm"分"ss"秒"`, + 54: `[$-404]e"年"m"月"d"日"`, + 55: `上午/下午 hh"時"mm"分"`, + 56: `上午/下午 hh"時"mm"分"ss"秒"`, + 57: "[$-404]e/m/d", + 58: `[$-404]e"年"m"月"d"日"`, + }, + "zh-cn": { + 27: `yyyy"年"m"月"`, + 28: `m"月"d"日"`, + 29: `m"月"d"日"`, + 30: "m-d-yy", + 31: `yyyy"年"m"月"d"日"`, + 32: `h"时"mm"分"`, + 33: `h"时"mm"分"ss"秒"`, + 34: `上午/下午 h"时"mm"分"`, + 35: `上午/下午 h"时"mm"分"ss"秒"`, + 36: `yyyy"年"m"月"`, + 50: `yyyy"年"m"月"`, + 51: `m"月"d"日"`, + 52: `yyyy"年"m"月"`, + 53: `m"月"d"日"`, + 54: `m"月"d"日"`, + 55: `上午/下午 h"时"mm"分"`, + 56: `上午/下午 h"时"mm"分"ss"秒"`, + 57: `yyyy"年"m"月"`, + 58: `m"月"d"日"`, + }, + "zh-tw_unicode": { + 27: "[$-404]e/m/d", + 28: `[$-404]e"5E74"m"6708"d"65E5"`, + 29: `[$-404]e"5E74"m"6708"d"65E5"`, + 30: "m/d/yy", + 31: `yyyy"5E74"m"6708"d"65E5"`, + 32: `hh"6642"mm"5206"`, + 33: `hh"6642"mm"5206"ss"79D2"`, + 34: `4E0A5348/4E0B5348hh"6642"mm"5206"`, + 35: `4E0A5348/4E0B5348hh"6642"mm"5206"ss"79D2"`, + 36: "[$-404]e/m/d", + 50: "[$-404]e/m/d", + 51: `[$-404]e"5E74"m"6708"d"65E5"`, + 52: `4E0A5348/4E0B5348hh"6642"mm"5206"`, + 53: `4E0A5348/4E0B5348hh"6642"mm"5206"ss"79D2"`, + 54: `[$-404]e"5E74"m"6708"d"65E5"`, + 55: `4E0A5348/4E0B5348hh"6642"mm"5206"`, + 56: `4E0A5348/4E0B5348hh"6642"mm"5206"ss"79D2"`, + 57: "[$-404]e/m/d", + 58: `[$-404]e"5E74"m"6708"d"65E5"`, + }, + "zh-cn_unicode": { + 27: `yyyy"5E74"m"6708"`, + 28: `m"6708"d"65E5"`, + 29: `m"6708"d"65E5"`, + 30: "m-d-yy", + 31: `yyyy"5E74"m"6708"d"65E5"`, + 32: `h"65F6"mm"5206"`, + 33: `h"65F6"mm"5206"ss"79D2"`, + 34: `4E0A5348/4E0B5348h"65F6"mm"5206"`, + 35: `4E0A5348/4E0B5348h"65F6"mm"5206"ss"79D2"`, + 36: `yyyy"5E74"m"6708"`, + 50: `yyyy"5E74"m"6708"`, + 51: `m"6708"d"65E5"`, + 52: `yyyy"5E74"m"6708"`, + 53: `m"6708"d"65E5"`, + 54: `m"6708"d"65E5"`, + 55: `4E0A5348/4E0B5348h"65F6"mm"5206"`, + 56: `4E0A5348/4E0B5348h"65F6"mm"5206"ss"79D2"`, + 57: `yyyy"5E74"m"6708"`, + 58: `m"6708"d"65E5"`, + }, + "ja-jp": { + 27: "[$-411]ge.m.d", + 28: `[$-411]ggge"年"m"月"d"日"`, + 29: `[$-411]ggge"年"m"月"d"日"`, + 30: "m/d/yy", + 31: `yyyy"年"m"月"d"日"`, + 32: `h"時"mm"分"`, + 33: `h"時"mm"分"ss"秒"`, + 34: `yyyy"年"m"月"`, + 35: `m"月"d"日"`, + 36: "[$-411]ge.m.d", + 50: "[$-411]ge.m.d", + 51: `[$-411]ggge"年"m"月"d"日"`, + 52: `yyyy"年"m"月"`, + 53: `m"月"d"日"`, + 54: `[$-411]ggge"年"m"月"d"日"`, + 55: `yyyy"年"m"月"`, + 56: `m"月"d"日"`, + 57: "[$-411]ge.m.d", + 58: `[$-411]ggge"年"m"月"d"日"`, + }, + "ko-kr": { + 27: `yyyy"年" mm"月" dd"日"`, + 28: "mm-dd", + 29: "mm-dd", + 30: "mm-dd-yy", + 31: `yyyy"년" mm"월" dd"일"`, + 32: `h"시" mm"분"`, + 33: `h"시" mm"분" ss"초"`, + 34: `yyyy-mm-dd`, + 35: `yyyy-mm-dd`, + 36: `yyyy"年" mm"月" dd"日"`, + 50: `yyyy"年" mm"月" dd"日"`, + 51: "mm-dd", + 52: "yyyy-mm-dd", + 53: "yyyy-mm-dd", + 54: "mm-dd", + 55: "yyyy-mm-dd", + 56: "yyyy-mm-dd", + 57: `yyyy"年" mm"月" dd"日"`, + 58: "mm-dd", + }, + "ja-jp_unicode": { + 27: "[$-411]ge.m.d", + 28: `[$-411]ggge"5E74"m"6708"d"65E5"`, + 29: `[$-411]ggge"5E74"m"6708"d"65E5"`, + 30: "m/d/yy", + 31: `yyyy"5E74"m"6708"d"65E5"`, + 32: `h"6642"mm"5206"`, + 33: `h"6642"mm"5206"ss"79D2"`, + 34: `yyyy"5E74"m"6708"`, + 35: `m"6708"d"65E5"`, + 36: "[$-411]ge.m.d", + 50: "[$-411]ge.m.d", + 51: `[$-411]ggge"5E74"m"6708"d"65E5"`, + 52: 
`yyyy"5E74"m"6708"`, + 53: `m"6708"d"65E5"`, + 54: `[$-411]ggge"5E74"m"6708"d"65E5"`, + 55: `yyyy"5E74"m"6708"`, + 56: `m"6708"d"65E5"`, + 57: "[$-411]ge.m.d", + 58: `[$-411]ggge"5E74"m"6708"d"65E5"`, + }, + "ko-kr_unicode": { + 27: `yyyy"5E74" mm"6708" dd"65E5"`, + 28: "mm-dd", + 29: "mm-dd", + 30: "mm-dd-yy", + 31: `yyyy"B144" mm"C6D4" dd"C77C"`, + 32: `h"C2DC" mm"BD84"`, + 33: `h"C2DC" mm"BD84" ss"CD08"`, + 34: "yyyy-mm-dd", + 35: "yyyy-mm-dd", + 36: `yyyy"5E74" mm"6708" dd"65E5"`, + 50: `yyyy"5E74" mm"6708" dd"65E5"`, + 51: "mm-dd", + 52: "yyyy-mm-dd", + 53: "yyyy-mm-dd", + 54: "mm-dd", + 55: "yyyy-mm-dd", + 56: "yyyy-mm-dd", + 57: `yyyy"5E74" mm"6708" dd"65E5"`, + 58: "mm-dd", + }, + "th-th": { + 59: "t0", + 60: "t0.00", + 61: "t#,##0", + 62: "t#,##0.00", + 67: "t0%", + 68: "t0.00%", + 69: "t# ?/?", + 70: "t# ??/??", + 71: "ว/ด/ปปปป", + 72: "ว-ดดด-ปป", + 73: "ว-ดดด", + 74: "ดดด-ปป", + 75: "ช:นน", + 76: "ช:นน:ทท", + 77: "ว/ด/ปปปป ช:นน", + 78: "นน:ทท", + 79: "[ช]:นน:ทท", + 80: "นน:ทท.0", + 81: "d/m/bb", + }, + "th-th_unicode": { + 59: "t0", + 60: "t0.00", + 61: "t#,##0", + 62: "t#,##0.00", + 67: "t0%", + 68: "t0.00%", + 69: "t# ?/?", + 70: "t# ??/??", + 71: "0E27/0E14/0E1B0E1B0E1B0E1B", + 72: "0E27-0E140E140E14-0E1B0E1B", + 73: "0E27-0E140E140E14", + 74: "0E140E140E14-0E1B0E1B", + 75: "0E0A:0E190E19", + 76: "0E0A:0E190E19:0E170E17", + 77: "0E27/0E14/0E1B0E1B0E1B0E1B 0E0A:0E190E19", + 78: "0E190E19:0E170E17", + 79: "[0E0A]:0E190E19:0E170E17", + 80: "0E190E19:0E170E17.0", + 81: "d/m/bb", + }, +} + +// currencyNumFmt defined the currency number format map. +var currencyNumFmt = map[int]string{ + 164: `"CN¥",##0.00`, + 165: "[$$-409]#,##0.00", + 166: "[$$-45C]#,##0.00", + 167: "[$$-1004]#,##0.00", + 168: "[$$-404]#,##0.00", + 169: "[$$-C09]#,##0.00", + 170: "[$$-2809]#,##0.00", + 171: "[$$-1009]#,##0.00", + 172: "[$$-2009]#,##0.00", + 173: "[$$-1409]#,##0.00", + 174: "[$$-4809]#,##0.00", + 175: "[$$-2C09]#,##0.00", + 176: "[$$-2409]#,##0.00", + 177: "[$$-1000]#,##0.00", + 178: `#,##0.00\ [$$-C0C]`, + 179: "[$$-475]#,##0.00", + 180: "[$$-83E]#,##0.00", + 181: `[$$-86B]\ #,##0.00`, + 182: `[$$-340A]\ #,##0.00`, + 183: "[$$-240A]#,##0.00", + 184: `[$$-300A]\ #,##0.00`, + 185: "[$$-440A]#,##0.00", + 186: "[$$-80A]#,##0.00", + 187: "[$$-500A]#,##0.00", + 188: "[$$-540A]#,##0.00", + 189: `[$$-380A]\ #,##0.00`, + 190: "[$£-809]#,##0.00", + 191: "[$£-491]#,##0.00", + 192: "[$£-452]#,##0.00", + 193: "[$¥-804]#,##0.00", + 194: "[$¥-411]#,##0.00", + 195: "[$¥-478]#,##0.00", + 196: "[$¥-451]#,##0.00", + 197: "[$¥-480]#,##0.00", + 198: "#,##0.00\\ [$\u058F-42B]", + 199: "[$\u060B-463]#,##0.00", + 200: "[$\u060B-48C]#,##0.00", + 201: "[$\u09F3-845]\\ #,##0.00", + 202: "#,##0.00[$\u17DB-453]", + 203: "[$\u20A1-140A]#,##0.00", + 204: "[$\u20A6-468]\\ #,##0.00", + 205: "[$\u20A6-470]\\ #,##0.00", + 206: "[$\u20A9-412]#,##0.00", + 207: "[$\u20AA-40D]\\ #,##0.00", + 208: "#,##0.00\\ [$\u20AB-42A]", + 209: "#,##0.00\\ [$\u20AC-42D]", + 210: "#,##0.00\\ [$\u20AC-47E]", + 211: "#,##0.00\\ [$\u20AC-403]", + 212: "#,##0.00\\ [$\u20AC-483]", + 213: "[$\u20AC-813]\\ #,##0.00", + 214: "[$\u20AC-413]\\ #,##0.00", + 215: "[$\u20AC-1809]#,##0.00", + 216: "#,##0.00\\ [$\u20AC-425]", + 217: "[$\u20AC-2]\\ #,##0.00", + 218: "#,##0.00\\ [$\u20AC-1]", + 219: "#,##0.00\\ [$\u20AC-40B]", + 220: "#,##0.00\\ [$\u20AC-80C]", + 221: "#,##0.00\\ [$\u20AC-40C]", + 222: "#,##0.00\\ [$\u20AC-140C]", + 223: "#,##0.00\\ [$\u20AC-180C]", + 224: "[$\u20AC-200C]#,##0.00", + 225: "#,##0.00\\ [$\u20AC-456]", + 226: "#,##0.00\\ 
[$\u20AC-C07]", + 227: "#,##0.00\\ [$\u20AC-407]", + 228: "#,##0.00\\ [$\u20AC-1007]", + 229: "#,##0.00\\ [$\u20AC-408]", + 230: "#,##0.00\\ [$\u20AC-243B]", + 231: "[$\u20AC-83C]#,##0.00", + 232: "[$\u20AC-410]\\ #,##0.00", + 233: "[$\u20AC-476]#,##0.00", + 234: "#,##0.00\\ [$\u20AC-2C1A]", + 235: "[$\u20AC-426]\\ #,##0.00", + 236: "#,##0.00\\ [$\u20AC-427]", + 237: "#,##0.00\\ [$\u20AC-82E]", + 238: "#,##0.00\\ [$\u20AC-46E]", + 239: "[$\u20AC-43A]#,##0.00", + 240: "#,##0.00\\ [$\u20AC-C3B]", + 241: "#,##0.00\\ [$\u20AC-482]", + 242: "#,##0.00\\ [$\u20AC-816]", + 243: "#,##0.00\\ [$\u20AC-301A]", + 244: "#,##0.00\\ [$\u20AC-203B]", + 245: "#,##0.00\\ [$\u20AC-41B]", + 246: "#,##0.00\\ [$\u20AC-424]", + 247: "#,##0.00\\ [$\u20AC-C0A]", + 248: "#,##0.00\\ [$\u20AC-81D]", + 249: "#,##0.00\\ [$\u20AC-484]", + 250: "#,##0.00\\ [$\u20AC-42E]", + 251: "[$\u20AC-462]\\ #,##0.00", + 252: "#,##0.00\\ [$₭-454]", + 253: "#,##0.00\\ [$₮-450]", + 254: "[$\u20AE-C50]#,##0.00", + 255: "[$\u20B1-3409]#,##0.00", + 256: "[$\u20B1-464]#,##0.00", + 257: "#,##0.00[$\u20B4-422]", + 258: "[$\u20B8-43F]#,##0.00", + 259: "[$\u20B9-460]#,##0.00", + 260: "[$\u20B9-4009]\\ #,##0.00", + 261: "[$\u20B9-447]\\ #,##0.00", + 262: "[$\u20B9-439]\\ #,##0.00", + 263: "[$\u20B9-44B]\\ #,##0.00", + 264: "[$\u20B9-860]#,##0.00", + 265: "[$\u20B9-457]\\ #,##0.00", + 266: "[$\u20B9-458]#,##0.00", + 267: "[$\u20B9-44E]\\ #,##0.00", + 268: "[$\u20B9-861]#,##0.00", + 269: "[$\u20B9-448]\\ #,##0.00", + 270: "[$\u20B9-446]\\ #,##0.00", + 271: "[$\u20B9-44F]\\ #,##0.00", + 272: "[$\u20B9-459]#,##0.00", + 273: "[$\u20B9-449]\\ #,##0.00", + 274: "[$\u20B9-820]#,##0.00", + 275: "#,##0.00\\ [$\u20BA-41F]", + 276: "#,##0.00\\ [$\u20BC-42C]", + 277: "#,##0.00\\ [$\u20BC-82C]", + 278: "#,##0.00\\ [$\u20BD-419]", + 279: "#,##0.00[$\u20BD-485]", + 280: "#,##0.00\\ [$\u20BE-437]", + 281: "[$B/.-180A]\\ #,##0.00", + 282: "[$Br-472]#,##0.00", + 283: "[$Br-477]#,##0.00", + 284: "#,##0.00[$Br-473]", + 285: "[$Bs-46B]\\ #,##0.00", + 286: "[$Bs-400A]\\ #,##0.00", + 287: "[$Bs.-200A]\\ #,##0.00", + 288: "[$BWP-832]\\ #,##0.00", + 289: "[$C$-4C0A]#,##0.00", + 290: "[$CA$-85D]#,##0.00", + 291: "[$CA$-47C]#,##0.00", + 292: "[$CA$-45D]#,##0.00", + 293: "[$CFA-340C]#,##0.00", + 294: "[$CFA-280C]#,##0.00", + 295: "#,##0.00\\ [$CFA-867]", + 296: "#,##0.00\\ [$CFA-488]", + 297: "#,##0.00\\ [$CHF-100C]", + 298: "[$CHF-1407]\\ #,##0.00", + 299: "[$CHF-807]\\ #,##0.00", + 300: "[$CHF-810]\\ #,##0.00", + 301: "[$CHF-417]\\ #,##0.00", + 302: "[$CLP-47A]\\ #,##0.00", + 303: "[$CN¥-850]#,##0.00", + 304: "#,##0.00\\ [$DZD-85F]", + 305: "[$FCFA-2C0C]#,##0.00", + 306: "#,##0.00\\ [$Ft-40E]", + 307: "[$G-3C0C]#,##0.00", + 308: "[$Gs.-3C0A]\\ #,##0.00", + 309: "[$GTQ-486]#,##0.00", + 310: "[$HK$-C04]#,##0.00", + 311: "[$HK$-3C09]#,##0.00", + 312: "#,##0.00\\ [$HRK-41A]", + 313: "[$IDR-3809]#,##0.00", + 314: "[$IQD-492]#,##0.00", + 315: "#,##0.00\\ [$ISK-40F]", + 316: "[$K-455]#,##0.00", + 317: "#,##0.00\\ [$K\u010D-405]", + 318: "#,##0.00\\ [$KM-141A]", + 319: "#,##0.00\\ [$KM-101A]", + 320: "#,##0.00\\ [$KM-181A]", + 321: "[$kr-438]\\ #,##0.00", + 322: "[$kr-43B]\\ #,##0.00", + 323: "#,##0.00\\ [$kr-83B]", + 324: "[$kr-414]\\ #,##0.00", + 325: "[$kr-814]\\ #,##0.00", + 326: "#,##0.00\\ [$kr-41D]", + 327: "[$kr.-406]\\ #,##0.00", + 328: "[$kr.-46F]\\ #,##0.00", + 329: "[$Ksh-441]#,##0.00", + 330: "[$L-818]#,##0.00", + 331: "[$L-819]#,##0.00", + 332: "[$L-480A]\\ #,##0.00", + 333: "#,##0.00\\ [$Lek\u00EB-41C]", + 334: "[$MAD-45F]#,##0.00", + 335: "[$MAD-380C]#,##0.00", + 
336: "#,##0.00\\ [$MAD-105F]", + 337: "[$MOP$-1404]#,##0.00", + 338: "#,##0.00\\ [$MVR-465]_-", + 339: "#,##0.00[$Nfk-873]", + 340: "[$NGN-466]#,##0.00", + 341: "[$NGN-467]#,##0.00", + 342: "[$NGN-469]#,##0.00", + 343: "[$NGN-471]#,##0.00", + 344: "[$NOK-103B]\\ #,##0.00", + 345: "[$NOK-183B]\\ #,##0.00", + 346: "[$NZ$-481]#,##0.00", + 347: "[$PKR-859]\\ #,##0.00", + 348: "[$PYG-474]#,##0.00", + 349: "[$Q-100A]#,##0.00", + 350: "[$R-436]\\ #,##0.00", + 351: "[$R-1C09]\\ #,##0.00", + 352: "[$R-435]\\ #,##0.00", + 353: "[$R$-416]\\ #,##0.00", + 354: "[$RD$-1C0A]#,##0.00", + 355: "#,##0.00\\ [$RF-487]", + 356: "[$RM-4409]#,##0.00", + 357: "[$RM-43E]#,##0.00", + 358: "#,##0.00\\ [$RON-418]", + 359: "[$Rp-421]#,##0.00", + 360: "[$Rs-420]#,##0.00_-", + 361: "[$Rs.-849]\\ #,##0.00", + 362: "#,##0.00\\ [$RSD-81A]", + 363: "#,##0.00\\ [$RSD-C1A]", + 364: "#,##0.00\\ [$RUB-46D]", + 365: "#,##0.00\\ [$RUB-444]", + 366: "[$S/.-C6B]\\ #,##0.00", + 367: "[$S/.-280A]\\ #,##0.00", + 368: "#,##0.00\\ [$SEK-143B]", + 369: "#,##0.00\\ [$SEK-1C3B]", + 370: "#,##0.00\\ [$so\u02BBm-443]", + 371: "#,##0.00\\ [$so\u02BBm-843]", + 372: "#,##0.00\\ [$SYP-45A]", + 373: "[$THB-41E]#,##0.00", + 374: "#,##0.00[$TMT-442]", + 375: "[$US$-3009]#,##0.00", + 376: "[$ZAR-46C]\\ #,##0.00", + 377: "[$ZAR-430]#,##0.00", + 378: "[$ZAR-431]#,##0.00", + 379: "[$ZAR-432]\\ #,##0.00", + 380: "[$ZAR-433]#,##0.00", + 381: "[$ZAR-434]\\ #,##0.00", + 382: "#,##0.00\\ [$z\u0142-415]", + 383: "#,##0.00\\ [$\u0434\u0435\u043D-42F]", + 384: "#,##0.00\\ [$КМ-201A]", + 385: "#,##0.00\\ [$КМ-1C1A]", + 386: "#,##0.00\\ [$\u043B\u0432.-402]", + 387: "#,##0.00\\ [$р.-423]", + 388: "#,##0.00\\ [$\u0441\u043E\u043C-440]", + 389: "#,##0.00\\ [$\u0441\u043E\u043C-428]", + 390: "[$\u062C.\u0645.-C01]\\ #,##0.00_-", + 391: "[$\u062F.\u0623.-2C01]\\ #,##0.00_-", + 392: "[$\u062F.\u0625.-3801]\\ #,##0.00_-", + 393: "[$\u062F.\u0628.-3C01]\\ #,##0.00_-", + 394: "[$\u062F.\u062A.-1C01]\\ #,##0.00_-", + 395: "[$\u062F.\u062C.-1401]\\ #,##0.00_-", + 396: "[$\u062F.\u0639.-801]\\ #,##0.00_-", + 397: "[$\u062F.\u0643.-3401]\\ #,##0.00_-", + 398: "[$\u062F.\u0644.-1001]#,##0.00_-", + 399: "[$\u062F.\u0645.-1801]\\ #,##0.00_-", + 400: "[$\u0631-846]\\ #,##0.00", + 401: "[$\u0631.\u0633.-401]\\ #,##0.00_-", + 402: "[$\u0631.\u0639.-2001]\\ #,##0.00_-", + 403: "[$\u0631.\u0642.-4001]\\ #,##0.00_-", + 404: "[$\u0631.\u064A.-2401]\\ #,##0.00_-", + 405: "[$\u0631\u06CC\u0627\u0644-429]#,##0.00_-", + 406: "[$\u0644.\u0633.-2801]\\ #,##0.00_-", + 407: "[$\u0644.\u0644.-3001]\\ #,##0.00_-", + 408: "[$\u1265\u122D-45E]#,##0.00", + 409: "[$\u0930\u0942-461]#,##0.00", + 410: "[$\u0DBB\u0DD4.-45B]\\ #,##0.00", + 411: "[$ADP]\\ #,##0.00", + 412: "[$AED]\\ #,##0.00", + 413: "[$AFA]\\ #,##0.00", + 414: "[$AFN]\\ #,##0.00", + 415: "[$ALL]\\ #,##0.00", + 416: "[$AMD]\\ #,##0.00", + 417: "[$ANG]\\ #,##0.00", + 418: "[$AOA]\\ #,##0.00", + 419: "[$ARS]\\ #,##0.00", + 420: "[$ATS]\\ #,##0.00", + 421: "[$AUD]\\ #,##0.00", + 422: "[$AWG]\\ #,##0.00", + 423: "[$AZM]\\ #,##0.00", + 424: "[$AZN]\\ #,##0.00", + 425: "[$BAM]\\ #,##0.00", + 426: "[$BBD]\\ #,##0.00", + 427: "[$BDT]\\ #,##0.00", + 428: "[$BEF]\\ #,##0.00", + 429: "[$BGL]\\ #,##0.00", + 430: "[$BGN]\\ #,##0.00", + 431: "[$BHD]\\ #,##0.00", + 432: "[$BIF]\\ #,##0.00", + 433: "[$BMD]\\ #,##0.00", + 434: "[$BND]\\ #,##0.00", + 435: "[$BOB]\\ #,##0.00", + 436: "[$BOV]\\ #,##0.00", + 437: "[$BRL]\\ #,##0.00", + 438: "[$BSD]\\ #,##0.00", + 439: "[$BTN]\\ #,##0.00", + 440: "[$BWP]\\ #,##0.00", + 441: "[$BYR]\\ #,##0.00", + 442: 
"[$BZD]\\ #,##0.00", + 443: "[$CAD]\\ #,##0.00", + 444: "[$CDF]\\ #,##0.00", + 445: "[$CHE]\\ #,##0.00", + 446: "[$CHF]\\ #,##0.00", + 447: "[$CHW]\\ #,##0.00", + 448: "[$CLF]\\ #,##0.00", + 449: "[$CLP]\\ #,##0.00", + 450: "[$CNY]\\ #,##0.00", + 451: "[$COP]\\ #,##0.00", + 452: "[$COU]\\ #,##0.00", + 453: "[$CRC]\\ #,##0.00", + 454: "[$CSD]\\ #,##0.00", + 455: "[$CUC]\\ #,##0.00", + 456: "[$CVE]\\ #,##0.00", + 457: "[$CYP]\\ #,##0.00", + 458: "[$CZK]\\ #,##0.00", + 459: "[$DEM]\\ #,##0.00", + 460: "[$DJF]\\ #,##0.00", + 461: "[$DKK]\\ #,##0.00", + 462: "[$DOP]\\ #,##0.00", + 463: "[$DZD]\\ #,##0.00", + 464: "[$ECS]\\ #,##0.00", + 465: "[$ECV]\\ #,##0.00", + 466: "[$EEK]\\ #,##0.00", + 467: "[$EGP]\\ #,##0.00", + 468: "[$ERN]\\ #,##0.00", + 469: "[$ESP]\\ #,##0.00", + 470: "[$ETB]\\ #,##0.00", + 471: "[$EUR]\\ #,##0.00", + 472: "[$FIM]\\ #,##0.00", + 473: "[$FJD]\\ #,##0.00", + 474: "[$FKP]\\ #,##0.00", + 475: "[$FRF]\\ #,##0.00", + 476: "[$GBP]\\ #,##0.00", + 477: "[$GEL]\\ #,##0.00", + 478: "[$GHC]\\ #,##0.00", + 479: "[$GHS]\\ #,##0.00", + 480: "[$GIP]\\ #,##0.00", + 481: "[$GMD]\\ #,##0.00", + 482: "[$GNF]\\ #,##0.00", + 483: "[$GRD]\\ #,##0.00", + 484: "[$GTQ]\\ #,##0.00", + 485: "[$GYD]\\ #,##0.00", + 486: "[$HKD]\\ #,##0.00", + 487: "[$HNL]\\ #,##0.00", + 488: "[$HRK]\\ #,##0.00", + 489: "[$HTG]\\ #,##0.00", + 490: "[$HUF]\\ #,##0.00", + 491: "[$IDR]\\ #,##0.00", + 492: "[$IEP]\\ #,##0.00", + 493: "[$ILS]\\ #,##0.00", + 494: "[$INR]\\ #,##0.00", + 495: "[$IQD]\\ #,##0.00", + 496: "[$IRR]\\ #,##0.00", + 497: "[$ISK]\\ #,##0.00", + 498: "[$ITL]\\ #,##0.00", + 499: "[$JMD]\\ #,##0.00", + 500: "[$JOD]\\ #,##0.00", + 501: "[$JPY]\\ #,##0.00", + 502: "[$KAF]\\ #,##0.00", + 503: "[$KES]\\ #,##0.00", + 504: "[$KGS]\\ #,##0.00", + 505: "[$KHR]\\ #,##0.00", + 506: "[$KMF]\\ #,##0.00", + 507: "[$KPW]\\ #,##0.00", + 508: "[$KRW]\\ #,##0.00", + 509: "[$KWD]\\ #,##0.00", + 510: "[$KYD]\\ #,##0.00", + 511: "[$KZT]\\ #,##0.00", + 512: "[$LAK]\\ #,##0.00", + 513: "[$LBP]\\ #,##0.00", + 514: "[$LKR]\\ #,##0.00", + 515: "[$LRD]\\ #,##0.00", + 516: "[$LSL]\\ #,##0.00", + 517: "[$LTL]\\ #,##0.00", + 518: "[$LUF]\\ #,##0.00", + 519: "[$LVL]\\ #,##0.00", + 520: "[$LYD]\\ #,##0.00", + 521: "[$MAD]\\ #,##0.00", + 522: "[$MDL]\\ #,##0.00", + 523: "[$MGA]\\ #,##0.00", + 524: "[$MGF]\\ #,##0.00", + 525: "[$MKD]\\ #,##0.00", + 526: "[$MMK]\\ #,##0.00", + 527: "[$MNT]\\ #,##0.00", + 528: "[$MOP]\\ #,##0.00", + 529: "[$MRO]\\ #,##0.00", + 530: "[$MTL]\\ #,##0.00", + 531: "[$MUR]\\ #,##0.00", + 532: "[$MVR]\\ #,##0.00", + 533: "[$MWK]\\ #,##0.00", + 534: "[$MXN]\\ #,##0.00", + 535: "[$MXV]\\ #,##0.00", + 536: "[$MYR]\\ #,##0.00", + 537: "[$MZM]\\ #,##0.00", + 538: "[$MZN]\\ #,##0.00", + 539: "[$NAD]\\ #,##0.00", + 540: "[$NGN]\\ #,##0.00", + 541: "[$NIO]\\ #,##0.00", + 542: "[$NLG]\\ #,##0.00", + 543: "[$NOK]\\ #,##0.00", + 544: "[$NPR]\\ #,##0.00", + 545: "[$NTD]\\ #,##0.00", + 546: "[$NZD]\\ #,##0.00", + 547: "[$OMR]\\ #,##0.00", + 548: "[$PAB]\\ #,##0.00", + 549: "[$PEN]\\ #,##0.00", + 550: "[$PGK]\\ #,##0.00", + 551: "[$PHP]\\ #,##0.00", + 552: "[$PKR]\\ #,##0.00", + 553: "[$PLN]\\ #,##0.00", + 554: "[$PTE]\\ #,##0.00", + 555: "[$PYG]\\ #,##0.00", + 556: "[$QAR]\\ #,##0.00", + 557: "[$ROL]\\ #,##0.00", + 558: "[$RON]\\ #,##0.00", + 559: "[$RSD]\\ #,##0.00", + 560: "[$RUB]\\ #,##0.00", + 561: "[$RUR]\\ #,##0.00", + 562: "[$RWF]\\ #,##0.00", + 563: "[$SAR]\\ #,##0.00", + 564: "[$SBD]\\ #,##0.00", + 565: "[$SCR]\\ #,##0.00", + 566: "[$SDD]\\ #,##0.00", + 567: "[$SDG]\\ #,##0.00", + 568: "[$SDP]\\ #,##0.00", + 
569: "[$SEK]\\ #,##0.00", + 570: "[$SGD]\\ #,##0.00", + 571: "[$SHP]\\ #,##0.00", + 572: "[$SIT]\\ #,##0.00", + 573: "[$SKK]\\ #,##0.00", + 574: "[$SLL]\\ #,##0.00", + 575: "[$SOS]\\ #,##0.00", + 576: "[$SPL]\\ #,##0.00", + 577: "[$SRD]\\ #,##0.00", + 578: "[$SRG]\\ #,##0.00", + 579: "[$STD]\\ #,##0.00", + 580: "[$SVC]\\ #,##0.00", + 581: "[$SYP]\\ #,##0.00", + 582: "[$SZL]\\ #,##0.00", + 583: "[$THB]\\ #,##0.00", + 584: "[$TJR]\\ #,##0.00", + 585: "[$TJS]\\ #,##0.00", + 586: "[$TMM]\\ #,##0.00", + 587: "[$TMT]\\ #,##0.00", + 588: "[$TND]\\ #,##0.00", + 589: "[$TOP]\\ #,##0.00", + 590: "[$TRL]\\ #,##0.00", + 591: "[$TRY]\\ #,##0.00", + 592: "[$TTD]\\ #,##0.00", + 593: "[$TWD]\\ #,##0.00", + 594: "[$TZS]\\ #,##0.00", + 595: "[$UAH]\\ #,##0.00", + 596: "[$UGX]\\ #,##0.00", + 597: "[$USD]\\ #,##0.00", + 598: "[$USN]\\ #,##0.00", + 599: "[$USS]\\ #,##0.00", + 600: "[$UYI]\\ #,##0.00", + 601: "[$UYU]\\ #,##0.00", + 602: "[$UZS]\\ #,##0.00", + 603: "[$VEB]\\ #,##0.00", + 604: "[$VEF]\\ #,##0.00", + 605: "[$VND]\\ #,##0.00", + 606: "[$VUV]\\ #,##0.00", + 607: "[$WST]\\ #,##0.00", + 608: "[$XAF]\\ #,##0.00", + 609: "[$XAG]\\ #,##0.00", + 610: "[$XAU]\\ #,##0.00", + 611: "[$XB5]\\ #,##0.00", + 612: "[$XBA]\\ #,##0.00", + 613: "[$XBB]\\ #,##0.00", + 614: "[$XBC]\\ #,##0.00", + 615: "[$XBD]\\ #,##0.00", + 616: "[$XCD]\\ #,##0.00", + 617: "[$XDR]\\ #,##0.00", + 618: "[$XFO]\\ #,##0.00", + 619: "[$XFU]\\ #,##0.00", + 620: "[$XOF]\\ #,##0.00", + 621: "[$XPD]\\ #,##0.00", + 622: "[$XPF]\\ #,##0.00", + 623: "[$XPT]\\ #,##0.00", + 624: "[$XTS]\\ #,##0.00", + 625: "[$XXX]\\ #,##0.00", + 626: "[$YER]\\ #,##0.00", + 627: "[$YUM]\\ #,##0.00", + 628: "[$ZAR]\\ #,##0.00", + 629: "[$ZMK]\\ #,##0.00", + 630: "[$ZMW]\\ #,##0.00", + 631: "[$ZWD]\\ #,##0.00", + 632: "[$ZWL]\\ #,##0.00", + 633: "[$ZWN]\\ #,##0.00", + 634: "[$ZWR]\\ #,##0.00", +} + +// builtInNumFmtFunc defined the format conversion functions map. Partial format +// code doesn't support currently and will return original string. +var builtInNumFmtFunc = map[int]func(i int, v string) string{ + 0: formatToString, + 1: formatToInt, + 2: formatToFloat, + 3: formatToInt, + 4: formatToFloat, + 9: formatToC, + 10: formatToD, + 11: formatToE, + 12: formatToString, // Doesn't support currently + 13: formatToString, // Doesn't support currently + 14: parseTime, + 15: parseTime, + 16: parseTime, + 17: parseTime, + 18: parseTime, + 19: parseTime, + 20: parseTime, + 21: parseTime, + 22: parseTime, + 37: formatToA, + 38: formatToA, + 39: formatToB, + 40: formatToB, + 41: formatToString, // Doesn't support currently + 42: formatToString, // Doesn't support currently + 43: formatToString, // Doesn't support currently + 44: formatToString, // Doesn't support currently + 45: parseTime, + 46: parseTime, + 47: parseTime, + 48: formatToE, + 49: formatToString, +} + +// validType defined the list of valid validation types. 
+var validType = map[string]string{ + "cell": "cellIs", + "date": "date", // Doesn't support currently + "time": "time", // Doesn't support currently + "average": "aboveAverage", + "duplicate": "duplicateValues", + "unique": "uniqueValues", + "top": "top10", + "bottom": "top10", + "text": "text", // Doesn't support currently + "time_period": "timePeriod", // Doesn't support currently + "blanks": "containsBlanks", // Doesn't support currently + "no_blanks": "notContainsBlanks", // Doesn't support currently + "errors": "containsErrors", // Doesn't support currently + "no_errors": "notContainsErrors", // Doesn't support currently + "2_color_scale": "2_color_scale", + "3_color_scale": "3_color_scale", + "data_bar": "dataBar", + "formula": "expression", +} + +// criteriaType defined the list of valid criteria types. +var criteriaType = map[string]string{ + "between": "between", + "not between": "notBetween", + "equal to": "equal", + "=": "equal", + "==": "equal", + "not equal to": "notEqual", + "!=": "notEqual", + "<>": "notEqual", + "greater than": "greaterThan", + ">": "greaterThan", + "less than": "lessThan", + "<": "lessThan", + "greater than or equal to": "greaterThanOrEqual", + ">=": "greaterThanOrEqual", + "less than or equal to": "lessThanOrEqual", + "<=": "lessThanOrEqual", + "containing": "containsText", + "not containing": "notContains", + "begins with": "beginsWith", + "ends with": "endsWith", + "yesterday": "yesterday", + "today": "today", + "last 7 days": "last7Days", + "last week": "lastWeek", + "this week": "thisWeek", + "continue week": "continueWeek", + "last month": "lastMonth", + "this month": "thisMonth", + "continue month": "continueMonth", +} + +// formatToString provides function to return original string by given built-in +// number formats code and cell string. +func formatToString(i int, v string) string { + return v +} + +// formatToInt provides function to convert original string to integer format as +// string type by given built-in number formats code and cell string. +func formatToInt(i int, v string) string { + f, err := strconv.ParseFloat(v, 64) + if err != nil { + return v + } + return fmt.Sprintf("%d", int(f)) +} + +// formatToFloat provides function to convert original string to float format as +// string type by given built-in number formats code and cell string. +func formatToFloat(i int, v string) string { + f, err := strconv.ParseFloat(v, 64) + if err != nil { + return v + } + return fmt.Sprintf("%.2f", f) +} + +// formatToA provides function to convert original string to special format as +// string type by given built-in number formats code and cell string. +func formatToA(i int, v string) string { + f, err := strconv.ParseFloat(v, 64) + if err != nil { + return v + } + if f < 0 { + t := int(math.Abs(f)) + return fmt.Sprintf("(%d)", t) + } + t := int(f) + return fmt.Sprintf("%d", t) +} + +// formatToB provides function to convert original string to special format as +// string type by given built-in number formats code and cell string. +func formatToB(i int, v string) string { + f, err := strconv.ParseFloat(v, 64) + if err != nil { + return v + } + if f < 0 { + return fmt.Sprintf("(%.2f)", f) + } + return fmt.Sprintf("%.2f", f) +} + +// formatToC provides function to convert original string to special format as +// string type by given built-in number formats code and cell string. 
+func formatToC(i int, v string) string { + f, err := strconv.ParseFloat(v, 64) + if err != nil { + return v + } + f = f * 100 + return fmt.Sprintf("%d%%", int(f)) +} + +// formatToD provides function to convert original string to special format as +// string type by given built-in number formats code and cell string. +func formatToD(i int, v string) string { + f, err := strconv.ParseFloat(v, 64) + if err != nil { + return v + } + f = f * 100 + return fmt.Sprintf("%.2f%%", f) +} + +// formatToE provides function to convert original string to special format as +// string type by given built-in number formats code and cell string. +func formatToE(i int, v string) string { + f, err := strconv.ParseFloat(v, 64) + if err != nil { + return v + } + return fmt.Sprintf("%.e", f) +} + +// parseTime provides function to returns a string parsed using time.Time. +// Replace Excel placeholders with Go time placeholders. For example, replace +// yyyy with 2006. These are in a specific order, due to the fact that m is used +// in month, minute, and am/pm. It would be easier to fix that with regular +// expressions, but if it's possible to keep this simple it would be easier to +// maintain. Full-length month and days (e.g. March, Tuesday) have letters in +// them that would be replaced by other characters below (such as the 'h' in +// March, or the 'd' in Tuesday) below. First we convert them to arbitrary +// characters unused in Excel Date formats, and then at the end, turn them to +// what they should actually be. +// Based off: http://www.ozgrid.com/Excel/CustomFormats.htm +func parseTime(i int, v string) string { + f, err := strconv.ParseFloat(v, 64) + if err != nil { + return v + } + val := timeFromExcelTime(f, false) + format := builtInNumFmt[i] + + replacements := []struct{ xltime, gotime string }{ + {"yyyy", "2006"}, + {"yy", "06"}, + {"mmmm", "%%%%"}, + {"dddd", "&&&&"}, + {"dd", "02"}, + {"d", "2"}, + {"mmm", "Jan"}, + {"mmss", "0405"}, + {"ss", "05"}, + {"mm:", "04:"}, + {":mm", ":04"}, + {"mm", "01"}, + {"am/pm", "pm"}, + {"m/", "1/"}, + {"%%%%", "January"}, + {"&&&&", "Monday"}, + } + // It is the presence of the "am/pm" indicator that determines if this is + // a 12 hour or 24 hours time format, not the number of 'h' characters. + if is12HourTime(format) { + format = strings.Replace(format, "hh", "03", 1) + format = strings.Replace(format, "h", "3", 1) + } else { + format = strings.Replace(format, "hh", "15", 1) + format = strings.Replace(format, "h", "15", 1) + } + for _, repl := range replacements { + format = strings.Replace(format, repl.xltime, repl.gotime, 1) + } + // If the hour is optional, strip it out, along with the possible dangling + // colon that would remain. + if val.Hour() < 1 { + format = strings.Replace(format, "]:", "]", 1) + format = strings.Replace(format, "[03]", "", 1) + format = strings.Replace(format, "[3]", "", 1) + format = strings.Replace(format, "[15]", "", 1) + } else { + format = strings.Replace(format, "[3]", "3", 1) + format = strings.Replace(format, "[15]", "15", 1) + } + return val.Format(format) +} + +// is12HourTime checks whether an Excel time format string is a 12 hours form. +func is12HourTime(format string) bool { + return strings.Contains(format, "am/pm") || strings.Contains(format, "AM/PM") || strings.Contains(format, "a/p") || strings.Contains(format, "A/P") +} + +// stylesReader provides function to get the pointer to the structure after +// deserialization of xl/styles.xml. 
+func (f *File) stylesReader() *xlsxStyleSheet { + if f.Styles == nil { + var styleSheet xlsxStyleSheet + _ = xml.Unmarshal([]byte(f.readXML("xl/styles.xml")), &styleSheet) + f.Styles = &styleSheet + } + return f.Styles +} + +// styleSheetWriter provides function to save xl/styles.xml after serialize +// structure. +func (f *File) styleSheetWriter() { + if f.Styles != nil { + output, _ := xml.Marshal(f.Styles) + f.saveFileList("xl/styles.xml", replaceWorkSheetsRelationshipsNameSpaceBytes(output)) + } +} + +// parseFormatStyleSet provides function to parse the format settings of the +// cells and conditional formats. +func parseFormatStyleSet(style string) (*formatStyle, error) { + format := formatStyle{ + DecimalPlaces: 2, + } + err := json.Unmarshal([]byte(style), &format) + return &format, err +} + +// NewStyle provides function to create style for cells by given style format. +// Note that the color field uses RGB color code. +// +// The following shows the border styles sorted by excelize index number: +// +// Index | Name | Weight | Style +// -------+---------------+--------+------------- +// 0 | None | 0 | +// 1 | Continuous | 1 | ----------- +// 2 | Continuous | 2 | ----------- +// 3 | Dash | 1 | - - - - - - +// 4 | Dot | 1 | . . . . . . +// 5 | Continuous | 3 | ----------- +// 6 | Double | 3 | =========== +// 7 | Continuous | 0 | ----------- +// 8 | Dash | 2 | - - - - - - +// 9 | Dash Dot | 1 | - . - . - . +// 10 | Dash Dot | 2 | - . - . - . +// 11 | Dash Dot Dot | 1 | - . . - . . +// 12 | Dash Dot Dot | 2 | - . . - . . +// 13 | SlantDash Dot | 2 | / - . / - . +// +// The following shows the borders in the order shown in the Excel dialog: +// +// Index | Style | Index | Style +// -------+-------------+-------+------------- +// 0 | None | 12 | - . . - . . +// 7 | ----------- | 13 | / - . / - . +// 4 | . . . . . . | 10 | - . - . - . +// 11 | - . . - . . | 8 | - - - - - - +// 9 | - . - . - . 
| 2 | ----------- +// 3 | - - - - - - | 5 | ----------- +// 1 | ----------- | 6 | =========== +// +// The following shows the shading styles sorted by excelize index number: +// +// Index | Style | Index | Style +// -------+-----------------+-------+----------------- +// 0 | Horizontal | 3 | Diagonal down +// 1 | Vertical | 4 | From corner +// 2 | Diagonal Up | 5 | From center +// +// The following shows the patterns styles sorted by excelize index number: +// +// Index | Style | Index | Style +// -------+-----------------+-------+----------------- +// 0 | None | 10 | darkTrellis +// 1 | solid | 11 | lightHorizontal +// 2 | mediumGray | 12 | lightVertical +// 3 | darkGray | 13 | lightDown +// 4 | lightGray | 14 | lightUp +// 5 | darkHorizontal | 15 | lightGrid +// 6 | darkVertical | 16 | lightTrellis +// 7 | darkDown | 17 | gray125 +// 8 | darkUp | 18 | gray0625 +// 9 | darkGrid | | +// +// The following the type of horizontal alignment in cells: +// +// Style +// ------------------ +// left +// center +// right +// fill +// justify +// centerContinuous +// distributed +// +// The following the type of vertical alignment in cells: +// +// Style +// ------------------ +// top +// center +// justify +// distributed +// +// The following the type of font underline style: +// +// Style +// ------------------ +// single +// double +// +// Excel's built-in all languages formats are shown in the following table: +// +// Index | Format String +// -------+---------------------------------------------------- +// 0 | General +// 1 | 0 +// 2 | 0.00 +// 3 | #,##0 +// 4 | #,##0.00 +// 5 | ($#,##0_);($#,##0) +// 6 | ($#,##0_);[Red]($#,##0) +// 7 | ($#,##0.00_);($#,##0.00) +// 8 | ($#,##0.00_);[Red]($#,##0.00) +// 9 | 0% +// 10 | 0.00% +// 11 | 0.00E+00 +// 12 | # ?/? +// 13 | # ??/?? +// 14 | m/d/yy +// 15 | d-mmm-yy +// 16 | d-mmm +// 17 | mmm-yy +// 18 | h:mm AM/PM +// 19 | h:mm:ss AM/PM +// 20 | h:mm +// 21 | h:mm:ss +// 22 | m/d/yy h:mm +// ... | ... 
+// 37 | (#,##0_);(#,##0) +// 38 | (#,##0_);[Red](#,##0) +// 39 | (#,##0.00_);(#,##0.00) +// 40 | (#,##0.00_);[Red](#,##0.00) +// 41 | _(* #,##0_);_(* (#,##0);_(* "-"_);_(@_) +// 42 | _($* #,##0_);_($* (#,##0);_($* "-"_);_(@_) +// 43 | _(* #,##0.00_);_(* (#,##0.00);_(* "-"??_);_(@_) +// 44 | _($* #,##0.00_);_($* (#,##0.00);_($* "-"??_);_(@_) +// 45 | mm:ss +// 46 | [h]:mm:ss +// 47 | mm:ss.0 +// 48 | ##0.0E+0 +// 49 | @ +// +// Number format code in zh-tw language: +// +// Index | Symbol +// -------+------------------------------------------- +// 27 | [$-404]e/m/d +// 28 | [$-404]e"年"m"月"d"日" +// 29 | [$-404]e"年"m"月"d"日" +// 30 | m/d/yy +// 31 | yyyy"年"m"月"d"日" +// 32 | hh"時"mm"分" +// 33 | hh"時"mm"分"ss"秒" +// 34 | 上午/下午 hh"時"mm"分" +// 35 | 上午/下午 hh"時"mm"分"ss"秒" +// 36 | [$-404]e/m/d +// 50 | [$-404]e/m/d +// 51 | [$-404]e"年"m"月"d"日" +// 52 | 上午/下午 hh"時"mm"分" +// 53 | 上午/下午 hh"時"mm"分"ss"秒" +// 54 | [$-404]e"年"m"月"d"日" +// 55 | 上午/下午 hh"時"mm"分" +// 56 | 上午/下午 hh"時"mm"分"ss"秒" +// 57 | [$-404]e/m/d +// 58 | [$-404]e"年"m"月"d"日" +// +// Number format code in zh-cn language: +// +// Index | Symbol +// -------+------------------------------------------- +// 27 | yyyy"年"m"月" +// 28 | m"月"d"日" +// 29 | m"月"d"日" +// 30 | m-d-yy +// 31 | yyyy"年"m"月"d"日" +// 32 | h"时"mm"分" +// 33 | h"时"mm"分"ss"秒" +// 34 | 上午/下午 h"时"mm"分" +// 35 | 上午/下午 h"时"mm"分"ss"秒 +// 36 | yyyy"年"m"月 +// 50 | yyyy"年"m"月 +// 51 | m"月"d"日 +// 52 | yyyy"年"m"月 +// 53 | m"月"d"日 +// 54 | m"月"d"日 +// 55 | 上午/下午 h"时"mm"分 +// 56 | 上午/下午 h"时"mm"分"ss"秒 +// 57 | yyyy"年"m"月 +// 58 | m"月"d"日" +// +// Number format code with unicode values provided for language glyphs where +// they occur in zh-tw language: +// +// Index | Symbol +// -------+------------------------------------------- +// 27 | [$-404]e/m/ +// 28 | [$-404]e"5E74"m"6708"d"65E5 +// 29 | [$-404]e"5E74"m"6708"d"65E5 +// 30 | m/d/y +// 31 | yyyy"5E74"m"6708"d"65E5 +// 32 | hh"6642"mm"5206 +// 33 | hh"6642"mm"5206"ss"79D2 +// 34 | 4E0A5348/4E0B5348hh"6642"mm"5206 +// 35 | 4E0A5348/4E0B5348hh"6642"mm"5206"ss"79D2 +// 36 | [$-404]e/m/ +// 50 | [$-404]e/m/ +// 51 | [$-404]e"5E74"m"6708"d"65E5 +// 52 | 4E0A5348/4E0B5348hh"6642"mm"5206 +// 53 | 4E0A5348/4E0B5348hh"6642"mm"5206"ss"79D2 +// 54 | [$-404]e"5E74"m"6708"d"65E5 +// 55 | 4E0A5348/4E0B5348hh"6642"mm"5206 +// 56 | 4E0A5348/4E0B5348hh"6642"mm"5206"ss"79D2 +// 57 | [$-404]e/m/ +// 58 | [$-404]e"5E74"m"6708"d"65E5" +// +// Number format code with unicode values provided for language glyphs where +// they occur in zh-cn language: +// +// Index | Symbol +// -------+------------------------------------------- +// 27 | yyyy"5E74"m"6708 +// 28 | m"6708"d"65E5 +// 29 | m"6708"d"65E5 +// 30 | m-d-y +// 31 | yyyy"5E74"m"6708"d"65E5 +// 32 | h"65F6"mm"5206 +// 33 | h"65F6"mm"5206"ss"79D2 +// 34 | 4E0A5348/4E0B5348h"65F6"mm"5206 +// 35 | 4E0A5348/4E0B5348h"65F6"mm"5206"ss"79D2 +// 36 | yyyy"5E74"m"6708 +// 50 | yyyy"5E74"m"6708 +// 51 | m"6708"d"65E5 +// 52 | yyyy"5E74"m"6708 +// 53 | m"6708"d"65E5 +// 54 | m"6708"d"65E5 +// 55 | 4E0A5348/4E0B5348h"65F6"mm"5206 +// 56 | 4E0A5348/4E0B5348h"65F6"mm"5206"ss"79D2 +// 57 | yyyy"5E74"m"6708 +// 58 | m"6708"d"65E5" +// +// Number format code in ja-jp language: +// +// Index | Symbol +// -------+------------------------------------------- +// 27 | [$-411]ge.m.d +// 28 | [$-411]ggge"年"m"月"d"日 +// 29 | [$-411]ggge"年"m"月"d"日 +// 30 | m/d/y +// 31 | yyyy"年"m"月"d"日 +// 32 | h"時"mm"分 +// 33 | h"時"mm"分"ss"秒 +// 34 | yyyy"年"m"月 +// 35 | m"月"d"日 +// 36 | [$-411]ge.m.d +// 50 | [$-411]ge.m.d +// 51 | 
[$-411]ggge"年"m"月"d"日 +// 52 | yyyy"年"m"月 +// 53 | m"月"d"日 +// 54 | [$-411]ggge"年"m"月"d"日 +// 55 | yyyy"年"m"月 +// 56 | m"月"d"日 +// 57 | [$-411]ge.m.d +// 58 | [$-411]ggge"年"m"月"d"日" +// +// Number format code in ko-kr language: +// +// Index | Symbol +// -------+------------------------------------------- +// 27 | yyyy"年" mm"月" dd"日 +// 28 | mm-d +// 29 | mm-d +// 30 | mm-dd-y +// 31 | yyyy"년" mm"월" dd"일 +// 32 | h"시" mm"분 +// 33 | h"시" mm"분" ss"초 +// 34 | yyyy-mm-d +// 35 | yyyy-mm-d +// 36 | yyyy"年" mm"月" dd"日 +// 50 | yyyy"年" mm"月" dd"日 +// 51 | mm-d +// 52 | yyyy-mm-d +// 53 | yyyy-mm-d +// 54 | mm-d +// 55 | yyyy-mm-d +// 56 | yyyy-mm-d +// 57 | yyyy"年" mm"月" dd"日 +// 58 | mm-dd +// +// Number format code with unicode values provided for language glyphs where +// they occur in ja-jp language: +// +// Index | Symbol +// -------+------------------------------------------- +// 27 | [$-411]ge.m.d +// 28 | [$-411]ggge"5E74"m"6708"d"65E5 +// 29 | [$-411]ggge"5E74"m"6708"d"65E5 +// 30 | m/d/y +// 31 | yyyy"5E74"m"6708"d"65E5 +// 32 | h"6642"mm"5206 +// 33 | h"6642"mm"5206"ss"79D2 +// 34 | yyyy"5E74"m"6708 +// 35 | m"6708"d"65E5 +// 36 | [$-411]ge.m.d +// 50 | [$-411]ge.m.d +// 51 | [$-411]ggge"5E74"m"6708"d"65E5 +// 52 | yyyy"5E74"m"6708 +// 53 | m"6708"d"65E5 +// 54 | [$-411]ggge"5E74"m"6708"d"65E5 +// 55 | yyyy"5E74"m"6708 +// 56 | m"6708"d"65E5 +// 57 | [$-411]ge.m.d +// 58 | [$-411]ggge"5E74"m"6708"d"65E5" +// +// Number format code with unicode values provided for language glyphs where +// they occur in ko-kr language: +// +// Index | Symbol +// -------+------------------------------------------- +// 27 | yyyy"5E74" mm"6708" dd"65E5 +// 28 | mm-d +// 29 | mm-d +// 30 | mm-dd-y +// 31 | yyyy"B144" mm"C6D4" dd"C77C +// 32 | h"C2DC" mm"BD84 +// 33 | h"C2DC" mm"BD84" ss"CD08 +// 34 | yyyy-mm-d +// 35 | yyyy-mm-d +// 36 | yyyy"5E74" mm"6708" dd"65E5 +// 50 | yyyy"5E74" mm"6708" dd"65E5 +// 51 | mm-d +// 52 | yyyy-mm-d +// 53 | yyyy-mm-d +// 54 | mm-d +// 55 | yyyy-mm-d +// 56 | yyyy-mm-d +// 57 | yyyy"5E74" mm"6708" dd"65E5 +// 58 | mm-dd +// +// Number format code in th-th language: +// +// Index | Symbol +// -------+------------------------------------------- +// 59 | t +// 60 | t0.0 +// 61 | t#,## +// 62 | t#,##0.0 +// 67 | t0 +// 68 | t0.00 +// 69 | t# ?/ +// 70 | t# ??/? +// 71 | ว/ด/ปปป +// 72 | ว-ดดด-ป +// 73 | ว-ดด +// 74 | ดดด-ป +// 75 | ช:น +// 76 | ช:นน:ท +// 77 | ว/ด/ปปปป ช:น +// 78 | นน:ท +// 79 | [ช]:นน:ท +// 80 | นน:ทท. +// 81 | d/m/bb +// +// Number format code with unicode values provided for language glyphs where +// they occur in th-th language: +// +// Index | Symbol +// -------+------------------------------------------- +// 59 | t +// 60 | t0.0 +// 61 | t#,## +// 62 | t#,##0.0 +// 67 | t0 +// 68 | t0.00 +// 69 | t# ?/ +// 70 | t# ??/? +// 71 | 0E27/0E14/0E1B0E1B0E1B0E1 +// 72 | 0E27-0E140E140E14-0E1B0E1 +// 73 | 0E27-0E140E140E1 +// 74 | 0E140E140E14-0E1B0E1 +// 75 | 0E0A:0E190E1 +// 76 | 0E0A:0E190E19:0E170E1 +// 77 | 0E27/0E14/0E1B0E1B0E1B0E1B 0E0A:0E190E1 +// 78 | 0E190E19:0E170E1 +// 79 | [0E0A]:0E190E19:0E170E1 +// 80 | 0E190E19:0E170E17. 
+// 81 | d/m/bb +// +// Excelize built-in currency formats are shown in the following table, only +// support these types in the following table (Index number is used only for +// markup and is not used inside an Excel file and you can't get formatted value +// by the function GetCellValue) currently: +// +// Index | Symbol +// -------+--------------------------------------------------------------- +// 164 | CN¥ +// 165 | $ English (China) +// 166 | $ Cherokee (United States) +// 167 | $ Chinese (Singapore) +// 168 | $ Chinese (Taiwan) +// 169 | $ English (Australia) +// 170 | $ English (Belize) +// 171 | $ English (Canada) +// 172 | $ English (Jamaica) +// 173 | $ English (New Zealand) +// 174 | $ English (Singapore) +// 175 | $ English (Trinidad & Tobago) +// 176 | $ English (U.S. Virgin Islands) +// 177 | $ English (United States) +// 178 | $ French (Canada) +// 179 | $ Hawaiian (United States) +// 180 | $ Malay (Brunei) +// 181 | $ Quechua (Ecuador) +// 182 | $ Spanish (Chile) +// 183 | $ Spanish (Colombia) +// 184 | $ Spanish (Ecuador) +// 185 | $ Spanish (El Salvador) +// 186 | $ Spanish (Mexico) +// 187 | $ Spanish (Puerto Rico) +// 188 | $ Spanish (United States) +// 189 | $ Spanish (Uruguay) +// 190 | £ English (United Kingdom) +// 191 | £ Scottish Gaelic (United Kingdom) +// 192 | £ Welsh (United Kindom) +// 193 | ¥ Chinese (China) +// 194 | ¥ Japanese (Japan) +// 195 | ¥ Sichuan Yi (China) +// 196 | ¥ Tibetan (China) +// 197 | ¥ Uyghur (China) +// 198 | ֏ Armenian (Armenia) +// 199 | ؋ Pashto (Afghanistan) +// 200 | ؋ Persian (Afghanistan) +// 201 | ৳ Bengali (Bangladesh) +// 202 | ៛ Khmer (Cambodia) +// 203 | ₡ Spanish (Costa Rica) +// 204 | ₦ Hausa (Nigeria) +// 205 | ₦ Igbo (Nigeria) +// 206 | ₦ Yoruba (Nigeria) +// 207 | ₩ Korean (South Korea) +// 208 | ₪ Hebrew (Israel) +// 209 | ₫ Vietnamese (Vietnam) +// 210 | € Basque (Spain) +// 211 | € Breton (France) +// 212 | € Catalan (Spain) +// 213 | € Corsican (France) +// 214 | € Dutch (Belgium) +// 215 | € Dutch (Netherlands) +// 216 | € English (Ireland) +// 217 | € Estonian (Estonia) +// 218 | € Euro (€ 123) +// 219 | € Euro (123 €) +// 220 | € Finnish (Finland) +// 221 | € French (Belgium) +// 222 | € French (France) +// 223 | € French (Luxembourg) +// 224 | € French (Monaco) +// 225 | € French (Réunion) +// 226 | € Galician (Spain) +// 227 | € German (Austria) +// 228 | € German (Luxembourg) +// 229 | € Greek (Greece) +// 230 | € Inari Sami (Finland) +// 231 | € Irish (Ireland) +// 232 | € Italian (Italy) +// 233 | € Latin (Italy) +// 234 | € Latin, Serbian (Montenegro) +// 235 | € Larvian (Latvia) +// 236 | € Lithuanian (Lithuania) +// 237 | € Lower Sorbian (Germany) +// 238 | € Luxembourgish (Luxembourg) +// 239 | € Maltese (Malta) +// 240 | € Northern Sami (Finland) +// 241 | € Occitan (France) +// 242 | € Portuguese (Portugal) +// 243 | € Serbian (Montenegro) +// 244 | € Skolt Sami (Finland) +// 245 | € Slovak (Slovakia) +// 246 | € Slovenian (Slovenia) +// 247 | € Spanish (Spain) +// 248 | € Swedish (Finland) +// 249 | € Swiss German (France) +// 250 | € Upper Sorbian (Germany) +// 251 | € Western Frisian (Netherlands) +// 252 | ₭ Lao (Laos) +// 253 | ₮ Mongolian (Mongolia) +// 254 | ₮ Mongolian, Mongolian (Mongolia) +// 255 | ₱ English (Philippines) +// 256 | ₱ Filipino (Philippines) +// 257 | ₴ Ukrainian (Ukraine) +// 258 | ₸ Kazakh (Kazakhstan) +// 259 | ₹ Arabic, Kashmiri (India) +// 260 | ₹ English (India) +// 261 | ₹ Gujarati (India) +// 262 | ₹ Hindi (India) +// 263 | ₹ Kannada (India) +// 264 | ₹ Kashmiri 
(India) +// 265 | ₹ Konkani (India) +// 266 | ₹ Manipuri (India) +// 267 | ₹ Marathi (India) +// 268 | ₹ Nepali (India) +// 269 | ₹ Oriya (India) +// 270 | ₹ Punjabi (India) +// 271 | ₹ Sanskrit (India) +// 272 | ₹ Sindhi (India) +// 273 | ₹ Tamil (India) +// 274 | ₹ Urdu (India) +// 275 | ₺ Turkish (Turkey) +// 276 | ₼ Azerbaijani (Azerbaijan) +// 277 | ₼ Cyrillic, Azerbaijani (Azerbaijan) +// 278 | ₽ Russian (Russia) +// 279 | ₽ Sakha (Russia) +// 280 | ₾ Georgian (Georgia) +// 281 | B/. Spanish (Panama) +// 282 | Br Oromo (Ethiopia) +// 283 | Br Somali (Ethiopia) +// 284 | Br Tigrinya (Ethiopia) +// 285 | Bs Quechua (Bolivia) +// 286 | Bs Spanish (Bolivia) +// 287 | BS. Spanish (Venezuela) +// 288 | BWP Tswana (Botswana) +// 289 | C$ Spanish (Nicaragua) +// 290 | CA$ Latin, Inuktitut (Canada) +// 291 | CA$ Mohawk (Canada) +// 292 | CA$ Unified Canadian Aboriginal Syllabics, Inuktitut (Canada) +// 293 | CFA French (Mali) +// 294 | CFA French (Senegal) +// 295 | CFA Fulah (Senegal) +// 296 | CFA Wolof (Senegal) +// 297 | CHF French (Switzerland) +// 298 | CHF German (Liechtenstein) +// 299 | CHF German (Switzerland) +// 300 | CHF Italian (Switzerland) +// 301 | CHF Romansh (Switzerland) +// 302 | CLP Mapuche (Chile) +// 303 | CN¥ Mongolian, Mongolian (China) +// 304 | DZD Central Atlas Tamazight (Algeria) +// 305 | FCFA French (Cameroon) +// 306 | Ft Hungarian (Hungary) +// 307 | G French (Haiti) +// 308 | Gs. Spanish (Paraguay) +// 309 | GTQ K'iche' (Guatemala) +// 310 | HK$ Chinese (Hong Kong (China)) +// 311 | HK$ English (Hong Kong (China)) +// 312 | HRK Croatian (Croatia) +// 313 | IDR English (Indonesia) +// 314 | IQD Arbic, Central Kurdish (Iraq) +// 315 | ISK Icelandic (Iceland) +// 316 | K Burmese (Myanmar (Burma)) +// 317 | Kč Czech (Czech Republic) +// 318 | KM Bosnian (Bosnia & Herzegovina) +// 319 | KM Croatian (Bosnia & Herzegovina) +// 320 | KM Latin, Serbian (Bosnia & Herzegovina) +// 321 | kr Faroese (Faroe Islands) +// 322 | kr Northern Sami (Norway) +// 323 | kr Northern Sami (Sweden) +// 324 | kr Norwegian Bokmål (Norway) +// 325 | kr Norwegian Nynorsk (Norway) +// 326 | kr Swedish (Sweden) +// 327 | kr. Danish (Denmark) +// 328 | kr. Kalaallisut (Greenland) +// 329 | Ksh Swahili (kenya) +// 330 | L Romanian (Moldova) +// 331 | L Russian (Moldova) +// 332 | L Spanish (Honduras) +// 333 | Lekë Albanian (Albania) +// 334 | MAD Arabic, Central Atlas Tamazight (Morocco) +// 335 | MAD French (Morocco) +// 336 | MAD Tifinagh, Central Atlas Tamazight (Morocco) +// 337 | MOP$ Chinese (Macau (China)) +// 338 | MVR Divehi (Maldives) +// 339 | Nfk Tigrinya (Eritrea) +// 340 | NGN Bini (Nigeria) +// 341 | NGN Fulah (Nigeria) +// 342 | NGN Ibibio (Nigeria) +// 343 | NGN Kanuri (Nigeria) +// 344 | NOK Lule Sami (Norway) +// 345 | NOK Southern Sami (Norway) +// 346 | NZ$ Maori (New Zealand) +// 347 | PKR Sindhi (Pakistan) +// 348 | PYG Guarani (Paraguay) +// 349 | Q Spanish (Guatemala) +// 350 | R Afrikaans (South Africa) +// 351 | R English (South Africa) +// 352 | R Zulu (South Africa) +// 353 | R$ Portuguese (Brazil) +// 354 | RD$ Spanish (Dominican Republic) +// 355 | RF Kinyarwanda (Rwanda) +// 356 | RM English (Malaysia) +// 357 | RM Malay (Malaysia) +// 358 | RON Romanian (Romania) +// 359 | Rp Indonesoan (Indonesia) +// 360 | Rs Urdu (Pakistan) +// 361 | Rs. Tamil (Sri Lanka) +// 362 | RSD Latin, Serbian (Serbia) +// 363 | RSD Serbian (Serbia) +// 364 | RUB Bashkir (Russia) +// 365 | RUB Tatar (Russia) +// 366 | S/. Quechua (Peru) +// 367 | S/. 
Spanish (Peru) +// 368 | SEK Lule Sami (Sweden) +// 369 | SEK Southern Sami (Sweden) +// 370 | soʻm Latin, Uzbek (Uzbekistan) +// 371 | soʻm Uzbek (Uzbekistan) +// 372 | SYP Syriac (Syria) +// 373 | THB Thai (Thailand) +// 374 | TMT Turkmen (Turkmenistan) +// 375 | US$ English (Zimbabwe) +// 376 | ZAR Northern Sotho (South Africa) +// 377 | ZAR Southern Sotho (South Africa) +// 378 | ZAR Tsonga (South Africa) +// 379 | ZAR Tswana (south Africa) +// 380 | ZAR Venda (South Africa) +// 381 | ZAR Xhosa (South Africa) +// 382 | zł Polish (Poland) +// 383 | ден Macedonian (Macedonia) +// 384 | KM Cyrillic, Bosnian (Bosnia & Herzegovina) +// 385 | KM Serbian (Bosnia & Herzegovina) +// 386 | лв. Bulgarian (Bulgaria) +// 387 | p. Belarusian (Belarus) +// 388 | сом Kyrgyz (Kyrgyzstan) +// 389 | сом Tajik (Tajikistan) +// 390 | ج.م. Arabic (Egypt) +// 391 | د.أ. Arabic (Jordan) +// 392 | د.أ. Arabic (United Arab Emirates) +// 393 | د.ب. Arabic (Bahrain) +// 394 | د.ت. Arabic (Tunisia) +// 395 | د.ج. Arabic (Algeria) +// 396 | د.ع. Arabic (Iraq) +// 397 | د.ك. Arabic (Kuwait) +// 398 | د.ل. Arabic (Libya) +// 399 | د.م. Arabic (Morocco) +// 400 | ر Punjabi (Pakistan) +// 401 | ر.س. Arabic (Saudi Arabia) +// 402 | ر.ع. Arabic (Oman) +// 403 | ر.ق. Arabic (Qatar) +// 404 | ر.ي. Arabic (Yemen) +// 405 | ریال Persian (Iran) +// 406 | ل.س. Arabic (Syria) +// 407 | ل.ل. Arabic (Lebanon) +// 408 | ብር Amharic (Ethiopia) +// 409 | रू Nepaol (Nepal) +// 410 | රු. Sinhala (Sri Lanka) +// 411 | ADP +// 412 | AED +// 413 | AFA +// 414 | AFN +// 415 | ALL +// 416 | AMD +// 417 | ANG +// 418 | AOA +// 419 | ARS +// 420 | ATS +// 421 | AUD +// 422 | AWG +// 423 | AZM +// 424 | AZN +// 425 | BAM +// 426 | BBD +// 427 | BDT +// 428 | BEF +// 429 | BGL +// 430 | BGN +// 431 | BHD +// 432 | BIF +// 433 | BMD +// 434 | BND +// 435 | BOB +// 436 | BOV +// 437 | BRL +// 438 | BSD +// 439 | BTN +// 440 | BWP +// 441 | BYR +// 442 | BZD +// 443 | CAD +// 444 | CDF +// 445 | CHE +// 446 | CHF +// 447 | CHW +// 448 | CLF +// 449 | CLP +// 450 | CNY +// 451 | COP +// 452 | COU +// 453 | CRC +// 454 | CSD +// 455 | CUC +// 456 | CVE +// 457 | CYP +// 458 | CZK +// 459 | DEM +// 460 | DJF +// 461 | DKK +// 462 | DOP +// 463 | DZD +// 464 | ECS +// 465 | ECV +// 466 | EEK +// 467 | EGP +// 468 | ERN +// 469 | ESP +// 470 | ETB +// 471 | EUR +// 472 | FIM +// 473 | FJD +// 474 | FKP +// 475 | FRF +// 476 | GBP +// 477 | GEL +// 478 | GHC +// 479 | GHS +// 480 | GIP +// 481 | GMD +// 482 | GNF +// 483 | GRD +// 484 | GTQ +// 485 | GYD +// 486 | HKD +// 487 | HNL +// 488 | HRK +// 489 | HTG +// 490 | HUF +// 491 | IDR +// 492 | IEP +// 493 | ILS +// 494 | INR +// 495 | IQD +// 496 | IRR +// 497 | ISK +// 498 | ITL +// 499 | JMD +// 500 | JOD +// 501 | JPY +// 502 | KAF +// 503 | KES +// 504 | KGS +// 505 | KHR +// 506 | KMF +// 507 | KPW +// 508 | KRW +// 509 | KWD +// 510 | KYD +// 511 | KZT +// 512 | LAK +// 513 | LBP +// 514 | LKR +// 515 | LRD +// 516 | LSL +// 517 | LTL +// 518 | LUF +// 519 | LVL +// 520 | LYD +// 521 | MAD +// 522 | MDL +// 523 | MGA +// 524 | MGF +// 525 | MKD +// 526 | MMK +// 527 | MNT +// 528 | MOP +// 529 | MRO +// 530 | MTL +// 531 | MUR +// 532 | MVR +// 533 | MWK +// 534 | MXN +// 535 | MXV +// 536 | MYR +// 537 | MZM +// 538 | MZN +// 539 | NAD +// 540 | NGN +// 541 | NIO +// 542 | NLG +// 543 | NOK +// 544 | NPR +// 545 | NTD +// 546 | NZD +// 547 | OMR +// 548 | PAB +// 549 | PEN +// 550 | PGK +// 551 | PHP +// 552 | PKR +// 553 | PLN +// 554 | PTE +// 555 | PYG +// 556 | QAR +// 557 | ROL +// 558 | 
RON +// 559 | RSD +// 560 | RUB +// 561 | RUR +// 562 | RWF +// 563 | SAR +// 564 | SBD +// 565 | SCR +// 566 | SDD +// 567 | SDG +// 568 | SDP +// 569 | SEK +// 570 | SGD +// 571 | SHP +// 572 | SIT +// 573 | SKK +// 574 | SLL +// 575 | SOS +// 576 | SPL +// 577 | SRD +// 578 | SRG +// 579 | STD +// 580 | SVC +// 581 | SYP +// 582 | SZL +// 583 | THB +// 584 | TJR +// 585 | TJS +// 586 | TMM +// 587 | TMT +// 588 | TND +// 589 | TOP +// 590 | TRL +// 591 | TRY +// 592 | TTD +// 593 | TWD +// 594 | TZS +// 595 | UAH +// 596 | UGX +// 597 | USD +// 598 | USN +// 599 | USS +// 600 | UYI +// 601 | UYU +// 602 | UZS +// 603 | VEB +// 604 | VEF +// 605 | VND +// 606 | VUV +// 607 | WST +// 608 | XAF +// 609 | XAG +// 610 | XAU +// 611 | XB5 +// 612 | XBA +// 613 | XBB +// 614 | XBC +// 615 | XBD +// 616 | XCD +// 617 | XDR +// 618 | XFO +// 619 | XFU +// 620 | XOF +// 621 | XPD +// 622 | XPF +// 623 | XPT +// 624 | XTS +// 625 | XXX +// 626 | YER +// 627 | YUM +// 628 | ZAR +// 629 | ZMK +// 630 | ZMW +// 631 | ZWD +// 632 | ZWL +// 633 | ZWN +// 634 | ZWR +// +// Excelize support set custom number format for cell. For example, set number +// as date type in Uruguay (Spanish) format for Sheet1!A6: +// +// xlsx := excelize.NewFile() +// xlsx.SetCellValue("Sheet1", "A6", 42920.5) +// style, _ := xlsx.NewStyle(`{"custom_number_format": "[$-380A]dddd\\,\\ dd\" de \"mmmm\" de \"yyyy;@"}`) +// xlsx.SetCellStyle("Sheet1", "A6", "A6", style) +// +// Cell Sheet1!A6 in the Excel Application: martes, 04 de Julio de 2017 +// +func (f *File) NewStyle(style string) (int, error) { + var cellXfsID, fontID, borderID, fillID int + s := f.stylesReader() + fs, err := parseFormatStyleSet(style) + if err != nil { + return cellXfsID, err + } + numFmtID := setNumFmt(s, fs) + + if fs.Font != nil { + font, _ := xml.Marshal(setFont(fs)) + s.Fonts.Count++ + s.Fonts.Font = append(s.Fonts.Font, &xlsxFont{ + Font: string(font[6 : len(font)-7]), + }) + fontID = s.Fonts.Count - 1 + } + + s.Borders.Count++ + s.Borders.Border = append(s.Borders.Border, setBorders(fs)) + borderID = s.Borders.Count - 1 + + s.Fills.Count++ + s.Fills.Fill = append(s.Fills.Fill, setFills(fs, true)) + fillID = s.Fills.Count - 1 + + applyAlignment, alignment := fs.Alignment != nil, setAlignment(fs) + applyProtection, protection := fs.Protection != nil, setProtection(fs) + cellXfsID = setCellXfs(s, fontID, numFmtID, fillID, borderID, applyAlignment, applyProtection, alignment, protection) + return cellXfsID, nil +} + +// NewConditionalStyle provides function to create style for conditional format +// by given style format. The parameters are the same as function NewStyle(). +// Note that the color field uses RGB color code and only support to set font, +// fills, alignment and borders currently. +func (f *File) NewConditionalStyle(style string) (int, error) { + s := f.stylesReader() + fs, err := parseFormatStyleSet(style) + if err != nil { + return 0, err + } + dxf := dxf{ + Fill: setFills(fs, false), + Alignment: setAlignment(fs), + Border: setBorders(fs), + } + if fs.Font != nil { + dxf.Font = setFont(fs) + } + dxfStr, _ := xml.Marshal(dxf) + if s.Dxfs == nil { + s.Dxfs = &xlsxDxfs{} + } + s.Dxfs.Count++ + s.Dxfs.Dxfs = append(s.Dxfs.Dxfs, &xlsxDxf{ + Dxf: string(dxfStr[5 : len(dxfStr)-6]), + }) + return s.Dxfs.Count - 1, nil +} + +// setFont provides function to add font style by given cell format settings. 
+func setFont(formatStyle *formatStyle) *font { + fontUnderlineType := map[string]string{"single": "single", "double": "double"} + if formatStyle.Font.Size < 1 { + formatStyle.Font.Size = 11 + } + if formatStyle.Font.Color == "" { + formatStyle.Font.Color = "#000000" + } + f := font{ + B: formatStyle.Font.Bold, + I: formatStyle.Font.Italic, + Sz: &attrValInt{Val: formatStyle.Font.Size}, + Color: &xlsxColor{RGB: getPaletteColor(formatStyle.Font.Color)}, + Name: &attrValString{Val: formatStyle.Font.Family}, + Family: &attrValInt{Val: 2}, + } + if f.Name.Val == "" { + f.Name.Val = "Calibri" + f.Scheme = &attrValString{Val: "minor"} + } + val, ok := fontUnderlineType[formatStyle.Font.Underline] + if ok { + f.U = &attrValString{Val: val} + } + return &f +} + +// setNumFmt provides function to check if number format code in the range of +// built-in values. +func setNumFmt(style *xlsxStyleSheet, formatStyle *formatStyle) int { + dp := "0." + numFmtID := 164 // Default custom number format code from 164. + if formatStyle.DecimalPlaces < 0 || formatStyle.DecimalPlaces > 30 { + formatStyle.DecimalPlaces = 2 + } + for i := 0; i < formatStyle.DecimalPlaces; i++ { + dp += "0" + } + if formatStyle.CustomNumFmt != nil { + return setCustomNumFmt(style, formatStyle) + } + _, ok := builtInNumFmt[formatStyle.NumFmt] + if !ok { + fc, currency := currencyNumFmt[formatStyle.NumFmt] + if !currency { + return setLangNumFmt(style, formatStyle) + } + fc = strings.Replace(fc, "0.00", dp, -1) + if formatStyle.NegRed { + fc = fc + ";[Red]" + fc + } + if style.NumFmts != nil { + numFmtID = style.NumFmts.NumFmt[len(style.NumFmts.NumFmt)-1].NumFmtID + 1 + nf := xlsxNumFmt{ + FormatCode: fc, + NumFmtID: numFmtID, + } + style.NumFmts.NumFmt = append(style.NumFmts.NumFmt, &nf) + style.NumFmts.Count++ + } else { + nf := xlsxNumFmt{ + FormatCode: fc, + NumFmtID: numFmtID, + } + numFmts := xlsxNumFmts{ + NumFmt: []*xlsxNumFmt{&nf}, + Count: 1, + } + style.NumFmts = &numFmts + } + return numFmtID + } + return formatStyle.NumFmt +} + +// setCustomNumFmt provides function to set custom number format code. +func setCustomNumFmt(style *xlsxStyleSheet, formatStyle *formatStyle) int { + nf := xlsxNumFmt{FormatCode: *formatStyle.CustomNumFmt} + if style.NumFmts != nil { + nf.NumFmtID = style.NumFmts.NumFmt[len(style.NumFmts.NumFmt)-1].NumFmtID + 1 + style.NumFmts.NumFmt = append(style.NumFmts.NumFmt, &nf) + style.NumFmts.Count++ + } else { + nf.NumFmtID = 164 + numFmts := xlsxNumFmts{ + NumFmt: []*xlsxNumFmt{&nf}, + Count: 1, + } + style.NumFmts = &numFmts + } + return nf.NumFmtID +} + +// setLangNumFmt provides function to set number format code with language. +func setLangNumFmt(style *xlsxStyleSheet, formatStyle *formatStyle) int { + numFmts, ok := langNumFmt[formatStyle.Lang] + if !ok { + return 0 + } + var fc string + fc, ok = numFmts[formatStyle.NumFmt] + if !ok { + return 0 + } + nf := xlsxNumFmt{FormatCode: fc} + if style.NumFmts != nil { + nf.NumFmtID = style.NumFmts.NumFmt[len(style.NumFmts.NumFmt)-1].NumFmtID + 1 + style.NumFmts.NumFmt = append(style.NumFmts.NumFmt, &nf) + style.NumFmts.Count++ + } else { + nf.NumFmtID = formatStyle.NumFmt + numFmts := xlsxNumFmts{ + NumFmt: []*xlsxNumFmt{&nf}, + Count: 1, + } + style.NumFmts = &numFmts + } + return nf.NumFmtID +} + +// setFills provides function to add fill elements in the styles.xml by given +// cell format settings. 
+func setFills(formatStyle *formatStyle, fg bool) *xlsxFill { + var patterns = []string{ + "none", + "solid", + "mediumGray", + "darkGray", + "lightGray", + "darkHorizontal", + "darkVertical", + "darkDown", + "darkUp", + "darkGrid", + "darkTrellis", + "lightHorizontal", + "lightVertical", + "lightDown", + "lightUp", + "lightGrid", + "lightTrellis", + "gray125", + "gray0625", + } + + var variants = []float64{ + 90, + 0, + 45, + 135, + } + + var fill xlsxFill + switch formatStyle.Fill.Type { + case "gradient": + if len(formatStyle.Fill.Color) != 2 { + break + } + var gradient xlsxGradientFill + switch formatStyle.Fill.Shading { + case 0, 1, 2, 3: + gradient.Degree = variants[formatStyle.Fill.Shading] + case 4: + gradient.Type = "path" + case 5: + gradient.Type = "path" + gradient.Bottom = 0.5 + gradient.Left = 0.5 + gradient.Right = 0.5 + gradient.Top = 0.5 + default: + break + } + var stops []*xlsxGradientFillStop + for index, color := range formatStyle.Fill.Color { + var stop xlsxGradientFillStop + stop.Position = float64(index) + stop.Color.RGB = getPaletteColor(color) + stops = append(stops, &stop) + } + gradient.Stop = stops + fill.GradientFill = &gradient + case "pattern": + if formatStyle.Fill.Pattern > 18 || formatStyle.Fill.Pattern < 0 { + break + } + if len(formatStyle.Fill.Color) < 1 { + break + } + var pattern xlsxPatternFill + pattern.PatternType = patterns[formatStyle.Fill.Pattern] + if fg { + pattern.FgColor.RGB = getPaletteColor(formatStyle.Fill.Color[0]) + } else { + pattern.BgColor.RGB = getPaletteColor(formatStyle.Fill.Color[0]) + } + fill.PatternFill = &pattern + } + return &fill +} + +// setAlignment provides function to formatting information pertaining to text +// alignment in cells. There are a variety of choices for how text is aligned +// both horizontally and vertically, as well as indentation settings, and so on. +func setAlignment(formatStyle *formatStyle) *xlsxAlignment { + var alignment xlsxAlignment + if formatStyle.Alignment != nil { + alignment.Horizontal = formatStyle.Alignment.Horizontal + alignment.Indent = formatStyle.Alignment.Indent + alignment.JustifyLastLine = formatStyle.Alignment.JustifyLastLine + alignment.ReadingOrder = formatStyle.Alignment.ReadingOrder + alignment.RelativeIndent = formatStyle.Alignment.RelativeIndent + alignment.ShrinkToFit = formatStyle.Alignment.ShrinkToFit + alignment.TextRotation = formatStyle.Alignment.TextRotation + alignment.Vertical = formatStyle.Alignment.Vertical + alignment.WrapText = formatStyle.Alignment.WrapText + } + return &alignment +} + +// setProtection provides function to set protection properties associated +// with the cell. +func setProtection(formatStyle *formatStyle) *xlsxProtection { + var protection xlsxProtection + if formatStyle.Protection != nil { + protection.Hidden = formatStyle.Protection.Hidden + protection.Locked = formatStyle.Protection.Locked + } + return &protection +} + +// setBorders provides function to add border elements in the styles.xml by +// given borders format settings. 
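+// Border styles are indexed 0-13, from "none" up to "slantDashDot"; entries
+// with a style index outside that range are ignored.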
+func setBorders(formatStyle *formatStyle) *xlsxBorder { + var styles = []string{ + "none", + "thin", + "medium", + "dashed", + "dotted", + "thick", + "double", + "hair", + "mediumDashed", + "dashDot", + "mediumDashDot", + "dashDotDot", + "mediumDashDotDot", + "slantDashDot", + } + + var border xlsxBorder + for _, v := range formatStyle.Border { + if 0 <= v.Style && v.Style < 14 { + var color xlsxColor + color.RGB = getPaletteColor(v.Color) + switch v.Type { + case "left": + border.Left.Style = styles[v.Style] + border.Left.Color = &color + case "right": + border.Right.Style = styles[v.Style] + border.Right.Color = &color + case "top": + border.Top.Style = styles[v.Style] + border.Top.Color = &color + case "bottom": + border.Bottom.Style = styles[v.Style] + border.Bottom.Color = &color + case "diagonalUp": + border.Diagonal.Style = styles[v.Style] + border.Diagonal.Color = &color + border.DiagonalUp = true + case "diagonalDown": + border.Diagonal.Style = styles[v.Style] + border.Diagonal.Color = &color + border.DiagonalDown = true + } + } + } + return &border +} + +// setCellXfs provides function to set describes all of the formatting for a +// cell. +func setCellXfs(style *xlsxStyleSheet, fontID, numFmtID, fillID, borderID int, applyAlignment, applyProtection bool, alignment *xlsxAlignment, protection *xlsxProtection) int { + var xf xlsxXf + xf.FontID = fontID + if fontID != 0 { + xf.ApplyFont = true + } + xf.NumFmtID = numFmtID + if numFmtID != 0 { + xf.ApplyNumberFormat = true + } + xf.FillID = fillID + xf.BorderID = borderID + style.CellXfs.Count++ + xf.Alignment = alignment + xf.ApplyAlignment = applyAlignment + if applyProtection { + xf.ApplyProtection = applyProtection + xf.Protection = protection + } + xfID := 0 + xf.XfID = &xfID + style.CellXfs.Xf = append(style.CellXfs.Xf, xf) + return style.CellXfs.Count - 1 +} + +// SetCellStyle provides function to add style attribute for cells by given +// worksheet name, coordinate area and style ID. Note that diagonalDown and +// diagonalUp type border should be use same color in the same coordinate area. 
+// +// For example create a borders of cell H9 on Sheet1: +// +// style, err := xlsx.NewStyle(`{"border":[{"type":"left","color":"0000FF","style":3},{"type":"top","color":"00FF00","style":4},{"type":"bottom","color":"FFFF00","style":5},{"type":"right","color":"FF0000","style":6},{"type":"diagonalDown","color":"A020F0","style":7},{"type":"diagonalUp","color":"A020F0","style":8}]}`) +// if err != nil { +// fmt.Println(err) +// } +// xlsx.SetCellStyle("Sheet1", "H9", "H9", style) +// +// Set gradient fill with vertical variants shading styles for cell H9 on +// Sheet1: +// +// style, err := xlsx.NewStyle(`{"fill":{"type":"gradient","color":["#FFFFFF","#E0EBF5"],"shading":1}}`) +// if err != nil { +// fmt.Println(err) +// } +// xlsx.SetCellStyle("Sheet1", "H9", "H9", style) +// +// Set solid style pattern fill for cell H9 on Sheet1: +// +// style, err := xlsx.NewStyle(`{"fill":{"type":"pattern","color":["#E0EBF5"],"pattern":1}}`) +// if err != nil { +// fmt.Println(err) +// } +// xlsx.SetCellStyle("Sheet1", "H9", "H9", style) +// +// Set alignment style for cell H9 on Sheet1: +// +// style, err := xlsx.NewStyle(`{"alignment":{"horizontal":"center","ident":1,"justify_last_line":true,"reading_order":0,"relative_indent":1,"shrink_to_fit":true,"text_rotation":45,"vertical":"","wrap_text":true}}`) +// if err != nil { +// fmt.Println(err) +// } +// xlsx.SetCellStyle("Sheet1", "H9", "H9", style) +// +// Dates and times in Excel are represented by real numbers, for example "Apr 7 +// 2017 12:00 PM" is represented by the number 42920.5. Set date and time format +// for cell H9 on Sheet1: +// +// xlsx.SetCellValue("Sheet1", "H9", 42920.5) +// style, err := xlsx.NewStyle(`{"number_format": 22}`) +// if err != nil { +// fmt.Println(err) +// } +// xlsx.SetCellStyle("Sheet1", "H9", "H9", style) +// +// Set font style for cell H9 on Sheet1: +// +// style, err := xlsx.NewStyle(`{"font":{"bold":true,"italic":true,"family":"Berlin Sans FB Demi","size":36,"color":"#777777"}}`) +// if err != nil { +// fmt.Println(err) +// } +// xlsx.SetCellStyle("Sheet1", "H9", "H9", style) +// +// Hide and lock for cell H9 on Sheet1: +// +// style, err := xlsx.NewStyle(`{"protection":{"hidden":true, "locked":true}}`) +// if err != nil { +// fmt.Println(err) +// } +// xlsx.SetCellStyle("Sheet1", "H9", "H9", style) +// +func (f *File) SetCellStyle(sheet, hcell, vcell string, styleID int) { + hcell = strings.ToUpper(hcell) + vcell = strings.ToUpper(vcell) + + // Coordinate conversion, convert C1:B3 to 2,0,1,2. + hcol := string(strings.Map(letterOnlyMapF, hcell)) + hrow, err := strconv.Atoi(strings.Map(intOnlyMapF, hcell)) + if err != nil { + return + } + hyAxis := hrow - 1 + hxAxis := TitleToNumber(hcol) + + vcol := string(strings.Map(letterOnlyMapF, vcell)) + vrow, err := strconv.Atoi(strings.Map(intOnlyMapF, vcell)) + if err != nil { + return + } + vyAxis := vrow - 1 + vxAxis := TitleToNumber(vcol) + + // Correct the coordinate area, such correct C1:B3 to B1:C3. + if vxAxis < hxAxis { + vxAxis, hxAxis = hxAxis, vxAxis + } + + if vyAxis < hyAxis { + vyAxis, hyAxis = hyAxis, vyAxis + } + + xlsx := f.workSheetReader(sheet) + + completeRow(xlsx, vyAxis+1, vxAxis+1) + completeCol(xlsx, vyAxis+1, vxAxis+1) + + for r := hyAxis; r <= vyAxis; r++ { + for k := hxAxis; k <= vxAxis; k++ { + xlsx.SheetData.Row[r].C[k].S = styleID + } + } +} + +// SetConditionalFormat provides function to create conditional formatting rule +// for cell value. 
Conditional formatting is a feature of Excel which allows you +// to apply a format to a cell or a range of cells based on certain criteria. +// +// The type option is a required parameter and it has no default value. +// Allowable type values and their associated parameters are: +// +// Type | Parameters +// ---------------+------------------------------------ +// cell | criteria +// | value +// | minimum +// | maximum +// date | criteria +// | value +// | minimum +// | maximum +// time_period | criteria +// text | criteria +// | value +// average | criteria +// duplicate | (none) +// unique | (none) +// top | criteria +// | value +// bottom | criteria +// | value +// blanks | (none) +// no_blanks | (none) +// errors | (none) +// no_errors | (none) +// 2_color_scale | min_type +// | max_type +// | min_value +// | max_value +// | min_color +// | max_color +// 3_color_scale | min_type +// | mid_type +// | max_type +// | min_value +// | mid_value +// | max_value +// | min_color +// | mid_color +// | max_color +// data_bar | min_type +// | max_type +// | min_value +// | max_value +// | bar_color +// formula | criteria +// +// The criteria parameter is used to set the criteria by which the cell data +// will be evaluated. It has no default value. The most common criteria as +// applied to {'type': 'cell'} are: +// +// between | +// not between | +// equal to | == +// not equal to | != +// greater than | > +// less than | < +// greater than or equal to | >= +// less than or equal to | <= +// +// You can either use Excel's textual description strings, in the first column +// above, or the more common symbolic alternatives. +// +// Additional criteria which are specific to other conditional format types are +// shown in the relevant sections below. +// +// value: The value is generally used along with the criteria parameter to set +// the rule by which the cell data will be evaluated: +// +// xlsx.SetConditionalFormat("Sheet1", "D1:D10", fmt.Sprintf(`[{"type":"cell","criteria":">","format":%d,"value":"6"}]`, format)) +// +// The value property can also be an cell reference: +// +// xlsx.SetConditionalFormat("Sheet1", "D1:D10", fmt.Sprintf(`[{"type":"cell","criteria":">","format":%d,"value":"$C$1"}]`, format)) +// +// type: format - The format parameter is used to specify the format that will +// be applied to the cell when the conditional formatting criterion is met. The +// format is created using the NewConditionalStyle() method in the same way as +// cell formats: +// +// format, err = xlsx.NewConditionalStyle(`{"font":{"color":"#9A0511"},"fill":{"type":"pattern","color":["#FEC7CE"],"pattern":1}}`) +// if err != nil { +// fmt.Println(err) +// } +// xlsx.SetConditionalFormat("Sheet1", "A1:A10", fmt.Sprintf(`[{"type":"cell","criteria":">","format":%d,"value":"6"}]`, format)) +// +// Note: In Excel, a conditional format is superimposed over the existing cell +// format and not all cell format properties can be modified. Properties that +// cannot be modified in a conditional format are font name, font size, +// superscript and subscript, diagonal borders, all alignment properties and all +// protection properties. +// +// Excel specifies some default formats to be used with conditional formatting. +// These can be replicated using the following excelize formats: +// +// // Rose format for bad conditional. +// format1, err = xlsx.NewConditionalStyle(`{"font":{"color":"#9A0511"},"fill":{"type":"pattern","color":["#FEC7CE"],"pattern":1}}`) +// +// // Light yellow format for neutral conditional. 
+// format2, err = xlsx.NewConditionalStyle(`{"font":{"color":"#9B5713"},"fill":{"type":"pattern","color":["#FEEAA0"],"pattern":1}}`) +// +// // Light green format for good conditional. +// format3, err = xlsx.NewConditionalStyle(`{"font":{"color":"#09600B"},"fill":{"type":"pattern","color":["#C7EECF"],"pattern":1}}`) +// +// type: minimum - The minimum parameter is used to set the lower limiting value +// when the criteria is either "between" or "not between". +// +// // Hightlight cells rules: between... +// xlsx.SetConditionalFormat("Sheet1", "A1:A10", fmt.Sprintf(`[{"type":"cell","criteria":"between","format":%d,"minimum":"6","maximum":"8"}]`, format)) +// +// type: maximum - The maximum parameter is used to set the upper limiting value +// when the criteria is either "between" or "not between". See the previous +// example. +// +// type: average - The average type is used to specify Excel's "Average" style +// conditional format: +// +// // Top/Bottom rules: Above Average... +// xlsx.SetConditionalFormat("Sheet1", "A1:A10", fmt.Sprintf(`[{"type":"average","criteria":"=","format":%d, "above_average": true}]`, format1)) +// +// // Top/Bottom rules: Below Average... +// xlsx.SetConditionalFormat("Sheet1", "B1:B10", fmt.Sprintf(`[{"type":"average","criteria":"=","format":%d, "above_average": false}]`, format2)) +// +// type: duplicate - The duplicate type is used to highlight duplicate cells in a range: +// +// // Hightlight cells rules: Duplicate Values... +// xlsx.SetConditionalFormat("Sheet1", "A1:A10", fmt.Sprintf(`[{"type":"duplicate","criteria":"=","format":%d}]`, format)) +// +// type: unique - The unique type is used to highlight unique cells in a range: +// +// // Hightlight cells rules: Not Equal To... +// xlsx.SetConditionalFormat("Sheet1", "A1:A10", fmt.Sprintf(`[{"type":"unique","criteria":"=","format":%d}]`, format)) +// +// type: top - The top type is used to specify the top n values by number or percentage in a range: +// +// // Top/Bottom rules: Top 10. +// xlsx.SetConditionalFormat("Sheet1", "H1:H10", fmt.Sprintf(`[{"type":"top","criteria":"=","format":%d,"value":"6"}]`, format)) +// +// The criteria can be used to indicate that a percentage condition is required: +// +// xlsx.SetConditionalFormat("Sheet1", "A1:A10", fmt.Sprintf(`[{"type":"top","criteria":"=","format":%d,"value":"6","percent":true}]`, format)) +// +// type: 2_color_scale - The 2_color_scale type is used to specify Excel's "2 +// Color Scale" style conditional format: +// +// // Color scales: 2 color. +// xlsx.SetConditionalFormat("Sheet1", "A1:A10", `[{"type":"2_color_scale","criteria":"=","min_type":"min","max_type":"max","min_color":"#F8696B","max_color":"#63BE7B"}]`) +// +// This conditional type can be modified with min_type, max_type, min_value, +// max_value, min_color and max_color, see below. +// +// type: 3_color_scale - The 3_color_scale type is used to specify Excel's "3 +// Color Scale" style conditional format: +// +// // Color scales: 3 color. +// xlsx.SetConditionalFormat("Sheet1", "A1:A10", `[{"type":"3_color_scale","criteria":"=","min_type":"min","mid_type":"percentile","max_type":"max","min_color":"#F8696B","mid_color":"#FFEB84","max_color":"#63BE7B"}]`) +// +// This conditional type can be modified with min_type, mid_type, max_type, +// min_value, mid_value, max_value, min_color, mid_color and max_color, see +// below. +// +// type: data_bar - The data_bar type is used to specify Excel's "Data Bar" +// style conditional format. 
+// +// min_type - The min_type and max_type properties are available when the conditional formatting type is 2_color_scale, 3_color_scale or data_bar. The mid_type is available for 3_color_scale. The properties are used as follows: +// +// // Data Bars: Gradient Fill. +// xlsx.SetConditionalFormat("Sheet1", "K1:K10", `[{"type":"data_bar", "criteria":"=", "min_type":"min","max_type":"max","bar_color":"#638EC6"}]`) +// +// The available min/mid/max types are: +// +// min (for min_type only) +// num +// percent +// percentile +// formula +// max (for max_type only) +// +// mid_type - Used for 3_color_scale. Same as min_type, see above. +// +// max_type - Same as min_type, see above. +// +// min_value - The min_value and max_value properties are available when the +// conditional formatting type is 2_color_scale, 3_color_scale or data_bar. The +// mid_value is available for 3_color_scale. +// +// mid_value - Used for 3_color_scale. Same as min_value, see above. +// +// max_value - Same as min_value, see above. +// +// min_color - The min_color and max_color properties are available when the +// conditional formatting type is 2_color_scale, 3_color_scale or data_bar. +// The mid_color is available for 3_color_scale. The properties are used as +// follows: +// +// // Color scales: 3 color. +// xlsx.SetConditionalFormat("Sheet1", "B1:B10", `[{"type":"3_color_scale","criteria":"=","min_type":"min","mid_type":"percentile","max_type":"max","min_color":"#F8696B","mid_color":"#FFEB84","max_color":"#63BE7B"}]`) +// +// mid_color - Used for 3_color_scale. Same as min_color, see above. +// +// max_color - Same as min_color, see above. +// +// bar_color - Used for data_bar. Same as min_color, see above. +// +func (f *File) SetConditionalFormat(sheet, area, formatSet string) error { + var format []*formatConditional + err := json.Unmarshal([]byte(formatSet), &format) + if err != nil { + return err + } + drawContFmtFunc := map[string]func(p int, ct string, fmtCond *formatConditional) *xlsxCfRule{ + "cellIs": drawCondFmtCellIs, + "top10": drawCondFmtTop10, + "aboveAverage": drawCondFmtAboveAverage, + "duplicateValues": drawCondFmtDuplicateUniqueValues, + "uniqueValues": drawCondFmtDuplicateUniqueValues, + "2_color_scale": drawCondFmtColorScale, + "3_color_scale": drawCondFmtColorScale, + "dataBar": drawCondFmtDataBar, + "expression": drawConfFmtExp, + } + + xlsx := f.workSheetReader(sheet) + cfRule := []*xlsxCfRule{} + for p, v := range format { + var vt, ct string + var ok bool + // "type" is a required parameter, check for valid validation types. + vt, ok = validType[v.Type] + if ok { + // Check for valid criteria types. + ct, ok = criteriaType[v.Criteria] + if ok || vt == "expression" { + drawfunc, ok := drawContFmtFunc[vt] + if ok { + cfRule = append(cfRule, drawfunc(p, ct, v)) + } + } + } + } + + xlsx.ConditionalFormatting = append(xlsx.ConditionalFormatting, &xlsxConditionalFormatting{ + SQRef: area, + CfRule: cfRule, + }) + return err +} + +// drawCondFmtCellIs provides function to create conditional formatting rule for +// cell value (include between, not between, equal, not equal, greater than and +// less than) by given priority, criteria type and format settings. +func drawCondFmtCellIs(p int, ct string, format *formatConditional) *xlsxCfRule { + c := &xlsxCfRule{ + Priority: p + 1, + Type: validType[format.Type], + Operator: ct, + DxfID: &format.Format, + } + // "between" and "not between" criteria require 2 values. 
+ _, ok := map[string]bool{"between": true, "notBetween": true}[ct] + if ok { + c.Formula = append(c.Formula, format.Minimum) + c.Formula = append(c.Formula, format.Maximum) + } + _, ok = map[string]bool{"equal": true, "notEqual": true, "greaterThan": true, "lessThan": true}[ct] + if ok { + c.Formula = append(c.Formula, format.Value) + } + return c +} + +// drawCondFmtTop10 provides function to create conditional formatting rule for +// top N (default is top 10) by given priority, criteria type and format +// settings. +func drawCondFmtTop10(p int, ct string, format *formatConditional) *xlsxCfRule { + c := &xlsxCfRule{ + Priority: p + 1, + Type: validType[format.Type], + Rank: 10, + DxfID: &format.Format, + Percent: format.Percent, + } + rank, err := strconv.Atoi(format.Value) + if err == nil { + c.Rank = rank + } + return c +} + +// drawCondFmtAboveAverage provides function to create conditional formatting +// rule for above average and below average by given priority, criteria type and +// format settings. +func drawCondFmtAboveAverage(p int, ct string, format *formatConditional) *xlsxCfRule { + return &xlsxCfRule{ + Priority: p + 1, + Type: validType[format.Type], + AboveAverage: &format.AboveAverage, + DxfID: &format.Format, + } +} + +// drawCondFmtDuplicateUniqueValues provides function to create conditional +// formatting rule for duplicate and unique values by given priority, criteria +// type and format settings. +func drawCondFmtDuplicateUniqueValues(p int, ct string, format *formatConditional) *xlsxCfRule { + return &xlsxCfRule{ + Priority: p + 1, + Type: validType[format.Type], + DxfID: &format.Format, + } +} + +// drawCondFmtColorScale provides function to create conditional formatting rule +// for color scale (include 2 color scale and 3 color scale) by given priority, +// criteria type and format settings. +func drawCondFmtColorScale(p int, ct string, format *formatConditional) *xlsxCfRule { + c := &xlsxCfRule{ + Priority: p + 1, + Type: "colorScale", + ColorScale: &xlsxColorScale{ + Cfvo: []*xlsxCfvo{ + {Type: format.MinType}, + }, + Color: []*xlsxColor{ + {RGB: getPaletteColor(format.MinColor)}, + }, + }, + } + if validType[format.Type] == "3_color_scale" { + c.ColorScale.Cfvo = append(c.ColorScale.Cfvo, &xlsxCfvo{Type: format.MidType, Val: 50}) + c.ColorScale.Color = append(c.ColorScale.Color, &xlsxColor{RGB: getPaletteColor(format.MidColor)}) + } + c.ColorScale.Cfvo = append(c.ColorScale.Cfvo, &xlsxCfvo{Type: format.MaxType}) + c.ColorScale.Color = append(c.ColorScale.Color, &xlsxColor{RGB: getPaletteColor(format.MaxColor)}) + return c +} + +// drawCondFmtDataBar provides function to create conditional formatting rule +// for data bar by given priority, criteria type and format settings. +func drawCondFmtDataBar(p int, ct string, format *formatConditional) *xlsxCfRule { + return &xlsxCfRule{ + Priority: p + 1, + Type: validType[format.Type], + DataBar: &xlsxDataBar{ + Cfvo: []*xlsxCfvo{{Type: format.MinType}, {Type: format.MaxType}}, + Color: []*xlsxColor{{RGB: getPaletteColor(format.BarColor)}}, + }, + } +} + +// drawConfFmtExp provides function to create conditional formatting rule for +// expression by given priority, criteria type and format settings. +func drawConfFmtExp(p int, ct string, format *formatConditional) *xlsxCfRule { + return &xlsxCfRule{ + Priority: p + 1, + Type: validType[format.Type], + Formula: []string{format.Criteria}, + DxfID: &format.Format, + } +} + +// getPaletteColor provides function to convert the RBG color by given string. 
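+// For example, a color given as "#1b2e4a" is returned as "FF1B2E4A".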
+func getPaletteColor(color string) string { + return "FF" + strings.Replace(strings.ToUpper(color), "#", "", -1) +} + +// themeReader provides function to get the pointer to the xl/theme/theme1.xml +// structure after deserialization. +func (f *File) themeReader() *xlsxTheme { + var theme xlsxTheme + _ = xml.Unmarshal([]byte(f.readXML("xl/theme/theme1.xml")), &theme) + return &theme +} + +// ThemeColor applied the color with tint value. +func ThemeColor(baseColor string, tint float64) string { + if tint == 0 { + return "FF" + baseColor + } + r, _ := strconv.ParseInt(baseColor[0:2], 16, 64) + g, _ := strconv.ParseInt(baseColor[2:4], 16, 64) + b, _ := strconv.ParseInt(baseColor[4:6], 16, 64) + h, s, l := RGBToHSL(uint8(r), uint8(g), uint8(b)) + if tint < 0 { + l *= (1 + tint) + } else { + l = l*(1-tint) + (1 - (1 - tint)) + } + br, bg, bb := HSLToRGB(h, s, l) + return fmt.Sprintf("FF%02X%02X%02X", br, bg, bb) +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/table.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/table.go new file mode 100644 index 000000000..941b52fdb --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/table.go @@ -0,0 +1,451 @@ +package excelize + +import ( + "encoding/json" + "encoding/xml" + "fmt" + "regexp" + "strconv" + "strings" +) + +// parseFormatTableSet provides function to parse the format settings of the +// table with default value. +func parseFormatTableSet(formatSet string) (*formatTable, error) { + format := formatTable{ + TableStyle: "", + ShowRowStripes: true, + } + err := json.Unmarshal([]byte(formatSet), &format) + return &format, err +} + +// AddTable provides the method to add table in a worksheet by given worksheet +// name, coordinate area and format set. For example, create a table of A1:D5 +// on Sheet1: +// +// xlsx.AddTable("Sheet1", "A1", "D5", ``) +// +// Create a table of F2:H6 on Sheet2 with format set: +// +// xlsx.AddTable("Sheet2", "F2", "H6", `{"table_name":"table","table_style":"TableStyleMedium2", "show_first_column":true,"show_last_column":true,"show_row_stripes":false,"show_column_stripes":true}`) +// +// Note that the table at least two lines include string type header. Multiple +// tables coordinate areas can't have an intersection. +// +// table_name: The name of the table, in the same worksheet name of the table should be unique +// +// table_style: The built-in table style names +// +// TableStyleLight1 - TableStyleLight21 +// TableStyleMedium1 - TableStyleMedium28 +// TableStyleDark1 - TableStyleDark11 +// +func (f *File) AddTable(sheet, hcell, vcell, format string) error { + formatSet, err := parseFormatTableSet(format) + if err != nil { + return err + } + hcell = strings.ToUpper(hcell) + vcell = strings.ToUpper(vcell) + // Coordinate conversion, convert C1:B3 to 2,0,1,2. + hcol := string(strings.Map(letterOnlyMapF, hcell)) + hrow, _ := strconv.Atoi(strings.Map(intOnlyMapF, hcell)) + hyAxis := hrow - 1 + hxAxis := TitleToNumber(hcol) + + vcol := string(strings.Map(letterOnlyMapF, vcell)) + vrow, _ := strconv.Atoi(strings.Map(intOnlyMapF, vcell)) + vyAxis := vrow - 1 + vxAxis := TitleToNumber(vcol) + if vxAxis < hxAxis { + vxAxis, hxAxis = hxAxis, vxAxis + } + if vyAxis < hyAxis { + vyAxis, hyAxis = hyAxis, vyAxis + } + tableID := f.countTables() + 1 + sheetRelationshipsTableXML := "../tables/table" + strconv.Itoa(tableID) + ".xml" + tableXML := strings.Replace(sheetRelationshipsTableXML, "..", "xl", -1) + // Add first table for given sheet. 
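+ // Register the table part relationship on the worksheet, write the table
+ // XML and declare its content type.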
+ rID := f.addSheetRelationships(sheet, SourceRelationshipTable, sheetRelationshipsTableXML, "") + f.addSheetTable(sheet, rID) + f.addTable(sheet, tableXML, hxAxis, hyAxis, vxAxis, vyAxis, tableID, formatSet) + f.addContentTypePart(tableID, "table") + return err +} + +// countTables provides function to get table files count storage in the folder +// xl/tables. +func (f *File) countTables() int { + count := 0 + for k := range f.XLSX { + if strings.Contains(k, "xl/tables/table") { + count++ + } + } + return count +} + +// addSheetTable provides function to add tablePart element to +// xl/worksheets/sheet%d.xml by given worksheet name and relationship index. +func (f *File) addSheetTable(sheet string, rID int) { + xlsx := f.workSheetReader(sheet) + table := &xlsxTablePart{ + RID: "rId" + strconv.Itoa(rID), + } + if xlsx.TableParts == nil { + xlsx.TableParts = &xlsxTableParts{} + } + xlsx.TableParts.Count++ + xlsx.TableParts.TableParts = append(xlsx.TableParts.TableParts, table) +} + +// addTable provides function to add table by given worksheet name, coordinate +// area and format set. +func (f *File) addTable(sheet, tableXML string, hxAxis, hyAxis, vxAxis, vyAxis, i int, formatSet *formatTable) { + // Correct the minimum number of rows, the table at least two lines. + if hyAxis == vyAxis { + vyAxis++ + } + // Correct table reference coordinate area, such correct C1:B3 to B1:C3. + ref := ToAlphaString(hxAxis) + strconv.Itoa(hyAxis+1) + ":" + ToAlphaString(vxAxis) + strconv.Itoa(vyAxis+1) + tableColumn := []*xlsxTableColumn{} + idx := 0 + for i := hxAxis; i <= vxAxis; i++ { + idx++ + cell := ToAlphaString(i) + strconv.Itoa(hyAxis+1) + name := f.GetCellValue(sheet, cell) + if _, err := strconv.Atoi(name); err == nil { + f.SetCellStr(sheet, cell, name) + } + if name == "" { + name = "Column" + strconv.Itoa(idx) + f.SetCellStr(sheet, cell, name) + } + tableColumn = append(tableColumn, &xlsxTableColumn{ + ID: idx, + Name: name, + }) + } + name := formatSet.TableName + if name == "" { + name = "Table" + strconv.Itoa(i) + } + t := xlsxTable{ + XMLNS: NameSpaceSpreadSheet, + ID: i, + Name: name, + DisplayName: name, + Ref: ref, + AutoFilter: &xlsxAutoFilter{ + Ref: ref, + }, + TableColumns: &xlsxTableColumns{ + Count: idx, + TableColumn: tableColumn, + }, + TableStyleInfo: &xlsxTableStyleInfo{ + Name: formatSet.TableStyle, + ShowFirstColumn: formatSet.ShowFirstColumn, + ShowLastColumn: formatSet.ShowLastColumn, + ShowRowStripes: formatSet.ShowRowStripes, + ShowColumnStripes: formatSet.ShowColumnStripes, + }, + } + table, _ := xml.Marshal(t) + f.saveFileList(tableXML, table) +} + +// parseAutoFilterSet provides function to parse the settings of the auto +// filter. +func parseAutoFilterSet(formatSet string) (*formatAutoFilter, error) { + format := formatAutoFilter{} + err := json.Unmarshal([]byte(formatSet), &format) + return &format, err +} + +// AutoFilter provides the method to add auto filter in a worksheet by given +// worksheet name, coordinate area and settings. An autofilter in Excel is a +// way of filtering a 2D range of data based on some simple criteria. For +// example applying an autofilter to a cell range A1:D4 in the Sheet1: +// +// err = xlsx.AutoFilter("Sheet1", "A1", "D4", "") +// +// Filter data in an autofilter: +// +// err = xlsx.AutoFilter("Sheet1", "A1", "D4", `{"column":"B","expression":"x != blanks"}`) +// +// column defines the filter columns in a autofilter range based on simple +// criteria +// +// It isn't sufficient to just specify the filter condition. 
You must also +// hide any rows that don't match the filter condition. Rows are hidden using +// the SetRowVisible() method. Excelize can't filter rows automatically since +// this isn't part of the file format. +// +// Setting a filter criteria for a column: +// +// expression defines the conditions, the following operators are available +// for setting the filter criteria: +// +// == +// != +// > +// < +// >= +// <= +// and +// or +// +// An expression can comprise a single statement or two statements separated +// by the 'and' and 'or' operators. For example: +// +// x < 2000 +// x > 2000 +// x == 2000 +// x > 2000 and x < 5000 +// x == 2000 or x == 5000 +// +// Filtering of blank or non-blank data can be achieved by using a value of +// Blanks or NonBlanks in the expression: +// +// x == Blanks +// x == NonBlanks +// +// Excel also allows some simple string matching operations: +// +// x == b* // begins with b +// x != b* // doesnt begin with b +// x == *b // ends with b +// x != *b // doesnt end with b +// x == *b* // contains b +// x != *b* // doesn't contains b +// +// You can also use '*' to match any character or number and '?' to match any +// single character or number. No other regular expression quantifier is +// supported by Excel's filters. Excel's regular expression characters can be +// escaped using '~'. +// +// The placeholder variable x in the above examples can be replaced by any +// simple string. The actual placeholder name is ignored internally so the +// following are all equivalent: +// +// x < 2000 +// col < 2000 +// Price < 2000 +// +func (f *File) AutoFilter(sheet, hcell, vcell, format string) error { + formatSet, _ := parseAutoFilterSet(format) + + hcell = strings.ToUpper(hcell) + vcell = strings.ToUpper(vcell) + + // Coordinate conversion, convert C1:B3 to 2,0,1,2. + hcol := string(strings.Map(letterOnlyMapF, hcell)) + hrow, _ := strconv.Atoi(strings.Map(intOnlyMapF, hcell)) + hyAxis := hrow - 1 + hxAxis := TitleToNumber(hcol) + + vcol := string(strings.Map(letterOnlyMapF, vcell)) + vrow, _ := strconv.Atoi(strings.Map(intOnlyMapF, vcell)) + vyAxis := vrow - 1 + vxAxis := TitleToNumber(vcol) + + if vxAxis < hxAxis { + vxAxis, hxAxis = hxAxis, vxAxis + } + + if vyAxis < hyAxis { + vyAxis, hyAxis = hyAxis, vyAxis + } + ref := ToAlphaString(hxAxis) + strconv.Itoa(hyAxis+1) + ":" + ToAlphaString(vxAxis) + strconv.Itoa(vyAxis+1) + refRange := vxAxis - hxAxis + return f.autoFilter(sheet, ref, refRange, hxAxis, formatSet) +} + +// autoFilter provides function to extract the tokens from the filter +// expression. The tokens are mainly non-whitespace groups. 
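+// For example, the expression "x > 2000 and x < 5000" is split into the seven
+// tokens "x", ">", "2000", "and", "x", "<" and "5000".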
+func (f *File) autoFilter(sheet, ref string, refRange, hxAxis int, formatSet *formatAutoFilter) error { + xlsx := f.workSheetReader(sheet) + if xlsx.SheetPr != nil { + xlsx.SheetPr.FilterMode = true + } + xlsx.SheetPr = &xlsxSheetPr{FilterMode: true} + filter := &xlsxAutoFilter{ + Ref: ref, + } + xlsx.AutoFilter = filter + if formatSet.Column == "" || formatSet.Expression == "" { + return nil + } + col := TitleToNumber(formatSet.Column) + offset := col - hxAxis + if offset < 0 || offset > refRange { + return fmt.Errorf("Incorrect index of column '%s'", formatSet.Column) + } + filter.FilterColumn = &xlsxFilterColumn{ + ColID: offset, + } + re := regexp.MustCompile(`"(?:[^"]|"")*"|\S+`) + token := re.FindAllString(formatSet.Expression, -1) + if len(token) != 3 && len(token) != 7 { + return fmt.Errorf("Incorrect number of tokens in criteria '%s'", formatSet.Expression) + } + expressions, tokens, err := f.parseFilterExpression(formatSet.Expression, token) + if err != nil { + return err + } + f.writeAutoFilter(filter, expressions, tokens) + xlsx.AutoFilter = filter + return nil +} + +// writeAutoFilter provides function to check for single or double custom filters +// as default filters and handle them accordingly. +func (f *File) writeAutoFilter(filter *xlsxAutoFilter, exp []int, tokens []string) { + if len(exp) == 1 && exp[0] == 2 { + // Single equality. + filters := []*xlsxFilter{} + filters = append(filters, &xlsxFilter{Val: tokens[0]}) + filter.FilterColumn.Filters = &xlsxFilters{Filter: filters} + } else if len(exp) == 3 && exp[0] == 2 && exp[1] == 1 && exp[2] == 2 { + // Double equality with "or" operator. + filters := []*xlsxFilter{} + for _, v := range tokens { + filters = append(filters, &xlsxFilter{Val: v}) + } + filter.FilterColumn.Filters = &xlsxFilters{Filter: filters} + } else { + // Non default custom filter. + expRel := map[int]int{0: 0, 1: 2} + andRel := map[int]bool{0: true, 1: false} + for k, v := range tokens { + f.writeCustomFilter(filter, exp[expRel[k]], v) + if k == 1 { + filter.FilterColumn.CustomFilters.And = andRel[exp[k]] + } + } + } +} + +// writeCustomFilter provides function to write the element. +func (f *File) writeCustomFilter(filter *xlsxAutoFilter, operator int, val string) { + operators := map[int]string{ + 1: "lessThan", + 2: "equal", + 3: "lessThanOrEqual", + 4: "greaterThan", + 5: "notEqual", + 6: "greaterThanOrEqual", + 22: "equal", + } + customFilter := xlsxCustomFilter{ + Operator: operators[operator], + Val: val, + } + if filter.FilterColumn.CustomFilters != nil { + filter.FilterColumn.CustomFilters.CustomFilter = append(filter.FilterColumn.CustomFilters.CustomFilter, &customFilter) + } else { + customFilters := []*xlsxCustomFilter{} + customFilters = append(customFilters, &customFilter) + filter.FilterColumn.CustomFilters = &xlsxCustomFilters{CustomFilter: customFilters} + } +} + +// parseFilterExpression provides function to converts the tokens of a possibly +// conditional expression into 1 or 2 sub expressions for further parsing. +// +// Examples: +// +// ('x', '==', 2000) -> exp1 +// ('x', '>', 2000, 'and', 'x', '<', 5000) -> exp1 and exp2 +// +func (f *File) parseFilterExpression(expression string, tokens []string) ([]int, []string, error) { + expressions := []int{} + t := []string{} + if len(tokens) == 7 { + // The number of tokens will be either 3 (for 1 expression) or 7 (for 2 + // expressions). 
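+ // tokens[3] holds the conjunction; conditional stays 0 for "and" and is
+ // set to 1 for "or" (or "||").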
+ conditional := 0 + c := tokens[3] + re, _ := regexp.Match(`(or|\|\|)`, []byte(c)) + if re { + conditional = 1 + } + expression1, token1, err := f.parseFilterTokens(expression, tokens[0:3]) + if err != nil { + return expressions, t, err + } + expression2, token2, err := f.parseFilterTokens(expression, tokens[4:7]) + if err != nil { + return expressions, t, err + } + expressions = []int{expression1[0], conditional, expression2[0]} + t = []string{token1, token2} + } else { + exp, token, err := f.parseFilterTokens(expression, tokens) + if err != nil { + return expressions, t, err + } + expressions = exp + t = []string{token} + } + return expressions, t, nil +} + +// parseFilterTokens provides function to parse the 3 tokens of a filter +// expression and return the operator and token. +func (f *File) parseFilterTokens(expression string, tokens []string) ([]int, string, error) { + operators := map[string]int{ + "==": 2, + "=": 2, + "=~": 2, + "eq": 2, + "!=": 5, + "!~": 5, + "ne": 5, + "<>": 5, + "<": 1, + "<=": 3, + ">": 4, + ">=": 6, + } + operator, ok := operators[strings.ToLower(tokens[1])] + if !ok { + // Convert the operator from a number to a descriptive string. + return []int{}, "", fmt.Errorf("Unknown operator: %s", tokens[1]) + } + token := tokens[2] + // Special handling for Blanks/NonBlanks. + re, _ := regexp.Match("blanks|nonblanks", []byte(strings.ToLower(token))) + if re { + // Only allow Equals or NotEqual in this context. + if operator != 2 && operator != 5 { + return []int{operator}, token, fmt.Errorf("The operator '%s' in expression '%s' is not valid in relation to Blanks/NonBlanks'", tokens[1], expression) + } + token = strings.ToLower(token) + // The operator should always be 2 (=) to flag a "simple" equality in + // the binary record. Therefore we convert <> to =. + if token == "blanks" { + if operator == 5 { + token = " " + } + } else { + if operator == 5 { + operator = 2 + token = "blanks" + } else { + operator = 5 + token = " " + } + } + } + // if the string token contains an Excel match character then change the + // operator type to indicate a non "simple" equality. + re, _ = regexp.Match("[*?]", []byte(token)) + if operator == 2 && re { + operator = 22 + } + return []int{operator}, token, nil +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/templates.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/templates.go new file mode 100644 index 000000000..ef6058cbc --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/templates.go @@ -0,0 +1,31 @@ +// This file contains default templates for XML files we don't yet populated +// based on content. + +package excelize + +// XMLHeader define an XML declaration can also contain a standalone declaration. +const XMLHeader = "\n" + +var ( + // XMLHeaderByte define an XML declaration can also contain a standalone + // declaration. 
+ XMLHeaderByte = []byte(XMLHeader) +) + +const templateDocpropsApp = `0Go Excelize` + +const templateContentTypes = `` + +const templateWorkbook = `` + +const templateStyles = `` + +const templateSheet = `` + +const templateWorkbookRels = `` + +const templateDocpropsCore = `xuri2006-09-16T00:00:00Z2006-09-16T00:00:00Z` + +const templateRels = `` + +const templateTheme = `` diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/vmlDrawing.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/vmlDrawing.go new file mode 100644 index 000000000..307186a95 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/vmlDrawing.go @@ -0,0 +1,135 @@ +package excelize + +import "encoding/xml" + +// vmlDrawing directly maps the root element in the file +// xl/drawings/vmlDrawing%d.vml. +type vmlDrawing struct { + XMLName xml.Name `xml:"xml"` + XMLNSv string `xml:"xmlns:v,attr"` + XMLNSo string `xml:"xmlns:o,attr"` + XMLNSx string `xml:"xmlns:x,attr"` + XMLNSmv string `xml:"xmlns:mv,attr"` + Shapelayout *xlsxShapelayout `xml:"o:shapelayout"` + Shapetype *xlsxShapetype `xml:"v:shapetype"` + Shape []xlsxShape `xml:"v:shape"` +} + +// xlsxShapelayout directly maps the shapelayout element. This element contains +// child elements that store information used in the editing and layout of +// shapes. +type xlsxShapelayout struct { + Ext string `xml:"v:ext,attr"` + IDmap *xlsxIDmap `xml:"o:idmap"` +} + +// xlsxIDmap directly maps the idmap element. +type xlsxIDmap struct { + Ext string `xml:"v:ext,attr"` + Data int `xml:"data,attr"` +} + +// xlsxShape directly maps the shape element. +type xlsxShape struct { + XMLName xml.Name `xml:"v:shape"` + ID string `xml:"id,attr"` + Type string `xml:"type,attr"` + Style string `xml:"style,attr"` + Fillcolor string `xml:"fillcolor,attr"` + Insetmode string `xml:"urn:schemas-microsoft-com:office:office insetmode,attr,omitempty"` + Strokecolor string `xml:"strokecolor,attr,omitempty"` + Val string `xml:",innerxml"` +} + +// xlsxShapetype directly maps the shapetype element. +type xlsxShapetype struct { + ID string `xml:"id,attr"` + Coordsize string `xml:"coordsize,attr"` + Spt int `xml:"o:spt,attr"` + Path string `xml:"path,attr"` + Stroke *xlsxStroke `xml:"v:stroke"` + VPath *vPath `xml:"v:path"` +} + +// xlsxStroke directly maps the stroke element. +type xlsxStroke struct { + Joinstyle string `xml:"joinstyle,attr"` +} + +// vPath directly maps the v:path element. +type vPath struct { + Gradientshapeok string `xml:"gradientshapeok,attr,omitempty"` + Connecttype string `xml:"o:connecttype,attr"` +} + +// vFill directly maps the v:fill element. This element must be defined within a +// Shape element. +type vFill struct { + Angle int `xml:"angle,attr,omitempty"` + Color2 string `xml:"color2,attr"` + Type string `xml:"type,attr,omitempty"` + Fill *oFill `xml:"o:fill"` +} + +// oFill directly maps the o:fill element. +type oFill struct { + Ext string `xml:"v:ext,attr"` + Type string `xml:"type,attr,omitempty"` +} + +// vShadow directly maps the v:shadow element. This element must be defined +// within a Shape element. In addition, the On attribute must be set to True. +type vShadow struct { + On string `xml:"on,attr"` + Color string `xml:"color,attr,omitempty"` + Obscured string `xml:"obscured,attr"` +} + +// vTextbox directly maps the v:textbox element. This element must be defined +// within a Shape element. +type vTextbox struct { + Style string `xml:"style,attr"` + Div *xlsxDiv `xml:"div"` +} + +// xlsxDiv directly maps the div element. 
+type xlsxDiv struct { + Style string `xml:"style,attr"` +} + +// xClientData (Attached Object Data) directly maps the x:ClientData element. +// This element specifies data associated with objects attached to a +// spreadsheet. While this element might contain any of the child elements +// below, only certain combinations are meaningful. The ObjectType attribute +// determines the kind of object the element represents and which subset of +// child elements is appropriate. Relevant groups are identified for each child +// element. +type xClientData struct { + ObjectType string `xml:"ObjectType,attr"` + MoveWithCells string `xml:"x:MoveWithCells,omitempty"` + SizeWithCells string `xml:"x:SizeWithCells,omitempty"` + Anchor string `xml:"x:Anchor"` + AutoFill string `xml:"x:AutoFill"` + Row int `xml:"x:Row"` + Column int `xml:"x:Column"` +} + +// decodeVmlDrawing defines the structure used to parse the file +// xl/drawings/vmlDrawing%d.vml. +type decodeVmlDrawing struct { + Shape []decodeShape `xml:"urn:schemas-microsoft-com:vml shape"` +} + +// decodeShape defines the structure used to parse the particular shape element. +type decodeShape struct { + Val string `xml:",innerxml"` +} + +// encodeShape defines the structure used to re-serialization shape element. +type encodeShape struct { + Fill *vFill `xml:"v:fill"` + Shadow *vShadow `xml:"v:shadow"` + Path *vPath `xml:"v:path"` + Textbox *vTextbox `xml:"v:textbox"` + ClientData *xClientData `xml:"x:ClientData"` +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlChart.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlChart.go new file mode 100644 index 000000000..a26333430 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlChart.go @@ -0,0 +1,612 @@ +package excelize + +import "encoding/xml" + +// xlsxChartSpace directly maps the c:chartSpace element. The chart namespace in +// DrawingML is for representing visualizations of numeric data with column +// charts, pie charts, scatter charts, or other types of charts. +type xlsxChartSpace struct { + XMLName xml.Name `xml:"c:chartSpace"` + XMLNSc string `xml:"xmlns:c,attr"` + XMLNSa string `xml:"xmlns:a,attr"` + XMLNSr string `xml:"xmlns:r,attr"` + XMLNSc16r2 string `xml:"xmlns:c16r2,attr"` + Date1904 *attrValBool `xml:"c:date1904"` + Lang *attrValString `xml:"c:lang"` + RoundedCorners *attrValBool `xml:"c:roundedCorners"` + Chart cChart `xml:"c:chart"` + SpPr *cSpPr `xml:"c:spPr"` + TxPr *cTxPr `xml:"c:txPr"` + PrintSettings *cPrintSettings `xml:"c:printSettings"` +} + +// cThicknessSpPr directly maps the element that specifies the thickness of the +// walls or floor as a percentage of the largest dimension of the plot volume +// and SpPr element. +type cThicknessSpPr struct { + Thickness *attrValInt `xml:"c:thickness"` + SpPr *cSpPr `xml:"c:spPr"` +} + +// cChart (Chart) directly maps the c:chart element. This element specifies a +// title. +type cChart struct { + Title *cTitle `xml:"c:title"` + AutoTitleDeleted *cAutoTitleDeleted `xml:"c:autoTitleDeleted"` + View3D *cView3D `xml:"c:view3D"` + Floor *cThicknessSpPr `xml:"c:floor"` + SideWall *cThicknessSpPr `xml:"c:sideWall"` + BackWall *cThicknessSpPr `xml:"c:backWall"` + PlotArea *cPlotArea `xml:"c:plotArea"` + Legend *cLegend `xml:"c:legend"` + PlotVisOnly *attrValBool `xml:"c:plotVisOnly"` + DispBlanksAs *attrValString `xml:"c:dispBlanksAs"` + ShowDLblsOverMax *attrValBool `xml:"c:showDLblsOverMax"` +} + +// cTitle (Title) directly maps the c:title element. This element specifies a +// title. 
+type cTitle struct { + Tx cTx `xml:"c:tx,omitempty"` + Layout string `xml:"c:layout,omitempty"` + Overlay attrValBool `xml:"c:overlay,omitempty"` + SpPr cSpPr `xml:"c:spPr,omitempty"` + TxPr cTxPr `xml:"c:txPr,omitempty"` +} + +// cTx (Chart Text) directly maps the c:tx element. This element specifies text +// to use on a chart, including rich text formatting. +type cTx struct { + StrRef *cStrRef `xml:"c:strRef"` + Rich *cRich `xml:"c:rich,omitempty"` +} + +// cRich (Rich Text) directly maps the c:rich element. This element contains a +// string with rich text formatting. +type cRich struct { + BodyPr aBodyPr `xml:"a:bodyPr,omitempty"` + LstStyle string `xml:"a:lstStyle,omitempty"` + P aP `xml:"a:p"` +} + +// aBodyPr (Body Properties) directly maps the a:bodyPr element. This element +// defines the body properties for the text body within a shape. +type aBodyPr struct { + Anchor string `xml:"anchor,attr,omitempty"` + AnchorCtr bool `xml:"anchorCtr,attr"` + Rot int `xml:"rot,attr"` + BIns float64 `xml:"bIns,attr,omitempty"` + CompatLnSpc bool `xml:"compatLnSpc,attr,omitempty"` + ForceAA bool `xml:"forceAA,attr,omitempty"` + FromWordArt bool `xml:"fromWordArt,attr,omitempty"` + HorzOverflow string `xml:"horzOverflow,attr,omitempty"` + LIns float64 `xml:"lIns,attr,omitempty"` + NumCol int `xml:"numCol,attr,omitempty"` + RIns float64 `xml:"rIns,attr,omitempty"` + RtlCol bool `xml:"rtlCol,attr,omitempty"` + SpcCol int `xml:"spcCol,attr,omitempty"` + SpcFirstLastPara bool `xml:"spcFirstLastPara,attr"` + TIns float64 `xml:"tIns,attr,omitempty"` + Upright bool `xml:"upright,attr,omitempty"` + Vert string `xml:"vert,attr,omitempty"` + VertOverflow string `xml:"vertOverflow,attr,omitempty"` + Wrap string `xml:"wrap,attr,omitempty"` +} + +// aP (Paragraph) directly maps the a:p element. This element specifies a +// paragraph of content in the document. +type aP struct { + PPr *aPPr `xml:"a:pPr"` + R *aR `xml:"a:r"` + EndParaRPr *aEndParaRPr `xml:"a:endParaRPr"` +} + +// aPPr (Paragraph Properties) directly maps the a:pPr element. This element +// specifies a set of paragraph properties which shall be applied to the +// contents of the parent paragraph after all style/numbering/table properties +// have been applied to the text. These properties are defined as direct +// formatting, since they are directly applied to the paragraph and supersede +// any formatting from styles. +type aPPr struct { + DefRPr aRPr `xml:"a:defRPr"` +} + +// aSolidFill (Solid Fill) directly maps the solidFill element. This element +// specifies a solid color fill. The shape is filled entirely with the specified +// color. +type aSolidFill struct { + SchemeClr *aSchemeClr `xml:"a:schemeClr"` + SrgbClr *attrValString `xml:"a:srgbClr"` +} + +// aSchemeClr (Scheme Color) directly maps the a:schemeClr element. This +// element specifies a color bound to a user's theme. As with all elements which +// define a color, it is possible to apply a list of color transforms to the +// base color defined. 
+type aSchemeClr struct { + Val string `xml:"val,attr,omitempty"` + LumMod *attrValInt `xml:"a:lumMod"` + LumOff *attrValInt `xml:"a:lumOff"` +} + +// attrValInt directly maps the val element with integer data type as an +// attribute。 +type attrValInt struct { + Val int `xml:"val,attr"` +} + +// attrValFloat directly maps the val element with float64 data type as an +// attribute。 +type attrValFloat struct { + Val float64 `xml:"val,attr"` +} + +// attrValBool directly maps the val element with boolean data type as an +// attribute。 +type attrValBool struct { + Val bool `xml:"val,attr"` +} + +// attrValString directly maps the val element with string data type as an +// attribute。 +type attrValString struct { + Val string `xml:"val,attr"` +} + +// aCs directly maps the a:cs element. +type aCs struct { + Typeface string `xml:"typeface,attr"` +} + +// aEa directly maps the a:ea element. +type aEa struct { + Typeface string `xml:"typeface,attr"` +} + +// aLatin (Latin Font) directly maps the a:latin element. This element +// specifies that a Latin font be used for a specific run of text. This font is +// specified with a typeface attribute much like the others but is specifically +// classified as a Latin font. +type aLatin struct { + Typeface string `xml:"typeface,attr"` +} + +// aR directly maps the a:r element. +type aR struct { + RPr aRPr `xml:"a:rPr,omitempty"` + T string `xml:"a:t,omitempty"` +} + +// aRPr (Run Properties) directly maps the c:rPr element. This element +// specifies a set of run properties which shall be applied to the contents of +// the parent run after all style formatting has been applied to the text. These +// properties are defined as direct formatting, since they are directly applied +// to the run and supersede any formatting from styles. +type aRPr struct { + AltLang string `xml:"altLang,attr,omitempty"` + B bool `xml:"b,attr"` + Baseline int `xml:"baseline,attr"` + Bmk string `xml:"bmk,attr,omitempty"` + Cap string `xml:"cap,attr,omitempty"` + Dirty bool `xml:"dirty,attr,omitempty"` + Err bool `xml:"err,attr,omitempty"` + I bool `xml:"i,attr"` + Kern int `xml:"kern,attr"` + Kumimoji bool `xml:"kumimoji,attr,omitempty"` + Lang string `xml:"lang,attr,omitempty"` + NoProof bool `xml:"noProof,attr,omitempty"` + NormalizeH bool `xml:"normalizeH,attr,omitempty"` + SmtClean bool `xml:"smtClean,attr,omitempty"` + SmtID uint64 `xml:"smtId,attr,omitempty"` + Spc int `xml:"spc,attr"` + Strike string `xml:"strike,attr,omitempty"` + Sz int `xml:"sz,attr,omitempty"` + U string `xml:"u,attr,omitempty"` + SolidFill *aSolidFill `xml:"a:solidFill"` + Latin *aLatin `xml:"a:latin"` + Ea *aEa `xml:"a:ea"` + Cs *aCs `xml:"a:cs"` +} + +// cSpPr (Shape Properties) directly maps the c:spPr element. This element +// specifies the visual shape properties that can be applied to a shape. These +// properties include the shape fill, outline, geometry, effects, and 3D +// orientation. +type cSpPr struct { + NoFill *string `xml:"a:noFill"` + SolidFill *aSolidFill `xml:"a:solidFill"` + Ln *aLn `xml:"a:ln"` + Sp3D *aSp3D `xml:"a:sp3d"` + EffectLst *string `xml:"a:effectLst"` +} + +// aSp3D (3-D Shape Properties) directly maps the a:sp3d element. This element +// defines the 3D properties associated with a particular shape in DrawingML. +// The 3D properties which can be applied to a shape are top and bottom bevels, +// a contour and an extrusion. 
+type aSp3D struct { + ContourW int `xml:"contourW,attr"` + ContourClr *aContourClr `xml:"a:contourClr"` +} + +// aContourClr (Contour Color) directly maps the a:contourClr element. This +// element defines the color for the contour on a shape. The contour of a shape +// is a solid filled line which surrounds the outer edges of the shape. +type aContourClr struct { + SchemeClr *aSchemeClr `xml:"a:schemeClr"` +} + +// aLn (Outline) directly maps the a:ln element. This element specifies an +// outline style that can be applied to a number of different objects such as +// shapes and text. The line allows for the specifying of many different types +// of outlines including even line dashes and bevels. +type aLn struct { + Algn string `xml:"algn,attr,omitempty"` + Cap string `xml:"cap,attr,omitempty"` + Cmpd string `xml:"cmpd,attr,omitempty"` + W int `xml:"w,attr,omitempty" ` + NoFill string `xml:"a:noFill,omitempty"` + Round string `xml:"a:round,omitempty"` + SolidFill *aSolidFill `xml:"a:solidFill"` +} + +// cTxPr (Text Properties) directly maps the c:txPr element. This element +// specifies text formatting. The lstStyle element is not supported. +type cTxPr struct { + BodyPr aBodyPr `xml:"a:bodyPr,omitempty"` + LstStyle string `xml:"a:lstStyle,omitempty"` + P aP `xml:"a:p,omitempty"` +} + +// aEndParaRPr (End Paragraph Run Properties) directly maps the a:endParaRPr +// element. This element specifies the text run properties that are to be used +// if another run is inserted after the last run specified. This effectively +// saves the run property state so that it can be applied when the user enters +// additional text. If this element is omitted, then the application can +// determine which default properties to apply. It is recommended that this +// element be specified at the end of the list of text runs within the paragraph +// so that an orderly list is maintained. +type aEndParaRPr struct { + Lang string `xml:"lang,attr"` + AltLang string `xml:"altLang,attr,omitempty"` + Sz int `xml:"sz,attr,omitempty"` +} + +// cAutoTitleDeleted (Auto Title Is Deleted) directly maps the +// c:autoTitleDeleted element. This element specifies the title shall not be +// shown for this chart. +type cAutoTitleDeleted struct { + Val bool `xml:"val,attr"` +} + +// cView3D (View In 3D) directly maps the c:view3D element. This element +// specifies the 3-D view of the chart. +type cView3D struct { + RotX *attrValInt `xml:"c:rotX"` + RotY *attrValInt `xml:"c:rotY"` + DepthPercent *attrValInt `xml:"c:depthPercent"` + RAngAx *attrValInt `xml:"c:rAngAx"` +} + +// cPlotArea directly maps the c:plotArea element. This element specifies the +// plot area of the chart. +type cPlotArea struct { + Layout *string `xml:"c:layout"` + BarChart *cCharts `xml:"c:barChart"` + Bar3DChart *cCharts `xml:"c:bar3DChart"` + DoughnutChart *cCharts `xml:"c:doughnutChart"` + LineChart *cCharts `xml:"c:lineChart"` + PieChart *cCharts `xml:"c:pieChart"` + Pie3DChart *cCharts `xml:"c:pie3DChart"` + RadarChart *cCharts `xml:"c:radarChart"` + ScatterChart *cCharts `xml:"c:scatterChart"` + CatAx []*cAxs `xml:"c:catAx"` + ValAx []*cAxs `xml:"c:valAx"` + SpPr *cSpPr `xml:"c:spPr"` +} + +// cCharts specifies the common element of the chart. 
+type cCharts struct { + BarDir *attrValString `xml:"c:barDir"` + Grouping *attrValString `xml:"c:grouping"` + RadarStyle *attrValString `xml:"c:radarStyle"` + ScatterStyle *attrValString `xml:"c:scatterStyle"` + VaryColors *attrValBool `xml:"c:varyColors"` + Ser *[]cSer `xml:"c:ser"` + DLbls *cDLbls `xml:"c:dLbls"` + HoleSize *attrValInt `xml:"c:holeSize"` + Smooth *attrValBool `xml:"c:smooth"` + Overlap *attrValInt `xml:"c:overlap"` + AxID []*attrValInt `xml:"c:axId"` +} + +// cAxs directly maps the c:catAx and c:valAx element. +type cAxs struct { + AxID *attrValInt `xml:"c:axId"` + Scaling *cScaling `xml:"c:scaling"` + Delete *attrValBool `xml:"c:delete"` + AxPos *attrValString `xml:"c:axPos"` + NumFmt *cNumFmt `xml:"c:numFmt"` + MajorTickMark *attrValString `xml:"c:majorTickMark"` + MinorTickMark *attrValString `xml:"c:minorTickMark"` + TickLblPos *attrValString `xml:"c:tickLblPos"` + SpPr *cSpPr `xml:"c:spPr"` + TxPr *cTxPr `xml:"c:txPr"` + CrossAx *attrValInt `xml:"c:crossAx"` + Crosses *attrValString `xml:"c:crosses"` + CrossBetween *attrValString `xml:"c:crossBetween"` + Auto *attrValBool `xml:"c:auto"` + LblAlgn *attrValString `xml:"c:lblAlgn"` + LblOffset *attrValInt `xml:"c:lblOffset"` + NoMultiLvlLbl *attrValBool `xml:"c:noMultiLvlLbl"` +} + +// cScaling directly maps the c:scaling element. This element contains +// additional axis settings. +type cScaling struct { + Orientation *attrValString `xml:"c:orientation"` + Max *attrValFloat `xml:"c:max"` + Min *attrValFloat `xml:"c:min"` +} + +// cNumFmt (Numbering Format) directly maps the c:numFmt element. This element +// specifies number formatting for the parent element. +type cNumFmt struct { + FormatCode string `xml:"formatCode,attr"` + SourceLinked bool `xml:"sourceLinked,attr"` +} + +// cSer directly maps the c:ser element. This element specifies a series on a +// chart. +type cSer struct { + IDx *attrValInt `xml:"c:idx"` + Order *attrValInt `xml:"c:order"` + Tx *cTx `xml:"c:tx"` + SpPr *cSpPr `xml:"c:spPr"` + DPt []*cDPt `xml:"c:dPt"` + DLbls *cDLbls `xml:"c:dLbls"` + Marker *cMarker `xml:"c:marker"` + InvertIfNegative *attrValBool `xml:"c:invertIfNegative"` + Cat *cCat `xml:"c:cat"` + Val *cVal `xml:"c:val"` + XVal *cCat `xml:"c:xVal"` + YVal *cVal `xml:"c:yVal"` + Smooth *attrValBool `xml:"c:smooth"` +} + +// cMarker (Marker) directly maps the c:marker element. This element specifies a +// data marker. +type cMarker struct { + Symbol *attrValString `xml:"c:symbol"` + Size *attrValInt `xml:"c:size"` + SpPr *cSpPr `xml:"c:spPr"` +} + +// cDPt (Data Point) directly maps the c:dPt element. This element specifies a +// single data point. +type cDPt struct { + IDx *attrValInt `xml:"c:idx"` + Bubble3D *attrValBool `xml:"c:bubble3D"` + SpPr *cSpPr `xml:"c:spPr"` +} + +// cCat (Category Axis Data) directly maps the c:cat element. This element +// specifies the data used for the category axis. +type cCat struct { + StrRef *cStrRef `xml:"c:strRef"` +} + +// cStrRef (String Reference) directly maps the c:strRef element. This element +// specifies a reference to data for a single data label or title with a cache +// of the last values used. +type cStrRef struct { + F string `xml:"c:f"` + StrCache *cStrCache `xml:"c:strCache"` +} + +// cStrCache (String Cache) directly maps the c:strCache element. This element +// specifies the last string data used for a chart. +type cStrCache struct { + Pt []*cPt `xml:"c:pt"` + PtCount *attrValInt `xml:"c:ptCount"` +} + +// cPt directly maps the c:pt element. 
This element specifies data for a +// particular data point. +type cPt struct { + IDx int `xml:"idx,attr"` + V *string `xml:"c:v"` +} + +// cVal directly maps the c:val element. This element specifies the data values +// which shall be used to define the location of data markers on a chart. +type cVal struct { + NumRef *cNumRef `xml:"c:numRef"` +} + +// cNumRef directly maps the c:numRef element. This element specifies a +// reference to numeric data with a cache of the last values used. +type cNumRef struct { + F string `xml:"c:f"` + NumCache *cNumCache `xml:"c:numCache"` +} + +// cNumCache directly maps the c:numCache element. This element specifies the +// last data shown on the chart for a series. +type cNumCache struct { + FormatCode string `xml:"c:formatCode"` + Pt []*cPt `xml:"c:pt"` + PtCount *attrValInt `xml:"c:ptCount"` +} + +// cDLbls (Data Lables) directly maps the c:dLbls element. This element serves +// as a root element that specifies the settings for the data labels for an +// entire series or the entire chart. It contains child elements that specify +// the specific formatting and positioning settings. +type cDLbls struct { + ShowLegendKey *attrValBool `xml:"c:showLegendKey"` + ShowVal *attrValBool `xml:"c:showVal"` + ShowCatName *attrValBool `xml:"c:showCatName"` + ShowSerName *attrValBool `xml:"c:showSerName"` + ShowPercent *attrValBool `xml:"c:showPercent"` + ShowBubbleSize *attrValBool `xml:"c:showBubbleSize"` + ShowLeaderLines *attrValBool `xml:"c:showLeaderLines"` +} + +// cLegend (Legend) directly maps the c:legend element. This element specifies +// the legend. +type cLegend struct { + Layout *string `xml:"c:layout"` + LegendPos *attrValString `xml:"c:legendPos"` + Overlay *attrValBool `xml:"c:overlay"` + SpPr *cSpPr `xml:"c:spPr"` + TxPr *cTxPr `xml:"c:txPr"` +} + +// cPrintSettings directly maps the c:printSettings element. This element +// specifies the print settings for the chart. +type cPrintSettings struct { + HeaderFooter *string `xml:"c:headerFooter"` + PageMargins *cPageMargins `xml:"c:pageMargins"` + PageSetup *string `xml:"c:pageSetup"` +} + +// cPageMargins directly maps the c:pageMargins element. This element specifies +// the page margins for a chart. +type cPageMargins struct { + B float64 `xml:"b,attr"` + Footer float64 `xml:"footer,attr"` + Header float64 `xml:"header,attr"` + L float64 `xml:"l,attr"` + R float64 `xml:"r,attr"` + T float64 `xml:"t,attr"` +} + +// formatChartAxis directly maps the format settings of the chart axis. +type formatChartAxis struct { + Crossing string `json:"crossing"` + MajorTickMark string `json:"major_tick_mark"` + MinorTickMark string `json:"minor_tick_mark"` + MinorUnitType string `json:"minor_unit_type"` + MajorUnit int `json:"major_unit"` + MajorUnitType string `json:"major_unit_type"` + DisplayUnits string `json:"display_units"` + DisplayUnitsVisible bool `json:"display_units_visible"` + DateAxis bool `json:"date_axis"` + ReverseOrder bool `json:"reverse_order"` + Maximum float64 `json:"maximum"` + Minimum float64 `json:"minimum"` + NumFormat string `json:"num_format"` + NumFont struct { + Color string `json:"color"` + Bold bool `json:"bold"` + Italic bool `json:"italic"` + Underline bool `json:"underline"` + } `json:"num_font"` + NameLayout formatLayout `json:"name_layout"` +} + +type formatChartDimension struct { + Width int `json:"width"` + Height int `json:"height"` +} + +// formatChart directly maps the format settings of the chart. 
+type formatChart struct { + Type string `json:"type"` + Series []formatChartSeries `json:"series"` + Format formatPicture `json:"format"` + Dimension formatChartDimension `json:"dimension"` + Legend formatChartLegend `json:"legend"` + Title formatChartTitle `json:"title"` + XAxis formatChartAxis `json:"x_axis"` + YAxis formatChartAxis `json:"y_axis"` + Chartarea struct { + Border struct { + None bool `json:"none"` + } `json:"border"` + Fill struct { + Color string `json:"color"` + } `json:"fill"` + Pattern struct { + Pattern string `json:"pattern"` + FgColor string `json:"fg_color"` + BgColor string `json:"bg_color"` + } `json:"pattern"` + } `json:"chartarea"` + Plotarea struct { + ShowBubbleSize bool `json:"show_bubble_size"` + ShowCatName bool `json:"show_cat_name"` + ShowLeaderLines bool `json:"show_leader_lines"` + ShowPercent bool `json:"show_percent"` + ShowSerName bool `json:"show_series_name"` + ShowVal bool `json:"show_val"` + Gradient struct { + Colors []string `json:"colors"` + } `json:"gradient"` + Border struct { + Color string `json:"color"` + Width int `json:"width"` + DashType string `json:"dash_type"` + } `json:"border"` + Fill struct { + Color string `json:"color"` + } `json:"fill"` + Layout formatLayout `json:"layout"` + } `json:"plotarea"` + ShowBlanksAs string `json:"show_blanks_as"` + ShowHiddenData bool `json:"show_hidden_data"` + SetRotation int `json:"set_rotation"` + SetHoleSize int `json:"set_hole_size"` +} + +// formatChartLegend directly maps the format settings of the chart legend. +type formatChartLegend struct { + None bool `json:"none"` + DeleteSeries []int `json:"delete_series"` + Font formatFont `json:"font"` + Layout formatLayout `json:"layout"` + Position string `json:"position"` + ShowLegendEntry bool `json:"show_legend_entry"` + ShowLegendKey bool `json:"show_legend_key"` +} + +// formatChartSeries directly maps the format settings of the chart series. +type formatChartSeries struct { + Name string `json:"name"` + Categories string `json:"categories"` + Values string `json:"values"` + Line struct { + None bool `json:"none"` + Color string `json:"color"` + } `json:"line"` + Marker struct { + Type string `json:"type"` + Size int `json:"size,"` + Width float64 `json:"width"` + Border struct { + Color string `json:"color"` + None bool `json:"none"` + } `json:"border"` + Fill struct { + Color string `json:"color"` + None bool `json:"none"` + } `json:"fill"` + } `json:"marker"` +} + +// formatChartTitle directly maps the format settings of the chart title. +type formatChartTitle struct { + None bool `json:"none"` + Name string `json:"name"` + Overlay bool `json:"overlay"` + Layout formatLayout `json:"layout"` +} + +// formatLayout directly maps the format settings of the element layout. +type formatLayout struct { + X float64 `json:"x"` + Y float64 `json:"y"` + Width float64 `json:"width"` + Height float64 `json:"height"` +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlComments.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlComments.go new file mode 100644 index 000000000..fadc9b387 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlComments.go @@ -0,0 +1,55 @@ +package excelize + +import "encoding/xml" + +// xlsxComments directly maps the comments element from the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main. A comment is a +// rich text note that is attached to and associated with a cell, separate from +// other cell content. 
Comment content is stored separate from the cell, and is +// displayed in a drawing object (like a text box) that is separate from, but +// associated with, a cell. Comments are used as reminders, such as noting how a +// complex formula works, or to provide feedback to other users. Comments can +// also be used to explain assumptions made in a formula or to call out +// something special about the cell. +type xlsxComments struct { + XMLName xml.Name `xml:"http://schemas.openxmlformats.org/spreadsheetml/2006/main comments"` + Authors []xlsxAuthor `xml:"authors"` + CommentList xlsxCommentList `xml:"commentList"` +} + +// xlsxAuthor directly maps the author element. This element holds a string +// representing the name of a single author of comments. Every comment shall +// have an author. The maximum length of the author string is an implementation +// detail, but a good guideline is 255 chars. +type xlsxAuthor struct { + Author string `xml:"author"` +} + +// xlsxCommentList (List of Comments) directly maps the xlsxCommentList element. +// This element is a container that holds a list of comments for the sheet. +type xlsxCommentList struct { + Comment []xlsxComment `xml:"comment"` +} + +// xlsxComment directly maps the comment element. This element represents a +// single user entered comment. Each comment shall have an author and can +// optionally contain richly formatted text. +type xlsxComment struct { + Ref string `xml:"ref,attr"` + AuthorID int `xml:"authorId,attr"` + Text xlsxText `xml:"text"` +} + +// xlsxText directly maps the text element. This element contains rich text +// which represents the text of a comment. The maximum length for this text is a +// spreadsheet application implementation detail. A recommended guideline is +// 32767 chars. +type xlsxText struct { + R []xlsxR `xml:"r"` +} + +// formatComment directly maps the format settings of the comment. +type formatComment struct { + Author string `json:"author"` + Text string `json:"text"` +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlContentTypes.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlContentTypes.go new file mode 100644 index 000000000..121c6846b --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlContentTypes.go @@ -0,0 +1,26 @@ +package excelize + +import "encoding/xml" + +// xlsxTypes directly maps the types element of content types for relationship +// parts, it takes a Multipurpose Internet Mail Extension (MIME) media type as a +// value. 
+type xlsxTypes struct { + XMLName xml.Name `xml:"http://schemas.openxmlformats.org/package/2006/content-types Types"` + Overrides []xlsxOverride `xml:"Override"` + Defaults []xlsxDefault `xml:"Default"` +} + +// xlsxOverride directly maps the override element in the namespace +// http://schemas.openxmlformats.org/package/2006/content-types +type xlsxOverride struct { + PartName string `xml:",attr"` + ContentType string `xml:",attr"` +} + +// xlsxDefault directly maps the default element in the namespace +// http://schemas.openxmlformats.org/package/2006/content-types +type xlsxDefault struct { + Extension string `xml:",attr"` + ContentType string `xml:",attr"` +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlDecodeDrawing.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlDecodeDrawing.go new file mode 100644 index 000000000..fff6b9deb --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlDecodeDrawing.go @@ -0,0 +1,187 @@ +package excelize + +import "encoding/xml" + +// decodeCellAnchor directly maps the oneCellAnchor (One Cell Anchor Shape Size) +// and twoCellAnchor (Two Cell Anchor Shape Size). This element specifies a two +// cell anchor placeholder for a group, a shape, or a drawing element. It moves +// with cells and its extents are in EMU units. +type decodeCellAnchor struct { + EditAs string `xml:"editAs,attr,omitempty"` + Content string `xml:",innerxml"` +} + +// decodeWsDr directly maps the root element for a part of this content type +// shall wsDr. In order to solve the problem that the label structure is changed +// after serialization and deserialization, two different structures are +// defined. decodeWsDr just for deserialization. +type decodeWsDr struct { + A string `xml:"xmlns a,attr"` + Xdr string `xml:"xmlns xdr,attr"` + R string `xml:"xmlns r,attr"` + OneCellAnchor []*decodeCellAnchor `xml:"oneCellAnchor,omitempty"` + TwoCellAnchor []*decodeCellAnchor `xml:"twoCellAnchor,omitempty"` + XMLName xml.Name `xml:"http://schemas.openxmlformats.org/drawingml/2006/spreadsheetDrawing wsDr,omitempty"` +} + +// decodeTwoCellAnchor directly maps the oneCellAnchor (One Cell Anchor Shape +// Size) and twoCellAnchor (Two Cell Anchor Shape Size). This element specifies +// a two cell anchor placeholder for a group, a shape, or a drawing element. It +// moves with cells and its extents are in EMU units. +type decodeTwoCellAnchor struct { + From *decodeFrom `xml:"from"` + To *decodeTo `xml:"to"` + Pic *decodePic `xml:"pic,omitempty"` + ClientData *decodeClientData `xml:"clientData"` +} + +// decodeCNvPr directly maps the cNvPr (Non-Visual Drawing Properties). This +// element specifies non-visual canvas properties. This allows for additional +// information that does not affect the appearance of the picture to be stored. +type decodeCNvPr struct { + ID int `xml:"id,attr"` + Name string `xml:"name,attr"` + Descr string `xml:"descr,attr"` + Title string `xml:"title,attr,omitempty"` +} + +// decodePicLocks directly maps the picLocks (Picture Locks). This element +// specifies all locking properties for a graphic frame. These properties inform +// the generating application about specific properties that have been +// previously locked and thus should not be changed. 
+type decodePicLocks struct { + NoAdjustHandles bool `xml:"noAdjustHandles,attr,omitempty"` + NoChangeArrowheads bool `xml:"noChangeArrowheads,attr,omitempty"` + NoChangeAspect bool `xml:"noChangeAspect,attr"` + NoChangeShapeType bool `xml:"noChangeShapeType,attr,omitempty"` + NoCrop bool `xml:"noCrop,attr,omitempty"` + NoEditPoints bool `xml:"noEditPoints,attr,omitempty"` + NoGrp bool `xml:"noGrp,attr,omitempty"` + NoMove bool `xml:"noMove,attr,omitempty"` + NoResize bool `xml:"noResize,attr,omitempty"` + NoRot bool `xml:"noRot,attr,omitempty"` + NoSelect bool `xml:"noSelect,attr,omitempty"` +} + +// decodeBlip directly maps the blip element in the namespace +// http://purl.oclc.org/ooxml/officeDoc ument/relationships - This element +// specifies the existence of an image (binary large image or picture) and +// contains a reference to the image data. +type decodeBlip struct { + Embed string `xml:"embed,attr"` + Cstate string `xml:"cstate,attr,omitempty"` + R string `xml:"r,attr"` +} + +// decodeStretch directly maps the stretch element. This element specifies that +// a BLIP should be stretched to fill the target rectangle. The other option is +// a tile where a BLIP is tiled to fill the available area. +type decodeStretch struct { + FillRect string `xml:"fillRect"` +} + +// decodeOff directly maps the colOff and rowOff element. This element is used +// to specify the column offset within a cell. +type decodeOff struct { + X int `xml:"x,attr"` + Y int `xml:"y,attr"` +} + +// decodeExt directly maps the ext element. +type decodeExt struct { + Cx int `xml:"cx,attr"` + Cy int `xml:"cy,attr"` +} + +// decodePrstGeom directly maps the prstGeom (Preset geometry). This element +// specifies when a preset geometric shape should be used instead of a custom +// geometric shape. The generating application should be able to render all +// preset geometries enumerated in the ST_ShapeType list. +type decodePrstGeom struct { + Prst string `xml:"prst,attr"` +} + +// decodeXfrm directly maps the xfrm (2D Transform for Graphic Frame). This +// element specifies the transform to be applied to the corresponding graphic +// frame. This transformation is applied to the graphic frame just as it would +// be for a shape or group shape. +type decodeXfrm struct { + Off decodeOff `xml:"off"` + Ext decodeExt `xml:"ext"` +} + +// decodeCNvPicPr directly maps the cNvPicPr (Non-Visual Picture Drawing +// Properties). This element specifies the non-visual properties for the picture +// canvas. These properties are to be used by the generating application to +// determine how certain properties are to be changed for the picture object in +// question. +type decodeCNvPicPr struct { + PicLocks decodePicLocks `xml:"picLocks"` +} + +// directly maps the nvPicPr (Non-Visual Properties for a Picture). This element +// specifies all non-visual properties for a picture. This element is a +// container for the non-visual identification properties, shape properties and +// application properties that are to be associated with a picture. This allows +// for additional information that does not affect the appearance of the picture +// to be stored. +type decodeNvPicPr struct { + CNvPr decodeCNvPr `xml:"cNvPr"` + CNvPicPr decodeCNvPicPr `xml:"cNvPicPr"` +} + +// decodeBlipFill directly maps the blipFill (Picture Fill). This element +// specifies the kind of picture fill that the picture object has. Because a +// picture has a picture fill already by default, it is possible to have two +// fills specified for a picture object. 
+type decodeBlipFill struct { + Blip decodeBlip `xml:"blip"` + Stretch decodeStretch `xml:"stretch"` +} + +// decodeSpPr directly maps the spPr (Shape Properties). This element specifies +// the visual shape properties that can be applied to a picture. These are the +// same properties that are allowed to describe the visual properties of a shape +// but are used here to describe the visual appearance of a picture within a +// document. +type decodeSpPr struct { + Xfrm decodeXfrm `xml:"a:xfrm"` + PrstGeom decodePrstGeom `xml:"a:prstGeom"` +} + +// decodePic elements encompass the definition of pictures within the DrawingML +// framework. While pictures are in many ways very similar to shapes they have +// specific properties that are unique in order to optimize for picture- +// specific scenarios. +type decodePic struct { + NvPicPr decodeNvPicPr `xml:"nvPicPr"` + BlipFill decodeBlipFill `xml:"blipFill"` + SpPr decodeSpPr `xml:"spPr"` +} + +// decodeFrom specifies the starting anchor. +type decodeFrom struct { + Col int `xml:"col"` + ColOff int `xml:"colOff"` + Row int `xml:"row"` + RowOff int `xml:"rowOff"` +} + +// decodeTo directly specifies the ending anchor. +type decodeTo struct { + Col int `xml:"col"` + ColOff int `xml:"colOff"` + Row int `xml:"row"` + RowOff int `xml:"rowOff"` +} + +// decodeClientData directly maps the clientData element. An empty element which +// specifies (via attributes) certain properties related to printing and +// selection of the drawing object. The fLocksWithSheet attribute (either true +// or false) determines whether to disable selection when the sheet is +// protected, and fPrintsWithSheet attribute (either true or false) determines +// whether the object is printed when the sheet is printed. +type decodeClientData struct { + FLocksWithSheet bool `xml:"fLocksWithSheet,attr"` + FPrintsWithSheet bool `xml:"fPrintsWithSheet,attr"` +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlDrawing.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlDrawing.go new file mode 100644 index 000000000..beb6bc94a --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlDrawing.go @@ -0,0 +1,388 @@ +package excelize + +import "encoding/xml" + +// Source relationship and namespace. 
+const ( + SourceRelationship = "http://schemas.openxmlformats.org/officeDocument/2006/relationships" + SourceRelationshipChart = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/chart" + SourceRelationshipComments = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/comments" + SourceRelationshipImage = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/image" + SourceRelationshipTable = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/table" + SourceRelationshipDrawingML = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/drawing" + SourceRelationshipDrawingVML = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/vmlDrawing" + SourceRelationshipHyperLink = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/hyperlink" + SourceRelationshipWorkSheet = "http://schemas.openxmlformats.org/officeDocument/2006/relationships/worksheet" + SourceRelationshipChart201506 = "http://schemas.microsoft.com/office/drawing/2015/06/chart" + SourceRelationshipChart20070802 = "http://schemas.microsoft.com/office/drawing/2007/8/2/chart" + SourceRelationshipChart2014 = "http://schemas.microsoft.com/office/drawing/2014/chart" + SourceRelationshipCompatibility = "http://schemas.openxmlformats.org/markup-compatibility/2006" + NameSpaceDrawingML = "http://schemas.openxmlformats.org/drawingml/2006/main" + NameSpaceDrawingMLChart = "http://schemas.openxmlformats.org/drawingml/2006/chart" + NameSpaceDrawingMLSpreadSheet = "http://schemas.openxmlformats.org/drawingml/2006/spreadsheetDrawing" + NameSpaceSpreadSheet = "http://schemas.openxmlformats.org/spreadsheetml/2006/main" + NameSpaceXML = "http://www.w3.org/XML/1998/namespace" +) + +var supportImageTypes = map[string]string{".gif": ".gif", ".jpg": ".jpeg", ".jpeg": ".jpeg", ".png": ".png"} + +// xlsxCNvPr directly maps the cNvPr (Non-Visual Drawing Properties). This +// element specifies non-visual canvas properties. This allows for additional +// information that does not affect the appearance of the picture to be stored. +type xlsxCNvPr struct { + ID int `xml:"id,attr"` + Name string `xml:"name,attr"` + Descr string `xml:"descr,attr"` + Title string `xml:"title,attr,omitempty"` + HlinkClick *xlsxHlinkClick `xml:"a:hlinkClick"` +} + +// xlsxHlinkClick (Click Hyperlink) Specifies the on-click hyperlink +// information to be applied to a run of text. When the hyperlink text is +// clicked the link is fetched. +type xlsxHlinkClick struct { + R string `xml:"xmlns:r,attr,omitempty"` + RID string `xml:"r:id,attr,omitempty"` + InvalidURL string `xml:"invalidUrl,attr,omitempty"` + Action string `xml:"action,attr,omitempty"` + TgtFrame string `xml:"tgtFrame,attr,omitempty"` + Tooltip string `xml:"tooltip,attr,omitempty"` + History bool `xml:"history,attr,omitempty"` + HighlightClick bool `xml:"highlightClick,attr,omitempty"` + EndSnd bool `xml:"endSnd,attr,omitempty"` +} + +// xlsxPicLocks directly maps the picLocks (Picture Locks). This element +// specifies all locking properties for a graphic frame. These properties inform +// the generating application about specific properties that have been +// previously locked and thus should not be changed. 
+type xlsxPicLocks struct { + NoAdjustHandles bool `xml:"noAdjustHandles,attr,omitempty"` + NoChangeArrowheads bool `xml:"noChangeArrowheads,attr,omitempty"` + NoChangeAspect bool `xml:"noChangeAspect,attr"` + NoChangeShapeType bool `xml:"noChangeShapeType,attr,omitempty"` + NoCrop bool `xml:"noCrop,attr,omitempty"` + NoEditPoints bool `xml:"noEditPoints,attr,omitempty"` + NoGrp bool `xml:"noGrp,attr,omitempty"` + NoMove bool `xml:"noMove,attr,omitempty"` + NoResize bool `xml:"noResize,attr,omitempty"` + NoRot bool `xml:"noRot,attr,omitempty"` + NoSelect bool `xml:"noSelect,attr,omitempty"` +} + +// xlsxBlip directly maps the blip element in the namespace +// http://purl.oclc.org/ooxml/officeDoc ument/relationships - This element +// specifies the existence of an image (binary large image or picture) and +// contains a reference to the image data. +type xlsxBlip struct { + Embed string `xml:"r:embed,attr"` + Cstate string `xml:"cstate,attr,omitempty"` + R string `xml:"xmlns:r,attr"` +} + +// xlsxStretch directly maps the stretch element. This element specifies that a +// BLIP should be stretched to fill the target rectangle. The other option is a +// tile where a BLIP is tiled to fill the available area. +type xlsxStretch struct { + FillRect string `xml:"a:fillRect"` +} + +// xlsxOff directly maps the colOff and rowOff element. This element is used to +// specify the column offset within a cell. +type xlsxOff struct { + X int `xml:"x,attr"` + Y int `xml:"y,attr"` +} + +// xlsxExt directly maps the ext element. +type xlsxExt struct { + Cx int `xml:"cx,attr"` + Cy int `xml:"cy,attr"` +} + +// xlsxPrstGeom directly maps the prstGeom (Preset geometry). This element +// specifies when a preset geometric shape should be used instead of a custom +// geometric shape. The generating application should be able to render all +// preset geometries enumerated in the ST_ShapeType list. +type xlsxPrstGeom struct { + Prst string `xml:"prst,attr"` +} + +// xlsxXfrm directly maps the xfrm (2D Transform for Graphic Frame). This +// element specifies the transform to be applied to the corresponding graphic +// frame. This transformation is applied to the graphic frame just as it would +// be for a shape or group shape. +type xlsxXfrm struct { + Off xlsxOff `xml:"a:off"` + Ext xlsxExt `xml:"a:ext"` +} + +// xlsxCNvPicPr directly maps the cNvPicPr (Non-Visual Picture Drawing +// Properties). This element specifies the non-visual properties for the picture +// canvas. These properties are to be used by the generating application to +// determine how certain properties are to be changed for the picture object in +// question. +type xlsxCNvPicPr struct { + PicLocks xlsxPicLocks `xml:"a:picLocks"` +} + +// directly maps the nvPicPr (Non-Visual Properties for a Picture). This element +// specifies all non-visual properties for a picture. This element is a +// container for the non-visual identification properties, shape properties and +// application properties that are to be associated with a picture. This allows +// for additional information that does not affect the appearance of the picture +// to be stored. +type xlsxNvPicPr struct { + CNvPr xlsxCNvPr `xml:"xdr:cNvPr"` + CNvPicPr xlsxCNvPicPr `xml:"xdr:cNvPicPr"` +} + +// xlsxBlipFill directly maps the blipFill (Picture Fill). This element +// specifies the kind of picture fill that the picture object has. Because a +// picture has a picture fill already by default, it is possible to have two +// fills specified for a picture object. 
+type xlsxBlipFill struct { + Blip xlsxBlip `xml:"a:blip"` + Stretch xlsxStretch `xml:"a:stretch"` +} + +// xlsxSpPr directly maps the spPr (Shape Properties). This element specifies +// the visual shape properties that can be applied to a picture. These are the +// same properties that are allowed to describe the visual properties of a shape +// but are used here to describe the visual appearance of a picture within a +// document. +type xlsxSpPr struct { + Xfrm xlsxXfrm `xml:"a:xfrm"` + PrstGeom xlsxPrstGeom `xml:"a:prstGeom"` +} + +// xlsxPic elements encompass the definition of pictures within the DrawingML +// framework. While pictures are in many ways very similar to shapes they have +// specific properties that are unique in order to optimize for picture- +// specific scenarios. +type xlsxPic struct { + NvPicPr xlsxNvPicPr `xml:"xdr:nvPicPr"` + BlipFill xlsxBlipFill `xml:"xdr:blipFill"` + SpPr xlsxSpPr `xml:"xdr:spPr"` +} + +// xlsxFrom specifies the starting anchor. +type xlsxFrom struct { + Col int `xml:"xdr:col"` + ColOff int `xml:"xdr:colOff"` + Row int `xml:"xdr:row"` + RowOff int `xml:"xdr:rowOff"` +} + +// xlsxTo directly specifies the ending anchor. +type xlsxTo struct { + Col int `xml:"xdr:col"` + ColOff int `xml:"xdr:colOff"` + Row int `xml:"xdr:row"` + RowOff int `xml:"xdr:rowOff"` +} + +// xdrClientData directly maps the clientData element. An empty element which +// specifies (via attributes) certain properties related to printing and +// selection of the drawing object. The fLocksWithSheet attribute (either true +// or false) determines whether to disable selection when the sheet is +// protected, and fPrintsWithSheet attribute (either true or false) determines +// whether the object is printed when the sheet is printed. +type xdrClientData struct { + FLocksWithSheet bool `xml:"fLocksWithSheet,attr"` + FPrintsWithSheet bool `xml:"fPrintsWithSheet,attr"` +} + +// xdrCellAnchor directly maps the oneCellAnchor (One Cell Anchor Shape Size) +// and twoCellAnchor (Two Cell Anchor Shape Size). This element specifies a two +// cell anchor placeholder for a group, a shape, or a drawing element. It moves +// with cells and its extents are in EMU units. +type xdrCellAnchor struct { + EditAs string `xml:"editAs,attr,omitempty"` + From *xlsxFrom `xml:"xdr:from"` + To *xlsxTo `xml:"xdr:to"` + Ext *xlsxExt `xml:"xdr:ext"` + Sp *xdrSp `xml:"xdr:sp"` + Pic *xlsxPic `xml:"xdr:pic,omitempty"` + GraphicFrame string `xml:",innerxml"` + ClientData *xdrClientData `xml:"xdr:clientData"` +} + +// xlsxWsDr directly maps the root element for a part of this content type shall +// wsDr. +type xlsxWsDr struct { + XMLName xml.Name `xml:"xdr:wsDr"` + OneCellAnchor []*xdrCellAnchor `xml:"xdr:oneCellAnchor"` + TwoCellAnchor []*xdrCellAnchor `xml:"xdr:twoCellAnchor"` + A string `xml:"xmlns:a,attr,omitempty"` + Xdr string `xml:"xmlns:xdr,attr,omitempty"` + R string `xml:"xmlns:r,attr,omitempty"` +} + +// xlsxGraphicFrame (Graphic Frame) directly maps the xdr:graphicFrame element. +// This element specifies the existence of a graphics frame. This frame contains +// a graphic that was generated by an external source and needs a container in +// which to be displayed on the slide surface. 
+type xlsxGraphicFrame struct { + XMLName xml.Name `xml:"xdr:graphicFrame"` + Macro string `xml:"macro,attr"` + NvGraphicFramePr xlsxNvGraphicFramePr `xml:"xdr:nvGraphicFramePr"` + Xfrm xlsxXfrm `xml:"xdr:xfrm"` + Graphic *xlsxGraphic `xml:"a:graphic"` +} + +// xlsxNvGraphicFramePr (Non-Visual Properties for a Graphic Frame) directly +// maps the xdr:nvGraphicFramePr element. This element specifies all non-visual +// properties for a graphic frame. This element is a container for the non- +// visual identification properties, shape properties and application properties +// that are to be associated with a graphic frame. This allows for additional +// information that does not affect the appearance of the graphic frame to be +// stored. +type xlsxNvGraphicFramePr struct { + CNvPr *xlsxCNvPr `xml:"xdr:cNvPr"` + ChicNvGraphicFramePr string `xml:"xdr:cNvGraphicFramePr"` +} + +// xlsxGraphic (Graphic Object) directly maps the a:graphic element. This +// element specifies the existence of a single graphic object. Document authors +// should refer to this element when they wish to persist a graphical object of +// some kind. The specification for this graphical object is provided entirely +// by the document author and referenced within the graphicData child element. +type xlsxGraphic struct { + GraphicData *xlsxGraphicData `xml:"a:graphicData"` +} + +// xlsxGraphicData (Graphic Object Data) directly maps the a:graphicData +// element. This element specifies the reference to a graphic object within the +// document. This graphic object is provided entirely by the document authors +// who choose to persist this data within the document. +type xlsxGraphicData struct { + URI string `xml:"uri,attr"` + Chart *xlsxChart `xml:"c:chart,omitempty"` +} + +// xlsxChart (Chart) directly maps the c:chart element. +type xlsxChart struct { + C string `xml:"xmlns:c,attr"` + RID string `xml:"r:id,attr"` + R string `xml:"xmlns:r,attr"` +} + +// xdrSp (Shape) directly maps the xdr:sp element. This element specifies the +// existence of a single shape. A shape can either be a preset or a custom +// geometry, defined using the SpreadsheetDrawingML framework. In addition to a +// geometry each shape can have both visual and non-visual properties attached. +// Text and corresponding styling information can also be attached to a shape. +// This shape is specified along with all other shapes within either the shape +// tree or group shape elements. +type xdrSp struct { + Macro string `xml:"macro,attr"` + Textlink string `xml:"textlink,attr"` + NvSpPr *xdrNvSpPr `xml:"xdr:nvSpPr"` + SpPr *xlsxSpPr `xml:"xdr:spPr"` + Style *xdrStyle `xml:"xdr:style"` + TxBody *xdrTxBody `xml:"xdr:txBody"` +} + +// xdrNvSpPr (Non-Visual Properties for a Shape) directly maps the xdr:nvSpPr +// element. This element specifies all non-visual properties for a shape. This +// element is a container for the non-visual identification properties, shape +// properties and application properties that are to be associated with a shape. +// This allows for additional information that does not affect the appearance of +// the shape to be stored. +type xdrNvSpPr struct { + CNvPr *xlsxCNvPr `xml:"xdr:cNvPr"` + CNvSpPr *xdrCNvSpPr `xml:"xdr:cNvSpPr"` +} + +// xdrCNvSpPr (Connection Non-Visual Shape Properties) directly maps the +// xdr:cNvSpPr element. This element specifies the set of non-visual properties +// for a connection shape. These properties specify all data about the +// connection shape which do not affect its display within a spreadsheet. 
+type xdrCNvSpPr struct { + TxBox bool `xml:"txBox,attr"` +} + +// xdrStyle (Shape Style) directly maps the xdr:style element. The element +// specifies the style that is applied to a shape and the corresponding +// references for each of the style components such as lines and fills. +type xdrStyle struct { + LnRef *aRef `xml:"a:lnRef"` + FillRef *aRef `xml:"a:fillRef"` + EffectRef *aRef `xml:"a:effectRef"` + FontRef *aFontRef `xml:"a:fontRef"` +} + +// aRef directly maps the a:lnRef, a:fillRef and a:effectRef element. +type aRef struct { + Idx int `xml:"idx,attr"` + ScrgbClr *aScrgbClr `xml:"a:scrgbClr"` + SchemeClr *attrValString `xml:"a:schemeClr"` + SrgbClr *attrValString `xml:"a:srgbClr"` +} + +// aScrgbClr (RGB Color Model - Percentage Variant) directly maps the a:scrgbClr +// element. This element specifies a color using the red, green, blue RGB color +// model. Each component, red, green, and blue is expressed as a percentage from +// 0% to 100%. A linear gamma of 1.0 is assumed. +type aScrgbClr struct { + R float64 `xml:"r,attr"` + G float64 `xml:"g,attr"` + B float64 `xml:"b,attr"` +} + +// aFontRef (Font Reference) directly maps the a:fontRef element. This element +// represents a reference to a themed font. When used it specifies which themed +// font to use along with a choice of color. +type aFontRef struct { + Idx string `xml:"idx,attr"` + SchemeClr *attrValString `xml:"a:schemeClr"` +} + +// xdrTxBody (Shape Text Body) directly maps the xdr:txBody element. This +// element specifies the existence of text to be contained within the +// corresponding shape. All visible text and visible text related properties are +// contained within this element. There can be multiple paragraphs and within +// paragraphs multiple runs of text. +type xdrTxBody struct { + BodyPr *aBodyPr `xml:"a:bodyPr"` + P []*aP `xml:"a:p"` +} + +// formatPicture directly maps the format settings of the picture. +type formatPicture struct { + FPrintsWithSheet bool `json:"print_obj"` + FLocksWithSheet bool `json:"locked"` + NoChangeAspect bool `json:"lock_aspect_ratio"` + OffsetX int `json:"x_offset"` + OffsetY int `json:"y_offset"` + XScale float64 `json:"x_scale"` + YScale float64 `json:"y_scale"` + Hyperlink string `json:"hyperlink"` + HyperlinkType string `json:"hyperlink_type"` + Positioning string `json:"positioning"` +} + +// formatShape directly maps the format settings of the shape. +type formatShape struct { + Type string `json:"type"` + Width int `json:"width"` + Height int `json:"height"` + Format formatPicture `json:"format"` + Color formatShapeColor `json:"color"` + Paragraph []formatShapeParagraph `json:"paragraph"` +} + +// formatShapeParagraph directly maps the format settings of the paragraph in +// the shape. +type formatShapeParagraph struct { + Font formatFont `json:"font"` + Text string `json:"text"` +} + +// formatShapeColor directly maps the color settings of the shape. +type formatShapeColor struct { + Line string `json:"line"` + Fill string `json:"fill"` + Effect string `json:"effect"` +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlSharedStrings.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlSharedStrings.go new file mode 100644 index 000000000..c8b54a013 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlSharedStrings.go @@ -0,0 +1,46 @@ +package excelize + +import "encoding/xml" + +// xlsxSST directly maps the sst element from the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main. 
String values may +// be stored directly inside spreadsheet cell elements; however, storing the +// same value inside multiple cell elements can result in very large worksheet +// Parts, possibly resulting in performance degradation. The Shared String Table +// is an indexed list of string values, shared across the workbook, which allows +// implementations to store values only once. +type xlsxSST struct { + XMLName xml.Name `xml:"http://schemas.openxmlformats.org/spreadsheetml/2006/main sst"` + Count int `xml:"count,attr"` + UniqueCount int `xml:"uniqueCount,attr"` + SI []xlsxSI `xml:"si"` +} + +// xlsxSI directly maps the si element from the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - currently I have +// not checked this for completeness - it does as much as I need. +type xlsxSI struct { + T string `xml:"t"` + R []xlsxR `xml:"r"` +} + +// xlsxR directly maps the r element from the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - currently I have +// not checked this for completeness - it does as much as I need. +type xlsxR struct { + RPr *xlsxRPr `xml:"rPr"` + T string `xml:"t"` +} + +// xlsxRPr (Run Properties) specifies a set of run properties which shall be +// applied to the contents of the parent run after all style formatting has been +// applied to the text. These properties are defined as direct formatting, since +// they are directly applied to the run and supersede any formatting from +// styles. +type xlsxRPr struct { + B string `xml:"b,omitempty"` + Sz *attrValFloat `xml:"sz"` + Color *xlsxColor `xml:"color"` + RFont *attrValString `xml:"rFont"` + Family *attrValInt `xml:"family"` +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlStyles.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlStyles.go new file mode 100644 index 000000000..05ff22bc4 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlStyles.go @@ -0,0 +1,356 @@ +package excelize + +import "encoding/xml" + +// xlsxStyleSheet directly maps the stylesheet element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - currently I have +// not checked it for completeness - it does as much as I need. +type xlsxStyleSheet struct { + XMLName xml.Name `xml:"http://schemas.openxmlformats.org/spreadsheetml/2006/main styleSheet"` + NumFmts *xlsxNumFmts `xml:"numFmts,omitempty"` + Fonts *xlsxFonts `xml:"fonts,omitempty"` + Fills *xlsxFills `xml:"fills,omitempty"` + Borders *xlsxBorders `xml:"borders,omitempty"` + CellStyleXfs *xlsxCellStyleXfs `xml:"cellStyleXfs,omitempty"` + CellXfs *xlsxCellXfs `xml:"cellXfs,omitempty"` + CellStyles *xlsxCellStyles `xml:"cellStyles,omitempty"` + Dxfs *xlsxDxfs `xml:"dxfs,omitempty"` + TableStyles *xlsxTableStyles `xml:"tableStyles,omitempty"` + Colors *xlsxStyleColors `xml:"colors,omitempty"` + ExtLst *xlsxExtLst `xml:"extLst"` +} + +// xlsxAlignment formatting information pertaining to text alignment in cells. +// There are a variety of choices for how text is aligned both horizontally and +// vertically, as well as indentation settings, and so on. 
+type xlsxAlignment struct { + Horizontal string `xml:"horizontal,attr,omitempty"` + Indent int `xml:"indent,attr,omitempty"` + JustifyLastLine bool `xml:"justifyLastLine,attr,omitempty"` + ReadingOrder uint64 `xml:"readingOrder,attr,omitempty"` + RelativeIndent int `xml:"relativeIndent,attr,omitempty"` + ShrinkToFit bool `xml:"shrinkToFit,attr,omitempty"` + TextRotation int `xml:"textRotation,attr,omitempty"` + Vertical string `xml:"vertical,attr,omitempty"` + WrapText bool `xml:"wrapText,attr,omitempty"` +} + +// xlsxProtection (Protection Properties) contains protection properties +// associated with the cell. Each cell has protection properties that can be +// set. The cell protection properties do not take effect unless the sheet has +// been protected. +type xlsxProtection struct { + Hidden bool `xml:"hidden,attr"` + Locked bool `xml:"locked,attr"` +} + +// xlsxLine directly maps the line style element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - currently I have +// not checked it for completeness - it does as much as I need. +type xlsxLine struct { + Style string `xml:"style,attr,omitempty"` + Color *xlsxColor `xml:"color,omitempty"` +} + +// xlsxColor is a common mapping used for both the fgColor and bgColor elements. +// Foreground color of the cell fill pattern. Cell fill patterns operate with +// two colors: a background color and a foreground color. These combine together +// to make a patterned cell fill. Background color of the cell fill pattern. +// Cell fill patterns operate with two colors: a background color and a +// foreground color. These combine together to make a patterned cell fill. +type xlsxColor struct { + Auto bool `xml:"auto,attr,omitempty"` + RGB string `xml:"rgb,attr,omitempty"` + Indexed int `xml:"indexed,attr,omitempty"` + Theme *int `xml:"theme,attr"` + Tint float64 `xml:"tint,attr,omitempty"` +} + +// xlsxFonts directly maps the font element. This element contains all font +// definitions for this workbook. +type xlsxFonts struct { + Count int `xml:"count,attr"` + Font []*xlsxFont `xml:"font"` +} + +// font directly maps the font element. +type font struct { + Name *attrValString `xml:"name"` + Charset *attrValInt `xml:"charset"` + Family *attrValInt `xml:"family"` + B bool `xml:"b,omitempty"` + I bool `xml:"i,omitempty"` + Strike bool `xml:"strike,omitempty"` + Outline bool `xml:"outline,omitempty"` + Shadow bool `xml:"shadow,omitempty"` + Condense bool `xml:"condense,omitempty"` + Extend bool `xml:"extend,omitempty"` + Color *xlsxColor `xml:"color"` + Sz *attrValInt `xml:"sz"` + U *attrValString `xml:"u"` + Scheme *attrValString `xml:"scheme"` +} + +// xlsxFont directly maps the font element. This element defines the properties +// for one of the fonts used in this workbook. +type xlsxFont struct { + Font string `xml:",innerxml"` +} + +// xlsxFills directly maps the fills element. This element defines the cell +// fills portion of the Styles part, consisting of a sequence of fill records. A +// cell fill consists of a background color, foreground color, and pattern to be +// applied across the cell. +type xlsxFills struct { + Count int `xml:"count,attr"` + Fill []*xlsxFill `xml:"fill,omitempty"` +} + +// xlsxFill directly maps the fill element. This element specifies fill +// formatting. 
+type xlsxFill struct { + PatternFill *xlsxPatternFill `xml:"patternFill,omitempty"` + GradientFill *xlsxGradientFill `xml:"gradientFill,omitempty"` +} + +// xlsxPatternFill directly maps the patternFill element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - currently I have +// not checked it for completeness - it does as much as I need. This element is +// used to specify cell fill information for pattern and solid color cell fills. +// For solid cell fills (no pattern), fgColor is used. For cell fills with +// patterns specified, then the cell fill color is specified by the bgColor +// element. +type xlsxPatternFill struct { + PatternType string `xml:"patternType,attr,omitempty"` + FgColor xlsxColor `xml:"fgColor,omitempty"` + BgColor xlsxColor `xml:"bgColor,omitempty"` +} + +// xlsxGradientFill defines a gradient-style cell fill. Gradient cell fills can +// use one or two colors as the end points of color interpolation. +type xlsxGradientFill struct { + Bottom float64 `xml:"bottom,attr,omitempty"` + Degree float64 `xml:"degree,attr,omitempty"` + Left float64 `xml:"left,attr,omitempty"` + Right float64 `xml:"right,attr,omitempty"` + Top float64 `xml:"top,attr,omitempty"` + Type string `xml:"type,attr,omitempty"` + Stop []*xlsxGradientFillStop `xml:"stop,omitempty"` +} + +// xlsxGradientFillStop directly maps the stop element. +type xlsxGradientFillStop struct { + Position float64 `xml:"position,attr"` + Color xlsxColor `xml:"color,omitempty"` +} + +// xlsxBorders directly maps the borders element. This element contains borders +// formatting information, specifying all border definitions for all cells in +// the workbook. +type xlsxBorders struct { + Count int `xml:"count,attr"` + Border []*xlsxBorder `xml:"border,omitempty"` +} + +// xlsxBorder directly maps the border element. Expresses a single set of cell +// border formats (left, right, top, bottom, diagonal). Color is optional. When +// missing, 'automatic' is implied. +type xlsxBorder struct { + DiagonalDown bool `xml:"diagonalDown,attr,omitempty"` + DiagonalUp bool `xml:"diagonalUp,attr,omitempty"` + Outline bool `xml:"outline,attr,omitempty"` + Left xlsxLine `xml:"left,omitempty"` + Right xlsxLine `xml:"right,omitempty"` + Top xlsxLine `xml:"top,omitempty"` + Bottom xlsxLine `xml:"bottom,omitempty"` + Diagonal xlsxLine `xml:"diagonal,omitempty"` +} + +// xlsxCellStyles directly maps the cellStyles element. This element contains +// the named cell styles, consisting of a sequence of named style records. A +// named cell style is a collection of direct or themed formatting (e.g., cell +// border, cell fill, and font type/size/style) grouped together into a single +// named style, and can be applied to a cell. +type xlsxCellStyles struct { + XMLName xml.Name `xml:"cellStyles"` + Count int `xml:"count,attr"` + CellStyle []*xlsxCellStyle `xml:"cellStyle,omitempty"` +} + +// xlsxCellStyle directly maps the cellStyle element. This element represents +// the name and related formatting records for a named cell style in this +// workbook. +type xlsxCellStyle struct { + XMLName xml.Name `xml:"cellStyle"` + BuiltInID *int `xml:"builtinId,attr,omitempty"` + CustomBuiltIn *bool `xml:"customBuiltin,attr,omitempty"` + Hidden *bool `xml:"hidden,attr,omitempty"` + ILevel *bool `xml:"iLevel,attr,omitempty"` + Name string `xml:"name,attr"` + XfID int `xml:"xfId,attr"` +} + +// xlsxCellStyleXfs directly maps the cellStyleXfs element. 
This element +// contains the master formatting records (xf's) which define the formatting for +// all named cell styles in this workbook. Master formatting records reference +// individual elements of formatting (e.g., number format, font definitions, +// cell fills, etc) by specifying a zero-based index into those collections. +// Master formatting records also specify whether to apply or ignore particular +// aspects of formatting. +type xlsxCellStyleXfs struct { + Count int `xml:"count,attr"` + Xf []xlsxXf `xml:"xf,omitempty"` +} + +// xlsxXf directly maps the xf element. A single xf element describes all of the +// formatting for a cell. +type xlsxXf struct { + ApplyAlignment bool `xml:"applyAlignment,attr"` + ApplyBorder bool `xml:"applyBorder,attr"` + ApplyFill bool `xml:"applyFill,attr"` + ApplyFont bool `xml:"applyFont,attr"` + ApplyNumberFormat bool `xml:"applyNumberFormat,attr"` + ApplyProtection bool `xml:"applyProtection,attr"` + BorderID int `xml:"borderId,attr"` + FillID int `xml:"fillId,attr"` + FontID int `xml:"fontId,attr"` + NumFmtID int `xml:"numFmtId,attr"` + PivotButton bool `xml:"pivotButton,attr,omitempty"` + QuotePrefix bool `xml:"quotePrefix,attr,omitempty"` + XfID *int `xml:"xfId,attr"` + Alignment *xlsxAlignment `xml:"alignment"` + Protection *xlsxProtection `xml:"protection"` +} + +// xlsxCellXfs directly maps the cellXfs element. This element contains the +// master formatting records (xf) which define the formatting applied to cells +// in this workbook. These records are the starting point for determining the +// formatting for a cell. Cells in the Sheet Part reference the xf records by +// zero-based index. +type xlsxCellXfs struct { + Count int `xml:"count,attr"` + Xf []xlsxXf `xml:"xf,omitempty"` +} + +// xlsxDxfs directly maps the dxfs element. This element contains the master +// differential formatting records (dxf's) which define formatting for all non- +// cell formatting in this workbook. Whereas xf records fully specify a +// particular aspect of formatting (e.g., cell borders) by referencing those +// formatting definitions elsewhere in the Styles part, dxf records specify +// incremental (or differential) aspects of formatting directly inline within +// the dxf element. The dxf formatting is to be applied on top of or in addition +// to any formatting already present on the object using the dxf record. +type xlsxDxfs struct { + Count int `xml:"count,attr"` + Dxfs []*xlsxDxf `xml:"dxf,omitempty"` +} + +// xlsxDxf directly maps the dxf element. A single dxf record, expressing +// incremental formatting to be applied. +type xlsxDxf struct { + Dxf string `xml:",innerxml"` +} + +// dxf directly maps the dxf element. +type dxf struct { + Font *font `xml:"font"` + NumFmt *xlsxNumFmt `xml:"numFmt"` + Fill *xlsxFill `xml:"fill"` + Alignment *xlsxAlignment `xml:"alignment"` + Border *xlsxBorder `xml:"border"` + Protection *xlsxProtection `xml:"protection"` + ExtLst *xlsxExt `xml:"extLst"` +} + +// xlsxTableStyles directly maps the tableStyles element. This element +// represents a collection of Table style definitions for Table styles and +// PivotTable styles used in this workbook. It consists of a sequence of +// tableStyle records, each defining a single Table style. 
+type xlsxTableStyles struct { + Count int `xml:"count,attr"` + DefaultPivotStyle string `xml:"defaultPivotStyle,attr"` + DefaultTableStyle string `xml:"defaultTableStyle,attr"` + TableStyles []*xlsxTableStyle `xml:"tableStyle,omitempty"` +} + +// xlsxTableStyle directly maps the tableStyle element. This element represents +// a single table style definition that indicates how a spreadsheet application +// should format and display a table. +type xlsxTableStyle struct { + Name string `xml:"name,attr,omitempty"` + Pivot int `xml:"pivot,attr"` + Count int `xml:"count,attr,omitempty"` + Table bool `xml:"table,attr,omitempty"` + TableStyleElement string `xml:",innerxml"` +} + +// xlsxNumFmts directly maps the numFmts element. This element defines the +// number formats in this workbook, consisting of a sequence of numFmt records, +// where each numFmt record defines a particular number format, indicating how +// to format and render the numeric value of a cell. +type xlsxNumFmts struct { + Count int `xml:"count,attr"` + NumFmt []*xlsxNumFmt `xml:"numFmt,omitempty"` +} + +// xlsxNumFmt directly maps the numFmt element. This element specifies number +// format properties which indicate how to format and render the numeric value +// of a cell. +type xlsxNumFmt struct { + NumFmtID int `xml:"numFmtId,attr,omitempty"` + FormatCode string `xml:"formatCode,attr,omitempty"` +} + +// xlsxStyleColors directly maps the colors element. Color information +// associated with this stylesheet. This collection is written whenever the +// legacy color palette has been modified (backwards compatibility settings) or +// a custom color has been selected while using this workbook. +type xlsxStyleColors struct { + Color string `xml:",innerxml"` +} + +// formatFont directly maps the styles settings of the fonts. +type formatFont struct { + Bold bool `json:"bold"` + Italic bool `json:"italic"` + Underline string `json:"underline"` + Family string `json:"family"` + Size int `json:"size"` + Color string `json:"color"` +} + +// formatStyle directly maps the styles settings of the cells. +type formatStyle struct { + Border []struct { + Type string `json:"type"` + Color string `json:"color"` + Style int `json:"style"` + } `json:"border"` + Fill struct { + Type string `json:"type"` + Pattern int `json:"pattern"` + Color []string `json:"color"` + Shading int `json:"shading"` + } `json:"fill"` + Font *formatFont `json:"font"` + Alignment *struct { + Horizontal string `json:"horizontal"` + Indent int `json:"indent"` + JustifyLastLine bool `json:"justify_last_line"` + ReadingOrder uint64 `json:"reading_order"` + RelativeIndent int `json:"relative_indent"` + ShrinkToFit bool `json:"shrink_to_fit"` + TextRotation int `json:"text_rotation"` + Vertical string `json:"vertical"` + WrapText bool `json:"wrap_text"` + } `json:"alignment"` + Protection *struct { + Hidden bool `json:"hidden"` + Locked bool `json:"locked"` + } `json:"protection"` + NumFmt int `json:"number_format"` + DecimalPlaces int `json:"decimal_places"` + CustomNumFmt *string `json:"custom_number_format"` + Lang string `json:"lang"` + NegRed bool `json:"negred"` +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlTable.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlTable.go new file mode 100644 index 000000000..b23835026 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlTable.go @@ -0,0 +1,205 @@ +package excelize + +import "encoding/xml" + +// xlsxTable directly maps the table element. 
A table helps organize and provide +// structure to lists of information in a worksheet. Tables have clearly labeled +// columns, rows, and data regions. Tables make it easier for users to sort, +// analyze, format, manage, add, and delete information. This element is the +// root element for a table that is not a single cell XML table. +type xlsxTable struct { + XMLName xml.Name `xml:"table"` + XMLNS string `xml:"xmlns,attr"` + DataCellStyle string `xml:"dataCellStyle,attr,omitempty"` + DataDxfID int `xml:"dataDxfId,attr,omitempty"` + DisplayName string `xml:"displayName,attr,omitempty"` + HeaderRowBorderDxfID int `xml:"headerRowBorderDxfId,attr,omitempty"` + HeaderRowCellStyle string `xml:"headerRowCellStyle,attr,omitempty"` + HeaderRowCount int `xml:"headerRowCount,attr,omitempty"` + HeaderRowDxfID int `xml:"headerRowDxfId,attr,omitempty"` + ID int `xml:"id,attr"` + InsertRow bool `xml:"insertRow,attr,omitempty"` + InsertRowShift bool `xml:"insertRowShift,attr,omitempty"` + Name string `xml:"name,attr"` + Published bool `xml:"published,attr,omitempty"` + Ref string `xml:"ref,attr"` + TotalsRowCount int `xml:"totalsRowCount,attr,omitempty"` + TotalsRowDxfID int `xml:"totalsRowDxfId,attr,omitempty"` + TotalsRowShown bool `xml:"totalsRowShown,attr"` + AutoFilter *xlsxAutoFilter `xml:"autoFilter"` + TableColumns *xlsxTableColumns `xml:"tableColumns"` + TableStyleInfo *xlsxTableStyleInfo `xml:"tableStyleInfo"` +} + +// xlsxAutoFilter temporarily hides rows based on a filter criteria, which is +// applied column by column to a table of data in the worksheet. This collection +// expresses AutoFilter settings. +type xlsxAutoFilter struct { + Ref string `xml:"ref,attr"` + FilterColumn *xlsxFilterColumn `xml:"filterColumn"` +} + +// xlsxFilterColumn directly maps the filterColumn element. The filterColumn +// collection identifies a particular column in the AutoFilter range and +// specifies filter information that has been applied to this column. If a +// column in the AutoFilter range has no criteria specified, then there is no +// corresponding filterColumn collection expressed for that column. +type xlsxFilterColumn struct { + ColID int `xml:"colId,attr"` + HiddenButton bool `xml:"hiddenButton,attr,omitempty"` + ShowButton bool `xml:"showButton,attr,omitempty"` + CustomFilters *xlsxCustomFilters `xml:"customFilters"` + Filters *xlsxFilters `xml:"filters"` + ColorFilter *xlsxColorFilter `xml:"colorFilter"` + DynamicFilter *xlsxDynamicFilter `xml:"dynamicFilter"` + IconFilter *xlsxIconFilter `xml:"iconFilter"` + Top10 *xlsxTop10 `xml:"top10"` +} + +// xlsxCustomFilters directly maps the customFilters element. When there is more +// than one custom filter criteria to apply (an 'and' or 'or' joining two +// criteria), then this element groups the customFilter elements together. +type xlsxCustomFilters struct { + And bool `xml:"and,attr,omitempty"` + CustomFilter []*xlsxCustomFilter `xml:"customFilter"` +} + +// xlsxCustomFilter directly maps the customFilter element. A custom AutoFilter +// specifies an operator and a value. There can be at most two customFilters +// specified, and in that case the parent element specifies whether the two +// conditions are joined by 'and' or 'or'. For any cells whose values do not +// meet the specified criteria, the corresponding rows shall be hidden from view +// when the filter is applied. 
+type xlsxCustomFilter struct { + Operator string `xml:"operator,attr,omitempty"` + Val string `xml:"val,attr,omitempty"` +} + +// xlsxFilters directly maps the filters (Filter Criteria) element. When +// multiple values are chosen to filter by, or when a group of date values are +// chosen to filter by, this element groups those criteria together. +type xlsxFilters struct { + Blank bool `xml:"blank,attr,omitempty"` + CalendarType string `xml:"calendarType,attr,omitempty"` + Filter []*xlsxFilter `xml:"filter"` + DateGroupItem []*xlsxDateGroupItem `xml:"dateGroupItem"` +} + +// xlsxFilter directly maps the filter element. This element expresses a filter +// criteria value. +type xlsxFilter struct { + Val string `xml:"val,attr,omitempty"` +} + +// xlsxColorFilter directly maps the colorFilter element. This element specifies +// the color to filter by and whether to use the cell's fill or font color in +// the filter criteria. If the cell's font or fill color does not match the +// color specified in the criteria, the rows corresponding to those cells are +// hidden from view. +type xlsxColorFilter struct { + CellColor bool `xml:"cellColor,attr"` + DxfID int `xml:"dxfId,attr"` +} + +// xlsxDynamicFilter directly maps the dynamicFilter element. This collection +// specifies dynamic filter criteria. These criteria are considered dynamic +// because they can change, either with the data itself (e.g., "above average") +// or with the current system date (e.g., show values for "today"). For any +// cells whose values do not meet the specified criteria, the corresponding rows +// shall be hidden from view when the filter is applied. +type xlsxDynamicFilter struct { + MaxValISO string `xml:"maxValIso,attr,omitempty"` + Type string `xml:"type,attr,omitempty"` + Val float64 `xml:"val,attr,omitempty"` + ValISO string `xml:"valIso,attr,omitempty"` +} + +// xlsxIconFilter directly maps the iconFilter element. This element specifies +// the icon set and particular icon within that set to filter by. For any cells +// whose icon does not match the specified criteria, the corresponding rows +// shall be hidden from view when the filter is applied. +type xlsxIconFilter struct { + IconID int `xml:"iconId,attr"` + IconSet string `xml:"iconSet,attr,omitempty"` +} + +// xlsxTop10 directly maps the top10 element. This element specifies the top N +// (percent or number of items) to filter by. +type xlsxTop10 struct { + FilterVal float64 `xml:"filterVal,attr,omitempty"` + Percent bool `xml:"percent,attr,omitempty"` + Top bool `xml:"top,attr"` + Val float64 `xml:"val,attr,omitempty"` +} + +// xlsxDateGroupItem directly maps the dateGroupItem element. This collection is +// used to express a group of dates or times which are used in an AutoFilter +// criteria. [Note: See parent element for an example. end note] Values are +// always written in the calendar type of the first date encountered in the +// filter range, so that all subsequent dates, even when formatted or +// represented by other calendar types, can be correctly compared for the +// purposes of filtering. 
+type xlsxDateGroupItem struct { + DateTimeGrouping string `xml:"dateTimeGrouping,attr,omitempty"` + Day int `xml:"day,attr,omitempty"` + Hour int `xml:"hour,attr,omitempty"` + Minute int `xml:"minute,attr,omitempty"` + Month int `xml:"month,attr,omitempty"` + Second int `xml:"second,attr,omitempty"` + Year int `xml:"year,attr,omitempty"` +} + +// xlsxTableColumns directly maps the element representing the collection of all +// table columns for this table. +type xlsxTableColumns struct { + Count int `xml:"count,attr"` + TableColumn []*xlsxTableColumn `xml:"tableColumn"` +} + +// xlsxTableColumn directly maps the element representing a single column for +// this table. +type xlsxTableColumn struct { + DataCellStyle string `xml:"dataCellStyle,attr,omitempty"` + DataDxfID int `xml:"dataDxfId,attr,omitempty"` + HeaderRowCellStyle string `xml:"headerRowCellStyle,attr,omitempty"` + HeaderRowDxfID int `xml:"headerRowDxfId,attr,omitempty"` + ID int `xml:"id,attr"` + Name string `xml:"name,attr"` + QueryTableFieldID int `xml:"queryTableFieldId,attr,omitempty"` + TotalsRowCellStyle string `xml:"totalsRowCellStyle,attr,omitempty"` + TotalsRowDxfID int `xml:"totalsRowDxfId,attr,omitempty"` + TotalsRowFunction string `xml:"totalsRowFunction,attr,omitempty"` + TotalsRowLabel string `xml:"totalsRowLabel,attr,omitempty"` + UniqueName string `xml:"uniqueName,attr,omitempty"` +} + +// xlsxTableStyleInfo directly maps the tableStyleInfo element. This element +// describes which style is used to display this table, and specifies which +// portions of the table have the style applied. +type xlsxTableStyleInfo struct { + Name string `xml:"name,attr,omitempty"` + ShowFirstColumn bool `xml:"showFirstColumn,attr"` + ShowLastColumn bool `xml:"showLastColumn,attr"` + ShowRowStripes bool `xml:"showRowStripes,attr"` + ShowColumnStripes bool `xml:"showColumnStripes,attr"` +} + +// formatTable directly maps the format settings of the table. +type formatTable struct { + TableName string `json:"table_name"` + TableStyle string `json:"table_style"` + ShowFirstColumn bool `json:"show_first_column"` + ShowLastColumn bool `json:"show_last_column"` + ShowRowStripes bool `json:"show_row_stripes"` + ShowColumnStripes bool `json:"show_column_stripes"` +} + +// formatAutoFilter directly maps the auto filter settings. +type formatAutoFilter struct { + Column string `json:"column"` + Expression string `json:"expression"` + FilterList []struct { + Column string `json:"column"` + Value []int `json:"value"` + } `json:"filter_list"` +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlTheme.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlTheme.go new file mode 100644 index 000000000..d2ab343ce --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlTheme.go @@ -0,0 +1,140 @@ +package excelize + +import "encoding/xml" + +// xlsxTheme directly maps the theme element in the namespace +// http://schemas.openxmlformats.org/drawingml/2006/main +type xlsxTheme struct { + ThemeElements xlsxThemeElements `xml:"themeElements"` + ObjectDefaults xlsxObjectDefaults `xml:"objectDefaults"` + ExtraClrSchemeLst xlsxExtraClrSchemeLst `xml:"extraClrSchemeLst"` + ExtLst *xlsxExtLst `xml:"extLst"` +} + +// objectDefaults element allows for the definition of default shape, line, +// and textbox formatting properties. An application can use this information +// to format a shape (or text) initially on insertion into a document. 
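+//
+// Because the struct below only captures the raw inner XML, reading it could
+// be sketched as (themeXML is an assumed []byte holding xl/theme/theme1.xml,
+// error handling elided):
+//
+//    var theme xlsxTheme
+//    if err := xml.Unmarshal(themeXML, &theme); err != nil { ... }
+//    // theme.ObjectDefaults.ObjectDefaults now holds the raw drawingML markup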
+type xlsxObjectDefaults struct { + ObjectDefaults string `xml:",innerxml"` +} + +// xlsxExtraClrSchemeLst element is a container for the list of extra color +// schemes present in a document. +type xlsxExtraClrSchemeLst struct { + ExtraClrSchemeLst string `xml:",innerxml"` +} + +// xlsxThemeElements directly maps the element defines the theme formatting +// options for the theme and is the workhorse of the theme. This is where the +// bulk of the shared theme information is contained and used by a document. +// This element contains the color scheme, font scheme, and format scheme +// elements which define the different formatting aspects of what a theme +// defines. +type xlsxThemeElements struct { + ClrScheme xlsxClrScheme `xml:"clrScheme"` + FontScheme xlsxFontScheme `xml:"fontScheme"` + FmtScheme xlsxFmtScheme `xml:"fmtScheme"` +} + +// xlsxClrScheme element specifies the theme color, stored in the document's +// Theme part to which the value of this theme color shall be mapped. This +// mapping enables multiple theme colors to be chained together. +type xlsxClrScheme struct { + Name string `xml:"name,attr"` + Children []xlsxClrSchemeEl `xml:",any"` +} + +// xlsxFontScheme element defines the font scheme within the theme. The font +// scheme consists of a pair of major and minor fonts for which to use in a +// document. The major font corresponds well with the heading areas of a +// document, and the minor font corresponds well with the normal text or +// paragraph areas. +type xlsxFontScheme struct { + Name string `xml:"name,attr"` + MajorFont xlsxMajorFont `xml:"majorFont"` + MinorFont xlsxMinorFont `xml:"minorFont"` + ExtLst *xlsxExtLst `xml:"extLst"` +} + +// xlsxMajorFont element defines the set of major fonts which are to be used +// under different languages or locals. +type xlsxMajorFont struct { + Children []xlsxFontSchemeEl `xml:",any"` +} + +// xlsxMinorFont element defines the set of minor fonts that are to be used +// under different languages or locals. +type xlsxMinorFont struct { + Children []xlsxFontSchemeEl `xml:",any"` +} + +// xlsxFmtScheme element contains the background fill styles, effect styles, +// fill styles, and line styles which define the style matrix for a theme. The +// style matrix consists of subtle, moderate, and intense fills, lines, and +// effects. The background fills are not generally thought of to directly be +// associated with the matrix, but do play a role in the style of the overall +// document. Usually, a given object chooses a single line style, a single +// fill style, and a single effect style in order to define the overall final +// look of the object. +type xlsxFmtScheme struct { + Name string `xml:"name,attr"` + FillStyleLst xlsxFillStyleLst `xml:"fillStyleLst"` + LnStyleLst xlsxLnStyleLst `xml:"lnStyleLst"` + EffectStyleLst xlsxEffectStyleLst `xml:"effectStyleLst"` + BgFillStyleLst xlsxBgFillStyleLst `xml:"bgFillStyleLst"` +} + +// xlsxFillStyleLst element defines a set of three fill styles that are used +// within a theme. The three fill styles are arranged in order from subtle to +// moderate to intense. +type xlsxFillStyleLst struct { + FillStyleLst string `xml:",innerxml"` +} + +// xlsxLnStyleLst element defines a list of three line styles for use within a +// theme. The three line styles are arranged in order from subtle to moderate +// to intense versions of lines. This list makes up part of the style matrix. 
+type xlsxLnStyleLst struct { + LnStyleLst string `xml:",innerxml"` +} + +// xlsxEffectStyleLst element defines a set of three effect styles that create +// the effect style list for a theme. The effect styles are arranged in order +// of subtle to moderate to intense. +type xlsxEffectStyleLst struct { + EffectStyleLst string `xml:",innerxml"` +} + +// xlsxBgFillStyleLst element defines a list of background fills that are +// used within a theme. The background fills consist of three fills, arranged +// in order from subtle to moderate to intense. +type xlsxBgFillStyleLst struct { + BgFillStyleLst string `xml:",innerxml"` +} + +// xlsxClrScheme maps to children of the clrScheme element in the namespace +// http://schemas.openxmlformats.org/drawingml/2006/main - currently I have +// not checked it for completeness - it does as much as I need. +type xlsxClrSchemeEl struct { + XMLName xml.Name + SysClr *xlsxSysClr `xml:"sysClr"` + SrgbClr *attrValString `xml:"srgbClr"` +} + +// xlsxFontSchemeEl directly maps the major and minor font of the style's font +// scheme. +type xlsxFontSchemeEl struct { + XMLName xml.Name + Script string `xml:"script,attr,omitempty"` + Typeface string `xml:"typeface,attr"` + Panose string `xml:"panose,attr,omitempty"` + PitchFamily string `xml:"pitchFamily,attr,omitempty"` + Charset string `xml:"charset,attr,omitempty"` +} + +// xlsxSysClr element specifies a color bound to predefined operating system +// elements. +type xlsxSysClr struct { + Val string `xml:"val,attr"` + LastClr string `xml:"lastClr,attr"` +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlWorkbook.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlWorkbook.go new file mode 100644 index 000000000..816d5a461 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlWorkbook.go @@ -0,0 +1,282 @@ +package excelize + +import "encoding/xml" + +// xmlxWorkbookRels contains xmlxWorkbookRelations which maps sheet id and sheet XML. +type xlsxWorkbookRels struct { + XMLName xml.Name `xml:"http://schemas.openxmlformats.org/package/2006/relationships Relationships"` + Relationships []xlsxWorkbookRelation `xml:"Relationship"` +} + +// xmlxWorkbookRelation maps sheet id and xl/worksheets/_rels/sheet%d.xml.rels +type xlsxWorkbookRelation struct { + ID string `xml:"Id,attr"` + Target string `xml:",attr"` + Type string `xml:",attr"` + TargetMode string `xml:",attr,omitempty"` +} + +// xlsxWorkbook directly maps the workbook element from the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - currently I have +// not checked it for completeness - it does as much as I need. +type xlsxWorkbook struct { + XMLName xml.Name `xml:"http://schemas.openxmlformats.org/spreadsheetml/2006/main workbook"` + FileVersion *xlsxFileVersion `xml:"fileVersion"` + WorkbookPr *xlsxWorkbookPr `xml:"workbookPr"` + WorkbookProtection *xlsxWorkbookProtection `xml:"workbookProtection"` + BookViews xlsxBookViews `xml:"bookViews"` + Sheets xlsxSheets `xml:"sheets"` + ExternalReferences *xlsxExternalReferences `xml:"externalReferences"` + DefinedNames *xlsxDefinedNames `xml:"definedNames"` + CalcPr *xlsxCalcPr `xml:"calcPr"` + CustomWorkbookViews *xlsxCustomWorkbookViews `xml:"customWorkbookViews"` + PivotCaches *xlsxPivotCaches `xml:"pivotCaches"` + ExtLst *xlsxExtLst `xml:"extLst"` + FileRecoveryPr *xlsxFileRecoveryPr `xml:"fileRecoveryPr"` +} + +// xlsxFileRecoveryPr maps sheet recovery information. 
This element defines +// properties that track the state of the workbook file, such as whether the +// file was saved during a crash, or whether it should be opened in auto-recover +// mode. +type xlsxFileRecoveryPr struct { + AutoRecover bool `xml:"autoRecover,attr,omitempty"` + CrashSave bool `xml:"crashSave,attr,omitempty"` + DataExtractLoad bool `xml:"dataExtractLoad,attr,omitempty"` + RepairLoad bool `xml:"repairLoad,attr,omitempty"` +} + +// xlsxWorkbookProtection directly maps the workbookProtection element. This +// element specifies options for protecting data in the workbook. Applications +// might use workbook protection to prevent anyone from accidentally changing, +// moving, or deleting important data. This protection can be ignored by +// applications which choose not to support this optional protection mechanism. +// When a password is to be hashed and stored in this element, it shall be +// hashed as defined below, starting from a UTF-16LE encoded string value. If +// there is a leading BOM character (U+FEFF) in the encoded password it is +// removed before hash calculation. +type xlsxWorkbookProtection struct { + LockRevision bool `xml:"lockRevision,attr,omitempty"` + LockStructure bool `xml:"lockStructure,attr,omitempty"` + LockWindows bool `xml:"lockWindows,attr,omitempty"` + RevisionsAlgorithmName string `xml:"revisionsAlgorithmName,attr,omitempty"` + RevisionsHashValue string `xml:"revisionsHashValue,attr,omitempty"` + RevisionsSaltValue string `xml:"revisionsSaltValue,attr,omitempty"` + RevisionsSpinCount int `xml:"revisionsSpinCount,attr,omitempty"` + WorkbookAlgorithmName string `xml:"workbookAlgorithmName,attr,omitempty"` + WorkbookHashValue string `xml:"workbookHashValue,attr,omitempty"` + WorkbookSaltValue string `xml:"workbookSaltValue,attr,omitempty"` + WorkbookSpinCount int `xml:"workbookSpinCount,attr,omitempty"` +} + +// xlsxFileVersion directly maps the fileVersion element. This element defines +// properties that track which version of the application accessed the data and +// source code contained in the file. +type xlsxFileVersion struct { + AppName string `xml:"appName,attr,omitempty"` + CodeName string `xml:"codeName,attr,omitempty"` + LastEdited string `xml:"lastEdited,attr,omitempty"` + LowestEdited string `xml:"lowestEdited,attr,omitempty"` + RupBuild string `xml:"rupBuild,attr,omitempty"` +} + +// xlsxWorkbookPr directly maps the workbookPr element from the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main This element +// defines a collection of workbook properties. 
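+//
+// A sketch of populating a couple of these properties (wb is an assumed
+// xlsxWorkbook value, the field values are only illustrative):
+//
+//    wb.WorkbookPr = &xlsxWorkbookPr{
+//        CodeName:      "ThisWorkbook",
+//        Date1904:      false, // use the 1900 date system
+//        FilterPrivacy: true,
+//    }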
+type xlsxWorkbookPr struct { + AllowRefreshQuery bool `xml:"allowRefreshQuery,attr,omitempty"` + AutoCompressPictures bool `xml:"autoCompressPictures,attr,omitempty"` + BackupFile bool `xml:"backupFile,attr,omitempty"` + CheckCompatibility bool `xml:"checkCompatibility,attr,omitempty"` + CodeName string `xml:"codeName,attr,omitempty"` + Date1904 bool `xml:"date1904,attr,omitempty"` + DefaultThemeVersion string `xml:"defaultThemeVersion,attr,omitempty"` + FilterPrivacy bool `xml:"filterPrivacy,attr,omitempty"` + HidePivotFieldList bool `xml:"hidePivotFieldList,attr,omitempty"` + PromptedSolutions bool `xml:"promptedSolutions,attr,omitempty"` + PublishItems bool `xml:"publishItems,attr,omitempty"` + RefreshAllConnections bool `xml:"refreshAllConnections,attr,omitempty"` + SaveExternalLinkValues bool `xml:"saveExternalLinkValues,attr,omitempty"` + ShowBorderUnselectedTables bool `xml:"showBorderUnselectedTables,attr,omitempty"` + ShowInkAnnotation bool `xml:"showInkAnnotation,attr,omitempty"` + ShowObjects string `xml:"showObjects,attr,omitempty"` + ShowPivotChartFilter bool `xml:"showPivotChartFilter,attr,omitempty"` + UpdateLinks string `xml:"updateLinks,attr,omitempty"` +} + +// xlsxBookViews directly maps the bookViews element. This element specifies the +// collection of workbook views of the enclosing workbook. Each view can specify +// a window position, filter options, and other configurations. There is no +// limit on the number of workbook views that can be defined for a workbook. +type xlsxBookViews struct { + WorkBookView []xlsxWorkBookView `xml:"workbookView"` +} + +// xlsxWorkBookView directly maps the workbookView element from the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main This element +// specifies a single Workbook view. +type xlsxWorkBookView struct { + ActiveTab int `xml:"activeTab,attr,omitempty"` + AutoFilterDateGrouping bool `xml:"autoFilterDateGrouping,attr,omitempty"` + FirstSheet int `xml:"firstSheet,attr,omitempty"` + Minimized bool `xml:"minimized,attr,omitempty"` + ShowHorizontalScroll bool `xml:"showHorizontalScroll,attr,omitempty"` + ShowSheetTabs bool `xml:"showSheetTabs,attr,omitempty"` + ShowVerticalScroll bool `xml:"showVerticalScroll,attr,omitempty"` + TabRatio int `xml:"tabRatio,attr,omitempty"` + Visibility string `xml:"visibility,attr,omitempty"` + WindowHeight int `xml:"windowHeight,attr,omitempty"` + WindowWidth int `xml:"windowWidth,attr,omitempty"` + XWindow string `xml:"xWindow,attr,omitempty"` + YWindow string `xml:"yWindow,attr,omitempty"` +} + +// xlsxSheets directly maps the sheets element from the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main. +type xlsxSheets struct { + Sheet []xlsxSheet `xml:"sheet"` +} + +// xlsxSheet directly maps the sheet element from the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - currently I have +// not checked it for completeness - it does as much as I need. +type xlsxSheet struct { + Name string `xml:"name,attr,omitempty"` + SheetID string `xml:"sheetId,attr,omitempty"` + ID string `xml:"http://schemas.openxmlformats.org/officeDocument/2006/relationships id,attr,omitempty"` + State string `xml:"state,attr,omitempty"` +} + +// xlsxExternalReferences directly maps the externalReferences element of the +// external workbook references part. 
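+//
+// A workbook that links to a single external workbook would carry one
+// relationship ID here, e.g. (wb is an assumed xlsxWorkbook value and "rId4"
+// is only a placeholder):
+//
+//    wb.ExternalReferences = &xlsxExternalReferences{
+//        ExternalReference: []xlsxExternalReference{{RID: "rId4"}},
+//    }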
+type xlsxExternalReferences struct { + ExternalReference []xlsxExternalReference `xml:"externalReference"` +} + +// xlsxExternalReference directly maps the externalReference element of the +// external workbook references part. +type xlsxExternalReference struct { + RID string `xml:"http://schemas.openxmlformats.org/officeDocument/2006/relationships id,attr,omitempty"` +} + +// xlsxPivotCaches element enumerates pivot cache definition parts used by pivot +// tables and formulas in this workbook. +type xlsxPivotCaches struct { + PivotCache []xlsxPivotCache `xml:"pivotCache"` +} + +// xlsxPivotCache directly maps the pivotCache element. +type xlsxPivotCache struct { + CacheID int `xml:"cacheId,attr,omitempty"` + RID string `xml:"http://schemas.openxmlformats.org/officeDocument/2006/relationships id,attr,omitempty"` +} + +// extLst element provides a convention for extending spreadsheetML in +// predefined locations. The locations shall be denoted with the extLst element, +// and are called extension lists. Extension list locations within the markup +// document are specified in the markup specification and can be used to store +// extensions to the markup specification, whether those are future version +// extensions of the markup specification or are private extensions implemented +// independently from the markup specification. Markup within an extension might +// not be understood by a consumer. +type xlsxExtLst struct { + Ext string `xml:",innerxml"` +} + +// xlsxDefinedNames directly maps the definedNames element. This element defines +// the collection of defined names for this workbook. Defined names are +// descriptive names to represent cells, ranges of cells, formulas, or constant +// values. Defined names can be used to represent a range on any worksheet. +type xlsxDefinedNames struct { + DefinedName []xlsxDefinedName `xml:"definedName"` +} + +// xlsxDefinedName directly maps the definedName element from the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main This element +// defines a defined name within this workbook. A defined name is descriptive +// text that is used to represents a cell, range of cells, formula, or constant +// value. For a descriptions of the attributes see https://msdn.microsoft.com/en-us/library/office/documentformat.openxml.spreadsheet.definedname.aspx +type xlsxDefinedName struct { + Comment string `xml:"comment,attr,omitempty"` + CustomMenu string `xml:"customMenu,attr,omitempty"` + Description string `xml:"description,attr,omitempty"` + Function bool `xml:"function,attr,omitempty"` + FunctionGroupID int `xml:"functionGroupId,attr,omitempty"` + Help string `xml:"help,attr,omitempty"` + Hidden bool `xml:"hidden,attr,omitempty"` + LocalSheetID *int `xml:"localSheetId,attr"` + Name string `xml:"name,attr,omitempty"` + PublishToServer bool `xml:"publishToServer,attr,omitempty"` + ShortcutKey string `xml:"shortcutKey,attr,omitempty"` + StatusBar string `xml:"statusBar,attr,omitempty"` + VbProcedure bool `xml:"vbProcedure,attr,omitempty"` + WorkbookParameter bool `xml:"workbookParameter,attr,omitempty"` + Xlm bool `xml:"xml,attr,omitempty"` + Data string `xml:",chardata"` +} + +// xlsxCalcPr directly maps the calcPr element. This element defines the +// collection of properties the application uses to record calculation status +// and details. Calculation is the process of computing formulas and then +// displaying the results as values in the cells that contain the formulas. 
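+//
+// For instance, asking the consuming application to recalculate all formulas
+// when the workbook is opened could be sketched as (wb is an assumed
+// xlsxWorkbook value, the values are illustrative):
+//
+//    wb.CalcPr = &xlsxCalcPr{
+//        CalcMode:       "auto",
+//        FullCalcOnLoad: true,
+//    }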
+type xlsxCalcPr struct { + CalcCompleted bool `xml:"calcCompleted,attr,omitempty"` + CalcID string `xml:"calcId,attr,omitempty"` + CalcMode string `xml:"calcMode,attr,omitempty"` + CalcOnSave bool `xml:"calcOnSave,attr,omitempty"` + ConcurrentCalc *bool `xml:"concurrentCalc,attr"` + ConcurrentManualCount int `xml:"concurrentManualCount,attr,omitempty"` + ForceFullCalc bool `xml:"forceFullCalc,attr,omitempty"` + FullCalcOnLoad bool `xml:"fullCalcOnLoad,attr,omitempty"` + FullPrecision bool `xml:"fullPrecision,attr,omitempty"` + Iterate bool `xml:"iterate,attr,omitempty"` + IterateCount int `xml:"iterateCount,attr,omitempty"` + IterateDelta float64 `xml:"iterateDelta,attr,omitempty"` + RefMode string `xml:"refMode,attr,omitempty"` +} + +// xlsxCustomWorkbookViews defines the collection of custom workbook views that +// are defined for this workbook. A customWorkbookView is similar in concept to +// a workbookView in that its attributes contain settings related to the way +// that the workbook should be displayed on a screen by a spreadsheet +// application. +type xlsxCustomWorkbookViews struct { + CustomWorkbookView []xlsxCustomWorkbookView `xml:"customWorkbookView"` +} + +// xlsxCustomWorkbookView directly maps the customWorkbookView element. This +// element specifies a single custom workbook view. A custom workbook view +// consists of a set of display and print settings that you can name and apply +// to a workbook. You can create more than one custom workbook view of the same +// workbook. Custom Workbook Views are not required in order to construct a +// valid SpreadsheetML document, and are not necessary if the document is never +// displayed by a spreadsheet application, or if the spreadsheet application has +// a fixed display for workbooks. However, if a spreadsheet application chooses +// to implement configurable display modes, the customWorkbookView element +// should be used to persist the settings for those display modes. 
+type xlsxCustomWorkbookView struct { + ActiveSheetID *int `xml:"activeSheetId,attr"` + AutoUpdate *bool `xml:"autoUpdate,attr"` + ChangesSavedWin *bool `xml:"changesSavedWin,attr"` + GUID *string `xml:"guid,attr"` + IncludeHiddenRowCol *bool `xml:"includeHiddenRowCol,attr"` + IncludePrintSettings *bool `xml:"includePrintSettings,attr"` + Maximized *bool `xml:"maximized,attr"` + MergeInterval int `xml:"mergeInterval,attr"` + Minimized *bool `xml:"minimized,attr"` + Name *string `xml:"name,attr"` + OnlySync *bool `xml:"onlySync,attr"` + PersonalView *bool `xml:"personalView,attr"` + ShowComments *string `xml:"showComments,attr"` + ShowFormulaBar *bool `xml:"showFormulaBar,attr"` + ShowHorizontalScroll *bool `xml:"showHorizontalScroll,attr"` + ShowObjects *string `xml:"showObjects,attr"` + ShowSheetTabs *bool `xml:"showSheetTabs,attr"` + ShowStatusbar *bool `xml:"showStatusbar,attr"` + ShowVerticalScroll *bool `xml:"showVerticalScroll,attr"` + TabRatio *int `xml:"tabRatio,attr"` + WindowHeight *int `xml:"windowHeight,attr"` + WindowWidth *int `xml:"windowWidth,attr"` + XWindow *int `xml:"xWindow,attr"` + YWindow *int `xml:"yWindow,attr"` +} diff --git a/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlWorksheet.go b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlWorksheet.go new file mode 100644 index 000000000..37c0d18e0 --- /dev/null +++ b/vendor/github.com/360EntSecGroup-Skylar/excelize/xmlWorksheet.go @@ -0,0 +1,573 @@ +package excelize + +import "encoding/xml" + +// xlsxWorksheet directly maps the worksheet element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - currently I have +// not checked it for completeness - it does as much as I need. +type xlsxWorksheet struct { + XMLName xml.Name `xml:"http://schemas.openxmlformats.org/spreadsheetml/2006/main worksheet"` + SheetPr *xlsxSheetPr `xml:"sheetPr"` + Dimension xlsxDimension `xml:"dimension"` + SheetViews xlsxSheetViews `xml:"sheetViews,omitempty"` + SheetFormatPr *xlsxSheetFormatPr `xml:"sheetFormatPr"` + Cols *xlsxCols `xml:"cols,omitempty"` + SheetData xlsxSheetData `xml:"sheetData"` + SheetProtection *xlsxSheetProtection `xml:"sheetProtection"` + AutoFilter *xlsxAutoFilter `xml:"autoFilter"` + MergeCells *xlsxMergeCells `xml:"mergeCells"` + PhoneticPr *xlsxPhoneticPr `xml:"phoneticPr"` + ConditionalFormatting []*xlsxConditionalFormatting `xml:"conditionalFormatting"` + DataValidations *xlsxDataValidations `xml:"dataValidations"` + Hyperlinks *xlsxHyperlinks `xml:"hyperlinks"` + PrintOptions *xlsxPrintOptions `xml:"printOptions"` + PageMargins *xlsxPageMargins `xml:"pageMargins"` + PageSetUp *xlsxPageSetUp `xml:"pageSetup"` + HeaderFooter *xlsxHeaderFooter `xml:"headerFooter"` + Drawing *xlsxDrawing `xml:"drawing"` + LegacyDrawing *xlsxLegacyDrawing `xml:"legacyDrawing"` + Picture *xlsxPicture `xml:"picture"` + TableParts *xlsxTableParts `xml:"tableParts"` + ExtLst *xlsxExtLst `xml:"extLst"` +} + +// xlsxDrawing change r:id to rid in the namespace. +type xlsxDrawing struct { + RID string `xml:"http://schemas.openxmlformats.org/officeDocument/2006/relationships id,attr,omitempty"` +} + +// xlsxHeaderFooter directly maps the headerFooter element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - When printed or +// viewed in page layout view (§18.18.69), each page of a worksheet can have a +// page header, a page footer, or both. 
The headers and footers on odd-numbered +// pages can differ from those on even-numbered pages, and the headers and +// footers on the first page can differ from those on odd- and even-numbered +// pages. In the latter case, the first page is not considered an odd page. +type xlsxHeaderFooter struct { + DifferentFirst bool `xml:"differentFirst,attr,omitempty"` + DifferentOddEven bool `xml:"differentOddEven,attr,omitempty"` + OddHeader []*xlsxOddHeader `xml:"oddHeader"` + OddFooter []*xlsxOddFooter `xml:"oddFooter"` +} + +// xlsxOddHeader directly maps the oddHeader element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - currently I have +// not checked it for completeness - it does as much as I need. +type xlsxOddHeader struct { + Content string `xml:",chardata"` +} + +// xlsxOddFooter directly maps the oddFooter element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - currently I have +// not checked it for completeness - it does as much as I need. +type xlsxOddFooter struct { + Content string `xml:",chardata"` +} + +// xlsxPageSetUp directly maps the pageSetup element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - Page setup +// settings for the worksheet. +type xlsxPageSetUp struct { + BlackAndWhite bool `xml:"blackAndWhite,attr,omitempty"` + CellComments string `xml:"cellComments,attr,omitempty"` + Copies int `xml:"copies,attr,omitempty"` + Draft bool `xml:"draft,attr,omitempty"` + Errors string `xml:"errors,attr,omitempty"` + FirstPageNumber int `xml:"firstPageNumber,attr,omitempty"` + FitToHeight *int `xml:"fitToHeight,attr"` + FitToWidth int `xml:"fitToWidth,attr,omitempty"` + HorizontalDPI float32 `xml:"horizontalDpi,attr,omitempty"` + RID string `xml:"http://schemas.openxmlformats.org/officeDocument/2006/relationships id,attr,omitempty"` + Orientation string `xml:"orientation,attr,omitempty"` + PageOrder string `xml:"pageOrder,attr,omitempty"` + PaperHeight string `xml:"paperHeight,attr,omitempty"` + PaperSize string `xml:"paperSize,attr,omitempty"` + PaperWidth string `xml:"paperWidth,attr,omitempty"` + Scale int `xml:"scale,attr,omitempty"` + UseFirstPageNumber bool `xml:"useFirstPageNumber,attr,omitempty"` + UsePrinterDefaults bool `xml:"usePrinterDefaults,attr,omitempty"` + VerticalDPI float32 `xml:"verticalDpi,attr,omitempty"` +} + +// xlsxPrintOptions directly maps the printOptions element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - Print options for +// the sheet. Printer-specific settings are stored separately in the Printer +// Settings part. +type xlsxPrintOptions struct { + GridLines bool `xml:"gridLines,attr,omitempty"` + GridLinesSet bool `xml:"gridLinesSet,attr,omitempty"` + Headings bool `xml:"headings,attr,omitempty"` + HorizontalCentered bool `xml:"horizontalCentered,attr,omitempty"` + VerticalCentered bool `xml:"verticalCentered,attr,omitempty"` +} + +// xlsxPageMargins directly maps the pageMargins element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - Page margins for +// a sheet or a custom sheet view. +type xlsxPageMargins struct { + Bottom float64 `xml:"bottom,attr"` + Footer float64 `xml:"footer,attr"` + Header float64 `xml:"header,attr"` + Left float64 `xml:"left,attr"` + Right float64 `xml:"right,attr"` + Top float64 `xml:"top,attr"` +} + +// xlsxSheetFormatPr directly maps the sheetFormatPr element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main. 
This element +// specifies the sheet formatting properties. +type xlsxSheetFormatPr struct { + BaseColWidth uint8 `xml:"baseColWidth,attr,omitempty"` + DefaultColWidth float64 `xml:"defaultColWidth,attr,omitempty"` + DefaultRowHeight float64 `xml:"defaultRowHeight,attr"` + CustomHeight bool `xml:"customHeight,attr,omitempty"` + ZeroHeight bool `xml:"zeroHeight,attr,omitempty"` + ThickTop bool `xml:"thickTop,attr,omitempty"` + ThickBottom bool `xml:"thickBottom,attr,omitempty"` + OutlineLevelRow uint8 `xml:"outlineLevelRow,attr,omitempty"` + OutlineLevelCol uint8 `xml:"outlineLevelCol,attr,omitempty"` +} + +// xlsxSheetViews directly maps the sheetViews element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - Worksheet views +// collection. +type xlsxSheetViews struct { + SheetView []xlsxSheetView `xml:"sheetView"` +} + +// xlsxSheetView directly maps the sheetView element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - currently I have +// not checked it for completeness - it does as much as I need. A single sheet +// view definition. When more than one sheet view is defined in the file, it +// means that when opening the workbook, each sheet view corresponds to a +// separate window within the spreadsheet application, where each window is +// showing the particular sheet containing the same workbookViewId value, the +// last sheetView definition is loaded, and the others are discarded. When +// multiple windows are viewing the same sheet, multiple sheetView elements +// (with corresponding workbookView entries) are saved. +// See https://msdn.microsoft.com/en-us/library/office/documentformat.openxml.spreadsheet.sheetview.aspx +type xlsxSheetView struct { + WindowProtection bool `xml:"windowProtection,attr,omitempty"` + ShowFormulas bool `xml:"showFormulas,attr,omitempty"` + ShowGridLines *bool `xml:"showGridLines,attr"` + ShowRowColHeaders *bool `xml:"showRowColHeaders,attr"` + ShowZeros bool `xml:"showZeros,attr,omitempty"` + RightToLeft bool `xml:"rightToLeft,attr,omitempty"` + TabSelected bool `xml:"tabSelected,attr,omitempty"` + ShowWhiteSpace *bool `xml:"showWhiteSpace,attr"` + ShowOutlineSymbols bool `xml:"showOutlineSymbols,attr,omitempty"` + DefaultGridColor *bool `xml:"defaultGridColor,attr"` + View string `xml:"view,attr,omitempty"` + TopLeftCell string `xml:"topLeftCell,attr,omitempty"` + ColorID int `xml:"colorId,attr,omitempty"` + ZoomScale float64 `xml:"zoomScale,attr,omitempty"` + ZoomScaleNormal float64 `xml:"zoomScaleNormal,attr,omitempty"` + ZoomScalePageLayoutView float64 `xml:"zoomScalePageLayoutView,attr,omitempty"` + ZoomScaleSheetLayoutView float64 `xml:"zoomScaleSheetLayoutView,attr,omitempty"` + WorkbookViewID int `xml:"workbookViewId,attr"` + Pane *xlsxPane `xml:"pane,omitempty"` + Selection []*xlsxSelection `xml:"selection"` +} + +// xlsxSelection directly maps the selection element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - Worksheet view +// selection. +type xlsxSelection struct { + ActiveCell string `xml:"activeCell,attr,omitempty"` + ActiveCellID *int `xml:"activeCellId,attr"` + Pane string `xml:"pane,attr,omitempty"` + SQRef string `xml:"sqref,attr,omitempty"` +} + +// xlsxSelection directly maps the selection element. Worksheet view pane. 
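+//
+// For example, freezing the first row of a sheet could be sketched as (view
+// is an assumed xlsxSheetView value, the values are illustrative):
+//
+//    view.Pane = &xlsxPane{
+//        State:       "frozen",
+//        YSplit:      1,
+//        TopLeftCell: "A2",
+//        ActivePane:  "bottomLeft",
+//    }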
+type xlsxPane struct { + ActivePane string `xml:"activePane,attr,omitempty"` + State string `xml:"state,attr,omitempty"` // Either "split" or "frozen" + TopLeftCell string `xml:"topLeftCell,attr,omitempty"` + XSplit float64 `xml:"xSplit,attr,omitempty"` + YSplit float64 `xml:"ySplit,attr,omitempty"` +} + +// xlsxSheetPr directly maps the sheetPr element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - Sheet-level +// properties. +type xlsxSheetPr struct { + XMLName xml.Name `xml:"sheetPr"` + CodeName string `xml:"codeName,attr,omitempty"` + EnableFormatConditionsCalculation *bool `xml:"enableFormatConditionsCalculation,attr"` + FilterMode bool `xml:"filterMode,attr,omitempty"` + Published *bool `xml:"published,attr"` + SyncHorizontal bool `xml:"syncHorizontal,attr,omitempty"` + SyncVertical bool `xml:"syncVertical,attr,omitempty"` + TransitionEntry bool `xml:"transitionEntry,attr,omitempty"` + TabColor *xlsxTabColor `xml:"tabColor,omitempty"` + PageSetUpPr *xlsxPageSetUpPr `xml:"pageSetUpPr,omitempty"` +} + +// xlsxPageSetUpPr directly maps the pageSetupPr element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - Page setup +// properties of the worksheet. +type xlsxPageSetUpPr struct { + AutoPageBreaks bool `xml:"autoPageBreaks,attr,omitempty"` + FitToPage bool `xml:"fitToPage,attr,omitempty"` // Flag indicating whether the Fit to Page print option is enabled. +} + +// xlsxTabColor directly maps the tabColor element in the namespace currently I +// have not checked it for completeness - it does as much as I need. +type xlsxTabColor struct { + Theme int `xml:"theme,attr,omitempty"` + Tint float64 `xml:"tint,attr,omitempty"` +} + +// xlsxCols directly maps the cols element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - currently I have +// not checked it for completeness - it does as much as I need. +type xlsxCols struct { + Col []xlsxCol `xml:"col"` +} + +// xlsxCol directly maps the col (Column Width & Formatting). Defines column +// width and column formatting for one or more columns of the worksheet. +type xlsxCol struct { + BestFit bool `xml:"bestFit,attr,omitempty"` + Collapsed bool `xml:"collapsed,attr"` + CustomWidth bool `xml:"customWidth,attr,omitempty"` + Hidden bool `xml:"hidden,attr"` + Max int `xml:"max,attr"` + Min int `xml:"min,attr"` + OutlineLevel uint8 `xml:"outlineLevel,attr,omitempty"` + Phonetic bool `xml:"phonetic,attr,omitempty"` + Style int `xml:"style,attr"` + Width float64 `xml:"width,attr"` +} + +// xlsxDimension directly maps the dimension element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - This element +// specifies the used range of the worksheet. It specifies the row and column +// bounds of used cells in the worksheet. This is optional and is not required. +// Used cells include cells with formulas, text content, and cell formatting. +// When an entire column is formatted, only the first cell in that column is +// considered used. +type xlsxDimension struct { + Ref string `xml:"ref,attr"` +} + +// xlsxSheetData directly maps the sheetData element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - currently I have +// not checked it for completeness - it does as much as I need. +type xlsxSheetData struct { + XMLName xml.Name `xml:"sheetData"` + Row []xlsxRow `xml:"row"` +} + +// xlsxRow directly maps the row element. 
The element expresses information +// about an entire row of a worksheet, and contains all cell definitions for a +// particular row in the worksheet. +type xlsxRow struct { + Collapsed bool `xml:"collapsed,attr,omitempty"` + CustomFormat bool `xml:"customFormat,attr,omitempty"` + CustomHeight bool `xml:"customHeight,attr,omitempty"` + Hidden bool `xml:"hidden,attr,omitempty"` + Ht float64 `xml:"ht,attr,omitempty"` + OutlineLevel uint8 `xml:"outlineLevel,attr,omitempty"` + Ph bool `xml:"ph,attr,omitempty"` + R int `xml:"r,attr,omitempty"` + S int `xml:"s,attr,omitempty"` + Spans string `xml:"spans,attr,omitempty"` + ThickBot bool `xml:"thickBot,attr,omitempty"` + ThickTop bool `xml:"thickTop,attr,omitempty"` + C []xlsxC `xml:"c"` +} + +// xlsxMergeCell directly maps the mergeCell element. A single merged cell. +type xlsxMergeCell struct { + Ref string `xml:"ref,attr,omitempty"` +} + +// xlsxMergeCells directly maps the mergeCells element. This collection +// expresses all the merged cells in the sheet. +type xlsxMergeCells struct { + Count int `xml:"count,attr,omitempty"` + Cells []*xlsxMergeCell `xml:"mergeCell,omitempty"` +} + +// xlsxDataValidations expresses all data validation information for cells in a +// sheet which have data validation features applied. +type xlsxDataValidations struct { + Count int `xml:"count,attr,omitempty"` + DisablePrompts bool `xml:"disablePrompts,attr,omitempty"` + XWindow int `xml:"xWindow,attr,omitempty"` + YWindow int `xml:"yWindow,attr,omitempty"` + DataValidation string `xml:",innerxml"` +} + +// xlsxC directly maps the c element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - currently I have +// not checked it for completeness - it does as much as I need. +// +// This simple type is restricted to the values listed in the following table: +// +// Enumeration Value | Description +// ---------------------------+--------------------------------- +// b (Boolean) | Cell containing a boolean. +// d (Date) | Cell contains a date in the ISO 8601 format. +// e (Error) | Cell containing an error. +// inlineStr (Inline String) | Cell containing an (inline) rich string, i.e., one not in the shared string table. If this cell type is used, then the cell value is in the is element rather than the v element in the cell (c element). +// n (Number) | Cell containing a number. +// s (Shared String) | Cell containing a shared string. +// str (String) | Cell containing a formula string. +// +type xlsxC struct { + R string `xml:"r,attr"` // Cell ID, e.g. A1 + S int `xml:"s,attr,omitempty"` // Style reference. + // Str string `xml:"str,attr,omitempty"` // Style reference. + T string `xml:"t,attr,omitempty"` // Type. + F *xlsxF `xml:"f,omitempty"` // Formula + V string `xml:"v,omitempty"` // Value + IS *xlsxIS `xml:"is"` + XMLSpace xml.Attr `xml:"space,attr,omitempty"` +} + +// xlsxIS directly maps the t element. Cell containing an (inline) rich +// string, i.e., one not in the shared string table. If this cell type is +// used, then the cell value is in the is element rather than the v element in +// the cell (c element). +type xlsxIS struct { + T string `xml:"t"` +} + +// xlsxF directly maps the f element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - currently I have +// not checked it for completeness - it does as much as I need. 
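+//
+// A cell holding a formula together with its cached result could be sketched
+// as (the cell reference, formula and value are placeholders):
+//
+//    c := xlsxC{
+//        R: "B1",
+//        F: &xlsxF{Content: "SUM(A1:A10)"},
+//        V: "55",
+//    }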
+type xlsxF struct { + Content string `xml:",chardata"` + T string `xml:"t,attr,omitempty"` // Formula type + Ref string `xml:"ref,attr,omitempty"` // Shared formula ref + Si string `xml:"si,attr,omitempty"` // Shared formula index +} + +// xlsxSheetProtection collection expresses the sheet protection options to +// enforce when the sheet is protected. +type xlsxSheetProtection struct { + AlgorithmName string `xml:"algorithmName,attr,omitempty"` + AutoFilter int `xml:"autoFilter,attr,omitempty"` + DeleteColumns int `xml:"deleteColumns,attr,omitempty"` + DeleteRows int `xml:"deleteRows,attr,omitempty"` + FormatCells int `xml:"formatCells,attr,omitempty"` + FormatColumns int `xml:"formatColumns,attr,omitempty"` + FormatRows int `xml:"formatRows,attr,omitempty"` + HashValue string `xml:"hashValue,attr,omitempty"` + InsertColumns int `xml:"insertColumns,attr,omitempty"` + InsertHyperlinks int `xml:"insertHyperlinks,attr,omitempty"` + InsertRows int `xml:"insertRows,attr,omitempty"` + Objects int `xml:"objects,attr,omitempty"` + PivotTables int `xml:"pivotTables,attr,omitempty"` + SaltValue string `xml:"saltValue,attr,omitempty"` + Scenarios int `xml:"scenarios,attr,omitempty"` + SelectLockedCells int `xml:"selectLockedCells,attr,omitempty"` + SelectUnlockedCell int `xml:"selectUnlockedCell,attr,omitempty"` + Sheet int `xml:"sheet,attr,omitempty"` + Sort int `xml:"sort,attr,omitempty"` + SpinCount int `xml:"spinCount,attr,omitempty"` +} + +// xlsxPhoneticPr (Phonetic Properties) represents a collection of phonetic +// properties that affect the display of phonetic text for this String Item +// (si). Phonetic text is used to give hints as to the pronunciation of an East +// Asian language, and the hints are displayed as text within the spreadsheet +// cells across the top portion of the cell. Since the phonetic hints are text, +// every phonetic hint is expressed as a phonetic run (rPh), and these +// properties specify how to display that phonetic run. +type xlsxPhoneticPr struct { + Alignment string `xml:"alignment,attr,omitempty"` + FontID *int `xml:"fontId,attr"` + Type string `xml:"type,attr,omitempty"` +} + +// A Conditional Format is a format, such as cell shading or font color, that a +// spreadsheet application can automatically apply to cells if a specified +// condition is true. This collection expresses conditional formatting rules +// applied to a particular cell or range. +type xlsxConditionalFormatting struct { + SQRef string `xml:"sqref,attr,omitempty"` + CfRule []*xlsxCfRule `xml:"cfRule"` +} + +// xlsxCfRule (Conditional Formatting Rule) represents a description of a +// conditional formatting rule. 
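+//
+// For example, a "highlight cells greater than 100" rule could be sketched as
+// (the dxf index is a placeholder into the styles part):
+//
+//    dxf := 0
+//    rule := xlsxCfRule{
+//        Type:     "cellIs",
+//        Operator: "greaterThan",
+//        Priority: 1,
+//        Formula:  []string{"100"},
+//        DxfID:    &dxf,
+//    }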
+type xlsxCfRule struct { + AboveAverage *bool `xml:"aboveAverage,attr"` + Bottom bool `xml:"bottom,attr,omitempty"` + DxfID *int `xml:"dxfId,attr"` + EqualAverage bool `xml:"equalAverage,attr,omitempty"` + Operator string `xml:"operator,attr,omitempty"` + Percent bool `xml:"percent,attr,omitempty"` + Priority int `xml:"priority,attr,omitempty"` + Rank int `xml:"rank,attr,omitempty"` + StdDev int `xml:"stdDev,attr,omitempty"` + StopIfTrue bool `xml:"stopIfTrue,attr,omitempty"` + Text string `xml:"text,attr,omitempty"` + TimePeriod string `xml:"timePeriod,attr,omitempty"` + Type string `xml:"type,attr,omitempty"` + Formula []string `xml:"formula,omitempty"` + ColorScale *xlsxColorScale `xml:"colorScale"` + DataBar *xlsxDataBar `xml:"dataBar"` + IconSet *xlsxIconSet `xml:"iconSet"` + ExtLst *xlsxExtLst `xml:"extLst"` +} + +// xlsxColorScale (Color Scale) describes a gradated color scale in this +// conditional formatting rule. +type xlsxColorScale struct { + Cfvo []*xlsxCfvo `xml:"cfvo"` + Color []*xlsxColor `xml:"color"` +} + +// dataBar (Data Bar) describes a data bar conditional formatting rule. +type xlsxDataBar struct { + MaxLength int `xml:"maxLength,attr,omitempty"` + MinLength int `xml:"minLength,attr,omitempty"` + ShowValue bool `xml:"showValue,attr,omitempty"` + Cfvo []*xlsxCfvo `xml:"cfvo"` + Color []*xlsxColor `xml:"color"` +} + +// xlsxIconSet (Icon Set) describes an icon set conditional formatting rule. +type xlsxIconSet struct { + Cfvo []*xlsxCfvo `xml:"cfvo"` + IconSet string `xml:"iconSet,attr,omitempty"` + ShowValue bool `xml:"showValue,attr,omitempty"` + Percent bool `xml:"percent,attr,omitempty"` + Reverse bool `xml:"reverse,attr,omitempty"` +} + +// cfvo (Conditional Format Value Object) describes the values of the +// interpolation points in a gradient scale. +type xlsxCfvo struct { + Gte bool `xml:"gte,attr,omitempty"` + Type string `xml:"type,attr,omitempty"` + Val int `xml:"val,attr"` + ExtLst *xlsxExtLst `xml:"extLst"` +} + +// xlsxHyperlinks directly maps the hyperlinks element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - A hyperlink can +// be stored in a package as a relationship. Hyperlinks shall be identified by +// containing a target which specifies the destination of the given hyperlink. +type xlsxHyperlinks struct { + Hyperlink []xlsxHyperlink `xml:"hyperlink"` +} + +// xlsxHyperlink directly maps the hyperlink element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main +type xlsxHyperlink struct { + Ref string `xml:"ref,attr"` + Location string `xml:"location,attr,omitempty"` + Display string `xml:"display,attr,omitempty"` + RID string `xml:"http://schemas.openxmlformats.org/officeDocument/2006/relationships id,attr,omitempty"` +} + +// xlsxTableParts directly maps the tableParts element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - The table element +// has several attributes applied to identify the table and the data range it +// covers. The table id attribute needs to be unique across all table parts, the +// same goes for the name and displayName. The displayName has the further +// restriction that it must be unique across all defined names in the workbook. +// Later on we will see that you can define names for many elements, such as +// cells or formulas. The name value is used for the object model in Microsoft +// Office Excel. The displayName is used for references in formulas. 
The ref +// attribute is used to identify the cell range that the table covers. This +// includes not only the table data, but also the table header containing column +// names. +// To add columns to your table you add new tableColumn elements to the +// tableColumns container. Similar to the shared string table the collection +// keeps a count attribute identifying the number of columns. Besides the table +// definition in the table part there is also the need to identify which tables +// are displayed in the worksheet. The worksheet part has a separate element +// tableParts to store this information. Each table part is referenced through +// the relationship ID and again a count of the number of table parts is +// maintained. The following markup sample is taken from the documents +// accompanying this book. The sheet data element has been removed to reduce the +// size of the sample. To reference the table, just add the tableParts element, +// of course after having created and stored the table part. For example: +// +// +// ... +// +// +// +// +// +type xlsxTableParts struct { + Count int `xml:"count,attr,omitempty"` + TableParts []*xlsxTablePart `xml:"tablePart"` +} + +// xlsxTablePart directly maps the tablePart element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main +type xlsxTablePart struct { + RID string `xml:"http://schemas.openxmlformats.org/officeDocument/2006/relationships id,attr,omitempty"` +} + +// xlsxPicture directly maps the picture element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - Background sheet +// image. For example: +// +// +// +type xlsxPicture struct { + RID string `xml:"http://schemas.openxmlformats.org/officeDocument/2006/relationships id,attr,omitempty"` +} + +// xlsxLegacyDrawing directly maps the legacyDrawing element in the namespace +// http://schemas.openxmlformats.org/spreadsheetml/2006/main - A comment is a +// rich text note that is attached to, and associated with, a cell, separate +// from other cell content. Comment content is stored separate from the cell, +// and is displayed in a drawing object (like a text box) that is separate from, +// but associated with, a cell. Comments are used as reminders, such as noting +// how a complex formula works, or to provide feedback to other users. Comments +// can also be used to explain assumptions made in a formula or to call out +// something special about the cell. +type xlsxLegacyDrawing struct { + RID string `xml:"http://schemas.openxmlformats.org/officeDocument/2006/relationships id,attr,omitempty"` +} + +// formatPanes directly maps the settings of the panes. +type formatPanes struct { + Freeze bool `json:"freeze"` + Split bool `json:"split"` + XSplit int `json:"x_split"` + YSplit int `json:"y_split"` + TopLeftCell string `json:"top_left_cell"` + ActivePane string `json:"active_pane"` + Panes []struct { + SQRef string `json:"sqref"` + ActiveCell string `json:"active_cell"` + Pane string `json:"pane"` + } `json:"panes"` +} + +// formatConditional directly maps the conditional format settings of the cells. 
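+//
+// JSON such as the following (keys taken from the struct tags below, the
+// "cell" and ">" values are only illustrative) would decode into this type:
+//
+//    [{"type":"cell","criteria":">","format":1,"value":"100"}]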
+type formatConditional struct { + Type string `json:"type"` + AboveAverage bool `json:"above_average"` + Percent bool `json:"percent"` + Format int `json:"format"` + Criteria string `json:"criteria"` + Value string `json:"value,omitempty"` + Minimum string `json:"minimum,omitempty"` + Maximum string `json:"maximum,omitempty"` + MinType string `json:"min_type,omitempty"` + MidType string `json:"mid_type,omitempty"` + MaxType string `json:"max_type,omitempty"` + MinValue string `json:"min_value,omitempty"` + MidValue string `json:"mid_value,omitempty"` + MaxValue string `json:"max_value,omitempty"` + MinColor string `json:"min_color,omitempty"` + MidColor string `json:"mid_color,omitempty"` + MaxColor string `json:"max_color,omitempty"` + MinLength string `json:"min_length,omitempty"` + MaxLength string `json:"max_length,omitempty"` + MultiRange string `json:"multi_range,omitempty"` + BarColor string `json:"bar_color,omitempty"` +} diff --git a/vendor/github.com/kardianos/osext/LICENSE b/vendor/github.com/kardianos/osext/LICENSE new file mode 100644 index 000000000..744875676 --- /dev/null +++ b/vendor/github.com/kardianos/osext/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2012 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/kardianos/osext/README.md b/vendor/github.com/kardianos/osext/README.md new file mode 100644 index 000000000..15cbc3d95 --- /dev/null +++ b/vendor/github.com/kardianos/osext/README.md @@ -0,0 +1,21 @@ +### Extensions to the "os" package. + +[![GoDoc](https://godoc.org/github.com/kardianos/osext?status.svg)](https://godoc.org/github.com/kardianos/osext) + +## Find the current Executable and ExecutableFolder. + +As of go1.8 the Executable function may be found in `os`. The Executable function +in the std lib `os` package is used if available. + +There is sometimes utility in finding the current executable file +that is running. This can be used for upgrading the current executable +or finding resources located relative to the executable file. 
Both +working directory and the os.Args[0] value are arbitrary and cannot +be relied on; os.Args[0] can be "faked". + +Multi-platform and supports: + * Linux + * OS X + * Windows + * Plan 9 + * BSDs. diff --git a/vendor/github.com/kardianos/osext/osext.go b/vendor/github.com/kardianos/osext/osext.go new file mode 100644 index 000000000..17f380f0e --- /dev/null +++ b/vendor/github.com/kardianos/osext/osext.go @@ -0,0 +1,33 @@ +// Copyright 2012 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Extensions to the standard "os" package. +package osext // import "github.com/kardianos/osext" + +import "path/filepath" + +var cx, ce = executableClean() + +func executableClean() (string, error) { + p, err := executable() + return filepath.Clean(p), err +} + +// Executable returns an absolute path that can be used to +// re-invoke the current program. +// It may not be valid after the current program exits. +func Executable() (string, error) { + return cx, ce +} + +// Returns same path as Executable, returns just the folder +// path. Excludes the executable name and any trailing slash. +func ExecutableFolder() (string, error) { + p, err := Executable() + if err != nil { + return "", err + } + + return filepath.Dir(p), nil +} diff --git a/vendor/github.com/kardianos/osext/osext_go18.go b/vendor/github.com/kardianos/osext/osext_go18.go new file mode 100644 index 000000000..009d8a926 --- /dev/null +++ b/vendor/github.com/kardianos/osext/osext_go18.go @@ -0,0 +1,9 @@ +//+build go1.8,!openbsd + +package osext + +import "os" + +func executable() (string, error) { + return os.Executable() +} diff --git a/vendor/github.com/kardianos/osext/osext_plan9.go b/vendor/github.com/kardianos/osext/osext_plan9.go new file mode 100644 index 000000000..95e237137 --- /dev/null +++ b/vendor/github.com/kardianos/osext/osext_plan9.go @@ -0,0 +1,22 @@ +// Copyright 2012 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//+build !go1.8 + +package osext + +import ( + "os" + "strconv" + "syscall" +) + +func executable() (string, error) { + f, err := os.Open("/proc/" + strconv.Itoa(os.Getpid()) + "/text") + if err != nil { + return "", err + } + defer f.Close() + return syscall.Fd2path(int(f.Fd())) +} diff --git a/vendor/github.com/kardianos/osext/osext_procfs.go b/vendor/github.com/kardianos/osext/osext_procfs.go new file mode 100644 index 000000000..e1f16f885 --- /dev/null +++ b/vendor/github.com/kardianos/osext/osext_procfs.go @@ -0,0 +1,36 @@ +// Copyright 2012 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build !go1.8,android !go1.8,linux !go1.8,netbsd !go1.8,solaris !go1.8,dragonfly + +package osext + +import ( + "errors" + "fmt" + "os" + "runtime" + "strings" +) + +func executable() (string, error) { + switch runtime.GOOS { + case "linux", "android": + const deletedTag = " (deleted)" + execpath, err := os.Readlink("/proc/self/exe") + if err != nil { + return execpath, err + } + execpath = strings.TrimSuffix(execpath, deletedTag) + execpath = strings.TrimPrefix(execpath, deletedTag) + return execpath, nil + case "netbsd": + return os.Readlink("/proc/curproc/exe") + case "dragonfly": + return os.Readlink("/proc/curproc/file") + case "solaris": + return os.Readlink(fmt.Sprintf("/proc/%d/path/a.out", os.Getpid())) + } + return "", errors.New("ExecPath not implemented for " + runtime.GOOS) +} diff --git a/vendor/github.com/kardianos/osext/osext_sysctl.go b/vendor/github.com/kardianos/osext/osext_sysctl.go new file mode 100644 index 000000000..33cee2522 --- /dev/null +++ b/vendor/github.com/kardianos/osext/osext_sysctl.go @@ -0,0 +1,126 @@ +// Copyright 2012 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !go1.8,darwin !go1.8,freebsd openbsd + +package osext + +import ( + "os" + "os/exec" + "path/filepath" + "runtime" + "syscall" + "unsafe" +) + +var initCwd, initCwdErr = os.Getwd() + +func executable() (string, error) { + var mib [4]int32 + switch runtime.GOOS { + case "freebsd": + mib = [4]int32{1 /* CTL_KERN */, 14 /* KERN_PROC */, 12 /* KERN_PROC_PATHNAME */, -1} + case "darwin": + mib = [4]int32{1 /* CTL_KERN */, 38 /* KERN_PROCARGS */, int32(os.Getpid()), -1} + case "openbsd": + mib = [4]int32{1 /* CTL_KERN */, 55 /* KERN_PROC_ARGS */, int32(os.Getpid()), 1 /* KERN_PROC_ARGV */} + } + + n := uintptr(0) + // Get length. + _, _, errNum := syscall.Syscall6(syscall.SYS___SYSCTL, uintptr(unsafe.Pointer(&mib[0])), 4, 0, uintptr(unsafe.Pointer(&n)), 0, 0) + if errNum != 0 { + return "", errNum + } + if n == 0 { // This shouldn't happen. + return "", nil + } + buf := make([]byte, n) + _, _, errNum = syscall.Syscall6(syscall.SYS___SYSCTL, uintptr(unsafe.Pointer(&mib[0])), 4, uintptr(unsafe.Pointer(&buf[0])), uintptr(unsafe.Pointer(&n)), 0, 0) + if errNum != 0 { + return "", errNum + } + if n == 0 { // This shouldn't happen. + return "", nil + } + + var execPath string + switch runtime.GOOS { + case "openbsd": + // buf now contains **argv, with pointers to each of the C-style + // NULL terminated arguments. + var args []string + argv := uintptr(unsafe.Pointer(&buf[0])) + Loop: + for { + argp := *(**[1 << 20]byte)(unsafe.Pointer(argv)) + if argp == nil { + break + } + for i := 0; uintptr(i) < n; i++ { + // we don't want the full arguments list + if string(argp[i]) == " " { + break Loop + } + if argp[i] != 0 { + continue + } + args = append(args, string(argp[:i])) + n -= uintptr(i) + break + } + if n < unsafe.Sizeof(argv) { + break + } + argv += unsafe.Sizeof(argv) + n -= unsafe.Sizeof(argv) + } + execPath = args[0] + // There is no canonical way to get an executable path on + // OpenBSD, so check PATH in case we are called directly + if execPath[0] != '/' && execPath[0] != '.' { + execIsInPath, err := exec.LookPath(execPath) + if err == nil { + execPath = execIsInPath + } + } + default: + for i, v := range buf { + if v == 0 { + buf = buf[:i] + break + } + } + execPath = string(buf) + } + + var err error + // execPath will not be empty due to above checks. 
+ // Try to get the absolute path if the execPath is not rooted. + if execPath[0] != '/' { + execPath, err = getAbs(execPath) + if err != nil { + return execPath, err + } + } + // For darwin KERN_PROCARGS may return the path to a symlink rather than the + // actual executable. + if runtime.GOOS == "darwin" { + if execPath, err = filepath.EvalSymlinks(execPath); err != nil { + return execPath, err + } + } + return execPath, nil +} + +func getAbs(execPath string) (string, error) { + if initCwdErr != nil { + return execPath, initCwdErr + } + // The execPath may begin with a "../" or a "./" so clean it first. + // Join the two paths, trailing and starting slashes undetermined, so use + // the generic Join function. + return filepath.Join(initCwd, filepath.Clean(execPath)), nil +} diff --git a/vendor/github.com/kardianos/osext/osext_windows.go b/vendor/github.com/kardianos/osext/osext_windows.go new file mode 100644 index 000000000..074b3b385 --- /dev/null +++ b/vendor/github.com/kardianos/osext/osext_windows.go @@ -0,0 +1,36 @@ +// Copyright 2012 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//+build !go1.8 + +package osext + +import ( + "syscall" + "unicode/utf16" + "unsafe" +) + +var ( + kernel = syscall.MustLoadDLL("kernel32.dll") + getModuleFileNameProc = kernel.MustFindProc("GetModuleFileNameW") +) + +// GetModuleFileName() with hModule = NULL +func executable() (exePath string, err error) { + return getModuleFileName() +} + +func getModuleFileName() (string, error) { + var n uint32 + b := make([]uint16, syscall.MAX_PATH) + size := uint32(len(b)) + + r0, _, e1 := getModuleFileNameProc.Call(0, uintptr(unsafe.Pointer(&b[0])), uintptr(size)) + n = uint32(r0) + if n == 0 { + return "", e1 + } + return string(utf16.Decode(b[0:n])), nil +} diff --git a/vendor/github.com/mohae/deepcopy/LICENSE b/vendor/github.com/mohae/deepcopy/LICENSE new file mode 100644 index 000000000..419673f00 --- /dev/null +++ b/vendor/github.com/mohae/deepcopy/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Joel + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/github.com/mohae/deepcopy/README.md b/vendor/github.com/mohae/deepcopy/README.md new file mode 100644 index 000000000..f81841885 --- /dev/null +++ b/vendor/github.com/mohae/deepcopy/README.md @@ -0,0 +1,8 @@ +deepCopy +======== +[![GoDoc](https://godoc.org/github.com/mohae/deepcopy?status.svg)](https://godoc.org/github.com/mohae/deepcopy)[![Build Status](https://travis-ci.org/mohae/deepcopy.png)](https://travis-ci.org/mohae/deepcopy) + +DeepCopy makes deep copies of things: unexported field values are not copied. + +## Usage + cpy := deepcopy.Copy(orig) diff --git a/vendor/github.com/mohae/deepcopy/deepcopy.go b/vendor/github.com/mohae/deepcopy/deepcopy.go new file mode 100644 index 000000000..ba763ad09 --- /dev/null +++ b/vendor/github.com/mohae/deepcopy/deepcopy.go @@ -0,0 +1,125 @@ +// deepcopy makes deep copies of things. A standard copy will copy the +// pointers: deep copy copies the values pointed to. Unexported field +// values are not copied. +// +// Copyright (c)2014-2016, Joel Scoble (github.com/mohae), all rights reserved. +// License: MIT, for more details check the included LICENSE file. +package deepcopy + +import ( + "reflect" + "time" +) + +// Interface for delegating copy process to type +type Interface interface { + DeepCopy() interface{} +} + +// Iface is an alias to Copy; this exists for backwards compatibility reasons. +func Iface(iface interface{}) interface{} { + return Copy(iface) +} + +// Copy creates a deep copy of whatever is passed to it and returns the copy +// in an interface{}. The returned value will need to be asserted to the +// correct type. +func Copy(src interface{}) interface{} { + if src == nil { + return nil + } + + // Make the interface a reflect.Value + original := reflect.ValueOf(src) + + // Make a copy of the same type as the original. + cpy := reflect.New(original.Type()).Elem() + + // Recursively copy the original. + copyRecursive(original, cpy) + + // Return the copy as an interface. + return cpy.Interface() +} + +// copyRecursive does the actual copying of the interface. It currently has +// limited support for what it can handle. Add as needed. +func copyRecursive(original, cpy reflect.Value) { + // check for implement deepcopy.Interface + if original.CanInterface() { + if copier, ok := original.Interface().(Interface); ok { + cpy.Set(reflect.ValueOf(copier.DeepCopy())) + return + } + } + + // handle according to original's Kind + switch original.Kind() { + case reflect.Ptr: + // Get the actual value being pointed to. + originalValue := original.Elem() + + // if it isn't valid, return. + if !originalValue.IsValid() { + return + } + cpy.Set(reflect.New(originalValue.Type())) + copyRecursive(originalValue, cpy.Elem()) + + case reflect.Interface: + // If this is a nil, don't do anything + if original.IsNil() { + return + } + // Get the value for the interface, not the pointer. + originalValue := original.Elem() + + // Get the value by calling Elem(). + copyValue := reflect.New(originalValue.Type()).Elem() + copyRecursive(originalValue, copyValue) + cpy.Set(copyValue) + + case reflect.Struct: + t, ok := original.Interface().(time.Time) + if ok { + cpy.Set(reflect.ValueOf(t)) + return + } + // Go through each field of the struct and copy it. + for i := 0; i < original.NumField(); i++ { + // The Type's StructField for a given field is checked to see if StructField.PkgPath + // is set to determine if the field is exported or not because CanSet() returns false + // for settable fields. I'm not sure why. 
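A short usage note for the vendored deepcopy package whose README and Copy implementation appear above: Copy returns an interface{}, so callers assert the result back to the concrete type. The Config type in this sketch is purely illustrative and is not defined anywhere in this repository.

```go
package main

import (
	"fmt"

	"github.com/mohae/deepcopy"
)

type Config struct {
	Name string
	Tags []string
}

func main() {
	orig := &Config{Name: "node", Tags: []string{"a", "b"}}
	// Copy returns interface{}; assert it back to *Config.
	cpy := deepcopy.Copy(orig).(*Config)
	// The copy owns its own slice, so this does not touch orig.Tags.
	cpy.Tags[0] = "z"
	fmt.Println(orig.Tags[0], cpy.Tags[0]) // prints: a z
}
```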
-mohae + if original.Type().Field(i).PkgPath != "" { + continue + } + copyRecursive(original.Field(i), cpy.Field(i)) + } + + case reflect.Slice: + if original.IsNil() { + return + } + // Make a new slice and copy each element. + cpy.Set(reflect.MakeSlice(original.Type(), original.Len(), original.Cap())) + for i := 0; i < original.Len(); i++ { + copyRecursive(original.Index(i), cpy.Index(i)) + } + + case reflect.Map: + if original.IsNil() { + return + } + cpy.Set(reflect.MakeMap(original.Type())) + for _, key := range original.MapKeys() { + originalValue := original.MapIndex(key) + copyValue := reflect.New(originalValue.Type()).Elem() + copyRecursive(originalValue, copyValue) + copyKey := Copy(key.Interface()) + cpy.SetMapIndex(reflect.ValueOf(copyKey), copyValue) + } + + default: + cpy.Set(original) + } +} diff --git a/vendor/github.com/rpoletaev/supervisord/types/comm-types.go b/vendor/github.com/rpoletaev/supervisord/types/comm-types.go new file mode 100644 index 000000000..18679a66f --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/types/comm-types.go @@ -0,0 +1,33 @@ +package types + +type ProcessInfo struct { + Name string `xml:"name"` + Group string `xml:"group"` + Description string `xml:"description"` + Start int `xml:"start"` + Stop int `xml:"stop"` + Now int `xml:"now"` + State int `xml:"state"` + Statename string `xml:"statename"` + Spawnerr string `xml:"spawnerr"` + Exitstatus int `xml:"exitstatus"` + Logfile string `xml:"logfile"` + Stdout_logfile string `xml:"stdout_logfile"` + Stderr_logfile string `xml:"stderr_logfile"` + Pid int `xml:"pid"` +} + +type ReloadConfigResult struct { + AddedGroup []string + ChangedGroup []string + RemovedGroup []string +} + +type ProcessSignal struct { + Name string + Signal string +} + +type BooleanReply struct { + Success bool +} diff --git a/vendor/github.com/rpoletaev/supervisord/xmlrpcclient/xml_processor.go b/vendor/github.com/rpoletaev/supervisord/xmlrpcclient/xml_processor.go new file mode 100644 index 000000000..a1d5acbd7 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/xmlrpcclient/xml_processor.go @@ -0,0 +1,109 @@ +package xmlrpcclient + +import ( + "encoding/xml" + "io" + "strings" +) + +type XmlPath struct { + ElemNames []string +} + +func NewXmlPath() *XmlPath { + return &XmlPath{ElemNames: make([]string, 0)} +} + +func (xp *XmlPath) AddChildren(names ...string) { + for _, name := range names { + xp.ElemNames = append(xp.ElemNames, name) + } +} +func (xp *XmlPath) AddChild(elemName string) { + xp.ElemNames = append(xp.ElemNames, elemName) +} + +func (xp *XmlPath) RemoveLast() { + if len(xp.ElemNames) > 0 { + xp.ElemNames = xp.ElemNames[0 : len(xp.ElemNames)-1] + } +} + +func (xp *XmlPath) Equals(other *XmlPath) bool { + if len(xp.ElemNames) != len(other.ElemNames) { + return false + } + + for i := len(xp.ElemNames) - 1; i >= 0; i -= 1 { + if xp.ElemNames[i] != other.ElemNames[i] { + return false + } + } + return true +} +func (xp *XmlPath) String() string { + return strings.Join(xp.ElemNames, "/") +} + +type XmlLeafProcessor func(value string) +type XmlNonLeafProcessor func() + +type XmlProcessorManager struct { + leafProcessors map[string]XmlLeafProcessor + nonLeafProcessors map[string]XmlNonLeafProcessor +} + +func NewXmlProcessorManager() *XmlProcessorManager { + return &XmlProcessorManager{leafProcessors: make(map[string]XmlLeafProcessor), + nonLeafProcessors: make(map[string]XmlNonLeafProcessor)} +} + +func (xpm *XmlProcessorManager) AddLeafProcessor(path string, processor XmlLeafProcessor) { + 
xpm.leafProcessors[path] = processor +} + +func (xpm *XmlProcessorManager) AddNonLeafProcessor(path string, processor XmlNonLeafProcessor) { + xpm.nonLeafProcessors[path] = processor +} + +func (xpm *XmlProcessorManager) ProcessLeafNode(path string, data string) { + if processor, ok := xpm.leafProcessors[path]; ok { + processor(data) + } +} + +func (xpm *XmlProcessorManager) ProcessNonLeafNode(path string) { + if processor, ok := xpm.nonLeafProcessors[path]; ok { + processor() + } +} + +func (xpm *XmlProcessorManager) ProcessXml(reader io.Reader) { + decoder := xml.NewDecoder(reader) + var curData xml.CharData + curPath := NewXmlPath() + + for { + tk, err := decoder.Token() + if err != nil { + break + } + + switch tk.(type) { + case xml.StartElement: + startElem, _ := tk.(xml.StartElement) + curPath.AddChild(startElem.Name.Local) + curData = nil + case xml.CharData: + data, _ := tk.(xml.CharData) + curData = data.Copy() + case xml.EndElement: + if curData != nil { + xpm.ProcessLeafNode(curPath.String(), string(curData)) + } else { + xpm.ProcessNonLeafNode(curPath.String()) + } + curPath.RemoveLast() + } + } +} diff --git a/vendor/github.com/rpoletaev/supervisord/xmlrpcclient/xmlrpc-client.go b/vendor/github.com/rpoletaev/supervisord/xmlrpcclient/xmlrpc-client.go new file mode 100644 index 000000000..6b2ffea79 --- /dev/null +++ b/vendor/github.com/rpoletaev/supervisord/xmlrpcclient/xmlrpc-client.go @@ -0,0 +1,191 @@ +package xmlrpcclient + +import ( + "bytes" + "fmt" + "net/http" + + "github.com/rpoletaev/supervisord/types" + + "github.com/ochinchina/gorilla-xmlrpc/xml" +) + +type XmlRPCClient struct { + serverurl string +} + +type VersionReply struct { + Value string +} + +type StartStopReply struct { + Value bool +} + +type ShutdownReply StartStopReply + +type AllProcessInfoReply struct { + Value []types.ProcessInfo +} + +func NewXmlRPCClient(serverurl string) *XmlRPCClient { + return &XmlRPCClient{serverurl: serverurl} +} + +func (r *XmlRPCClient) Url() string { + return fmt.Sprintf("%s/RPC2", r.serverurl) +} + +func (r *XmlRPCClient) post(method string, data interface{}) (*http.Response, error) { + buf, _ := xml.EncodeClientRequest(method, data) + resp, err := http.Post(r.Url(), "text/xml", bytes.NewBuffer(buf)) + if err != nil { + fmt.Println("Fail to send request to supervisord:", err) + return nil, err + } + + if resp.StatusCode/100 != 2 { + fmt.Println("Bad Response:", resp.Status) + resp.Body.Close() + return nil, fmt.Errorf("Response code is NOT 2xx") + } + return resp, nil +} + +func (r *XmlRPCClient) GetVersion() (reply VersionReply, err error) { + ins := struct{}{} + resp, err := r.post("supervisor.getVersion", &ins) + + if err != nil { + return + } + defer resp.Body.Close() + + err = xml.DecodeClientResponse(resp.Body, &reply) + + return +} + +func (r *XmlRPCClient) GetAllProcessInfo() (reply AllProcessInfoReply, err error) { + ins := struct{}{} + resp, err := r.post("supervisor.getAllProcessInfo", &ins) + if err != nil { + return + } + defer resp.Body.Close() + + err = xml.DecodeClientResponse(resp.Body, &reply) + + return +} + +func (r *XmlRPCClient) ChangeProcessState(change string, processName string) (reply StartStopReply, err error) { + if !(change == "start" || change == "stop") { + err = fmt.Errorf("Incorrect required state") + return + } + + ins := struct{ Value string }{processName} + resp, err := r.post(fmt.Sprintf("supervisor.%sProcess", change), &ins) + + if err != nil { + return + } + defer resp.Body.Close() + + err = xml.DecodeClientResponse(resp.Body, &reply) + 
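A hedged sketch of driving the vendored xmlrpcclient defined above. NewXmlRPCClient, GetVersion and GetAllProcessInfo are the calls introduced in this diff; the supervisord address and the printed fields are assumptions for illustration only.

```go
package main

import (
	"fmt"
	"log"

	"github.com/rpoletaev/supervisord/xmlrpcclient"
)

func main() {
	// Assumes a supervisord instance listening on this illustrative address.
	client := xmlrpcclient.NewXmlRPCClient("http://127.0.0.1:9001")

	version, err := client.GetVersion()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("supervisord version:", version.Value)

	info, err := client.GetAllProcessInfo()
	if err != nil {
		log.Fatal(err)
	}
	for _, p := range info.Value {
		fmt.Printf("%s: %s\n", p.Name, p.Statename)
	}
}
```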
+ return +} + +func (r *XmlRPCClient) ChangeAllProcessState(change string) (reply AllProcessInfoReply, err error) { + if !(change == "start" || change == "stop") { + err = fmt.Errorf("Incorrect required state") + return + } + ins := struct{ Wait bool }{true} + resp, err := r.post(fmt.Sprintf("supervisor.%sAllProcesses", change), &ins) + if err != nil { + return + } + defer resp.Body.Close() + err = xml.DecodeClientResponse(resp.Body, &reply) + return +} + +func (r *XmlRPCClient) Shutdown() (reply ShutdownReply, err error) { + ins := struct{}{} + resp, err := r.post("supervisor.shutdown", &ins) + + if err != nil { + return + } + defer resp.Body.Close() + + err = xml.DecodeClientResponse(resp.Body, &reply) + + return +} + +func (r *XmlRPCClient) ReloadConfig() (reply types.ReloadConfigResult, err error) { + ins := struct{}{} + resp, err := r.post("supervisor.reloadConfig", &ins) + + if err != nil { + return + } + + defer resp.Body.Close() + xmlProcMgr := NewXmlProcessorManager() + reply.AddedGroup = make([]string, 0) + reply.ChangedGroup = make([]string, 0) + reply.RemovedGroup = make([]string, 0) + i := -1 + has_value := false + xmlProcMgr.AddNonLeafProcessor("methodResponse/params/param/value/array/data", func() { + if has_value { + has_value = false + } else { + i++ + } + }) + xmlProcMgr.AddLeafProcessor("methodResponse/params/param/value/array/data/value", func(value string) { + has_value = true + i++ + switch i { + case 0: + reply.AddedGroup = append(reply.AddedGroup, value) + case 1: + reply.ChangedGroup = append(reply.ChangedGroup, value) + case 2: + reply.RemovedGroup = append(reply.RemovedGroup, value) + } + }) + xmlProcMgr.ProcessXml(resp.Body) + return +} + +func (r *XmlRPCClient) SignalProcess(signal string, name string) (reply types.BooleanReply, err error) { + ins := types.ProcessSignal{Name: name, Signal: signal} + resp, err := r.post("supervisor.signalProcess", &ins) + if err != nil { + return + } + defer resp.Body.Close() + + err = xml.DecodeClientResponse(resp.Body, &reply) + return +} + +func (r *XmlRPCClient) SignalAll(signal string) (reply AllProcessInfoReply, err error) { + ins := struct{ Signal string }{signal} + resp, err := r.post("supervisor.signalProcess", &ins) + if err != nil { + return + } + defer resp.Body.Close() + + err = xml.DecodeClientResponse(resp.Body, &reply) + + return +} diff --git a/vendor/github.com/sevlyar/go-daemon/LICENSE b/vendor/github.com/sevlyar/go-daemon/LICENSE new file mode 100644 index 000000000..6923f2f22 --- /dev/null +++ b/vendor/github.com/sevlyar/go-daemon/LICENSE @@ -0,0 +1,7 @@ +Copyright (C) 2013 Sergey Yarmonov + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/sevlyar/go-daemon/README.md b/vendor/github.com/sevlyar/go-daemon/README.md new file mode 100644 index 000000000..be4824a92 --- /dev/null +++ b/vendor/github.com/sevlyar/go-daemon/README.md @@ -0,0 +1,63 @@ +# go-daemon [![Build Status](https://travis-ci.org/sevlyar/go-daemon.svg?branch=master)](https://travis-ci.org/sevlyar/go-daemon) [![GoDoc](https://godoc.org/github.com/sevlyar/go-daemon?status.svg)](https://godoc.org/github.com/sevlyar/go-daemon) + +Library for writing system daemons in Go. + +Now supported only UNIX-based OS (Windows is not supported). But the library was tested only on Linux +and OSX, so that if you have an ability to test the library on other platforms, give me feedback, please (#26). + +*Please, feel free to send me bug reports and fixes. Many thanks to all contributors.* + +## Features + +* Goroutine-safe daemonization; +* Out of box work with pid-files; +* Easy handling of system signals; +* The control of a daemon. + +## Installation + + go get github.com/sevlyar/go-daemon + +You can use [gopkg.in](http://labix.org/gopkg.in): + + go get gopkg.in/sevlyar/go-daemon.v0 + +If you want to use the library in production project, please use vendoring, +because i can not ensure backward compatibility before release v1.0. + +## Examples + +* [Simple](examples/cmd/gd-simple/) +* [Log rotation](examples/cmd/gd-log-rotation/) +* [Signal handling](examples/cmd/gd-signal-handling/) + +## Documentation + +[godoc.org/github.com/sevlyar/go-daemon](https://godoc.org/github.com/sevlyar/go-daemon) + +## How it works + +We can not use `fork` syscall in Golang's runtime, because child process doesn't inherit +threads and goroutines in that case. The library uses a simple trick: it runs its own copy with +a mark - a predefined environment variable. Availability of the variable for the process means +an execution in the child's copy. So that if the mark is not setted - the library executes +parent's operations and runs its own copy with mark, and if the mark is setted - the library +executes child's operations: + +```go +func main() { + Pre() + + context := new(Context) + child, _ := context.Reborn() + + if child != nil { + PostParent() + } else { + defer context.Release() + PostChild() + } +} +``` + +![](img/idea.png) diff --git a/vendor/github.com/sevlyar/go-daemon/command.go b/vendor/github.com/sevlyar/go-daemon/command.go new file mode 100644 index 000000000..07d23c829 --- /dev/null +++ b/vendor/github.com/sevlyar/go-daemon/command.go @@ -0,0 +1,99 @@ +package daemon + +import ( + "os" +) + +// AddCommand is wrapper on AddFlag and SetSigHandler functions. +func AddCommand(f Flag, sig os.Signal, handler SignalHandlerFunc) { + if f != nil { + AddFlag(f, sig) + } + if handler != nil { + SetSigHandler(handler, sig) + } +} + +// Flag is the interface implemented by an object that has two state: +// 'set' and 'unset'. +type Flag interface { + IsSet() bool +} + +// BoolFlag returns new object that implements interface Flag and +// has state 'set' when var with the given address is true. +func BoolFlag(f *bool) Flag { + return &boolFlag{f} +} + +// StringFlag returns new object that implements interface Flag and +// has state 'set' when var with the given address equals given value of v. 
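To complement the README skeleton above, here is a fuller but still illustrative sketch of daemonizing with the go-daemon Context added in this patch. Context, Reborn and Release are defined in the files that follow; the pid and log file names, permissions, work directory and umask are assumptions chosen for the example.

```go
package main

import (
	"log"

	daemon "github.com/sevlyar/go-daemon"
)

func main() {
	ctx := &daemon.Context{
		PidFileName: "sample.pid",
		PidFilePerm: 0644,
		LogFileName: "sample.log",
		LogFilePerm: 0640,
		WorkDir:     "./",
		Umask:       027,
	}

	// In the parent, Reborn starts a detached copy of the process and
	// returns it; in the child (the daemon) it returns nil.
	child, err := ctx.Reborn()
	if err != nil {
		log.Fatalf("reborn: %v", err)
	}
	if child != nil {
		return // parent exits; the daemon keeps running
	}
	defer ctx.Release() // removes the pid file when the daemon exits

	log.Println("daemon started")
	// ... the daemon's work loop would go here ...
}
```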
+func StringFlag(f *string, v string) Flag { + return &stringFlag{f, v} +} + +type boolFlag struct { + b *bool +} + +func (f *boolFlag) IsSet() bool { + if f == nil { + return false + } + return *f.b +} + +type stringFlag struct { + s *string + v string +} + +func (f *stringFlag) IsSet() bool { + if f == nil { + return false + } + return *f.s == f.v +} + +var flags = make(map[Flag]os.Signal) + +// Flags returns flags that was added by the function AddFlag. +func Flags() map[Flag]os.Signal { + return flags +} + +// AddFlag adds the flag and signal to the internal map. +func AddFlag(f Flag, sig os.Signal) { + flags[f] = sig +} + +// SendCommands sends active signals to the given process. +func SendCommands(p *os.Process) (err error) { + for _, sig := range signals() { + if err = p.Signal(sig); err != nil { + return + } + } + return +} + +// ActiveFlags returns flags that has the state 'set'. +func ActiveFlags() (ret []Flag) { + ret = make([]Flag, 0, 1) + for f := range flags { + if f.IsSet() { + ret = append(ret, f) + } + } + return +} + +func signals() (ret []os.Signal) { + ret = make([]os.Signal, 0, 1) + for f, sig := range flags { + if f.IsSet() { + ret = append(ret, sig) + } + } + return +} diff --git a/vendor/github.com/sevlyar/go-daemon/daemon.go b/vendor/github.com/sevlyar/go-daemon/daemon.go new file mode 100644 index 000000000..6c44aae25 --- /dev/null +++ b/vendor/github.com/sevlyar/go-daemon/daemon.go @@ -0,0 +1,44 @@ +package daemon + +import ( + "errors" + "os" +) + +var errNotSupported = errors.New("daemon: Non-POSIX OS is not supported") + +// Mark of daemon process - system environment variable _GO_DAEMON=1 +const ( + MARK_NAME = "_GO_DAEMON" + MARK_VALUE = "1" +) + +// Default file permissions for log and pid files. +const FILE_PERM = os.FileMode(0640) + +// WasReborn returns true in child process (daemon) and false in parent process. +func WasReborn() bool { + return os.Getenv(MARK_NAME) == MARK_VALUE +} + +// Reborn runs second copy of current process in the given context. +// function executes separate parts of code in child process and parent process +// and provides demonization of child process. It look similar as the +// fork-daemonization, but goroutine-safe. +// In success returns *os.Process in parent process and nil in child process. +// Otherwise returns error. +func (d *Context) Reborn() (child *os.Process, err error) { + return d.reborn() +} + +// Search searches daemons process by given in context pid file name. +// If success returns pointer on daemons os.Process structure, +// else returns error. Returns nil if filename is empty. +func (d *Context) Search() (daemon *os.Process, err error) { + return d.search() +} + +// Release provides correct pid-file release in daemon. +func (d *Context) Release() (err error) { + return d.release() +} diff --git a/vendor/github.com/sevlyar/go-daemon/daemon_stub.go b/vendor/github.com/sevlyar/go-daemon/daemon_stub.go new file mode 100644 index 000000000..25f3bbb33 --- /dev/null +++ b/vendor/github.com/sevlyar/go-daemon/daemon_stub.go @@ -0,0 +1,52 @@ +// +build !darwin,!dragonfly,!freebsd,!linux,!netbsd,!openbsd,!plan9,!solaris + +package daemon + +import ( + "os" +) + +// A Context describes daemon context. +type Context struct { + // If PidFileName is non-empty, parent process will try to create and lock + // pid file with given name. Child process writes process id to file. + PidFileName string + // Permissions for new pid file. 
+ PidFilePerm os.FileMode + + // If LogFileName is non-empty, parent process will create file with given name + // and will link to fd 2 (stderr) for child process. + LogFileName string + // Permissions for new log file. + LogFilePerm os.FileMode + + // If WorkDir is non-empty, the child changes into the directory before + // creating the process. + WorkDir string + // If Chroot is non-empty, the child changes root directory + Chroot string + + // If Env is non-nil, it gives the environment variables for the + // daemon-process in the form returned by os.Environ. + // If it is nil, the result of os.Environ will be used. + Env []string + // If Args is non-nil, it gives the command-line args for the + // daemon-process. If it is nil, the result of os.Args will be used + // (without program name). + Args []string + + // If Umask is non-zero, the daemon-process call Umask() func with given value. + Umask int +} + +func (d *Context) reborn() (child *os.Process, err error) { + return nil, errNotSupported +} + +func (d *Context) search() (daemon *os.Process, err error) { + return nil, errNotSupported +} + +func (d *Context) release() (err error) { + return errNotSupported +} diff --git a/vendor/github.com/sevlyar/go-daemon/daemon_unix.go b/vendor/github.com/sevlyar/go-daemon/daemon_unix.go new file mode 100644 index 000000000..9a334f2f1 --- /dev/null +++ b/vendor/github.com/sevlyar/go-daemon/daemon_unix.go @@ -0,0 +1,264 @@ +// +build darwin dragonfly freebsd linux netbsd openbsd plan9 solaris + +package daemon + +import ( + "encoding/json" + "fmt" + "os" + "path/filepath" + "syscall" + + "github.com/kardianos/osext" +) + +// A Context describes daemon context. +type Context struct { + // If PidFileName is non-empty, parent process will try to create and lock + // pid file with given name. Child process writes process id to file. + PidFileName string + // Permissions for new pid file. + PidFilePerm os.FileMode + + // If LogFileName is non-empty, parent process will create file with given name + // and will link to fd 2 (stderr) for child process. + LogFileName string + // Permissions for new log file. + LogFilePerm os.FileMode + + // If WorkDir is non-empty, the child changes into the directory before + // creating the process. + WorkDir string + // If Chroot is non-empty, the child changes root directory + Chroot string + + // If Env is non-nil, it gives the environment variables for the + // daemon-process in the form returned by os.Environ. + // If it is nil, the result of os.Environ will be used. + Env []string + // If Args is non-nil, it gives the command-line args for the + // daemon-process. If it is nil, the result of os.Args will be used. + Args []string + + // Credential holds user and group identities to be assumed by a daemon-process. + Credential *syscall.Credential + // If Umask is non-zero, the daemon-process call Umask() func with given value. + Umask int + + // Struct contains only serializable public fields (!!!) 
+ abspath string + pidFile *LockFile + logFile *os.File + nullFile *os.File + + rpipe, wpipe *os.File +} + +func (d *Context) reborn() (child *os.Process, err error) { + if !WasReborn() { + child, err = d.parent() + } else { + err = d.child() + } + return +} + +func (d *Context) search() (daemon *os.Process, err error) { + if len(d.PidFileName) > 0 { + var pid int + if pid, err = ReadPidFile(d.PidFileName); err != nil { + return + } + daemon, err = os.FindProcess(pid) + } + return +} + +func (d *Context) parent() (child *os.Process, err error) { + if err = d.prepareEnv(); err != nil { + return + } + + defer d.closeFiles() + if err = d.openFiles(); err != nil { + return + } + + attr := &os.ProcAttr{ + Dir: d.WorkDir, + Env: d.Env, + Files: d.files(), + Sys: &syscall.SysProcAttr{ + //Chroot: d.Chroot, + Credential: d.Credential, + Setsid: true, + }, + } + + if child, err = os.StartProcess(d.abspath, d.Args, attr); err != nil { + if d.pidFile != nil { + d.pidFile.Remove() + } + return + } + + d.rpipe.Close() + encoder := json.NewEncoder(d.wpipe) + if err = encoder.Encode(d); err != nil { + return + } + _, err = fmt.Fprint(d.wpipe, "\n\n") + return +} + +func (d *Context) openFiles() (err error) { + if d.PidFilePerm == 0 { + d.PidFilePerm = FILE_PERM + } + if d.LogFilePerm == 0 { + d.LogFilePerm = FILE_PERM + } + + if d.nullFile, err = os.Open(os.DevNull); err != nil { + return + } + + if len(d.PidFileName) > 0 { + if d.PidFileName, err = filepath.Abs(d.PidFileName); err != nil { + return err + } + if d.pidFile, err = OpenLockFile(d.PidFileName, d.PidFilePerm); err != nil { + return + } + if err = d.pidFile.Lock(); err != nil { + return + } + if len(d.Chroot) > 0 { + // Calculate PID-file absolute path in child's environment + if d.PidFileName, err = filepath.Rel(d.Chroot, d.PidFileName); err != nil { + return err + } + d.PidFileName = "/" + d.PidFileName + } + } + + if len(d.LogFileName) > 0 { + if d.logFile, err = os.OpenFile(d.LogFileName, + os.O_WRONLY|os.O_CREATE|os.O_APPEND, d.LogFilePerm); err != nil { + return + } + } + + d.rpipe, d.wpipe, err = os.Pipe() + return +} + +func (d *Context) closeFiles() (err error) { + cl := func(file **os.File) { + if *file != nil { + (*file).Close() + *file = nil + } + } + cl(&d.rpipe) + cl(&d.wpipe) + cl(&d.logFile) + cl(&d.nullFile) + if d.pidFile != nil { + d.pidFile.Close() + d.pidFile = nil + } + return +} + +func (d *Context) prepareEnv() (err error) { + if d.abspath, err = osext.Executable(); err != nil { + return + } + + if len(d.Args) == 0 { + d.Args = os.Args + } + + mark := fmt.Sprintf("%s=%s", MARK_NAME, MARK_VALUE) + if len(d.Env) == 0 { + d.Env = os.Environ() + } + d.Env = append(d.Env, mark) + + return +} + +func (d *Context) files() (f []*os.File) { + log := d.nullFile + if d.logFile != nil { + log = d.logFile + } + + f = []*os.File{ + d.rpipe, // (0) stdin + log, // (1) stdout + log, // (2) stderr + d.nullFile, // (3) dup on fd 0 after initialization + } + + if d.pidFile != nil { + f = append(f, d.pidFile.File) // (4) pid file + } + return +} + +var initialized = false + +func (d *Context) child() (err error) { + if initialized { + return os.ErrInvalid + } + initialized = true + + decoder := json.NewDecoder(os.Stdin) + if err = decoder.Decode(d); err != nil { + d.pidFile.Remove() + return + } + + // create PID file after context decoding to know PID file full path. 
+ if len(d.PidFileName) > 0 { + d.pidFile = NewLockFile(os.NewFile(4, d.PidFileName)) + if err = d.pidFile.WritePid(); err != nil { + return + } + } + + if err = syscall.Close(0); err != nil { + d.pidFile.Remove() + return + } + if err = syscallDup(3, 0); err != nil { + d.pidFile.Remove() + return + } + + if d.Umask != 0 { + syscall.Umask(int(d.Umask)) + } + if len(d.Chroot) > 0 { + err = syscall.Chroot(d.Chroot) + if err != nil { + d.pidFile.Remove() + return + } + } + + return +} + +func (d *Context) release() (err error) { + if !initialized { + return + } + if d.pidFile != nil { + err = d.pidFile.Remove() + } + return +} diff --git a/vendor/github.com/sevlyar/go-daemon/lock_file.go b/vendor/github.com/sevlyar/go-daemon/lock_file.go new file mode 100644 index 000000000..1ec81db4c --- /dev/null +++ b/vendor/github.com/sevlyar/go-daemon/lock_file.go @@ -0,0 +1,109 @@ +package daemon + +import ( + "errors" + "fmt" + "os" +) + +var ( + // ErrWouldBlock indicates on locking pid-file by another process. + ErrWouldBlock = errors.New("daemon: Resource temporarily unavailable") +) + +// LockFile wraps *os.File and provide functions for locking of files. +type LockFile struct { + *os.File +} + +// NewLockFile returns a new LockFile with the given File. +func NewLockFile(file *os.File) *LockFile { + return &LockFile{file} +} + +// CreatePidFile opens the named file, applies exclusive lock and writes +// current process id to file. +func CreatePidFile(name string, perm os.FileMode) (lock *LockFile, err error) { + if lock, err = OpenLockFile(name, perm); err != nil { + return + } + if err = lock.Lock(); err != nil { + lock.Remove() + return + } + if err = lock.WritePid(); err != nil { + lock.Remove() + } + return +} + +// OpenLockFile opens the named file with flags os.O_RDWR|os.O_CREATE and specified perm. +// If successful, function returns LockFile for opened file. +func OpenLockFile(name string, perm os.FileMode) (lock *LockFile, err error) { + var file *os.File + if file, err = os.OpenFile(name, os.O_RDWR|os.O_CREATE, perm); err == nil { + lock = &LockFile{file} + } + return +} + +// Lock apply exclusive lock on an open file. If file already locked, returns error. +func (file *LockFile) Lock() error { + return lockFile(file.Fd()) +} + +// Unlock remove exclusive lock on an open file. +func (file *LockFile) Unlock() error { + return unlockFile(file.Fd()) +} + +// ReadPidFile reads process id from file with give name and returns pid. +// If unable read from a file, returns error. +func ReadPidFile(name string) (pid int, err error) { + var file *os.File + if file, err = os.OpenFile(name, os.O_RDONLY, 0640); err != nil { + return + } + defer file.Close() + + lock := &LockFile{file} + pid, err = lock.ReadPid() + return +} + +// WritePid writes current process id to an open file. +func (file *LockFile) WritePid() (err error) { + if _, err = file.Seek(0, os.SEEK_SET); err != nil { + return + } + var fileLen int + if fileLen, err = fmt.Fprint(file, os.Getpid()); err != nil { + return + } + if err = file.Truncate(int64(fileLen)); err != nil { + return + } + err = file.Sync() + return +} + +// ReadPid reads process id from file and returns pid. +// If unable read from a file, returns error. +func (file *LockFile) ReadPid() (pid int, err error) { + if _, err = file.Seek(0, os.SEEK_SET); err != nil { + return + } + _, err = fmt.Fscan(file, &pid) + return +} + +// Remove removes lock, closes and removes an open file. 
+func (file *LockFile) Remove() error { + defer file.Close() + + if err := file.Unlock(); err != nil { + return err + } + + return os.Remove(file.Name()) +} diff --git a/vendor/github.com/sevlyar/go-daemon/lock_file_stub.go b/vendor/github.com/sevlyar/go-daemon/lock_file_stub.go new file mode 100644 index 000000000..3be0c01c6 --- /dev/null +++ b/vendor/github.com/sevlyar/go-daemon/lock_file_stub.go @@ -0,0 +1,11 @@ +// +build !darwin,!dragonfly,!freebsd,!linux,!netbsd,!openbsd,!plan9,!solaris + +package daemon + +func lockFile(fd uintptr) error { + return errNotSupported +} + +func unlockFile(fd uintptr) error { + return errNotSupported +} diff --git a/vendor/github.com/sevlyar/go-daemon/lock_file_unix.go b/vendor/github.com/sevlyar/go-daemon/lock_file_unix.go new file mode 100644 index 000000000..a34edf756 --- /dev/null +++ b/vendor/github.com/sevlyar/go-daemon/lock_file_unix.go @@ -0,0 +1,23 @@ +// +build darwin dragonfly freebsd linux netbsd openbsd plan9 solaris + +package daemon + +import ( + "syscall" +) + +func lockFile(fd uintptr) error { + err := syscall.Flock(int(fd), syscall.LOCK_EX|syscall.LOCK_NB) + if err == syscall.EWOULDBLOCK { + err = ErrWouldBlock + } + return err +} + +func unlockFile(fd uintptr) error { + err := syscall.Flock(int(fd), syscall.LOCK_UN) + if err == syscall.EWOULDBLOCK { + err = ErrWouldBlock + } + return err +} diff --git a/vendor/github.com/sevlyar/go-daemon/signal.go b/vendor/github.com/sevlyar/go-daemon/signal.go new file mode 100644 index 000000000..fe512da40 --- /dev/null +++ b/vendor/github.com/sevlyar/go-daemon/signal.go @@ -0,0 +1,59 @@ +package daemon + +import ( + "errors" + "os" + "os/signal" + "syscall" +) + +// ErrStop should be returned signal handler function +// for termination of handling signals. +var ErrStop = errors.New("stop serve signals") + +// SignalHandlerFunc is the interface for signal handler functions. +type SignalHandlerFunc func(sig os.Signal) (err error) + +// SetSigHandler sets handler for the given signals. +// SIGTERM has the default handler, he returns ErrStop. +func SetSigHandler(handler SignalHandlerFunc, signals ...os.Signal) { + for _, sig := range signals { + handlers[sig] = handler + } +} + +// ServeSignals calls handlers for system signals. +func ServeSignals() (err error) { + signals := make([]os.Signal, 0, len(handlers)) + for sig := range handlers { + signals = append(signals, sig) + } + + ch := make(chan os.Signal, 8) + signal.Notify(ch, signals...) 
+ + for sig := range ch { + err = handlers[sig](sig) + if err != nil { + break + } + } + + signal.Stop(ch) + + if err == ErrStop { + err = nil + } + + return +} + +var handlers = make(map[os.Signal]SignalHandlerFunc) + +func init() { + handlers[syscall.SIGTERM] = sigtermDefaultHandler +} + +func sigtermDefaultHandler(sig os.Signal) error { + return ErrStop +} diff --git a/vendor/github.com/sevlyar/go-daemon/syscall_dup.go b/vendor/github.com/sevlyar/go-daemon/syscall_dup.go new file mode 100644 index 000000000..e5721015c --- /dev/null +++ b/vendor/github.com/sevlyar/go-daemon/syscall_dup.go @@ -0,0 +1,12 @@ +// +build !linux !arm64 +// +build !windows + +package daemon + +import ( + "syscall" +) + +func syscallDup(oldfd int, newfd int) (err error) { + return syscall.Dup2(oldfd, newfd) +} diff --git a/vendor/github.com/sevlyar/go-daemon/syscall_dup_arm64.go b/vendor/github.com/sevlyar/go-daemon/syscall_dup_arm64.go new file mode 100644 index 000000000..af00cd2ac --- /dev/null +++ b/vendor/github.com/sevlyar/go-daemon/syscall_dup_arm64.go @@ -0,0 +1,11 @@ +// +build linux,arm64 + +package daemon + +import "syscall" + +func syscallDup(oldfd int, newfd int) (err error) { + // linux_arm64 platform doesn't have syscall.Dup2 + // so use the nearly identical syscall.Dup3 instead. + return syscall.Dup3(oldfd, newfd, 0) +} diff --git a/vendor/vendor.json b/vendor/vendor.json index 1565ffc5c..353d347bf 100644 --- a/vendor/vendor.json +++ b/vendor/vendor.json @@ -2,6 +2,12 @@ "comment": "", "ignore": "test", "package": [ + { + "checksumSHA1": "xy6SD08Gw0mk4ILtNfaLNowczYM=", + "path": "github.com/360EntSecGroup-Skylar/excelize", + "revision": "58a7b23d11ef36156af6f694a1753715df8ae2fc", + "revisionTime": "2018-07-10T02:10:38Z" + }, { "checksumSHA1": "zgHK1vxUJDZTXAPCw+Bls7St0Ks=", "path": "github.com/astaxie/beego/config", @@ -184,6 +190,12 @@ "revision": "e1b9828bc9e5904baec057a154c09ca40fe7fae0", "revisionTime": "2017-10-27T13:37:09Z" }, + { + "checksumSHA1": "gEjGS03N1eysvpQ+FCHTxPcbxXc=", + "path": "github.com/kardianos/osext", + "revision": "ae77be60afb1dcacde03767a8c37337fad28ac14", + "revisionTime": "2017-05-10T13:15:34Z" + }, { "checksumSHA1": "IfZcD4U1dtllJKlPNeD2aU4Jn98=", "path": "github.com/lib/pq", @@ -214,6 +226,12 @@ "revision": "00c29f56e2386353d58c599509e8dc3801b0d716", "revisionTime": "2018-02-20T23:01:11Z" }, + { + "checksumSHA1": "2jsbDTvwxafPp7FJjJ8IIFlTLjs=", + "path": "github.com/mohae/deepcopy", + "revision": "c48cc78d482608239f6c4c92a4abd87eb8761c90", + "revisionTime": "2017-09-29T03:49:55Z" + }, { "checksumSHA1": "CxNwJP++vjUAyy3bbJnNss1Il9Q=", "path": "github.com/moul/http2curl", @@ -329,18 +347,36 @@ "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", "revisionTime": "2018-02-25T19:24:45Z" }, + { + "checksumSHA1": "UTe/n6TbTfNlXilqmDaAi1+L3cs=", + "path": "github.com/rpoletaev/supervisord/types", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, { "checksumSHA1": "F4x0/vDYzuOYgOMp3NlFbbTX1Vg=", "path": "github.com/rpoletaev/supervisord/util", "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", "revisionTime": "2018-02-25T19:24:45Z" }, + { + "checksumSHA1": "egwN2yJsAyH3BLqAldbGtri6LJQ=", + "path": "github.com/rpoletaev/supervisord/xmlrpcclient", + "revision": "4e265b19bfbdc40722a3d33d9e65dc2721f7ff6a", + "revisionTime": "2018-02-25T19:24:45Z" + }, { "checksumSHA1": "eDQ6f1EsNf+frcRO/9XukSEchm8=", "path": "github.com/satori/go.uuid", "revision": "36e9d2ebbde5e3f13ab2e25625fd453271d6522e", "revisionTime": 
"2018-01-03T17:44:51Z" }, + { + "checksumSHA1": "CrwV2Az3LIl8uFhA1/wCdXQsCi0=", + "path": "github.com/sevlyar/go-daemon", + "revision": "45a2ba1b7c6710a044163fa109bf08d060bc3afa", + "revisionTime": "2018-05-02T15:13:47Z" + }, { "checksumSHA1": "4nhXt+svWtwsjFzQMbBywcykuEo=", "path": "github.com/shopspring/decimal", From bf74ef9380635683ba9e9fea99e2e74765da3c54 Mon Sep 17 00:00:00 2001 From: Roman Poletaev Date: Thu, 12 Jul 2018 17:36:50 +0300 Subject: [PATCH 158/169] remove drop node func --- packages/smart/funcs.go | 5 ----- 1 file changed, 5 deletions(-) diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 736797a13..2c7179d53 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -256,7 +256,6 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { "GetBlockHistory": GetBlockHistory, "GetMenuHistory": GetMenuHistory, "GetContractHistory": GetContractHistory, - "MemoryLeak": MemoryLeak, } switch vt { @@ -1843,10 +1842,6 @@ func GetPageHistory(sc *SmartContract, id int64) ([]interface{}, error) { return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `pages`, id) } -func MemoryLeak(sc *SmartContract) error { - MemoryLeak(sc) - return nil -} func GetMenuHistory(sc *SmartContract, id int64) ([]interface{}, error) { return GetHistory(sc.DbTransaction, sc.TxSmart.EcosystemID, `menu`, id) } From c4b673788532fbc79064355957a908fa6bf34275 Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Mon, 16 Jul 2018 12:32:27 +0500 Subject: [PATCH 159/169] Modified sections table (#447) --- packages/migration/ecosystem.go | 4 ++-- packages/migration/sections_data.go | 5 +++-- packages/migration/tables_data.go | 2 +- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/packages/migration/ecosystem.go b/packages/migration/ecosystem.go index 588e6c625..4111e0537 100644 --- a/packages/migration/ecosystem.go +++ b/packages/migration/ecosystem.go @@ -79,8 +79,8 @@ var schemaEcosystem = `DROP TABLE IF EXISTS "%[1]d_keys"; CREATE TABLE "%[1]d_ke "title" varchar(255) NOT NULL DEFAULT '', "urlname" varchar(255) NOT NULL DEFAULT '', "page" varchar(255) NOT NULL DEFAULT '', - "roles_access" text NOT NULL DEFAULT '', - "delete" bigint NOT NULL DEFAULT '0' + "roles_access" jsonb, + "status" bigint NOT NULL DEFAULT '0' ); ALTER TABLE ONLY "%[1]d_sections" ADD CONSTRAINT "%[1]d_sections_pkey" PRIMARY KEY (id); diff --git a/packages/migration/sections_data.go b/packages/migration/sections_data.go index aeae207de..455f8fe92 100644 --- a/packages/migration/sections_data.go +++ b/packages/migration/sections_data.go @@ -1,6 +1,7 @@ package migration var sectionsDataSQL = ` -INSERT INTO "%[1]d_sections" ("id","title","urlname","page","roles_access", "delete") VALUES -('1', 'Home', 'home', 'default_page', '', 0); +INSERT INTO "%[1]d_sections" ("id","title","urlname","page","roles_access", "status") VALUES +('1', 'Home', 'home', 'default_page', '[]', 2), +('2', 'Developer', 'admin', 'admin_index', '[]', 1); ` diff --git a/packages/migration/tables_data.go b/packages/migration/tables_data.go index fc40b4a70..a6256cb83 100644 --- a/packages/migration/tables_data.go +++ b/packages/migration/tables_data.go @@ -119,7 +119,7 @@ var tablesDataSQL = `INSERT INTO "%[1]d_tables" ("id", "name", "permissions","co "urlname": "ContractConditions(\"MainCondition\")", "page": "ContractConditions(\"MainCondition\")", "roles_access": "ContractConditions(\"MainCondition\")", - "delete": "ContractConditions(\"MainCondition\")"}', + "status": "ContractConditions(\"MainCondition\")"}', 
'ContractConditions("MainCondition")'), ('14', 'applications', '{"insert": "ContractConditions(\"MainCondition\")", From 36c4a60244c2fa5f84cec8ffe0cf986977b3ab55 Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Mon, 16 Jul 2018 12:32:58 +0500 Subject: [PATCH 160/169] Changed int to int64 (#448) --- packages/smart/builtin_excel.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/smart/builtin_excel.go b/packages/smart/builtin_excel.go index 588deae24..0f70fbfa3 100644 --- a/packages/smart/builtin_excel.go +++ b/packages/smart/builtin_excel.go @@ -35,7 +35,7 @@ func GetDataFromXLSX(sc *SmartContract, binaryID, startLine, linesCount, sheetNu } // GetRowsCountXLSX returns count of rows from excel file -func GetRowsCountXLSX(sc *SmartContract, binaryID, sheetNum int64) (int, error) { +func GetRowsCountXLSX(sc *SmartContract, binaryID, sheetNum int64) (int64, error) { book, err := excelBookFromStoredBinary(sc, binaryID) if err != nil { return -1, err @@ -43,7 +43,7 @@ func GetRowsCountXLSX(sc *SmartContract, binaryID, sheetNum int64) (int, error) sheetName := book.GetSheetName(int(sheetNum)) rows := book.GetRows(sheetName) - return len(rows), nil + return int64(len(rows)), nil } func excelBookFromStoredBinary(sc *SmartContract, binaryID int64) (*xl.File, error) { From 30802101ed638bf81e58e58f0eafac7b32d62ea8 Mon Sep 17 00:00:00 2001 From: Dmitriy Chertkov Date: Mon, 16 Jul 2018 14:30:40 +0300 Subject: [PATCH 161/169] Changed permissions of tables --- packages/migration/tables_data.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/migration/tables_data.go b/packages/migration/tables_data.go index a6256cb83..b984aeb36 100644 --- a/packages/migration/tables_data.go +++ b/packages/migration/tables_data.go @@ -68,8 +68,8 @@ var tablesDataSQL = `INSERT INTO "%[1]d_tables" ("id", "name", "permissions","co "conditions": "ContractConditions(\"MainCondition\")" }', 'ContractAccess("@1EditTable")'), ('9', 'members', - '{"insert":"ContractAccess(\"Profile_Edit\")","update":"ContractConditions(\"MainCondition\")","new_column":"ContractConditions(\"MainCondition\")"}', - '{"image_id":"ContractAccess(\"Profile_Edit\")","member_info":"ContractAccess(\"Profile_Edit\")","member_name":"false"}', + '{"insert":"ContractAccess(\"Profile_Edit\")","update":"true","new_column":"ContractConditions(\"MainCondition\")"}', + '{"image_id":"ContractAccess(\"ProfileEditAvatar\")","member_info":"ContractAccess(\"Profile_Edit\")","member_name":"false"}', 'ContractConditions("MainCondition")'), ('10', 'roles', '{"insert":"ContractAccess(\"Roles_Create\")", @@ -156,7 +156,7 @@ var tablesDataSQL = `INSERT INTO "%[1]d_tables" ("id", "name", "permissions","co "conditions": "ContractConditions(\"MainCondition\")"}', 'ContractAccess("@1EditTable")'), ('19', 'buffer_data', - '{"insert":"true","update":"ContractConditions(\"MainCondition\")", + '{"insert":"true","update":"true", "new_column":"ContractConditions(\"MainCondition\")"}', '{"key": "false", "value": "true", From 39c0dd4a6f4f6f9c4228a7aaf7cbbd226de65fd2 Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Wed, 18 Jul 2018 15:39:06 +0500 Subject: [PATCH 162/169] Added Hint function (#449) --- packages/template/funcs.go | 1 + packages/template/template_test.go | 2 ++ 2 files changed, 3 insertions(+) diff --git a/packages/template/funcs.go b/packages/template/funcs.go index 3bd679113..7b2018518 100644 --- a/packages/template/funcs.go +++ b/packages/template/funcs.go @@ -71,6 +71,7 @@ func init() { 
`Source,Id,RollbackId`} funcs[`GetPageHistory`] = tplFunc{getPageHistoryTag, defaultTag, `getpagehistory`, `Source,Id,RollbackId`} + funcs[`Hint`] = tplFunc{defaultTag, defaultTag, `hint`, `Icon,Title,Text`} funcs[`ImageInput`] = tplFunc{defaultTag, defaultTag, `imageinput`, `Name,Width,Ratio,Format`} funcs[`InputErr`] = tplFunc{defaultTag, defaultTag, `inputerr`, `*`} funcs[`JsonToSource`] = tplFunc{jsontosourceTag, defaultTag, `jsontosource`, `Source,Data`} diff --git a/packages/template/template_test.go b/packages/template/template_test.go index 60d14e153..ecb6917f4 100644 --- a/packages/template/template_test.go +++ b/packages/template/template_test.go @@ -42,6 +42,8 @@ func TestJSON(t *testing.T) { } var forTest = tplList{ + {`Hint(Title: some text, Icon: default, Text: This is hint text)`, + `[{"tag":"hint","attr":{"icon":"default","text":"This is hint text","title":"some text"}}]`}, {`AddToolButton(Title: Open, Page: default).Popup(Width: 50, Header: Test)`, `[{"tag":"addtoolbutton","attr":{"page":"default","popup":{"header":"Test","width":"50"},"title":"Open"}}]`}, {`SetVar(ok, OK)Input(Type: text, Value: #ok# Now(YY))Input(Type:text, Value: #ok# Some text)`, From 3743b0e8bfae7cab5258f5f8db6e5b05895d02a5 Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Wed, 18 Jul 2018 16:34:24 +0500 Subject: [PATCH 163/169] Added ToUpper (#455) --- packages/smart/funcs.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 698de7d50..4c9c4afa5 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -150,6 +150,7 @@ var ( "Substr": 10, "Size": 10, "ToLower": 10, + "ToUpper": 10, "TrimSpace": 10, "TableConditions": 100, "ValidateCondition": 30, @@ -226,6 +227,7 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { "ValidateCondition": ValidateCondition, "TrimSpace": strings.TrimSpace, "ToLower": strings.ToLower, + "ToUpper": strings.ToUpper, "CreateEcosystem": CreateEcosystem, "RollbackEcosystem": RollbackEcosystem, "CreateContract": CreateContract, From 2ac5a08b95ff15eb7ccbc8d9a18e29b38d47ae6a Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Thu, 19 Jul 2018 09:50:55 +0500 Subject: [PATCH 164/169] Added elif (#453) --- packages/script/compile_test.go | 69 +++++++++++++++++++++++++++++++++ packages/script/lex.go | 49 ++++++++++++++++++++++- 2 files changed, 116 insertions(+), 2 deletions(-) diff --git a/packages/script/compile_test.go b/packages/script/compile_test.go index 0eee7fd91..a1c5a871c 100644 --- a/packages/script/compile_test.go +++ b/packages/script/compile_test.go @@ -427,6 +427,75 @@ func TestVMCompile(t *testing.T) { func result() string { return Sprintf("ok=%d", long()) }`, `result`, `strconv.ParseInt: parsing "99999999999999999999": value out of range 99999999999999999999 [Ln:2 Col:34]`}, + {`func result() string { + var i, result int + + if true { + if false { + result = 99 + } else { + result = 5 + } + } + if i == 1 { + result = 20 + } elif i> 0 { + result = 30 + } + elif i == 0 + { + result = result + 50 + if true { + i=10 + } + } elif i==10 { + Println("3") + result = 0 + i=33 + } elif false { + Println("4") + result = 1 + } + else + { + Println("5") + result = 2 + } + if i == 4 { + result = result + } elif i == 20 { + result = 22 + } else { + result = result + 23 + i = 11 + } + if i == 11 { + result = result + 7 + } else { + result = 0 + } + if result == 85 { + if false { + result = 1 + } elif 0 { + result = 5 + } elif 1 { + result = result + 10 + } + } + if result == 10 { + result = 11 + } elif 
result == 95 { + result = result + 1 + if false { + result = 0 + } elif true { + result = result + 4 + } + } + return Sprintf("%d", result) + } + `, `result`, `100`}, } vm := NewVM() vm.Extern = true diff --git a/packages/script/lex.go b/packages/script/lex.go index f70dcf3b7..95f977736 100644 --- a/packages/script/lex.go +++ b/packages/script/lex.go @@ -90,6 +90,7 @@ const ( keyFunc keyReturn keyIf + keyElif keyElse keyWhile keyTrue @@ -117,8 +118,9 @@ const ( var ( // The list of key words keywords = map[string]uint32{`contract`: keyContract, `func`: keyFunc, `return`: keyReturn, - `if`: keyIf, `else`: keyElse, msgError: keyError, msgWarning: keyWarning, msgInfo: keyInfo, - `while`: keyWhile, `data`: keyTX, `settings`: keySettings, `nil`: keyNil, `action`: keyAction, `conditions`: keyCond, + `if`: keyIf, `elif`: keyElif, `else`: keyElse, msgError: keyError, msgWarning: keyWarning, + msgInfo: keyInfo, `while`: keyWhile, `data`: keyTX, `settings`: keySettings, `nil`: keyNil, + `action`: keyAction, `conditions`: keyCond, `true`: keyTrue, `false`: keyFalse, `break`: keyBreak, `continue`: keyContinue, `var`: keyVar, `...`: keyTail} // list of available types @@ -143,6 +145,12 @@ func (l Lexem) GetLogger() *log.Entry { return log.WithFields(log.Fields{"lex_type": l.Type, "lex_line": l.Line, "lex_column": l.Column}) } +type ifBuf struct { + count int + pair int + stop bool +} + // Lexems is a slice of lexems type Lexems []*Lexem @@ -177,6 +185,7 @@ func lexParser(input []rune) (Lexems, error) { length = uint32(len(input)) + 1 line = 1 skip := false + ifbuf := make([]ifBuf, 0) for off < length { // Here we go through the symbols one by one if off == length-1 { @@ -206,6 +215,18 @@ func lexParser(input []rune) (Lexems, error) { if (flags & lexfNext) != 0 { right++ } + if len(ifbuf) > 0 && ifbuf[len(ifbuf)-1].stop && lexID != lexNewLine { + name := string(input[lexOff:right]) + if name != `else` && name != `elif` { + for i := 0; i < ifbuf[len(ifbuf)-1].count; i++ { + lexems = append(lexems, &Lexem{lexSys | (uint32('}') << 8), + uint32('}'), line, lexOff - offline + 1}) + } + ifbuf = ifbuf[:len(ifbuf)-1] + } else { + ifbuf[len(ifbuf)-1].stop = false + } + } var value interface{} switch lexID { case lexNewLine: @@ -217,6 +238,17 @@ func lexParser(input []rune) (Lexems, error) { ch := uint32(input[lexOff]) lexID |= ch << 8 value = ch + if len(ifbuf) > 0 { + if ch == '{' { + ifbuf[len(ifbuf)-1].pair++ + } + if ch == '}' { + ifbuf[len(ifbuf)-1].pair-- + if ifbuf[len(ifbuf)-1].pair == 0 { + ifbuf[len(ifbuf)-1].stop = true + } + } + } case lexString, lexComment: value = string(input[lexOff+1 : right-1]) if lexID == lexString && skip { @@ -255,6 +287,19 @@ func lexParser(input []rune) (Lexems, error) { value = name[1:] } else if keyID, ok := keywords[name]; ok { switch keyID { + case keyIf: + ifbuf = append(ifbuf, ifBuf{}) + lexID = lexKeyword | (keyID << 8) + value = keyID + case keyElif: + if len(ifbuf) > 0 { + lexems = append(lexems, &Lexem{lexKeyword | (keyElse << 8), + uint32(keyElse), line, lexOff - offline + 1}, + &Lexem{lexSys | ('{' << 8), uint32('{'), line, lexOff - offline + 1}) + lexID = lexKeyword | (keyIf << 8) + value = uint32(keyIf) + ifbuf[len(ifbuf)-1].count++ + } case keyAction, keyCond: if len(lexems) > 0 { lexf := *lexems[len(lexems)-1] From e1dd864347affa83f912676b29f5bcb21b99399a Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Thu, 19 Jul 2018 11:13:07 +0500 Subject: [PATCH 165/169] Added BlockTime (#457) --- packages/api/contract_test.go | 11 +++++++++++ 
packages/smart/funcs.go | 9 +++++++++ 2 files changed, 20 insertions(+) diff --git a/packages/api/contract_test.go b/packages/api/contract_test.go index f2ac1c0c9..f33670f98 100644 --- a/packages/api/contract_test.go +++ b/packages/api/contract_test.go @@ -90,6 +90,17 @@ func TestNewContracts(t *testing.T) { } var contracts = []smartContract{ + {`BlockTimeCheck`, `contract BlockTimeCheck { + action { + if Size(BlockTime()) == Size("2006-01-02 15:04:05") { + Test("ok", "1") + } else { + Test("ok", "0") + } + } + }`, []smartParams{ + {nil, map[string]string{`ok`: `1`}}, + }}, {`RecCall`, `contract RecCall { data { } conditions { } diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 4c9c4afa5..001c79bb6 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -269,6 +269,7 @@ func EmbedFuncs(vm *script.VM, vt script.VMType) { "GetContractHistoryRow": GetContractHistoryRow, "GetDataFromXLSX": GetDataFromXLSX, "GetRowsCountXLSX": GetRowsCountXLSX, + "BlockTime": BlockTime, } switch vt { @@ -1911,3 +1912,11 @@ func GetMenuHistoryRow(sc *SmartContract, id, idRollback int64) (map[string]inte func GetContractHistoryRow(sc *SmartContract, id, idRollback int64) (map[string]interface{}, error) { return GetHistoryRow(sc, `contracts`, id, idRollback) } + +func BlockTime(sc *SmartContract) string { + var blockTime int64 + if sc.BlockData != nil { + blockTime = sc.BlockData.Time + } + return Date(`2006-01-02 15:04:05`, blockTime) +} From d5c88ed42235fa4a0f5b73ec82a90834478f38bd Mon Sep 17 00:00:00 2001 From: Alexey Krivonogov Date: Thu, 19 Jul 2018 12:36:34 +0500 Subject: [PATCH 166/169] feature/1030 now (#458) * Added checkNow function and errNow error * Added checking localtime and current_time --- packages/api/contract_test.go | 36 ++++++++++++++++++++++++++++++++++- packages/smart/errors.go | 1 + packages/smart/funcs.go | 13 +++++++++++++ packages/smart/selective.go | 11 ++++++++--- 4 files changed, 57 insertions(+), 4 deletions(-) diff --git a/packages/api/contract_test.go b/packages/api/contract_test.go index f33670f98..6d283c1e3 100644 --- a/packages/api/contract_test.go +++ b/packages/api/contract_test.go @@ -90,6 +90,29 @@ func TestNewContracts(t *testing.T) { } var contracts = []smartContract{ + {`DBFindCURRENT`, `contract DBFindCURRENT { + action { + var list array + list = DBFind("mytable").Where("date < CURRENT_DATE") + } + }`, []smartParams{ + {nil, map[string]string{`error`: `{"type":"panic","error":"It is prohibited to use NOW() or current time functions"}`}}, + }}, + {`DBFindColNow`, `contract DBFindColNow { + action { + var list array + list = DBFind("mytable").Columns("now()") + } + }`, []smartParams{ + {nil, map[string]string{`error`: `{"type":"panic","error":"It is prohibited to use NOW() or current time functions"}`}}, + }}, + {`DBFindNow`, `contract DBFindNow { + action { + var list array + list = DBFind("mytable").Where("date < now ( )") + } + }`, []smartParams{ + {nil, map[string]string{`error`: `{"type":"panic","error":"It is prohibited to use NOW() or current time functions"}`}}, {`BlockTimeCheck`, `contract BlockTimeCheck { action { if Size(BlockTime()) == Size("2006-01-02 15:04:05") { @@ -1003,7 +1026,8 @@ func TestContractChain(t *testing.T) { form := url.Values{"Name": {rnd}, "ApplicationId": {"1"}, "Columns": {`[{"name":"value","type":"varchar", "index": "0", "conditions":"true"}, - {"name":"amount", "type":"number","index": "0", "conditions":"true"}]`}, + {"name":"amount", "type":"number","index": "0", "conditions":"true"}, + 
{"name":"dt","type":"datetime", "index": "0", "conditions":"true"}]`}, "Permissions": {`{"insert": "true", "update" : "true", "new_column": "true"}`}} err := postTx(`NewTable`, &form) if err != nil { @@ -1059,6 +1083,16 @@ func TestContractChain(t *testing.T) { if msg != rnd+`=`+rnd { t.Error(fmt.Errorf(`wrong result %s`, msg)) } + + form = url.Values{`Value`: {`contract ` + rnd + `1 { + action { + DBInsert("` + rnd + `", "amount,dt", 0, "timestamp NOW()") + } + } + `}, "ApplicationId": {"1"}, `Conditions`: {`true`}} + assert.NoError(t, postTx(`NewContract`, &form)) + assert.EqualError(t, postTx(rnd+`1`, &url.Values{}), + `{"type":"panic","error":"It is prohibited to use Now() function"}`) } func TestLoopCond(t *testing.T) { diff --git a/packages/smart/errors.go b/packages/smart/errors.go index fa861ec3c..22d6bc6c0 100644 --- a/packages/smart/errors.go +++ b/packages/smart/errors.go @@ -34,4 +34,5 @@ var ( errEmptyColumn = errors.New(`Column name is empty`) errWrongColumn = errors.New(`Column name cannot begin with digit`) errNotFound = errors.New(`Record has not been found`) + errNow = errors.New(`It is prohibited to use NOW() or current time functions`) ) diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go index 001c79bb6..6b0e10f33 100644 --- a/packages/smart/funcs.go +++ b/packages/smart/funcs.go @@ -781,6 +781,16 @@ func PrepareWhere(where string) string { return where } +func checkNow(inputs ...string) error { + re := regexp.MustCompile(`(now\s*\(\s*\)|localtime|current_date|current_time)`) + for _, item := range inputs { + if re.Match([]byte(strings.ToLower(item))) { + return errNow + } + } + return nil +} + // DBSelect returns an array of values of the specified columns when there is selection of data 'offset', 'limit', 'where' func DBSelect(sc *SmartContract, tblname string, columns string, id int64, order string, offset, limit, ecosystem int64, where string, params []interface{}) (int64, []interface{}, error) { @@ -794,6 +804,9 @@ func DBSelect(sc *SmartContract, tblname string, columns string, id int64, order columns = `*` } columns = strings.ToLower(columns) + if err = checkNow(columns, where); err != nil { + return 0, nil, err + } if len(order) == 0 { order = `id` } diff --git a/packages/smart/selective.go b/packages/smart/selective.go index 366ba6b69..893a621bd 100644 --- a/packages/smart/selective.go +++ b/packages/smart/selective.go @@ -52,9 +52,14 @@ func (sc *SmartContract) selectiveLoggingAndUpd(fields []string, ivalues []inter } for i, v := range ivalues { - if len(fields) > i && converter.IsByteColumn(table, fields[i]) { - switch v.(type) { - case string: + switch v.(type) { + case string: + if strings.HasPrefix(strings.TrimSpace(v.(string)), `timestamp`) { + if err = checkNow(v.(string)); err != nil { + return 0, ``, err + } + } + if len(fields) > i && converter.IsByteColumn(table, fields[i]) { if vbyte, err := hex.DecodeString(v.(string)); err == nil { ivalues[i] = vbyte } From 583baa6bf1db013f49b462aa695163f85f335e8b Mon Sep 17 00:00:00 2001 From: gentee Date: Thu, 19 Jul 2018 12:46:22 +0500 Subject: [PATCH 167/169] Fixed merged test --- packages/api/contract_test.go | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/api/contract_test.go b/packages/api/contract_test.go index 6d283c1e3..e6402b362 100644 --- a/packages/api/contract_test.go +++ b/packages/api/contract_test.go @@ -113,6 +113,7 @@ var contracts = []smartContract{ } }`, []smartParams{ {nil, map[string]string{`error`: `{"type":"panic","error":"It is prohibited to use NOW() or current 
From 583baa6bf1db013f49b462aa695163f85f335e8b Mon Sep 17 00:00:00 2001
From: gentee
Date: Thu, 19 Jul 2018 12:46:22 +0500
Subject: [PATCH 167/169] Fixed merged test

---
 packages/api/contract_test.go | 1 +
 1 file changed, 1 insertion(+)

diff --git a/packages/api/contract_test.go b/packages/api/contract_test.go
index 6d283c1e3..e6402b362 100644
--- a/packages/api/contract_test.go
+++ b/packages/api/contract_test.go
@@ -113,6 +113,7 @@ var contracts = []smartContract{
 		}
 	}`, []smartParams{
 		{nil, map[string]string{`error`: `{"type":"panic","error":"It is prohibited to use NOW() or current time functions"}`}},
+	}},
 	{`BlockTimeCheck`, `contract BlockTimeCheck {
 		action {
 			if Size(BlockTime()) == Size("2006-01-02 15:04:05") {
From da9a2a43fc8126587633c85c02c011c691e1c3c3 Mon Sep 17 00:00:00 2001
From: Alexey Krivonogov
Date: Thu, 19 Jul 2018 16:08:18 +0500
Subject: [PATCH 168/169] Fixed map type (#454)

---
 packages/api/contract_test.go | 24 ++++++++++++++++++++++++
 packages/smart/funcs.go       |  4 ++--
 packages/smart/smart.go       |  2 +-
 packages/smart/smart_p.go     |  4 ++--
 4 files changed, 29 insertions(+), 5 deletions(-)

diff --git a/packages/api/contract_test.go b/packages/api/contract_test.go
index e6402b362..19f783318 100644
--- a/packages/api/contract_test.go
+++ b/packages/api/contract_test.go
@@ -90,6 +90,30 @@ func TestNewContracts(t *testing.T) {
 }
 
 var contracts = []smartContract{
+	{`RowType`, `contract RowType {
+		action {
+			var app map
+			var result string
+			result = GetType(app)
+			app = DBFind("applications").Where("id=1").Row()
+			result = result + GetType(app)
+			app["app_id"] = 2
+			Test("result", Sprintf("%s %s %d", result, app["name"], app["app_id"]))
+		}
+}`, []smartParams{
+		{nil, map[string]string{`result`: `map[string]interface {}map[string]interface {} System 2`}},
+	}},
+	{`StackType`, `contract StackType {
+		action {
+			var lenStack int
+			lenStack = Len($stack)
+			var par string
+			par = $stack[0]
+			Test("result", Sprintf("len=%d %v %s", lenStack, $stack, par))
+		}
+	}`, []smartParams{
+		{nil, map[string]string{`result`: `len=1 [@1StackType] @1StackType`}},
+	}},
 	{`DBFindCURRENT`, `contract DBFindCURRENT {
 		action {
 			var list array
diff --git a/packages/smart/funcs.go b/packages/smart/funcs.go
index 6b0e10f33..6d3886415 100644
--- a/packages/smart/funcs.go
+++ b/packages/smart/funcs.go
@@ -356,7 +356,7 @@ func ContractAccess(sc *SmartContract, names ...interface{}) bool {
 			name = fmt.Sprintf(`@%d`, sc.TxSmart.EcosystemID) + name
 		}
 		for i := len(sc.TxContract.StackCont) - 1; i >= 0; i-- {
-			contName := sc.TxContract.StackCont[i]
+			contName := sc.TxContract.StackCont[i].(string)
 			if strings.HasPrefix(contName, `@`) {
 				if contName == name {
 					return true
@@ -862,7 +862,7 @@ func DBSelect(sc *SmartContract, tblname string, columns string, id int64, order
 			log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("scanning next row")
 			return 0, nil, err
 		}
-		row := make(map[string]string)
+		row := make(map[string]interface{})
 		for i, col := range values {
 			var value string
 			if col != nil {
diff --git a/packages/smart/smart.go b/packages/smart/smart.go
index d20ac1515..53a0ba80d 100644
--- a/packages/smart/smart.go
+++ b/packages/smart/smart.go
@@ -45,7 +45,7 @@ type Contract struct {
 	TxGovAccount  int64   // state wallet
 	EGSRate       float64 // money/EGS rate
 	TableAccounts string
-	StackCont     []string // Stack of called contracts
+	StackCont     []interface{} // Stack of called contracts
 	Extend        *map[string]interface{}
 	Block         *script.Block
 }
diff --git a/packages/smart/smart_p.go b/packages/smart/smart_p.go
index 5b57e0e0c..92d13dada 100644
--- a/packages/smart/smart_p.go
+++ b/packages/smart/smart_p.go
@@ -389,7 +389,7 @@ func GetContractById(sc *SmartContract, id int64) string {
 	}
 
 	re := regexp.MustCompile(`(?is)^\s*contract\s+([\d\w_]+)\s*{`)
-	names := re.FindStringSubmatch(ret[0].(map[string]string)["value"])
+	names := re.FindStringSubmatch(ret[0].(map[string]interface{})["value"].(string))
 	if len(names) != 2 {
 		return ``
 	}
@@ -476,7 +476,7 @@ func CreateEcosystem(sc *SmartContract, wallet int64, name string) (int64, error
 	}
 
 	if Len(ret) > 0 {
-		pub = ret[0].(map[string]string)[`pub`]
+		pub = ret[0].(map[string]interface{})[`pub`].(string)
 	}
 	if _, _, err := DBInsert(sc, `@`+idStr+"_keys", "id,pub", wallet, pub); err != nil {
 		log.WithFields(log.Fields{"type": consts.DBError, "error": err}).Error("inserting default page")
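With this change a row returned by DBFind().Row() is a map[string]interface {} rather than a map of strings, and $stack is an array of values whose first element is the currently executing contract. Field access by name is unchanged, as the sketch below shows (the contract name is illustrative; the "applications" table and the $stack[0] usage mirror the RowType and StackType tests above):

    contract WhoAndWhat {
        action {
            var app map
            app = DBFind("applications").Where("id=1").Row()
            $result = Sprintf("%s called by %s", app["name"], $stack[0])
        }
    }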
From 2d41559f4c9210c3fce68e5d9788329b75bcc6e0 Mon Sep 17 00:00:00 2001
From: Dmitriy Chertkov
Date: Tue, 24 Jul 2018 14:23:47 +0500
Subject: [PATCH 169/169] Fixed ban of nodes with duplicate of transaction errors

---
 packages/block/block.go               | 2 +-
 packages/daemons/blocks_collection.go | 2 +-
 packages/transaction/transaction.go   | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/packages/block/block.go b/packages/block/block.go
index f26b58f8e..2804ecb4e 100644
--- a/packages/block/block.go
+++ b/packages/block/block.go
@@ -282,7 +282,7 @@ func (b *Block) Check() error {
 		}
 
 		if err := t.Check(b.Header.Time, false); err != nil {
-			return utils.ErrInfo(err)
+			return err
 		}
 	}
 
diff --git a/packages/daemons/blocks_collection.go b/packages/daemons/blocks_collection.go
index e9692ab9c..4857c39e5 100644
--- a/packages/daemons/blocks_collection.go
+++ b/packages/daemons/blocks_collection.go
@@ -448,7 +448,7 @@ func processBlocks(blocks []*block.Block) error {
 
 		if err := b.Check(); err != nil {
 			dbTransaction.Rollback()
-			return utils.ErrInfo(err)
+			return err
 		}
 
 		if err := b.Play(dbTransaction); err != nil {
diff --git a/packages/transaction/transaction.go b/packages/transaction/transaction.go
index 9df7b9608..b3527fe3a 100644
--- a/packages/transaction/transaction.go
+++ b/packages/transaction/transaction.go
@@ -306,7 +306,7 @@ func CheckTransaction(data []byte) (*tx.Header, error) {
 func (t *Transaction) Check(checkTime int64, checkForDupTr bool) error {
 	err := CheckLogTx(t.TxFullData, checkForDupTr, false)
 	if err != nil {
-		return utils.ErrInfo(err)
+		return err
 	}
 	logger := log.WithFields(log.Fields{"tx_time": t.TxTime})
 	// time in the transaction cannot be more than MAX_TX_FORW seconds of block time