diff --git a/.gitignore b/.gitignore
index 21bfa6d0..7111f107 100644
--- a/.gitignore
+++ b/.gitignore
@@ -23,5 +23,6 @@ dist/
 !app/**/*.js
 tmp
 server/data
+server-v2/api/*/data
 assets/
 bin/
\ No newline at end of file
diff --git a/server-v2/api/studio/etc/studio-api.yaml b/server-v2/api/studio/etc/studio-api.yaml
index b20e9e38..8cbe778a 100644
--- a/server-v2/api/studio/etc/studio-api.yaml
+++ b/server-v2/api/studio/etc/studio-api.yaml
@@ -8,4 +8,7 @@ Auth:
   AccessSecret: "login_secret"
   AccessExpire: 1800
 File:
-  UploadDir: "./upload/"
+  UploadDir: "./data/upload/"
+  TasksDir: "./data/tasks"
+  SqliteDbFilePath: "./data/tasks.db"
+  TaskIdPath: "./data/taskId.data"
\ No newline at end of file
diff --git a/server-v2/api/studio/internal/config/config.go b/server-v2/api/studio/internal/config/config.go
index 00a52800..1161da12 100644
--- a/server-v2/api/studio/internal/config/config.go
+++ b/server-v2/api/studio/internal/config/config.go
@@ -2,6 +2,10 @@ package config
 
 import (
 	"github.com/zeromicro/go-zero/rest"
+	"go.uber.org/zap"
+	"io/ioutil"
+	"os"
+	"path/filepath"
 )
 
 type Config struct {
@@ -15,6 +19,75 @@ type Config struct {
 	}
 
 	File struct {
-		UploadDir string
+		UploadDir        string
+		TasksDir         string
+		SqliteDbFilePath string
+		TaskIdPath       string
 	}
 }
+
+const (
+	DefaultFilesDataDir     = "data"
+	DefaultTaskIdPath       = "data/taskId.data"
+	DefaultUploadDir        = "data/upload"
+	DefaultTasksDir         = "data/tasks"
+	DefaultSqlitedbFilePath = "data/tasks.db"
+)
+
+func (c *Config) Validate() error {
+	return nil
+}
+
+// Complete fills in any file paths that are left empty and makes sure the data directories exist.
+func (c *Config) Complete() {
+	if c.File.TaskIdPath == "" {
+		_, err := os.Stat(DefaultFilesDataDir)
+		if os.IsNotExist(err) {
+			os.MkdirAll(DefaultFilesDataDir, 0o766)
+		}
+		abs, _ := filepath.Abs(DefaultTaskIdPath)
+		_, err = ioutil.ReadFile(abs)
+		if err != nil {
+			if os.IsNotExist(err) {
+				_, err := os.Create(abs)
+				if err != nil {
+					zap.L().Fatal("DefaultTaskIdPath Init fail", zap.Error(err))
+				}
+			}
+		}
+		c.File.TaskIdPath = abs
+	}
+
+	if c.File.UploadDir == "" {
+		abs, _ := filepath.Abs(DefaultUploadDir)
+		c.File.UploadDir = abs
+		_, err := os.Stat(abs)
+		if os.IsNotExist(err) {
+			os.MkdirAll(abs, 0o766)
+		}
+	}
+
+	if c.File.TasksDir == "" {
+		abs, _ := filepath.Abs(DefaultTasksDir)
+		c.File.TasksDir = abs
+		_, err := os.Stat(abs)
+		if os.IsNotExist(err) {
+			os.MkdirAll(abs, 0o766)
+		}
+	}
+
+	if c.File.SqliteDbFilePath == "" {
+		_, err := os.Stat(DefaultFilesDataDir)
+		if os.IsNotExist(err) {
+			os.MkdirAll(DefaultFilesDataDir, 0o766)
+		}
+		abs, _ := filepath.Abs(DefaultSqlitedbFilePath)
+		c.File.SqliteDbFilePath = abs
+	}
+}
+
+func (c *Config) InitConfig() error {
+	c.Complete()
+
+	return c.Validate()
+}
diff --git a/server-v2/api/studio/internal/handler/routes.go b/server-v2/api/studio/internal/handler/routes.go
index 907b082b..4bb175d8 100644
--- a/server-v2/api/studio/internal/handler/routes.go
+++ b/server-v2/api/studio/internal/handler/routes.go
@@ -60,12 +60,12 @@ func RegisterHandlers(server *rest.Server, serverCtx *svc.ServiceContext) {
 		[]rest.Route{
 			{
 				Method:  http.MethodPost,
-				Path:    "/api/file/upload",
+				Path:    "/api/files",
 				Handler: file.FileUploadHandler(serverCtx),
 			},
 			{
 				Method:  http.MethodDelete,
-				Path:    "/api/file/:name",
+				Path:    "/api/files/:name",
 				Handler: file.FileDestroyHandler(serverCtx),
 			},
 			{
diff --git a/server-v2/api/studio/internal/logic/file/filedestroylogic.go b/server-v2/api/studio/internal/logic/file/filedestroylogic.go
index 4c75f53f..4f551c77 100644
---
a/server-v2/api/studio/internal/logic/file/filedestroylogic.go +++ b/server-v2/api/studio/internal/logic/file/filedestroylogic.go @@ -26,5 +26,5 @@ func NewFileDestroyLogic(ctx context.Context, svcCtx *svc.ServiceContext) *FileD } func (l *FileDestroyLogic) FileDestroy(req types.FileDestroyRequest) error { - return service.NewFileService(nil, l.ctx, l.svcCtx).FileDestroy(req.Name) + return service.NewFileService(l.ctx, l.svcCtx).FileDestroy(req.Name) } diff --git a/server-v2/api/studio/internal/logic/file/filesindexlogic.go b/server-v2/api/studio/internal/logic/file/filesindexlogic.go index 50595cf4..6ca16117 100644 --- a/server-v2/api/studio/internal/logic/file/filesindexlogic.go +++ b/server-v2/api/studio/internal/logic/file/filesindexlogic.go @@ -26,5 +26,5 @@ func NewFilesIndexLogic(ctx context.Context, svcCtx *svc.ServiceContext) *FilesI } func (l *FilesIndexLogic) FilesIndex() (resp *types.FilesIndexData, err error) { - return service.NewFileService(nil, l.ctx, l.svcCtx).FilesIndex() + return service.NewFileService(l.ctx, l.svcCtx).FilesIndex() } diff --git a/server-v2/api/studio/internal/logic/file/fileuploadlogic.go b/server-v2/api/studio/internal/logic/file/fileuploadlogic.go index 9776a01c..30fde52b 100644 --- a/server-v2/api/studio/internal/logic/file/fileuploadlogic.go +++ b/server-v2/api/studio/internal/logic/file/fileuploadlogic.go @@ -1,8 +1,7 @@ package file import ( - "net/http" - + "context" "github.com/vesoft-inc/nebula-studio/server/api/studio/internal/service" "github.com/vesoft-inc/nebula-studio/server/api/studio/internal/svc" "github.com/zeromicro/go-zero/core/logx" @@ -10,18 +9,18 @@ import ( type FileUploadLogic struct { logx.Logger - r *http.Request + ctx context.Context svcCtx *svc.ServiceContext } -func NewFileUploadLogic(r *http.Request, svcCtx *svc.ServiceContext) *FileUploadLogic { +func NewFileUploadLogic(ctx context.Context, svcCtx *svc.ServiceContext) *FileUploadLogic { return &FileUploadLogic{ - Logger: logx.WithContext(r.Context()), - r: r, + Logger: logx.WithContext(ctx), + ctx: ctx, svcCtx: svcCtx, } } func (l *FileUploadLogic) FileUpload() error { - return service.NewFileService(l.r, nil, l.svcCtx).FileUpload() + return service.NewFileService(l.ctx, l.svcCtx).FileUpload() } diff --git a/server-v2/api/studio/internal/service/file.go b/server-v2/api/studio/internal/service/file.go index 72812261..f4e4e34a 100644 --- a/server-v2/api/studio/internal/service/file.go +++ b/server-v2/api/studio/internal/service/file.go @@ -48,22 +48,6 @@ type ( } ) -func NewFileService(r *http.Request, ctx context.Context, svcCtx *svc.ServiceContext) FileService { - if r != nil { - return &fileService{ - Logger: logx.WithContext(r.Context()), - r: r, - svcCtx: svcCtx, - } - } else { - return &fileService{ - Logger: logx.WithContext(ctx), - ctx: ctx, - svcCtx: svcCtx, - } - } -} - func NewFileService(ctx context.Context, svcCtx *svc.ServiceContext) FileService { return &fileService{ Logger: logx.WithContext(ctx), diff --git a/server-v2/api/studio/internal/service/import.go b/server-v2/api/studio/internal/service/import.go index 660df854..11eba6ce 100644 --- a/server-v2/api/studio/internal/service/import.go +++ b/server-v2/api/studio/internal/service/import.go @@ -8,11 +8,10 @@ import ( "fmt" "github.com/vesoft-inc/go-pkg/middleware" importconfig "github.com/vesoft-inc/nebula-importer/pkg/config" - importerErrors "github.com/vesoft-inc/nebula-importer/pkg/errors" + importererrors "github.com/vesoft-inc/nebula-importer/pkg/errors" 
"github.com/vesoft-inc/nebula-studio/server/api/studio/internal/service/importer" "github.com/vesoft-inc/nebula-studio/server/api/studio/internal/svc" "github.com/vesoft-inc/nebula-studio/server/api/studio/internal/types" - Config "github.com/vesoft-inc/nebula-studio/server/api/studio/pkg/config" "github.com/vesoft-inc/nebula-studio/server/api/studio/pkg/ecode" "github.com/vesoft-inc/nebula-studio/server/api/studio/pkg/utils" "github.com/zeromicro/go-zero/core/logx" @@ -65,7 +64,7 @@ func NewImportService(ctx context.Context, svcCtx *svc.ServiceContext) ImportSer func (i *importService) CreateImportTask(req *types.CreateImportTaskRequest) (*types.CreateImportTaskData, error) { jsons, err := json.Marshal(req.Config) if err != nil { - return nil, errors.New("importDataParams get fail") + return nil, ecode.WithCode(ecode.ErrParam, nil) } conf := importconfig.YAMLConfig{} @@ -75,20 +74,20 @@ func (i *importService) CreateImportTask(req *types.CreateImportTaskRequest) (*t } if err = validClientParams(&conf); err != nil { - err = importerErrors.Wrap(importerErrors.InvalidConfigPathOrFormat, err) + err = importererrors.Wrap(importererrors.InvalidConfigPathOrFormat, err) zap.L().Warn("client params is wrong", zap.Error(err)) return nil, err } - taskDir, err := importer.GetNewTaskDir() + taskDir, err := importer.GetNewTaskDir(i.svcCtx.Config.File.TasksDir) if err != nil { return nil, err } - logPath := filepath.Join(taskDir, "import.log") + logPath := filepath.Join(taskDir, importLogName) conf.LogPath = &logPath // create config file - if err := importer.CreateConfigFile(taskDir, conf); err != nil { + if err := importer.CreateConfigFile(i.svcCtx.Config.File.UploadDir, taskDir, conf); err != nil { return nil, err } @@ -121,7 +120,7 @@ func (i *importService) CreateImportTask(req *types.CreateImportTaskRequest) (*t // write taskId to file muTaskId.Lock() - taskIDBytes, err := ioutil.ReadFile(Config.Cfg.Web.TaskIdPath) + taskIDBytes, err := ioutil.ReadFile(i.svcCtx.Config.File.TaskIdPath) if err != nil { zap.L().Warn("read taskId file error", zap.Error(err)) return nil, err @@ -138,7 +137,7 @@ func (i *importService) CreateImportTask(req *types.CreateImportTaskRequest) (*t if err != nil { zap.L().Warn("read taskId file error", zap.Error(err)) } - err = ioutil.WriteFile(Config.Cfg.Web.TaskIdPath, bytes, 777) + err = ioutil.WriteFile(i.svcCtx.Config.File.TaskIdPath, bytes, 777) if err != nil { zap.L().Warn("write taskId file error", zap.Error(err)) } @@ -154,10 +153,6 @@ func (i *importService) StopImportTask(req *types.StopImportTaskRequest) error { } func (i *importService) DownloadConfig(req *types.DownloadConfigsRequest) error { - if req.Id == "" { - return errors.New("invalid Id") - } - httpReq, ok := middleware.GetRequest(i.ctx) if !ok { return ecode.WithInternalServer(fmt.Errorf("unset KeepRequest")) @@ -168,7 +163,7 @@ func (i *importService) DownloadConfig(req *types.DownloadConfigsRequest) error return ecode.WithInternalServer(fmt.Errorf("unset KeepResponse Writer")) } - configPath := filepath.Join(Config.Cfg.Web.TasksDir, req.Id, "config.yaml") + configPath := filepath.Join(i.svcCtx.Config.File.TasksDir, req.Id, "config.yaml") httpResp.Header().Set("Content-Type", "application/octet-stream") httpResp.Header().Set("Content-Disposition", "attachment;filename="+filepath.Base(configPath)) http.ServeFile(httpResp, httpReq, configPath) @@ -178,9 +173,6 @@ func (i *importService) DownloadConfig(req *types.DownloadConfigsRequest) error func (i *importService) DownloadLogs(req 
*types.DownloadLogsRequest) error { id := req.Id - if id == "" { - return errors.New("id parse failed") - } httpReq, ok := middleware.GetRequest(i.ctx) if !ok { @@ -194,15 +186,12 @@ func (i *importService) DownloadLogs(req *types.DownloadLogsRequest) error { filename := req.Name path := "" - if filename == "import.log" { - path = filepath.Join(Config.Cfg.Web.TasksDir, id, filename) + if filename == importLogName { + path = filepath.Join(i.svcCtx.Config.File.TasksDir, id, filename) } else { - path = filepath.Join(Config.Cfg.Web.TasksDir, id, "err", filename) + path = filepath.Join(i.svcCtx.Config.File.TasksDir, id, "err", filename) } - fmt.Println("------------------------") - fmt.Println("test") - httpResp.Header().Set("Content-Type", "application/octet-stream") httpResp.Header().Set("Content-Disposition", "attachment;filename="+filepath.Base(path)) http.ServeFile(httpResp, httpReq, path) @@ -210,25 +199,22 @@ func (i *importService) DownloadLogs(req *types.DownloadLogsRequest) error { } func (i *importService) DeleteImportTask(req *types.DeleteImportTaskRequest) error { - return importer.DeleteImportTask(req.Id, req.Address+":"+req.Port, req.Username) + return importer.DeleteImportTask(i.svcCtx.Config.File.TasksDir, req.Id, req.Address+":"+req.Port, req.Username) } func (i *importService) GetImportTask(req *types.GetImportTaskRequest) (*types.GetImportTaskData, error) { - return importer.GetImportTask(req.Id, req.Address+":"+req.Port, req.Username) + return importer.GetImportTask(i.svcCtx.Config.File.TasksDir, req.Id, req.Address+":"+req.Port, req.Username) } func (i *importService) GetManyImportTask(req *types.GetManyImportTaskRequest) (*types.GetManyImportTaskData, error) { - return importer.GetManyImportTask(req.Address+":"+req.Port, req.Username, req.Page, req.PageSize) + return importer.GetManyImportTask(i.svcCtx.Config.File.TasksDir, req.Address+":"+req.Port, req.Username, req.Page, req.PageSize) } // GetImportTaskLogNames :Get all log file's name of a task func (i *importService) GetImportTaskLogNames(req *types.GetImportTaskLogNamesRequest) (*types.GetImportTaskLogNamesData, error) { id := req.Id - if id == "" { - return nil, errors.New("id parse failed") - } - errLogDir := filepath.Join(Config.Cfg.Web.TasksDir, id, "err") + errLogDir := filepath.Join(i.svcCtx.Config.File.TasksDir, id, "err") fileInfos, err := ioutil.ReadDir(errLogDir) if err != nil { return nil, err @@ -237,7 +223,7 @@ func (i *importService) GetImportTaskLogNames(req *types.GetImportTaskLogNamesRe data := &types.GetImportTaskLogNamesData{ Names: []string{}, } - data.Names = append(data.Names, "import.log") + data.Names = append(data.Names, importLogName) for _, fileInfo := range fileInfos { name := fileInfo.Name() data.Names = append(data.Names, name) @@ -248,17 +234,17 @@ func (i *importService) GetImportTaskLogNames(req *types.GetImportTaskLogNamesRe func (i *importService) GetManyImportTaskLog(req *types.GetManyImportTaskLogRequest) (*types.GetManyImportTaskLogData, error) { path := "" if req.File == importLogName { - path = filepath.Join(Config.Cfg.Web.TasksDir, req.Id, req.File) + path = filepath.Join(i.svcCtx.Config.File.TasksDir, req.Id, req.File) } else { - path = filepath.Join(Config.Cfg.Web.TasksDir, req.Id, errContentDir, req.File) + path = filepath.Join(i.svcCtx.Config.File.TasksDir, req.Id, errContentDir, req.File) } - lines, err := readFile(path, req.Offset, req.Limit) + lines, err := readFileLines(path, req.Offset, req.Limit) if err != nil { return nil, err } muTaskId.RLock() - taskIdBytes, err 
:= ioutil.ReadFile(Config.Cfg.Web.TaskIdPath)
+	taskIdBytes, err := ioutil.ReadFile(i.svcCtx.Config.File.TaskIdPath)
 	muTaskId.RUnlock()
 	if err != nil {
 		zap.L().Warn("read taskId file error", zap.Error(err))
@@ -295,7 +281,7 @@ func validClientParams(conf *importconfig.YAMLConfig) error {
 		*conf.NebulaClientSettings.Connection.User == "" ||
 		conf.NebulaClientSettings.Space == nil ||
 		*conf.NebulaClientSettings.Space == "" {
-		return errors.New("client params is wrong")
+		return ecode.WithCode(ecode.ErrParam, nil)
 	}
 
 	for _, fn := range conf.Files {
@@ -308,7 +294,7 @@ func validClientParams(conf *importconfig.YAMLConfig) error {
 	return nil
 }
 
-func readFile(path string, offset int64, limit int64) ([]string, error) {
+func readFileLines(path string, offset int64, limit int64) ([]string, error) {
 	file, err := os.Open(path)
 	if err != nil {
 		zap.L().Warn("open file error", zap.Error(err))
diff --git a/server-v2/api/studio/internal/service/importer/importer.go b/server-v2/api/studio/internal/service/importer/importer.go
index b4de26ca..3a39a672 100644
--- a/server-v2/api/studio/internal/service/importer/importer.go
+++ b/server-v2/api/studio/internal/service/importer/importer.go
@@ -3,17 +3,17 @@ package importer
 import (
 	"errors"
 	"fmt"
+	"os"
+	"path/filepath"
+	"strconv"
+	"time"
+
 	importconfig "github.com/vesoft-inc/nebula-importer/pkg/config"
 	importerErrors "github.com/vesoft-inc/nebula-importer/pkg/errors"
 	"github.com/vesoft-inc/nebula-importer/pkg/logger"
 	"github.com/vesoft-inc/nebula-studio/server/api/studio/internal/types"
-	Config "github.com/vesoft-inc/nebula-studio/server/api/studio/pkg/config"
 	"github.com/vesoft-inc/nebula-studio/server/api/studio/pkg/utils"
 	"go.uber.org/zap"
-	"os"
-	"path/filepath"
-	"strconv"
-	"time"
 
 	"gopkg.in/yaml.v2"
 )
@@ -28,22 +28,22 @@ type ImportResult struct {
 	}
 }
 
-func GetNewTaskDir() (string, error) {
+func GetNewTaskDir(tasksDir string) (string, error) {
 	taskId, err := GetTaskMgr().NewTaskID()
 	if err != nil {
 		return "", err
 	}
-	taskDir := filepath.Join(Config.Cfg.Web.TasksDir, taskId)
+	taskDir := filepath.Join(tasksDir, taskId)
 	return taskDir, nil
 }
 
-func CreateConfigFile(dir string, config importconfig.YAMLConfig) error {
+func CreateConfigFile(uploadDir, taskdir string, config importconfig.YAMLConfig) error {
 	fileName := "config.yaml"
-	err := utils.CreateDir(dir)
+	err := utils.CreateDir(taskdir)
 	if err != nil {
 		return err
 	}
-	path := filepath.Join(dir, fileName)
+	path := filepath.Join(taskdir, fileName)
 	// erase user information
 	address := *config.NebulaClientSettings.Connection.Address
 	user := *config.NebulaClientSettings.Connection.User
@@ -58,8 +58,8 @@ func CreateConfigFile(dir string, config importconfig.YAMLConfig) error {
 	paths := make([]string, 0)
 	failDataPaths := make([]string, 0)
 	for _, file := range config.Files {
-		paths = append(paths, filepath.Join(Config.Cfg.Web.UploadDir, *file.Path))
-		failDataPaths = append(failDataPaths, filepath.Join(dir, "err", *file.FailDataPath))
+		paths = append(paths, filepath.Join(uploadDir, *file.Path))
+		failDataPaths = append(failDataPaths, filepath.Join(taskdir, "err", *file.FailDataPath))
 		_, fileName := filepath.Split(*file.Path)
 		_, fileDataName := filepath.Split(*file.FailDataPath)
 		*file.Path = fileName
@@ -156,7 +156,7 @@ func ImportStatus(taskID string) (*TaskInfo, error) {
 	return nil, errors.New("task is not exist")
 }
 
-func DeleteImportTask(taskID, address, username string) error {
+func DeleteImportTask(tasksDir, taskID, address, username string)
error { if id, err := strconv.Atoi(taskID); err != nil { zap.L().Warn(fmt.Sprintf("stop task fail, id : %s", taskID), zap.Error(err)) return errors.New("task not existed") @@ -167,14 +167,14 @@ func DeleteImportTask(taskID, address, username string) error { return errors.New("task not existed") } } - err := GetTaskMgr().DelTask(taskID) + err := GetTaskMgr().DelTask(tasksDir, taskID) if err != nil { return fmt.Errorf("task del fail, %s", err.Error()) } return nil } -func GetImportTask(taskID, address, username string) (*types.GetImportTaskData, error) { +func GetImportTask(tasksDir, taskID, address, username string) (*types.GetImportTaskData, error) { task := Task{} result := &types.GetImportTaskData{} @@ -203,13 +203,13 @@ func GetImportTask(taskID, address, username string) (*types.GetImportTaskData, result.User = task.TaskInfo.User result.Name = task.TaskInfo.Name result.Space = task.TaskInfo.Space - result.Stats = types.Stats(task.TaskInfo.Stats) + result.Stats = types.ImportTaskStats(task.TaskInfo.Stats) } return result, nil } -func GetManyImportTask(address, username string, page, pageSize int) (*types.GetManyImportTaskData, error) { +func GetManyImportTask(tasksDir, address, username string, page, pageSize int) (*types.GetManyImportTaskData, error) { result := &types.GetManyImportTaskData{ Total: 0, List: []types.GetImportTaskData{}, @@ -231,7 +231,7 @@ func GetManyImportTask(address, username string, page, pageSize int) (*types.Get result.Total = int64(stop - start) for i := start; i < stop; i++ { - data, _ := GetImportTask(taskIDs[i], address, username) + data, _ := GetImportTask(tasksDir, taskIDs[i], address, username) result.List = append(result.List, *data) } } diff --git a/server-v2/api/studio/internal/service/importer/taskdb.go b/server-v2/api/studio/internal/service/importer/taskdb.go index 7ca6050e..6619b2f8 100644 --- a/server-v2/api/studio/internal/service/importer/taskdb.go +++ b/server-v2/api/studio/internal/service/importer/taskdb.go @@ -1,7 +1,6 @@ package importer import ( - Config "github.com/vesoft-inc/nebula-studio/server/api/studio/pkg/config" "github.com/zeromicro/go-zero/core/logx" "gorm.io/driver/sqlite" "gorm.io/gorm" @@ -12,8 +11,8 @@ type TaskDb struct { *gorm.DB } -func InitDB() { - dbFilePath := Config.Cfg.Web.SqlitedbFilePath +func InitDB(sqlitedbFilePath string) { + dbFilePath := sqlitedbFilePath db, err := gorm.Open(sqlite.Open(dbFilePath), &gorm.Config{ Logger: logger.Default.LogMode(logger.Info), }) diff --git a/server-v2/api/studio/internal/service/importer/taskmgr.go b/server-v2/api/studio/internal/service/importer/taskmgr.go index 7c20ebeb..38f66533 100644 --- a/server-v2/api/studio/internal/service/importer/taskmgr.go +++ b/server-v2/api/studio/internal/service/importer/taskmgr.go @@ -3,15 +3,15 @@ package importer import ( "errors" "fmt" - "github.com/vesoft-inc/nebula-importer/pkg/cmd" - Config "github.com/vesoft-inc/nebula-studio/server/api/studio/pkg/config" - "github.com/zeromicro/go-zero/core/logx" "os" "path/filepath" "strconv" "sync" "time" + "github.com/vesoft-inc/nebula-importer/pkg/cmd" + "github.com/zeromicro/go-zero/core/logx" + _ "github.com/mattn/go-sqlite3" ) @@ -138,7 +138,7 @@ func (mgr *TaskMgr) AbortTask(taskID string) (err error) { return } -func (mgr *TaskMgr) DelTask(taskID string) error { +func (mgr *TaskMgr) DelTask(tasksDir, taskID string) error { _, ok := mgr.getTaskFromMap(taskID) if ok { mgr.tasks.Delete(taskID) @@ -150,7 +150,7 @@ func (mgr *TaskMgr) DelTask(taskID string) error { if err = mgr.db.DelTaskInfo(id); err 
!= nil { return err } - taskDir := filepath.Join(Config.Cfg.Web.TasksDir, taskID) + taskDir := filepath.Join(tasksDir, taskID) return os.RemoveAll(taskDir) } diff --git a/server-v2/api/studio/internal/types/types.go b/server-v2/api/studio/internal/types/types.go index dba27cba..3ed1ca25 100644 --- a/server-v2/api/studio/internal/types/types.go +++ b/server-v2/api/studio/internal/types/types.go @@ -52,113 +52,113 @@ type FilesIndexData struct { List []FileStat `json:"list"` } -type Connection struct { +type ImportTaskConnection struct { User string `json:"user" validate:"required"` Password string `json:"password" validate:"required"` Address string `json:"address" validate:"required"` } -type ClientSettings struct { - Retry int `json:"retry,optional"` - Concurrency int `json:"concurrency,optional"` - ChannelBufferSize int `json:"channelBufferSize,optional"` - Space string `json:"space" validate:"required"` - Connection Connection `json:"connection" validate:"required"` - PostStart PostStart `json:"postStart,optional"` - PreStop PreStop `json:"preStop,optional"` +type ImportTaskClientSettings struct { + Retry int `json:"retry,optional"` + Concurrency int `json:"concurrency,optional"` + ChannelBufferSize int `json:"channelBufferSize,optional"` + Space string `json:"space" validate:"required"` + Connection ImportTaskConnection `json:"connection" validate:"required"` + PostStart ImportTaskPostStart `json:"postStart,optional"` + PreStop ImportTaskPreStop `json:"preStop,optional"` } -type PostStart struct { +type ImportTaskPostStart struct { Commands string `json:"commands" validate:"required"` AfterPeriod string `json:"afterPeriod" validate:"required"` } -type PreStop struct { +type ImportTaskPreStop struct { Commands string `json:"commands,optional"` } -type CSV struct { +type ImportTaskCSV struct { WithHeader bool `json:"withHeader,optional"` WithLabel bool `json:"withLabel,optional"` Delimiter string `json:"delimiter,optional" default:","` } -type VID struct { +type ImportTaskVID struct { Index int64 `json:"index" validate:"required"` Type string `json:"type" validate:"required"` Function string `json:"function,optional"` Prefix string `json:"prefix,optional"` } -type TagProp struct { +type ImportTaskTagProp struct { Name string `json:"name" validate:"required"` Type string `json:"type" validate:"required"` Index int64 `json:"index" validate:"required"` } -type Tag struct { - Name string `json:"name" validate:"required"` - Props []TagProp `json:"props" validate:"required"` +type ImportTaskTag struct { + Name string `json:"name" validate:"required"` + Props []ImportTaskTagProp `json:"props" validate:"required"` } -type Vertex struct { - VID VID `json:"vid" validate:"required"` - Tags []Tag `json:"tags" validate:"required"` +type ImportTaskVertex struct { + VID ImportTaskVID `json:"vid" validate:"required"` + Tags []ImportTaskTag `json:"tags" validate:"required"` } -type EdgeID struct { +type ImportTaskEdgeID struct { Index int64 `json:"index" validate:"required"` Function string `json:"function,optional"` Type string `json:"type" validate:"required"` Prefix string `json:"prefix,optional"` } -type EdgeRank struct { +type ImportTaskEdgeRank struct { Index int64 `json:"index"` } -type EdgeProp struct { +type ImportTaskEdgeProp struct { Name string `json:"name"` Type string `json:"type"` Index int64 `json:"index"` } -type Edge struct { - Name string `json:"name" validate:"required"` - SrcVID EdgeID `json:"srcVID" validate:"required"` - DstVID EdgeID `json:"dstVID" validate:"required"` - Rank 
EdgeRank `json:"rank, optional"` - Props []EdgeProp `json:"props" validate:"required"` +type ImportTaskEdge struct { + Name string `json:"name" validate:"required"` + SrcVID ImportTaskEdgeID `json:"srcVID" validate:"required"` + DstVID ImportTaskEdgeID `json:"dstVID" validate:"required"` + Rank ImportTaskEdgeRank `json:"rank, optional"` + Props []ImportTaskEdgeProp `json:"props" validate:"required"` } -type Schema struct { - Type string `json:"type" validate:"required"` - Edge Edge `json:"edge,optional"` - Vertex Vertex `json:"vertex,optional"` +type ImportTaskSchema struct { + Type string `json:"type" validate:"required"` + Edge ImportTaskEdge `json:"edge,optional"` + Vertex ImportTaskVertex `json:"vertex,optional"` } -type File struct { - Path string `json:"path" validate:"required"` - FailDataPath string `json:"failDataPath" validate:"required"` - BatchSize int `json:"batchSize,optional"` - Limit int `json:"limit, optional"` - InOrder bool `json:"inOrder, optional"` - Type string `json:"type" validate:"required"` - CSV CSV `json:"csv" validate:"required"` - Schema Schema `json:"schema" validate:"required"` +type ImportTaskFile struct { + Path string `json:"path" validate:"required"` + FailDataPath string `json:"failDataPath" validate:"required"` + BatchSize int `json:"batchSize,optional"` + Limit int `json:"limit, optional"` + InOrder bool `json:"inOrder, optional"` + Type string `json:"type" validate:"required"` + CSV ImportTaskCSV `json:"csv" validate:"required"` + Schema ImportTaskSchema `json:"schema" validate:"required"` } -type ImportConfig struct { - Version string `json:"version" validate:"required"` - Description string `json:"description,optional"` - RemoveTempFiles bool `json:"removeTempFiles,optional"` - ClientSettings ClientSettings `json:"clientSettings" validate:"required"` - Files []File `json:"files" validate:"required"` +type ImportTaskConfig struct { + Version string `json:"version" validate:"required"` + Description string `json:"description,optional"` + RemoveTempFiles bool `json:"removeTempFiles,optional"` + ClientSettings ImportTaskClientSettings `json:"clientSettings" validate:"required"` + Files []ImportTaskFile `json:"files" validate:"required"` } type CreateImportTaskRequest struct { - Name string `json:"name" validate:"required"` - Config ImportConfig `json:"config" validate:"required"` + Name string `json:"name" validate:"required"` + Config ImportTaskConfig `json:"config" validate:"required"` } type CreateImportTaskData struct { @@ -173,18 +173,18 @@ type GetImportTaskRequest struct { } type GetImportTaskData struct { - Id string `json:"id"` - Name string `json:"name"` - User string `json:"user"` - Address string `json:"address"` - Space string `json:"space"` - Status string `json:"status"` - CreateTime int64 `json:"createTime"` - UpdateTime int64 `json:"updateTime"` - Stats Stats `json:"stats"` -} - -type Stats struct { + Id string `json:"id"` + Name string `json:"name"` + User string `json:"user"` + Address string `json:"address"` + Space string `json:"space"` + Status string `json:"status"` + CreateTime int64 `json:"createTime"` + UpdateTime int64 `json:"updateTime"` + Stats ImportTaskStats `json:"stats"` +} + +type ImportTaskStats struct { NumFailed int64 `json:"numFailed"` NumReadFailed int64 `json:"numReadFailed"` TotalCount int64 `json:"totalCount"` diff --git a/server-v2/api/studio/restapi/file.api b/server-v2/api/studio/restapi/file.api index d403bbed..5d15f943 100644 --- a/server-v2/api/studio/restapi/file.api +++ 
b/server-v2/api/studio/restapi/file.api @@ -25,10 +25,10 @@ type ( service studio-api { @doc "Upload File" @handler FileUpload - post /api/file/upload + post /api/files @doc "delete file" @handler FileDestroy - delete /api/file/:name(FileDestroyRequest) + delete /api/files/:name(FileDestroyRequest) @doc "preview file" @handler FilesIndex get /api/file returns(FilesIndexData) diff --git a/server-v2/api/studio/restapi/import.api b/server-v2/api/studio/restapi/import.api index 3f343553..6c79a5ab 100644 --- a/server-v2/api/studio/restapi/import.api +++ b/server-v2/api/studio/restapi/import.api @@ -1,113 +1,113 @@ syntax = "v1" type ( - Connection { + ImportTaskConnection { User string `json:"user" validate:"required"` Password string `json:"password" validate:"required"` Address string `json:"address" validate:"required"` } - ClientSettings { - Retry int `json:"retry,optional"` - Concurrency int `json:"concurrency,optional"` - ChannelBufferSize int `json:"channelBufferSize,optional"` - Space string `json:"space" validate:"required"` - Connection Connection `json:"connection" validate:"required"` - PostStart PostStart `json:"postStart,optional"` - PreStop PreStop `json:"preStop,optional"` + ImportTaskClientSettings { + Retry int `json:"retry,optional"` + Concurrency int `json:"concurrency,optional"` + ChannelBufferSize int `json:"channelBufferSize,optional"` + Space string `json:"space" validate:"required"` + Connection ImportTaskConnection `json:"connection" validate:"required"` + PostStart ImportTaskPostStart `json:"postStart,optional"` + PreStop ImportTaskPreStop `json:"preStop,optional"` } - PostStart { + ImportTaskPostStart { Commands string `json:"commands" validate:"required"` AfterPeriod string `json:"afterPeriod" validate:"required"` } - PreStop { + ImportTaskPreStop { Commands string `json:"commands,optional"` } - CSV { + ImportTaskCSV { WithHeader bool `json:"withHeader,optional"` WithLabel bool `json:"withLabel,optional"` Delimiter string `json:"delimiter,optional" default:","` } - VID { + ImportTaskVID { Index int64 `json:"index" validate:"required"` Type string `json:"type" validate:"required"` Function string `json:"function,optional"` Prefix string `json:"prefix,optional"` } - TagProp { + ImportTaskTagProp { Name string `json:"name" validate:"required"` Type string `json:"type" validate:"required"` Index int64 `json:"index" validate:"required"` } - Tag { - Name string `json:"name" validate:"required"` - Props []TagProp `json:"props" validate:"required"` + ImportTaskTag { + Name string `json:"name" validate:"required"` + Props []ImportTaskTagProp `json:"props" validate:"required"` } - Vertex { - VID VID `json:"vid" validate:"required"` - Tags []Tag `json:"tags" validate:"required"` + ImportTaskVertex { + VID ImportTaskVID `json:"vid" validate:"required"` + Tags []ImportTaskTag `json:"tags" validate:"required"` } - EdgeID { + ImportTaskEdgeID { Index int64 `json:"index" validate:"required"` Function string `json:"function,optional"` Type string `json:"type" validate:"required"` Prefix string `json:"prefix,optional"` } - EdgeRank { + ImportTaskEdgeRank { Index int64 `json:"index"` } - EdgeProp { + ImportTaskEdgeProp { Name string `json:"name"` Type string `json:"type"` Index int64 `json:"index"` } - Edge { - Name string `json:"name" validate:"required"` - SrcVID EdgeID `json:"srcVID" validate:"required"` - DstVID EdgeID `json:"dstVID" validate:"required"` - Rank EdgeRank `json:"rank, optional"` - Props []EdgeProp `json:"props" validate:"required"` + ImportTaskEdge { + Name string 
`json:"name" validate:"required"` + SrcVID ImportTaskEdgeID `json:"srcVID" validate:"required"` + DstVID ImportTaskEdgeID `json:"dstVID" validate:"required"` + Rank ImportTaskEdgeRank `json:"rank, optional"` + Props []ImportTaskEdgeProp `json:"props" validate:"required"` } - Schema { - Type string `json:"type" validate:"required"` - Edge Edge `json:"edge,optional"` - Vertex Vertex `json:"vertex,optional"` + ImportTaskSchema { + Type string `json:"type" validate:"required"` + Edge ImportTaskEdge `json:"edge,optional"` + Vertex ImportTaskVertex `json:"vertex,optional"` } - File { - Path string `json:"path" validate:"required"` - FailDataPath string `json:"failDataPath" validate:"required"` - BatchSize int `json:"batchSize,optional"` - Limit int `json:"limit, optional"` - InOrder bool `json:"inOrder, optional"` - Type string `json:"type" validate:"required"` - CSV CSV `json:"csv" validate:"required"` - Schema Schema `json:"schema" validate:"required"` + ImportTaskFile { + Path string `json:"path" validate:"required"` + FailDataPath string `json:"failDataPath" validate:"required"` + BatchSize int `json:"batchSize,optional"` + Limit int `json:"limit, optional"` + InOrder bool `json:"inOrder, optional"` + Type string `json:"type" validate:"required"` + CSV ImportTaskCSV `json:"csv" validate:"required"` + Schema ImportTaskSchema `json:"schema" validate:"required"` } - ImportConfig { - Version string `json:"version" validate:"required"` - Description string `json:"description,optional"` - RemoveTempFiles bool `json:"removeTempFiles,optional"` - ClientSettings ClientSettings `json:"clientSettings" validate:"required"` - Files []File `json:"files" validate:"required"` + ImportTaskConfig { + Version string `json:"version" validate:"required"` + Description string `json:"description,optional"` + RemoveTempFiles bool `json:"removeTempFiles,optional"` + ClientSettings ImportTaskClientSettings `json:"clientSettings" validate:"required"` + Files []ImportTaskFile `json:"files" validate:"required"` } CreateImportTaskRequest { - Name string `json:"name" validate:"required"` - Config ImportConfig `json:"config" validate:"required"` + Name string `json:"name" validate:"required"` + Config ImportTaskConfig `json:"config" validate:"required"` } CreateImportTaskData { @@ -122,18 +122,18 @@ type ( } GetImportTaskData { - Id string `json:"id"` - Name string `json:"name"` - User string `json:"user"` - Address string `json:"address"` - Space string `json:"space"` - Status string `json:"status"` - CreateTime int64 `json:"createTime"` - UpdateTime int64 `json:"updateTime"` - Stats Stats `json:"stats"` - } - - Stats { + Id string `json:"id"` + Name string `json:"name"` + User string `json:"user"` + Address string `json:"address"` + Space string `json:"space"` + Status string `json:"status"` + CreateTime int64 `json:"createTime"` + UpdateTime int64 `json:"updateTime"` + Stats ImportTaskStats `json:"stats"` + } + + ImportTaskStats { NumFailed int64 `json:"numFailed"` NumReadFailed int64 `json:"numReadFailed"` TotalCount int64 `json:"totalCount"` diff --git a/server-v2/api/studio/studio.go b/server-v2/api/studio/studio.go index 9e8644d4..181cadb9 100644 --- a/server-v2/api/studio/studio.go +++ b/server-v2/api/studio/studio.go @@ -13,7 +13,6 @@ import ( "github.com/vesoft-inc/nebula-studio/server/api/studio/internal/service/importer" "github.com/vesoft-inc/nebula-studio/server/api/studio/internal/svc" "github.com/vesoft-inc/nebula-studio/server/api/studio/pkg/auth" - Config 
"github.com/vesoft-inc/nebula-studio/server/api/studio/pkg/config" "github.com/vesoft-inc/nebula-studio/server/api/studio/pkg/logging" "go.uber.org/zap" @@ -38,11 +37,11 @@ func main() { panic(err) } - if err := Config.InitConfig(*configFile); err != nil { + if err := c.InitConfig(); err != nil { zap.L().Fatal("init config failed", zap.Error(err)) } - importer.InitDB() + importer.InitDB(c.File.SqliteDbFilePath) svcCtx := svc.NewServiceContext(c) server := rest.MustNewServer(c.RestConf, rest.WithNotFoundHandler(middleware.NewAssetsHandler(middleware.AssetsConfig{ @@ -57,7 +56,7 @@ func main() { server.Use(auth.AuthMiddlewareWithCtx(svcCtx)) server.Use(rest.ToMiddleware(middleware.ReserveRequest(middleware.ReserveRequestConfig{ Skipper: func(r *http.Request) bool { - if strings.HasPrefix(r.URL.Path, "/api/file/upload") { + if strings.HasPrefix(r.URL.Path, "/api/files") { return false } if strings.HasPrefix(r.URL.Path, "/api/import-tasks/download") { diff --git a/server-v2/go.mod b/server-v2/go.mod index 1bafbe9b..7808475b 100644 --- a/server-v2/go.mod +++ b/server-v2/go.mod @@ -3,10 +3,20 @@ module github.com/vesoft-inc/nebula-studio/server go 1.17 require ( - github.com/vesoft-inc/go-pkg v0.0.0-20220511092334-a180a9379d8d + github.com/vesoft-inc/go-pkg v0.0.0-20220516090733-5ce93ad3254b + github.com/vesoft-inc/nebula-importer v1.0.1-0.20220505095506-93febd41c2be github.com/zeromicro/go-zero v1.3.3 ) +require github.com/vesoft-inc/nebula-go/v3 v3.0.0-20220425030225-cdb52399b40a // indirect + +require ( + github.com/jinzhu/inflection v1.0.0 // indirect + github.com/jinzhu/now v1.1.5 // indirect + github.com/mattn/go-sqlite3 v1.14.12 + gorm.io/gorm v1.23.4 +) + require ( github.com/facebook/fbthrift v0.31.1-0.20211129061412-801ed7f9f295 // indirect github.com/satori/go.uuid v1.2.1-0.20181028125025-b2ce2384e17b // indirect @@ -53,4 +63,5 @@ require ( google.golang.org/grpc v1.46.0 // indirect google.golang.org/protobuf v1.28.0 // indirect gopkg.in/yaml.v2 v2.4.0 + gorm.io/driver/sqlite v1.3.2 ) diff --git a/server-v2/go.sum b/server-v2/go.sum index f6c28bf6..c513edad 100644 --- a/server-v2/go.sum +++ b/server-v2/go.sum @@ -250,6 +250,11 @@ github.com/jcmturner/gofork v1.0.0/go.mod h1:MK8+TM0La+2rjBD4jE12Kj1pCCxK7d2LK/U github.com/jcmturner/goidentity/v6 v6.0.1/go.mod h1:X1YW3bgtvwAXju7V3LCIMpY0Gbxyjn/mY9zx4tFonSg= github.com/jcmturner/gokrb5/v8 v8.4.2/go.mod h1:sb+Xq/fTY5yktf/VxLsE3wlfPqQjp0aWNYyvBVK62bc= github.com/jcmturner/rpc/v2 v2.0.3/go.mod h1:VUJYCIDm3PVOEHw8sgt091/20OJjskO/YJki3ELg/Hc= +github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E= +github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc= +github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= +github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ= +github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8= github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= @@ -289,6 +294,8 @@ github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Ky github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= github.com/mattn/go-sqlite3 v1.9.0/go.mod 
h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= +github.com/mattn/go-sqlite3 v1.14.12 h1:TJ1bhYJPV44phC+IMu1u2K/i5RriLTPe+yc68XDJ1Z0= +github.com/mattn/go-sqlite3 v1.14.12/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 h1:I0XW9+e1XWDxdcEniV4rQAIOPUGDq67JSCiRCgGCZLI= github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= @@ -391,10 +398,14 @@ github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMT github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk= github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= -github.com/vesoft-inc/go-pkg v0.0.0-20220511092334-a180a9379d8d h1:Q/eVc0H8CuQNmviD43vAxtJKyB3aFd9R8Z7pOdxokoA= -github.com/vesoft-inc/go-pkg v0.0.0-20220511092334-a180a9379d8d/go.mod h1:HCAXRhF2io+nPLQnl+RQ6XyVcp1Xdv6NgslXRBBCiEU= +github.com/vesoft-inc/go-pkg v0.0.0-20220516090733-5ce93ad3254b h1:MVAkGU2YH1p3PhWN0T+r0bkv+gn5b33J2tEdnXXZAUE= +github.com/vesoft-inc/go-pkg v0.0.0-20220516090733-5ce93ad3254b/go.mod h1:HCAXRhF2io+nPLQnl+RQ6XyVcp1Xdv6NgslXRBBCiEU= +github.com/vesoft-inc/nebula-go/v3 v3.0.0-20220425030225-cdb52399b40a h1:/8l9RT6gU0cUS1Cgzqv3A9dKto19VQBjVk1BqAAqqvM= +github.com/vesoft-inc/nebula-go/v3 v3.0.0-20220425030225-cdb52399b40a/go.mod h1:+sXv05jYQBARdTbTcIEsWVXCnF/6ttOlDK35xQ6m54s= github.com/vesoft-inc/nebula-http-gateway/ccore v0.0.0-20220413113447-a3f4c56287d8 h1:iL92Uk6hAe4vUBK/L99wf5295HYOtnD4plctVA5xek0= github.com/vesoft-inc/nebula-http-gateway/ccore v0.0.0-20220413113447-a3f4c56287d8/go.mod h1:sFEvE+cY4TgwqWx6H6msOqAUzRhsEHHKaaMgIZENHuQ= +github.com/vesoft-inc/nebula-importer v1.0.1-0.20220505095506-93febd41c2be h1:7YGSREZ6uS1WjCdKTlHEvhSeYURJFY7UXQZ008+EB2Y= +github.com/vesoft-inc/nebula-importer v1.0.1-0.20220505095506-93febd41c2be/go.mod h1:8xAQi6KI2qe40Dop/GqDXmBEurt7qGp5Pjd1MESAVNA= github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI= github.com/xdg-go/scram v1.0.2/go.mod h1:1WAq6h33pAW+iRreB34OORO2Nf7qel3VV3fjBj+hCSs= github.com/xdg-go/stringprep v1.0.2/go.mod h1:8F9zXuvzgwmyT5DUm4GUfZGDdT3W+LCvS6+da4O5kxM= @@ -790,6 +801,10 @@ gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gorm.io/driver/sqlite v1.3.2 h1:nWTy4cE52K6nnMhv23wLmur9Y3qWbZvOBz+V4PrGAxg= +gorm.io/driver/sqlite v1.3.2/go.mod h1:B+8GyC9K7VgzJAcrcXMRPdnMcck+8FgJynEehEPM16U= +gorm.io/gorm v1.23.4 h1:1BKWM67O6CflSLcwGQR7ccfmC4ebOxQrTfOQGRE9wjg= +gorm.io/gorm v1.23.4/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=