From 3760c272aab3a04556c36b059ca2165d985c387f Mon Sep 17 00:00:00 2001 From: xhe Date: Thu, 5 Aug 2021 10:57:07 +0800 Subject: [PATCH 01/26] ddl: fix bug introduced by 19222 (#26830) --- ddl/partition.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ddl/partition.go b/ddl/partition.go index a3872bbcf0cc0..2a16e6b357699 100644 --- a/ddl/partition.go +++ b/ddl/partition.go @@ -967,12 +967,12 @@ func (w *worker) onDropTablePartition(d *ddlCtx, t *meta.Meta, job *model.Job) ( job.State = model.JobStateCancelled return ver, errors.Trace(err) } + physicalTableIDs = updateDroppingPartitionInfo(tblInfo, partNames) err = dropRuleBundles(d, physicalTableIDs) if err != nil { job.State = model.JobStateCancelled return ver, errors.Wrapf(err, "failed to notify PD the placement rules") } - updateDroppingPartitionInfo(tblInfo, partNames) job.SchemaState = model.StateDeleteOnly ver, err = updateVersionAndTableInfo(t, job, tblInfo, originalState != job.SchemaState) case model.StateDeleteOnly: From b452fe99c1094465be1dad691786bc70e39b4cff Mon Sep 17 00:00:00 2001 From: Morgan Tocker Date: Wed, 4 Aug 2021 21:29:13 -0600 Subject: [PATCH 02/26] executor: allow SET CONFIG to accept hostnames (#26906) --- executor/set_config.go | 6 +++--- executor/set_test.go | 3 ++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/executor/set_config.go b/executor/set_config.go index e31bab643cbb8..c9734db27d548 100644 --- a/executor/set_config.go +++ b/executor/set_config.go @@ -153,15 +153,15 @@ func (s *SetConfigExec) doRequest(url string) (retErr error) { } func isValidInstance(instance string) bool { - ip, port, err := net.SplitHostPort(instance) + host, port, err := net.SplitHostPort(instance) if err != nil { return false } if port == "" { return false } - v := net.ParseIP(ip) - return v != nil + _, err = net.LookupIP(host) + return err == nil } // ConvertConfigItem2JSON converts the config item specified by key and val to json. 
diff --git a/executor/set_test.go b/executor/set_test.go index 6360fc32466af..667f4ecf3880a 100644 --- a/executor/set_test.go +++ b/executor/set_test.go @@ -1332,7 +1332,8 @@ func (s *testSuite5) TestSetClusterConfig(c *C) { c.Assert(tk.ExecToErr("set config xxx log.level='info'"), ErrorMatches, "unknown type xxx") c.Assert(tk.ExecToErr("set config tidb log.level='info'"), ErrorMatches, "TiDB doesn't support to change configs online, please use SQL variables") c.Assert(tk.ExecToErr("set config '127.0.0.1:1111' log.level='info'"), ErrorMatches, "TiDB doesn't support to change configs online, please use SQL variables") - c.Assert(tk.ExecToErr("set config '127.a.b.c:1234' log.level='info'"), ErrorMatches, "invalid instance 127.a.b.c:1234") + c.Assert(tk.ExecToErr("set config '127.a.b.c:1234' log.level='info'"), ErrorMatches, "invalid instance 127.a.b.c:1234") // name doesn't resolve. + c.Assert(tk.ExecToErr("set config 'example.com:1111' log.level='info'"), ErrorMatches, "instance example.com:1111 is not found in this cluster") // name resolves. 
c.Assert(tk.ExecToErr("set config tikv log.level=null"), ErrorMatches, "can't set config to null") c.Assert(tk.ExecToErr("set config '1.1.1.1:1111' log.level='info'"), ErrorMatches, "instance 1.1.1.1:1111 is not found in this cluster") From ac670dd0332b2e73e2192a67bc838a4764561f97 Mon Sep 17 00:00:00 2001 From: Morgan Tocker Date: Wed, 4 Aug 2021 21:49:13 -0600 Subject: [PATCH 03/26] executor, privilege: fix show grants (#26680) --- executor/revoke.go | 19 ++++-- privilege/privileges/cache.go | 79 ++++++------------------- privilege/privileges/privileges_test.go | 48 +++++++++++++++ 3 files changed, 80 insertions(+), 66 deletions(-) diff --git a/executor/revoke.go b/executor/revoke.go index 8532f4f5fb3c7..c15bdb17e0d2b 100644 --- a/executor/revoke.go +++ b/executor/revoke.go @@ -297,12 +297,21 @@ func privUpdateForRevoke(cur []string, priv mysql.PrivilegeType) ([]string, erro func composeTablePrivUpdateForRevoke(ctx sessionctx.Context, sql *strings.Builder, priv mysql.PrivilegeType, name string, host string, db string, tbl string) error { var newTablePriv, newColumnPriv []string - if priv != mysql.AllPriv { - currTablePriv, currColumnPriv, err := getTablePriv(ctx, name, host, db, tbl) - if err != nil { - return err - } + currTablePriv, currColumnPriv, err := getTablePriv(ctx, name, host, db, tbl) + if err != nil { + return err + } + if priv == mysql.AllPriv { + // Revoke ALL does not revoke the Grant option, + // so we only need to check if the user previously had this. 
+ tmp := SetFromString(currTablePriv) + for _, p := range tmp { + if p == mysql.Priv2SetStr[mysql.GrantPriv] { + newTablePriv = []string{mysql.Priv2SetStr[mysql.GrantPriv]} + } + } + } else { newTablePriv = SetFromString(currTablePriv) newTablePriv, err = privUpdateForRevoke(newTablePriv, priv) if err != nil { diff --git a/privilege/privileges/cache.go b/privilege/privileges/cache.go index b457c311c57d3..fd4639aa05a79 100644 --- a/privilege/privileges/cache.go +++ b/privilege/privileges/cache.go @@ -1114,7 +1114,7 @@ func (p *MySQLPrivilege) showGrants(user, host string, roles []*auth.RoleIdentit allRoles := p.FindAllRole(roles) // Show global grants. var currentPriv mysql.PrivilegeType - var hasGrantOptionPriv, userExists = false, false + var userExists = false // Check whether user exists. if userList, ok := p.UserMap[user]; ok { for _, record := range userList { @@ -1131,21 +1131,11 @@ func (p *MySQLPrivilege) showGrants(user, host string, roles []*auth.RoleIdentit for _, record := range p.User { if record.fullyMatch(user, host) { hasGlobalGrant = true - if (record.Privileges & mysql.GrantPriv) > 0 { - hasGrantOptionPriv = true - currentPriv |= (record.Privileges & ^mysql.GrantPriv) - continue - } currentPriv |= record.Privileges } else { for _, r := range allRoles { if record.baseRecord.match(r.Username, r.Hostname) { hasGlobalGrant = true - if (record.Privileges & mysql.GrantPriv) > 0 { - hasGrantOptionPriv = true - currentPriv |= (record.Privileges & ^mysql.GrantPriv) - continue - } currentPriv |= record.Privileges } } @@ -1154,9 +1144,8 @@ func (p *MySQLPrivilege) showGrants(user, host string, roles []*auth.RoleIdentit g = userPrivToString(currentPriv) if len(g) > 0 { var s string - if hasGrantOptionPriv { + if (currentPriv & mysql.GrantPriv) > 0 { s = fmt.Sprintf(`GRANT %s ON *.* TO '%s'@'%s' WITH GRANT OPTION`, g, user, host) - } else { s = fmt.Sprintf(`GRANT %s ON *.* TO '%s'@'%s'`, g, user, host) @@ -1167,7 +1156,7 @@ func (p *MySQLPrivilege) 
showGrants(user, host string, roles []*auth.RoleIdentit // This is a mysql convention. if len(gs) == 0 && hasGlobalGrant { var s string - if hasGrantOptionPriv { + if (currentPriv & mysql.GrantPriv) > 0 { s = fmt.Sprintf("GRANT USAGE ON *.* TO '%s'@'%s' WITH GRANT OPTION", user, host) } else { s = fmt.Sprintf("GRANT USAGE ON *.* TO '%s'@'%s'", user, host) @@ -1180,36 +1169,16 @@ func (p *MySQLPrivilege) showGrants(user, host string, roles []*auth.RoleIdentit for _, record := range p.DB { if record.fullyMatch(user, host) { if _, ok := dbPrivTable[record.DB]; ok { - if (record.Privileges & mysql.GrantPriv) > 0 { - hasGrantOptionPriv = true - dbPrivTable[record.DB] |= (record.Privileges & ^mysql.GrantPriv) - continue - } dbPrivTable[record.DB] |= record.Privileges } else { - if (record.Privileges & mysql.GrantPriv) > 0 { - hasGrantOptionPriv = true - dbPrivTable[record.DB] = (record.Privileges & ^mysql.GrantPriv) - continue - } dbPrivTable[record.DB] = record.Privileges } } else { for _, r := range allRoles { if record.baseRecord.match(r.Username, r.Hostname) { if _, ok := dbPrivTable[record.DB]; ok { - if (record.Privileges & mysql.GrantPriv) > 0 { - hasGrantOptionPriv = true - dbPrivTable[record.DB] |= (record.Privileges & ^mysql.GrantPriv) - continue - } dbPrivTable[record.DB] |= record.Privileges } else { - if (record.Privileges & mysql.GrantPriv) > 0 { - hasGrantOptionPriv = true - dbPrivTable[record.DB] = (record.Privileges & ^mysql.GrantPriv) - continue - } dbPrivTable[record.DB] = record.Privileges } } @@ -1220,14 +1189,17 @@ func (p *MySQLPrivilege) showGrants(user, host string, roles []*auth.RoleIdentit g := dbPrivToString(priv) if len(g) > 0 { var s string - if hasGrantOptionPriv { + if (priv & mysql.GrantPriv) > 0 { s = fmt.Sprintf(`GRANT %s ON %s.* TO '%s'@'%s' WITH GRANT OPTION`, g, dbName, user, host) - } else { s = fmt.Sprintf(`GRANT %s ON %s.* TO '%s'@'%s'`, g, dbName, user, host) - } gs = append(gs, s) + } else if len(g) == 0 && 
(priv&mysql.GrantPriv) > 0 { + // We have GRANT OPTION on the db, but no privilege granted. + // So we need to print a special USAGE line. + s := fmt.Sprintf(`GRANT USAGE ON %s.* TO '%s'@'%s' WITH GRANT OPTION`, dbName, user, host) + gs = append(gs, s) } } @@ -1237,36 +1209,16 @@ func (p *MySQLPrivilege) showGrants(user, host string, roles []*auth.RoleIdentit recordKey := record.DB + "." + record.TableName if user == record.User && host == record.Host { if _, ok := dbPrivTable[record.DB]; ok { - if (record.TablePriv & mysql.GrantPriv) > 0 { - hasGrantOptionPriv = true - tablePrivTable[recordKey] |= (record.TablePriv & ^mysql.GrantPriv) - continue - } tablePrivTable[recordKey] |= record.TablePriv } else { - if (record.TablePriv & mysql.GrantPriv) > 0 { - hasGrantOptionPriv = true - tablePrivTable[recordKey] = (record.TablePriv & ^mysql.GrantPriv) - continue - } tablePrivTable[recordKey] = record.TablePriv } } else { for _, r := range allRoles { if record.baseRecord.match(r.Username, r.Hostname) { if _, ok := dbPrivTable[record.DB]; ok { - if (record.TablePriv & mysql.GrantPriv) > 0 { - hasGrantOptionPriv = true - tablePrivTable[recordKey] |= (record.TablePriv & ^mysql.GrantPriv) - continue - } tablePrivTable[recordKey] |= record.TablePriv } else { - if (record.TablePriv & mysql.GrantPriv) > 0 { - hasGrantOptionPriv = true - tablePrivTable[recordKey] = (record.TablePriv & ^mysql.GrantPriv) - continue - } tablePrivTable[recordKey] = record.TablePriv } } @@ -1277,12 +1229,17 @@ func (p *MySQLPrivilege) showGrants(user, host string, roles []*auth.RoleIdentit g := tablePrivToString(priv) if len(g) > 0 { var s string - if hasGrantOptionPriv { + if (priv & mysql.GrantPriv) > 0 { s = fmt.Sprintf(`GRANT %s ON %s TO '%s'@'%s' WITH GRANT OPTION`, g, k, user, host) } else { s = fmt.Sprintf(`GRANT %s ON %s TO '%s'@'%s'`, g, k, user, host) } gs = append(gs, s) + } else if len(g) == 0 && (priv&mysql.GrantPriv) > 0 { + // We have GRANT OPTION on the table, but no privilege granted. 
+ // So we need to print a special USAGE line. + s := fmt.Sprintf(`GRANT USAGE ON %s TO '%s'@'%s' WITH GRANT OPTION`, k, user, host) + gs = append(gs, s) } } @@ -1399,21 +1356,21 @@ func collectColumnGrant(record *columnsPrivRecord, user, host string, columnPriv } func userPrivToString(privs mysql.PrivilegeType) string { - if privs == userTablePrivilegeMask { + if (privs & ^mysql.GrantPriv) == userTablePrivilegeMask { return mysql.AllPrivilegeLiteral } return privToString(privs, mysql.AllGlobalPrivs, mysql.Priv2Str) } func dbPrivToString(privs mysql.PrivilegeType) string { - if privs == dbTablePrivilegeMask { + if (privs & ^mysql.GrantPriv) == dbTablePrivilegeMask { return mysql.AllPrivilegeLiteral } return privToString(privs, mysql.AllDBPrivs, mysql.Priv2SetStr) } func tablePrivToString(privs mysql.PrivilegeType) string { - if privs == tablePrivMask { + if (privs & ^mysql.GrantPriv) == tablePrivMask { return mysql.AllPrivilegeLiteral } return privToString(privs, mysql.AllTablePrivs, mysql.Priv2Str) diff --git a/privilege/privileges/privileges_test.go b/privilege/privileges/privileges_test.go index 3b93873b11be6..d6842e5c262de 100644 --- a/privilege/privileges/privileges_test.go +++ b/privilege/privileges/privileges_test.go @@ -1858,3 +1858,51 @@ func (s *testPrivilegeSuite) TestInfoschemaUserPrivileges(c *C) { tk.MustQuery(`SELECT * FROM information_schema.user_privileges WHERE grantee = "'isroot'@'%'"`).Check(testkit.Rows("'isroot'@'%' def SUPER NO")) tk.MustQuery(`SELECT * FROM information_schema.user_privileges WHERE grantee = "'isselectonmysqluser'@'%'"`).Check(testkit.Rows("'isselectonmysqluser'@'%' def USAGE NO")) } + +// Issues https://github.com/pingcap/tidb/issues/25972 and https://github.com/pingcap/tidb/issues/26451 +func (s *testPrivilegeSuite) TestGrantOptionAndRevoke(c *C) { + tk := testkit.NewTestKit(c, s.store) + tk.MustExec("DROP USER IF EXISTS u1, u2, u3, ruser") + tk.MustExec("CREATE USER u1, u2, u3, ruser") + tk.MustExec("GRANT ALL ON *.* TO 
ruser WITH GRANT OPTION") + tk.MustExec("GRANT SELECT ON *.* TO u1 WITH GRANT OPTION") + tk.MustExec("GRANT UPDATE, DELETE on db.* TO u1") + + tk.Se.Auth(&auth.UserIdentity{ + Username: "ruser", + Hostname: "localhost", + }, nil, nil) + + tk.MustQuery(`SHOW GRANTS FOR u1`).Check(testkit.Rows("GRANT SELECT ON *.* TO 'u1'@'%' WITH GRANT OPTION", "GRANT UPDATE,DELETE ON db.* TO 'u1'@'%'")) + + tk.MustExec("GRANT SELECT ON d1.* to u2") + tk.MustExec("GRANT SELECT ON d2.* to u2 WITH GRANT OPTION") + tk.MustExec("GRANT SELECT ON d3.* to u2") + tk.MustExec("GRANT SELECT ON d4.* to u2") + tk.MustExec("GRANT SELECT ON d5.* to u2") + tk.MustQuery(`SHOW GRANTS FOR u2;`).Sort().Check(testkit.Rows( + "GRANT SELECT ON d1.* TO 'u2'@'%'", + "GRANT SELECT ON d2.* TO 'u2'@'%' WITH GRANT OPTION", + "GRANT SELECT ON d3.* TO 'u2'@'%'", + "GRANT SELECT ON d4.* TO 'u2'@'%'", + "GRANT SELECT ON d5.* TO 'u2'@'%'", + "GRANT USAGE ON *.* TO 'u2'@'%'", + )) + + tk.MustExec("grant all on hchwang.* to u3 with grant option") + tk.MustQuery(`SHOW GRANTS FOR u3;`).Check(testkit.Rows("GRANT USAGE ON *.* TO 'u3'@'%'", "GRANT ALL PRIVILEGES ON hchwang.* TO 'u3'@'%' WITH GRANT OPTION")) + tk.MustExec("revoke all on hchwang.* from u3") + tk.MustQuery(`SHOW GRANTS FOR u3;`).Check(testkit.Rows("GRANT USAGE ON *.* TO 'u3'@'%'", "GRANT USAGE ON hchwang.* TO 'u3'@'%' WITH GRANT OPTION")) + + // Same again but with column privileges. 
+ + tk.MustExec("DROP TABLE IF EXISTS test.testgrant") + tk.MustExec("CREATE TABLE test.testgrant (a int)") + tk.MustExec("grant all on test.testgrant to u3 with grant option") + tk.MustExec("revoke all on test.testgrant from u3") + tk.MustQuery(`SHOW GRANTS FOR u3`).Sort().Check(testkit.Rows( + "GRANT USAGE ON *.* TO 'u3'@'%'", + "GRANT USAGE ON hchwang.* TO 'u3'@'%' WITH GRANT OPTION", + "GRANT USAGE ON test.testgrant TO 'u3'@'%' WITH GRANT OPTION", + )) +} From 58838a732df8cf612dd802f34a2d93fe89abaf3a Mon Sep 17 00:00:00 2001 From: wjHuang Date: Thu, 5 Aug 2021 11:57:13 +0800 Subject: [PATCH 04/26] ddl: fix unstable tests (#26513) --- ddl/db_test.go | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/ddl/db_test.go b/ddl/db_test.go index 2baf5a2265835..30341a3999c11 100644 --- a/ddl/db_test.go +++ b/ddl/db_test.go @@ -294,7 +294,6 @@ func backgroundExec(s kv.Storage, sql string, done chan error) { // TestAddPrimaryKeyRollback1 is used to test scenarios that will roll back when a duplicate primary key is encountered. func (s *testDBSuite8) TestAddPrimaryKeyRollback1(c *C) { - c.Skip("unstable, skip it and fix it before 20210705") hasNullValsInKey := false idxName := "PRIMARY" addIdxSQL := "alter table t1 add primary key c3_index (c3);" @@ -312,7 +311,6 @@ func (s *testDBSuite8) TestAddPrimaryKeyRollback2(c *C) { } func (s *testDBSuite2) TestAddUniqueIndexRollback(c *C) { - c.Skip("unstable, skip it and fix it before 20210702") hasNullValsInKey := false idxName := "c3_index" addIdxSQL := "create unique index c3_index on t1 (c3)" @@ -513,8 +511,9 @@ LOOP: // delete some rows, and add some data for i := count; i < count+step; i++ { n := rand.Intn(count) - // Don't delete this row, otherwise error message would change. - if n == defaultBatchSize*2-10 { + // (2048, 2038, 2038) and (2038, 2038, 2038) + // Don't delete rows where c1 is 2048 or 2038, otherwise, the entry value in duplicated error message would change. 
+ if n == defaultBatchSize*2-10 || n == defaultBatchSize*2 { continue } tk.MustExec("delete from t1 where c1 = ?", n) From 019fab3b0a4c20f39ef4432cdb1c288135cdb105 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Dani=C3=ABl=20van=20Eeden?= Date: Thu, 5 Aug 2021 06:05:13 +0200 Subject: [PATCH 05/26] *: Run errcheck and staticcheck via golangci-lint (#26899) --- .golangci.yml | 44 +++++++++++++++++++++++++++++ Makefile | 47 ++----------------------------- go.mod | 5 ++-- go.sum | 6 ++-- staticcheck.conf | 10 ------- tools/check/errcheck_excludes.txt | 3 -- 6 files changed, 53 insertions(+), 62 deletions(-) create mode 100644 .golangci.yml delete mode 100644 staticcheck.conf delete mode 100644 tools/check/errcheck_excludes.txt diff --git a/.golangci.yml b/.golangci.yml new file mode 100644 index 0000000000000..bb879308a9462 --- /dev/null +++ b/.golangci.yml @@ -0,0 +1,44 @@ +run: + timeout: 6m +linters: + disable-all: true + enable: + - misspell + - ineffassign + - typecheck + - varcheck + - unused + - structcheck + - deadcode + - gosimple + - goimports + - errcheck + - staticcheck + - stylecheck + - gosec +linters-settings: + staticcheck: + checks: ["S1002","S1004","S1007","S1009","S1010","S1012","S1019","S1020","S1021","S1024","S1030","SA2*","SA3*","SA4009","SA5*","SA6000","SA6001","SA6005", "-SA2002"] + stylecheck: + checks: ["-ST1003"] + gosec: + excludes: + - G107 + - G108 + - G110 + - G306 + - G401 + - G402 + - G403 + - G404 + - G501 + - G502 + - G505 + - G601 + +issues: + exclude-rules: + - path: _test\.go + linters: + - errcheck + - gosec diff --git a/Makefile b/Makefile index 697727cf80af7..370ccb0d433df 100644 --- a/Makefile +++ b/Makefile @@ -30,12 +30,9 @@ parser: dev: checklist check test # Install the check tools. 
-check-setup:tools/bin/revive tools/bin/goword tools/bin/gometalinter tools/bin/gosec +check-setup:tools/bin/revive tools/bin/goword -check: fmt errcheck unconvert lint tidy testSuite check-static vet staticcheck errdoc - -# These need to be fixed before they can be ran regularly -check-fail: goword check-slow +check: fmt unconvert lint tidy testSuite check-static vet errdoc fmt: @echo "gofmt (simplify)" @@ -44,30 +41,8 @@ fmt: goword:tools/bin/goword tools/bin/goword $(FILES) 2>&1 | $(FAIL_ON_STDOUT) -gosec:tools/bin/gosec - tools/bin/gosec $$($(PACKAGE_DIRECTORIES)) - check-static: tools/bin/golangci-lint - tools/bin/golangci-lint run -v --disable-all --deadline=3m \ - --enable=misspell \ - --enable=ineffassign \ - --enable=typecheck \ - --enable=varcheck \ - --enable=unused \ - --enable=structcheck \ - --enable=deadcode \ - --enable=gosimple \ - --enable=goimports \ - $$($(PACKAGE_DIRECTORIES)) - -check-slow:tools/bin/gometalinter tools/bin/gosec - tools/bin/gometalinter --disable-all \ - --enable errcheck \ - $$($(PACKAGE_DIRECTORIES)) - -errcheck:tools/bin/errcheck - @echo "errcheck" - @GO111MODULE=on tools/bin/errcheck -exclude ./tools/check/errcheck_excludes.txt -ignoretests -blank $(PACKAGES) + tools/bin/golangci-lint run -v $$($(PACKAGE_DIRECTORIES)) unconvert:tools/bin/unconvert @echo "unconvert check" @@ -89,10 +64,6 @@ vet: @echo "vet" $(GO) vet -all $(PACKAGES) 2>&1 | $(FAIL_ON_STDOUT) -staticcheck: - $(GO) get honnef.co/go/tools/cmd/staticcheck - $(STATICCHECK) ./... 
- tidy: @echo "go mod tidy" ./tools/check/check-tidy.sh @@ -218,18 +189,6 @@ tools/bin/goword: tools/check/go.mod cd tools/check; \ $(GO) build -o ../bin/goword github.com/chzchzchz/goword -tools/bin/gometalinter: tools/check/go.mod - cd tools/check; \ - $(GO) build -o ../bin/gometalinter gopkg.in/alecthomas/gometalinter.v3 - -tools/bin/gosec: tools/check/go.mod - cd tools/check; \ - $(GO) build -o ../bin/gosec github.com/securego/gosec/cmd/gosec - -tools/bin/errcheck: tools/check/go.mod - cd tools/check; \ - $(GO) build -o ../bin/errcheck github.com/kisielk/errcheck - tools/bin/unconvert: tools/check/go.mod cd tools/check; \ $(GO) build -o ../bin/unconvert github.com/mdempsky/unconvert diff --git a/go.mod b/go.mod index 25a2951ea60a4..17af5a487a71c 100644 --- a/go.mod +++ b/go.mod @@ -72,13 +72,12 @@ require ( go.uber.org/zap v1.18.1 golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c - golang.org/x/sys v0.0.0-20210510120138-977fb7262007 + golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c golang.org/x/text v0.3.6 - golang.org/x/tools v0.1.4 + golang.org/x/tools v0.1.5 google.golang.org/grpc v1.27.1 gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect gopkg.in/yaml.v2 v2.4.0 - honnef.co/go/tools v0.2.0 // indirect modernc.org/mathutil v1.2.2 // indirect sourcegraph.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0 sourcegraph.com/sourcegraph/appdash-data v0.0.0-20151005221446-73f23eafcf67 diff --git a/go.sum b/go.sum index 753a0a2f374ce..93b9afc5cfc37 100644 --- a/go.sum +++ b/go.sum @@ -786,8 +786,9 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210217105451-b926d437f341/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210510120138-977fb7262007 h1:gG67DSER+11cZvqIMb8S8bt0vZtiN6xWYARwirrOSfE= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -847,8 +848,9 @@ golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roY golang.org/x/tools v0.0.0-20201125231158-b5590deeca9b/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= -golang.org/x/tools v0.1.4 h1:cVngSRcfgyZCzys3KYOpCFa+4dqX/Oub9tAq00ttGVs= golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.5 h1:ouewzE6p+/VEB31YYnTbEJdi8pFqKp4P4n85vwo3DHA= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/staticcheck.conf b/staticcheck.conf deleted file mode 100644 index 28941ad3bcd75..0000000000000 --- a/staticcheck.conf +++ /dev/null @@ -1,10 +0,0 @@ -checks = 
["S1002","S1004","S1007","S1009","S1010","S1012","S1019","S1020","S1021","S1024","S1030","SA2*","SA3*","SA4009","SA5*","SA6000","SA6001","SA6005", "-SA2002"] -initialisms = ["ACL", "API", "ASCII", "CPU", "CSS", "DNS", - "EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID", - "IP", "JSON", "QPS", "RAM", "RPC", "SLA", - "SMTP", "SQL", "SSH", "TCP", "TLS", "TTL", - "UDP", "UI", "GID", "UID", "UUID", "URI", - "URL", "UTF8", "VM", "XML", "XMPP", "XSRF", - "XSS"] -dot_import_whitelist = [] -http_status_code_whitelist = ["200", "400", "404", "500"] diff --git a/tools/check/errcheck_excludes.txt b/tools/check/errcheck_excludes.txt deleted file mode 100644 index d12c2b8e861a1..0000000000000 --- a/tools/check/errcheck_excludes.txt +++ /dev/null @@ -1,3 +0,0 @@ -fmt.Fprintf -fmt.Fprint -fmt.Sscanf \ No newline at end of file From ebb7d70268be38f49db34ebdc115e18eed3e1dff Mon Sep 17 00:00:00 2001 From: chAnge <50198008+chAngeZhaoZhanBo@users.noreply.github.com> Date: Thu, 5 Aug 2021 14:55:13 +0800 Subject: [PATCH 06/26] expression: Support mathematical functions pushdown to tiflash (#25596) --- expression/expr_to_pb_test.go | 56 +++++++++++++++++++++++++++++++++-- expression/expression.go | 3 +- 2 files changed, 55 insertions(+), 4 deletions(-) diff --git a/expression/expr_to_pb_test.go b/expression/expr_to_pb_test.go index 8f7c32811837e..6024503cc33e4 100644 --- a/expression/expr_to_pb_test.go +++ b/expression/expr_to_pb_test.go @@ -755,7 +755,7 @@ func (s *testEvaluatorSuite) TestExprPushDownToFlash(c *C) { c.Assert(err, IsNil) exprs = append(exprs, function) - // ScalarFuncSig_CeilDecimalToDecimal + // ScalarFuncSig_CeilDecToDec function, err = NewFunction(mock.NewContext(), ast.Ceil, types.NewFieldType(mysql.TypeNewDecimal), decimalColumn) c.Assert(err, IsNil) exprs = append(exprs, function) @@ -770,16 +770,66 @@ func (s *testEvaluatorSuite) TestExprPushDownToFlash(c *C) { c.Assert(err, IsNil) exprs = append(exprs, function) - // ScalarFuncSig_FloorDecimalToInt + // 
ScalarFuncSig_FloorDecToInt function, err = NewFunction(mock.NewContext(), ast.Floor, types.NewFieldType(mysql.TypeLonglong), decimalColumn) c.Assert(err, IsNil) exprs = append(exprs, function) - // ScalarFuncSig_FloorDecimalToDecimal + // ScalarFuncSig_FloorDecToDec function, err = NewFunction(mock.NewContext(), ast.Floor, types.NewFieldType(mysql.TypeNewDecimal), decimalColumn) c.Assert(err, IsNil) exprs = append(exprs, function) + // ScalarFuncSig_Log1Arg + function, err = NewFunction(mock.NewContext(), ast.Log, types.NewFieldType(mysql.TypeDouble), realColumn) + c.Assert(err, IsNil) + exprs = append(exprs, function) + + // ScalarFuncSig_Log2Args + function, err = NewFunction(mock.NewContext(), ast.Log, types.NewFieldType(mysql.TypeDouble), realColumn, realColumn) + c.Assert(err, IsNil) + exprs = append(exprs, function) + + // ScalarFuncSig_Log2 + function, err = NewFunction(mock.NewContext(), ast.Log2, types.NewFieldType(mysql.TypeDouble), realColumn) + c.Assert(err, IsNil) + exprs = append(exprs, function) + + // ScalarFuncSig_Log10 + function, err = NewFunction(mock.NewContext(), ast.Log10, types.NewFieldType(mysql.TypeDouble), realColumn) + c.Assert(err, IsNil) + exprs = append(exprs, function) + + // ScalarFuncSig_Exp + function, err = NewFunction(mock.NewContext(), ast.Exp, types.NewFieldType(mysql.TypeDouble), realColumn) + c.Assert(err, IsNil) + exprs = append(exprs, function) + + // ScalarFuncSig_Pow + function, err = NewFunction(mock.NewContext(), ast.Pow, types.NewFieldType(mysql.TypeDouble), realColumn, realColumn) + c.Assert(err, IsNil) + exprs = append(exprs, function) + + // ScalarFuncSig_Radians + function, err = NewFunction(mock.NewContext(), ast.Radians, types.NewFieldType(mysql.TypeDouble), realColumn) + c.Assert(err, IsNil) + exprs = append(exprs, function) + + // ScalarFuncSig_Degrees + function, err = NewFunction(mock.NewContext(), ast.Degrees, types.NewFieldType(mysql.TypeDouble), realColumn) + c.Assert(err, IsNil) + exprs = append(exprs, 
function) + + // ScalarFuncSig_CRC32 + function, err = NewFunction(mock.NewContext(), ast.CRC32, types.NewFieldType(mysql.TypeLonglong), stringColumn) + c.Assert(err, IsNil) + exprs = append(exprs, function) + + // ScalarFuncSig_Conv + function, err = NewFunction(mock.NewContext(), ast.Conv, types.NewFieldType(mysql.TypeDouble), stringColumn, intColumn, intColumn) + c.Assert(err, IsNil) + exprs = append(exprs, function) + // Replace function, err = NewFunction(mock.NewContext(), ast.Replace, types.NewFieldType(mysql.TypeString), stringColumn, stringColumn, stringColumn) c.Assert(err, IsNil) diff --git a/expression/expression.go b/expression/expression.go index a73c6df8ba735..f84f75cb9041b 100644 --- a/expression/expression.go +++ b/expression/expression.go @@ -1017,7 +1017,8 @@ func scalarExprSupportedByFlash(function *ScalarFunction) bool { ast.Concat, ast.ConcatWS, ast.Year, ast.Month, ast.Day, ast.DateDiff, ast.TimestampDiff, ast.DateFormat, ast.FromUnixTime, - ast.Sqrt, + ast.Sqrt, ast.Log, ast.Log2, ast.Log10, ast.Ln, ast.Exp, ast.Pow, ast.Sign, + ast.Radians, ast.Degrees, ast.Conv, ast.CRC32, ast.JSONLength: return true case ast.Substr, ast.Substring, ast.Left, ast.Right, ast.CharLength: From e46d9dd03d0ae920d03986149884eb1db817c79d Mon Sep 17 00:00:00 2001 From: Meng Xin Date: Thu, 5 Aug 2021 15:25:13 +0800 Subject: [PATCH 07/26] expression: Push down ADDDATE(), DATE_ADD() on String, Real types (#26441) --- expression/expression.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/expression/expression.go b/expression/expression.go index f84f75cb9041b..183083cd6c988 100644 --- a/expression/expression.go +++ b/expression/expression.go @@ -1045,7 +1045,7 @@ func scalarExprSupportedByFlash(function *ScalarFunction) bool { } case ast.DateAdd, ast.AddDate: switch function.Function.PbCode() { - case tipb.ScalarFuncSig_AddDateDatetimeInt, tipb.ScalarFuncSig_AddDateStringInt: + case tipb.ScalarFuncSig_AddDateDatetimeInt, 
tipb.ScalarFuncSig_AddDateStringInt, tipb.ScalarFuncSig_AddDateStringReal: return true } case ast.DateSub, ast.SubDate: From ce429a4ae980dea854c867b22c25fb9f67a8ecda Mon Sep 17 00:00:00 2001 From: Meng Xin Date: Thu, 5 Aug 2021 15:35:13 +0800 Subject: [PATCH 08/26] expression: support date function pushed down to tiflash (#26640) --- expression/expression.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/expression/expression.go b/expression/expression.go index 183083cd6c988..73a7d1ddca616 100644 --- a/expression/expression.go +++ b/expression/expression.go @@ -1015,7 +1015,7 @@ func scalarExprSupportedByFlash(function *ScalarFunction) bool { ast.Plus, ast.Minus, ast.Div, ast.Mul, ast.Abs, ast.Mod, ast.If, ast.Ifnull, ast.Case, ast.Concat, ast.ConcatWS, - ast.Year, ast.Month, ast.Day, + ast.Date, ast.Year, ast.Month, ast.Day, ast.DateDiff, ast.TimestampDiff, ast.DateFormat, ast.FromUnixTime, ast.Sqrt, ast.Log, ast.Log2, ast.Log10, ast.Ln, ast.Exp, ast.Pow, ast.Sign, ast.Radians, ast.Degrees, ast.Conv, ast.CRC32, From b974e5fbc4560efba7d9e510616cc34b45702eec Mon Sep 17 00:00:00 2001 From: rebelice Date: Thu, 5 Aug 2021 15:49:13 +0800 Subject: [PATCH 09/26] test: fix unstable test TestAnalyzeGlobalStatsWithOpts2 (#26921) --- statistics/handle/handle_test.go | 36 ++++++++++++++++---------------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/statistics/handle/handle_test.go b/statistics/handle/handle_test.go index 0b6e275a71c1c..ca44f560d6e02 100644 --- a/statistics/handle/handle_test.go +++ b/statistics/handle/handle_test.go @@ -901,11 +901,11 @@ func (s *testStatsSuite) prepareForGlobalStatsWithOpts(c *C, tk *testkit.TestKit } // nolint:unused -func (s *testStatsSuite) checkForGlobalStatsWithOpts(c *C, tk *testkit.TestKit, p string, topn, buckets int) { +func (s *testStatsSuite) checkForGlobalStatsWithOpts(c *C, tk *testkit.TestKit, t string, p string, topn, buckets int) { delta := buckets/2 + 1 for _, isIdx := range []int{0, 1} { 
- c.Assert(len(tk.MustQuery(fmt.Sprintf("show stats_topn where partition_name='%v' and is_index=%v", p, isIdx)).Rows()), Equals, topn) - numBuckets := len(tk.MustQuery(fmt.Sprintf("show stats_buckets where partition_name='%v' and is_index=%v", p, isIdx)).Rows()) + c.Assert(len(tk.MustQuery(fmt.Sprintf("show stats_topn where table_name='%v' and partition_name='%v' and is_index=%v", t, p, isIdx)).Rows()), Equals, topn) + numBuckets := len(tk.MustQuery(fmt.Sprintf("show stats_buckets where table_name='%v' and partition_name='%v' and is_index=%v", t, p, isIdx)).Rows()) // since the hist-building algorithm doesn't stipulate the final bucket number to be equal to the expected number exactly, // we have to check the results by a range here. c.Assert(numBuckets >= buckets-delta, IsTrue) @@ -942,9 +942,9 @@ func (s *testStatsSuite) TestAnalyzeGlobalStatsWithOpts(c *C) { sql := fmt.Sprintf("analyze table test_gstats_opt with %v topn, %v buckets", ca.topn, ca.buckets) if !ca.err { tk.MustExec(sql) - s.checkForGlobalStatsWithOpts(c, tk, "global", ca.topn, ca.buckets) - s.checkForGlobalStatsWithOpts(c, tk, "p0", ca.topn, ca.buckets) - s.checkForGlobalStatsWithOpts(c, tk, "p1", ca.topn, ca.buckets) + s.checkForGlobalStatsWithOpts(c, tk, "test_gstats_opt", "global", ca.topn, ca.buckets) + s.checkForGlobalStatsWithOpts(c, tk, "test_gstats_opt", "p0", ca.topn, ca.buckets) + s.checkForGlobalStatsWithOpts(c, tk, "test_gstats_opt", "p1", ca.topn, ca.buckets) } else { err := tk.ExecToErr(sql) c.Assert(err, NotNil) @@ -961,25 +961,25 @@ func (s *testStatsSuite) TestAnalyzeGlobalStatsWithOpts2(c *C) { s.prepareForGlobalStatsWithOpts(c, tk, "test_gstats_opt2", "test_gstats_opt2") tk.MustExec("analyze table test_gstats_opt2 with 20 topn, 50 buckets, 1000 samples") - s.checkForGlobalStatsWithOpts(c, tk, "global", 2, 50) - s.checkForGlobalStatsWithOpts(c, tk, "p0", 1, 50) - s.checkForGlobalStatsWithOpts(c, tk, "p1", 1, 50) + s.checkForGlobalStatsWithOpts(c, tk, "test_gstats_opt2", "global", 
2, 50) + s.checkForGlobalStatsWithOpts(c, tk, "test_gstats_opt2", "p0", 1, 50) + s.checkForGlobalStatsWithOpts(c, tk, "test_gstats_opt2", "p1", 1, 50) // analyze a partition to let its options be different with others' tk.MustExec("analyze table test_gstats_opt2 partition p0 with 10 topn, 20 buckets") - s.checkForGlobalStatsWithOpts(c, tk, "global", 10, 20) // use new options - s.checkForGlobalStatsWithOpts(c, tk, "p0", 10, 20) - s.checkForGlobalStatsWithOpts(c, tk, "p1", 1, 50) + s.checkForGlobalStatsWithOpts(c, tk, "test_gstats_opt2", "global", 10, 20) // use new options + s.checkForGlobalStatsWithOpts(c, tk, "test_gstats_opt2", "p0", 10, 20) + s.checkForGlobalStatsWithOpts(c, tk, "test_gstats_opt2", "p1", 1, 50) tk.MustExec("analyze table test_gstats_opt2 partition p1 with 100 topn, 200 buckets") - s.checkForGlobalStatsWithOpts(c, tk, "global", 100, 200) - s.checkForGlobalStatsWithOpts(c, tk, "p0", 10, 20) - s.checkForGlobalStatsWithOpts(c, tk, "p1", 100, 200) + s.checkForGlobalStatsWithOpts(c, tk, "test_gstats_opt2", "global", 100, 200) + s.checkForGlobalStatsWithOpts(c, tk, "test_gstats_opt2", "p0", 10, 20) + s.checkForGlobalStatsWithOpts(c, tk, "test_gstats_opt2", "p1", 100, 200) tk.MustExec("analyze table test_gstats_opt2 partition p0 with 20 topn") // change back to 20 topn - s.checkForGlobalStatsWithOpts(c, tk, "global", 20, 256) - s.checkForGlobalStatsWithOpts(c, tk, "p0", 20, 256) - s.checkForGlobalStatsWithOpts(c, tk, "p1", 100, 200) + s.checkForGlobalStatsWithOpts(c, tk, "test_gstats_opt2", "global", 20, 256) + s.checkForGlobalStatsWithOpts(c, tk, "test_gstats_opt2", "p0", 20, 256) + s.checkForGlobalStatsWithOpts(c, tk, "test_gstats_opt2", "p1", 100, 200) } func (s *testStatsSuite) TestGlobalStatsHealthy(c *C) { From 0398c3f359f188cd0d990d731e4f87440a0992fd Mon Sep 17 00:00:00 2001 From: Shenghui Wu <793703860@qq.com> Date: Thu, 5 Aug 2021 16:21:13 +0800 Subject: [PATCH 10/26] executor: add some simple tests to cover unparallel HashAgg (#26753) --- 
executor/aggregate_test.go | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/executor/aggregate_test.go b/executor/aggregate_test.go index a0d14cc7e90dc..df3be90097f64 100644 --- a/executor/aggregate_test.go +++ b/executor/aggregate_test.go @@ -1471,7 +1471,7 @@ func (s *testSerialSuite) TestAggInDisk(c *C) { tk.MustExec("drop table if exists t1") tk.MustExec("create table t(a int)") sql := "insert into t values (0)" - for i := 1; i <= 300; i++ { + for i := 1; i <= 200; i++ { sql += fmt.Sprintf(",(%v)", i) } sql += ";" @@ -1488,4 +1488,15 @@ func (s *testSerialSuite) TestAggInDisk(c *C) { strings.Contains(disk, "Bytes"), IsTrue) } } + + // Add code cover + // Test spill chunk. Add a line to avoid tmp spill chunk is always full. + tk.MustExec("insert into t values(0)") + tk.MustQuery("select sum(tt.b) from ( select /*+ HASH_AGG() */ avg(t1.a) as b from t t1 join t t2 group by t1.a, t2.a) as tt").Check( + testkit.Rows("4040100.0000")) + // Test no groupby and no data. 
+ tk.MustExec("drop table t;") + tk.MustExec("create table t(c int, c1 int);") + tk.MustQuery("select /*+ HASH_AGG() */ count(c) from t;").Check(testkit.Rows("0")) + tk.MustQuery("select /*+ HASH_AGG() */ count(c) from t group by c1;").Check(testkit.Rows()) } From 8ebf6c316b7f5389e99a878789ff8fb71868c085 Mon Sep 17 00:00:00 2001 From: Kenan Yao Date: Thu, 5 Aug 2021 17:21:13 +0800 Subject: [PATCH 11/26] planner: avoid unnecessary optimizer warning when sql_select_limit is set (#26928) --- planner/core/integration_test.go | 1 + planner/optimize.go | 8 ++++---- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/planner/core/integration_test.go b/planner/core/integration_test.go index 5a4613e42ec29..e769cf53d8c09 100644 --- a/planner/core/integration_test.go +++ b/planner/core/integration_test.go @@ -2212,6 +2212,7 @@ func (s *testIntegrationSuite) TestSelectLimit(c *C) { // normal test tk.MustExec("set @@session.sql_select_limit=1") result := tk.MustQuery("select * from t order by a") + c.Assert(tk.Se.GetSessionVars().StmtCtx.GetWarnings(), HasLen, 0) result.Check(testkit.Rows("1")) result = tk.MustQuery("select * from t order by a limit 2") result.Check(testkit.Rows("1", "1")) diff --git a/planner/optimize.go b/planner/optimize.go index 80225befe1b42..39d1e78ef8857 100644 --- a/planner/optimize.go +++ b/planner/optimize.go @@ -127,10 +127,6 @@ func Optimize(ctx context.Context, sctx sessionctx.Context, node ast.Node, is in if !ok { useBinding = false } - if useBinding && sessVars.SelectLimit != math.MaxUint64 { - sessVars.StmtCtx.AppendWarning(errors.New("sql_select_limit is set, ignore SQL bindings")) - useBinding = false - } var ( bindRecord *bindinfo.BindRecord scope string @@ -142,6 +138,10 @@ func Optimize(ctx context.Context, sctx sessionctx.Context, node ast.Node, is in useBinding = false } } + if useBinding && sessVars.SelectLimit != math.MaxUint64 { + sessVars.StmtCtx.AppendWarning(errors.New("sql_select_limit is set, ignore SQL bindings")) + 
useBinding = false + } var names types.NameSlice var bestPlan, bestPlanFromBind plannercore.Plan From 3cda7d0a72b6a8332329d0c0ba69b75ae1c85c7d Mon Sep 17 00:00:00 2001 From: Mattias Jonsson Date: Thu, 5 Aug 2021 12:03:12 +0200 Subject: [PATCH 12/26] *: insert of invalid timestamp succeeded (#26584) --- ddl/column_type_change_test.go | 4 ++-- executor/index_lookup_join.go | 5 +++++ executor/insert_test.go | 12 ++++++++++++ sessionctx/stmtctx/stmtctx.go | 4 +--- table/column.go | 21 ++++++++++----------- types/datum.go | 14 ++++---------- 6 files changed, 34 insertions(+), 26 deletions(-) diff --git a/ddl/column_type_change_test.go b/ddl/column_type_change_test.go index 6c7c5595f35ba..57a4e3d167a21 100644 --- a/ddl/column_type_change_test.go +++ b/ddl/column_type_change_test.go @@ -2175,8 +2175,8 @@ func (s *testColumnTypeChangeSuite) TestCastDateToTimestampInReorgAttribute(c *C s.dom.DDL().(ddl.DDLForTest).SetHook(hook) tk.MustExec("alter table t modify column a TIMESTAMP NULL DEFAULT '2021-04-28 03:35:11' FIRST") - c.Assert(checkErr1.Error(), Equals, "[types:1292]Incorrect datetime value: '3977-02-22 00:00:00'") - c.Assert(checkErr2.Error(), Equals, "[types:1292]Incorrect datetime value: '3977-02-22 00:00:00'") + c.Assert(checkErr1.Error(), Equals, "[types:1292]Incorrect timestamp value: '3977-02-22'") + c.Assert(checkErr2.Error(), Equals, "[types:1292]Incorrect timestamp value: '3977-02-22'") tk.MustExec("drop table if exists t") } diff --git a/executor/index_lookup_join.go b/executor/index_lookup_join.go index 6f62fcff339cb..d617cddf77bd7 100644 --- a/executor/index_lookup_join.go +++ b/executor/index_lookup_join.go @@ -517,6 +517,11 @@ func (iw *innerWorker) constructLookupContent(task *lookUpJoinTask) ([]*indexJoi for rowIdx := 0; rowIdx < numRows; rowIdx++ { dLookUpKey, dHashKey, err := iw.constructDatumLookupKey(task, chkIdx, rowIdx) if err != nil { + if terror.ErrorEqual(err, types.ErrWrongValue) { + // We ignore rows with invalid datetime. 
+ task.encodedLookUpKeys[chkIdx].AppendNull(0) + continue + } return nil, err } if dHashKey == nil { diff --git a/executor/insert_test.go b/executor/insert_test.go index bde17e6c1218b..7b6978298849d 100644 --- a/executor/insert_test.go +++ b/executor/insert_test.go @@ -336,6 +336,18 @@ func (s *testSuite3) TestInsertWrongValueForField(c *C) { tk.MustExec(`create table t (a year);`) _, err = tk.Exec(`insert into t values(2156);`) c.Assert(err.Error(), Equals, `[types:8033]invalid year`) + + tk.MustExec(`DROP TABLE IF EXISTS ts`) + tk.MustExec(`CREATE TABLE ts (id int DEFAULT NULL, time1 TIMESTAMP NULL DEFAULT NULL)`) + tk.MustExec(`SET @@sql_mode=''`) + tk.MustExec(`INSERT INTO ts (id, time1) VALUES (1, TIMESTAMP '1018-12-23 00:00:00')`) + tk.MustQuery(`SHOW WARNINGS`).Check(testkit.Rows(`Warning 1292 Incorrect timestamp value: '1018-12-23 00:00:00'`)) + tk.MustQuery(`SELECT * FROM ts ORDER BY id`).Check(testkit.Rows(`1 0000-00-00 00:00:00`)) + + tk.MustExec(`SET @@sql_mode='STRICT_TRANS_TABLES'`) + _, err = tk.Exec(`INSERT INTO ts (id, time1) VALUES (2, TIMESTAMP '1018-12-24 00:00:00')`) + c.Assert(err.Error(), Equals, `[table:1292]Incorrect timestamp value: '1018-12-24 00:00:00' for column 'time1' at row 1`) + tk.MustExec(`DROP TABLE ts`) } func (s *testSuite3) TestInsertValueForCastDecimalField(c *C) { diff --git a/sessionctx/stmtctx/stmtctx.go b/sessionctx/stmtctx/stmtctx.go index 97565705bedc4..048e24827ee34 100644 --- a/sessionctx/stmtctx/stmtctx.go +++ b/sessionctx/stmtctx/stmtctx.go @@ -63,9 +63,7 @@ type StatementContext struct { // IsDDLJobInQueue is used to mark whether the DDL job is put into the queue. // If IsDDLJobInQueue is true, it means the DDL job is in the queue of storage, and it can be handled by the DDL worker. - IsDDLJobInQueue bool - // InReorgAttribute is indicated for cast function that the transition is a kind of reorg process. 
- InReorgAttribute bool + IsDDLJobInQueue bool InInsertStmt bool InUpdateStmt bool InDeleteStmt bool diff --git a/table/column.go b/table/column.go index 843ab857a1fd2..6b9c2996b7509 100644 --- a/table/column.go +++ b/table/column.go @@ -224,11 +224,13 @@ func handleZeroDatetime(ctx sessionctx.Context, col *model.ColumnInfo, casted ty ignoreErr := sc.DupKeyAsWarning + // Timestamp in MySQL is since EPOCH 1970-01-01 00:00:00 UTC and can by definition not have invalid dates! + // Zero date is special for MySQL timestamp and *NOT* 1970-01-01 00:00:00, but 0000-00-00 00:00:00! // in MySQL 8.0, the Timestamp's case is different to Datetime/Date, as shown below: // // | | NZD | NZD|ST | ELSE | ELSE|ST | // | ------------ | ----------------- | ------- | ----------------- | -------- | - // | `0000-00-01` | Success + Warning | Error | Success + Warning | Error | + // | `0000-00-01` | Truncate + Warning| Error | Truncate + Warning| Error | // | `0000-00-00` | Success + Warning | Error | Success | Success | // // * **NZD**: NO_ZERO_DATE_MODE @@ -273,21 +275,13 @@ func handleZeroDatetime(ctx sessionctx.Context, col *model.ColumnInfo, casted ty // CastValue casts a value based on column type. // If forceIgnoreTruncate is true, truncated errors will be ignored. -// If returnOverflow is true, don't handle overflow errors in this function. +// If returnErr is true, directly return any conversion errors. // It's safe now and it's the same as the behavior of select statement. // Set it to true only in FillVirtualColumnValue and UnionScanExec.Next() // If the handle of err is changed latter, the behavior of forceIgnoreTruncate also need to change. // TODO: change the third arg to TypeField. Not pass ColumnInfo. func CastValue(ctx sessionctx.Context, val types.Datum, col *model.ColumnInfo, returnErr, forceIgnoreTruncate bool) (casted types.Datum, err error) { sc := ctx.GetSessionVars().StmtCtx - // Set the reorg attribute for cast value functionality. 
- if col.ChangeStateInfo != nil { - origin := ctx.GetSessionVars().StmtCtx.InReorgAttribute - ctx.GetSessionVars().StmtCtx.InReorgAttribute = true - defer func() { - ctx.GetSessionVars().StmtCtx.InReorgAttribute = origin - }() - } casted, err = val.ConvertTo(sc, &col.FieldType) // TODO: make sure all truncate errors are handled by ConvertTo. if returnErr && err != nil { @@ -302,7 +296,12 @@ func CastValue(ctx sessionctx.Context, val types.Datum, col *model.ColumnInfo, r } else if (sc.InInsertStmt || sc.InUpdateStmt) && !casted.IsNull() && (val.Kind() != types.KindMysqlTime || !val.GetMysqlTime().IsZero()) && (col.Tp == mysql.TypeDate || col.Tp == mysql.TypeDatetime || col.Tp == mysql.TypeTimestamp) { - if innCasted, exit, innErr := handleZeroDatetime(ctx, col, casted, val.GetString(), types.ErrWrongValue.Equal(err)); exit { + str, err1 := val.ToString() + if err1 != nil { + logutil.BgLogger().Warn("Datum ToString failed", zap.Stringer("Datum", val), zap.Error(err1)) + str = val.GetString() + } + if innCasted, exit, innErr := handleZeroDatetime(ctx, col, casted, str, types.ErrWrongValue.Equal(err)); exit { return innCasted, innErr } } diff --git a/types/datum.go b/types/datum.go index 93172fc1117a1..dd2d0557099b8 100644 --- a/types/datum.go +++ b/types/datum.go @@ -1128,16 +1128,10 @@ func (d *Datum) convertToMysqlTimestamp(sc *stmtctx.StatementContext, target *Fi } switch d.k { case KindMysqlTime: - // `select timestamp(cast("1000-01-02 23:59:59" as date)); ` casts usage will succeed. - // Alter datetime("1000-01-02 23:59:59") to timestamp will error. 
- if sc.InReorgAttribute { - t, err = d.GetMysqlTime().Convert(sc, target.Tp) - if err != nil { - ret.SetMysqlTime(t) - return ret, errors.Trace(ErrWrongValue.GenWithStackByArgs(DateTimeStr, t.String())) - } - } else { - t = d.GetMysqlTime() + t, err = d.GetMysqlTime().Convert(sc, target.Tp) + if err != nil { + ret.SetMysqlTime(ZeroTimestamp) + return ret, errors.Trace(ErrWrongValue.GenWithStackByArgs(TimestampStr, t.String())) } t, err = t.RoundFrac(sc, fsp) case KindMysqlDuration: From c08de09173bb76051dd4d684d8968c2fcb88d611 Mon Sep 17 00:00:00 2001 From: Chengpeng Yan <41809508+Reminiscent@users.noreply.github.com> Date: Thu, 5 Aug 2021 19:31:13 +0800 Subject: [PATCH 13/26] planner: show binding information in explain format = 'verbose' (#26930) --- bindinfo/bind_test.go | 7 +++---- executor/executor.go | 1 + planner/optimize.go | 4 ++-- sessionctx/stmtctx/stmtctx.go | 3 +++ 4 files changed, 9 insertions(+), 6 deletions(-) diff --git a/bindinfo/bind_test.go b/bindinfo/bind_test.go index a1e53c7f696ac..1e2281ee20438 100644 --- a/bindinfo/bind_test.go +++ b/bindinfo/bind_test.go @@ -2036,10 +2036,9 @@ func (s *testSuite) TestExplainShowBindSQL(c *C) { "select * from `test` . `t` SELECT * FROM `test`.`t` USE INDEX (`a`)", )) - tk.MustExec("explain select * from t") - tk.MustQuery("show warnings").Check(testkit.Rows("Warning 1105 Using the bindSQL: SELECT * FROM `test`.`t` USE INDEX (`a`)")) - tk.MustExec("explain analyze select * from t") - tk.MustQuery("show warnings").Check(testkit.Rows("Warning 1105 Using the bindSQL: SELECT * FROM `test`.`t` USE INDEX (`a`)")) + tk.MustExec("explain format = 'verbose' select * from t") + tk.MustQuery("show warnings").Check(testkit.Rows("Note 1105 Using the bindSQL: SELECT * FROM `test`.`t` USE INDEX (`a`)")) + // explain analyze do not support verbose yet. 
} func (s *testSuite) TestDMLIndexHintBind(c *C) { diff --git a/executor/executor.go b/executor/executor.go index 8a408911915cd..12cab3e4a8866 100644 --- a/executor/executor.go +++ b/executor/executor.go @@ -1701,6 +1701,7 @@ func ResetContextOfStmt(ctx sessionctx.Context, s ast.StmtNode) (err error) { if explainStmt, ok := s.(*ast.ExplainStmt); ok { sc.InExplainStmt = true sc.IgnoreExplainIDSuffix = (strings.ToLower(explainStmt.Format) == types.ExplainFormatBrief) + sc.InVerboseExplain = strings.ToLower(explainStmt.Format) == types.ExplainFormatVerbose s = explainStmt.Stmt } if _, ok := s.(*ast.ExplainForStmt); ok { diff --git a/planner/optimize.go b/planner/optimize.go index 39d1e78ef8857..64e3e8fd7ced6 100644 --- a/planner/optimize.go +++ b/planner/optimize.go @@ -186,8 +186,8 @@ func Optimize(ctx context.Context, sctx sessionctx.Context, node ast.Node, is in if err := setFoundInBinding(sctx, true); err != nil { logutil.BgLogger().Warn("set tidb_found_in_binding failed", zap.Error(err)) } - if _, ok := stmtNode.(*ast.ExplainStmt); ok { - sessVars.StmtCtx.AppendWarning(errors.Errorf("Using the bindSQL: %v", chosenBinding.BindSQL)) + if sessVars.StmtCtx.InVerboseExplain { + sessVars.StmtCtx.AppendNote(errors.Errorf("Using the bindSQL: %v", chosenBinding.BindSQL)) } } // Restore the hint to avoid changing the stmt node. diff --git a/sessionctx/stmtctx/stmtctx.go b/sessionctx/stmtctx/stmtctx.go index 048e24827ee34..23d4040e5c3be 100644 --- a/sessionctx/stmtctx/stmtctx.go +++ b/sessionctx/stmtctx/stmtctx.go @@ -180,6 +180,9 @@ type StatementContext struct { DiskTracker disk.Tracker LogOnExceed [2]memory.LogOnExceed } + + // InVerboseExplain indicates the statement is "explain format='verbose' ...". + InVerboseExplain bool } // StmtHints are SessionVars related sql hints. 
From 072cf2791e50461f6d9728efc44d184f575a8e81 Mon Sep 17 00:00:00 2001 From: tiancaiamao Date: Thu, 5 Aug 2021 19:39:12 +0800 Subject: [PATCH 14/26] *: fix some audit log error (#26767) --- bindinfo/handle.go | 8 +- executor/adapter.go | 1 + executor/bind.go | 7 ++ executor/compiler.go | 9 +++ executor/prepared.go | 14 +++- executor/trace.go | 14 ++++ plugin/integration_test.go | 162 +++++++++++++++++++++++++++++++++++++ plugin/main_test.go | 9 ++- 8 files changed, 219 insertions(+), 5 deletions(-) create mode 100644 plugin/integration_test.go diff --git a/bindinfo/handle.go b/bindinfo/handle.go index 530952da9038a..21abe241151f4 100644 --- a/bindinfo/handle.go +++ b/bindinfo/handle.go @@ -697,7 +697,13 @@ func getHintsForSQL(sctx sessionctx.Context, sql string) (string, error) { rs, err := sctx.(sqlexec.SQLExecutor).ExecuteInternal(context.TODO(), fmt.Sprintf("EXPLAIN FORMAT='hint' %s", sql)) sctx.GetSessionVars().UsePlanBaselines = origVals if rs != nil { - defer terror.Call(rs.Close) + defer func() { + // Audit log is collected in Close(), set InRestrictedSQL to avoid 'create sql binding' been recorded as 'explain'. 
+ origin := sctx.GetSessionVars().InRestrictedSQL + sctx.GetSessionVars().InRestrictedSQL = true + terror.Call(rs.Close) + sctx.GetSessionVars().InRestrictedSQL = origin + }() } if err != nil { return "", err diff --git a/executor/adapter.go b/executor/adapter.go index 4527230880c23..b6bd6fe2f2033 100644 --- a/executor/adapter.go +++ b/executor/adapter.go @@ -829,6 +829,7 @@ func (a *ExecStmt) logAudit() { if sessVars.InRestrictedSQL { return } + err := plugin.ForeachPlugin(plugin.Audit, func(p *plugin.Plugin) error { audit := plugin.DeclareAuditManifest(p.Manifest) if audit.OnGeneralEvent != nil { diff --git a/executor/bind.go b/executor/bind.go index 4b66d46415316..42552f1dcdee1 100644 --- a/executor/bind.go +++ b/executor/bind.go @@ -77,6 +77,13 @@ func (e *SQLBindExec) dropSQLBind() error { } func (e *SQLBindExec) createSQLBind() error { + // For audit log, SQLBindExec execute "explain" statement internally, save and recover stmtctx + // is necessary to avoid 'create binding' been recorded as 'explain'. 
+ saveStmtCtx := e.ctx.GetSessionVars().StmtCtx + defer func() { + e.ctx.GetSessionVars().StmtCtx = saveStmtCtx + }() + bindInfo := bindinfo.Binding{ BindSQL: e.bindSQL, Charset: e.charset, diff --git a/executor/compiler.go b/executor/compiler.go index 511b516a96dfc..d3750ba641199 100644 --- a/executor/compiler.go +++ b/executor/compiler.go @@ -335,6 +335,9 @@ func GetStmtLabel(stmtNode ast.StmtNode) string { case *ast.DropIndexStmt: return "DropIndex" case *ast.DropTableStmt: + if x.IsView { + return "DropView" + } return "DropTable" case *ast.ExplainStmt: return "Explain" @@ -373,6 +376,12 @@ func GetStmtLabel(stmtNode ast.StmtNode) string { return "CreateBinding" case *ast.IndexAdviseStmt: return "IndexAdvise" + case *ast.DropBindingStmt: + return "DropBinding" + case *ast.TraceStmt: + return "Trace" + case *ast.ShutdownStmt: + return "Shutdown" } return "other" } diff --git a/executor/prepared.go b/executor/prepared.go index caa8f2dc09272..e6d2e197d00b8 100644 --- a/executor/prepared.go +++ b/executor/prepared.go @@ -87,6 +87,11 @@ type PrepareExec struct { ID uint32 ParamCount int Fields []*ast.ResultField + + // If it's generated from executing "prepare stmt from '...'", the process is parse -> plan -> executor + // If it's generated from the prepare protocol, the process is session.PrepareStmt -> NewPrepareExec + // They both generate a PrepareExec struct, but the second case needs to reset the statement context while the first already do that. + needReset bool } // NewPrepareExec creates a new PrepareExec. 
@@ -96,6 +101,7 @@ func NewPrepareExec(ctx sessionctx.Context, sqlTxt string) *PrepareExec { return &PrepareExec{ baseExecutor: base, sqlText: sqlTxt, + needReset: true, } } @@ -135,9 +141,11 @@ func (e *PrepareExec) Next(ctx context.Context, req *chunk.Chunk) error { } stmt := stmts[0] - err = ResetContextOfStmt(e.ctx, stmt) - if err != nil { - return err + if e.needReset { + err = ResetContextOfStmt(e.ctx, stmt) + if err != nil { + return err + } } var extractor paramMarkerExtractor diff --git a/executor/trace.go b/executor/trace.go index fd3ab5ac92223..f0faa25ed504b 100644 --- a/executor/trace.go +++ b/executor/trace.go @@ -64,6 +64,12 @@ func (e *TraceExec) Next(ctx context.Context, req *chunk.Chunk) error { return nil } + // For audit log plugin to set the correct statement. + stmtCtx := e.ctx.GetSessionVars().StmtCtx + defer func() { + e.ctx.GetSessionVars().StmtCtx = stmtCtx + }() + switch e.format { case core.TraceFormatLog: return e.nextTraceLog(ctx, se, req) @@ -130,6 +136,14 @@ func (e *TraceExec) nextRowJSON(ctx context.Context, se sqlexec.SQLExecutor, req } func (e *TraceExec) executeChild(ctx context.Context, se sqlexec.SQLExecutor) { + // For audit log plugin to log the statement correctly. + // Should be logged as 'explain ...', instead of the executed SQL. + vars := e.ctx.GetSessionVars() + origin := vars.InRestrictedSQL + vars.InRestrictedSQL = true + defer func() { + vars.InRestrictedSQL = origin + }() rs, err := se.ExecuteStmt(ctx, e.stmtNode) if err != nil { var errCode uint16 diff --git a/plugin/integration_test.go b/plugin/integration_test.go new file mode 100644 index 0000000000000..9015a9be276e0 --- /dev/null +++ b/plugin/integration_test.go @@ -0,0 +1,162 @@ +// Copyright 2021 PingCAP, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// See the License for the specific language governing permissions and +// limitations under the License. + +package plugin_test + +import ( + "bytes" + "context" + "fmt" + "strconv" + "testing" + + "github.com/pingcap/tidb/config" + "github.com/pingcap/tidb/domain" + "github.com/pingcap/tidb/kv" + "github.com/pingcap/tidb/plugin" + "github.com/pingcap/tidb/session" + "github.com/pingcap/tidb/sessionctx/variable" + "github.com/pingcap/tidb/store/mockstore" + "github.com/pingcap/tidb/testkit" + "github.com/stretchr/testify/require" + "github.com/tikv/client-go/v2/testutils" +) + +type testAuditLogSuite struct { + cluster testutils.Cluster + store kv.Storage + dom *domain.Domain + + bytes.Buffer +} + +func (s *testAuditLogSuite) setup(t *testing.T) { + pluginName := "test_audit_log" + pluginVersion := uint16(1) + pluginSign := pluginName + "-" + strconv.Itoa(int(pluginVersion)) + + config.UpdateGlobal(func(conf *config.Config) { + conf.Plugin.Load = pluginSign + }) + + // setup load test hook. 
+ loadOne := func(p *plugin.Plugin, dir string, pluginID plugin.ID) (manifest func() *plugin.Manifest, err error) { + return func() *plugin.Manifest { + m := &plugin.AuditManifest{ + Manifest: plugin.Manifest{ + Kind: plugin.Audit, + Name: pluginName, + Version: pluginVersion, + OnInit: OnInit, + OnShutdown: OnShutdown, + Validate: Validate, + }, + OnGeneralEvent: s.OnGeneralEvent, + OnConnectionEvent: OnConnectionEvent, + } + return plugin.ExportManifest(m) + }, nil + } + plugin.SetTestHook(loadOne) + + store, err := mockstore.NewMockStore( + mockstore.WithClusterInspector(func(c testutils.Cluster) { + mockstore.BootstrapWithSingleStore(c) + s.cluster = c + }), + ) + require.NoError(t, err) + s.store = store + session.SetSchemaLease(0) + session.DisableStats4Test() + + d, err := session.BootstrapSession(s.store) + require.NoError(t, err) + d.SetStatsUpdating(true) + s.dom = d +} + +func (s *testAuditLogSuite) teardown() { + s.dom.Close() + s.store.Close() +} + +func TestAuditLog(t *testing.T) { + var s testAuditLogSuite + s.setup(t) + defer s.teardown() + + var buf1 bytes.Buffer + tk := testkit.NewAsyncTestKit(t, s.store) + ctx := tk.OpenSession(context.Background(), "test") + buf1.WriteString("Use use `test`\n") // Workaround for the testing framework. 
+ + tk.MustExec(ctx, "use test") + buf1.WriteString("Use use `test`\n") + + tk.MustExec(ctx, "create table t (id int primary key, a int, b int unique)") + buf1.WriteString("CreateTable create table `t` ( `id` int primary key , `a` int , `b` int unique )\n") + + tk.MustExec(ctx, "create view v1 as select * from t where id > 2") + buf1.WriteString("CreateView create view `v1` as select * from `t` where `id` > ?\n") + + tk.MustExec(ctx, "drop view v1") + buf1.WriteString("DropView drop view `v1`\n") + + tk.MustExec(ctx, "create session binding for select * from t where b = 123 using select * from t ignore index(b) where b = 123") + buf1.WriteString("CreateBinding create session binding for select * from `t` where `b` = ? using select * from `t` where `b` = ?\n") + + tk.MustExec(ctx, "prepare mystmt from 'select ? as num from DUAL'") + buf1.WriteString("Prepare prepare `mystmt` from ?\n") + + tk.MustExec(ctx, "set @number = 5") + buf1.WriteString("Set set @number = ?\n") + + tk.MustExec(ctx, "execute mystmt using @number") + buf1.WriteString("Select select ? as `num` from dual\n") + + tk.MustQuery(ctx, "trace format = 'row' select * from t") + buf1.WriteString("Trace trace format = ? select * from `t`\n") + + tk.MustExec(ctx, "shutdown") + buf1.WriteString("Shutdown shutdown\n") + + require.Equal(t, buf1.String(), s.Buffer.String()) +} + +func Validate(ctx context.Context, m *plugin.Manifest) error { + return nil +} + +// OnInit implements TiDB plugin's OnInit SPI. +func OnInit(ctx context.Context, manifest *plugin.Manifest) error { + return nil +} + +// OnShutdown implements TiDB plugin's OnShutdown SPI. +func OnShutdown(ctx context.Context, manifest *plugin.Manifest) error { + return nil +} + +// OnGeneralEvent implements TiDB Audit plugin's OnGeneralEvent SPI. 
+func (s *testAuditLogSuite) OnGeneralEvent(ctx context.Context, sctx *variable.SessionVars, event plugin.GeneralEvent, cmd string) { + if sctx != nil { + normalized, _ := sctx.StmtCtx.SQLDigest() + fmt.Fprintln(&s.Buffer, sctx.StmtCtx.StmtType, normalized) + } +} + +// OnConnectionEvent implements TiDB Audit plugin's OnConnectionEvent SPI. +func OnConnectionEvent(ctx context.Context, event plugin.ConnectionEvent, info *variable.ConnectionInfo) error { + return nil +} diff --git a/plugin/main_test.go b/plugin/main_test.go index 108caec196390..25773ec64f3c5 100644 --- a/plugin/main_test.go +++ b/plugin/main_test.go @@ -22,5 +22,12 @@ import ( func TestMain(m *testing.M) { testbridge.WorkaroundGoCheckFlags() - goleak.VerifyTestMain(m) + + opts := []goleak.Option{ + goleak.IgnoreTopFunction("go.etcd.io/etcd/pkg/logutil.(*MergeLogger).outputLoop"), + goleak.IgnoreTopFunction("go.opencensus.io/stats/view.(*worker).start"), + goleak.IgnoreTopFunction("time.Sleep"), + } + + goleak.VerifyTestMain(m, opts...) 
} From cc1f990bacaf596f838fad2728a9951bb5a7a0d8 Mon Sep 17 00:00:00 2001 From: Arenatlx <314806019@qq.com> Date: Thu, 5 Aug 2021 23:41:13 +0800 Subject: [PATCH 15/26] planner: fix update panic when update in prepare and execute (#26759) --- executor/builder.go | 3 ++- executor/compiler.go | 4 +++- planner/core/common_plans.go | 4 ++++ planner/core/find_best_task.go | 12 ++++++++++- planner/core/logical_plan_builder.go | 4 ++-- planner/core/preprocess.go | 26 ++++++++++++++++++++++-- planner/optimize.go | 30 +++++++++++++++++----------- 7 files changed, 64 insertions(+), 19 deletions(-) diff --git a/executor/builder.go b/executor/builder.go index 1c6b18dc51d88..10e6b22148a65 100644 --- a/executor/builder.go +++ b/executor/builder.go @@ -1950,7 +1950,8 @@ func (b *executorBuilder) buildUpdate(v *plannercore.Update) Executor { if b.err != nil { return nil } - b.err = plannercore.CheckUpdateList(assignFlag, v) + // should use the new tblID2table, since the update's schema may have been changed in Execstmt. 
+ b.err = plannercore.CheckUpdateList(assignFlag, v, tblID2table) if b.err != nil { return nil } diff --git a/executor/compiler.go b/executor/compiler.go index d3750ba641199..8c310b004f310 100644 --- a/executor/compiler.go +++ b/executor/compiler.go @@ -55,7 +55,9 @@ func (c *Compiler) Compile(ctx context.Context, stmtNode ast.StmtNode) (*ExecStm } ret := &plannercore.PreprocessorReturn{} - if err := plannercore.Preprocess(c.Ctx, stmtNode, plannercore.WithPreprocessorReturn(ret)); err != nil { + pe := &plannercore.PreprocessExecuteISUpdate{ExecuteInfoSchemaUpdate: planner.GetExecuteForUpdateReadIS, Node: stmtNode} + err := plannercore.Preprocess(c.Ctx, stmtNode, plannercore.WithPreprocessorReturn(ret), plannercore.WithExecuteInfoSchemaUpdate(pe)) + if err != nil { return nil, err } stmtNode = plannercore.TryAddExtraLimit(c.Ctx, stmtNode) diff --git a/planner/core/common_plans.go b/planner/core/common_plans.go index 8f82ce46ddda1..2fa0878d5552e 100644 --- a/planner/core/common_plans.go +++ b/planner/core/common_plans.go @@ -282,6 +282,10 @@ func (e *Execute) OptimizePreparedPlan(ctx context.Context, sctx sessionctx.Cont preparedObj.Executor = nil // If the schema version has changed we need to preprocess it again, // if this time it failed, the real reason for the error is schema changed. + // Example: + // When running update in prepared statement's schema version distinguished from the one of execute statement + // We should reset the tableRefs in the prepared update statements, otherwise, the ast nodes still hold the old + // tableRefs columnInfo which will cause chaos in logic of trying point get plan. 
(should ban non-public column) ret := &PreprocessorReturn{InfoSchema: is} err := Preprocess(sctx, prepared.Stmt, InPrepare, WithPreprocessorReturn(ret)) if err != nil { diff --git a/planner/core/find_best_task.go b/planner/core/find_best_task.go index 9cd9f7cac3f31..305f884ec3085 100644 --- a/planner/core/find_best_task.go +++ b/planner/core/find_best_task.go @@ -649,6 +649,16 @@ func (ds *DataSource) skylinePruning(prop *property.PhysicalProperty) []*candida return candidates } +func (ds *DataSource) isPointGetConvertableSchema() bool { + for _, col := range ds.Columns { + // Only handle tables that all columns are public. + if col.State != model.StatePublic { + return false + } + } + return true +} + // findBestTask implements the PhysicalPlan interface. // It will enumerate all the available indices and choose a plan with least cost. func (ds *DataSource) findBestTask(prop *property.PhysicalProperty, planCounter *PlanCounterTp) (t task, cntPlan int64, err error) { @@ -745,7 +755,7 @@ func (ds *DataSource) findBestTask(prop *property.PhysicalProperty, planCounter p: dual, }, cntPlan, nil } - canConvertPointGet := len(path.Ranges) > 0 && path.StoreType == kv.TiKV + canConvertPointGet := len(path.Ranges) > 0 && path.StoreType == kv.TiKV && ds.isPointGetConvertableSchema() if canConvertPointGet && !path.IsIntHandlePath { // We simply do not build [batch] point get for prefix indexes. This can be optimized. canConvertPointGet = path.Index.Unique && !path.Index.HasPrefixIndex() diff --git a/planner/core/logical_plan_builder.go b/planner/core/logical_plan_builder.go index 838a97bf0359d..7d2dd60520ab4 100644 --- a/planner/core/logical_plan_builder.go +++ b/planner/core/logical_plan_builder.go @@ -4655,10 +4655,10 @@ type tblUpdateInfo struct { } // CheckUpdateList checks all related columns in updatable state. 
-func CheckUpdateList(assignFlags []int, updt *Update) error { +func CheckUpdateList(assignFlags []int, updt *Update, newTblID2Table map[int64]table.Table) error { updateFromOtherAlias := make(map[int64]tblUpdateInfo) for _, content := range updt.TblColPosInfos { - tbl := updt.tblID2Table[content.TblID] + tbl := newTblID2Table[content.TblID] flags := assignFlags[content.Start:content.End] var update, updatePK bool for i, col := range tbl.WritableCols() { diff --git a/planner/core/preprocess.go b/planner/core/preprocess.go index fff5467ead5cd..2efc7dedf5f3d 100644 --- a/planner/core/preprocess.go +++ b/planner/core/preprocess.go @@ -63,6 +63,13 @@ func WithPreprocessorReturn(ret *PreprocessorReturn) PreprocessOpt { } } +// WithExecuteInfoSchemaUpdate return a PreprocessOpt to update the `Execute` infoSchema under some conditions. +func WithExecuteInfoSchemaUpdate(pe *PreprocessExecuteISUpdate) PreprocessOpt { + return func(p *preprocessor) { + p.PreprocessExecuteISUpdate = pe + } +} + // TryAddExtraLimit trys to add an extra limit for SELECT or UNION statement when sql_select_limit is set. func TryAddExtraLimit(ctx sessionctx.Context, node ast.StmtNode) ast.StmtNode { if ctx.GetSessionVars().SelectLimit == math.MaxUint64 || ctx.GetSessionVars().InRestrictedSQL { @@ -143,6 +150,12 @@ type PreprocessorReturn struct { TxnScope string } +// PreprocessExecuteISUpdate is used to update information schema for special Execute statement in the preprocessor. +type PreprocessExecuteISUpdate struct { + ExecuteInfoSchemaUpdate func(node ast.Node, sctx sessionctx.Context) infoschema.InfoSchema + Node ast.Node +} + // preprocessor is an ast.Visitor that preprocess // ast Nodes parsed from parser. 
type preprocessor struct { @@ -157,6 +170,7 @@ type preprocessor struct { // values that may be returned *PreprocessorReturn + *PreprocessExecuteISUpdate err error } @@ -1596,9 +1610,17 @@ func (p *preprocessor) handleAsOfAndReadTS(node *ast.AsOfClause) { // - session variable // - transaction context func (p *preprocessor) ensureInfoSchema() infoschema.InfoSchema { - if p.InfoSchema == nil { - p.InfoSchema = p.ctx.GetInfoSchema().(infoschema.InfoSchema) + if p.InfoSchema != nil { + return p.InfoSchema + } + // `Execute` under some conditions need to see the latest information schema. + if p.PreprocessExecuteISUpdate != nil { + if newInfoSchema := p.ExecuteInfoSchemaUpdate(p.Node, p.ctx); newInfoSchema != nil { + p.InfoSchema = newInfoSchema + return p.InfoSchema + } } + p.InfoSchema = p.ctx.GetInfoSchema().(infoschema.InfoSchema) return p.InfoSchema } diff --git a/planner/optimize.go b/planner/optimize.go index 64e3e8fd7ced6..05ba86f3bbce7 100644 --- a/planner/optimize.go +++ b/planner/optimize.go @@ -77,6 +77,24 @@ func IsReadOnly(node ast.Node, vars *variable.SessionVars) bool { return ast.IsReadOnly(node) } +// GetExecuteForUpdateReadIS is used to check whether the statement is `execute` and target statement has a forUpdateRead flag. +// If so, we will return the latest information schema. +func GetExecuteForUpdateReadIS(node ast.Node, sctx sessionctx.Context) infoschema.InfoSchema { + if execStmt, isExecStmt := node.(*ast.ExecuteStmt); isExecStmt { + vars := sctx.GetSessionVars() + execID := execStmt.ExecID + if execStmt.Name != "" { + execID = vars.PreparedStmtNameToID[execStmt.Name] + } + if preparedPointer, ok := vars.PreparedStmts[execID]; ok { + if preparedObj, ok := preparedPointer.(*core.CachedPrepareStmt); ok && preparedObj.ForUpdateRead { + return domain.GetDomain(sctx).InfoSchema() + } + } + } + return nil +} + // Optimize does optimization and creates a Plan. // The node must be prepared first. 
func Optimize(ctx context.Context, sctx sessionctx.Context, node ast.Node, is infoschema.InfoSchema) (plannercore.Plan, types.NameSlice, error) { @@ -318,18 +336,6 @@ func optimize(ctx context.Context, sctx sessionctx.Context, node ast.Node, is in } sctx.GetSessionVars().RewritePhaseInfo.DurationRewrite = time.Since(beginRewrite) - if execPlan, ok := p.(*plannercore.Execute); ok { - execID := execPlan.ExecID - if execPlan.Name != "" { - execID = sctx.GetSessionVars().PreparedStmtNameToID[execPlan.Name] - } - if preparedPointer, ok := sctx.GetSessionVars().PreparedStmts[execID]; ok { - if preparedObj, ok := preparedPointer.(*core.CachedPrepareStmt); ok && preparedObj.ForUpdateRead { - is = domain.GetDomain(sctx).InfoSchema() - } - } - } - sctx.GetSessionVars().StmtCtx.Tables = builder.GetDBTableInfo() activeRoles := sctx.GetSessionVars().ActiveRoles // Check privilege. Maybe it's better to move this to the Preprocess, but From cdaf996f56ae5c14eab8c8f65b6de6cd474c93a6 Mon Sep 17 00:00:00 2001 From: MyonKeminta <9948422+MyonKeminta@users.noreply.github.com> Date: Fri, 6 Aug 2021 00:09:12 +0800 Subject: [PATCH 16/26] expression/builtin: Add tidb_decode_sql_digests function (#26787) --- executor/executor_test.go | 61 +++++++++++- executor/infoschema_reader.go | 13 +-- executor/show_test.go | 2 +- executor/utils.go | 178 ---------------------------------- executor/utils_test.go | 64 ------------ expression/builtin.go | 7 +- expression/builtin_info.go | 125 ++++++++++++++++++++++++ expression/errors.go | 1 + expression/util.go | 176 +++++++++++++++++++++++++++++++++ expression/util_test.go | 63 ++++++++++++ go.mod | 2 +- go.sum | 4 +- infoschema/tables_test.go | 25 ++++- 13 files changed, 458 insertions(+), 263 deletions(-) diff --git a/executor/executor_test.go b/executor/executor_test.go index 6c02d867feb54..6705459c48414 100644 --- a/executor/executor_test.go +++ b/executor/executor_test.go @@ -6690,7 +6690,7 @@ func (s *testClusterTableSuite) 
TestSQLDigestTextRetriever(c *C) { insertNormalized, insertDigest := parser.NormalizeDigest("insert into test_sql_digest_text_retriever values (1, 1)") _, updateDigest := parser.NormalizeDigest("update test_sql_digest_text_retriever set v = v + 1 where id = 1") - r := &executor.SQLDigestTextRetriever{ + r := &expression.SQLDigestTextRetriever{ SQLDigestsMap: map[string]string{ insertDigest.String(): "", updateDigest.String(): "", @@ -6702,6 +6702,61 @@ func (s *testClusterTableSuite) TestSQLDigestTextRetriever(c *C) { c.Assert(r.SQLDigestsMap[updateDigest.String()], Equals, "") } +func (s *testClusterTableSuite) TestFunctionDecodeSQLDigests(c *C) { + tk := testkit.NewTestKitWithInit(c, s.store) + c.Assert(tk.Se.Auth(&auth.UserIdentity{Username: "root", Hostname: "%"}, nil, nil), IsTrue) + tk.MustExec("set global tidb_enable_stmt_summary = 1") + tk.MustQuery("select @@global.tidb_enable_stmt_summary").Check(testkit.Rows("1")) + tk.MustExec("drop table if exists test_func_decode_sql_digests") + tk.MustExec("create table test_func_decode_sql_digests(id int primary key, v int)") + + q1 := "begin" + norm1, digest1 := parser.NormalizeDigest(q1) + q2 := "select @@tidb_current_ts" + norm2, digest2 := parser.NormalizeDigest(q2) + q3 := "select id, v from test_func_decode_sql_digests where id = 1 for update" + norm3, digest3 := parser.NormalizeDigest(q3) + + // TIDB_DECODE_SQL_DIGESTS function doesn't actually do "decoding", instead it queries `statements_summary` and it's + // variations for the corresponding statements. + // Execute the statements so that the queries will be saved into statements_summary table. + tk.MustExec(q1) + // Save the ts to query the transaction from tidb_trx. + ts, err := strconv.ParseUint(tk.MustQuery(q2).Rows()[0][0].(string), 10, 64) + c.Assert(err, IsNil) + c.Assert(ts, Greater, uint64(0)) + tk.MustExec(q3) + tk.MustExec("rollback") + + // Test statements truncating. 
+ decoded := fmt.Sprintf(`["%s","%s","%s"]`, norm1, norm2, norm3) + digests := fmt.Sprintf(`["%s","%s","%s"]`, digest1, digest2, digest3) + tk.MustQuery("select tidb_decode_sql_digests(?, 0)", digests).Check(testkit.Rows(decoded)) + // The three queries are shorter than truncate length, equal to truncate length and longer than truncate length respectively. + tk.MustQuery("select tidb_decode_sql_digests(?, ?)", digests, len(norm2)).Check(testkit.Rows( + "[\"begin\",\"select @@tidb_current_ts\",\"select `id` , `v` from `...\"]")) + + // Empty array. + tk.MustQuery("select tidb_decode_sql_digests('[]')").Check(testkit.Rows("[]")) + + // NULL + tk.MustQuery("select tidb_decode_sql_digests(null)").Check(testkit.Rows("")) + + // Array containing wrong types and not-existing digests (maps to null). + tk.MustQuery("select tidb_decode_sql_digests(?)", fmt.Sprintf(`["%s",1,null,"%s",{"a":1},[2],"%s","","abcde"]`, digest1, digest2, digest3)). + Check(testkit.Rows(fmt.Sprintf(`["%s",null,null,"%s",null,null,"%s",null,null]`, norm1, norm2, norm3))) + + // Not JSON array (throws warnings) + tk.MustQuery(`select tidb_decode_sql_digests('{"a":1}')`).Check(testkit.Rows("")) + tk.MustQuery(`show warnings`).Check(testkit.Rows(`Warning 1210 The argument can't be unmarshalled as JSON array: '{"a":1}'`)) + tk.MustQuery(`select tidb_decode_sql_digests('aabbccdd')`).Check(testkit.Rows("")) + tk.MustQuery(`show warnings`).Check(testkit.Rows(`Warning 1210 The argument can't be unmarshalled as JSON array: 'aabbccdd'`)) + + // Invalid argument count. 
+ tk.MustGetErrCode("select tidb_decode_sql_digests('a', 1, 2)", 1582) + tk.MustGetErrCode("select tidb_decode_sql_digests()", 1582) +} + func prepareLogs(c *C, logData []string, fileNames []string) { writeFile := func(file string, data string) { f, err := os.OpenFile(file, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0644) @@ -8338,9 +8393,9 @@ func (s *testSerialSuite) TestDeadlocksTable(c *C) { id1 := strconv.FormatUint(rec.ID, 10) id2 := strconv.FormatUint(rec2.ID, 10) - c.Assert(failpoint.Enable("github.com/pingcap/tidb/executor/sqlDigestRetrieverSkipRetrieveGlobal", "return"), IsNil) + c.Assert(failpoint.Enable("github.com/pingcap/tidb/expression/sqlDigestRetrieverSkipRetrieveGlobal", "return"), IsNil) defer func() { - c.Assert(failpoint.Disable("github.com/pingcap/tidb/executor/sqlDigestRetrieverSkipRetrieveGlobal"), IsNil) + c.Assert(failpoint.Disable("github.com/pingcap/tidb/expression/sqlDigestRetrieverSkipRetrieveGlobal"), IsNil) }() tk := testkit.NewTestKit(c, s.store) diff --git a/executor/infoschema_reader.go b/executor/infoschema_reader.go index 0096f1cc5a991..d29d523249d70 100644 --- a/executor/infoschema_reader.go +++ b/executor/infoschema_reader.go @@ -39,6 +39,7 @@ import ( "github.com/pingcap/tidb/domain" "github.com/pingcap/tidb/domain/infosync" "github.com/pingcap/tidb/errno" + "github.com/pingcap/tidb/expression" "github.com/pingcap/tidb/infoschema" "github.com/pingcap/tidb/kv" "github.com/pingcap/tidb/meta/autoid" @@ -2137,11 +2138,11 @@ func (e *tidbTrxTableRetriever) retrieve(ctx context.Context, sctx sessionctx.Co var res [][]types.Datum err = e.nextBatch(func(start, end int) error { // Before getting rows, collect the SQL digests that needs to be retrieved first. 
- var sqlRetriever *SQLDigestTextRetriever + var sqlRetriever *expression.SQLDigestTextRetriever for _, c := range e.columns { if c.Name.O == txninfo.CurrentSQLDigestTextStr { if sqlRetriever == nil { - sqlRetriever = NewSQLDigestTextRetriever() + sqlRetriever = expression.NewSQLDigestTextRetriever() } for i := start; i < end; i++ { @@ -2250,9 +2251,9 @@ func (r *dataLockWaitsTableRetriever) retrieve(ctx context.Context, sctx session } // Fetch the SQL Texts of the digests above if necessary. - var sqlRetriever *SQLDigestTextRetriever + var sqlRetriever *expression.SQLDigestTextRetriever if needSQLText { - sqlRetriever = NewSQLDigestTextRetriever() + sqlRetriever = expression.NewSQLDigestTextRetriever() for _, digest := range digests { if len(digest) > 0 { sqlRetriever.SQLDigestsMap[digest] = "" @@ -2390,11 +2391,11 @@ func (r *deadlocksTableRetriever) retrieve(ctx context.Context, sctx sessionctx. err = r.nextBatch(func(start, end int) error { // Before getting rows, collect the SQL digests that needs to be retrieved first. 
- var sqlRetriever *SQLDigestTextRetriever + var sqlRetriever *expression.SQLDigestTextRetriever for _, c := range r.columns { if c.Name.O == deadlockhistory.ColCurrentSQLDigestTextStr { if sqlRetriever == nil { - sqlRetriever = NewSQLDigestTextRetriever() + sqlRetriever = expression.NewSQLDigestTextRetriever() } idx, waitChainIdx := r.currentIdx, r.currentWaitChainIdx diff --git a/executor/show_test.go b/executor/show_test.go index afea20e638680..ff630bc5cae2a 100644 --- a/executor/show_test.go +++ b/executor/show_test.go @@ -1130,7 +1130,7 @@ func (s *testSuite5) TestShowBuiltin(c *C) { res := tk.MustQuery("show builtins;") c.Assert(res, NotNil) rows := res.Rows() - const builtinFuncNum = 272 + const builtinFuncNum = 273 c.Assert(builtinFuncNum, Equals, len(rows)) c.Assert("abs", Equals, rows[0][0].(string)) c.Assert("yearweek", Equals, rows[builtinFuncNum-1][0].(string)) diff --git a/executor/utils.go b/executor/utils.go index cf0eaeb6f0245..dcd2a394331d6 100644 --- a/executor/utils.go +++ b/executor/utils.go @@ -14,13 +14,7 @@ package executor import ( - "context" "strings" - - "github.com/pingcap/errors" - "github.com/pingcap/failpoint" - "github.com/pingcap/tidb/sessionctx" - "github.com/pingcap/tidb/util/sqlexec" ) // SetFromString constructs a slice of strings from a comma separated string. @@ -60,178 +54,6 @@ func deleteFromSet(set []string, value string) []string { return set } -// SQLDigestTextRetriever is used to find the normalized SQL statement text by SQL digests in statements_summary table. -// It's exported for test purposes. -type SQLDigestTextRetriever struct { - // SQLDigestsMap is the place to put the digests that's requested for getting SQL text and also the place to put - // the query result. - SQLDigestsMap map[string]string - - // Replace querying for test purposes. 
- mockLocalData map[string]string - mockGlobalData map[string]string - // There are two ways for querying information: 1) query specified digests by WHERE IN query, or 2) query all - // information to avoid the too long WHERE IN clause. If there are more than `fetchAllLimit` digests needs to be - // queried, the second way will be chosen; otherwise, the first way will be chosen. - fetchAllLimit int -} - -// NewSQLDigestTextRetriever creates a new SQLDigestTextRetriever. -func NewSQLDigestTextRetriever() *SQLDigestTextRetriever { - return &SQLDigestTextRetriever{ - SQLDigestsMap: make(map[string]string), - fetchAllLimit: 512, - } -} - -func (r *SQLDigestTextRetriever) runMockQuery(data map[string]string, inValues []interface{}) (map[string]string, error) { - if len(inValues) == 0 { - return data, nil - } - res := make(map[string]string, len(inValues)) - for _, digest := range inValues { - if text, ok := data[digest.(string)]; ok { - res[digest.(string)] = text - } - } - return res, nil -} - -// runFetchDigestQuery runs query to the system tables to fetch the kv mapping of SQL digests and normalized SQL texts -// of the given SQL digests, if `inValues` is given, or all these mappings otherwise. If `queryGlobal` is false, it -// queries information_schema.statements_summary and information_schema.statements_summary_history; otherwise, it -// queries the cluster version of these two tables. -func (r *SQLDigestTextRetriever) runFetchDigestQuery(ctx context.Context, sctx sessionctx.Context, queryGlobal bool, inValues []interface{}) (map[string]string, error) { - // If mock data is set, query the mock data instead of the real statements_summary tables. 
- if !queryGlobal && r.mockLocalData != nil { - return r.runMockQuery(r.mockLocalData, inValues) - } else if queryGlobal && r.mockGlobalData != nil { - return r.runMockQuery(r.mockGlobalData, inValues) - } - - exec, ok := sctx.(sqlexec.RestrictedSQLExecutor) - if !ok { - return nil, errors.New("restricted sql can't be executed in this context") - } - - // Information in statements_summary will be periodically moved to statements_summary_history. Union them together - // to avoid missing information when statements_summary is just cleared. - stmt := "select digest, digest_text from information_schema.statements_summary union distinct " + - "select digest, digest_text from information_schema.statements_summary_history" - if queryGlobal { - stmt = "select digest, digest_text from information_schema.cluster_statements_summary union distinct " + - "select digest, digest_text from information_schema.cluster_statements_summary_history" - } - // Add the where clause if `inValues` is specified. - if len(inValues) > 0 { - stmt += " where digest in (" + strings.Repeat("%?,", len(inValues)-1) + "%?)" - } - - stmtNode, err := exec.ParseWithParams(ctx, stmt, inValues...) - if err != nil { - return nil, err - } - rows, _, err := exec.ExecRestrictedStmt(ctx, stmtNode) - if err != nil { - return nil, err - } - - res := make(map[string]string, len(rows)) - for _, row := range rows { - res[row.GetString(0)] = row.GetString(1) - } - return res, nil -} - -func (r *SQLDigestTextRetriever) updateDigestInfo(queryResult map[string]string) { - for digest, text := range r.SQLDigestsMap { - if len(text) > 0 { - // The text of this digest is already known - continue - } - sqlText, ok := queryResult[digest] - if ok { - r.SQLDigestsMap[digest] = sqlText - } - } -} - -// RetrieveLocal tries to retrieve the SQL text of the SQL digests from local information. 
-func (r *SQLDigestTextRetriever) RetrieveLocal(ctx context.Context, sctx sessionctx.Context) error { - if len(r.SQLDigestsMap) == 0 { - return nil - } - - var queryResult map[string]string - if len(r.SQLDigestsMap) <= r.fetchAllLimit { - inValues := make([]interface{}, 0, len(r.SQLDigestsMap)) - for key := range r.SQLDigestsMap { - inValues = append(inValues, key) - } - var err error - queryResult, err = r.runFetchDigestQuery(ctx, sctx, false, inValues) - if err != nil { - return errors.Trace(err) - } - - if len(queryResult) == len(r.SQLDigestsMap) { - r.SQLDigestsMap = queryResult - return nil - } - } else { - var err error - queryResult, err = r.runFetchDigestQuery(ctx, sctx, false, nil) - if err != nil { - return errors.Trace(err) - } - } - - r.updateDigestInfo(queryResult) - return nil -} - -// RetrieveGlobal tries to retrieve the SQL text of the SQL digests from the information of the whole cluster. -func (r *SQLDigestTextRetriever) RetrieveGlobal(ctx context.Context, sctx sessionctx.Context) error { - err := r.RetrieveLocal(ctx, sctx) - if err != nil { - return errors.Trace(err) - } - - // In some unit test environments it's unable to retrieve global info, and this function blocks it for tens of - // seconds, which wastes much time during unit test. In this case, enable this failpoint to bypass retrieving - // globally. 
- failpoint.Inject("sqlDigestRetrieverSkipRetrieveGlobal", func() { - failpoint.Return(nil) - }) - - var unknownDigests []interface{} - for k, v := range r.SQLDigestsMap { - if len(v) == 0 { - unknownDigests = append(unknownDigests, k) - } - } - - if len(unknownDigests) == 0 { - return nil - } - - var queryResult map[string]string - if len(r.SQLDigestsMap) <= r.fetchAllLimit { - queryResult, err = r.runFetchDigestQuery(ctx, sctx, true, unknownDigests) - if err != nil { - return errors.Trace(err) - } - } else { - queryResult, err = r.runFetchDigestQuery(ctx, sctx, true, nil) - if err != nil { - return errors.Trace(err) - } - } - - r.updateDigestInfo(queryResult) - return nil -} - // batchRetrieverHelper is a helper for batch returning data with known total rows. This helps implementing memtable // retrievers of some information_schema tables. Initialize `batchSize` and `totalRows` fields to use it. type batchRetrieverHelper struct { diff --git a/executor/utils_test.go b/executor/utils_test.go index f22155c7c1c46..4bba62668d572 100644 --- a/executor/utils_test.go +++ b/executor/utils_test.go @@ -14,8 +14,6 @@ package executor import ( - "context" - . "github.com/pingcap/check" "github.com/pingcap/errors" ) @@ -92,65 +90,3 @@ func (s *pkgTestSuite) TestBatchRetrieverHelper(c *C) { c.Assert(rangeStarts, DeepEquals, []int{0}) c.Assert(rangeEnds, DeepEquals, []int{10}) } - -func (s *pkgTestSuite) TestSQLDigestTextRetriever(c *C) { - // Create a fake session as the argument to the retriever, though it's actually not used when mock data is set. 
- - r := NewSQLDigestTextRetriever() - clearResult := func() { - r.SQLDigestsMap = map[string]string{ - "digest1": "", - "digest2": "", - "digest3": "", - "digest4": "", - "digest5": "", - } - } - clearResult() - r.mockLocalData = map[string]string{ - "digest1": "text1", - "digest2": "text2", - "digest6": "text6", - } - r.mockGlobalData = map[string]string{ - "digest2": "text2", - "digest3": "text3", - "digest4": "text4", - "digest7": "text7", - } - - expectedLocalResult := map[string]string{ - "digest1": "text1", - "digest2": "text2", - "digest3": "", - "digest4": "", - "digest5": "", - } - expectedGlobalResult := map[string]string{ - "digest1": "text1", - "digest2": "text2", - "digest3": "text3", - "digest4": "text4", - "digest5": "", - } - - err := r.RetrieveLocal(context.Background(), nil) - c.Assert(err, IsNil) - c.Assert(r.SQLDigestsMap, DeepEquals, expectedLocalResult) - clearResult() - - err = r.RetrieveGlobal(context.Background(), nil) - c.Assert(err, IsNil) - c.Assert(r.SQLDigestsMap, DeepEquals, expectedGlobalResult) - clearResult() - - r.fetchAllLimit = 1 - err = r.RetrieveLocal(context.Background(), nil) - c.Assert(err, IsNil) - c.Assert(r.SQLDigestsMap, DeepEquals, expectedLocalResult) - clearResult() - - err = r.RetrieveGlobal(context.Background(), nil) - c.Assert(err, IsNil) - c.Assert(r.SQLDigestsMap, DeepEquals, expectedGlobalResult) -} diff --git a/expression/builtin.go b/expression/builtin.go index f5b6caedc03fb..3625602eb8fea 100644 --- a/expression/builtin.go +++ b/expression/builtin.go @@ -896,9 +896,10 @@ var funcs = map[string]functionClass{ // TiDB internal function. ast.TiDBDecodeKey: &tidbDecodeKeyFunctionClass{baseFunctionClass{ast.TiDBDecodeKey, 1, 1}}, // This function is used to show tidb-server version info. 
- ast.TiDBVersion: &tidbVersionFunctionClass{baseFunctionClass{ast.TiDBVersion, 0, 0}}, - ast.TiDBIsDDLOwner: &tidbIsDDLOwnerFunctionClass{baseFunctionClass{ast.TiDBIsDDLOwner, 0, 0}}, - ast.TiDBDecodePlan: &tidbDecodePlanFunctionClass{baseFunctionClass{ast.TiDBDecodePlan, 1, 1}}, + ast.TiDBVersion: &tidbVersionFunctionClass{baseFunctionClass{ast.TiDBVersion, 0, 0}}, + ast.TiDBIsDDLOwner: &tidbIsDDLOwnerFunctionClass{baseFunctionClass{ast.TiDBIsDDLOwner, 0, 0}}, + ast.TiDBDecodePlan: &tidbDecodePlanFunctionClass{baseFunctionClass{ast.TiDBDecodePlan, 1, 1}}, + ast.TiDBDecodeSQLDigests: &tidbDecodeSQLDigestsFunctionClass{baseFunctionClass{ast.TiDBDecodeSQLDigests, 1, 2}}, // TiDB Sequence function. ast.NextVal: &nextValFunctionClass{baseFunctionClass{ast.NextVal, 1, 1}}, diff --git a/expression/builtin_info.go b/expression/builtin_info.go index 7be431165bf21..33c1ee2ec79b9 100644 --- a/expression/builtin_info.go +++ b/expression/builtin_info.go @@ -18,8 +18,11 @@ package expression import ( + "context" + "encoding/json" "sort" "strings" + "time" "github.com/pingcap/errors" "github.com/pingcap/parser/model" @@ -52,6 +55,7 @@ var ( _ functionClass = &tidbIsDDLOwnerFunctionClass{} _ functionClass = &tidbDecodePlanFunctionClass{} _ functionClass = &tidbDecodeKeyFunctionClass{} + _ functionClass = &tidbDecodeSQLDigestsFunctionClass{} _ functionClass = &nextValFunctionClass{} _ functionClass = &lastValFunctionClass{} _ functionClass = &setValFunctionClass{} @@ -71,6 +75,7 @@ var ( _ builtinFunc = &builtinTiDBVersionSig{} _ builtinFunc = &builtinRowCountSig{} _ builtinFunc = &builtinTiDBDecodeKeySig{} + _ builtinFunc = &builtinTiDBDecodeSQLDigestsSig{} _ builtinFunc = &builtinNextValSig{} _ builtinFunc = &builtinLastValSig{} _ builtinFunc = &builtinSetValSig{} @@ -771,6 +776,126 @@ func (k TiDBDecodeKeyFunctionKeyType) String() string { // TiDBDecodeKeyFunctionKey is used to identify the decoder function in context. 
const TiDBDecodeKeyFunctionKey TiDBDecodeKeyFunctionKeyType = 0 +type tidbDecodeSQLDigestsFunctionClass struct { + baseFunctionClass +} + +func (c *tidbDecodeSQLDigestsFunctionClass) getFunction(ctx sessionctx.Context, args []Expression) (builtinFunc, error) { + if err := c.verifyArgs(args); err != nil { + return nil, err + } + + var argTps []types.EvalType + if len(args) > 1 { + argTps = []types.EvalType{types.ETString, types.ETInt} + } else { + argTps = []types.EvalType{types.ETString} + } + bf, err := newBaseBuiltinFuncWithTp(ctx, c.funcName, args, types.ETString, argTps...) + if err != nil { + return nil, err + } + sig := &builtinTiDBDecodeSQLDigestsSig{bf} + return sig, nil +} + +type builtinTiDBDecodeSQLDigestsSig struct { + baseBuiltinFunc +} + +func (b *builtinTiDBDecodeSQLDigestsSig) Clone() builtinFunc { + newSig := &builtinTiDBDecodeSQLDigestsSig{} + newSig.cloneFrom(&b.baseBuiltinFunc) + return newSig +} + +func (b *builtinTiDBDecodeSQLDigestsSig) evalString(row chunk.Row) (string, bool, error) { + args := b.getArgs() + digestsStr, isNull, err := args[0].EvalString(b.ctx, row) + if err != nil { + return "", true, err + } + if isNull { + return "", true, nil + } + + stmtTruncateLength := int64(0) + if len(args) > 1 { + stmtTruncateLength, isNull, err = args[1].EvalInt(b.ctx, row) + if err != nil { + return "", true, err + } + if isNull { + stmtTruncateLength = 0 + } + } + + var digests []interface{} + err = json.Unmarshal([]byte(digestsStr), &digests) + if err != nil { + const errMsgMaxLength = 32 + if len(digestsStr) > errMsgMaxLength { + digestsStr = digestsStr[:errMsgMaxLength] + "..." + } + b.ctx.GetSessionVars().StmtCtx.AppendWarning(errIncorrectArgs.GenWithStack("The argument can't be unmarshalled as JSON array: '%s'", digestsStr)) + return "", true, nil + } + + // Query the SQL Statements by digests. 
+ retriever := NewSQLDigestTextRetriever() + for _, item := range digests { + if item != nil { + digest, ok := item.(string) + if ok { + retriever.SQLDigestsMap[digest] = "" + } + } + } + + // Querying may take some time and it takes a context.Context as argument, which is not available here. + // We simply create a context with a timeout here. + timeout := time.Duration(b.ctx.GetSessionVars().MaxExecutionTime) * time.Millisecond + if timeout == 0 || timeout > 20*time.Second { + timeout = 20 * time.Second + } + ctx, cancel := context.WithTimeout(context.Background(), timeout) + defer cancel() + err = retriever.RetrieveGlobal(ctx, b.ctx) + if err != nil { + if errors.Cause(err) == context.DeadlineExceeded || errors.Cause(err) == context.Canceled { + return "", true, errUnknown.GenWithStack("Retrieving cancelled internally with error: %v", err) + } + + b.ctx.GetSessionVars().StmtCtx.AppendWarning(errUnknown.GenWithStack("Retrieving statements information failed with error: %v", err)) + return "", true, nil + } + + // Collect the result. + result := make([]interface{}, len(digests)) + for i, item := range digests { + if item == nil { + continue + } + if digest, ok := item.(string); ok { + if stmt, ok := retriever.SQLDigestsMap[digest]; ok && len(stmt) > 0 { + // Truncate too-long statements if necessary. + if stmtTruncateLength > 0 && int64(len(stmt)) > stmtTruncateLength { + stmt = stmt[:stmtTruncateLength] + "..." 
+ } + result[i] = stmt + } + } + } + + resultStr, err := json.Marshal(result) + if err != nil { + b.ctx.GetSessionVars().StmtCtx.AppendWarning(errUnknown.GenWithStack("Marshalling result as JSON failed with error: %v", err)) + return "", true, nil + } + + return string(resultStr), false, nil +} + type tidbDecodePlanFunctionClass struct { baseFunctionClass } diff --git a/expression/errors.go b/expression/errors.go index ad0f49e64653c..43c3a8373f352 100644 --- a/expression/errors.go +++ b/expression/errors.go @@ -48,6 +48,7 @@ var ( errUnknownLocale = dbterror.ClassExpression.NewStd(mysql.ErrUnknownLocale) errNonUniq = dbterror.ClassExpression.NewStd(mysql.ErrNonUniq) errWrongValueForType = dbterror.ClassExpression.NewStd(mysql.ErrWrongValueForType) + errUnknown = dbterror.ClassExpression.NewStd(mysql.ErrUnknown) // Sequence usage privilege check. errSequenceAccessDenied = dbterror.ClassExpression.NewStd(mysql.ErrTableaccessDenied) diff --git a/expression/util.go b/expression/util.go index 3a9ecfe27b53a..56d526495638f 100644 --- a/expression/util.go +++ b/expression/util.go @@ -14,6 +14,7 @@ package expression import ( + "context" "math" "strconv" "strings" @@ -21,6 +22,7 @@ import ( "unicode" "github.com/pingcap/errors" + "github.com/pingcap/failpoint" "github.com/pingcap/parser/ast" "github.com/pingcap/parser/mysql" "github.com/pingcap/parser/opcode" @@ -31,6 +33,7 @@ import ( "github.com/pingcap/tidb/util/chunk" "github.com/pingcap/tidb/util/collate" "github.com/pingcap/tidb/util/logutil" + "github.com/pingcap/tidb/util/sqlexec" "go.uber.org/zap" "golang.org/x/tools/container/intsets" ) @@ -994,3 +997,176 @@ func GetFormatNanoTime(time float64) string { } return strconv.FormatFloat(value, 'f', 2, 64) + " " + unit } + +// SQLDigestTextRetriever is used to find the normalized SQL statement text by SQL digests in statements_summary table. +// It's exported for test purposes. 
It's used by the `tidb_decode_sql_digests` builtin function, but also exposed to +// be used in other modules. +type SQLDigestTextRetriever struct { + // SQLDigestsMap is the place to put the digests that's requested for getting SQL text and also the place to put + // the query result. + SQLDigestsMap map[string]string + + // Replace querying for test purposes. + mockLocalData map[string]string + mockGlobalData map[string]string + // There are two ways for querying information: 1) query specified digests by WHERE IN query, or 2) query all + // information to avoid the too long WHERE IN clause. If there are more than `fetchAllLimit` digests needs to be + // queried, the second way will be chosen; otherwise, the first way will be chosen. + fetchAllLimit int +} + +// NewSQLDigestTextRetriever creates a new SQLDigestTextRetriever. +func NewSQLDigestTextRetriever() *SQLDigestTextRetriever { + return &SQLDigestTextRetriever{ + SQLDigestsMap: make(map[string]string), + fetchAllLimit: 512, + } +} + +func (r *SQLDigestTextRetriever) runMockQuery(data map[string]string, inValues []interface{}) (map[string]string, error) { + if len(inValues) == 0 { + return data, nil + } + res := make(map[string]string, len(inValues)) + for _, digest := range inValues { + if text, ok := data[digest.(string)]; ok { + res[digest.(string)] = text + } + } + return res, nil +} + +// runFetchDigestQuery runs query to the system tables to fetch the kv mapping of SQL digests and normalized SQL texts +// of the given SQL digests, if `inValues` is given, or all these mappings otherwise. If `queryGlobal` is false, it +// queries information_schema.statements_summary and information_schema.statements_summary_history; otherwise, it +// queries the cluster version of these two tables. 
+func (r *SQLDigestTextRetriever) runFetchDigestQuery(ctx context.Context, sctx sessionctx.Context, queryGlobal bool, inValues []interface{}) (map[string]string, error) { + // If mock data is set, query the mock data instead of the real statements_summary tables. + if !queryGlobal && r.mockLocalData != nil { + return r.runMockQuery(r.mockLocalData, inValues) + } else if queryGlobal && r.mockGlobalData != nil { + return r.runMockQuery(r.mockGlobalData, inValues) + } + + exec, ok := sctx.(sqlexec.RestrictedSQLExecutor) + if !ok { + return nil, errors.New("restricted sql can't be executed in this context") + } + + // Information in statements_summary will be periodically moved to statements_summary_history. Union them together + // to avoid missing information when statements_summary is just cleared. + stmt := "select digest, digest_text from information_schema.statements_summary union distinct " + + "select digest, digest_text from information_schema.statements_summary_history" + if queryGlobal { + stmt = "select digest, digest_text from information_schema.cluster_statements_summary union distinct " + + "select digest, digest_text from information_schema.cluster_statements_summary_history" + } + // Add the where clause if `inValues` is specified. + if len(inValues) > 0 { + stmt += " where digest in (" + strings.Repeat("%?,", len(inValues)-1) + "%?)" + } + + stmtNode, err := exec.ParseWithParams(ctx, stmt, inValues...) 
+ if err != nil { + return nil, err + } + rows, _, err := exec.ExecRestrictedStmt(ctx, stmtNode) + if err != nil { + return nil, err + } + + res := make(map[string]string, len(rows)) + for _, row := range rows { + res[row.GetString(0)] = row.GetString(1) + } + return res, nil +} + +func (r *SQLDigestTextRetriever) updateDigestInfo(queryResult map[string]string) { + for digest, text := range r.SQLDigestsMap { + if len(text) > 0 { + // The text of this digest is already known + continue + } + sqlText, ok := queryResult[digest] + if ok { + r.SQLDigestsMap[digest] = sqlText + } + } +} + +// RetrieveLocal tries to retrieve the SQL text of the SQL digests from local information. +func (r *SQLDigestTextRetriever) RetrieveLocal(ctx context.Context, sctx sessionctx.Context) error { + if len(r.SQLDigestsMap) == 0 { + return nil + } + + var queryResult map[string]string + if len(r.SQLDigestsMap) <= r.fetchAllLimit { + inValues := make([]interface{}, 0, len(r.SQLDigestsMap)) + for key := range r.SQLDigestsMap { + inValues = append(inValues, key) + } + var err error + queryResult, err = r.runFetchDigestQuery(ctx, sctx, false, inValues) + if err != nil { + return errors.Trace(err) + } + + if len(queryResult) == len(r.SQLDigestsMap) { + r.SQLDigestsMap = queryResult + return nil + } + } else { + var err error + queryResult, err = r.runFetchDigestQuery(ctx, sctx, false, nil) + if err != nil { + return errors.Trace(err) + } + } + + r.updateDigestInfo(queryResult) + return nil +} + +// RetrieveGlobal tries to retrieve the SQL text of the SQL digests from the information of the whole cluster. +func (r *SQLDigestTextRetriever) RetrieveGlobal(ctx context.Context, sctx sessionctx.Context) error { + err := r.RetrieveLocal(ctx, sctx) + if err != nil { + return errors.Trace(err) + } + + // In some unit test environments it's unable to retrieve global info, and this function blocks it for tens of + // seconds, which wastes much time during unit test. 
In this case, enable this failpoint to bypass retrieving + // globally. + failpoint.Inject("sqlDigestRetrieverSkipRetrieveGlobal", func() { + failpoint.Return(nil) + }) + + var unknownDigests []interface{} + for k, v := range r.SQLDigestsMap { + if len(v) == 0 { + unknownDigests = append(unknownDigests, k) + } + } + + if len(unknownDigests) == 0 { + return nil + } + + var queryResult map[string]string + if len(r.SQLDigestsMap) <= r.fetchAllLimit { + queryResult, err = r.runFetchDigestQuery(ctx, sctx, true, unknownDigests) + if err != nil { + return errors.Trace(err) + } + } else { + queryResult, err = r.runFetchDigestQuery(ctx, sctx, true, nil) + if err != nil { + return errors.Trace(err) + } + } + + r.updateDigestInfo(queryResult) + return nil +} diff --git a/expression/util_test.go b/expression/util_test.go index 039f399573466..ed9ba8448b116 100644 --- a/expression/util_test.go +++ b/expression/util_test.go @@ -14,6 +14,7 @@ package expression import ( + "context" "reflect" "testing" "time" @@ -413,6 +414,68 @@ func (s *testUtilSuite) TestDisableParseJSONFlag4Expr(c *check.C) { c.Assert(mysql.HasParseToJSONFlag(ft.Flag), check.IsFalse) } +func (s *testUtilSuite) TestSQLDigestTextRetriever(c *check.C) { + // Create a fake session as the argument to the retriever, though it's actually not used when mock data is set. 
+ + r := NewSQLDigestTextRetriever() + clearResult := func() { + r.SQLDigestsMap = map[string]string{ + "digest1": "", + "digest2": "", + "digest3": "", + "digest4": "", + "digest5": "", + } + } + clearResult() + r.mockLocalData = map[string]string{ + "digest1": "text1", + "digest2": "text2", + "digest6": "text6", + } + r.mockGlobalData = map[string]string{ + "digest2": "text2", + "digest3": "text3", + "digest4": "text4", + "digest7": "text7", + } + + expectedLocalResult := map[string]string{ + "digest1": "text1", + "digest2": "text2", + "digest3": "", + "digest4": "", + "digest5": "", + } + expectedGlobalResult := map[string]string{ + "digest1": "text1", + "digest2": "text2", + "digest3": "text3", + "digest4": "text4", + "digest5": "", + } + + err := r.RetrieveLocal(context.Background(), nil) + c.Assert(err, check.IsNil) + c.Assert(r.SQLDigestsMap, check.DeepEquals, expectedLocalResult) + clearResult() + + err = r.RetrieveGlobal(context.Background(), nil) + c.Assert(err, check.IsNil) + c.Assert(r.SQLDigestsMap, check.DeepEquals, expectedGlobalResult) + clearResult() + + r.fetchAllLimit = 1 + err = r.RetrieveLocal(context.Background(), nil) + c.Assert(err, check.IsNil) + c.Assert(r.SQLDigestsMap, check.DeepEquals, expectedLocalResult) + clearResult() + + err = r.RetrieveGlobal(context.Background(), nil) + c.Assert(err, check.IsNil) + c.Assert(r.SQLDigestsMap, check.DeepEquals, expectedGlobalResult) +} + func BenchmarkExtractColumns(b *testing.B) { conditions := []Expression{ newFunction(ast.EQ, newColumn(0), newColumn(1)), diff --git a/go.mod b/go.mod index 17af5a487a71c..ee5b7386a4f03 100644 --- a/go.mod +++ b/go.mod @@ -43,7 +43,7 @@ require ( github.com/pingcap/fn v0.0.0-20200306044125-d5540d389059 github.com/pingcap/kvproto v0.0.0-20210722091755-91a52cd9e8db github.com/pingcap/log v0.0.0-20210625125904-98ed8e2eb1c7 - github.com/pingcap/parser v0.0.0-20210802034743-dd9b189324ce + github.com/pingcap/parser v0.0.0-20210803205906-cece3020391a 
github.com/pingcap/sysutil v0.0.0-20210315073920-cc0985d983a3 github.com/pingcap/tidb-tools v5.0.3+incompatible github.com/pingcap/tipb v0.0.0-20210708040514-0f154bb0dc0f diff --git a/go.sum b/go.sum index 93b9afc5cfc37..d6cddccf05fc2 100644 --- a/go.sum +++ b/go.sum @@ -444,8 +444,8 @@ github.com/pingcap/log v0.0.0-20210317133921-96f4fcab92a4/go.mod h1:4rbK1p9ILyIf github.com/pingcap/log v0.0.0-20210625125904-98ed8e2eb1c7 h1:k2BbABz9+TNpYRwsCCFS8pEEnFVOdbgEjL/kTlLuzZQ= github.com/pingcap/log v0.0.0-20210625125904-98ed8e2eb1c7/go.mod h1:8AanEdAHATuRurdGxZXBz0At+9avep+ub7U1AGYLIMM= github.com/pingcap/parser v0.0.0-20210525032559-c37778aff307/go.mod h1:xZC8I7bug4GJ5KtHhgAikjTfU4kBv1Sbo3Pf1MZ6lVw= -github.com/pingcap/parser v0.0.0-20210802034743-dd9b189324ce h1:3KjHJw5FjUbrLLunmzEdmU/CeXNfLaqnP9AMVfOVOQU= -github.com/pingcap/parser v0.0.0-20210802034743-dd9b189324ce/go.mod h1:Ek0mLKEqUGnQqBw1JnYrJQxsguU433DU68yUbsoeJ7s= +github.com/pingcap/parser v0.0.0-20210803205906-cece3020391a h1:NPO1iSULt7ztYOEifJ73IZA+xF3ywgX0Ik0X6PKy8BI= +github.com/pingcap/parser v0.0.0-20210803205906-cece3020391a/go.mod h1:Ek0mLKEqUGnQqBw1JnYrJQxsguU433DU68yUbsoeJ7s= github.com/pingcap/sysutil v0.0.0-20200206130906-2bfa6dc40bcd/go.mod h1:EB/852NMQ+aRKioCpToQ94Wl7fktV+FNnxf3CX/TTXI= github.com/pingcap/sysutil v0.0.0-20210315073920-cc0985d983a3 h1:A9KL9R+lWSVPH8IqUuH1QSTRJ5FGoY1bT2IcfPKsWD8= github.com/pingcap/sysutil v0.0.0-20210315073920-cc0985d983a3/go.mod h1:tckvA041UWP+NqYzrJ3fMgC/Hw9wnmQ/tUkp/JaHly8= diff --git a/infoschema/tables_test.go b/infoschema/tables_test.go index 6fe1feeed4e5b..14e1f6e99b390 100644 --- a/infoschema/tables_test.go +++ b/infoschema/tables_test.go @@ -1754,9 +1754,14 @@ func (s *testTableSuite) TestInfoschemaClientErrors(c *C) { c.Assert(err.Error(), Equals, "[planner:1227]Access denied; you need (at least one of) the RELOAD privilege(s) for this operation") } -func (s *testTableSuite) TestTrx(c *C) { +func (s *testTableSuite) TestTiDBTrx(c *C) { tk := 
s.newTestKitWithRoot(c) - _, digest := parser.NormalizeDigest("select * from trx for update;") + tk.MustExec("drop table if exists test_tidb_trx") + tk.MustExec("create table test_tidb_trx(i int)") + // Execute the statement once so that the statement will be collected into statements_summary and able to be found + // by digest. + tk.MustExec("update test_tidb_trx set i = i + 1") + _, digest := parser.NormalizeDigest("update test_tidb_trx set i = i + 1") sm := &mockSessionManager{nil, make([]*txninfo.TxnInfo, 2)} sm.txnInfo[0] = &txninfo.TxnInfo{ StartTS: 424768545227014155, @@ -1772,7 +1777,7 @@ func (s *testTableSuite) TestTrx(c *C) { sm.txnInfo[1] = &txninfo.TxnInfo{ StartTS: 425070846483628033, CurrentSQLDigest: "", - AllSQLDigests: []string{"sql1", "sql2"}, + AllSQLDigests: []string{"sql1", "sql2", digest.String()}, State: txninfo.TxnLockWaiting, ConnectionID: 10, Username: "user1", @@ -1781,9 +1786,19 @@ func (s *testTableSuite) TestTrx(c *C) { sm.txnInfo[1].BlockStartTime.Valid = true sm.txnInfo[1].BlockStartTime.Time = blockTime2 tk.Se.SetSessionManager(sm) + tk.MustQuery("select * from information_schema.TIDB_TRX;").Check(testkit.Rows( - "424768545227014155 2021-05-07 12:56:48.001000 "+digest.String()+" Idle 1 19 2 root test []", - "425070846483628033 2021-05-20 21:16:35.778000 LockWaiting 2021-05-20 13:18:30.123456 0 0 10 user1 db1 [\"sql1\",\"sql2\"]")) + "424768545227014155 2021-05-07 12:56:48.001000 "+digest.String()+" update `test_tidb_trx` set `i` = `i` + ? Idle 1 19 2 root test []", + "425070846483628033 2021-05-20 21:16:35.778000 LockWaiting 2021-05-20 13:18:30.123456 0 0 10 user1 db1 [\"sql1\",\"sql2\",\""+digest.String()+"\"]")) + + // Test the all_sql_digests column can be directly passed to the tidb_decode_sql_digests function. 
+ c.Assert(failpoint.Enable("github.com/pingcap/tidb/expression/sqlDigestRetrieverSkipRetrieveGlobal", "return"), IsNil) + defer func() { + c.Assert(failpoint.Disable("github.com/pingcap/tidb/expression/sqlDigestRetrieverSkipRetrieveGlobal"), IsNil) + }() + tk.MustQuery("select tidb_decode_sql_digests(all_sql_digests) from information_schema.tidb_trx").Check(testkit.Rows( + "[]", + "[null,null,\"update `test_tidb_trx` set `i` = `i` + ?\"]")) } func (s *testTableSuite) TestInfoschemaDeadlockPrivilege(c *C) { From 1a54708a7f8f86515236626c78e97a33d8adf583 Mon Sep 17 00:00:00 2001 From: 3pointer Date: Fri, 6 Aug 2021 00:17:12 +0800 Subject: [PATCH 17/26] *: merge BR into TiDB (#26655) --- Makefile | 108 +- Makefile.common | 32 +- br/.codecov.yml | 22 + br/.dockerignore | 1 + br/.editorconfig | 10 + br/.gitattributes | 1 + br/.github/ISSUE_TEMPLATE/bug-report.md | 45 + br/.github/ISSUE_TEMPLATE/feature-request.md | 19 + br/.github/ISSUE_TEMPLATE/question.md | 24 + br/.github/challenge-bot.yml | 1 + br/.github/pull_request_template.md | 42 + br/.github/workflows/build.yml | 68 + br/.github/workflows/compatible_test.yml | 71 + br/.gitignore | 14 + br/.golangci.yml | 12 + br/COMPATIBILITY_TEST.md | 42 + br/CONTRIBUTING.md | 90 + br/LICENSE.md | 201 + br/README.md | 137 + br/SECURITY.md | 33 + br/cmd/br/backup.go | 157 + br/cmd/br/cmd.go | 207 + br/cmd/br/debug.go | 387 + br/cmd/br/main.go | 61 + br/cmd/br/main_test.go | 44 + br/cmd/br/restore.go | 178 + br/cmd/tidb-lightning-ctl/main.go | 409 + br/cmd/tidb-lightning-ctl/main_test.go | 47 + br/cmd/tidb-lightning/main.go | 111 + br/cmd/tidb-lightning/main_test.go | 49 + br/compatibility/backup_cluster.yaml | 128 + .../application_default_credentials.json | 6 + br/compatibility/get_last_tags.sh | 24 + br/compatibility/prepare_backup.sh | 46 + br/compatibility/prepare_data/workload | 11 + br/docker-compose.yaml | 224 + br/docker/Dockerfile | 27 + br/docker/config/pd.toml | 18 + br/docker/config/tidb.toml | 9 + 
br/docker/config/tikv.toml | 25 + br/docker/gcs.env | 1 + br/docker/minio.env | 6 + ...2019-08-05-new-design-of-backup-restore.md | 151 + .../2019-09-09-BR-key-rewrite-disscussion.md | 89 + ...-design-of-reorganize-importSST-to-TiKV.md | 110 + .../arch-of-reorganized-importer.svg | 1 + br/docs/resources/download-sst-diagram.svg | 1 + .../resources/solution3-of-key-rewrite.svg | 1 + br/errors.toml | 213 + br/images/arch.svg | 110 + br/metrics/alertmanager/lightning.rules.yml | 14 + br/metrics/grafana/br.json | 2969 +++++ br/metrics/grafana/lightning.json | 1604 +++ br/pkg/backup/check.go | 34 + br/pkg/backup/client.go | 947 ++ br/pkg/backup/client_test.go | 271 + br/pkg/backup/metrics.go | 31 + br/pkg/backup/push.go | 188 + br/pkg/backup/schema.go | 191 + br/pkg/backup/schema_test.go | 240 + br/pkg/cdclog/buffer.go | 216 + br/pkg/cdclog/decoder.go | 298 + br/pkg/cdclog/decoder_test.go | 168 + br/pkg/cdclog/puller.go | 183 + br/pkg/checksum/executor.go | 319 + br/pkg/checksum/executor_test.go | 130 + br/pkg/checksum/validate.go | 78 + br/pkg/conn/conn.go | 390 + br/pkg/conn/conn_test.go | 144 + br/pkg/errors/errors.go | 83 + br/pkg/glue/glue.go | 49 + br/pkg/gluetidb/glue.go | 171 + br/pkg/gluetikv/glue.go | 62 + br/pkg/gluetikv/glue_test.go | 20 + br/pkg/httputil/http.go | 22 + br/pkg/kv/checksum.go | 114 + br/pkg/kv/checksum_test.go | 90 + br/pkg/kv/kv.go | 501 + br/pkg/kv/kv_test.go | 91 + br/pkg/kv/session.go | 252 + br/pkg/kv/session_test.go | 35 + br/pkg/lightning/backend/backend.go | 480 + br/pkg/lightning/backend/backend_test.go | 409 + br/pkg/lightning/backend/importer/importer.go | 407 + .../backend/importer/importer_test.go | 292 + br/pkg/lightning/backend/kv/allocator.go | 63 + br/pkg/lightning/backend/kv/kv2sql.go | 61 + br/pkg/lightning/backend/kv/session.go | 336 + br/pkg/lightning/backend/kv/session_test.go | 35 + br/pkg/lightning/backend/kv/sql2kv.go | 532 + br/pkg/lightning/backend/kv/sql2kv_test.go | 601 + br/pkg/lightning/backend/kv/types.go | 53 
+ br/pkg/lightning/backend/local/duplicate.go | 631 + br/pkg/lightning/backend/local/iterator.go | 192 + .../lightning/backend/local/iterator_test.go | 253 + br/pkg/lightning/backend/local/key_adapter.go | 94 + .../backend/local/key_adapter_test.go | 154 + br/pkg/lightning/backend/local/local.go | 3182 +++++ .../lightning/backend/local/local_freebsd.go | 26 + br/pkg/lightning/backend/local/local_test.go | 762 ++ br/pkg/lightning/backend/local/local_unix.go | 90 + .../backend/local/local_unix_generic.go | 24 + .../lightning/backend/local/local_windows.go | 33 + br/pkg/lightning/backend/local/localhelper.go | 597 + .../backend/local/localhelper_test.go | 719 ++ br/pkg/lightning/backend/noop/noop.go | 183 + br/pkg/lightning/backend/tidb/tidb.go | 644 + br/pkg/lightning/backend/tidb/tidb_test.go | 385 + br/pkg/lightning/checkpoints/checkpoints.go | 1656 +++ .../checkpoints/checkpoints_file_test.go | 329 + .../checkpoints/checkpoints_sql_test.go | 530 + .../lightning/checkpoints/checkpoints_test.go | 306 + .../checkpointspb/file_checkpoints.pb.go | 2469 ++++ .../checkpointspb/file_checkpoints.proto | 72 + .../lightning/checkpoints/glue_checkpoint.go | 805 ++ br/pkg/lightning/checkpoints/tidb.go | 30 + br/pkg/lightning/common/conn.go | 110 + br/pkg/lightning/common/once_error.go | 46 + br/pkg/lightning/common/once_error_test.go | 58 + br/pkg/lightning/common/pause.go | 156 + br/pkg/lightning/common/pause_test.go | 189 + br/pkg/lightning/common/security.go | 161 + br/pkg/lightning/common/security_test.go | 98 + br/pkg/lightning/common/storage.go | 23 + br/pkg/lightning/common/storage_test.go | 33 + br/pkg/lightning/common/storage_unix.go | 73 + br/pkg/lightning/common/storage_windows.go | 50 + br/pkg/lightning/common/util.go | 395 + br/pkg/lightning/common/util_test.go | 217 + br/pkg/lightning/config/bytesize.go | 44 + br/pkg/lightning/config/bytesize_test.go | 128 + br/pkg/lightning/config/config.go | 897 ++ br/pkg/lightning/config/config_test.go | 693 ++ 
br/pkg/lightning/config/configlist.go | 153 + br/pkg/lightning/config/configlist_test.go | 132 + br/pkg/lightning/config/const.go | 32 + br/pkg/lightning/config/global.go | 284 + br/pkg/lightning/glue/glue.go | 190 + br/pkg/lightning/lightning.go | 692 ++ br/pkg/lightning/lightning_test.go | 563 + br/pkg/lightning/log/filter.go | 57 + br/pkg/lightning/log/filter_test.go | 95 + br/pkg/lightning/log/log.go | 226 + br/pkg/lightning/log/log_test.go | 50 + br/pkg/lightning/log/testlogger.go | 36 + br/pkg/lightning/manual/allocator.go | 20 + br/pkg/lightning/manual/manual.go | 65 + br/pkg/lightning/manual/manual_nocgo.go | 19 + br/pkg/lightning/metric/metric.go | 259 + br/pkg/lightning/metric/metric_test.go | 59 + br/pkg/lightning/mydump/bytes.go | 39 + .../lightning/mydump/csv/split_large_file.csv | 5 + br/pkg/lightning/mydump/csv_parser.go | 522 + br/pkg/lightning/mydump/csv_parser_test.go | 947 ++ br/pkg/lightning/mydump/examples/metadata | 2 + .../examples/mocker_test-schema-create.sql | 1 + .../mydump/examples/mocker_test.i-schema.sql | 6 + .../mydump/examples/mocker_test.i.sql | 1 + ...cker_test.report_case_high_risk-schema.sql | 9 + .../mocker_test.report_case_high_risk.sql | 1 + .../mocker_test.tbl_autoid-schema.sql | 8 + .../examples/mocker_test.tbl_autoid.sql | 10010 ++++++++++++++++ .../mocker_test.tbl_multi_index-schema.sql | 9 + .../examples/mocker_test.tbl_multi_index.sql | 10010 ++++++++++++++++ br/pkg/lightning/mydump/examples/test.parquet | Bin 0 -> 2686 bytes br/pkg/lightning/mydump/loader.go | 460 + br/pkg/lightning/mydump/loader_test.go | 595 + br/pkg/lightning/mydump/parquet_parser.go | 535 + .../lightning/mydump/parquet_parser_test.go | 266 + br/pkg/lightning/mydump/parser.go | 573 + br/pkg/lightning/mydump/parser.rl | 187 + br/pkg/lightning/mydump/parser_generated.go | 2515 ++++ br/pkg/lightning/mydump/parser_test.go | 883 ++ br/pkg/lightning/mydump/reader.go | 177 + br/pkg/lightning/mydump/reader_test.go | 180 + br/pkg/lightning/mydump/region.go | 
402 + br/pkg/lightning/mydump/region_test.go | 332 + br/pkg/lightning/mydump/router.go | 336 + br/pkg/lightning/mydump/router_test.go | 255 + br/pkg/lightning/restore/check_info.go | 440 + br/pkg/lightning/restore/check_template.go | 130 + br/pkg/lightning/restore/checksum.go | 475 + br/pkg/lightning/restore/checksum_test.go | 405 + br/pkg/lightning/restore/meta_manager.go | 816 ++ br/pkg/lightning/restore/meta_manager_test.go | 264 + br/pkg/lightning/restore/restore.go | 2333 ++++ br/pkg/lightning/restore/restore_test.go | 2118 ++++ br/pkg/lightning/restore/table_restore.go | 886 ++ br/pkg/lightning/restore/tidb.go | 388 + br/pkg/lightning/restore/tidb_test.go | 497 + br/pkg/lightning/sigusr1_other.go | 20 + br/pkg/lightning/sigusr1_unix.go | 37 + br/pkg/lightning/tikv/tikv.go | 233 + br/pkg/lightning/tikv/tikv_test.go | 251 + br/pkg/lightning/verification/checksum.go | 106 + .../lightning/verification/checksum_test.go | 91 + br/pkg/lightning/web/progress.go | 185 + br/pkg/lightning/web/res.go | 22 + br/pkg/lightning/web/res_vfsdata.go | 202 + br/pkg/lightning/worker/worker.go | 65 + br/pkg/lightning/worker/worker_test.go | 55 + br/pkg/logutil/logging.go | 228 + br/pkg/logutil/logging_test.go | 209 + br/pkg/logutil/rate.go | 55 + br/pkg/membuf/buffer.go | 146 + br/pkg/membuf/buffer_test.go | 90 + br/pkg/metautil/metafile.go | 661 + br/pkg/metautil/metafile_test.go | 134 + br/pkg/mock/backend.go | 362 + br/pkg/mock/glue.go | 235 + br/pkg/mock/glue_checkpoint.go | 137 + br/pkg/mock/importer.go | 377 + br/pkg/mock/kv.go | 153 + br/pkg/mock/mock_cluster.go | 194 + br/pkg/mock/mock_cluster_test.go | 36 + br/pkg/mock/mockid/mockid.go | 36 + br/pkg/mock/s3iface.go | 4886 ++++++++ br/pkg/mock/storage/storage.go | 140 + br/pkg/pdutil/pd.go | 684 ++ br/pkg/pdutil/pd_test.go | 202 + br/pkg/pdutil/utils.go | 119 + br/pkg/redact/redact.go | 36 + br/pkg/redact/redact_test.go | 31 + br/pkg/restore/backoff.go | 115 + br/pkg/restore/backoff_test.go | 116 + 
br/pkg/restore/batcher.go | 374 + br/pkg/restore/batcher_test.go | 387 + br/pkg/restore/client.go | 1121 ++ br/pkg/restore/client_test.go | 247 + br/pkg/restore/db.go | 275 + br/pkg/restore/db_test.go | 219 + br/pkg/restore/import.go | 574 + br/pkg/restore/ingester.go | 604 + br/pkg/restore/log_client.go | 761 ++ br/pkg/restore/log_client_test.go | 126 + br/pkg/restore/merge.go | 154 + br/pkg/restore/merge_test.go | 296 + br/pkg/restore/pipeline_items.go | 273 + br/pkg/restore/range.go | 135 + br/pkg/restore/range_test.go | 84 + br/pkg/restore/split.go | 416 + br/pkg/restore/split_client.go | 550 + br/pkg/restore/split_test.go | 372 + br/pkg/restore/systable_restore.go | 216 + br/pkg/restore/util.go | 447 + br/pkg/restore/util_test.go | 269 + br/pkg/rtree/logging.go | 60 + br/pkg/rtree/logging_test.go | 46 + br/pkg/rtree/rtree.go | 219 + br/pkg/rtree/rtree_test.go | 185 + br/pkg/storage/compress.go | 130 + br/pkg/storage/compress_test.go | 42 + br/pkg/storage/flags.go | 22 + br/pkg/storage/gcs.go | 387 + br/pkg/storage/gcs_test.go | 271 + br/pkg/storage/local.go | 130 + br/pkg/storage/local_test.go | 68 + br/pkg/storage/local_unix.go | 19 + br/pkg/storage/local_windows.go | 13 + br/pkg/storage/noop.go | 71 + br/pkg/storage/parse.go | 171 + br/pkg/storage/parse_test.go | 176 + br/pkg/storage/s3.go | 744 ++ br/pkg/storage/s3_test.go | 1029 ++ br/pkg/storage/storage.go | 177 + br/pkg/storage/writer.go | 230 + br/pkg/storage/writer_test.go | 168 + br/pkg/summary/collector.go | 234 + br/pkg/summary/collector_test.go | 49 + br/pkg/summary/summary.go | 45 + br/pkg/task/backup.go | 531 + br/pkg/task/backup_raw.go | 241 + br/pkg/task/backup_test.go | 61 + br/pkg/task/common.go | 549 + br/pkg/task/common_test.go | 64 + br/pkg/task/restore.go | 572 + br/pkg/task/restore_log.go | 143 + br/pkg/task/restore_raw.go | 155 + br/pkg/task/restore_test.go | 22 + br/pkg/trace/tracing.go | 105 + br/pkg/trace/tracing_test.go | 60 + br/pkg/utils/dyn_pprof_other.go | 11 + 
br/pkg/utils/dyn_pprof_unix.go | 33 + br/pkg/utils/env.go | 33 + br/pkg/utils/env_test.go | 44 + br/pkg/utils/json.go | 187 + br/pkg/utils/json_test.go | 251 + br/pkg/utils/key.go | 91 + br/pkg/utils/key_test.go | 115 + br/pkg/utils/math.go | 63 + br/pkg/utils/math_test.go | 48 + br/pkg/utils/permission.go | 20 + br/pkg/utils/pprof.go | 69 + br/pkg/utils/progress.go | 155 + br/pkg/utils/progress_test.go | 77 + br/pkg/utils/retry.go | 71 + br/pkg/utils/safe_point.go | 138 + br/pkg/utils/safe_point_test.go | 138 + br/pkg/utils/schema.go | 116 + br/pkg/utils/schema_test.go | 298 + br/pkg/utils/utils_test.go | 13 + br/pkg/utils/worker.go | 99 + br/pkg/version/build/info.go | 59 + br/pkg/version/build/info_test.go | 33 + br/pkg/version/version.go | 240 + br/pkg/version/version_test.go | 296 + br/revive.toml | 52 + br/tests/README.md | 100 + br/tests/_utils/br_tikv_outage_util | 47 + br/tests/_utils/check_cluster_version | 26 + br/tests/_utils/check_contains | 24 + br/tests/_utils/check_not_contains | 24 + br/tests/_utils/generate_certs | 31 + br/tests/_utils/make_tiflash_config | 96 + br/tests/_utils/read_result | 21 + br/tests/_utils/run_br | 22 + br/tests/_utils/run_cdc | 22 + br/tests/_utils/run_curl | 32 + br/tests/_utils/run_lightning | 32 + br/tests/_utils/run_lightning_ctl | 30 + br/tests/_utils/run_pd_ctl | 22 + br/tests/_utils/run_services | 267 + br/tests/_utils/run_sql | 27 + br/tests/_utils/run_sql_in_container | 24 + br/tests/br_300_small_tables/run.sh | 105 + br/tests/br_backup_empty/run.sh | 53 + br/tests/br_backup_version/run.sh | 85 + br/tests/br_case_sensitive/run.sh | 46 + br/tests/br_clustered_index/run.sh | 196 + br/tests/br_db/run.sh | 74 + br/tests/br_db_online/run.sh | 54 + br/tests/br_db_online_newkv/run.sh | 78 + br/tests/br_db_skip/run.sh | 72 + br/tests/br_debug_meta/run.sh | 71 + br/tests/br_debug_meta/workload | 12 + br/tests/br_full/run.sh | 97 + br/tests/br_full/workload | 12 + br/tests/br_full_ddl/run.sh | 165 + 
br/tests/br_full_ddl/workload | 13 + br/tests/br_full_index/run.sh | 81 + br/tests/br_full_index/workload | 12 + br/tests/br_gcs/oauth.go | 25 + br/tests/br_gcs/run.sh | 146 + br/tests/br_gcs/workload | 12 + br/tests/br_history/run.sh | 67 + br/tests/br_history/workload | 12 + .../config/tidb-max-index-length.toml | 16 + br/tests/br_incompatible_tidb_config/run.sh | 125 + br/tests/br_incremental/run.sh | 58 + br/tests/br_incremental/workload | 12 + br/tests/br_incremental_ddl/run.sh | 74 + br/tests/br_incremental_index/run.sh | 74 + br/tests/br_incremental_only_ddl/run.sh | 70 + br/tests/br_incremental_same_table/run.sh | 86 + br/tests/br_insert_after_restore/run.sh | 80 + br/tests/br_key_locked/codec.go | 108 + br/tests/br_key_locked/locker.go | 348 + br/tests/br_key_locked/run.sh | 55 + br/tests/br_key_locked/workload | 12 + br/tests/br_log_restore/run.sh | 180 + br/tests/br_log_restore/workload | 12 + br/tests/br_log_test/run.sh | 43 + br/tests/br_log_test/workload | 12 + br/tests/br_move_backup/run.sh | 57 + br/tests/br_move_backup/workload | 12 + br/tests/br_other/run.sh | 190 + br/tests/br_range/run.sh | 39 + br/tests/br_rawkv/client.go | 373 + br/tests/br_rawkv/run.sh | 140 + br/tests/br_restore_TDE_enable/run.sh | 151 + br/tests/br_restore_TDE_enable/workload | 12 + br/tests/br_s3/run.sh | 159 + br/tests/br_s3/workload | 12 + br/tests/br_shuffle_leader/run.sh | 51 + br/tests/br_shuffle_leader/workload | 12 + br/tests/br_shuffle_region/run.sh | 52 + br/tests/br_shuffle_region/workload | 12 + br/tests/br_single_table/run.sh | 45 + br/tests/br_single_table/workload | 12 + br/tests/br_skip_checksum/run.sh | 86 + br/tests/br_skip_checksum/workload | 12 + br/tests/br_small_batch_size/run.sh | 78 + br/tests/br_small_batch_size/workload | 12 + br/tests/br_split_region_fail/run.sh | 84 + br/tests/br_split_region_fail/workload | 12 + br/tests/br_systables/run.sh | 102 + br/tests/br_systables/workload | 12 + br/tests/br_table_filter/run.sh | 120 + 
br/tests/br_table_partition/prepare.sh | 70 + br/tests/br_table_partition/run.sh | 61 + br/tests/br_tiflash/run.sh | 64 + br/tests/br_tikv_outage/run.sh | 36 + br/tests/br_tikv_outage/workload | 12 + br/tests/br_tikv_outage2/run.sh | 40 + br/tests/br_tikv_outage2/workload | 12 + br/tests/br_views_and_sequences/run.sh | 49 + br/tests/br_z_gc_safepoint/gc.go | 79 + br/tests/br_z_gc_safepoint/run.sh | 76 + br/tests/br_z_gc_safepoint/workload | 12 + br/tests/config/importer.toml | 4 + br/tests/config/ipsan.cnf | 11 + br/tests/config/pd.toml | 10 + br/tests/config/restore-tikv.toml | 25 + br/tests/config/root.cert | 9 + br/tests/config/root.key | 3 + br/tests/config/tidb.toml | 13 + br/tests/config/tikv.toml | 35 + br/tests/docker_compatible_gcs/prepare.sh | 25 + br/tests/docker_compatible_gcs/run.sh | 49 + br/tests/docker_compatible_s3/prepare.sh | 31 + br/tests/docker_compatible_s3/run.sh | 36 + br/tests/download_tools.sh | 87 + br/tests/lightning_alter_random/config.toml | 0 .../data/alter_random-schema-create.sql | 1 + .../data/alter_random.t-schema.sql | 4 + .../data/alter_random.t.sql | 5 + br/tests/lightning_alter_random/run.sh | 50 + .../lightning_auto_random_default/config.toml | 2 + .../data/auto_random-schema-create.sql | 1 + .../data/auto_random.t-schema.sql | 5 + .../data/auto_random.t.0.sql | 5 + .../data/auto_random.t.1.sql | 5 + br/tests/lightning_auto_random_default/run.sh | 61 + .../lightning_black-white-list/config.toml | 2 + .../data/firstdb-schema-create.sql | 1 + .../data/firstdb.first-schema.sql | 1 + .../data/firstdb.first.1.sql | 1 + .../data/firstdb.first.2.sql | 1 + .../data/firstdb.second-schema.sql | 1 + .../data/firstdb.second.1.sql | 1 + .../data/mysql-schema-create.sql | 1 + .../data/mysql.testtable-schema.sql | 1 + .../data/seconddb-schema-create.sql | 1 + .../data/seconddb.fourth-schema.sql | 1 + .../data/seconddb.fourth.1.sql | 1 + .../data/seconddb.third-schema.sql | 1 + .../data/seconddb.third.1.sql | 1 + .../even-table-only.toml | 
14 + .../firstdb-only.toml | 5 + br/tests/lightning_black-white-list/run.sh | 66 + br/tests/lightning_character_sets/auto.toml | 8 + br/tests/lightning_character_sets/binary.toml | 8 + .../lightning_character_sets/gb18030.toml | 8 + .../gb18030/charsets-schema-create.sql | 1 + .../gb18030/charsets.gb18030-schema.sql | 1 + .../gb18030/charsets.gb18030.sql | 1 + .../mixed/charsets-schema-create.sql | 1 + .../mixed/charsets.mixed-schema.sql | 1 + .../mixed/charsets.mixed.sql | 1 + br/tests/lightning_character_sets/run.sh | 76 + .../lightning_character_sets/utf8mb4.toml | 8 + .../utf8mb4/charsets-schema-create.sql | 1 + .../utf8mb4/charsets.utf8mb4-schema.sql | 1 + .../utf8mb4/charsets.utf8mb4.sql | 1 + br/tests/lightning_checkpoint/config.toml | 11 + br/tests/lightning_checkpoint/run.sh | 120 + .../lightning_checkpoint_chunks/config.toml | 11 + .../lightning_checkpoint_chunks/file.toml | 12 + br/tests/lightning_checkpoint_chunks/run.sh | 128 + .../lightning_checkpoint_columns/config.toml | 16 + br/tests/lightning_checkpoint_columns/run.sh | 49 + .../data/cpdt-schema-create.sql | 1 + .../data/cpdt.t-schema.sql | 1 + .../data/cpdt.t.sql | 1 + .../file.toml | 6 + .../mysql.toml | 6 + .../lightning_checkpoint_dirty_tableid/run.sh | 59 + .../lightning_checkpoint_engines/config.toml | 9 + .../data/cpeng-schema-create.sql | 1 + .../data/cpeng.a-schema.sql | 1 + .../data/cpeng.a.1.sql | 1 + .../data/cpeng.a.2.sql | 1 + .../data/cpeng.a.3.sql | 1 + .../data/cpeng.b-schema.sql | 1 + .../data/cpeng.b.1.sql | 4 + .../data/cpeng.b.2.sql | 1 + .../lightning_checkpoint_engines/mysql.toml | 9 + br/tests/lightning_checkpoint_engines/run.sh | 103 + .../config.toml | 16 + .../disk_quota_checkpoint-schema-create.sql | 1 + .../data/disk_quota_checkpoint.t-schema.sql | 4 + .../data/disk_quota_checkpoint.t.0.sql | 4 + .../data/disk_quota_checkpoint.t.1.sql | 4 + .../data/disk_quota_checkpoint.t.2.sql | 4 + .../lightning_checkpoint_engines_order/run.sh | 52 + 
.../bad-data/cped-schema-create.sql | 1 + .../bad-data/cped.t-schema.sql | 1 + .../bad-data/cped.t.sql | 1 + .../file.toml | 10 + .../good-data/cped-schema-create.sql | 1 + .../good-data/cped.t-schema.sql | 1 + .../good-data/cped.t.sql | 1 + .../mysql.toml | 9 + .../lightning_checkpoint_error_destroy/run.sh | 52 + .../lightning_checkpoint_parquet/config.toml | 11 + .../lightning_checkpoint_parquet/parquet.go | 65 + br/tests/lightning_checkpoint_parquet/run.sh | 61 + .../config.toml | 14 + .../data/cpts-schema-create.sql | 1 + .../data/cpts.cpts-schema.sql | 1 + .../data/cpts.cpts.1.sql | 4 + .../data/cpts.cpts.2.sql | 3 + .../lightning_checkpoint_timestamp/mysql.toml | 14 + .../lightning_checkpoint_timestamp/run.sh | 46 + .../lightning_cmdline_override/config.toml | 18 + .../data/cmdline_override-schema-create.sql | 1 + .../data/cmdline_override.t-schema.sql | 1 + .../data/cmdline_override.t.sql | 1 + br/tests/lightning_cmdline_override/run.sh | 18 + .../lightning_column_permutation/config.toml | 3 + .../data/perm-schema-create.sql | 1 + .../data/perm.test_perm-schema.sql | 22 + .../data/perm.test_perm.0.csv | 6 + br/tests/lightning_column_permutation/run.sh | 18 + br/tests/lightning_common_handle/config.toml | 2 + br/tests/lightning_common_handle/run.sh | 59 + .../lightning_concurrent-restore/config.toml | 6 + br/tests/lightning_concurrent-restore/run.sh | 48 + br/tests/lightning_csv/config.toml | 12 + .../lightning_csv/data/csv-schema-create.sql | 1 + .../data/csv.empty_strings-schema.sql | 5 + .../lightning_csv/data/csv.empty_strings.csv | 4 + .../lightning_csv/data/csv.escapes-schema.sql | 16 + br/tests/lightning_csv/data/csv.escapes.CSV | 6 + .../lightning_csv/data/csv.threads-schema.sql | 27 + br/tests/lightning_csv/data/csv.threads.csv | 43 + br/tests/lightning_csv/run.sh | 43 + .../lightning_default-columns/config.toml | 2 + .../data/defcol-schema-create.sql | 1 + .../data/defcol.t-schema.sql | 6 + .../data/defcol.t.1.sql | 1 + .../data/defcol.t.2.sql | 1 + 
.../data/defcol.u-schema.sql | 4 + .../data/defcol.u.1.sql | 1 + br/tests/lightning_default-columns/run.sh | 35 + br/tests/lightning_disk_quota/config.toml | 8 + .../data/disk_quota-schema-create.sql | 1 + .../data/disk_quota.t-schema.sql | 12 + .../data/disk_quota.t.0.sql | 51 + .../data/disk_quota.t.1.sql | 51 + .../data/disk_quota.t.2.sql | 51 + .../data/disk_quota.t.3.sql | 51 + br/tests/lightning_disk_quota/run.sh | 75 + .../config1.toml | 30 + .../config2.toml | 30 + .../data/dup_detect-schema-create.sql | 1 + .../data/dup_detect.ta-schema.sql | 6 + .../data/dup_detect.ta.0.sql | 20 + .../data/dup_detect.ta.1.sql | 20 + .../data/dup_detect.tb-schema.sql | 6 + .../data/dup_detect.tb.0.sql | 20 + .../data/dup_detect.tb.1.sql | 20 + .../data/dup_detect.tc-schema.sql | 6 + .../data/dup_detect.tc.0.sql | 20 + .../data/dup_detect.tc.1.sql | 20 + .../data/dup_detect.td-schema.sql | 6 + .../data/dup_detect.td.0.sql | 20 + .../data/dup_detect.td.1.sql | 20 + .../data/dup_detect.te-schema.sql | 6 + .../data/dup_detect.te.0.sql | 20 + .../data/dup_detect.te.1.sql | 20 + .../data/dup_detect.tf-schema.sql | 6 + .../data/dup_detect.tf.0.sql | 20 + .../data/dup_detect.tf.1.sql | 20 + br/tests/lightning_duplicate_detection/run.sh | 45 + br/tests/lightning_error_summary/config.toml | 7 + .../data/error_summary-schema-create.sql | 1 + .../data/error_summary.a-schema.sql | 4 + .../data/error_summary.a.sql | 1 + .../data/error_summary.b-schema.sql | 4 + .../data/error_summary.b.sql | 1 + .../data/error_summary.c-schema.sql | 4 + .../data/error_summary.c.sql | 1 + br/tests/lightning_error_summary/run.sh | 67 + br/tests/lightning_examples/1.toml | 9 + br/tests/lightning_examples/131072.toml | 9 + br/tests/lightning_examples/512.toml | 9 + br/tests/lightning_examples/run.sh | 98 + .../lightning_exotic_filenames/config.toml | 2 + .../data/xfn-schema-create.sql | 1 + .../data/xfn.etn-schema.sql | 1 + .../data/xfn.etn.sql | 7 + .../data/zwk-schema-create.sql | 1 + 
.../data/zwk.zwb-schema.sql | 1 + .../data/zwk.zwb.sql | 1 + br/tests/lightning_exotic_filenames/run.sh | 45 + br/tests/lightning_fail_fast/chunk.toml | 6 + .../data/fail_fast-schema-create.sql | 1 + .../data/fail_fast.tb-schema.sql | 1 + .../data/fail_fast.tb.0.csv | 5 + .../data/fail_fast.tb.1.csv | 5 + .../data/fail_fast.tb.2.csv | 5 + br/tests/lightning_fail_fast/engine.toml | 9 + br/tests/lightning_fail_fast/run.sh | 34 + br/tests/lightning_file_routing/config.toml | 43 + br/tests/lightning_file_routing/run.sh | 68 + br/tests/lightning_gcs/config.toml | 0 br/tests/lightning_gcs/run.sh | 102 + .../lightning_generated_columns/config.toml | 0 .../data/gencol-schema-create.sql | 1 + .../data/gencol.nested-schema.sql | 7 + .../data/gencol.nested.0.sql | 1 + .../data/gencol.various_types-schema.sql | 19 + .../data/gencol.various_types.0.sql | 1 + br/tests/lightning_generated_columns/run.sh | 71 + br/tests/lightning_incremental/config.toml | 0 .../data/incr-schema-create.sql | 1 + .../data/incr.auto_random-schema.sql | 5 + .../data/incr.auto_random.sql | 5 + .../data/incr.pk_auto_inc-schema.sql | 4 + .../data/incr.pk_auto_inc.sql | 5 + .../data/incr.rowid_uk_inc-schema.sql | 4 + .../data/incr.rowid_uk_inc.sql | 5 + .../data/incr.uk_auto_inc-schema.sql | 4 + .../data/incr.uk_auto_inc.sql | 5 + .../data1/incr-schema-create.sql | 1 + .../data1/incr.auto_random-schema.sql | 5 + .../data1/incr.auto_random.sql | 5 + .../data1/incr.pk_auto_inc-schema.sql | 4 + .../data1/incr.pk_auto_inc.sql | 5 + .../data1/incr.rowid_uk_inc-schema.sql | 4 + .../data1/incr.rowid_uk_inc.sql | 5 + .../data1/incr.uk_auto_inc-schema.sql | 4 + .../data1/incr.uk_auto_inc.sql | 5 + br/tests/lightning_incremental/run.sh | 76 + br/tests/lightning_issue_282/config.toml | 0 .../data/issue282-schema-create.sql | 1 + .../data/issue282.t_access3-schema.sql | 4 + .../data/issue282.t_access3.sql | 5 + br/tests/lightning_issue_282/run.sh | 29 + br/tests/lightning_issue_410/config.toml | 0 
.../data/issue410-schema-create.sql | 1 + .../data/issue410.row_flow_d-schema.sql | 28 + .../data/issue410.row_flow_d.0.csv | 2 + br/tests/lightning_issue_410/run.sh | 30 + br/tests/lightning_issue_519/config.toml | 8 + .../data/issue519-schema-create.sql | 1 + .../data/issue519.t-schema.sql | 1 + .../lightning_issue_519/data/issue519.t.csv | 10 + br/tests/lightning_issue_519/run.sh | 25 + br/tests/lightning_local_backend/config.toml | 13 + .../data/cpeng-schema-create.sql | 1 + .../data/cpeng.a-schema.sql | 1 + .../data/cpeng.a.1.sql | 1 + .../data/cpeng.a.2.sql | 1 + .../data/cpeng.a.3.sql | 1 + .../data/cpeng.b-schema.sql | 1 + .../data/cpeng.b.1.sql | 4 + .../data/cpeng.b.2.sql | 1 + br/tests/lightning_local_backend/file.toml | 17 + br/tests/lightning_local_backend/mysql.toml | 17 + br/tests/lightning_local_backend/run.sh | 126 + br/tests/lightning_new_collation/config.toml | 0 br/tests/lightning_new_collation/run.sh | 59 + .../tidb-new-collation.toml | 11 + br/tests/lightning_no_schema/config.toml | 2 + .../lightning_no_schema/data/noschema.t.sql | 15 + br/tests/lightning_no_schema/run.sh | 27 + br/tests/lightning_parquet/config.toml | 10 + .../data/export_info_ci.json | 1 + .../export_tables_info_ci_from_1_to_9.json | 1 + .../data/test/test.customer/_SUCCESS | 0 ...51c-49ba-bdf3-5864befff481-c000.gz.parquet | Bin 0 -> 15251 bytes .../data/test/test.district/_SUCCESS | 0 ...765-432a-8f18-cd17b4607f2a-c000.gz.parquet | Bin 0 -> 3626 bytes .../data/test/test.history/_SUCCESS | 0 ...169-4335-a93f-8805e02def97-c000.gz.parquet | Bin 0 -> 4133 bytes .../data/test/test.item/_SUCCESS | 0 ...c54-477f-9907-6e3eae50358b-c000.gz.parquet | Bin 0 -> 7078 bytes .../data/test/test.new_order/_SUCCESS | 0 ...629-4445-bd96-d34b6674b09d-c000.gz.parquet | Bin 0 -> 1140 bytes .../data/test/test.order_line/_SUCCESS | 0 ...f59-4ff6-b271-2e4b27ffbcf5-c000.gz.parquet | Bin 0 -> 5468 bytes .../data/test/test.orders/_SUCCESS | 0 ...2c0-4961-9bf7-32a0a04ffee5-c000.gz.parquet | Bin 0 -> 
2941 bytes .../data/test/test.stock/_SUCCESS | 0 ...034-4d65-b375-ee55aa479215-c000.gz.parquet | Bin 0 -> 15085 bytes .../data/test/test.warehouse/_SUCCESS | 0 ...d2f-4c5c-8a2f-d162bde6c360-c000.gz.parquet | Bin 0 -> 2719 bytes br/tests/lightning_parquet/db.sql | 219 + br/tests/lightning_parquet/run.sh | 52 + .../lightning_partitioned-table/config.toml | 2 + .../data/partitioned-schema-create.sql | 1 + .../data/partitioned.a-schema.sql | 1 + .../data/partitioned.a.sql | 10 + br/tests/lightning_partitioned-table/run.sh | 35 + br/tests/lightning_restore/config.toml | 5 + br/tests/lightning_restore/run.sh | 42 + br/tests/lightning_routes/config.toml | 10 + .../data/routes_a0-schema-create.sql | 1 + .../data/routes_a0.t0-schema.sql | 1 + .../lightning_routes/data/routes_a0.t0.1.sql | 1 + .../lightning_routes/data/routes_a0.t0.2.sql | 1 + .../data/routes_a0.t1-schema.sql | 1 + .../lightning_routes/data/routes_a0.t1.1.sql | 1 + .../data/routes_a1-schema-create.sql | 1 + .../data/routes_a1.s1-schema.sql | 1 + .../lightning_routes/data/routes_a1.s1.sql | 1 + .../data/routes_a1.t2-schema.sql | 1 + .../lightning_routes/data/routes_a1.t2.sql | 1 + br/tests/lightning_routes/run.sh | 25 + br/tests/lightning_row-format-v2/config.toml | 2 + .../data/rowformatv2-schema-create.sql | 1 + .../data/rowformatv2.t1-schema.sql | 258 + .../data/rowformatv2.t1.1.sql | 51 + br/tests/lightning_row-format-v2/run.sh | 28 + br/tests/lightning_s3/config.toml | 0 br/tests/lightning_s3/run.sh | 68 + br/tests/lightning_shard_rowid/config.toml | 0 .../data/shard_rowid-schema-create.sql | 1 + .../data/shard_rowid.shr-schema.sql | 7 + .../data/shard_rowid.shr.0.sql | 17 + br/tests/lightning_shard_rowid/run.sh | 50 + .../lightning_source_linkfile/config.toml | 1 + br/tests/lightning_source_linkfile/run.sh | 60 + .../data/sqlmodedb-schema-create.sql | 1 + .../data/sqlmodedb.t-schema.sql | 7 + .../lightning_sqlmode/data/sqlmodedb.t.1.sql | 6 + br/tests/lightning_sqlmode/off.toml | 5 + 
br/tests/lightning_sqlmode/on.toml | 5 + br/tests/lightning_sqlmode/run.sh | 57 + .../data/dup-schema-create.sql | 1 + .../data/dup.dup-schema.sql | 1 + .../data/dup.dup.sql | 3 + .../lightning_tidb_duplicate_data/error.toml | 3 + .../lightning_tidb_duplicate_data/ignore.toml | 3 + .../replace.toml | 2 + br/tests/lightning_tidb_duplicate_data/run.sh | 64 + br/tests/lightning_tidb_rowid/config.toml | 0 .../data/rowid-schema-create.sql | 1 + .../data/rowid.explicit_tidb_rowid-schema.sql | 1 + .../data/rowid.explicit_tidb_rowid.sql | 11 + .../data/rowid.non_pk-schema.sql | 1 + .../data/rowid.non_pk.sql | 11 + .../data/rowid.non_pk_auto_inc-schema.sql | 9 + .../data/rowid.non_pk_auto_inc.sql | 26 + .../data/rowid.pre_rebase-schema.sql | 1 + .../data/rowid.pre_rebase.sql | 1 + .../data/rowid.specific_auto_inc-schema.sql | 1 + .../data/rowid.specific_auto_inc.sql | 7 + br/tests/lightning_tidb_rowid/run.sh | 75 + br/tests/lightning_tiflash/config.toml | 2 + br/tests/lightning_tiflash/run.sh | 71 + .../lightning_too_many_columns/config.toml | 0 .../data/too_many_columns-schema-create.sql | 1 + .../data/too_many_columns.t-schema.sql | 259 + .../data/too_many_columns.t.0.csv | 2 + br/tests/lightning_too_many_columns/run.sh | 30 + br/tests/lightning_tool_135/config.toml | 2 + .../data/tool_135-schema-create.sql | 1 + .../data/tool_135.bar1-schema.sql | 1 + .../lightning_tool_135/data/tool_135.bar1.sql | 10 + .../data/tool_135.bar2-schema.sql | 1 + .../lightning_tool_135/data/tool_135.bar2.sql | 10 + .../data/tool_135.bar3-schema.sql | 1 + .../lightning_tool_135/data/tool_135.bar3.sql | 10 + .../data/tool_135.bar4-schema.sql | 1 + .../lightning_tool_135/data/tool_135.bar4.sql | 10 + .../data/tool_135.bar5-schema.sql | 1 + .../lightning_tool_135/data/tool_135.bar5.sql | 10 + br/tests/lightning_tool_135/run.sh | 94 + br/tests/lightning_tool_1420/config.toml | 2 + .../data/EE1420-schema-create.sql | 1 + .../data/EE1420.pt_role-schema.sql | 3 + .../data/EE1420.pt_role.sql | 1 + 
br/tests/lightning_tool_1420/run.sh | 24 + br/tests/lightning_tool_1472/config.toml | 2 + .../data/EE1472-schema-create.sql | 1 + .../data/EE1472.notpk-schema.sql | 5 + .../data/EE1472.notpk.1.sql | 8 + .../data/EE1472.notpk.2.sql | 8 + .../data/EE1472.pk-schema.sql | 3 + .../lightning_tool_1472/data/EE1472.pk.1.sql | 8 + .../lightning_tool_1472/data/EE1472.pk.2.sql | 8 + br/tests/lightning_tool_1472/run.sh | 31 + br/tests/lightning_tool_241/config.toml | 8 + .../data/qyjc-schema-create.sql | 1 + .../data/qyjc.q_alarm_group-schema.sql | 16 + .../data/qyjc.q_alarm_group.sql | 0 .../data/qyjc.q_alarm_message_log-schema.sql | 16 + .../data/qyjc.q_alarm_message_log.sql | 0 .../data/qyjc.q_alarm_receiver-schema.sql | 12 + .../data/qyjc.q_config-schema.sql | 9 + .../data/qyjc.q_fish_event-schema.sql | 15 + .../data/qyjc.q_fish_event.sql | 85 + .../qyjc.q_report_circular_data-schema.sql | 16 + .../data/qyjc.q_report_desc-schema.sql | 16 + .../data/qyjc.q_report_summary-schema.sql | 14 + .../data/qyjc.q_system_update-schema.sql | 7 + .../data/qyjc.q_system_update.sql | 0 .../data/qyjc.q_user_log-schema.sql | 12 + .../data/qyjc.q_user_log.sql | 0 br/tests/lightning_tool_241/run.sh | 45 + .../lightning_unused_config_keys/config.toml | 6 + .../data/unused_config_keys-schema-create.sql | 1 + br/tests/lightning_unused_config_keys/run.sh | 29 + br/tests/lightning_various_types/config.toml | 0 .../data/vt-schema-create.sql | 1 + .../data/vt.bigint-schema.sql | 3 + .../data/vt.bigint.0.sql | 8 + .../data/vt.binary-schema.sql | 4 + .../data/vt.binary.sql | 52 + .../data/vt.bit-schema.sql | 4 + .../lightning_various_types/data/vt.bit.sql | 6 + .../data/vt.char-schema.sql | 4 + .../lightning_various_types/data/vt.char.sql | 51 + .../data/vt.datetime-schema.sql | 7 + .../data/vt.datetime.sql | 72 + .../data/vt.decimal-schema.sql | 4 + .../data/vt.decimal.sql | 51 + .../data/vt.double-schema.sql | 4 + .../data/vt.double.sql | 42 + .../data/vt.empty_strings-schema.sql | 4 + 
.../data/vt.empty_strings.sql | 1 + .../data/vt.enum-set-schema.sql | 26 + .../data/vt.enum-set.sql | 36 + .../data/vt.json-schema.sql | 4 + .../lightning_various_types/data/vt.json.sql | 106 + .../data/vt.precise_types-schema.sql | 6 + .../data/vt.precise_types.sql | 6 + br/tests/lightning_various_types/run.sh | 117 + br/tests/lightning_view/config.toml | 1 + .../lightning_view/data/db0-schema-create.sql | 1 + .../data/db0.v2-schema-view.sql | 13 + .../lightning_view/data/db0.v2-schema.sql | 1 + .../lightning_view/data/db1-schema-create.sql | 1 + .../lightning_view/data/db1.tbl-schema.sql | 1 + br/tests/lightning_view/data/db1.tbl.0.sql | 5 + .../data/db1.v1-schema-view.sql | 13 + .../lightning_view/data/db1.v1-schema.sql | 1 + br/tests/lightning_view/run.sh | 36 + br/tests/run.sh | 64 + br/tests/run_compatible.sh | 46 + br/tests/up.sh | 188 + br/tidb-lightning.toml | 289 + br/tools/.gitignore | 1 + br/tools/Makefile | 34 + br/tools/check-errdoc.sh | 21 + br/tools/go.mod | 16 + br/tools/go.sum | 707 ++ br/tools/go_mod_guard.go | 35 + br/web/README.md | 93 + br/web/docs/InfoPage.png | Bin 0 -> 48339 bytes br/web/docs/ProgressPage.png | Bin 0 -> 41010 bytes br/web/docs/TableProgressPage.png | Bin 0 -> 47424 bytes br/web/docs/api.yaml | 521 + br/web/go.mod | 5 + br/web/go.sum | 1 + br/web/package-lock.json | 4867 ++++++++ br/web/package.json | 31 + br/web/public/index.html | 14 + br/web/src/ChunksProgressPanel.tsx | 114 + br/web/src/DottedProgress.tsx | 72 + br/web/src/EnginesProgressPanel.tsx | 72 + br/web/src/ErrorButton.tsx | 85 + br/web/src/InfoButton.tsx | 39 + br/web/src/InfoPage.tsx | 112 + br/web/src/MoveTaskButton.tsx | 123 + br/web/src/PauseButton.tsx | 33 + br/web/src/ProgressPage.tsx | 116 + br/web/src/RefreshButton.tsx | 124 + br/web/src/TableProgressCard.tsx | 113 + br/web/src/TableProgressPage.tsx | 73 + br/web/src/TaskButton.tsx | 139 + br/web/src/TitleBar.tsx | 95 + br/web/src/TitleLink.tsx | 33 + br/web/src/api.ts | 268 + br/web/src/index.tsx | 179 
+ br/web/src/json-bigint.d.ts | 17 + br/web/tsconfig.json | 19 + br/web/webpack.config.js | 38 + errors.toml | 210 + executor/brie.go | 6 +- go.mod | 67 +- go.sum | 280 +- session/session_test.go | 4 +- tools/check/go.mod | 1 + tools/check/go.sum | 2 + 878 files changed, 126185 insertions(+), 78 deletions(-) create mode 100644 br/.codecov.yml create mode 120000 br/.dockerignore create mode 100644 br/.editorconfig create mode 100644 br/.gitattributes create mode 100644 br/.github/ISSUE_TEMPLATE/bug-report.md create mode 100644 br/.github/ISSUE_TEMPLATE/feature-request.md create mode 100644 br/.github/ISSUE_TEMPLATE/question.md create mode 100644 br/.github/challenge-bot.yml create mode 100644 br/.github/pull_request_template.md create mode 100644 br/.github/workflows/build.yml create mode 100644 br/.github/workflows/compatible_test.yml create mode 100644 br/.gitignore create mode 100644 br/.golangci.yml create mode 100644 br/COMPATIBILITY_TEST.md create mode 100644 br/CONTRIBUTING.md create mode 100644 br/LICENSE.md create mode 100644 br/README.md create mode 100644 br/SECURITY.md create mode 100644 br/cmd/br/backup.go create mode 100644 br/cmd/br/cmd.go create mode 100644 br/cmd/br/debug.go create mode 100644 br/cmd/br/main.go create mode 100644 br/cmd/br/main_test.go create mode 100644 br/cmd/br/restore.go create mode 100644 br/cmd/tidb-lightning-ctl/main.go create mode 100644 br/cmd/tidb-lightning-ctl/main_test.go create mode 100644 br/cmd/tidb-lightning/main.go create mode 100644 br/cmd/tidb-lightning/main_test.go create mode 100644 br/compatibility/backup_cluster.yaml create mode 100644 br/compatibility/credentials/application_default_credentials.json create mode 100644 br/compatibility/get_last_tags.sh create mode 100644 br/compatibility/prepare_backup.sh create mode 100644 br/compatibility/prepare_data/workload create mode 100644 br/docker-compose.yaml create mode 100644 br/docker/Dockerfile create mode 100644 br/docker/config/pd.toml create mode 100644 
br/docker/config/tidb.toml create mode 100644 br/docker/config/tikv.toml create mode 100644 br/docker/gcs.env create mode 100644 br/docker/minio.env create mode 100644 br/docs/cn/2019-08-05-new-design-of-backup-restore.md create mode 100644 br/docs/cn/2019-09-09-BR-key-rewrite-disscussion.md create mode 100644 br/docs/cn/2019-09-17-design-of-reorganize-importSST-to-TiKV.md create mode 100644 br/docs/resources/arch-of-reorganized-importer.svg create mode 100644 br/docs/resources/download-sst-diagram.svg create mode 100644 br/docs/resources/solution3-of-key-rewrite.svg create mode 100644 br/errors.toml create mode 100644 br/images/arch.svg create mode 100644 br/metrics/alertmanager/lightning.rules.yml create mode 100644 br/metrics/grafana/br.json create mode 100644 br/metrics/grafana/lightning.json create mode 100644 br/pkg/backup/check.go create mode 100644 br/pkg/backup/client.go create mode 100644 br/pkg/backup/client_test.go create mode 100644 br/pkg/backup/metrics.go create mode 100644 br/pkg/backup/push.go create mode 100644 br/pkg/backup/schema.go create mode 100644 br/pkg/backup/schema_test.go create mode 100644 br/pkg/cdclog/buffer.go create mode 100644 br/pkg/cdclog/decoder.go create mode 100644 br/pkg/cdclog/decoder_test.go create mode 100644 br/pkg/cdclog/puller.go create mode 100644 br/pkg/checksum/executor.go create mode 100644 br/pkg/checksum/executor_test.go create mode 100644 br/pkg/checksum/validate.go create mode 100644 br/pkg/conn/conn.go create mode 100644 br/pkg/conn/conn_test.go create mode 100644 br/pkg/errors/errors.go create mode 100644 br/pkg/glue/glue.go create mode 100644 br/pkg/gluetidb/glue.go create mode 100644 br/pkg/gluetikv/glue.go create mode 100644 br/pkg/gluetikv/glue_test.go create mode 100644 br/pkg/httputil/http.go create mode 100644 br/pkg/kv/checksum.go create mode 100644 br/pkg/kv/checksum_test.go create mode 100644 br/pkg/kv/kv.go create mode 100644 br/pkg/kv/kv_test.go create mode 100644 br/pkg/kv/session.go create mode 
100644 br/pkg/kv/session_test.go create mode 100644 br/pkg/lightning/backend/backend.go create mode 100644 br/pkg/lightning/backend/backend_test.go create mode 100644 br/pkg/lightning/backend/importer/importer.go create mode 100644 br/pkg/lightning/backend/importer/importer_test.go create mode 100644 br/pkg/lightning/backend/kv/allocator.go create mode 100644 br/pkg/lightning/backend/kv/kv2sql.go create mode 100644 br/pkg/lightning/backend/kv/session.go create mode 100644 br/pkg/lightning/backend/kv/session_test.go create mode 100644 br/pkg/lightning/backend/kv/sql2kv.go create mode 100644 br/pkg/lightning/backend/kv/sql2kv_test.go create mode 100644 br/pkg/lightning/backend/kv/types.go create mode 100644 br/pkg/lightning/backend/local/duplicate.go create mode 100644 br/pkg/lightning/backend/local/iterator.go create mode 100644 br/pkg/lightning/backend/local/iterator_test.go create mode 100644 br/pkg/lightning/backend/local/key_adapter.go create mode 100644 br/pkg/lightning/backend/local/key_adapter_test.go create mode 100644 br/pkg/lightning/backend/local/local.go create mode 100644 br/pkg/lightning/backend/local/local_freebsd.go create mode 100644 br/pkg/lightning/backend/local/local_test.go create mode 100644 br/pkg/lightning/backend/local/local_unix.go create mode 100644 br/pkg/lightning/backend/local/local_unix_generic.go create mode 100644 br/pkg/lightning/backend/local/local_windows.go create mode 100644 br/pkg/lightning/backend/local/localhelper.go create mode 100644 br/pkg/lightning/backend/local/localhelper_test.go create mode 100644 br/pkg/lightning/backend/noop/noop.go create mode 100644 br/pkg/lightning/backend/tidb/tidb.go create mode 100644 br/pkg/lightning/backend/tidb/tidb_test.go create mode 100644 br/pkg/lightning/checkpoints/checkpoints.go create mode 100644 br/pkg/lightning/checkpoints/checkpoints_file_test.go create mode 100644 br/pkg/lightning/checkpoints/checkpoints_sql_test.go create mode 100644 
br/pkg/lightning/checkpoints/checkpoints_test.go create mode 100644 br/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.pb.go create mode 100644 br/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.proto create mode 100644 br/pkg/lightning/checkpoints/glue_checkpoint.go create mode 100644 br/pkg/lightning/checkpoints/tidb.go create mode 100644 br/pkg/lightning/common/conn.go create mode 100644 br/pkg/lightning/common/once_error.go create mode 100644 br/pkg/lightning/common/once_error_test.go create mode 100644 br/pkg/lightning/common/pause.go create mode 100644 br/pkg/lightning/common/pause_test.go create mode 100644 br/pkg/lightning/common/security.go create mode 100644 br/pkg/lightning/common/security_test.go create mode 100644 br/pkg/lightning/common/storage.go create mode 100644 br/pkg/lightning/common/storage_test.go create mode 100644 br/pkg/lightning/common/storage_unix.go create mode 100644 br/pkg/lightning/common/storage_windows.go create mode 100644 br/pkg/lightning/common/util.go create mode 100644 br/pkg/lightning/common/util_test.go create mode 100644 br/pkg/lightning/config/bytesize.go create mode 100644 br/pkg/lightning/config/bytesize_test.go create mode 100644 br/pkg/lightning/config/config.go create mode 100644 br/pkg/lightning/config/config_test.go create mode 100644 br/pkg/lightning/config/configlist.go create mode 100644 br/pkg/lightning/config/configlist_test.go create mode 100644 br/pkg/lightning/config/const.go create mode 100644 br/pkg/lightning/config/global.go create mode 100644 br/pkg/lightning/glue/glue.go create mode 100644 br/pkg/lightning/lightning.go create mode 100644 br/pkg/lightning/lightning_test.go create mode 100644 br/pkg/lightning/log/filter.go create mode 100644 br/pkg/lightning/log/filter_test.go create mode 100644 br/pkg/lightning/log/log.go create mode 100644 br/pkg/lightning/log/log_test.go create mode 100644 br/pkg/lightning/log/testlogger.go create mode 100644 br/pkg/lightning/manual/allocator.go create 
mode 100644 br/pkg/lightning/manual/manual.go create mode 100644 br/pkg/lightning/manual/manual_nocgo.go create mode 100644 br/pkg/lightning/metric/metric.go create mode 100644 br/pkg/lightning/metric/metric_test.go create mode 100644 br/pkg/lightning/mydump/bytes.go create mode 100644 br/pkg/lightning/mydump/csv/split_large_file.csv create mode 100644 br/pkg/lightning/mydump/csv_parser.go create mode 100644 br/pkg/lightning/mydump/csv_parser_test.go create mode 100644 br/pkg/lightning/mydump/examples/metadata create mode 100644 br/pkg/lightning/mydump/examples/mocker_test-schema-create.sql create mode 100644 br/pkg/lightning/mydump/examples/mocker_test.i-schema.sql create mode 100644 br/pkg/lightning/mydump/examples/mocker_test.i.sql create mode 100644 br/pkg/lightning/mydump/examples/mocker_test.report_case_high_risk-schema.sql create mode 100644 br/pkg/lightning/mydump/examples/mocker_test.report_case_high_risk.sql create mode 100644 br/pkg/lightning/mydump/examples/mocker_test.tbl_autoid-schema.sql create mode 100644 br/pkg/lightning/mydump/examples/mocker_test.tbl_autoid.sql create mode 100644 br/pkg/lightning/mydump/examples/mocker_test.tbl_multi_index-schema.sql create mode 100644 br/pkg/lightning/mydump/examples/mocker_test.tbl_multi_index.sql create mode 100644 br/pkg/lightning/mydump/examples/test.parquet create mode 100644 br/pkg/lightning/mydump/loader.go create mode 100644 br/pkg/lightning/mydump/loader_test.go create mode 100644 br/pkg/lightning/mydump/parquet_parser.go create mode 100644 br/pkg/lightning/mydump/parquet_parser_test.go create mode 100644 br/pkg/lightning/mydump/parser.go create mode 100644 br/pkg/lightning/mydump/parser.rl create mode 100644 br/pkg/lightning/mydump/parser_generated.go create mode 100644 br/pkg/lightning/mydump/parser_test.go create mode 100644 br/pkg/lightning/mydump/reader.go create mode 100644 br/pkg/lightning/mydump/reader_test.go create mode 100644 br/pkg/lightning/mydump/region.go create mode 100644 
br/pkg/lightning/mydump/region_test.go create mode 100644 br/pkg/lightning/mydump/router.go create mode 100644 br/pkg/lightning/mydump/router_test.go create mode 100644 br/pkg/lightning/restore/check_info.go create mode 100644 br/pkg/lightning/restore/check_template.go create mode 100644 br/pkg/lightning/restore/checksum.go create mode 100644 br/pkg/lightning/restore/checksum_test.go create mode 100644 br/pkg/lightning/restore/meta_manager.go create mode 100644 br/pkg/lightning/restore/meta_manager_test.go create mode 100644 br/pkg/lightning/restore/restore.go create mode 100644 br/pkg/lightning/restore/restore_test.go create mode 100644 br/pkg/lightning/restore/table_restore.go create mode 100644 br/pkg/lightning/restore/tidb.go create mode 100644 br/pkg/lightning/restore/tidb_test.go create mode 100644 br/pkg/lightning/sigusr1_other.go create mode 100644 br/pkg/lightning/sigusr1_unix.go create mode 100644 br/pkg/lightning/tikv/tikv.go create mode 100644 br/pkg/lightning/tikv/tikv_test.go create mode 100644 br/pkg/lightning/verification/checksum.go create mode 100644 br/pkg/lightning/verification/checksum_test.go create mode 100644 br/pkg/lightning/web/progress.go create mode 100644 br/pkg/lightning/web/res.go create mode 100644 br/pkg/lightning/web/res_vfsdata.go create mode 100644 br/pkg/lightning/worker/worker.go create mode 100644 br/pkg/lightning/worker/worker_test.go create mode 100644 br/pkg/logutil/logging.go create mode 100644 br/pkg/logutil/logging_test.go create mode 100644 br/pkg/logutil/rate.go create mode 100644 br/pkg/membuf/buffer.go create mode 100644 br/pkg/membuf/buffer_test.go create mode 100644 br/pkg/metautil/metafile.go create mode 100644 br/pkg/metautil/metafile_test.go create mode 100644 br/pkg/mock/backend.go create mode 100644 br/pkg/mock/glue.go create mode 100644 br/pkg/mock/glue_checkpoint.go create mode 100644 br/pkg/mock/importer.go create mode 100644 br/pkg/mock/kv.go create mode 100644 br/pkg/mock/mock_cluster.go create mode 
100644 br/pkg/mock/mock_cluster_test.go create mode 100644 br/pkg/mock/mockid/mockid.go create mode 100644 br/pkg/mock/s3iface.go create mode 100644 br/pkg/mock/storage/storage.go create mode 100644 br/pkg/pdutil/pd.go create mode 100644 br/pkg/pdutil/pd_test.go create mode 100644 br/pkg/pdutil/utils.go create mode 100644 br/pkg/redact/redact.go create mode 100644 br/pkg/redact/redact_test.go create mode 100644 br/pkg/restore/backoff.go create mode 100644 br/pkg/restore/backoff_test.go create mode 100644 br/pkg/restore/batcher.go create mode 100644 br/pkg/restore/batcher_test.go create mode 100644 br/pkg/restore/client.go create mode 100644 br/pkg/restore/client_test.go create mode 100644 br/pkg/restore/db.go create mode 100644 br/pkg/restore/db_test.go create mode 100644 br/pkg/restore/import.go create mode 100644 br/pkg/restore/ingester.go create mode 100644 br/pkg/restore/log_client.go create mode 100644 br/pkg/restore/log_client_test.go create mode 100644 br/pkg/restore/merge.go create mode 100644 br/pkg/restore/merge_test.go create mode 100644 br/pkg/restore/pipeline_items.go create mode 100644 br/pkg/restore/range.go create mode 100644 br/pkg/restore/range_test.go create mode 100644 br/pkg/restore/split.go create mode 100755 br/pkg/restore/split_client.go create mode 100644 br/pkg/restore/split_test.go create mode 100644 br/pkg/restore/systable_restore.go create mode 100644 br/pkg/restore/util.go create mode 100644 br/pkg/restore/util_test.go create mode 100644 br/pkg/rtree/logging.go create mode 100644 br/pkg/rtree/logging_test.go create mode 100644 br/pkg/rtree/rtree.go create mode 100644 br/pkg/rtree/rtree_test.go create mode 100644 br/pkg/storage/compress.go create mode 100644 br/pkg/storage/compress_test.go create mode 100644 br/pkg/storage/flags.go create mode 100644 br/pkg/storage/gcs.go create mode 100644 br/pkg/storage/gcs_test.go create mode 100644 br/pkg/storage/local.go create mode 100644 br/pkg/storage/local_test.go create mode 100644 
br/pkg/storage/local_unix.go create mode 100644 br/pkg/storage/local_windows.go create mode 100644 br/pkg/storage/noop.go create mode 100644 br/pkg/storage/parse.go create mode 100644 br/pkg/storage/parse_test.go create mode 100644 br/pkg/storage/s3.go create mode 100644 br/pkg/storage/s3_test.go create mode 100644 br/pkg/storage/storage.go create mode 100644 br/pkg/storage/writer.go create mode 100644 br/pkg/storage/writer_test.go create mode 100644 br/pkg/summary/collector.go create mode 100644 br/pkg/summary/collector_test.go create mode 100644 br/pkg/summary/summary.go create mode 100644 br/pkg/task/backup.go create mode 100644 br/pkg/task/backup_raw.go create mode 100644 br/pkg/task/backup_test.go create mode 100644 br/pkg/task/common.go create mode 100644 br/pkg/task/common_test.go create mode 100644 br/pkg/task/restore.go create mode 100644 br/pkg/task/restore_log.go create mode 100644 br/pkg/task/restore_raw.go create mode 100644 br/pkg/task/restore_test.go create mode 100644 br/pkg/trace/tracing.go create mode 100644 br/pkg/trace/tracing_test.go create mode 100644 br/pkg/utils/dyn_pprof_other.go create mode 100644 br/pkg/utils/dyn_pprof_unix.go create mode 100644 br/pkg/utils/env.go create mode 100644 br/pkg/utils/env_test.go create mode 100644 br/pkg/utils/json.go create mode 100644 br/pkg/utils/json_test.go create mode 100644 br/pkg/utils/key.go create mode 100644 br/pkg/utils/key_test.go create mode 100644 br/pkg/utils/math.go create mode 100644 br/pkg/utils/math_test.go create mode 100644 br/pkg/utils/permission.go create mode 100644 br/pkg/utils/pprof.go create mode 100644 br/pkg/utils/progress.go create mode 100644 br/pkg/utils/progress_test.go create mode 100644 br/pkg/utils/retry.go create mode 100644 br/pkg/utils/safe_point.go create mode 100644 br/pkg/utils/safe_point_test.go create mode 100644 br/pkg/utils/schema.go create mode 100644 br/pkg/utils/schema_test.go create mode 100644 br/pkg/utils/utils_test.go create mode 100644 
br/pkg/utils/worker.go create mode 100644 br/pkg/version/build/info.go create mode 100644 br/pkg/version/build/info_test.go create mode 100644 br/pkg/version/version.go create mode 100644 br/pkg/version/version_test.go create mode 100644 br/revive.toml create mode 100644 br/tests/README.md create mode 100644 br/tests/_utils/br_tikv_outage_util create mode 100755 br/tests/_utils/check_cluster_version create mode 100755 br/tests/_utils/check_contains create mode 100755 br/tests/_utils/check_not_contains create mode 100755 br/tests/_utils/generate_certs create mode 100755 br/tests/_utils/make_tiflash_config create mode 100755 br/tests/_utils/read_result create mode 100755 br/tests/_utils/run_br create mode 100755 br/tests/_utils/run_cdc create mode 100755 br/tests/_utils/run_curl create mode 100755 br/tests/_utils/run_lightning create mode 100755 br/tests/_utils/run_lightning_ctl create mode 100755 br/tests/_utils/run_pd_ctl create mode 100644 br/tests/_utils/run_services create mode 100755 br/tests/_utils/run_sql create mode 100755 br/tests/_utils/run_sql_in_container create mode 100644 br/tests/br_300_small_tables/run.sh create mode 100644 br/tests/br_backup_empty/run.sh create mode 100644 br/tests/br_backup_version/run.sh create mode 100644 br/tests/br_case_sensitive/run.sh create mode 100755 br/tests/br_clustered_index/run.sh create mode 100755 br/tests/br_db/run.sh create mode 100755 br/tests/br_db_online/run.sh create mode 100755 br/tests/br_db_online_newkv/run.sh create mode 100755 br/tests/br_db_skip/run.sh create mode 100644 br/tests/br_debug_meta/run.sh create mode 100644 br/tests/br_debug_meta/workload create mode 100755 br/tests/br_full/run.sh create mode 100644 br/tests/br_full/workload create mode 100755 br/tests/br_full_ddl/run.sh create mode 100644 br/tests/br_full_ddl/workload create mode 100755 br/tests/br_full_index/run.sh create mode 100644 br/tests/br_full_index/workload create mode 100644 br/tests/br_gcs/oauth.go create mode 100755 
br/tests/br_gcs/run.sh create mode 100644 br/tests/br_gcs/workload create mode 100755 br/tests/br_history/run.sh create mode 100644 br/tests/br_history/workload create mode 100644 br/tests/br_incompatible_tidb_config/config/tidb-max-index-length.toml create mode 100755 br/tests/br_incompatible_tidb_config/run.sh create mode 100755 br/tests/br_incremental/run.sh create mode 100644 br/tests/br_incremental/workload create mode 100755 br/tests/br_incremental_ddl/run.sh create mode 100755 br/tests/br_incremental_index/run.sh create mode 100755 br/tests/br_incremental_only_ddl/run.sh create mode 100755 br/tests/br_incremental_same_table/run.sh create mode 100755 br/tests/br_insert_after_restore/run.sh create mode 100644 br/tests/br_key_locked/codec.go create mode 100644 br/tests/br_key_locked/locker.go create mode 100755 br/tests/br_key_locked/run.sh create mode 100644 br/tests/br_key_locked/workload create mode 100755 br/tests/br_log_restore/run.sh create mode 100644 br/tests/br_log_restore/workload create mode 100644 br/tests/br_log_test/run.sh create mode 100644 br/tests/br_log_test/workload create mode 100755 br/tests/br_move_backup/run.sh create mode 100644 br/tests/br_move_backup/workload create mode 100644 br/tests/br_other/run.sh create mode 100644 br/tests/br_range/run.sh create mode 100644 br/tests/br_rawkv/client.go create mode 100644 br/tests/br_rawkv/run.sh create mode 100755 br/tests/br_restore_TDE_enable/run.sh create mode 100644 br/tests/br_restore_TDE_enable/workload create mode 100755 br/tests/br_s3/run.sh create mode 100644 br/tests/br_s3/workload create mode 100755 br/tests/br_shuffle_leader/run.sh create mode 100644 br/tests/br_shuffle_leader/workload create mode 100755 br/tests/br_shuffle_region/run.sh create mode 100644 br/tests/br_shuffle_region/workload create mode 100755 br/tests/br_single_table/run.sh create mode 100644 br/tests/br_single_table/workload create mode 100755 br/tests/br_skip_checksum/run.sh create mode 100644 
br/tests/br_skip_checksum/workload create mode 100755 br/tests/br_small_batch_size/run.sh create mode 100644 br/tests/br_small_batch_size/workload create mode 100644 br/tests/br_split_region_fail/run.sh create mode 100644 br/tests/br_split_region_fail/workload create mode 100644 br/tests/br_systables/run.sh create mode 100644 br/tests/br_systables/workload create mode 100755 br/tests/br_table_filter/run.sh create mode 100755 br/tests/br_table_partition/prepare.sh create mode 100755 br/tests/br_table_partition/run.sh create mode 100644 br/tests/br_tiflash/run.sh create mode 100644 br/tests/br_tikv_outage/run.sh create mode 100644 br/tests/br_tikv_outage/workload create mode 100644 br/tests/br_tikv_outage2/run.sh create mode 100644 br/tests/br_tikv_outage2/workload create mode 100755 br/tests/br_views_and_sequences/run.sh create mode 100644 br/tests/br_z_gc_safepoint/gc.go create mode 100755 br/tests/br_z_gc_safepoint/run.sh create mode 100644 br/tests/br_z_gc_safepoint/workload create mode 100644 br/tests/config/importer.toml create mode 100644 br/tests/config/ipsan.cnf create mode 100644 br/tests/config/pd.toml create mode 100644 br/tests/config/restore-tikv.toml create mode 100644 br/tests/config/root.cert create mode 100644 br/tests/config/root.key create mode 100644 br/tests/config/tidb.toml create mode 100644 br/tests/config/tikv.toml create mode 100755 br/tests/docker_compatible_gcs/prepare.sh create mode 100755 br/tests/docker_compatible_gcs/run.sh create mode 100755 br/tests/docker_compatible_s3/prepare.sh create mode 100755 br/tests/docker_compatible_s3/run.sh create mode 100755 br/tests/download_tools.sh create mode 100644 br/tests/lightning_alter_random/config.toml create mode 100644 br/tests/lightning_alter_random/data/alter_random-schema-create.sql create mode 100644 br/tests/lightning_alter_random/data/alter_random.t-schema.sql create mode 100644 br/tests/lightning_alter_random/data/alter_random.t.sql create mode 100644 
br/tests/lightning_alter_random/run.sh create mode 100644 br/tests/lightning_auto_random_default/config.toml create mode 100644 br/tests/lightning_auto_random_default/data/auto_random-schema-create.sql create mode 100644 br/tests/lightning_auto_random_default/data/auto_random.t-schema.sql create mode 100644 br/tests/lightning_auto_random_default/data/auto_random.t.0.sql create mode 100644 br/tests/lightning_auto_random_default/data/auto_random.t.1.sql create mode 100644 br/tests/lightning_auto_random_default/run.sh create mode 100644 br/tests/lightning_black-white-list/config.toml create mode 100644 br/tests/lightning_black-white-list/data/firstdb-schema-create.sql create mode 100644 br/tests/lightning_black-white-list/data/firstdb.first-schema.sql create mode 100644 br/tests/lightning_black-white-list/data/firstdb.first.1.sql create mode 100644 br/tests/lightning_black-white-list/data/firstdb.first.2.sql create mode 100644 br/tests/lightning_black-white-list/data/firstdb.second-schema.sql create mode 100644 br/tests/lightning_black-white-list/data/firstdb.second.1.sql create mode 100644 br/tests/lightning_black-white-list/data/mysql-schema-create.sql create mode 100644 br/tests/lightning_black-white-list/data/mysql.testtable-schema.sql create mode 100644 br/tests/lightning_black-white-list/data/seconddb-schema-create.sql create mode 100644 br/tests/lightning_black-white-list/data/seconddb.fourth-schema.sql create mode 100644 br/tests/lightning_black-white-list/data/seconddb.fourth.1.sql create mode 100644 br/tests/lightning_black-white-list/data/seconddb.third-schema.sql create mode 100644 br/tests/lightning_black-white-list/data/seconddb.third.1.sql create mode 100644 br/tests/lightning_black-white-list/even-table-only.toml create mode 100644 br/tests/lightning_black-white-list/firstdb-only.toml create mode 100755 br/tests/lightning_black-white-list/run.sh create mode 100644 br/tests/lightning_character_sets/auto.toml create mode 100644 
br/tests/lightning_character_sets/binary.toml create mode 100644 br/tests/lightning_character_sets/gb18030.toml create mode 100644 br/tests/lightning_character_sets/gb18030/charsets-schema-create.sql create mode 100644 br/tests/lightning_character_sets/gb18030/charsets.gb18030-schema.sql create mode 100644 br/tests/lightning_character_sets/gb18030/charsets.gb18030.sql create mode 100644 br/tests/lightning_character_sets/mixed/charsets-schema-create.sql create mode 100644 br/tests/lightning_character_sets/mixed/charsets.mixed-schema.sql create mode 100644 br/tests/lightning_character_sets/mixed/charsets.mixed.sql create mode 100755 br/tests/lightning_character_sets/run.sh create mode 100644 br/tests/lightning_character_sets/utf8mb4.toml create mode 100644 br/tests/lightning_character_sets/utf8mb4/charsets-schema-create.sql create mode 100644 br/tests/lightning_character_sets/utf8mb4/charsets.utf8mb4-schema.sql create mode 100644 br/tests/lightning_character_sets/utf8mb4/charsets.utf8mb4.sql create mode 100644 br/tests/lightning_checkpoint/config.toml create mode 100755 br/tests/lightning_checkpoint/run.sh create mode 100644 br/tests/lightning_checkpoint_chunks/config.toml create mode 100644 br/tests/lightning_checkpoint_chunks/file.toml create mode 100755 br/tests/lightning_checkpoint_chunks/run.sh create mode 100644 br/tests/lightning_checkpoint_columns/config.toml create mode 100755 br/tests/lightning_checkpoint_columns/run.sh create mode 100644 br/tests/lightning_checkpoint_dirty_tableid/data/cpdt-schema-create.sql create mode 100644 br/tests/lightning_checkpoint_dirty_tableid/data/cpdt.t-schema.sql create mode 100644 br/tests/lightning_checkpoint_dirty_tableid/data/cpdt.t.sql create mode 100644 br/tests/lightning_checkpoint_dirty_tableid/file.toml create mode 100644 br/tests/lightning_checkpoint_dirty_tableid/mysql.toml create mode 100755 br/tests/lightning_checkpoint_dirty_tableid/run.sh create mode 100644 br/tests/lightning_checkpoint_engines/config.toml 
create mode 100644 br/tests/lightning_checkpoint_engines/data/cpeng-schema-create.sql create mode 100644 br/tests/lightning_checkpoint_engines/data/cpeng.a-schema.sql create mode 100644 br/tests/lightning_checkpoint_engines/data/cpeng.a.1.sql create mode 100644 br/tests/lightning_checkpoint_engines/data/cpeng.a.2.sql create mode 100644 br/tests/lightning_checkpoint_engines/data/cpeng.a.3.sql create mode 100644 br/tests/lightning_checkpoint_engines/data/cpeng.b-schema.sql create mode 100644 br/tests/lightning_checkpoint_engines/data/cpeng.b.1.sql create mode 100644 br/tests/lightning_checkpoint_engines/data/cpeng.b.2.sql create mode 100644 br/tests/lightning_checkpoint_engines/mysql.toml create mode 100755 br/tests/lightning_checkpoint_engines/run.sh create mode 100644 br/tests/lightning_checkpoint_engines_order/config.toml create mode 100644 br/tests/lightning_checkpoint_engines_order/data/disk_quota_checkpoint-schema-create.sql create mode 100644 br/tests/lightning_checkpoint_engines_order/data/disk_quota_checkpoint.t-schema.sql create mode 100644 br/tests/lightning_checkpoint_engines_order/data/disk_quota_checkpoint.t.0.sql create mode 100644 br/tests/lightning_checkpoint_engines_order/data/disk_quota_checkpoint.t.1.sql create mode 100644 br/tests/lightning_checkpoint_engines_order/data/disk_quota_checkpoint.t.2.sql create mode 100644 br/tests/lightning_checkpoint_engines_order/run.sh create mode 100644 br/tests/lightning_checkpoint_error_destroy/bad-data/cped-schema-create.sql create mode 100644 br/tests/lightning_checkpoint_error_destroy/bad-data/cped.t-schema.sql create mode 100644 br/tests/lightning_checkpoint_error_destroy/bad-data/cped.t.sql create mode 100644 br/tests/lightning_checkpoint_error_destroy/file.toml create mode 100644 br/tests/lightning_checkpoint_error_destroy/good-data/cped-schema-create.sql create mode 100644 br/tests/lightning_checkpoint_error_destroy/good-data/cped.t-schema.sql create mode 100644 
br/tests/lightning_checkpoint_error_destroy/good-data/cped.t.sql create mode 100644 br/tests/lightning_checkpoint_error_destroy/mysql.toml create mode 100755 br/tests/lightning_checkpoint_error_destroy/run.sh create mode 100644 br/tests/lightning_checkpoint_parquet/config.toml create mode 100644 br/tests/lightning_checkpoint_parquet/parquet.go create mode 100755 br/tests/lightning_checkpoint_parquet/run.sh create mode 100644 br/tests/lightning_checkpoint_timestamp/config.toml create mode 100644 br/tests/lightning_checkpoint_timestamp/data/cpts-schema-create.sql create mode 100644 br/tests/lightning_checkpoint_timestamp/data/cpts.cpts-schema.sql create mode 100644 br/tests/lightning_checkpoint_timestamp/data/cpts.cpts.1.sql create mode 100644 br/tests/lightning_checkpoint_timestamp/data/cpts.cpts.2.sql create mode 100644 br/tests/lightning_checkpoint_timestamp/mysql.toml create mode 100755 br/tests/lightning_checkpoint_timestamp/run.sh create mode 100644 br/tests/lightning_cmdline_override/config.toml create mode 100644 br/tests/lightning_cmdline_override/data/cmdline_override-schema-create.sql create mode 100644 br/tests/lightning_cmdline_override/data/cmdline_override.t-schema.sql create mode 100644 br/tests/lightning_cmdline_override/data/cmdline_override.t.sql create mode 100755 br/tests/lightning_cmdline_override/run.sh create mode 100644 br/tests/lightning_column_permutation/config.toml create mode 100644 br/tests/lightning_column_permutation/data/perm-schema-create.sql create mode 100644 br/tests/lightning_column_permutation/data/perm.test_perm-schema.sql create mode 100644 br/tests/lightning_column_permutation/data/perm.test_perm.0.csv create mode 100644 br/tests/lightning_column_permutation/run.sh create mode 100644 br/tests/lightning_common_handle/config.toml create mode 100644 br/tests/lightning_common_handle/run.sh create mode 100644 br/tests/lightning_concurrent-restore/config.toml create mode 100644 br/tests/lightning_concurrent-restore/run.sh create 
mode 100644 br/tests/lightning_csv/config.toml create mode 100644 br/tests/lightning_csv/data/csv-schema-create.sql create mode 100644 br/tests/lightning_csv/data/csv.empty_strings-schema.sql create mode 100644 br/tests/lightning_csv/data/csv.empty_strings.csv create mode 100644 br/tests/lightning_csv/data/csv.escapes-schema.sql create mode 100644 br/tests/lightning_csv/data/csv.escapes.CSV create mode 100644 br/tests/lightning_csv/data/csv.threads-schema.sql create mode 100644 br/tests/lightning_csv/data/csv.threads.csv create mode 100755 br/tests/lightning_csv/run.sh create mode 100644 br/tests/lightning_default-columns/config.toml create mode 100644 br/tests/lightning_default-columns/data/defcol-schema-create.sql create mode 100644 br/tests/lightning_default-columns/data/defcol.t-schema.sql create mode 100644 br/tests/lightning_default-columns/data/defcol.t.1.sql create mode 100644 br/tests/lightning_default-columns/data/defcol.t.2.sql create mode 100644 br/tests/lightning_default-columns/data/defcol.u-schema.sql create mode 100644 br/tests/lightning_default-columns/data/defcol.u.1.sql create mode 100755 br/tests/lightning_default-columns/run.sh create mode 100644 br/tests/lightning_disk_quota/config.toml create mode 100644 br/tests/lightning_disk_quota/data/disk_quota-schema-create.sql create mode 100644 br/tests/lightning_disk_quota/data/disk_quota.t-schema.sql create mode 100644 br/tests/lightning_disk_quota/data/disk_quota.t.0.sql create mode 100644 br/tests/lightning_disk_quota/data/disk_quota.t.1.sql create mode 100644 br/tests/lightning_disk_quota/data/disk_quota.t.2.sql create mode 100644 br/tests/lightning_disk_quota/data/disk_quota.t.3.sql create mode 100644 br/tests/lightning_disk_quota/run.sh create mode 100644 br/tests/lightning_duplicate_detection/config1.toml create mode 100644 br/tests/lightning_duplicate_detection/config2.toml create mode 100644 br/tests/lightning_duplicate_detection/data/dup_detect-schema-create.sql create mode 100644 
br/tests/lightning_duplicate_detection/data/dup_detect.ta-schema.sql create mode 100644 br/tests/lightning_duplicate_detection/data/dup_detect.ta.0.sql create mode 100644 br/tests/lightning_duplicate_detection/data/dup_detect.ta.1.sql create mode 100644 br/tests/lightning_duplicate_detection/data/dup_detect.tb-schema.sql create mode 100644 br/tests/lightning_duplicate_detection/data/dup_detect.tb.0.sql create mode 100644 br/tests/lightning_duplicate_detection/data/dup_detect.tb.1.sql create mode 100644 br/tests/lightning_duplicate_detection/data/dup_detect.tc-schema.sql create mode 100644 br/tests/lightning_duplicate_detection/data/dup_detect.tc.0.sql create mode 100644 br/tests/lightning_duplicate_detection/data/dup_detect.tc.1.sql create mode 100644 br/tests/lightning_duplicate_detection/data/dup_detect.td-schema.sql create mode 100644 br/tests/lightning_duplicate_detection/data/dup_detect.td.0.sql create mode 100644 br/tests/lightning_duplicate_detection/data/dup_detect.td.1.sql create mode 100644 br/tests/lightning_duplicate_detection/data/dup_detect.te-schema.sql create mode 100644 br/tests/lightning_duplicate_detection/data/dup_detect.te.0.sql create mode 100644 br/tests/lightning_duplicate_detection/data/dup_detect.te.1.sql create mode 100644 br/tests/lightning_duplicate_detection/data/dup_detect.tf-schema.sql create mode 100644 br/tests/lightning_duplicate_detection/data/dup_detect.tf.0.sql create mode 100644 br/tests/lightning_duplicate_detection/data/dup_detect.tf.1.sql create mode 100644 br/tests/lightning_duplicate_detection/run.sh create mode 100644 br/tests/lightning_error_summary/config.toml create mode 100644 br/tests/lightning_error_summary/data/error_summary-schema-create.sql create mode 100644 br/tests/lightning_error_summary/data/error_summary.a-schema.sql create mode 100644 br/tests/lightning_error_summary/data/error_summary.a.sql create mode 100644 br/tests/lightning_error_summary/data/error_summary.b-schema.sql create mode 100644 
br/tests/lightning_error_summary/data/error_summary.b.sql create mode 100644 br/tests/lightning_error_summary/data/error_summary.c-schema.sql create mode 100644 br/tests/lightning_error_summary/data/error_summary.c.sql create mode 100755 br/tests/lightning_error_summary/run.sh create mode 100644 br/tests/lightning_examples/1.toml create mode 100644 br/tests/lightning_examples/131072.toml create mode 100644 br/tests/lightning_examples/512.toml create mode 100755 br/tests/lightning_examples/run.sh create mode 100644 br/tests/lightning_exotic_filenames/config.toml create mode 100644 br/tests/lightning_exotic_filenames/data/xfn-schema-create.sql create mode 100644 br/tests/lightning_exotic_filenames/data/xfn.etn-schema.sql create mode 100644 br/tests/lightning_exotic_filenames/data/xfn.etn.sql create mode 100644 br/tests/lightning_exotic_filenames/data/zwk-schema-create.sql create mode 100644 br/tests/lightning_exotic_filenames/data/zwk.zwb-schema.sql create mode 100644 br/tests/lightning_exotic_filenames/data/zwk.zwb.sql create mode 100755 br/tests/lightning_exotic_filenames/run.sh create mode 100644 br/tests/lightning_fail_fast/chunk.toml create mode 100644 br/tests/lightning_fail_fast/data/fail_fast-schema-create.sql create mode 100644 br/tests/lightning_fail_fast/data/fail_fast.tb-schema.sql create mode 100644 br/tests/lightning_fail_fast/data/fail_fast.tb.0.csv create mode 100644 br/tests/lightning_fail_fast/data/fail_fast.tb.1.csv create mode 100644 br/tests/lightning_fail_fast/data/fail_fast.tb.2.csv create mode 100644 br/tests/lightning_fail_fast/engine.toml create mode 100755 br/tests/lightning_fail_fast/run.sh create mode 100644 br/tests/lightning_file_routing/config.toml create mode 100755 br/tests/lightning_file_routing/run.sh create mode 100644 br/tests/lightning_gcs/config.toml create mode 100644 br/tests/lightning_gcs/run.sh create mode 100644 br/tests/lightning_generated_columns/config.toml create mode 100644 
br/tests/lightning_generated_columns/data/gencol-schema-create.sql create mode 100644 br/tests/lightning_generated_columns/data/gencol.nested-schema.sql create mode 100644 br/tests/lightning_generated_columns/data/gencol.nested.0.sql create mode 100644 br/tests/lightning_generated_columns/data/gencol.various_types-schema.sql create mode 100644 br/tests/lightning_generated_columns/data/gencol.various_types.0.sql create mode 100644 br/tests/lightning_generated_columns/run.sh create mode 100644 br/tests/lightning_incremental/config.toml create mode 100644 br/tests/lightning_incremental/data/incr-schema-create.sql create mode 100644 br/tests/lightning_incremental/data/incr.auto_random-schema.sql create mode 100644 br/tests/lightning_incremental/data/incr.auto_random.sql create mode 100644 br/tests/lightning_incremental/data/incr.pk_auto_inc-schema.sql create mode 100644 br/tests/lightning_incremental/data/incr.pk_auto_inc.sql create mode 100644 br/tests/lightning_incremental/data/incr.rowid_uk_inc-schema.sql create mode 100644 br/tests/lightning_incremental/data/incr.rowid_uk_inc.sql create mode 100644 br/tests/lightning_incremental/data/incr.uk_auto_inc-schema.sql create mode 100644 br/tests/lightning_incremental/data/incr.uk_auto_inc.sql create mode 100644 br/tests/lightning_incremental/data1/incr-schema-create.sql create mode 100644 br/tests/lightning_incremental/data1/incr.auto_random-schema.sql create mode 100644 br/tests/lightning_incremental/data1/incr.auto_random.sql create mode 100644 br/tests/lightning_incremental/data1/incr.pk_auto_inc-schema.sql create mode 100644 br/tests/lightning_incremental/data1/incr.pk_auto_inc.sql create mode 100644 br/tests/lightning_incremental/data1/incr.rowid_uk_inc-schema.sql create mode 100644 br/tests/lightning_incremental/data1/incr.rowid_uk_inc.sql create mode 100644 br/tests/lightning_incremental/data1/incr.uk_auto_inc-schema.sql create mode 100644 br/tests/lightning_incremental/data1/incr.uk_auto_inc.sql create mode 100644 
br/tests/lightning_incremental/run.sh create mode 100644 br/tests/lightning_issue_282/config.toml create mode 100644 br/tests/lightning_issue_282/data/issue282-schema-create.sql create mode 100644 br/tests/lightning_issue_282/data/issue282.t_access3-schema.sql create mode 100644 br/tests/lightning_issue_282/data/issue282.t_access3.sql create mode 100644 br/tests/lightning_issue_282/run.sh create mode 100644 br/tests/lightning_issue_410/config.toml create mode 100644 br/tests/lightning_issue_410/data/issue410-schema-create.sql create mode 100644 br/tests/lightning_issue_410/data/issue410.row_flow_d-schema.sql create mode 100644 br/tests/lightning_issue_410/data/issue410.row_flow_d.0.csv create mode 100644 br/tests/lightning_issue_410/run.sh create mode 100644 br/tests/lightning_issue_519/config.toml create mode 100644 br/tests/lightning_issue_519/data/issue519-schema-create.sql create mode 100644 br/tests/lightning_issue_519/data/issue519.t-schema.sql create mode 100644 br/tests/lightning_issue_519/data/issue519.t.csv create mode 100755 br/tests/lightning_issue_519/run.sh create mode 100644 br/tests/lightning_local_backend/config.toml create mode 100644 br/tests/lightning_local_backend/data/cpeng-schema-create.sql create mode 100644 br/tests/lightning_local_backend/data/cpeng.a-schema.sql create mode 100644 br/tests/lightning_local_backend/data/cpeng.a.1.sql create mode 100644 br/tests/lightning_local_backend/data/cpeng.a.2.sql create mode 100644 br/tests/lightning_local_backend/data/cpeng.a.3.sql create mode 100644 br/tests/lightning_local_backend/data/cpeng.b-schema.sql create mode 100644 br/tests/lightning_local_backend/data/cpeng.b.1.sql create mode 100644 br/tests/lightning_local_backend/data/cpeng.b.2.sql create mode 100644 br/tests/lightning_local_backend/file.toml create mode 100644 br/tests/lightning_local_backend/mysql.toml create mode 100755 br/tests/lightning_local_backend/run.sh create mode 100644 br/tests/lightning_new_collation/config.toml create mode 
100644 br/tests/lightning_new_collation/run.sh create mode 100644 br/tests/lightning_new_collation/tidb-new-collation.toml create mode 100644 br/tests/lightning_no_schema/config.toml create mode 100644 br/tests/lightning_no_schema/data/noschema.t.sql create mode 100644 br/tests/lightning_no_schema/run.sh create mode 100644 br/tests/lightning_parquet/config.toml create mode 100644 br/tests/lightning_parquet/data/export_info_ci.json create mode 100644 br/tests/lightning_parquet/data/export_tables_info_ci_from_1_to_9.json create mode 100644 br/tests/lightning_parquet/data/test/test.customer/_SUCCESS create mode 100644 br/tests/lightning_parquet/data/test/test.customer/part-00000-c3744aeb-351c-49ba-bdf3-5864befff481-c000.gz.parquet create mode 100644 br/tests/lightning_parquet/data/test/test.district/_SUCCESS create mode 100644 br/tests/lightning_parquet/data/test/test.district/part-00000-f61f4bef-6765-432a-8f18-cd17b4607f2a-c000.gz.parquet create mode 100644 br/tests/lightning_parquet/data/test/test.history/_SUCCESS create mode 100644 br/tests/lightning_parquet/data/test/test.history/part-00000-8cf0e97a-1169-4335-a93f-8805e02def97-c000.gz.parquet create mode 100644 br/tests/lightning_parquet/data/test/test.item/_SUCCESS create mode 100644 br/tests/lightning_parquet/data/test/test.item/part-00000-8905ded8-4c54-477f-9907-6e3eae50358b-c000.gz.parquet create mode 100644 br/tests/lightning_parquet/data/test/test.new_order/_SUCCESS create mode 100644 br/tests/lightning_parquet/data/test/test.new_order/part-00000-d868200e-b629-4445-bd96-d34b6674b09d-c000.gz.parquet create mode 100644 br/tests/lightning_parquet/data/test/test.order_line/_SUCCESS create mode 100644 br/tests/lightning_parquet/data/test/test.order_line/part-00000-e36fecd0-3f59-4ff6-b271-2e4b27ffbcf5-c000.gz.parquet create mode 100644 br/tests/lightning_parquet/data/test/test.orders/_SUCCESS create mode 100644 
br/tests/lightning_parquet/data/test/test.orders/part-00000-b45481f5-92c0-4961-9bf7-32a0a04ffee5-c000.gz.parquet create mode 100644 br/tests/lightning_parquet/data/test/test.stock/_SUCCESS create mode 100644 br/tests/lightning_parquet/data/test/test.stock/part-00000-eef45943-3034-4d65-b375-ee55aa479215-c000.gz.parquet create mode 100644 br/tests/lightning_parquet/data/test/test.warehouse/_SUCCESS create mode 100644 br/tests/lightning_parquet/data/test/test.warehouse/part-00000-c6c33252-4d2f-4c5c-8a2f-d162bde6c360-c000.gz.parquet create mode 100644 br/tests/lightning_parquet/db.sql create mode 100755 br/tests/lightning_parquet/run.sh create mode 100644 br/tests/lightning_partitioned-table/config.toml create mode 100644 br/tests/lightning_partitioned-table/data/partitioned-schema-create.sql create mode 100644 br/tests/lightning_partitioned-table/data/partitioned.a-schema.sql create mode 100644 br/tests/lightning_partitioned-table/data/partitioned.a.sql create mode 100755 br/tests/lightning_partitioned-table/run.sh create mode 100644 br/tests/lightning_restore/config.toml create mode 100755 br/tests/lightning_restore/run.sh create mode 100644 br/tests/lightning_routes/config.toml create mode 100644 br/tests/lightning_routes/data/routes_a0-schema-create.sql create mode 100644 br/tests/lightning_routes/data/routes_a0.t0-schema.sql create mode 100644 br/tests/lightning_routes/data/routes_a0.t0.1.sql create mode 100644 br/tests/lightning_routes/data/routes_a0.t0.2.sql create mode 100644 br/tests/lightning_routes/data/routes_a0.t1-schema.sql create mode 100644 br/tests/lightning_routes/data/routes_a0.t1.1.sql create mode 100644 br/tests/lightning_routes/data/routes_a1-schema-create.sql create mode 100644 br/tests/lightning_routes/data/routes_a1.s1-schema.sql create mode 100644 br/tests/lightning_routes/data/routes_a1.s1.sql create mode 100644 br/tests/lightning_routes/data/routes_a1.t2-schema.sql create mode 100644 br/tests/lightning_routes/data/routes_a1.t2.sql create 
mode 100755 br/tests/lightning_routes/run.sh create mode 100644 br/tests/lightning_row-format-v2/config.toml create mode 100644 br/tests/lightning_row-format-v2/data/rowformatv2-schema-create.sql create mode 100644 br/tests/lightning_row-format-v2/data/rowformatv2.t1-schema.sql create mode 100644 br/tests/lightning_row-format-v2/data/rowformatv2.t1.1.sql create mode 100644 br/tests/lightning_row-format-v2/run.sh create mode 100644 br/tests/lightning_s3/config.toml create mode 100755 br/tests/lightning_s3/run.sh create mode 100644 br/tests/lightning_shard_rowid/config.toml create mode 100644 br/tests/lightning_shard_rowid/data/shard_rowid-schema-create.sql create mode 100644 br/tests/lightning_shard_rowid/data/shard_rowid.shr-schema.sql create mode 100644 br/tests/lightning_shard_rowid/data/shard_rowid.shr.0.sql create mode 100644 br/tests/lightning_shard_rowid/run.sh create mode 100644 br/tests/lightning_source_linkfile/config.toml create mode 100644 br/tests/lightning_source_linkfile/run.sh create mode 100644 br/tests/lightning_sqlmode/data/sqlmodedb-schema-create.sql create mode 100644 br/tests/lightning_sqlmode/data/sqlmodedb.t-schema.sql create mode 100644 br/tests/lightning_sqlmode/data/sqlmodedb.t.1.sql create mode 100644 br/tests/lightning_sqlmode/off.toml create mode 100644 br/tests/lightning_sqlmode/on.toml create mode 100755 br/tests/lightning_sqlmode/run.sh create mode 100644 br/tests/lightning_tidb_duplicate_data/data/dup-schema-create.sql create mode 100644 br/tests/lightning_tidb_duplicate_data/data/dup.dup-schema.sql create mode 100644 br/tests/lightning_tidb_duplicate_data/data/dup.dup.sql create mode 100644 br/tests/lightning_tidb_duplicate_data/error.toml create mode 100644 br/tests/lightning_tidb_duplicate_data/ignore.toml create mode 100644 br/tests/lightning_tidb_duplicate_data/replace.toml create mode 100644 br/tests/lightning_tidb_duplicate_data/run.sh create mode 100644 br/tests/lightning_tidb_rowid/config.toml create mode 100644 
br/tests/lightning_tidb_rowid/data/rowid-schema-create.sql create mode 100644 br/tests/lightning_tidb_rowid/data/rowid.explicit_tidb_rowid-schema.sql create mode 100644 br/tests/lightning_tidb_rowid/data/rowid.explicit_tidb_rowid.sql create mode 100644 br/tests/lightning_tidb_rowid/data/rowid.non_pk-schema.sql create mode 100644 br/tests/lightning_tidb_rowid/data/rowid.non_pk.sql create mode 100644 br/tests/lightning_tidb_rowid/data/rowid.non_pk_auto_inc-schema.sql create mode 100644 br/tests/lightning_tidb_rowid/data/rowid.non_pk_auto_inc.sql create mode 100644 br/tests/lightning_tidb_rowid/data/rowid.pre_rebase-schema.sql create mode 100644 br/tests/lightning_tidb_rowid/data/rowid.pre_rebase.sql create mode 100644 br/tests/lightning_tidb_rowid/data/rowid.specific_auto_inc-schema.sql create mode 100644 br/tests/lightning_tidb_rowid/data/rowid.specific_auto_inc.sql create mode 100755 br/tests/lightning_tidb_rowid/run.sh create mode 100644 br/tests/lightning_tiflash/config.toml create mode 100644 br/tests/lightning_tiflash/run.sh create mode 100644 br/tests/lightning_too_many_columns/config.toml create mode 100644 br/tests/lightning_too_many_columns/data/too_many_columns-schema-create.sql create mode 100644 br/tests/lightning_too_many_columns/data/too_many_columns.t-schema.sql create mode 100644 br/tests/lightning_too_many_columns/data/too_many_columns.t.0.csv create mode 100644 br/tests/lightning_too_many_columns/run.sh create mode 100644 br/tests/lightning_tool_135/config.toml create mode 100644 br/tests/lightning_tool_135/data/tool_135-schema-create.sql create mode 100644 br/tests/lightning_tool_135/data/tool_135.bar1-schema.sql create mode 100644 br/tests/lightning_tool_135/data/tool_135.bar1.sql create mode 100644 br/tests/lightning_tool_135/data/tool_135.bar2-schema.sql create mode 100644 br/tests/lightning_tool_135/data/tool_135.bar2.sql create mode 100644 br/tests/lightning_tool_135/data/tool_135.bar3-schema.sql create mode 100644 
br/tests/lightning_tool_135/data/tool_135.bar3.sql create mode 100644 br/tests/lightning_tool_135/data/tool_135.bar4-schema.sql create mode 100644 br/tests/lightning_tool_135/data/tool_135.bar4.sql create mode 100644 br/tests/lightning_tool_135/data/tool_135.bar5-schema.sql create mode 100644 br/tests/lightning_tool_135/data/tool_135.bar5.sql create mode 100755 br/tests/lightning_tool_135/run.sh create mode 100644 br/tests/lightning_tool_1420/config.toml create mode 100644 br/tests/lightning_tool_1420/data/EE1420-schema-create.sql create mode 100644 br/tests/lightning_tool_1420/data/EE1420.pt_role-schema.sql create mode 100644 br/tests/lightning_tool_1420/data/EE1420.pt_role.sql create mode 100755 br/tests/lightning_tool_1420/run.sh create mode 100644 br/tests/lightning_tool_1472/config.toml create mode 100644 br/tests/lightning_tool_1472/data/EE1472-schema-create.sql create mode 100644 br/tests/lightning_tool_1472/data/EE1472.notpk-schema.sql create mode 100644 br/tests/lightning_tool_1472/data/EE1472.notpk.1.sql create mode 100644 br/tests/lightning_tool_1472/data/EE1472.notpk.2.sql create mode 100644 br/tests/lightning_tool_1472/data/EE1472.pk-schema.sql create mode 100644 br/tests/lightning_tool_1472/data/EE1472.pk.1.sql create mode 100644 br/tests/lightning_tool_1472/data/EE1472.pk.2.sql create mode 100755 br/tests/lightning_tool_1472/run.sh create mode 100644 br/tests/lightning_tool_241/config.toml create mode 100644 br/tests/lightning_tool_241/data/qyjc-schema-create.sql create mode 100644 br/tests/lightning_tool_241/data/qyjc.q_alarm_group-schema.sql create mode 100644 br/tests/lightning_tool_241/data/qyjc.q_alarm_group.sql create mode 100644 br/tests/lightning_tool_241/data/qyjc.q_alarm_message_log-schema.sql create mode 100644 br/tests/lightning_tool_241/data/qyjc.q_alarm_message_log.sql create mode 100644 br/tests/lightning_tool_241/data/qyjc.q_alarm_receiver-schema.sql create mode 100644 br/tests/lightning_tool_241/data/qyjc.q_config-schema.sql create 
mode 100644 br/tests/lightning_tool_241/data/qyjc.q_fish_event-schema.sql create mode 100644 br/tests/lightning_tool_241/data/qyjc.q_fish_event.sql create mode 100644 br/tests/lightning_tool_241/data/qyjc.q_report_circular_data-schema.sql create mode 100644 br/tests/lightning_tool_241/data/qyjc.q_report_desc-schema.sql create mode 100644 br/tests/lightning_tool_241/data/qyjc.q_report_summary-schema.sql create mode 100644 br/tests/lightning_tool_241/data/qyjc.q_system_update-schema.sql create mode 100644 br/tests/lightning_tool_241/data/qyjc.q_system_update.sql create mode 100644 br/tests/lightning_tool_241/data/qyjc.q_user_log-schema.sql create mode 100644 br/tests/lightning_tool_241/data/qyjc.q_user_log.sql create mode 100755 br/tests/lightning_tool_241/run.sh create mode 100644 br/tests/lightning_unused_config_keys/config.toml create mode 100644 br/tests/lightning_unused_config_keys/data/unused_config_keys-schema-create.sql create mode 100755 br/tests/lightning_unused_config_keys/run.sh create mode 100644 br/tests/lightning_various_types/config.toml create mode 100644 br/tests/lightning_various_types/data/vt-schema-create.sql create mode 100644 br/tests/lightning_various_types/data/vt.bigint-schema.sql create mode 100644 br/tests/lightning_various_types/data/vt.bigint.0.sql create mode 100644 br/tests/lightning_various_types/data/vt.binary-schema.sql create mode 100644 br/tests/lightning_various_types/data/vt.binary.sql create mode 100644 br/tests/lightning_various_types/data/vt.bit-schema.sql create mode 100644 br/tests/lightning_various_types/data/vt.bit.sql create mode 100644 br/tests/lightning_various_types/data/vt.char-schema.sql create mode 100644 br/tests/lightning_various_types/data/vt.char.sql create mode 100644 br/tests/lightning_various_types/data/vt.datetime-schema.sql create mode 100644 br/tests/lightning_various_types/data/vt.datetime.sql create mode 100644 br/tests/lightning_various_types/data/vt.decimal-schema.sql create mode 100644 
br/tests/lightning_various_types/data/vt.decimal.sql create mode 100644 br/tests/lightning_various_types/data/vt.double-schema.sql create mode 100644 br/tests/lightning_various_types/data/vt.double.sql create mode 100644 br/tests/lightning_various_types/data/vt.empty_strings-schema.sql create mode 100644 br/tests/lightning_various_types/data/vt.empty_strings.sql create mode 100644 br/tests/lightning_various_types/data/vt.enum-set-schema.sql create mode 100644 br/tests/lightning_various_types/data/vt.enum-set.sql create mode 100644 br/tests/lightning_various_types/data/vt.json-schema.sql create mode 100644 br/tests/lightning_various_types/data/vt.json.sql create mode 100644 br/tests/lightning_various_types/data/vt.precise_types-schema.sql create mode 100644 br/tests/lightning_various_types/data/vt.precise_types.sql create mode 100755 br/tests/lightning_various_types/run.sh create mode 100644 br/tests/lightning_view/config.toml create mode 100644 br/tests/lightning_view/data/db0-schema-create.sql create mode 100644 br/tests/lightning_view/data/db0.v2-schema-view.sql create mode 100644 br/tests/lightning_view/data/db0.v2-schema.sql create mode 100644 br/tests/lightning_view/data/db1-schema-create.sql create mode 100644 br/tests/lightning_view/data/db1.tbl-schema.sql create mode 100644 br/tests/lightning_view/data/db1.tbl.0.sql create mode 100644 br/tests/lightning_view/data/db1.v1-schema-view.sql create mode 100644 br/tests/lightning_view/data/db1.v1-schema.sql create mode 100755 br/tests/lightning_view/run.sh create mode 100755 br/tests/run.sh create mode 100755 br/tests/run_compatible.sh create mode 100755 br/tests/up.sh create mode 100644 br/tidb-lightning.toml create mode 100644 br/tools/.gitignore create mode 100644 br/tools/Makefile create mode 100755 br/tools/check-errdoc.sh create mode 100644 br/tools/go.mod create mode 100644 br/tools/go.sum create mode 100644 br/tools/go_mod_guard.go create mode 100644 br/web/README.md create mode 100644 
br/web/docs/InfoPage.png create mode 100644 br/web/docs/ProgressPage.png create mode 100644 br/web/docs/TableProgressPage.png create mode 100644 br/web/docs/api.yaml create mode 100644 br/web/go.mod create mode 100644 br/web/go.sum create mode 100644 br/web/package-lock.json create mode 100644 br/web/package.json create mode 100644 br/web/public/index.html create mode 100644 br/web/src/ChunksProgressPanel.tsx create mode 100644 br/web/src/DottedProgress.tsx create mode 100644 br/web/src/EnginesProgressPanel.tsx create mode 100644 br/web/src/ErrorButton.tsx create mode 100644 br/web/src/InfoButton.tsx create mode 100644 br/web/src/InfoPage.tsx create mode 100644 br/web/src/MoveTaskButton.tsx create mode 100644 br/web/src/PauseButton.tsx create mode 100644 br/web/src/ProgressPage.tsx create mode 100644 br/web/src/RefreshButton.tsx create mode 100644 br/web/src/TableProgressCard.tsx create mode 100644 br/web/src/TableProgressPage.tsx create mode 100644 br/web/src/TaskButton.tsx create mode 100644 br/web/src/TitleBar.tsx create mode 100644 br/web/src/TitleLink.tsx create mode 100644 br/web/src/api.ts create mode 100644 br/web/src/index.tsx create mode 100644 br/web/src/json-bigint.d.ts create mode 100644 br/web/tsconfig.json create mode 100644 br/web/webpack.config.js diff --git a/Makefile b/Makefile index 370ccb0d433df..ed4f4a3dec136 100644 --- a/Makefile +++ b/Makefile @@ -45,8 +45,8 @@ check-static: tools/bin/golangci-lint tools/bin/golangci-lint run -v $$($(PACKAGE_DIRECTORIES)) unconvert:tools/bin/unconvert - @echo "unconvert check" - @GO111MODULE=on tools/bin/unconvert ./... + @echo "unconvert check(skip check the genenrated or copied code in lightning)" + @GO111MODULE=on tools/bin/unconvert $(UNCONVERT_PACKAGES) gogenerate: @echo "go generate ./..." 
@@ -81,7 +81,9 @@ test: test_part_1 test_part_2 test_part_1: checklist explaintest -test_part_2: gotest gogenerate +test_part_2: gotest gogenerate br_unit_test + +test_part_br: br_unit_test br_integration_test explaintest: server_check @cd cmd/explaintest && ./run-tests.sh -s ../../bin/tidb-server @@ -103,7 +105,7 @@ ifeq ("$(TRAVIS_COVERAGE)", "1") @export log_level=info; \ $(OVERALLS) -project=github.com/pingcap/tidb \ -covermode=count \ - -ignore='.git,vendor,cmd,docs,tests,LICENSES' \ + -ignore='.git,br,vendor,cmd,docs,tests,LICENSES' \ -concurrency=4 \ -- -coverpkg=./... \ || { $(FAILPOINT_DISABLE); exit 1; } @@ -204,6 +206,10 @@ tools/bin/errdoc-gen: tools/check/go.mod tools/bin/golangci-lint: curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh| sh -s -- -b ./tools/bin v1.41.1 +tools/bin/vfsgendev: tools/check/go.mod + cd tools/check; \ + $(GO) build -o ../bin/vfsgendev github.com/shurcooL/vfsgen/cmd/vfsgendev + # Usage: # # $ make vectorized-bench VB_FILE=Time VB_FUNC=builtinCurrentDateSig @@ -230,3 +236,97 @@ endif bench-daily: cd ./session && \ go test -run TestBenchDaily --date `git log -n1 --date=unix --pretty=format:%cd` --commit `git log -n1 --pretty=format:%h` --outfile $(TO) + +build_tools: build_br build_lightning build_lightning-ctl + +br_web: + @cd br/web && npm install && npm run build + +build_br: + CGO_ENABLED=1 $(GOBUILD) $(RACE_FLAG) -ldflags '$(TOOL_LDFLAGS) $(CHECK_FLAG)' -o $(BR_BIN) br/cmd/br/*.go + +build_lightning_for_web: + CGO_ENABLED=1 $(GOBUILD) -tags dev $(RACE_FLAG) -ldflags '$(TOOL_LDFLAGS) $(CHECK_FLAG)' -o $(LIGHTNING_BIN) br/cmd/tidb-lightning/main.go + +build_lightning: + CGO_ENABLED=1 $(GOBUILD) $(RACE_FLAG) -ldflags '$(TOOL_LDFLAGS) $(CHECK_FLAG)' -o $(LIGHTNING_BIN) br/cmd/tidb-lightning/main.go + +build_lightning-ctl: + CGO_ENABLED=1 $(GOBUILD) $(RACE_FLAG) -ldflags '$(TOOL_LDFLAGS) $(CHECK_FLAG)' -o $(LIGHTNING_CTL_BIN) br/cmd/tidb-lightning-ctl/main.go + +build_for_br_integration_test: + 
@make failpoint-enable + ($(GOTEST) -c -cover -covermode=count \ + -coverpkg=github.com/pingcap/tidb/br/... \ + -o $(BR_BIN).test \ + github.com/pingcap/tidb/br/cmd/br && \ + $(GOTEST) -c -cover -covermode=count \ + -coverpkg=github.com/pingcap/tidb/br/... \ + -o $(LIGHTNING_BIN).test \ + github.com/pingcap/tidb/br/cmd/tidb-lightning && \ + $(GOTEST) -c -cover -covermode=count \ + -coverpkg=github.com/pingcap/tidb/br/... \ + -o $(LIGHTNING_CTL_BIN).test \ + github.com/pingcap/tidb/br/cmd/tidb-lightning-ctl && \ + $(GOBUILD) $(RACE_FLAG) -o bin/locker br/tests/br_key_locked/*.go && \ + $(GOBUILD) $(RACE_FLAG) -o bin/gc br/tests/br_z_gc_safepoint/*.go && \ + $(GOBUILD) $(RACE_FLAG) -o bin/oauth br/tests/br_gcs/*.go && \ + $(GOBUILD) $(RACE_FLAG) -o bin/rawkv br/tests/br_rawkv/*.go && \ + $(GOBUILD) $(RACE_FLAG) -o bin/parquet_gen br/tests/lightning_checkpoint_parquet/*.go \ + ) || (make failpoint-disable && exit 1) + @make failpoint-disable + +br_unit_test: export ARGS=$$($(BR_PACKAGES)) +br_unit_test: + @make failpoint-enable + $(GOTEST) $(RACE_FLAG) -tags leak $(ARGS) || ( make failpoint-disable && exit 1 ) + @make failpoint-disable + +br_integration_test: br_bins build_br build_for_br_integration_test + @cd br && tests/run.sh + +br_compatibility_test_prepare: + @cd br && tests/run_compatible.sh prepare + +br_compatibility_test: + @cd br && tests/run_compatible.sh run + +# There is no FreeBSD environment for GitHub actions. So cross-compile on Linux +# but that doesn't work with CGO_ENABLED=1, so disable cgo. The reason to have +# cgo enabled on regular builds is performance. 
+ifeq ("$(GOOS)", "freebsd") + GOBUILD = CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '$(LDFLAGS)' +endif + +br_coverage: + tools/bin/gocovmerge "$(TEST_DIR)"/cov.* | grep -vE ".*.pb.go|.*__failpoint_binding__.go" > "$(TEST_DIR)/all_cov.out" +ifeq ("$(JenkinsCI)", "1") + tools/bin/goveralls -coverprofile=$(TEST_DIR)/all_cov.out -service=jenkins-ci -repotoken $(COVERALLS_TOKEN) +else + go tool cover -html "$(TEST_DIR)/all_cov.out" -o "$(TEST_DIR)/all_cov.html" + grep -F ' $@ + @rm tmp_parser.go + +data_parsers: tools/bin/vfsgendev br/pkg/lightning/mydump/parser_generated.go br_web + PATH="$(GOPATH)/bin":"$(PATH)":"$(TOOLS)" protoc -I. -I"$(GOPATH)/src" br/pkg/lightning/checkpoints/checkpointspb/file_checkpoints.proto --gogofaster_out=. + tools/bin/vfsgendev -source='"github.com/pingcap/tidb/br/pkg/lightning/web".Res' && mv res_vfsdata.go br/pkg/lightning/web/ diff --git a/Makefile.common b/Makefile.common index 8885dc6c3a979..255c89b432979 100644 --- a/Makefile.common +++ b/Makefile.common @@ -43,10 +43,12 @@ endif ARCH := "`uname -s`" LINUX := "Linux" MAC := "Darwin" -PACKAGE_LIST := go list ./...| grep -vE "cmd|github.com\/pingcap\/tidb\/tests" +PACKAGE_LIST := go list ./...| grep -vE "cmd|github.com\/pingcap\/tidb\/tests|github.com\/pingcap\/tidb\/br" PACKAGES ?= $$($(PACKAGE_LIST)) PACKAGE_DIRECTORIES := $(PACKAGE_LIST) | sed 's|github.com/pingcap/$(PROJECT)/||' FILES := $$(find $$($(PACKAGE_DIRECTORIES)) -name "*.go") +UNCONVERT_PACKAGES_LIST := go list ./...| grep -vE "lightning\/checkpoints|lightning\/manual|lightning\/common" +UNCONVERT_PACKAGES := $$($(UNCONVERT_PACKAGES_LIST)) FAILPOINT_ENABLE := $$(find $$PWD/ -type d | grep -vE "(\.git|tools)" | xargs tools/bin/failpoint-ctl enable) FAILPOINT_DISABLE := $$(find $$PWD/ -type d | grep -vE "(\.git|tools)" | xargs tools/bin/failpoint-ctl disable) @@ -78,3 +80,31 @@ CHECK_FLAG = ifeq ("$(WITH_CHECK)", "1") CHECK_FLAG = $(TEST_LDFLAGS) endif + +BR_PKG := github.com/pingcap/tidb/br +BR_PACKAGES := 
go list ./...| grep "github.com\/pingcap\/tidb\/br" +LIGHTNING_BIN := bin/tidb-lightning +LIGHTNING_CTL_BIN := bin/tidb-lightning-ctl +BR_BIN := bin/br +TEST_DIR := /tmp/backup_restore_test + +TOOL_RELEASE_VERSION = +ifeq ($(TOOL_RELEASE_VERSION),) + TOOL_RELEASE_VERSION := v4.0.0-dev + release_version_regex := ^v4\..*$$ + release_branch_regex := "^release-[0-9]\.[0-9].*$$|^HEAD$$|^.*/*tags/v[0-9]\.[0-9]\..*$$" + ifneq ($(shell git rev-parse --abbrev-ref HEAD | egrep $(release_branch_regex)),) + # If we are in release branch, try to use tag version. + ifneq ($(shell git describe --tags --dirty | egrep $(release_version_regex)),) + TOOL_RELEASE_VERSION := $(shell git describe --tags --dirty) + endif + else ifneq ($(shell git status --porcelain),) + # Add -dirty if the working tree is dirty for non release branch. + TOOL_RELEASE_VERSION := $(TOOL_RELEASE_VERSION)-dirty + endif +endif + +TOOL_LDFLAGS += -X "$(BR_PKG)/pkg/version/build.ReleaseVersion=$(TOOL_RELEASE_VERSION)" +TOOL_LDFLAGS += -X "$(BR_PKG)/pkg/version/build.BuildTS=$(shell date -u '+%Y-%m-%d %I:%M:%S')" +TOOL_LDFLAGS += -X "$(BR_PKG)/pkg/version/build.GitHash=$(shell git rev-parse HEAD)" +TOOL_LDFLAGS += -X "$(BR_PKG)/pkg/version/build.GitBranch=$(shell git rev-parse --abbrev-ref HEAD)" diff --git a/br/.codecov.yml b/br/.codecov.yml new file mode 100644 index 0000000000000..9fc8064faee5c --- /dev/null +++ b/br/.codecov.yml @@ -0,0 +1,22 @@ +codecov: + require_ci_to_pass: yes + +comment: + layout: "reach, diff, flags, files" + behavior: default + require_changes: false # if true: only post the comment if coverage changes + require_base: no # [yes :: must have a base report to post] + require_head: yes # [yes :: must have a head report to post] + branches: # branch names that can post comment + - "master" + +coverage: + status: + project: + default: + # Allow the coverage to drop by 3% + target: 85% + threshold: 3% + branches: + - master + patch: off diff --git a/br/.dockerignore b/br/.dockerignore new 
file mode 120000 index 0000000000000..3e4e48b0b5fe6 --- /dev/null +++ b/br/.dockerignore @@ -0,0 +1 @@ +.gitignore \ No newline at end of file diff --git a/br/.editorconfig b/br/.editorconfig new file mode 100644 index 0000000000000..43c6a002cce45 --- /dev/null +++ b/br/.editorconfig @@ -0,0 +1,10 @@ +[*] +end_of_line = lf +insert_final_newline = true +charset = utf-8 + +# tab_size = 4 spaces +[*.go] +indent_style = tab +indent_size = 4 +trim_trailing_whitespace = true diff --git a/br/.gitattributes b/br/.gitattributes new file mode 100644 index 0000000000000..ba35fa1000ef6 --- /dev/null +++ b/br/.gitattributes @@ -0,0 +1 @@ +*_generated.go linguist-generated=true diff --git a/br/.github/ISSUE_TEMPLATE/bug-report.md b/br/.github/ISSUE_TEMPLATE/bug-report.md new file mode 100644 index 0000000000000..555efb841bd26 --- /dev/null +++ b/br/.github/ISSUE_TEMPLATE/bug-report.md @@ -0,0 +1,45 @@ +--- +name: "🐛 Bug Report" +about: Something isn't working as expected +title: '' +labels: 'type/bug ' +--- + +Please answer these questions before submitting your issue. Thanks! + +1. What did you do? +If possible, provide a recipe for reproducing the error. + + +2. What did you expect to see? + + + +3. What did you see instead? + + + +4. What version of BR and TiDB/TiKV/PD are you using? + + + +5. Operation logs + - Please upload `br.log` for BR if possible + - Please upload `tidb-lightning.log` for TiDB-Lightning if possible + - Please upload `tikv-importer.log` from TiKV-Importer if possible + - Other interesting logs + + +6. Configuration of the cluster and the task + - `tidb-lightning.toml` for TiDB-Lightning if possible + - `tikv-importer.toml` for TiKV-Importer if possible + - `topology.yml` if deployed by TiUP + + +7. 
Screenshot/exported-PDF of Grafana dashboard or metrics' graph in Prometheus if possible diff --git a/br/.github/ISSUE_TEMPLATE/feature-request.md b/br/.github/ISSUE_TEMPLATE/feature-request.md new file mode 100644 index 0000000000000..ed6b4c5b0bf7c --- /dev/null +++ b/br/.github/ISSUE_TEMPLATE/feature-request.md @@ -0,0 +1,19 @@ +--- +name: "🚀 Feature Request" +about: I have a suggestion +labels: 'type/feature-request' +--- + +## Feature Request + +### Describe your feature request related problem: + + +### Describe the feature you'd like: + + +### Describe alternatives you've considered: + + +### Teachability, Documentation, Adoption, Migration Strategy: + diff --git a/br/.github/ISSUE_TEMPLATE/question.md b/br/.github/ISSUE_TEMPLATE/question.md new file mode 100644 index 0000000000000..23a8118377288 --- /dev/null +++ b/br/.github/ISSUE_TEMPLATE/question.md @@ -0,0 +1,24 @@ +--- +name: "\U0001F914 Question" +labels: "type/question" +about: Usage question that isn't answered in docs or discussion + +--- + +## Question + + + diff --git a/br/.github/challenge-bot.yml b/br/.github/challenge-bot.yml new file mode 100644 index 0000000000000..15d2f38ece965 --- /dev/null +++ b/br/.github/challenge-bot.yml @@ -0,0 +1 @@ +defaultSigLabel: sig/migrate diff --git a/br/.github/pull_request_template.md b/br/.github/pull_request_template.md new file mode 100644 index 0000000000000..7f64aa17286f2 --- /dev/null +++ b/br/.github/pull_request_template.md @@ -0,0 +1,42 @@ + + +### What problem does this PR solve? + + +### What is changed and how it works? 
+ + +### Check List + +Tests + + - Unit test + - Integration test + - Manual test (add detailed scripts or steps below) + - No code + +Code changes + + - Has exported function/method change + - Has exported variable/fields change + - Has interface methods change + - Has persistent data change + +Side effects + + - Possible performance regression + - Increased code complexity + - Breaking backward compatibility + +Related changes + + - Need to cherry-pick to the release branch + - Need to update the documentation + +### Release note + + - + + diff --git a/br/.github/workflows/build.yml b/br/.github/workflows/build.yml new file mode 100644 index 0000000000000..472def46ae2a0 --- /dev/null +++ b/br/.github/workflows/build.yml @@ -0,0 +1,68 @@ +name: build +on: + push: + branches: + - master + - 'release-[0-9].[0-9]*' + paths-ignore: + - '**.html' + - '**.md' + - 'CNAME' + - 'LICENSE' + - 'docs/**' + - 'tests/**' + - 'docker/**' + - '.github/workflows/**.yml' + pull_request: + branches: + - master + - 'release-[0-9].[0-9]*' + paths-ignore: + - '**.html' + - '**.md' + - 'CNAME' + - 'LICENSE' + - 'docs/**' + - 'tests/**' + - 'docker/**' + - '.github/workflows/**.yml' + +jobs: + compile: + name: ${{ matrix.os }} / ${{ matrix.target}} + runs-on: ${{ matrix.os }} + strategy: + matrix: + include: + - os: macos-latest + target: x86_64-apple-darwin + + - os: ubuntu-latest + target: aarch64-unknown-linux-gnu + + - os: windows-latest + target: x86_64-pc-windows-msvc + steps: + - uses: actions/checkout@v2.1.0 + + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1.16 + + - name: Run build + run: make build + + compile-freebsd: + name: Compile for FreeBSD job + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2.1.0 + + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1.16 + + - name: Compile for FreeBSD + run: GOOS=freebsd make build diff --git a/br/.github/workflows/compatible_test.yml b/br/.github/workflows/compatible_test.yml new 
file mode 100644 index 0000000000000..9fbf0c52e83ac --- /dev/null +++ b/br/.github/workflows/compatible_test.yml @@ -0,0 +1,71 @@ +name: compatibility-test + +on: + push: + branches: + - master + - 'release-[0-9].[0-9]*' + paths-ignore: + - '**.html' + - '**.md' + - 'CNAME' + - 'LICENSE' + - 'docs/**' + - 'tests/**' + - 'docker/**' + - '.github/workflows/**.yml' + pull_request: + branches: + - master + - 'release-[0-9].[0-9]*' + paths-ignore: + - '**.html' + - '**.md' + - 'CNAME' + - 'LICENSE' + - 'docs/**' + - 'tests/**' + - 'docker/**' + - '.github/workflows/**.yml' + +jobs: + build: + runs-on: ubuntu-latest + timeout-minutes: 25 + steps: + + - name: Free disk space + run: | + sudo rm -rf /usr/local/lib/android + sudo rm -rf /usr/share/dotnet + docker volume prune -f + docker image prune -f + + - uses: actions/checkout@v2 + + - name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1.16 + + - name: Generate compatibility test backup data + timeout-minutes: 15 + run: sh compatibility/prepare_backup.sh + + - name: Start server + run: | + TAG=nightly PORT_SUFFIX=1 docker-compose -f compatibility/backup_cluster.yaml rm -s -v + TAG=nightly PORT_SUFFIX=1 docker-compose -f compatibility/backup_cluster.yaml build + TAG=nightly PORT_SUFFIX=1 docker-compose -f compatibility/backup_cluster.yaml up --remove-orphans -d + TAG=nightly PORT_SUFFIX=1 docker-compose -f compatibility/backup_cluster.yaml exec -T control make compatibility_test + + - name: Collect component log + if: ${{ failure() }} + run: | + tar czvf ${{ github.workspace }}/logs.tar.gz /tmp/br/docker/backup_logs/* + + - uses: actions/upload-artifact@v2 + if: ${{ failure() }} + with: + name: logs + path: ${{ github.workspace }}/logs.tar.gz diff --git a/br/.gitignore b/br/.gitignore new file mode 100644 index 0000000000000..c29d04732ce40 --- /dev/null +++ b/br/.gitignore @@ -0,0 +1,14 @@ +/br +/bin +/.idea +/docker/data/ +/docker/logs/ +*.swp +.DS_Store +/go.mod +/go.sum + +# for the web interface 
+web/node_modules/ +web/dist/ +.vscode/ diff --git a/br/.golangci.yml b/br/.golangci.yml new file mode 100644 index 0000000000000..0cb2a9b651251 --- /dev/null +++ b/br/.golangci.yml @@ -0,0 +1,12 @@ +linters-settings: + gocyclo: + min-complexity: 40 + +issues: + # Excluding configuration per-path, per-linter, per-text and per-source + exclude-rules: + # TODO Remove it. + - path: split_client.go + text: "SA1019:" + linters: + - staticcheck diff --git a/br/COMPATIBILITY_TEST.md b/br/COMPATIBILITY_TEST.md new file mode 100644 index 0000000000000..b5580835baee8 --- /dev/null +++ b/br/COMPATIBILITY_TEST.md @@ -0,0 +1,42 @@ +# Compatibility test + +## Background + +We had some incompatibility issues in the past, which made BR unable to restore backed-up data in some situations. +So we need a test workflow to check the compatibility. + +## Goal + +- Ensure backward compatibility for restoring data from the previous 3 minor versions + +## Workflow + +### Data Preparation + +This workflow needs previous backup data. To get this data, we perform the following steps + +- Run a TiDB cluster with a previous version. +- Run backup jobs with the corresponding BR version, with different storages (s3, gcs). + +Given we test for the previous 3 versions, and there are 2 different storage systems, we will produce 6 backup archives for 6 separate compatibility tests. + +### Test Content + +- Start a TiDB cluster with the nightly version. +- Build the BR binary from the current directory. + +- Use BR to restore the backup data of each previous version one by one. +- Make sure the restored data is as expected. + +### Running tests + +Start a cluster with docker-compose and build BR with the latest version. 
+ +```sh +docker-compose -f docker-compose.yaml rm -s -v && \ +docker-compose -f docker-compose.yaml build && \ +docker-compose -f docker-compose.yaml up --remove-orphans +``` + +```sh +docker-compose -f docker-compose.yaml exec control make compatibility_test +``` diff --git a/br/CONTRIBUTING.md b/br/CONTRIBUTING.md new file mode 100644 index 0000000000000..1f2846471a7a1 --- /dev/null +++ b/br/CONTRIBUTING.md @@ -0,0 +1,90 @@ +# How to contribute + +This document outlines some of the conventions on development workflow, commit +message formatting, contact points and other resources to make it easier to get +your contribution accepted. + +## Getting started + +- Fork the repository on GitHub. +- Read the README.md for build instructions. +- Play with the project, submit bugs, submit patches! + +## Building BR + +Developing BR requires: + +* [Go 1.16+](http://golang.org/doc/code.html) +* An internet connection to download the dependencies + +Simply run `make` to build the program. + +```sh +make +``` + +### Running tests + +This project contains unit tests and integration tests with coverage collection. +See [tests/README.md](./tests/README.md) for how to execute and add tests. + +### Updating dependencies + +BR uses [Go modules](https://github.com/golang/go/wiki/Modules) to manage dependencies. +To add or update a dependency: use the `go mod edit` command to change the dependency. + +## Contribution flow + +This is a rough outline of what a contributor's workflow looks like: + +- Create a topic branch from where you want to base your work. This is usually `master`. +- Make commits of logical units and add test cases if the change fixes a bug or adds new functionality. +- Run tests and make sure all the tests pass. +- Make sure your commit messages are in the proper format (see below). +- Push your changes to a topic branch in your fork of the repository. +- Submit a pull request. +- Your PR must receive LGTMs from two maintainers. 
+ +Thanks for your contributions! + +### Code style + +The coding style suggested by the Golang community is used in BR. +See the [style doc](https://github.com/golang/go/wiki/CodeReviewComments) for details. + +Please follow this style to make BR easy to review, maintain and develop. + +### Format of the Commit Message + +We follow a rough convention for commit messages that is designed to answer two +questions: what changed and why. The subject line should feature the what and +the body of the commit should describe the why. + +``` +restore: add comment for variable declaration + +Improve documentation. +``` + +The format can be described more formally as follows: + +``` +: + + + +