chore(linter): fix golangci config and some issues in tests #8669

Merged 1 commit on Feb 15, 2023
28 changes: 7 additions & 21 deletions .golangci.yml
@@ -1,20 +1,15 @@
run:
tests: false
skip-dirs:
- contrib
- compose
- graphql/bench
- graphql/e2e
- graphql/test
- graphql/testdata
- systest
- t
skip-files:
- ".*test.go$"

linters-settings:
lll:
line-length: 120
staticcheck:
checks:
- all
- '-SA1019' # it is okay to use math/rand at times.
# we should review this again when we upgrade to go 1.20

linters:
disable-all: true
@@ -26,15 +21,6 @@ linters:
- gosimple
- govet
- lll
- varcheck
- unused

issues:
# Excluding configuration per-path, per-linter, per-text and per-source
exclude-rules:
- linters:
- golint
text: "(const|var|type|method|func|struct field) .+ should be"
- linters:
- golint
text: "(method parameter|func parameter|func result) .+ should be"
- staticcheck
- goimports
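
For context on the staticcheck settings above: with checks set to all, every check runs, and '-SA1019' then switches off only the report about deprecated identifiers. Below is a minimal, hypothetical sketch (not code from this PR) of the kind of call SA1019 would otherwise flag once the project moves to Go 1.20, where math/rand seeding is deprecated:

// sa1019_example.go — illustrative only; assumes a Go 1.20+ toolchain.
package main

import (
	"fmt"
	"math/rand"
	"time"
)

func main() {
	// SA1019 would report this line: rand.Seed is deprecated as of Go 1.20.
	// With '-SA1019' in the staticcheck checks list, the report is suppressed.
	rand.Seed(time.Now().UnixNano())
	fmt.Println(rand.Intn(10))
}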
1 change: 1 addition & 0 deletions algo/packed_test.go
@@ -42,6 +42,7 @@ func TestMergeSorted1Packed(t *testing.T) {
require.Equal(t, []uint64{55}, codec.Decode(MergeSortedPacked(input), 0))
}

//nolint:unused
func printPack(t *testing.T, pack *pb.UidPack) {
for _, block := range pack.Blocks {
t.Logf("[%x]Block base: %d. Num uids: %d. Deltas: %x\n",
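
printPack is a debugging helper with no callers, which is exactly what the unused check reports; the directive above it suppresses the report for that one declaration only. A minimal sketch of the same declaration-scoped suppression, using a hypothetical helper name:

package algo

import "testing"

// dumpUids is a hypothetical debug-only helper: nothing calls it, so the
// unused check would report it without the directive below.
//nolint:unused
func dumpUids(t *testing.T, uids []uint64) {
	t.Logf("uids: %v", uids)
}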
5 changes: 3 additions & 2 deletions chunker/json_parser_test.go
@@ -14,6 +14,7 @@
* limitations under the License.
*/

//nolint:lll
package chunker

import (
@@ -1229,7 +1230,7 @@ func BenchmarkNoFacets(b *testing.B) {
// we're parsing 125 nquads at a time, so the MB/s == MNquads/s
b.SetBytes(125)
for n := 0; n < b.N; n++ {
Parse([]byte(json), SetNquads)
_, _ = Parse([]byte(json), SetNquads)
}
}

@@ -1370,6 +1371,6 @@ func BenchmarkNoFacetsFast(b *testing.B) {
// we're parsing 125 nquads at a time, so the MB/s == MNquads/s
b.SetBytes(125)
for n := 0; n < b.N; n++ {
FastParse([]byte(json), SetNquads)
_, _ = FastParse([]byte(json), SetNquads)
}
}
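
Two patterns recur in this file's changes: the //nolint:lll directive sits directly above the package clause, which golangci-lint treats as a file-wide suppression rather than a single-line one, and the benchmark loops now discard Parse's results with blank identifiers so an unchecked-error report cannot trip on them. A small self-contained sketch of both, with a made-up parser in place of the chunker API:

//nolint:lll
package chunker

import "testing"

// parseDoc stands in for a parser whose results the benchmark does not need.
func parseDoc(data []byte) (int, error) { return len(data), nil }

func BenchmarkParseDoc(b *testing.B) {
	doc := []byte(`{"name": "alice"}`)
	b.SetBytes(int64(len(doc)))
	for n := 0; n < b.N; n++ {
		// Discarding the results explicitly keeps errcheck-style linters
		// quiet while making the intent obvious in the hot loop.
		_, _ = parseDoc(doc)
	}
}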
2 changes: 1 addition & 1 deletion chunker/rdf_parser_test.go
@@ -760,7 +760,7 @@ var testNQuads = []struct {
},
// Should parse dates
{
input: `_:alice <knows> "stuff" (key1=2002-10-02T15:00:00.05Z, key2=2006-01-02T15:04:05, key3=2006-01-02T00:00:00Z) .`,
input: `_:alice <knows> "stuff" (key1=2002-10-02T15:00:00.05Z, key2=2006-01-02T15:04:05, key3=2006-01-02T00:00:00Z) .`, //nolint:lll
nq: api.NQuad{
Subject: "_:alice",
Predicate: "knows",
9 changes: 7 additions & 2 deletions codec/codec_test.go
@@ -78,7 +78,10 @@ func TestBufferUidPack(t *testing.T) {
FreePack(pack)

buf := z.NewBuffer(10<<10, "TestBufferUidPack")
defer buf.Release()
defer func() {
require.NoError(t, buf.Release())
}()

DecodeToBuffer(buf, &pb.UidPack{})
require.Equal(t, 0, buf.LenNoPadding())
require.NoError(t, buf.Release())
@@ -96,7 +99,9 @@ func TestBufferUidPack(t *testing.T) {
require.Equal(t, expected, actual)

actualbuffer := z.NewBuffer(10<<10, "TestBufferUidPack")
defer actualbuffer.Release()
defer func() {
require.NoError(t, actualbuffer.Release())
}()

DecodeToBuffer(actualbuffer, pack)
enc := EncodeFromBuffer(actualbuffer.Bytes(), 256)
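
buf.Release() returns an error, so the bare defer was dropping it silently; wrapping the call in a deferred closure lets the test assert on it. A minimal sketch of the pattern, assuming any cleanup call that returns an error (a temp file here, to keep the example self-contained):

package codec

import (
	"os"
	"testing"

	"github.com/stretchr/testify/require"
)

func TestCleanupErrorIsChecked(t *testing.T) {
	f, err := os.CreateTemp(t.TempDir(), "buf")
	require.NoError(t, err)
	// `defer f.Close()` would discard the returned error; the closure below
	// surfaces it as a test failure instead.
	defer func() {
		require.NoError(t, f.Close())
	}()

	_, err = f.WriteString("payload")
	require.NoError(t, err)
}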
4 changes: 3 additions & 1 deletion conn/node_test.go
@@ -46,7 +46,9 @@ func (n *Node) run(wg *sync.WaitGroup) {
for _, entry := range rd.CommittedEntries {
if entry.Type == raftpb.EntryConfChange {
var cc raftpb.ConfChange
cc.Unmarshal(entry.Data)
if err := cc.Unmarshal(entry.Data); err != nil {
fmt.Printf("error in unmarshalling: %v\n", err)
}
n.Raft().ApplyConfChange(cc)
} else if entry.Type == raftpb.EntryNormal {
if bytes.HasPrefix(entry.Data, []byte("hey")) {
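
Here the new error handling prints rather than asserts. A plausible reason (not stated in the PR) is that run executes on its own goroutine, and require.NoError ends in t.FailNow, which the testing package only allows from the goroutine running the test; logging keeps the check without risking that. A hedged sketch of the same shape, with a JSON stand-in for the raft ConfChange type:

package conn

import (
	"encoding/json"
	"fmt"
)

// confChange is a hypothetical stand-in for raftpb.ConfChange.
type confChange struct {
	NodeID uint64 `json:"node_id"`
}

// decodeConfChange mirrors the loop body above: report a malformed payload
// instead of aborting, since the caller runs outside the test goroutine.
func decodeConfChange(data []byte) *confChange {
	var cc confChange
	if err := json.Unmarshal(data, &cc); err != nil {
		fmt.Printf("error in unmarshalling: %v\n", err)
		return nil
	}
	return &cc
}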
6 changes: 3 additions & 3 deletions contrib/integration/testtxn/main_test.go
@@ -46,7 +46,7 @@ var s state

func TestMain(m *testing.M) {
log.SetFlags(log.LstdFlags | log.Lshortfile)
testutil.AssignUids(200)
x.CheckfNoTrace(testutil.AssignUids(200))
dg, err := testutil.DgraphClientWithGroot(testutil.SockAddr)
x.CheckfNoTrace(err)
s.dg = dg
@@ -442,7 +442,7 @@ func TestReadIndexKeySameTxn(t *testing.T) {
}

txn = s.dg.NewTxn()
defer txn.Discard(context.Background())
defer func() { require.NoError(t, txn.Discard(context.Background())) }()
q := `{ me(func: le(name, "Manish")) { uid }}`
resp, err := txn.Query(context.Background(), q)
if err != nil {
@@ -883,7 +883,7 @@ func TestConcurrentQueryMutate(t *testing.T) {
alterSchema(s.dg, "name: string .")

txn := s.dg.NewTxn()
defer txn.Discard(context.Background())
defer func() { require.NoError(t, txn.Discard(context.Background())) }()

// Do one query, so a new timestamp is assigned to the txn.
q := `{me(func: uid(0x01)) { name }}`
6 changes: 3 additions & 3 deletions dgraph/cmd/alpha/http_test.go
@@ -108,8 +108,8 @@ func queryWithGz(queryText, contentType, debug, timeout string, gzReq, gzResp bo
if gzReq {
var b bytes.Buffer
gz := gzip.NewWriter(&b)
gz.Write([]byte(queryText))
gz.Close()
_, _ = gz.Write([]byte(queryText))
_ = gz.Close()
buf = &b
} else {
buf = bytes.NewBufferString(queryText)
@@ -311,7 +311,7 @@ func runRequest(req *http.Request) (*x.QueryResWithData, []byte, *http.Response,
}

qr := new(x.QueryResWithData)
json.Unmarshal(body, qr) // Don't check error.
_ = json.Unmarshal(body, qr) // Don't check error.
if len(qr.Errors) > 0 {
return nil, nil, resp, errors.New(qr.Errors[0].Message)
}
33 changes: 17 additions & 16 deletions dgraph/cmd/alpha/run_test.go
@@ -14,6 +14,7 @@
* limitations under the License.
*/

//nolint:lll
package alpha

import (
@@ -216,7 +217,7 @@ func TestDeletePredicate(t *testing.T) {
friend: string @index(term) .
`
require.NoError(t, dropAll())
schema.ParseBytes([]byte(""), 1)
require.NoError(t, schema.ParseBytes([]byte(""), 1))
err := alterSchemaWithRetry(s1)
require.NoError(t, err)

@@ -446,7 +447,7 @@ func TestSchemaMutationIndexAdd(t *testing.T) {
`

// reset Schema
schema.ParseBytes([]byte(""), 1)
require.NoError(t, schema.ParseBytes([]byte(""), 1))
err := runMutation(m)
require.NoError(t, err)

@@ -486,7 +487,7 @@ func TestSchemaMutationIndexRemove(t *testing.T) {
`

// reset Schema
schema.ParseBytes([]byte(""), 1)
require.NoError(t, schema.ParseBytes([]byte(""), 1))
// add index to name
err := alterSchemaWithRetry(s1)
require.NoError(t, err)
@@ -530,7 +531,7 @@ func TestSchemaMutationReverseAdd(t *testing.T) {
var s = `friend: [uid] @reverse .`

// reset Schema
schema.ParseBytes([]byte(""), 1)
require.NoError(t, schema.ParseBytes([]byte(""), 1))
err := runMutation(m)
require.NoError(t, err)

@@ -574,7 +575,7 @@ func TestSchemaMutationReverseRemove(t *testing.T) {
`

// reset Schema
schema.ParseBytes([]byte(""), 1)
require.NoError(t, schema.ParseBytes([]byte(""), 1))
err := runMutation(m)
require.NoError(t, err)

@@ -621,7 +622,7 @@ func TestSchemaMutationCountAdd(t *testing.T) {
`

// reset Schema
schema.ParseBytes([]byte(""), 1)
require.NoError(t, schema.ParseBytes([]byte(""), 1))
err := runMutation(m)
require.NoError(t, err)

@@ -677,7 +678,7 @@ func TestJsonMutation(t *testing.T) {
name: string @index(exact) .
`
require.NoError(t, dropAll())
schema.ParseBytes([]byte(""), 1)
require.NoError(t, schema.ParseBytes([]byte(""), 1))
err := alterSchemaWithRetry(s1)
require.NoError(t, err)

@@ -732,7 +733,7 @@ func TestJsonMutationNumberParsing(t *testing.T) {
}
`
require.NoError(t, dropAll())
schema.ParseBytes([]byte(""), 1)
require.NoError(t, schema.ParseBytes([]byte(""), 1))
err := runJSONMutation(m1)
require.NoError(t, err)

@@ -818,7 +819,7 @@ func TestDeleteAll(t *testing.T) {
friend: [uid] @reverse .
name: string @index(term) .
`
schema.ParseBytes([]byte(""), 1)
require.NoError(t, schema.ParseBytes([]byte(""), 1))
err := alterSchemaWithRetry(s1)
require.NoError(t, err)

@@ -926,7 +927,7 @@ func TestSchemaMutation4Error(t *testing.T) {
age:int .
`
// reset Schema
schema.ParseBytes([]byte(""), 1)
require.NoError(t, schema.ParseBytes([]byte(""), 1))
err := alterSchemaWithRetry(m)
require.NoError(t, err)

@@ -957,7 +958,7 @@ func TestSchemaMutation5Error(t *testing.T) {
friends: [uid] .
`
// reset Schema
schema.ParseBytes([]byte(""), 1)
require.NoError(t, schema.ParseBytes([]byte(""), 1))
err := alterSchemaWithRetry(m)
require.NoError(t, err)

@@ -980,7 +981,7 @@ func TestSchemaMutation5Error(t *testing.T) {

// A basic sanity check. We will do more extensive testing for multiple values in query.
func TestMultipleValues(t *testing.T) {
schema.ParseBytes([]byte(""), 1)
require.NoError(t, schema.ParseBytes([]byte(""), 1))
m := `
occupations: [string] .
`
@@ -1011,7 +1012,7 @@ func TestMultipleValues(t *testing.T) {

func TestListTypeSchemaChange(t *testing.T) {
require.NoError(t, dropAll())
schema.ParseBytes([]byte(""), 1)
require.NoError(t, schema.ParseBytes([]byte(""), 1))
m := `
occupations: [string] @index(term) .
`
@@ -1106,7 +1107,7 @@ func TestDeleteAllSP2(t *testing.T) {
}
`
require.NoError(t, dropAll())
schema.ParseBytes([]byte(""), 1)
require.NoError(t, schema.ParseBytes([]byte(""), 1))
err := alterSchemaWithRetry(s)
require.NoError(t, err)

@@ -1385,7 +1386,7 @@ func TestGrpcCompressionSupport(t *testing.T) {
require.NoError(t, err)

dc := dgo.NewDgraphClient(api.NewDgraphClient(conn))
dc.LoginIntoNamespace(context.Background(), x.GrootId, "password", x.GalaxyNamespace)
require.NoError(t, dc.LoginIntoNamespace(context.Background(), x.GrootId, "password", x.GalaxyNamespace))
q := `schema {}`
tx := dc.NewTxn()
_, err = tx.Query(context.Background(), q)
@@ -1503,7 +1504,7 @@ func TestIPStringParsing(t *testing.T) {
}

func TestJSONQueryWithVariables(t *testing.T) {
schema.ParseBytes([]byte(""), 1)
require.NoError(t, schema.ParseBytes([]byte(""), 1))
m := `
user_id: string @index(exact) @upsert .
user_name: string @index(hash) .
8 changes: 4 additions & 4 deletions dgraph/cmd/alpha/upsert_test.go
@@ -2774,7 +2774,7 @@ func TestUpsertMultiValueJson(t *testing.T) {
// delete color for employess of company1 and set color for employees of company2
m3 := `
{
"query": "{user1(func: eq(works_for, \"company1\")) {c1 as uid} user2(func: eq(works_for, \"company2\")) {c2 as uid}}",
"query": "{u1(func: eq(works_for, \"company1\")) {c1 as uid} u2(func: eq(works_for, \"company2\")) {c2 as uid}}",
"mutations": [
{
"delete": [
@@ -2800,8 +2800,8 @@
require.NoError(t, err)
result = QueryResult{}
require.NoError(t, json.Unmarshal(mr.data, &result))
require.Equal(t, 2, len(result.Queries["user1"]))
require.Equal(t, 2, len(result.Queries["user2"]))
require.Equal(t, 2, len(result.Queries["u1"]))
require.Equal(t, 2, len(result.Queries["u2"]))
}

func TestValVarWithBlankNode(t *testing.T) {
@@ -2866,7 +2866,7 @@ upsert {

// This test may fail sometimes because ACL token
// can get expired while the mutations is running.
func upsertTooBigTest(t *testing.T) {
func upsertTooBigTest(t *testing.T) { //nolint:unused
require.NoError(t, dropAll())

for i := 0; i < 1e6+1; {
2 changes: 1 addition & 1 deletion dgraph/cmd/live/load-json/load_test.go
@@ -190,7 +190,7 @@ func TestMain(m *testing.M) {
// instead of all over /tmp or the working directory.
tmpDir, err := os.MkdirTemp("", "test.tmp-")
x.Check(err)
os.Chdir(tmpDir)
x.Check(os.Chdir(tmpDir))
defer os.RemoveAll(tmpDir)

os.Exit(m.Run())
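
TestMain has no *testing.T to hand to require, so the previously unchecked os.Chdir is wrapped in x.Check, which aborts on a non-nil error. A minimal sketch of that setup shape, with a hypothetical check function standing in for x.Check:

package live

import (
	"os"
	"testing"
)

// check is a hypothetical stand-in for x.Check: stop the test binary on any
// setup error, since TestMain has no *testing.T to fail through.
func check(err error) {
	if err != nil {
		panic(err)
	}
}

func TestMain(m *testing.M) {
	tmpDir, err := os.MkdirTemp("", "test.tmp-")
	check(err)
	check(os.Chdir(tmpDir)) // previously unchecked; now a setup failure is loud

	code := m.Run()
	_ = os.RemoveAll(tmpDir)
	os.Exit(code)
}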
2 changes: 1 addition & 1 deletion dgraph/cmd/live/load-uids/load_test.go
@@ -399,7 +399,7 @@ func TestMain(m *testing.M) {
// instead of all over /tmp or the working directory.
tmpDir, err := os.MkdirTemp("", "test.tmp-")
x.Check(err)
os.Chdir(tmpDir)
x.Check(os.Chdir(tmpDir))
defer os.RemoveAll(tmpDir)

os.Exit(m.Run())
3 changes: 1 addition & 2 deletions dgraph/cmd/zero/zero_test.go
@@ -89,10 +89,9 @@ func TestIdBump(t *testing.T) {
}

func TestProposalKey(t *testing.T) {

id := uint64(2)
node := &node{Node: &conn.Node{Id: id}, ctx: context.Background(), closer: z.NewCloser(1)}
node.initProposalKey(node.Id)
require.NoError(t, node.initProposalKey(node.Id))

pkey := proposalKey
nodeIdFromKey := proposalKey >> 48
Expand Down
2 changes: 0 additions & 2 deletions dgraph/main.go
@@ -65,8 +65,6 @@ func main() {
humanize.IBytes(js.Active), humanize.IBytes(js.Allocated),
humanize.IBytes(js.Resident), humanize.IBytes(js.Retained))
lastAlloc = uint64(z.NumAllocBytes())
} else {
// Don't update the lastJs here.
}

runtime.ReadMemStats(&ms)
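
The else branch removed here held only a comment, plausibly the kind of empty branch that staticcheck's SA9003 check reports once checks: all is enabled; dropping the branch says the same thing with less code. An illustrative sketch of the resulting shape, with hypothetical names:

package main

// updateLast shows the shape after the cleanup: an `else { /* don't update */ }`
// branch would add nothing, and an empty branch is what SA9003 flags, so the
// else is simply omitted.
func updateLast(current, last uint64) uint64 {
	if current != last {
		last = current
	}
	return last
}

func main() {
	_ = updateLast(42, 7)
}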