storage incentives: reduce deepsource complaints (#1596)
acud committed Apr 26, 2021
1 parent 7d47906 commit 098d553
Showing 19 changed files with 50 additions and 61 deletions.
6 changes: 1 addition & 5 deletions .github/workflows/beekeeper.yml
@@ -60,11 +60,7 @@ jobs:
run: |
echo -e "127.0.0.10\tregistry.localhost" | sudo tee -a /etc/hosts
for ((i=0; i<REPLICA; i++)); do echo -e "127.0.1.$((i+1))\tbee-${i}.localhost bee-${i}-debug.localhost"; done | sudo tee -a /etc/hosts
<<<<<<< HEAD
timeout 30m ./beeinfra.sh install --local -r "${REPLICA}" --bootnode /dnsaddr/localhost --geth --k3s --pay-threshold 1000000000000
=======
timeout 30m ./beeinfra.sh install --local -r "${REPLICA}" --bootnode /dnsaddr/localhost --geth --k3s --postage
>>>>>>> a6c986c4 (api: accept postage id header and propagate it to pipeline (#886))
timeout 30m ./beeinfra.sh install --local -r "${REPLICA}" --bootnode /dnsaddr/localhost --geth --k3s --pay-threshold 1000000000000 --postage
- name: Test pingpong
id: pingpong-1
run: until ./beekeeper check pingpong --api-scheme http --debug-api-scheme http --disable-namespace --debug-api-domain localhost --api-domain localhost --node-count "${REPLICA}"; do echo "waiting for pingpong..."; sleep .3; done
10 changes: 5 additions & 5 deletions pkg/api/api.go
@@ -70,11 +70,11 @@ const (
var (
errInvalidNameOrAddress = errors.New("invalid name or bzz address")
errNoResolver = errors.New("no resolver connected")
invalidRequest = errors.New("could not validate request")
invalidContentType = errors.New("invalid content-type")
invalidContentLength = errors.New("invalid content-length")
directoryStoreError = errors.New("could not store directory")
fileStoreError = errors.New("could not store file")
errInvalidRequest = errors.New("could not validate request")
errInvalidContentType = errors.New("invalid content-type")
errInvalidContentLength = errors.New("invalid content-length")
errDirectoryStore = errors.New("could not store directory")
errFileStore = errors.New("could not store file")
errInvalidPostageBatch = errors.New("invalid postage batch id")
)
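
The renames above follow the Go convention that error variables carry an err/Err prefix, which is the kind of issue DeepSource's style analyzers raise. A minimal standalone sketch (generic package and names, not the actual api package) of how such sentinels are declared and matched:

package api // illustrative sketch only

import (
	"errors"
	"fmt"
)

// Sentinel error: unexported and prefixed with "err" so style checkers
// recognise it as an error value.
var errInvalidContentType = errors.New("invalid content-type")

// storeFile wraps the sentinel so callers can still match it.
func storeFile(contentType string) error {
	if contentType == "" {
		return fmt.Errorf("store file: %w", errInvalidContentType)
	}
	return nil
}

// isBadContentType shows the matching side: errors.Is sees through wrapping.
func isBadContentType(err error) bool {
	return errors.Is(err, errInvalidContentType)
}

Because the identifiers are unexported, the rename stays internal to the package; export_test.go (further down) re-exports them for the black-box tests.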

6 changes: 3 additions & 3 deletions pkg/api/bzz.go
@@ -43,7 +43,7 @@ func (s *server) bzzUploadHandler(w http.ResponseWriter, r *http.Request) {
if err != nil {
logger.Debugf("bzz upload: parse content type header %q: %v", contentType, err)
logger.Errorf("bzz upload: parse content type header %q", contentType)
jsonhttp.BadRequest(w, invalidContentType)
jsonhttp.BadRequest(w, errInvalidContentType)
return
}

@@ -122,7 +122,7 @@ func (s *server) fileUploadHandler(w http.ResponseWriter, r *http.Request, store
if err != nil {
logger.Debugf("bzz upload file: content length, file %q: %v", fileName, err)
logger.Errorf("bzz upload file: content length, file %q", fileName)
jsonhttp.BadRequest(w, invalidContentLength)
jsonhttp.BadRequest(w, errInvalidContentLength)
return
}
} else {
@@ -160,7 +160,7 @@ func (s *server) fileUploadHandler(w http.ResponseWriter, r *http.Request, store
if err != nil {
logger.Debugf("bzz upload file: file store, file %q: %v", fileName, err)
logger.Errorf("bzz upload file: file store, file %q", fileName)
jsonhttp.InternalServerError(w, fileStoreError)
jsonhttp.InternalServerError(w, errFileStore)
return
}

8 changes: 4 additions & 4 deletions pkg/api/dirs.go
@@ -35,15 +35,15 @@ func (s *server) dirUploadHandler(w http.ResponseWriter, r *http.Request, storer
logger := tracing.NewLoggerWithTraceID(r.Context(), s.logger)
if r.Body == http.NoBody {
logger.Error("bzz upload dir: request has no body")
jsonhttp.BadRequest(w, invalidRequest)
jsonhttp.BadRequest(w, errInvalidRequest)
return
}
contentType := r.Header.Get(contentTypeHeader)
mediaType, params, err := mime.ParseMediaType(contentType)
if err != nil {
logger.Errorf("bzz upload dir: invalid content-type")
logger.Debugf("bzz upload dir: invalid content-type err: %v", err)
jsonhttp.BadRequest(w, invalidContentType)
jsonhttp.BadRequest(w, errInvalidContentType)
return
}

@@ -55,7 +55,7 @@ func (s *server) dirUploadHandler(w http.ResponseWriter, r *http.Request, storer
dReader = &multipartReader{r: multipart.NewReader(r.Body, params["boundary"])}
default:
logger.Error("bzz upload dir: invalid content-type for directory upload")
jsonhttp.BadRequest(w, invalidContentType)
jsonhttp.BadRequest(w, errInvalidContentType)
return
}
defer r.Body.Close()
@@ -86,7 +86,7 @@ func (s *server) dirUploadHandler(w http.ResponseWriter, r *http.Request, storer
if err != nil {
logger.Debugf("bzz upload dir: store dir err: %v", err)
logger.Errorf("bzz upload dir: store dir")
jsonhttp.InternalServerError(w, directoryStoreError)
jsonhttp.InternalServerError(w, errDirectoryStore)
return
}
if created {
6 changes: 3 additions & 3 deletions pkg/api/export_test.go
@@ -23,9 +23,9 @@ type (
)

var (
InvalidContentType = invalidContentType
InvalidRequest = invalidRequest
DirectoryStoreError = directoryStoreError
InvalidContentType = errInvalidContentType
InvalidRequest = errInvalidRequest
DirectoryStoreError = errDirectoryStore
)

var (
2 changes: 1 addition & 1 deletion pkg/feeds/feed.go
@@ -102,7 +102,7 @@ func (f *Feed) Update(index Index) *Update {
}

// NewUpdate creates an update from an index, timestamp, payload and signature
func NewUpdate(f *Feed, idx Index, timestamp int64, payload []byte, sig []byte) (swarm.Chunk, error) {
func NewUpdate(f *Feed, idx Index, timestamp int64, payload, sig []byte) (swarm.Chunk, error) {
id, err := f.Update(idx).Id()
if err != nil {
return nil, fmt.Errorf("update: %w", err)
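
The NewUpdate signature change above (and the similar ones in reserve_test.go, listener.go, postagecontract/contract.go, ens.go and event_test.go further down) applies Go's parameter grouping: consecutive parameters of the same type can share one type declaration. A small illustrative sketch with made-up names:

package main

import "fmt"

// concatVerbose repeats the type for every parameter.
func concatVerbose(prefix []byte, suffix []byte) []byte {
	return append(prefix, suffix...)
}

// concat groups consecutive parameters of the same type, the form the
// style checkers prefer; behaviour is identical.
func concat(prefix, suffix []byte) []byte {
	return append(prefix, suffix...)
}

func main() {
	fmt.Println(string(concat([]byte("swarm-"), []byte("feed"))))
	fmt.Println(string(concatVerbose([]byte("swarm-"), []byte("feed"))))
}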
6 changes: 3 additions & 3 deletions pkg/feeds/testing/lookup.go
@@ -37,7 +37,7 @@ func (t *Timeout) Get(ctx context.Context, mode storage.ModeGet, addr swarm.Addr
}
return ch, err
}
time.Sleep(time.Duration(rand.Intn(10)) * time.Millisecond)
time.Sleep(time.Duration(rand.Intn(10)) * time.Millisecond) // skipcq: GSC-G404
return ch, nil
}

@@ -142,7 +142,7 @@ func TestFinderIntervals(t *testing.T, nextf func() (bool, int64), finderf func(
for j := 0; j < len(ats)-1; j++ {
at := ats[j]
diff := ats[j+1] - at
for now := at; now < ats[j+1]; now += int64(rand.Intn(int(diff)) + 1) {
for now := at; now < ats[j+1]; now += int64(rand.Intn(int(diff)) + 1) { // skipcq: GSC-G404
after := int64(0)
ch, current, next, err := finder.At(ctx, now, after)
if err != nil {
@@ -198,7 +198,7 @@ func TestFinderRandomIntervals(t *testing.T, finderf func(storage.Getter, *feeds
var i int64
var n int
nextf := func() (bool, int64) {
i += int64(rand.Intn(1<<10) + 1)
i += int64(rand.Intn(1<<10) + 1) // skipcq: GSC-G404
n++
return n == 40, i
}
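
The // skipcq: GSC-G404 comments added in this file (and in swarm/test/helper.go, postage/testing/batch.go and chainstate.go) are DeepSource's inline suppression directive: GSC-G404 mirrors gosec's G404 warning that math/rand is not a cryptographic generator, which is acceptable in test-only helpers. A hedged sketch of the pattern, with an invented helper name:

package testhelper

import (
	"math/rand"
	"time"
)

// randomJitter returns a small random delay for tests. Nothing
// security-sensitive depends on it, so the weak-RNG warning is
// silenced on the offending line only.
func randomJitter() time.Duration {
	return time.Duration(rand.Intn(10)) * time.Millisecond // skipcq: GSC-G404
}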
11 changes: 0 additions & 11 deletions pkg/localstore/pin_test.go
@@ -7,12 +7,10 @@ package localstore
import (
"context"
"errors"
"sort"
"testing"

"github.com/ethersphere/bee/pkg/shed"
"github.com/ethersphere/bee/pkg/storage"
"github.com/ethersphere/bee/pkg/swarm"
)

func TestPinCounter(t *testing.T) {
@@ -209,12 +207,3 @@ func runCountsTest(t *testing.T, name string, db *DB, r, a, push, pull, pin, gc
t.Run("gc size", newIndexGCSizeTest(db))
})
}

func chunksToSortedStrings(chunks []swarm.Chunk) []string {
var addresses []string
for _, c := range chunks {
addresses = append(addresses, c.Address().String())
}
sort.Strings(addresses)
return addresses
}
14 changes: 7 additions & 7 deletions pkg/postage/batchservice/batchservice_test.go
@@ -20,7 +20,7 @@ import (

var (
testLog = logging.New(ioutil.Discard, 0)
testErr = errors.New("fails")
errTest = errors.New("fails")
)

type mockListener struct {
@@ -40,7 +40,7 @@ func TestBatchServiceCreate(t *testing.T) {
t.Run("expect put create put error", func(t *testing.T) {
svc, _ := newTestStoreAndService(
mock.WithChainState(testChainState),
mock.WithPutErr(testErr, 0),
mock.WithPutErr(errTest, 0),
)

if err := svc.Create(
@@ -97,7 +97,7 @@ func TestBatchServiceTopUp(t *testing.T) {

t.Run("expect get error", func(t *testing.T) {
svc, _ := newTestStoreAndService(
mock.WithGetErr(testErr, 0),
mock.WithGetErr(errTest, 0),
)

if err := svc.TopUp(testBatch.ID, testNormalisedBalance); err == nil {
@@ -107,7 +107,7 @@

t.Run("expect put error", func(t *testing.T) {
svc, batchStore := newTestStoreAndService(
mock.WithPutErr(testErr, 1),
mock.WithPutErr(errTest, 1),
)
putBatch(t, batchStore, testBatch)

@@ -144,7 +144,7 @@ func TestBatchServiceUpdateDepth(t *testing.T) {

t.Run("expect get error", func(t *testing.T) {
svc, _ := newTestStoreAndService(
mock.WithGetErr(testErr, 0),
mock.WithGetErr(errTest, 0),
)

if err := svc.UpdateDepth(testBatch.ID, testNewDepth, testNormalisedBalance); err == nil {
@@ -154,7 +154,7 @@

t.Run("expect put error", func(t *testing.T) {
svc, batchStore := newTestStoreAndService(
mock.WithPutErr(testErr, 1),
mock.WithPutErr(errTest, 1),
)
putBatch(t, batchStore, testBatch)

@@ -190,7 +190,7 @@ func TestBatchServiceUpdatePrice(t *testing.T) {
t.Run("expect put error", func(t *testing.T) {
svc, batchStore := newTestStoreAndService(
mock.WithChainState(testChainState),
mock.WithPutErr(testErr, 1),
mock.WithPutErr(errTest, 1),
)
putChainState(t, batchStore, testChainState)

4 changes: 2 additions & 2 deletions pkg/postage/batchstore/reserve_test.go
@@ -850,7 +850,7 @@ type batchValueTuple struct {
value *big.Int
}

func batchValue(i int, v int) batchValueTuple {
func batchValue(i, v int) batchValueTuple {
return batchValueTuple{batchIndex: i, value: big.NewInt(int64(v))}
}

@@ -868,7 +868,7 @@ type batchDepthTuple struct {
depth uint8
}

func batchDepth(i int, d int) batchDepthTuple {
func batchDepth(i, d int) batchDepthTuple {
return batchDepthTuple{batchIndex: i, depth: uint8(d)}
}

2 changes: 1 addition & 1 deletion pkg/postage/listener/listener.go
@@ -257,7 +257,7 @@ type priceUpdateEvent struct {
}

// DiscoverAddresses returns the canonical contracts for this chainID
func DiscoverAddresses(chainID int64) (postageStamp common.Address, priceOracle common.Address, found bool) {
func DiscoverAddresses(chainID int64) (postageStamp, priceOracle common.Address, found bool) {
if chainID == 5 {
// goerli
return common.HexToAddress("0xF7a041E7e2B79ccA1975852Eb6D4c6cE52986b4a"), common.HexToAddress("0x1044534090de6f4014ece6d036C699130Bd5Df43"), true
4 changes: 2 additions & 2 deletions pkg/postage/postagecontract/contract.go
@@ -44,8 +44,8 @@ type postageContract struct {
}

func New(
owner common.Address,
postageContractAddress common.Address,
owner,
postageContractAddress,
bzzTokenAddress common.Address,
transactionService transaction.Service,
postageService postage.Service,
4 changes: 2 additions & 2 deletions pkg/postage/testing/batch.go
@@ -43,7 +43,7 @@ func MustNewAddress() []byte {

// NewBigInt will generate a new random big int (uint64 base value).
func NewBigInt() *big.Int {
return (new(big.Int)).SetUint64(rand.Uint64())
return (new(big.Int)).SetUint64(rand.Uint64()) // skipcq: GSC-G404
}

// MustNewBatch will create a new test batch. Fields that are not supplied will
@@ -52,7 +52,7 @@ func MustNewBatch(opts ...BatchOption) *postage.Batch {
b := &postage.Batch{
ID: MustNewID(),
Value: NewBigInt(),
Start: rand.Uint64(),
Start: rand.Uint64(), // skipcq: GSC-G404
Depth: defaultDepth,
}

2 changes: 1 addition & 1 deletion pkg/postage/testing/chainstate.go
@@ -14,7 +14,7 @@ import (
// NewChainState will create a new ChainState with random values.
func NewChainState() *postage.ChainState {
return &postage.ChainState{
Block: rand.Uint64(),
Block: rand.Uint64(), // skipcq: GSC-G404
Price: NewBigInt(),
Total: NewBigInt(),
}
12 changes: 10 additions & 2 deletions pkg/pss/trojan.go
@@ -8,11 +8,13 @@ import (
"bytes"
"context"
"crypto/ecdsa"
random "crypto/rand"
"encoding/binary"
"encoding/hex"
"errors"
"fmt"
random "math/rand"
"math"
"math/big"

"github.com/btcsuite/btcd/btcec"
"github.com/ethersphere/bee/pkg/bmtpool"
@@ -31,6 +33,8 @@ var (

// ErrVarLenTargets is returned when the given target list for a trojan chunk has addresses of different lengths
ErrVarLenTargets = errors.New("target list cannot have targets of different length")

maxUint32 = big.NewInt(math.MaxUint32)
)

// Topic is the type that classifies messages, allows client applications to subscribe to
@@ -202,7 +206,11 @@ func contains(col Targets, elem []byte) bool {
func mine(ctx context.Context, odd bool, f func(nonce []byte) (swarm.Chunk, error)) (swarm.Chunk, error) {
seeds := make([]uint32, 8)
for i := range seeds {
seeds[i] = random.Uint32()
b, err := random.Int(random.Reader, maxUint32)
if err != nil {
return nil, err
}
seeds[i] = uint32(b.Int64())
}
initnonce := make([]byte, 32)
for i := 0; i < 8; i++ {
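
Unlike the test files, the nonce mining in pss runs in production code, so the commit switches it from math/rand to crypto/rand instead of suppressing the warning. A standalone sketch of the same pattern (assumed helper name, not the actual pss code):

package main

import (
	random "crypto/rand"
	"fmt"
	"math"
	"math/big"
)

var maxUint32 = big.NewInt(math.MaxUint32)

// randomSeed draws a uniform value in [0, math.MaxUint32) from the OS CSPRNG.
// crypto/rand reports failures as errors, so the caller must propagate them,
// which is what the mining loop above now does.
func randomSeed() (uint32, error) {
	b, err := random.Int(random.Reader, maxUint32)
	if err != nil {
		return 0, err
	}
	return uint32(b.Int64()), nil
}

func main() {
	seed, err := randomSeed()
	if err != nil {
		panic(err)
	}
	fmt.Println("seed:", seed)
}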
2 changes: 1 addition & 1 deletion pkg/resolver/client/ens/ens.go
@@ -141,7 +141,7 @@ func (c *Client) Close() error {
return nil
}

func wrapDial(endpoint string, contractAddr string) (*ethclient.Client, *goens.Registry, error) {
func wrapDial(endpoint, contractAddr string) (*ethclient.Client, *goens.Registry, error) {
// Dial the eth client.
ethCl, err := ethclient.Dial(endpoint)
if err != nil {
2 changes: 1 addition & 1 deletion pkg/settlement/swap/transaction/event_test.go
@@ -25,7 +25,7 @@ type transferEvent struct {
Value *big.Int
}

func newTransferLog(address common.Address, from common.Address, to common.Address, value *big.Int) *types.Log {
func newTransferLog(address, from, to common.Address, value *big.Int) *types.Log {
return &types.Log{
Topics: []common.Hash{
erc20ABI.Events["Transfer"].ID,
4 changes: 2 additions & 2 deletions pkg/swarm/test/helper.go
@@ -26,12 +26,12 @@ func RandomAddressAt(self swarm.Address, prox int) swarm.Address {
}
flipbyte := byte(1 << uint8(7-trans))
transbyteb := transbytea ^ byte(255)
randbyte := byte(rand.Intn(255))
randbyte := byte(rand.Intn(255)) // skipcq: GSC-G404
addr[pos] = ((addr[pos] & transbytea) ^ flipbyte) | randbyte&transbyteb
}

for i := pos + 1; i < len(addr); i++ {
addr[i] = byte(rand.Intn(255))
addr[i] = byte(rand.Intn(255)) // skipcq: GSC-G404
}

a := swarm.NewAddress(addr)
6 changes: 1 addition & 5 deletions pkg/tracing/tracing.go
@@ -186,11 +186,7 @@ func (t *Tracer) AddContextHTTPHeader(ctx context.Context, headers http.Header)
}

carrier := opentracing.HTTPHeadersCarrier(headers)
if err := t.tracer.Inject(c, opentracing.HTTPHeaders, carrier); err != nil {
return err
}

return nil
return t.tracer.Inject(c, opentracing.HTTPHeaders, carrier)
}

// FromHTTPHeaders returns tracing span context from HTTP headers. If the tracing
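
The AddContextHTTPHeader change is the usual simplification that gosimple-style checks suggest: when an error is checked only in order to be returned, return the call directly. A generic sketch (illustrative function, not the tracer API):

package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// writeVerbose checks the error only to return it, then returns nil.
func writeVerbose(v interface{}) error {
	if err := json.NewEncoder(os.Stdout).Encode(v); err != nil {
		return err
	}
	return nil
}

// write returns the call's error directly; behaviour is identical.
func write(v interface{}) error {
	return json.NewEncoder(os.Stdout).Encode(v)
}

func main() {
	if err := write(map[string]string{"hello": "bee"}); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}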
