
Commit

fix: some unit test (#68)
Co-authored-by: RayHuangCN <[email protected]>
RayHuangCN authored Sep 2, 2021
1 parent a8596ea commit def3fe4
Showing 5 changed files with 26 additions and 14 deletions.
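
The commit title is terse, so here is one reading of the change, reconstructed from the diff below rather than from any written description: pkg/scrape's JobInfo no longer caches a timeout field, the scrape deadline is derived from Config.ScrapeTimeout at call time, and the unit tests are updated to set ScrapeTimeout explicitly. A minimal sketch of the resulting shape (field types are inferred from the diff; this struct is a stand-in, not the full type in the repository):

package main

import (
    "fmt"
    "net/http"
    "net/url"
    "time"

    "github.com/prometheus/common/model"
    "github.com/prometheus/prometheus/config"
)

// JobInfo, reduced to the fields visible in this diff; the real type lives in pkg/scrape.
type JobInfo struct {
    Cli      *http.Client         // HTTP client used for scraping
    Config   *config.ScrapeConfig // per-job scrape configuration
    proxyURL *url.URL             // original proxy URL, forwarded via the "Origin-Proxy" header
    // timeout time.Duration      // removed by this commit; read Config.ScrapeTimeout instead
}

func main() {
    j := &JobInfo{
        Cli:    http.DefaultClient,
        Config: &config.ScrapeConfig{JobName: "job1", ScrapeTimeout: model.Duration(3 * time.Second)},
    }
    // The new lookup path: ScrapeTimeout is a model.Duration, convertible to time.Duration.
    fmt.Println(time.Duration(j.Config.ScrapeTimeout)) // prints "3s"
}
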
pkg/explore/explore_test.go (3 changes: 2 additions & 1 deletion)
@@ -61,7 +61,8 @@ func TestExplore_Run(t *testing.T) {
Config: &config.Config{
ScrapeConfigs: []*config.ScrapeConfig{
{
JobName: "job1",
JobName: "job1",
ScrapeTimeout: model.Duration(time.Second * 3),
},
},
},
pkg/scrape/manager_test.go (1 change: 0 additions & 1 deletion)
@@ -32,6 +32,5 @@ func TestManager(t *testing.T) {
s := ss.GetJob(cfg.JobName)
r.NotNil(s)

r.Equal(time.Second, s.timeout)
r.Equal(u.String(), s.proxyURL.String())
}
pkg/scrape/scrape.go (6 changes: 2 additions & 4 deletions)
@@ -55,7 +55,6 @@ type JobInfo struct {
// proxyURL saves the old proxyURL set in ScrapeConfig if the env SCRAPE_PROXY is not empty
// proxyURL will be set in the "Origin-Proxy" header when the scrape request is sent
proxyURL *url.URL
timeout time.Duration
}

func newJobInfo(cfg config.ScrapeConfig) (*JobInfo, error) {
@@ -78,7 +77,6 @@ func newJobInfo(cfg config.ScrapeConfig) (*JobInfo, error) {
Cli: client,
Config: &cfg,
proxyURL: oldProxy.URL,
timeout: time.Duration(cfg.ScrapeTimeout),
}, nil
}

@@ -93,12 +91,12 @@ func (j *JobInfo) Scrape(url string) ([]byte, string, error) {
req.Header.Add("Accept", acceptHeader)
req.Header.Add("Accept-Encoding", "gzip")
req.Header.Set("User-Agent", userAgentHeader)
req.Header.Set("X-prometheusURL-Cli-Timeout-Seconds", fmt.Sprintf("%f", j.timeout.Seconds()))
req.Header.Set("X-prometheusURL-Cli-Timeout-Seconds", fmt.Sprintf("%f", time.Duration(j.Config.ScrapeTimeout).Seconds()))
if j.proxyURL != nil {
req.Header.Set("Origin-Proxy", j.proxyURL.String())
}

ctx, _ := context.WithTimeout(context.Background(), j.timeout)
ctx, _ := context.WithTimeout(context.Background(), time.Duration(j.Config.ScrapeTimeout))
resp, err := j.Cli.Do(req.WithContext(ctx))
if err != nil {
return nil, "", err
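
For context, a minimal sketch of the new timeout flow in Scrape(): both the request deadline and the X-prometheusURL-Cli-Timeout-Seconds header are now derived from Config.ScrapeTimeout instead of from the removed cached field. The target URL and the default client below are placeholders for illustration, not values from the repository:

package main

import (
    "context"
    "fmt"
    "net/http"
    "time"

    "github.com/prometheus/common/model"
)

func main() {
    scrapeTimeout := model.Duration(3 * time.Second) // would come from Config.ScrapeTimeout

    req, err := http.NewRequest(http.MethodGet, "http://127.0.0.1:9090/metrics", nil) // placeholder target
    if err != nil {
        panic(err)
    }
    // Advertise the timeout to the downstream proxy, as the updated Scrape() does.
    req.Header.Set("X-prometheusURL-Cli-Timeout-Seconds", fmt.Sprintf("%f", time.Duration(scrapeTimeout).Seconds()))

    // Bound the request with the same duration.
    ctx, cancel := context.WithTimeout(context.Background(), time.Duration(scrapeTimeout))
    defer cancel()

    resp, err := http.DefaultClient.Do(req.WithContext(ctx))
    if err != nil {
        fmt.Println("scrape failed:", err)
        return
    }
    defer resp.Body.Close()
    fmt.Println("status:", resp.Status)
}

Note that the repository keeps ctx, _ := context.WithTimeout(...) as shown above; the sketch adds defer cancel() only to keep the standalone example tidy.
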
pkg/scrape/scrape_test.go (11 changes: 7 additions & 4 deletions)
@@ -20,13 +20,13 @@ package scrape
import (
"bytes"
"compress/gzip"
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/pkg/relabel"
"net/http"
"net/http/httptest"
"net/url"
"testing"

"github.com/prometheus/common/model"
"time"

"github.com/stretchr/testify/require"

@@ -76,8 +76,11 @@ metrics0{code="201"} 2

u, _ := url.Parse("http://127.0.0.1:8080")
info := &JobInfo{
Cli: ts.Client(),
Config: &config.ScrapeConfig{JobName: "test"},
Cli: ts.Client(),
Config: &config.ScrapeConfig{
JobName: "test",
ScrapeTimeout: model.Duration(time.Second),
},
proxyURL: u,
}

pkg/sidecar/proxy_test.go (19 changes: 15 additions & 4 deletions)
@@ -17,6 +17,7 @@
package sidecar

import (
"github.com/prometheus/common/model"
"github.com/prometheus/prometheus/config"
scrape2 "github.com/prometheus/prometheus/scrape"
"github.com/sirupsen/logrus"
@@ -26,6 +27,7 @@ import (
"net/http/httptest"
"strings"
"testing"
"time"
"tkestack.io/kvass/pkg/scrape"
"tkestack.io/kvass/pkg/target"
)
@@ -49,16 +51,22 @@ func TestProxy_ServeHTTP(t *testing.T) {
wantTargetStatus: map[uint64]*target.ScrapeStatus{},
},
{
name: "invalid hash",
job: &config.ScrapeConfig{JobName: "job1"},
name: "invalid hash",
job: &config.ScrapeConfig{
JobName: "job1",
ScrapeTimeout: model.Duration(time.Second * 3),
},
status: map[uint64]*target.ScrapeStatus{},
uri: "/metrics?_jobName=job1&_scheme=http&_hash=xxxx",
wantStatusCode: http.StatusBadRequest,
wantTargetStatus: map[uint64]*target.ScrapeStatus{},
},
{
name: "scrape failed",
job: &config.ScrapeConfig{JobName: "job1"},
job: &config.ScrapeConfig{
JobName: "job1",
ScrapeTimeout: model.Duration(time.Second * 3),
},
status: map[uint64]*target.ScrapeStatus{
1: {},
},
@@ -75,7 +83,10 @@
},
{
name: "scrape success",
job: &config.ScrapeConfig{JobName: "job1"},
job: &config.ScrapeConfig{
JobName: "job1",
ScrapeTimeout: model.Duration(time.Second * 3),
},
status: map[uint64]*target.ScrapeStatus{
1: {},
},
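
One inference about why each updated test now sets ScrapeTimeout explicitly (this rationale is not stated in the commit message): since Scrape() passes Config.ScrapeTimeout straight to context.WithTimeout, a config that leaves ScrapeTimeout at its zero value produces a context whose deadline has already passed, so any scrape through it fails immediately. A tiny sketch of that failure mode:

package main

import (
    "context"
    "fmt"
    "time"

    "github.com/prometheus/common/model"
)

func main() {
    var cfgTimeout model.Duration // zero value, as in a ScrapeConfig that never sets ScrapeTimeout

    ctx, cancel := context.WithTimeout(context.Background(), time.Duration(cfgTimeout))
    defer cancel()

    // The deadline is already in the past, so the context reports
    // "context deadline exceeded" before any request is attempted.
    fmt.Println(ctx.Err())
}
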
