
Commit

Merge branch 'master' of github.com:argoproj/argo-rollouts into chore/yaml

Signed-off-by: zachaller <[email protected]>
zachaller committed May 4, 2023
2 parents 6d8498b + f72e565 commit 62ef835
Showing 50 changed files with 3,943 additions and 1,881 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/go.yml
@@ -90,7 +90,7 @@ jobs:
           path: coverage.out
 
       - name: Upload code coverage information to codecov.io
-        uses: codecov/codecov-action@v3.1.2
+        uses: codecov/codecov-action@v3.1.3
         with:
           file: coverage.out

40 changes: 20 additions & 20 deletions analysis/analysis.go
@@ -321,34 +321,18 @@ func (c *Controller) runMeasurements(run *v1alpha1.AnalysisRun, tasks []metricTa
 		//redact secret values from logs
 		logger := logutil.WithRedactor(*logutil.WithAnalysisRun(run).WithField("metric", t.metric.Name), secrets)
 
-		resultsLock.Lock()
-		metricResult := analysisutil.GetResult(run, t.metric.Name)
-		resultsLock.Unlock()
-
-		provider, err := c.newProvider(*logger, t.metric)
-		if err != nil {
-			log.Errorf("Error in getting metric provider :%v", err)
-			return err
-		}
-		if metricResult == nil {
-			metricResult = &v1alpha1.MetricResult{
-				Name:     t.metric.Name,
-				Phase:    v1alpha1.AnalysisPhaseRunning,
-				DryRun:   dryRunMetricsMap[t.metric.Name],
-				Metadata: provider.GetMetadata(t.metric),
-			}
-		}
-
 		var newMeasurement v1alpha1.Measurement
-		if err != nil {
+		provider, providerErr := c.newProvider(*logger, t.metric)
+		if providerErr != nil {
+			log.Errorf("Error in getting metric provider :%v", providerErr)
 			if t.incompleteMeasurement != nil {
 				newMeasurement = *t.incompleteMeasurement
 			} else {
 				startedAt := timeutil.MetaNow()
 				newMeasurement.StartedAt = &startedAt
 			}
 			newMeasurement.Phase = v1alpha1.AnalysisPhaseError
-			newMeasurement.Message = err.Error()
+			newMeasurement.Message = providerErr.Error()
 		} else {
 			if t.incompleteMeasurement == nil {
 				newMeasurement = provider.Run(run, t.metric)
@@ -366,12 +350,28 @@ }
 			}
 		}
 
+		resultsLock.Lock()
+		metricResult := analysisutil.GetResult(run, t.metric.Name)
+		resultsLock.Unlock()
+		if metricResult == nil {
+			metricResult = &v1alpha1.MetricResult{
+				Name:   t.metric.Name,
+				Phase:  v1alpha1.AnalysisPhaseRunning,
+				DryRun: dryRunMetricsMap[t.metric.Name],
+			}
+
+			if provider != nil && providerErr == nil {
+				metricResult.Metadata = provider.GetMetadata(t.metric)
+			}
+		}
+
 		if newMeasurement.Phase.Completed() {
 			logger.Infof("Measurement Completed. Result: %s", newMeasurement.Phase)
 			if newMeasurement.FinishedAt == nil {
 				finishedAt := timeutil.MetaNow()
 				newMeasurement.FinishedAt = &finishedAt
 			}
+
 			switch newMeasurement.Phase {
 			case v1alpha1.AnalysisPhaseSuccessful:
 				metricResult.Successful++
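
For orientation, the hunk above changes `runMeasurements` so that a provider-creation failure is recorded as an error-phase measurement, and provider metadata is only attached to the metric result when a provider actually exists, instead of returning early from the task. The following is a minimal, self-contained Go sketch of that control flow; the types, fields, and helper names are simplified placeholders, not the real argo-rollouts API:

```go
package main

import (
	"errors"
	"fmt"
	"time"
)

// Simplified stand-ins for the controller's types (illustrative only).
type measurement struct {
	Phase     string
	Message   string
	StartedAt *time.Time
}

type metricResult struct {
	Name     string
	Phase    string
	Metadata map[string]string
}

type provider interface {
	GetMetadata(metric string) map[string]string
}

// runMeasurement mirrors the new ordering: create the provider first, turn a
// creation failure into an Error measurement, and attach provider metadata to
// the metric result only when the provider actually exists.
func runMeasurement(newProvider func(metric string) (provider, error), metric string) (measurement, metricResult) {
	var m measurement
	p, providerErr := newProvider(metric)
	if providerErr != nil {
		now := time.Now()
		m.StartedAt = &now
		m.Phase = "Error"
		m.Message = providerErr.Error()
	} else {
		m.Phase = "Successful" // stand-in for running the provider
	}

	result := metricResult{Name: metric, Phase: "Running"}
	if p != nil && providerErr == nil {
		result.Metadata = p.GetMetadata(metric)
	}
	return m, result
}

func main() {
	failing := func(metric string) (provider, error) {
		return nil, errors.New("failed to create provider")
	}
	m, r := runMeasurement(failing, "metric1")
	fmt.Println(r.Name, m.Phase, m.Message) // metric1 Error failed to create provider
}
```

The `TestFailedToCreateProviderError` test added below exercises exactly this path: a failing provider factory should surface as an `Error` measurement carrying the factory's error message.
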
39 changes: 39 additions & 0 deletions analysis/controller_test.go
@@ -3,6 +3,7 @@ package analysis
import (
	"context"
	"encoding/json"
	"fmt"
	"reflect"
	"testing"
	"time"
@@ -317,6 +318,44 @@ func TestNoReconcileForAnalysisRunWithDeletionTimestamp(t *testing.T) {
	f.run(getKey(ar, t))
}

func TestFailedToCreateProviderError(t *testing.T) {
	f := newFixture(t)
	defer f.Close()

	ar := &v1alpha1.AnalysisRun{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "foo",
			Namespace: metav1.NamespaceDefault,
		},
		Spec: v1alpha1.AnalysisRunSpec{
			Metrics: []v1alpha1.Metric{
				{
					Name: "metric1",
					Provider: v1alpha1.MetricProvider{
						Plugin: map[string]json.RawMessage{"mypluginns/myplugin": []byte(`{"invalid": "json"}`)},
					},
				},
			},
		},
	}
	f.analysisRunLister = append(f.analysisRunLister, ar)
	f.objects = append(f.objects, ar)

	c, i, k8sI := f.newController(noResyncPeriodFunc)
	c.newProvider = func(logCtx log.Entry, metric v1alpha1.Metric) (metric.Provider, error) {
		return nil, fmt.Errorf("failed to create provider")
	}

	pi := f.expectPatchAnalysisRunAction(ar)

	f.runController(getKey(ar, t), true, false, c, i, k8sI)

	updatedAr := f.getPatchedAnalysisRun(pi)

	assert.Equal(t, v1alpha1.AnalysisPhaseError, updatedAr.Status.MetricResults[0].Measurements[0].Phase)
	assert.Equal(t, "failed to create provider", updatedAr.Status.MetricResults[0].Measurements[0].Message)
}

func TestRun(t *testing.T) {
	f := newFixture(t)
	defer f.Close()
10 changes: 0 additions & 10 deletions docs/assets/versions.js
@@ -9,16 +9,6 @@ setTimeout(function() {
caret.innerHTML = "<i class='fa fa-caret-down dropdown-caret'></i>"
caret.classList.add('dropdown-caret')
div.querySelector('.rst-current-version').appendChild(caret);
div.querySelector('.rst-current-version').addEventListener('click', function() {
const classes = container.className.split(' ');
const index = classes.indexOf('shift-up');
if (index === -1) {
classes.push('shift-up');
} else {
classes.splice(index, 1);
}
container.className = classes.join(' ');
});
}

var CSSLink = document.createElement('link');
20 changes: 20 additions & 0 deletions docs/features/analysis.md
@@ -804,6 +804,26 @@ spec:
limit: 20
```

### Define custom Labels/Annotations for AnalysisRun

If you would like to label or annotate the `AnalysisRun` with custom metadata, you can do so by specifying the `analysisRunMetadata` field.

```yaml hl_lines="9 10 11"
kind: Rollout
spec:
  ...
  steps:
  - analysis:
      templates:
      - templateName: my-template
      analysisRunMetadata:
        labels:
          my-custom-label: label-value
        annotations:
          my-custom-annotation: annotation-value
```
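
For illustration only (this sketch is not part of the change above): with the step configured as in the example, the resulting `AnalysisRun` would be expected to carry the custom metadata in its own `metadata` section, roughly as follows; the generated name is hypothetical.

```yaml
apiVersion: argoproj.io/v1alpha1
kind: AnalysisRun
metadata:
  name: my-rollout-abc123-2-my-template  # generated by the controller; this name is made up
  labels:
    my-custom-label: label-value
  annotations:
    my-custom-annotation: annotation-value
```
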

### Measurements Retention for Experiments

If an experiment wants to retain more results of its analysis metrics, it simply needs to specify the