From 8541f1fcdbf004d37967106f5551532ece4b0e6e Mon Sep 17 00:00:00 2001 From: Vincent Demeester Date: Mon, 20 Jan 2020 16:39:47 +0100 Subject: [PATCH] =?UTF-8?q?Add=20Pipeline=20to=20v1alpha2=20=F0=9F=8E=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This port Pipeline spec to v1alpha2, nothing really change from v1alpha1. Signed-off-by: Vincent Demeester --- .../pipeline/v1alpha2/pipeline_defaults.go | 40 ++ .../pipeline/v1alpha2/pipeline_interface.go | 26 + pkg/apis/pipeline/v1alpha2/pipeline_types.go | 246 ++++++++ .../pipeline/v1alpha2/pipeline_validation.go | 268 ++++++++ .../v1alpha2/pipeline_validation_test.go | 578 ++++++++++++++++++ pkg/apis/pipeline/v1alpha2/register.go | 4 +- .../v1alpha2/zz_generated.deepcopy.go | 331 ++++++++++ .../pipeline/v1alpha2/fake/fake_pipeline.go | 128 ++++ .../v1alpha2/fake/fake_pipeline_client.go | 4 + .../pipeline/v1alpha2/generated_expansion.go | 2 + .../typed/pipeline/v1alpha2/pipeline.go | 174 ++++++ .../pipeline/v1alpha2/pipeline_client.go | 5 + .../informers/externalversions/generic.go | 2 + .../pipeline/v1alpha2/interface.go | 7 + .../pipeline/v1alpha2/pipeline.go | 89 +++ .../pipeline/v1alpha2/pipeline/fake/fake.go | 40 ++ .../pipeline/v1alpha2/pipeline/pipeline.go | 52 ++ .../pipeline/v1alpha2/expansion_generated.go | 8 + .../listers/pipeline/v1alpha2/pipeline.go | 94 +++ 19 files changed, 2096 insertions(+), 2 deletions(-) create mode 100644 pkg/apis/pipeline/v1alpha2/pipeline_defaults.go create mode 100644 pkg/apis/pipeline/v1alpha2/pipeline_interface.go create mode 100644 pkg/apis/pipeline/v1alpha2/pipeline_types.go create mode 100644 pkg/apis/pipeline/v1alpha2/pipeline_validation.go create mode 100644 pkg/apis/pipeline/v1alpha2/pipeline_validation_test.go create mode 100644 pkg/client/clientset/versioned/typed/pipeline/v1alpha2/fake/fake_pipeline.go create mode 100644 pkg/client/clientset/versioned/typed/pipeline/v1alpha2/pipeline.go create mode 100644 
pkg/client/informers/externalversions/pipeline/v1alpha2/pipeline.go create mode 100644 pkg/client/injection/informers/pipeline/v1alpha2/pipeline/fake/fake.go create mode 100644 pkg/client/injection/informers/pipeline/v1alpha2/pipeline/pipeline.go create mode 100644 pkg/client/listers/pipeline/v1alpha2/pipeline.go diff --git a/pkg/apis/pipeline/v1alpha2/pipeline_defaults.go b/pkg/apis/pipeline/v1alpha2/pipeline_defaults.go new file mode 100644 index 00000000000..9d61991f1ff --- /dev/null +++ b/pkg/apis/pipeline/v1alpha2/pipeline_defaults.go @@ -0,0 +1,40 @@ +/* +Copyright 2020 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package v1alpha2 + +import ( + "context" + + "knative.dev/pkg/apis" +) + +var _ apis.Defaultable = (*Pipeline)(nil) + +func (p *Pipeline) SetDefaults(ctx context.Context) { + p.Spec.SetDefaults(ctx) +} + +func (ps *PipelineSpec) SetDefaults(ctx context.Context) { + for _, pt := range ps.Tasks { + if pt.TaskRef.Kind == "" { + pt.TaskRef.Kind = NamespacedTaskKind + } + } + for i := range ps.Params { + ps.Params[i].SetDefaults(ctx) + } +} diff --git a/pkg/apis/pipeline/v1alpha2/pipeline_interface.go b/pkg/apis/pipeline/v1alpha2/pipeline_interface.go new file mode 100644 index 00000000000..d13c0c50ed1 --- /dev/null +++ b/pkg/apis/pipeline/v1alpha2/pipeline_interface.go @@ -0,0 +1,26 @@ +/* +Copyright 2020 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package v1alpha2 + +import metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" + +// PipelineInterface is implemented by Pipeline and ClusterPipeline +type PipelineInterface interface { + PipelineMetadata() metav1.ObjectMeta + PipelineSpec() PipelineSpec + Copy() PipelineInterface +} diff --git a/pkg/apis/pipeline/v1alpha2/pipeline_types.go b/pkg/apis/pipeline/v1alpha2/pipeline_types.go new file mode 100644 index 00000000000..ab3ee3a7c01 --- /dev/null +++ b/pkg/apis/pipeline/v1alpha2/pipeline_types.go @@ -0,0 +1,246 @@ +/* +Copyright 2020 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
/*
Copyright 2020 The Tekton Authors

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package v1alpha2

import (
	"github.com/tektoncd/pipeline/pkg/reconciler/pipeline/dag"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// TaskKind defines the type of Task used by the pipeline.
type TaskKind string

const (
	// NamespacedTaskKind indicates that the task type has a namespace scope.
	NamespacedTaskKind TaskKind = "Task"
	// ClusterTaskKind indicates that task type has a cluster scope.
	ClusterTaskKind TaskKind = "ClusterTask"
)

// +genclient
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +genclient:noStatus

// Pipeline describes a list of Tasks to execute. It expresses how outputs
// of tasks feed into inputs of subsequent tasks.
// +k8s:openapi-gen=true
type Pipeline struct {
	metav1.TypeMeta `json:",inline"`
	// +optional
	metav1.ObjectMeta `json:"metadata,omitempty"`

	// Spec holds the desired state of the Pipeline from the client
	// +optional
	Spec PipelineSpec `json:"spec"`

	// Status is deprecated.
	// It usually is used to communicate the observed state of the Pipeline from
	// the controller, but was unused as there is no controller for Pipeline.
	// +optional
	Status *PipelineStatus `json:"status,omitempty"`
}

// PipelineStatus does not contain anything because Pipelines on their own
// do not have a status, they just hold data which is later used by a
// PipelineRun.
// Deprecated
type PipelineStatus struct {
}

// PipelineMetadata returns the Pipeline's ObjectMeta, implementing PipelineInterface.
func (p *Pipeline) PipelineMetadata() metav1.ObjectMeta {
	return p.ObjectMeta
}

// PipelineSpec returns the Pipeline's Spec, implementing PipelineInterface.
func (p *Pipeline) PipelineSpec() PipelineSpec {
	return p.Spec
}

// Copy returns a deep copy of the Pipeline, implementing PipelineInterface.
func (p *Pipeline) Copy() PipelineInterface {
	return p.DeepCopy()
}

// PipelineSpec defines the desired state of Pipeline.
type PipelineSpec struct {
	// Resources declares the names and types of the resources given to the
	// Pipeline's tasks as inputs and outputs.
	Resources []PipelineDeclaredResource `json:"resources,omitempty"`
	// Tasks declares the graph of Tasks that execute when this Pipeline is run.
	Tasks []PipelineTask `json:"tasks,omitempty"`
	// Params declares a list of input parameters that must be supplied when
	// this Pipeline is run.
	Params []ParamSpec `json:"params,omitempty"`
}

// PipelineTask defines a task in a Pipeline, passing inputs from both
// Params and from the output of previous tasks.
type PipelineTask struct {
	// Name is the name of this task within the context of a Pipeline. Name is
	// used as a coordinate with the `from` and `runAfter` fields to establish
	// the execution order of tasks relative to one another.
	Name string `json:"name,omitempty"`

	// TaskRef is a reference to a task definition.
	// +optional
	TaskRef *TaskRef `json:"taskRef,omitempty"`

	// TaskSpec is a specification of a task
	// +optional
	TaskSpec *TaskSpec `json:"taskSpec,omitempty"`

	// Conditions is a list of conditions that need to be true for the task to run
	// +optional
	Conditions []PipelineTaskCondition `json:"conditions,omitempty"`

	// Retries represents how many times this task should be retried in case of task failure: ConditionSucceeded set to False
	// +optional
	Retries int `json:"retries,omitempty"`

	// RunAfter is the list of PipelineTask names that should be executed before
	// this Task executes. (Used to force a specific ordering in graph execution.)
	// +optional
	RunAfter []string `json:"runAfter,omitempty"`

	// Resources declares the resources given to this task as inputs and
	// outputs.
	// +optional
	Resources *PipelineTaskResources `json:"resources,omitempty"`
	// Parameters declares parameters passed to this task.
	// +optional
	Params []Param `json:"params,omitempty"`
}

// HashKey returns the name of the PipelineTask, which uniquely identifies it
// as a node within the Pipeline's task graph.
func (pt PipelineTask) HashKey() string {
	return pt.Name
}

// Deps returns the names of every PipelineTask this task depends on: the
// explicit runAfter entries plus every task named in a `from` clause of its
// input resources or of its conditions' resources.
func (pt PipelineTask) Deps() []string {
	deps := []string{}
	deps = append(deps, pt.RunAfter...)
	if pt.Resources != nil {
		for _, rd := range pt.Resources.Inputs {
			deps = append(deps, rd.From...)
		}
	}
	// Add any dependents from conditional resources.
	for _, cond := range pt.Conditions {
		for _, rd := range cond.Resources {
			deps = append(deps, rd.From...)
		}
	}
	return deps
}

// PipelineTaskList is a list of PipelineTasks.
type PipelineTaskList []PipelineTask

// Items converts the list into dag.Tasks so that a dependency graph can be
// built from it (see validateGraph in pipeline_validation.go).
func (l PipelineTaskList) Items() []dag.Task {
	tasks := []dag.Task{}
	for _, t := range l {
		tasks = append(tasks, dag.Task(t))
	}
	return tasks
}

// PipelineTaskParam is used to provide arbitrary string parameters to a Task.
type PipelineTaskParam struct {
	Name  string `json:"name"`
	Value string `json:"value"`
}

// PipelineTaskCondition allows a PipelineTask to declare a Condition to be evaluated before
// the Task is run.
type PipelineTaskCondition struct {
	// ConditionRef is the name of the Condition to use for the conditionCheck
	ConditionRef string `json:"conditionRef"`

	// Params declare parameters passed to this Condition
	// +optional
	Params []Param `json:"params,omitempty"`

	// Resources declare the resources provided to this Condition as input
	Resources []PipelineTaskInputResource `json:"resources,omitempty"`
}

// PipelineDeclaredResource is used by a Pipeline to declare the types of the
// PipelineResources that it will require to run and names which can be used to
// refer to these PipelineResources in PipelineTaskResourceBindings.
type PipelineDeclaredResource struct {
	// Name is the name that will be used by the Pipeline to refer to this resource.
	// It does not directly correspond to the name of any PipelineResources Task
	// inputs or outputs, and it does not correspond to the actual names of the
	// PipelineResources that will be bound in the PipelineRun.
	Name string `json:"name"`
	// Type is the type of the PipelineResource.
	Type PipelineResourceType `json:"type"`
}

// PipelineTaskResources allows a Pipeline to declare how its DeclaredPipelineResources
// should be provided to a Task as its inputs and outputs.
type PipelineTaskResources struct {
	// Inputs holds the mapping from the PipelineResources declared in
	// DeclaredPipelineResources to the input PipelineResources required by the Task.
	Inputs []PipelineTaskInputResource `json:"inputs,omitempty"`
	// Outputs holds the mapping from the PipelineResources declared in
	// DeclaredPipelineResources to the output PipelineResources required by the Task.
	Outputs []PipelineTaskOutputResource `json:"outputs,omitempty"`
}

// PipelineTaskInputResource maps the name of a declared PipelineResource input
// dependency in a Task to the resource in the Pipeline's DeclaredPipelineResources
// that should be used. This input may come from a previous task.
type PipelineTaskInputResource struct {
	// Name is the name of the PipelineResource as declared by the Task.
	Name string `json:"name"`
	// Resource is the name of the DeclaredPipelineResource to use.
	Resource string `json:"resource"`
	// From is the list of PipelineTask names that the resource has to come from.
	// (Implies an ordering in the execution graph.)
	// +optional
	From []string `json:"from,omitempty"`
}

// PipelineTaskOutputResource maps the name of a declared PipelineResource output
// dependency in a Task to the resource in the Pipeline's DeclaredPipelineResources
// that should be used.
type PipelineTaskOutputResource struct {
	// Name is the name of the PipelineResource as declared by the Task.
	Name string `json:"name"`
	// Resource is the name of the DeclaredPipelineResource to use.
	Resource string `json:"resource"`
}

// TaskRef can be used to refer to a specific instance of a task.
// Copied from CrossVersionObjectReference: https://github.com/kubernetes/kubernetes/blob/169df7434155cbbc22f1532cba8e0a9588e29ad8/pkg/apis/autoscaling/types.go#L64
type TaskRef struct {
	// Name of the referent; More info: http://kubernetes.io/docs/user-guide/identifiers#names
	Name string `json:"name,omitempty"`
	// TaskKind indicates the kind of the task, namespaced or cluster scoped.
	Kind TaskKind `json:"kind,omitempty"`
	// API version of the referent
	// +optional
	APIVersion string `json:"apiVersion,omitempty"`
}

// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object

// PipelineList contains a list of Pipeline
type PipelineList struct {
	metav1.TypeMeta `json:",inline"`
	// +optional
	metav1.ListMeta `json:"metadata,omitempty"`
	Items           []Pipeline `json:"items"`
}
/*
Copyright 2020 The Tekton Authors

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package v1alpha2

import (
	"context"
	"fmt"
	"strings"

	"github.com/tektoncd/pipeline/pkg/apis/validate"
	"github.com/tektoncd/pipeline/pkg/list"
	"github.com/tektoncd/pipeline/pkg/reconciler/pipeline/dag"
	"github.com/tektoncd/pipeline/pkg/substitution"
	"k8s.io/apimachinery/pkg/api/equality"
	"k8s.io/apimachinery/pkg/util/validation"
	"knative.dev/pkg/apis"
)

// Compile-time check that Pipeline satisfies apis.Validatable.
var _ apis.Validatable = (*Pipeline)(nil)

// Validate checks that the Pipeline structure is valid but does not validate
// that any referenced resources exist; that is done at run time.
func (p *Pipeline) Validate(ctx context.Context) *apis.FieldError {
	if err := validate.ObjectMetadata(p.GetObjectMeta()); err != nil {
		return err.ViaField("metadata")
	}
	return p.Spec.Validate(ctx)
}

// validateDeclaredResources checks that every resource referenced by a task's
// inputs, outputs, or conditions is declared in the PipelineSpec's Resources.
// Note: it only reports resources used but not declared; declared-but-unused
// resources are not flagged here.
func validateDeclaredResources(ps *PipelineSpec) error {
	required := []string{}
	for _, t := range ps.Tasks {
		if t.Resources != nil {
			for _, input := range t.Resources.Inputs {
				required = append(required, input.Resource)
			}
			for _, output := range t.Resources.Outputs {
				required = append(required, output.Resource)
			}
		}

		for _, condition := range t.Conditions {
			for _, cr := range condition.Resources {
				required = append(required, cr.Resource)
			}
		}
	}

	provided := make([]string, 0, len(ps.Resources))
	for _, resource := range ps.Resources {
		provided = append(provided, resource.Name)
	}
	// Anything required but not provided is an error.
	missing := list.DiffLeft(required, provided)
	if len(missing) > 0 {
		return fmt.Errorf("pipeline declared resources didn't match usage in Tasks: Didn't provide required values: %s", missing)
	}
	return nil
}

// isOutput reports whether the named resource appears in the given list of
// task outputs.
func isOutput(outputs []PipelineTaskOutputResource, resource string) bool {
	for _, output := range outputs {
		if output.Resource == resource {
			return true
		}
	}
	return false
}

// validateFrom ensures that the `from` values make sense: that they rely on values from Tasks
// that ran previously, and that the PipelineResource is actually an output of the Task it
// should come from.
func validateFrom(tasks []PipelineTask) error {
	// Index every task's declared outputs by task name so `from` references
	// can be resolved below.
	taskOutputs := map[string][]PipelineTaskOutputResource{}
	for _, task := range tasks {
		var to []PipelineTaskOutputResource
		if task.Resources != nil {
			to = make([]PipelineTaskOutputResource, len(task.Resources.Outputs))
			copy(to, task.Resources.Outputs)
		}
		taskOutputs[task.Name] = to
	}
	for _, t := range tasks {
		// Collect the inputs of the task itself and of all its conditions;
		// both may carry `from` clauses.
		inputResources := []PipelineTaskInputResource{}
		if t.Resources != nil {
			inputResources = append(inputResources, t.Resources.Inputs...)
		}

		for _, c := range t.Conditions {
			inputResources = append(inputResources, c.Resources...)
		}

		for _, rd := range inputResources {
			for _, pt := range rd.From {
				outputs, found := taskOutputs[pt]
				if !found {
					return fmt.Errorf("expected resource %s to be from task %s, but task %s doesn't exist", rd.Resource, pt, pt)
				}
				if !isOutput(outputs, rd.Resource) {
					return fmt.Errorf("the resource %s from %s must be an output but is an input", rd.Resource, pt)
				}
			}
		}
	}
	return nil
}

// validateGraph ensures the Pipeline's dependency Graph (DAG) make sense: that there is no dependency
// cycle or that they rely on values from Tasks that ran previously, and that the PipelineResource
// is actually an output of the Task it should come from.
func validateGraph(tasks []PipelineTask) error {
	if _, err := dag.Build(PipelineTaskList(tasks)); err != nil {
		return err
	}
	return nil
}

// Validate checks that taskNames in the Pipeline are valid and that the graph
// of Tasks expressed in the Pipeline makes sense.
func (ps *PipelineSpec) Validate(ctx context.Context) *apis.FieldError {
	// A completely empty spec is invalid.
	if equality.Semantic.DeepEqual(ps, &PipelineSpec{}) {
		return apis.ErrMissingField(apis.CurrentField)
	}

	// Names cannot be duplicated
	taskNames := map[string]struct{}{}
	for i, t := range ps.Tasks {
		// can't have both taskRef and taskSpec at the same time
		if (t.TaskRef != nil && t.TaskRef.Name != "") && t.TaskSpec != nil {
			return apis.ErrMultipleOneOf(fmt.Sprintf("spec.tasks[%d].taskRef", i), fmt.Sprintf("spec.tasks[%d].taskSpec", i))
		}
		// Check that one of TaskRef and TaskSpec is present
		if (t.TaskRef == nil || (t.TaskRef != nil && t.TaskRef.Name == "")) && t.TaskSpec == nil {
			return apis.ErrMissingOneOf(fmt.Sprintf("spec.tasks[%d].taskRef", i), fmt.Sprintf("spec.tasks[%d].taskSpec", i))
		}
		// Validate TaskSpec if it's present
		if t.TaskSpec != nil {
			if err := t.TaskSpec.Validate(ctx); err != nil {
				return err
			}
		}
		// NOTE(review): name validity and duplicate-name checks below only
		// run for tasks that use a TaskRef; taskSpec-only tasks skip them
		// (duplicates among those are still caught later by dag.Build).
		if t.TaskRef != nil && t.TaskRef.Name != "" {
			// Task names are appended to the container name, which must exist and
			// must be a valid k8s name
			if errSlice := validation.IsQualifiedName(t.Name); len(errSlice) != 0 {
				return apis.ErrInvalidValue(strings.Join(errSlice, ","), fmt.Sprintf("spec.tasks[%d].name", i))
			}
			// TaskRef name must be a valid k8s name
			if errSlice := validation.IsQualifiedName(t.TaskRef.Name); len(errSlice) != 0 {
				return apis.ErrInvalidValue(strings.Join(errSlice, ","), fmt.Sprintf("spec.tasks[%d].taskRef.name", i))
			}
			if _, ok := taskNames[t.Name]; ok {
				return apis.ErrMultipleOneOf(fmt.Sprintf("spec.tasks[%d].name", i))
			}
			taskNames[t.Name] = struct{}{}
		}
	}

	// All declared resources should be used, and the Pipeline shouldn't try to use any resources
	// that aren't declared
	if err := validateDeclaredResources(ps); err != nil {
		return apis.ErrInvalidValue(err.Error(), "spec.resources")
	}

	// The from values should make sense
	if err := validateFrom(ps.Tasks); err != nil {
		return apis.ErrInvalidValue(err.Error(), "spec.tasks.resources.inputs.from")
	}

	// Validate the pipeline task graph
	if err := validateGraph(ps.Tasks); err != nil {
		return apis.ErrInvalidValue(err.Error(), "spec.tasks")
	}

	// The parameter variables should be valid
	if err := validatePipelineParameterVariables(ps.Tasks, ps.Params); err != nil {
		return err
	}

	return nil
}

// validatePipelineParameterVariables checks each declared parameter (valid
// type, default matching the declared type) and then verifies every variable
// reference in the tasks' params against the declared parameter names.
func validatePipelineParameterVariables(tasks []PipelineTask, params []ParamSpec) *apis.FieldError {
	parameterNames := map[string]struct{}{}
	arrayParameterNames := map[string]struct{}{}

	for _, p := range params {
		// Verify that p is a valid type.
		validType := false
		for _, allowedType := range AllParamTypes {
			if p.Type == allowedType {
				validType = true
			}
		}
		if !validType {
			return apis.ErrInvalidValue(string(p.Type), fmt.Sprintf("spec.params.%s.type", p.Name))
		}

		// If a default value is provided, ensure its type matches param's declared type.
		if (p.Default != nil) && (p.Default.Type != p.Type) {
			return &apis.FieldError{
				Message: fmt.Sprintf(
					"\"%v\" type does not match default value's type: \"%v\"", p.Type, p.Default.Type),
				Paths: []string{
					fmt.Sprintf("spec.params.%s.type", p.Name),
					fmt.Sprintf("spec.params.%s.default.type", p.Name),
				},
			}
		}

		// Add parameter name to parameterNames, and to arrayParameterNames if type is array.
		parameterNames[p.Name] = struct{}{}
		if p.Type == ParamTypeArray {
			arrayParameterNames[p.Name] = struct{}{}
		}
	}

	return validatePipelineVariables(tasks, "params", parameterNames, arrayParameterNames)
}

// validatePipelineVariables walks every task parameter value and checks its
// variable references: string values may not reference array params at all,
// while array elements may reference array params only as isolated elements.
func validatePipelineVariables(tasks []PipelineTask, prefix string, paramNames map[string]struct{}, arrayParamNames map[string]struct{}) *apis.FieldError {
	for _, task := range tasks {
		for _, param := range task.Params {
			if param.Value.Type == ParamTypeString {
				if err := validatePipelineVariable(fmt.Sprintf("param[%s]", param.Name), param.Value.StringVal, prefix, paramNames); err != nil {
					return err
				}
				if err := validatePipelineNoArrayReferenced(fmt.Sprintf("param[%s]", param.Name), param.Value.StringVal, prefix, arrayParamNames); err != nil {
					return err
				}
			} else {
				for _, arrayElement := range param.Value.ArrayVal {
					if err := validatePipelineVariable(fmt.Sprintf("param[%s]", param.Name), arrayElement, prefix, paramNames); err != nil {
						return err
					}
					if err := validatePipelineArraysIsolated(fmt.Sprintf("param[%s]", param.Name), arrayElement, prefix, arrayParamNames); err != nil {
						return err
					}
				}
			}
		}
	}
	return nil
}

// validatePipelineVariable checks that every variable referenced in value is
// one of the declared vars.
func validatePipelineVariable(name, value, prefix string, vars map[string]struct{}) *apis.FieldError {
	return substitution.ValidateVariable(name, value, prefix, "", "task parameter", "pipelinespec.params", vars)
}

// validatePipelineNoArrayReferenced rejects any reference in value to one of
// the given (array-typed) vars.
func validatePipelineNoArrayReferenced(name, value, prefix string, vars map[string]struct{}) *apis.FieldError {
	return substitution.ValidateVariableProhibited(name, value, prefix, "", "task parameter", "pipelinespec.params", vars)
}

// validatePipelineArraysIsolated requires that references to the given
// (array-typed) vars stand alone as a whole array element.
func validatePipelineArraysIsolated(name, value, prefix string, vars map[string]struct{}) *apis.FieldError {
	return substitution.ValidateVariableIsolated(name, value, prefix, "", "task parameter", "pipelinespec.params", vars)
}
b/pkg/apis/pipeline/v1alpha2/pipeline_validation_test.go new file mode 100644 index 00000000000..360bb07908a --- /dev/null +++ b/pkg/apis/pipeline/v1alpha2/pipeline_validation_test.go @@ -0,0 +1,578 @@ +/* +Copyright 2020 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +package v1alpha2_test + +import ( + "context" + "testing" + + "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" + corev1 "k8s.io/api/core/v1" + metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" +) + +func TestPipeline_Validate(t *testing.T) { + tests := []struct { + name string + p *v1alpha2.Pipeline + failureExpected bool + }{{ + name: "valid metadata", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Tasks: []v1alpha2.PipelineTask{{Name: "foo", TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}}}, + }, + }, + failureExpected: false, + }, { + name: "valid resource declarations and usage", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Resources: []v1alpha2.PipelineDeclaredResource{{ + Name: "great-resource", Type: v1alpha2.PipelineResourceTypeGit, + }, { + Name: "wonderful-resource", Type: v1alpha2.PipelineResourceTypeImage, + }}, + Tasks: []v1alpha2.PipelineTask{{ + Name: "bar", + TaskRef: &v1alpha2.TaskRef{Name: "bar-task"}, + Resources: &v1alpha2.PipelineTaskResources{ + Inputs: []v1alpha2.PipelineTaskInputResource{{ + Name: "some-workspace", Resource: "great-resource", + }}, 
+ Outputs: []v1alpha2.PipelineTaskOutputResource{{ + Name: "some-imagee", Resource: "wonderful-resource", + }}, + }, + Conditions: []v1alpha2.PipelineTaskCondition{{ + ConditionRef: "some-condition", + Resources: []v1alpha2.PipelineTaskInputResource{{ + Name: "some-workspace", Resource: "great-resource", + }}, + }}, + }, { + Name: "foo", + TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}, + Resources: &v1alpha2.PipelineTaskResources{ + Inputs: []v1alpha2.PipelineTaskInputResource{{ + Name: "some-imagee", Resource: "wonderful-resource", From: []string{"bar"}, + }}, + }, + Conditions: []v1alpha2.PipelineTaskCondition{{ + ConditionRef: "some-condition-2", + Resources: []v1alpha2.PipelineTaskInputResource{{ + Name: "some-image", Resource: "wonderful-resource", From: []string{"bar"}, + }}, + }}, + }}, + }, + }, + failureExpected: false, + }, { + name: "period in name", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipe.line"}, + Spec: v1alpha2.PipelineSpec{ + Tasks: []v1alpha2.PipelineTask{{Name: "foo", TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}}}, + }, + }, + failureExpected: true, + }, { + name: "pipeline name too long", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "asdf123456789012345678901234567890123456789012345678901234567890"}, + }, + failureExpected: true, + }, { + name: "pipeline spec missing taskref and taskspec", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Tasks: []v1alpha2.PipelineTask{ + {Name: "foo"}, + }, + }, + }, + failureExpected: true, + }, { + name: "pipeline spec with taskref and taskspec", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Tasks: []v1alpha2.PipelineTask{ + { + Name: "foo", + TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}, + TaskSpec: &v1alpha2.TaskSpec{ + Steps: []v1alpha2.Step{{ + Container: corev1.Container{Name: "foo", Image: "bar"}, + }}, + }, + }, + }, + }, + }, + 
failureExpected: true, + }, { + name: "pipeline spec invalid taskspec", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Tasks: []v1alpha2.PipelineTask{ + { + Name: "foo", + TaskSpec: &v1alpha2.TaskSpec{}, + }, + }, + }, + }, + failureExpected: true, + }, { + name: "pipeline spec valid taskspec", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Tasks: []v1alpha2.PipelineTask{ + { + Name: "foo", + TaskSpec: &v1alpha2.TaskSpec{ + Steps: []v1alpha2.Step{{ + Container: corev1.Container{Name: "foo", Image: "bar"}, + }}, + }, + }, + }, + }, + }, + failureExpected: false, + }, { + name: "pipeline spec invalid (duplicate tasks)", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Tasks: []v1alpha2.PipelineTask{ + {Name: "foo", TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}}, + {Name: "foo", TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}}, + }, + }, + }, + failureExpected: true, + }, { + name: "pipeline spec empty task name", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Tasks: []v1alpha2.PipelineTask{{Name: "", TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}}}, + }, + }, + failureExpected: true, + }, { + name: "pipeline spec invalid task name", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Tasks: []v1alpha2.PipelineTask{{Name: "_foo", TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}}}, + }, + }, + failureExpected: true, + }, { + name: "pipeline spec invalid taskref name", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Tasks: []v1alpha2.PipelineTask{{Name: "foo", TaskRef: &v1alpha2.TaskRef{Name: "_foo-task"}}}, + }, + }, + failureExpected: true, + }, { + name: "valid condition only resource", + p: &v1alpha2.Pipeline{ + 
ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Resources: []v1alpha2.PipelineDeclaredResource{{ + Name: "great-resource", Type: v1alpha2.PipelineResourceTypeGit, + }}, + Tasks: []v1alpha2.PipelineTask{{ + Name: "bar", + TaskRef: &v1alpha2.TaskRef{Name: "bar-task"}, + Conditions: []v1alpha2.PipelineTaskCondition{{ + ConditionRef: "some-condition", + Resources: []v1alpha2.PipelineTaskInputResource{{ + Name: "sowe-workspace", Resource: "great-resource", + }}, + }}, + }}, + }, + }, + failureExpected: false, + }, { + name: "valid parameter variables", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Params: []v1alpha2.ParamSpec{{ + Name: "baz", Type: v1alpha2.ParamTypeString, + }, { + Name: "foo-is-baz", Type: v1alpha2.ParamTypeString, + }}, + Tasks: []v1alpha2.PipelineTask{{ + Name: "bar", + TaskRef: &v1alpha2.TaskRef{Name: "bar-task"}, + Params: []v1alpha2.Param{{ + Name: "a-param", Value: v1alpha2.ArrayOrString{StringVal: "$(baz) and $(foo-is-baz)"}, + }}, + }}, + }, + }, + failureExpected: false, + }, { + name: "valid array parameter variables", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Params: []v1alpha2.ParamSpec{{ + Name: "baz", Type: v1alpha2.ParamTypeArray, Default: &v1alpha2.ArrayOrString{Type: v1alpha2.ParamTypeArray, ArrayVal: []string{"some", "default"}}, + }, { + Name: "foo-is-baz", Type: v1alpha2.ParamTypeArray, + }}, + Tasks: []v1alpha2.PipelineTask{{ + Name: "bar", + TaskRef: &v1alpha2.TaskRef{Name: "bar-task"}, + Params: []v1alpha2.Param{{ + Name: "a-param", Value: v1alpha2.ArrayOrString{ArrayVal: []string{"$(baz)", "and", "$(foo-is-)"}}, + }}, + }}, + }, + }, + failureExpected: false, + }, { + name: "pipeline parameter nested in task parameter", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Params: []v1alpha2.ParamSpec{{ + Name: 
"baz", Type: v1alpha2.ParamTypeString, + }}, + Tasks: []v1alpha2.PipelineTask{{ + Name: "bar", + TaskRef: &v1alpha2.TaskRef{Name: "bar-task"}, + Params: []v1alpha2.Param{{ + Name: "a-param", Value: v1alpha2.ArrayOrString{StringVal: "$(input.workspace.$(baz))"}, + }}, + }}, + }, + }, + failureExpected: false, + }, { + name: "from is on first task", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Resources: []v1alpha2.PipelineDeclaredResource{{ + Name: "great-resource", Type: v1alpha2.PipelineResourceTypeGit, + }}, + Tasks: []v1alpha2.PipelineTask{{ + Name: "foo", + TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}, + Resources: &v1alpha2.PipelineTaskResources{ + Inputs: []v1alpha2.PipelineTaskInputResource{{ + Name: "the-resource", Resource: "great-resource", From: []string{"bar"}, + }}, + }, + }}, + }, + }, + failureExpected: true, + }, { + name: "from task doesnt exist", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Resources: []v1alpha2.PipelineDeclaredResource{{ + Name: "great-resource", Type: v1alpha2.PipelineResourceTypeGit, + }}, + Tasks: []v1alpha2.PipelineTask{{ + Name: "baz", TaskRef: &v1alpha2.TaskRef{Name: "baz-task"}, + }, { + Name: "foo", + TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}, + Resources: &v1alpha2.PipelineTaskResources{ + Inputs: []v1alpha2.PipelineTaskInputResource{{ + Name: "the-resource", Resource: "great-resource", From: []string{"bar"}, + }}, + }, + }}, + }, + }, + failureExpected: true, + }, { + name: "output resources missing from declaration", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Resources: []v1alpha2.PipelineDeclaredResource{{ + Name: "great-resource", Type: v1alpha2.PipelineResourceTypeGit, + }}, + Tasks: []v1alpha2.PipelineTask{{ + Name: "foo", + TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}, + Resources: &v1alpha2.PipelineTaskResources{ + 
Inputs: []v1alpha2.PipelineTaskInputResource{{ + Name: "the-resource", Resource: "great-resource", + }}, + Outputs: []v1alpha2.PipelineTaskOutputResource{{ + Name: "the-magic-resource", Resource: "missing-resource", + }}, + }, + }}, + }, + }, + failureExpected: true, + }, { + name: "input resources missing from declaration", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Resources: []v1alpha2.PipelineDeclaredResource{{ + Name: "great-resource", Type: v1alpha2.PipelineResourceTypeGit, + }}, + Tasks: []v1alpha2.PipelineTask{{ + Name: "foo", + TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}, + Resources: &v1alpha2.PipelineTaskResources{ + Inputs: []v1alpha2.PipelineTaskInputResource{{ + Name: "the-resource", Resource: "missing-resource", + }}, + Outputs: []v1alpha2.PipelineTaskOutputResource{{ + Name: "the-magic-resource", Resource: "great-resource", + }}, + }, + }}, + }, + }, + failureExpected: true, + }, { + name: "invalid condition only resource", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Tasks: []v1alpha2.PipelineTask{{ + Name: "bar", + TaskRef: &v1alpha2.TaskRef{Name: "bar-task"}, + Conditions: []v1alpha2.PipelineTaskCondition{{ + ConditionRef: "some-condition", + Resources: []v1alpha2.PipelineTaskInputResource{{ + Name: "sowe-workspace", Resource: "missing-resource", + }}, + }}, + }}, + }, + }, + failureExpected: true, + }, { + name: "invalid from in condition", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Tasks: []v1alpha2.PipelineTask{{ + Name: "foo", TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}, + }, { + Name: "bar", + TaskRef: &v1alpha2.TaskRef{Name: "bar-task"}, + Conditions: []v1alpha2.PipelineTaskCondition{{ + ConditionRef: "some-condition", + Resources: []v1alpha2.PipelineTaskInputResource{{ + Name: "sowe-workspace", Resource: "missing-resource", From: 
[]string{"foo"}, + }}, + }}, + }}, + }, + }, + failureExpected: true, + }, { + name: "from resource isn't output by task", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Resources: []v1alpha2.PipelineDeclaredResource{{ + Name: "great-resource", Type: v1alpha2.PipelineResourceTypeGit, + }, { + Name: "wonderful-resource", Type: v1alpha2.PipelineResourceTypeImage, + }}, + Tasks: []v1alpha2.PipelineTask{{ + Name: "bar", + TaskRef: &v1alpha2.TaskRef{Name: "bar-task"}, + Resources: &v1alpha2.PipelineTaskResources{ + Inputs: []v1alpha2.PipelineTaskInputResource{{ + Name: "some-resource", Resource: "great-resource", + }}, + }, + }, { + Name: "foo", + TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}, + Resources: &v1alpha2.PipelineTaskResources{ + Inputs: []v1alpha2.PipelineTaskInputResource{{ + Name: "wow-image", Resource: "wonderful-resource", From: []string{"bar"}, + }}, + }, + }}, + }, + }, + failureExpected: true, + }, { + name: "not defined parameter variable", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Tasks: []v1alpha2.PipelineTask{{ + Name: "foo", + TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}, + Params: []v1alpha2.Param{{ + Name: "a-param", Value: v1alpha2.ArrayOrString{Type: v1alpha2.ParamTypeString, StringVal: "$(params.does-not-exist)"}, + }}, + }}, + }, + }, + failureExpected: true, + }, { + name: "not defined parameter variable with defined", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Params: []v1alpha2.ParamSpec{{ + Name: "foo", Type: v1alpha2.ParamTypeString, + }}, + Tasks: []v1alpha2.PipelineTask{{ + Name: "foo", + TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}, + Params: []v1alpha2.Param{{ + Name: "a-param", Value: v1alpha2.ArrayOrString{Type: v1alpha2.ParamTypeString, StringVal: "$(params.foo) and $(params.does-not-exist)"}, + }}, + }}, + }, + }, + failureExpected: 
true, + }, { + name: "invalid parameter type", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Params: []v1alpha2.ParamSpec{{ + Name: "foo", Type: "invalidtype", + }}, + Tasks: []v1alpha2.PipelineTask{{ + Name: "foo", + TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}, + }}, + }, + }, + failureExpected: true, + }, { + name: "array parameter mismatching default type", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Params: []v1alpha2.ParamSpec{{ + Name: "foo", Type: v1alpha2.ParamTypeArray, Default: &v1alpha2.ArrayOrString{Type: v1alpha2.ParamTypeString, StringVal: "astring"}, + }}, + Tasks: []v1alpha2.PipelineTask{{ + Name: "foo", + TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}, + }}, + }, + }, + failureExpected: true, + }, { + name: "string parameter mismatching default type", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Params: []v1alpha2.ParamSpec{{ + Name: "foo", Type: v1alpha2.ParamTypeString, Default: &v1alpha2.ArrayOrString{Type: v1alpha2.ParamTypeArray, ArrayVal: []string{"anarray", "elements"}}, + }}, + Tasks: []v1alpha2.PipelineTask{{ + Name: "foo", + TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}, + }}, + }, + }, + failureExpected: true, + }, { + name: "array parameter used as string", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Params: []v1alpha2.ParamSpec{{ + Name: "baz", Type: v1alpha2.ParamTypeString, Default: &v1alpha2.ArrayOrString{Type: v1alpha2.ParamTypeArray, ArrayVal: []string{"anarray", "elements"}}, + }}, + Tasks: []v1alpha2.PipelineTask{{ + Name: "bar", + TaskRef: &v1alpha2.TaskRef{Name: "bar-task"}, + Params: []v1alpha2.Param{{ + Name: "a-param", Value: v1alpha2.ArrayOrString{Type: v1alpha2.ParamTypeString, StringVal: "$(params.baz)"}, + }}, + }}, + }, + }, + failureExpected: true, + }, { + name: 
"array parameter string template not isolated", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Params: []v1alpha2.ParamSpec{{ + Name: "baz", Type: v1alpha2.ParamTypeString, Default: &v1alpha2.ArrayOrString{Type: v1alpha2.ParamTypeArray, ArrayVal: []string{"anarray", "elements"}}, + }}, + Tasks: []v1alpha2.PipelineTask{{ + Name: "bar", + TaskRef: &v1alpha2.TaskRef{Name: "bar-task"}, + Params: []v1alpha2.Param{{ + Name: "a-param", Value: v1alpha2.ArrayOrString{Type: v1alpha2.ParamTypeArray, ArrayVal: []string{"value: $(params.baz)", "last"}}, + }}, + }}, + }, + }, + failureExpected: true, + }, { + name: "invalid dependency graph between the tasks", + p: &v1alpha2.Pipeline{ + ObjectMeta: metav1.ObjectMeta{Name: "pipeline"}, + Spec: v1alpha2.PipelineSpec{ + Tasks: []v1alpha2.PipelineTask{{ + Name: "foo", TaskRef: &v1alpha2.TaskRef{Name: "foo-task"}, RunAfter: []string{"bar"}, + }, { + Name: "bar", TaskRef: &v1alpha2.TaskRef{Name: "bar-task"}, RunAfter: []string{"foo"}, + }}, + }, + }, + failureExpected: true, + }} + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + err := tt.p.Validate(context.Background()) + if (!tt.failureExpected) && (err != nil) { + t.Errorf("Pipeline.Validate() returned error: %v", err) + } + + if tt.failureExpected && (err == nil) { + t.Error("Pipeline.Validate() did not return error, wanted error") + } + }) + } +} diff --git a/pkg/apis/pipeline/v1alpha2/register.go b/pkg/apis/pipeline/v1alpha2/register.go index 6798906c03b..dd79de2293d 100644 --- a/pkg/apis/pipeline/v1alpha2/register.go +++ b/pkg/apis/pipeline/v1alpha2/register.go @@ -48,6 +48,8 @@ func addKnownTypes(scheme *runtime.Scheme) error { scheme.AddKnownTypes(SchemeGroupVersion, &Task{}, &TaskList{}, + &Pipeline{}, + &PipelineList{}, ) // &Condition{}, // &ConditionList{}, @@ -55,8 +57,6 @@ func addKnownTypes(scheme *runtime.Scheme) error { // &ClusterTaskList{}, // &TaskRun{}, // &TaskRunList{}, - // 
&Pipeline{}, - // &PipelineList{}, // &PipelineRun{}, // &PipelineRunList{}, // &PipelineResource{}, diff --git a/pkg/apis/pipeline/v1alpha2/zz_generated.deepcopy.go b/pkg/apis/pipeline/v1alpha2/zz_generated.deepcopy.go index 79b4271ec58..0dae238068e 100644 --- a/pkg/apis/pipeline/v1alpha2/zz_generated.deepcopy.go +++ b/pkg/apis/pipeline/v1alpha2/zz_generated.deepcopy.go @@ -121,6 +121,321 @@ func (in *ParamSpec) DeepCopy() *ParamSpec { return out } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *Pipeline) DeepCopyInto(out *Pipeline) { + *out = *in + out.TypeMeta = in.TypeMeta + in.ObjectMeta.DeepCopyInto(&out.ObjectMeta) + in.Spec.DeepCopyInto(&out.Spec) + if in.Status != nil { + in, out := &in.Status, &out.Status + *out = new(PipelineStatus) + **out = **in + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Pipeline. +func (in *Pipeline) DeepCopy() *Pipeline { + if in == nil { + return nil + } + out := new(Pipeline) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *Pipeline) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *PipelineDeclaredResource) DeepCopyInto(out *PipelineDeclaredResource) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineDeclaredResource. +func (in *PipelineDeclaredResource) DeepCopy() *PipelineDeclaredResource { + if in == nil { + return nil + } + out := new(PipelineDeclaredResource) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *PipelineList) DeepCopyInto(out *PipelineList) { + *out = *in + out.TypeMeta = in.TypeMeta + out.ListMeta = in.ListMeta + if in.Items != nil { + in, out := &in.Items, &out.Items + *out = make([]Pipeline, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineList. +func (in *PipelineList) DeepCopy() *PipelineList { + if in == nil { + return nil + } + out := new(PipelineList) + in.DeepCopyInto(out) + return out +} + +// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object. +func (in *PipelineList) DeepCopyObject() runtime.Object { + if c := in.DeepCopy(); c != nil { + return c + } + return nil +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *PipelineSpec) DeepCopyInto(out *PipelineSpec) { + *out = *in + if in.Resources != nil { + in, out := &in.Resources, &out.Resources + *out = make([]PipelineDeclaredResource, len(*in)) + copy(*out, *in) + } + if in.Tasks != nil { + in, out := &in.Tasks, &out.Tasks + *out = make([]PipelineTask, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Params != nil { + in, out := &in.Params, &out.Params + *out = make([]ParamSpec, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineSpec. +func (in *PipelineSpec) DeepCopy() *PipelineSpec { + if in == nil { + return nil + } + out := new(PipelineSpec) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *PipelineStatus) DeepCopyInto(out *PipelineStatus) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineStatus. +func (in *PipelineStatus) DeepCopy() *PipelineStatus { + if in == nil { + return nil + } + out := new(PipelineStatus) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *PipelineTask) DeepCopyInto(out *PipelineTask) { + *out = *in + if in.TaskRef != nil { + in, out := &in.TaskRef, &out.TaskRef + *out = new(TaskRef) + **out = **in + } + if in.TaskSpec != nil { + in, out := &in.TaskSpec, &out.TaskSpec + *out = new(TaskSpec) + (*in).DeepCopyInto(*out) + } + if in.Conditions != nil { + in, out := &in.Conditions, &out.Conditions + *out = make([]PipelineTaskCondition, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.RunAfter != nil { + in, out := &in.RunAfter, &out.RunAfter + *out = make([]string, len(*in)) + copy(*out, *in) + } + if in.Resources != nil { + in, out := &in.Resources, &out.Resources + *out = new(PipelineTaskResources) + (*in).DeepCopyInto(*out) + } + if in.Params != nil { + in, out := &in.Params, &out.Params + *out = make([]Param, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineTask. +func (in *PipelineTask) DeepCopy() *PipelineTask { + if in == nil { + return nil + } + out := new(PipelineTask) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. 
+func (in *PipelineTaskCondition) DeepCopyInto(out *PipelineTaskCondition) { + *out = *in + if in.Params != nil { + in, out := &in.Params, &out.Params + *out = make([]Param, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Resources != nil { + in, out := &in.Resources, &out.Resources + *out = make([]PipelineTaskInputResource, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineTaskCondition. +func (in *PipelineTaskCondition) DeepCopy() *PipelineTaskCondition { + if in == nil { + return nil + } + out := new(PipelineTaskCondition) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *PipelineTaskInputResource) DeepCopyInto(out *PipelineTaskInputResource) { + *out = *in + if in.From != nil { + in, out := &in.From, &out.From + *out = make([]string, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineTaskInputResource. +func (in *PipelineTaskInputResource) DeepCopy() *PipelineTaskInputResource { + if in == nil { + return nil + } + out := new(PipelineTaskInputResource) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in PipelineTaskList) DeepCopyInto(out *PipelineTaskList) { + { + in := &in + *out = make(PipelineTaskList, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + return + } +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineTaskList. 
+func (in PipelineTaskList) DeepCopy() PipelineTaskList { + if in == nil { + return nil + } + out := new(PipelineTaskList) + in.DeepCopyInto(out) + return *out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *PipelineTaskOutputResource) DeepCopyInto(out *PipelineTaskOutputResource) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineTaskOutputResource. +func (in *PipelineTaskOutputResource) DeepCopy() *PipelineTaskOutputResource { + if in == nil { + return nil + } + out := new(PipelineTaskOutputResource) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *PipelineTaskParam) DeepCopyInto(out *PipelineTaskParam) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineTaskParam. +func (in *PipelineTaskParam) DeepCopy() *PipelineTaskParam { + if in == nil { + return nil + } + out := new(PipelineTaskParam) + in.DeepCopyInto(out) + return out +} + +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *PipelineTaskResources) DeepCopyInto(out *PipelineTaskResources) { + *out = *in + if in.Inputs != nil { + in, out := &in.Inputs, &out.Inputs + *out = make([]PipelineTaskInputResource, len(*in)) + for i := range *in { + (*in)[i].DeepCopyInto(&(*out)[i]) + } + } + if in.Outputs != nil { + in, out := &in.Outputs, &out.Outputs + *out = make([]PipelineTaskOutputResource, len(*in)) + copy(*out, *in) + } + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new PipelineTaskResources. 
+func (in *PipelineTaskResources) DeepCopy() *PipelineTaskResources { + if in == nil { + return nil + } + out := new(PipelineTaskResources) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *Step) DeepCopyInto(out *Step) { *out = *in @@ -198,6 +513,22 @@ func (in *TaskList) DeepCopyObject() runtime.Object { return nil } +// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. +func (in *TaskRef) DeepCopyInto(out *TaskRef) { + *out = *in + return +} + +// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TaskRef. +func (in *TaskRef) DeepCopy() *TaskRef { + if in == nil { + return nil + } + out := new(TaskRef) + in.DeepCopyInto(out) + return out +} + // DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil. func (in *TaskResource) DeepCopyInto(out *TaskResource) { *out = *in diff --git a/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/fake/fake_pipeline.go b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/fake/fake_pipeline.go new file mode 100644 index 00000000000..9987d89c0af --- /dev/null +++ b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/fake/fake_pipeline.go @@ -0,0 +1,128 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by client-gen. 
DO NOT EDIT. + +package fake + +import ( + v1alpha2 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" + v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + labels "k8s.io/apimachinery/pkg/labels" + schema "k8s.io/apimachinery/pkg/runtime/schema" + types "k8s.io/apimachinery/pkg/types" + watch "k8s.io/apimachinery/pkg/watch" + testing "k8s.io/client-go/testing" +) + +// FakePipelines implements PipelineInterface +type FakePipelines struct { + Fake *FakeTektonV1alpha2 + ns string +} + +var pipelinesResource = schema.GroupVersionResource{Group: "tekton.dev", Version: "v1alpha2", Resource: "pipelines"} + +var pipelinesKind = schema.GroupVersionKind{Group: "tekton.dev", Version: "v1alpha2", Kind: "Pipeline"} + +// Get takes name of the pipeline, and returns the corresponding pipeline object, and an error if there is any. +func (c *FakePipelines) Get(name string, options v1.GetOptions) (result *v1alpha2.Pipeline, err error) { + obj, err := c.Fake. + Invokes(testing.NewGetAction(pipelinesResource, c.ns, name), &v1alpha2.Pipeline{}) + + if obj == nil { + return nil, err + } + return obj.(*v1alpha2.Pipeline), err +} + +// List takes label and field selectors, and returns the list of Pipelines that match those selectors. +func (c *FakePipelines) List(opts v1.ListOptions) (result *v1alpha2.PipelineList, err error) { + obj, err := c.Fake. + Invokes(testing.NewListAction(pipelinesResource, pipelinesKind, c.ns, opts), &v1alpha2.PipelineList{}) + + if obj == nil { + return nil, err + } + + label, _, _ := testing.ExtractFromListOptions(opts) + if label == nil { + label = labels.Everything() + } + list := &v1alpha2.PipelineList{ListMeta: obj.(*v1alpha2.PipelineList).ListMeta} + for _, item := range obj.(*v1alpha2.PipelineList).Items { + if label.Matches(labels.Set(item.Labels)) { + list.Items = append(list.Items, item) + } + } + return list, err +} + +// Watch returns a watch.Interface that watches the requested pipelines. 
+func (c *FakePipelines) Watch(opts v1.ListOptions) (watch.Interface, error) { + return c.Fake. + InvokesWatch(testing.NewWatchAction(pipelinesResource, c.ns, opts)) + +} + +// Create takes the representation of a pipeline and creates it. Returns the server's representation of the pipeline, and an error, if there is any. +func (c *FakePipelines) Create(pipeline *v1alpha2.Pipeline) (result *v1alpha2.Pipeline, err error) { + obj, err := c.Fake. + Invokes(testing.NewCreateAction(pipelinesResource, c.ns, pipeline), &v1alpha2.Pipeline{}) + + if obj == nil { + return nil, err + } + return obj.(*v1alpha2.Pipeline), err +} + +// Update takes the representation of a pipeline and updates it. Returns the server's representation of the pipeline, and an error, if there is any. +func (c *FakePipelines) Update(pipeline *v1alpha2.Pipeline) (result *v1alpha2.Pipeline, err error) { + obj, err := c.Fake. + Invokes(testing.NewUpdateAction(pipelinesResource, c.ns, pipeline), &v1alpha2.Pipeline{}) + + if obj == nil { + return nil, err + } + return obj.(*v1alpha2.Pipeline), err +} + +// Delete takes name of the pipeline and deletes it. Returns an error if one occurs. +func (c *FakePipelines) Delete(name string, options *v1.DeleteOptions) error { + _, err := c.Fake. + Invokes(testing.NewDeleteAction(pipelinesResource, c.ns, name), &v1alpha2.Pipeline{}) + + return err +} + +// DeleteCollection deletes a collection of objects. +func (c *FakePipelines) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error { + action := testing.NewDeleteCollectionAction(pipelinesResource, c.ns, listOptions) + + _, err := c.Fake.Invokes(action, &v1alpha2.PipelineList{}) + return err +} + +// Patch applies the patch and returns the patched pipeline. +func (c *FakePipelines) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1alpha2.Pipeline, err error) { + obj, err := c.Fake. 
+ Invokes(testing.NewPatchSubresourceAction(pipelinesResource, c.ns, name, pt, data, subresources...), &v1alpha2.Pipeline{}) + + if obj == nil { + return nil, err + } + return obj.(*v1alpha2.Pipeline), err +} diff --git a/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/fake/fake_pipeline_client.go b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/fake/fake_pipeline_client.go index b01051d0d42..bbdf7ddf8dd 100644 --- a/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/fake/fake_pipeline_client.go +++ b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/fake/fake_pipeline_client.go @@ -28,6 +28,10 @@ type FakeTektonV1alpha2 struct { *testing.Fake } +func (c *FakeTektonV1alpha2) Pipelines(namespace string) v1alpha2.PipelineInterface { + return &FakePipelines{c, namespace} +} + func (c *FakeTektonV1alpha2) Tasks(namespace string) v1alpha2.TaskInterface { return &FakeTasks{c, namespace} } diff --git a/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/generated_expansion.go b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/generated_expansion.go index 61dab461677..b2f3d71464a 100644 --- a/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/generated_expansion.go +++ b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/generated_expansion.go @@ -18,4 +18,6 @@ limitations under the License. package v1alpha2 +type PipelineExpansion interface{} + type TaskExpansion interface{} diff --git a/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/pipeline.go b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/pipeline.go new file mode 100644 index 00000000000..a5c10399123 --- /dev/null +++ b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/pipeline.go @@ -0,0 +1,174 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by client-gen. DO NOT EDIT. + +package v1alpha2 + +import ( + "time" + + v1alpha2 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" + scheme "github.com/tektoncd/pipeline/pkg/client/clientset/versioned/scheme" + v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + types "k8s.io/apimachinery/pkg/types" + watch "k8s.io/apimachinery/pkg/watch" + rest "k8s.io/client-go/rest" +) + +// PipelinesGetter has a method to return a PipelineInterface. +// A group's client should implement this interface. +type PipelinesGetter interface { + Pipelines(namespace string) PipelineInterface +} + +// PipelineInterface has methods to work with Pipeline resources. 
+type PipelineInterface interface { + Create(*v1alpha2.Pipeline) (*v1alpha2.Pipeline, error) + Update(*v1alpha2.Pipeline) (*v1alpha2.Pipeline, error) + Delete(name string, options *v1.DeleteOptions) error + DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error + Get(name string, options v1.GetOptions) (*v1alpha2.Pipeline, error) + List(opts v1.ListOptions) (*v1alpha2.PipelineList, error) + Watch(opts v1.ListOptions) (watch.Interface, error) + Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1alpha2.Pipeline, err error) + PipelineExpansion +} + +// pipelines implements PipelineInterface +type pipelines struct { + client rest.Interface + ns string +} + +// newPipelines returns a Pipelines +func newPipelines(c *TektonV1alpha2Client, namespace string) *pipelines { + return &pipelines{ + client: c.RESTClient(), + ns: namespace, + } +} + +// Get takes name of the pipeline, and returns the corresponding pipeline object, and an error if there is any. +func (c *pipelines) Get(name string, options v1.GetOptions) (result *v1alpha2.Pipeline, err error) { + result = &v1alpha2.Pipeline{} + err = c.client.Get(). + Namespace(c.ns). + Resource("pipelines"). + Name(name). + VersionedParams(&options, scheme.ParameterCodec). + Do(). + Into(result) + return +} + +// List takes label and field selectors, and returns the list of Pipelines that match those selectors. +func (c *pipelines) List(opts v1.ListOptions) (result *v1alpha2.PipelineList, err error) { + var timeout time.Duration + if opts.TimeoutSeconds != nil { + timeout = time.Duration(*opts.TimeoutSeconds) * time.Second + } + result = &v1alpha2.PipelineList{} + err = c.client.Get(). + Namespace(c.ns). + Resource("pipelines"). + VersionedParams(&opts, scheme.ParameterCodec). + Timeout(timeout). + Do(). + Into(result) + return +} + +// Watch returns a watch.Interface that watches the requested pipelines. 
+func (c *pipelines) Watch(opts v1.ListOptions) (watch.Interface, error) { + var timeout time.Duration + if opts.TimeoutSeconds != nil { + timeout = time.Duration(*opts.TimeoutSeconds) * time.Second + } + opts.Watch = true + return c.client.Get(). + Namespace(c.ns). + Resource("pipelines"). + VersionedParams(&opts, scheme.ParameterCodec). + Timeout(timeout). + Watch() +} + +// Create takes the representation of a pipeline and creates it. Returns the server's representation of the pipeline, and an error, if there is any. +func (c *pipelines) Create(pipeline *v1alpha2.Pipeline) (result *v1alpha2.Pipeline, err error) { + result = &v1alpha2.Pipeline{} + err = c.client.Post(). + Namespace(c.ns). + Resource("pipelines"). + Body(pipeline). + Do(). + Into(result) + return +} + +// Update takes the representation of a pipeline and updates it. Returns the server's representation of the pipeline, and an error, if there is any. +func (c *pipelines) Update(pipeline *v1alpha2.Pipeline) (result *v1alpha2.Pipeline, err error) { + result = &v1alpha2.Pipeline{} + err = c.client.Put(). + Namespace(c.ns). + Resource("pipelines"). + Name(pipeline.Name). + Body(pipeline). + Do(). + Into(result) + return +} + +// Delete takes name of the pipeline and deletes it. Returns an error if one occurs. +func (c *pipelines) Delete(name string, options *v1.DeleteOptions) error { + return c.client.Delete(). + Namespace(c.ns). + Resource("pipelines"). + Name(name). + Body(options). + Do(). + Error() +} + +// DeleteCollection deletes a collection of objects. +func (c *pipelines) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error { + var timeout time.Duration + if listOptions.TimeoutSeconds != nil { + timeout = time.Duration(*listOptions.TimeoutSeconds) * time.Second + } + return c.client.Delete(). + Namespace(c.ns). + Resource("pipelines"). + VersionedParams(&listOptions, scheme.ParameterCodec). + Timeout(timeout). + Body(options). + Do(). 
+ Error() +} + +// Patch applies the patch and returns the patched pipeline. +func (c *pipelines) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1alpha2.Pipeline, err error) { + result = &v1alpha2.Pipeline{} + err = c.client.Patch(pt). + Namespace(c.ns). + Resource("pipelines"). + SubResource(subresources...). + Name(name). + Body(data). + Do(). + Into(result) + return +} diff --git a/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/pipeline_client.go b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/pipeline_client.go index 52493bbea87..c51399bdd0e 100644 --- a/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/pipeline_client.go +++ b/pkg/client/clientset/versioned/typed/pipeline/v1alpha2/pipeline_client.go @@ -27,6 +27,7 @@ import ( type TektonV1alpha2Interface interface { RESTClient() rest.Interface + PipelinesGetter TasksGetter } @@ -35,6 +36,10 @@ type TektonV1alpha2Client struct { restClient rest.Interface } +func (c *TektonV1alpha2Client) Pipelines(namespace string) PipelineInterface { + return newPipelines(c, namespace) +} + func (c *TektonV1alpha2Client) Tasks(namespace string) TaskInterface { return newTasks(c, namespace) } diff --git a/pkg/client/informers/externalversions/generic.go b/pkg/client/informers/externalversions/generic.go index 5437e89bead..cf6d3fde208 100644 --- a/pkg/client/informers/externalversions/generic.go +++ b/pkg/client/informers/externalversions/generic.go @@ -68,6 +68,8 @@ func (f *sharedInformerFactory) ForResource(resource schema.GroupVersionResource return &genericInformer{resource: resource.GroupResource(), informer: f.Tekton().V1alpha1().TaskRuns().Informer()}, nil // Group=tekton.dev, Version=v1alpha2 + case v1alpha2.SchemeGroupVersion.WithResource("pipelines"): + return &genericInformer{resource: resource.GroupResource(), informer: f.Tekton().V1alpha2().Pipelines().Informer()}, nil case v1alpha2.SchemeGroupVersion.WithResource("tasks"): return 
&genericInformer{resource: resource.GroupResource(), informer: f.Tekton().V1alpha2().Tasks().Informer()}, nil diff --git a/pkg/client/informers/externalversions/pipeline/v1alpha2/interface.go b/pkg/client/informers/externalversions/pipeline/v1alpha2/interface.go index 994beeea500..d179c9f6f45 100644 --- a/pkg/client/informers/externalversions/pipeline/v1alpha2/interface.go +++ b/pkg/client/informers/externalversions/pipeline/v1alpha2/interface.go @@ -24,6 +24,8 @@ import ( // Interface provides access to all the informers in this group version. type Interface interface { + // Pipelines returns a PipelineInformer. + Pipelines() PipelineInformer // Tasks returns a TaskInformer. Tasks() TaskInformer } @@ -39,6 +41,11 @@ func New(f internalinterfaces.SharedInformerFactory, namespace string, tweakList return &version{factory: f, namespace: namespace, tweakListOptions: tweakListOptions} } +// Pipelines returns a PipelineInformer. +func (v *version) Pipelines() PipelineInformer { + return &pipelineInformer{factory: v.factory, namespace: v.namespace, tweakListOptions: v.tweakListOptions} +} + // Tasks returns a TaskInformer. func (v *version) Tasks() TaskInformer { return &taskInformer{factory: v.factory, namespace: v.namespace, tweakListOptions: v.tweakListOptions} diff --git a/pkg/client/informers/externalversions/pipeline/v1alpha2/pipeline.go b/pkg/client/informers/externalversions/pipeline/v1alpha2/pipeline.go new file mode 100644 index 00000000000..6801a3e82d0 --- /dev/null +++ b/pkg/client/informers/externalversions/pipeline/v1alpha2/pipeline.go @@ -0,0 +1,89 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by informer-gen. DO NOT EDIT. + +package v1alpha2 + +import ( + time "time" + + pipelinev1alpha2 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" + versioned "github.com/tektoncd/pipeline/pkg/client/clientset/versioned" + internalinterfaces "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/internalinterfaces" + v1alpha2 "github.com/tektoncd/pipeline/pkg/client/listers/pipeline/v1alpha2" + v1 "k8s.io/apimachinery/pkg/apis/meta/v1" + runtime "k8s.io/apimachinery/pkg/runtime" + watch "k8s.io/apimachinery/pkg/watch" + cache "k8s.io/client-go/tools/cache" +) + +// PipelineInformer provides access to a shared informer and lister for +// Pipelines. +type PipelineInformer interface { + Informer() cache.SharedIndexInformer + Lister() v1alpha2.PipelineLister +} + +type pipelineInformer struct { + factory internalinterfaces.SharedInformerFactory + tweakListOptions internalinterfaces.TweakListOptionsFunc + namespace string +} + +// NewPipelineInformer constructs a new informer for Pipeline type. +// Always prefer using an informer factory to get a shared informer instead of getting an independent +// one. This reduces memory footprint and number of connections to the server. +func NewPipelineInformer(client versioned.Interface, namespace string, resyncPeriod time.Duration, indexers cache.Indexers) cache.SharedIndexInformer { + return NewFilteredPipelineInformer(client, namespace, resyncPeriod, indexers, nil) +} + +// NewFilteredPipelineInformer constructs a new informer for Pipeline type. 
+// Always prefer using an informer factory to get a shared informer instead of getting an independent +// one. This reduces memory footprint and number of connections to the server. +func NewFilteredPipelineInformer(client versioned.Interface, namespace string, resyncPeriod time.Duration, indexers cache.Indexers, tweakListOptions internalinterfaces.TweakListOptionsFunc) cache.SharedIndexInformer { + return cache.NewSharedIndexInformer( + &cache.ListWatch{ + ListFunc: func(options v1.ListOptions) (runtime.Object, error) { + if tweakListOptions != nil { + tweakListOptions(&options) + } + return client.TektonV1alpha2().Pipelines(namespace).List(options) + }, + WatchFunc: func(options v1.ListOptions) (watch.Interface, error) { + if tweakListOptions != nil { + tweakListOptions(&options) + } + return client.TektonV1alpha2().Pipelines(namespace).Watch(options) + }, + }, + &pipelinev1alpha2.Pipeline{}, + resyncPeriod, + indexers, + ) +} + +func (f *pipelineInformer) defaultInformer(client versioned.Interface, resyncPeriod time.Duration) cache.SharedIndexInformer { + return NewFilteredPipelineInformer(client, f.namespace, resyncPeriod, cache.Indexers{cache.NamespaceIndex: cache.MetaNamespaceIndexFunc}, f.tweakListOptions) +} + +func (f *pipelineInformer) Informer() cache.SharedIndexInformer { + return f.factory.InformerFor(&pipelinev1alpha2.Pipeline{}, f.defaultInformer) +} + +func (f *pipelineInformer) Lister() v1alpha2.PipelineLister { + return v1alpha2.NewPipelineLister(f.Informer().GetIndexer()) +} diff --git a/pkg/client/injection/informers/pipeline/v1alpha2/pipeline/fake/fake.go b/pkg/client/injection/informers/pipeline/v1alpha2/pipeline/fake/fake.go new file mode 100644 index 00000000000..946e54037a7 --- /dev/null +++ b/pkg/client/injection/informers/pipeline/v1alpha2/pipeline/fake/fake.go @@ -0,0 +1,40 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the 
License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by injection-gen. DO NOT EDIT. + +package fake + +import ( + "context" + + fake "github.com/tektoncd/pipeline/pkg/client/injection/informers/factory/fake" + pipeline "github.com/tektoncd/pipeline/pkg/client/injection/informers/pipeline/v1alpha2/pipeline" + controller "knative.dev/pkg/controller" + injection "knative.dev/pkg/injection" +) + +var Get = pipeline.Get + +func init() { + injection.Fake.RegisterInformer(withInformer) +} + +func withInformer(ctx context.Context) (context.Context, controller.Informer) { + f := fake.Get(ctx) + inf := f.Tekton().V1alpha2().Pipelines() + return context.WithValue(ctx, pipeline.Key{}, inf), inf.Informer() +} diff --git a/pkg/client/injection/informers/pipeline/v1alpha2/pipeline/pipeline.go b/pkg/client/injection/informers/pipeline/v1alpha2/pipeline/pipeline.go new file mode 100644 index 00000000000..ff19269f11d --- /dev/null +++ b/pkg/client/injection/informers/pipeline/v1alpha2/pipeline/pipeline.go @@ -0,0 +1,52 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +// Code generated by injection-gen. DO NOT EDIT. + +package pipeline + +import ( + "context" + + v1alpha2 "github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1alpha2" + factory "github.com/tektoncd/pipeline/pkg/client/injection/informers/factory" + controller "knative.dev/pkg/controller" + injection "knative.dev/pkg/injection" + logging "knative.dev/pkg/logging" +) + +func init() { + injection.Default.RegisterInformer(withInformer) +} + +// Key is used for associating the Informer inside the context.Context. +type Key struct{} + +func withInformer(ctx context.Context) (context.Context, controller.Informer) { + f := factory.Get(ctx) + inf := f.Tekton().V1alpha2().Pipelines() + return context.WithValue(ctx, Key{}, inf), inf.Informer() +} + +// Get extracts the typed informer from the context. +func Get(ctx context.Context) v1alpha2.PipelineInformer { + untyped := ctx.Value(Key{}) + if untyped == nil { + logging.FromContext(ctx).Panic( + "Unable to fetch github.com/tektoncd/pipeline/pkg/client/informers/externalversions/pipeline/v1alpha2.PipelineInformer from context.") + } + return untyped.(v1alpha2.PipelineInformer) +} diff --git a/pkg/client/listers/pipeline/v1alpha2/expansion_generated.go b/pkg/client/listers/pipeline/v1alpha2/expansion_generated.go index 0bdeafb14e8..d1690016b88 100644 --- a/pkg/client/listers/pipeline/v1alpha2/expansion_generated.go +++ b/pkg/client/listers/pipeline/v1alpha2/expansion_generated.go @@ -18,6 +18,14 @@ limitations under the License. package v1alpha2 +// PipelineListerExpansion allows custom methods to be added to +// PipelineLister. +type PipelineListerExpansion interface{} + +// PipelineNamespaceListerExpansion allows custom methods to be added to +// PipelineNamespaceLister. +type PipelineNamespaceListerExpansion interface{} + // TaskListerExpansion allows custom methods to be added to // TaskLister. 
type TaskListerExpansion interface{} diff --git a/pkg/client/listers/pipeline/v1alpha2/pipeline.go b/pkg/client/listers/pipeline/v1alpha2/pipeline.go new file mode 100644 index 00000000000..5fa74362ac4 --- /dev/null +++ b/pkg/client/listers/pipeline/v1alpha2/pipeline.go @@ -0,0 +1,94 @@ +/* +Copyright 2019 The Tekton Authors + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// Code generated by lister-gen. DO NOT EDIT. + +package v1alpha2 + +import ( + v1alpha2 "github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha2" + "k8s.io/apimachinery/pkg/api/errors" + "k8s.io/apimachinery/pkg/labels" + "k8s.io/client-go/tools/cache" +) + +// PipelineLister helps list Pipelines. +type PipelineLister interface { + // List lists all Pipelines in the indexer. + List(selector labels.Selector) (ret []*v1alpha2.Pipeline, err error) + // Pipelines returns an object that can list and get Pipelines. + Pipelines(namespace string) PipelineNamespaceLister + PipelineListerExpansion +} + +// pipelineLister implements the PipelineLister interface. +type pipelineLister struct { + indexer cache.Indexer +} + +// NewPipelineLister returns a new PipelineLister. +func NewPipelineLister(indexer cache.Indexer) PipelineLister { + return &pipelineLister{indexer: indexer} +} + +// List lists all Pipelines in the indexer. 
+func (s *pipelineLister) List(selector labels.Selector) (ret []*v1alpha2.Pipeline, err error) { + err = cache.ListAll(s.indexer, selector, func(m interface{}) { + ret = append(ret, m.(*v1alpha2.Pipeline)) + }) + return ret, err +} + +// Pipelines returns an object that can list and get Pipelines. +func (s *pipelineLister) Pipelines(namespace string) PipelineNamespaceLister { + return pipelineNamespaceLister{indexer: s.indexer, namespace: namespace} +} + +// PipelineNamespaceLister helps list and get Pipelines. +type PipelineNamespaceLister interface { + // List lists all Pipelines in the indexer for a given namespace. + List(selector labels.Selector) (ret []*v1alpha2.Pipeline, err error) + // Get retrieves the Pipeline from the indexer for a given namespace and name. + Get(name string) (*v1alpha2.Pipeline, error) + PipelineNamespaceListerExpansion +} + +// pipelineNamespaceLister implements the PipelineNamespaceLister +// interface. +type pipelineNamespaceLister struct { + indexer cache.Indexer + namespace string +} + +// List lists all Pipelines in the indexer for a given namespace. +func (s pipelineNamespaceLister) List(selector labels.Selector) (ret []*v1alpha2.Pipeline, err error) { + err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { + ret = append(ret, m.(*v1alpha2.Pipeline)) + }) + return ret, err +} + +// Get retrieves the Pipeline from the indexer for a given namespace and name. +func (s pipelineNamespaceLister) Get(name string) (*v1alpha2.Pipeline, error) { + obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) + if err != nil { + return nil, err + } + if !exists { + return nil, errors.NewNotFound(v1alpha2.Resource("pipeline"), name) + } + return obj.(*v1alpha2.Pipeline), nil +}