-
Notifications
You must be signed in to change notification settings - Fork 1.6k
/
pipeline_spec.proto
789 lines (689 loc) · 29.6 KB
/
pipeline_spec.proto
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
syntax = "proto3";
package ml_pipelines;
import "google/protobuf/any.proto";
import "google/protobuf/struct.proto";
option go_package = "github.com/kubeflow/pipelines/api/v2alpha1/go";
// The spec of a pipeline job, i.e. a single submitted run of a pipeline
// together with its runtime configuration.
message PipelineJob {
  // Name of the job.
  string name = 1;

  // User-friendly display name.
  string display_name = 2;

  // Removed field numbers; kept reserved so they are never reused.
  reserved 3, 4, 5, 6;

  // Definition of the pipeline that is being executed.
  // Stored as an untyped Struct; expected to be a serialized [PipelineSpec][].
  google.protobuf.Struct pipeline_spec = 7;

  reserved 8, 9, 10;

  // The labels with user-defined metadata to organize PipelineJob.
  map<string, string> labels = 11;

  // The runtime config of a PipelineJob.
  message RuntimeConfig {
    // The runtime parameters of the PipelineJob. The parameters will be
    // passed into [PipelineJob.pipeline_spec][] to replace the placeholders
    // at runtime.
    map<string, Value> parameters = 1;

    // A path in a Cloud Storage bucket which will be treated as the root
    // output directory of the pipeline. It is used by the system to
    // generate the paths of output artifacts.
    // This is a GCP-specific optimization.
    string gcs_output_directory = 2;
  }

  // Runtime config of the pipeline.
  RuntimeConfig runtime_config = 12;
}
// The spec of a pipeline: its metadata, the components it is built from, and
// the root component whose completion ends the pipeline.
// NOTE(review): field numbers 2, 3 and 6 are unused but not `reserved` —
// confirm they were never released before reusing or reserving them.
message PipelineSpec {
  // The metadata of the pipeline.
  PipelineInfo pipeline_info = 1;

  // The deployment config of the pipeline.
  // The deployment config can be extended to provide platform specific configs.
  // Expected to carry a serialized [PipelineDeploymentConfig][].
  google.protobuf.Struct deployment_spec = 7;

  // The version of the sdk, which compiles the spec.
  string sdk_version = 4;

  // The version of the schema.
  string schema_version = 5;

  // The definition of a runtime parameter.
  message RuntimeParameter {
    // Required field. The type of the runtime parameter.
    PrimitiveType.PrimitiveTypeEnum type = 1;

    // Optional field. Default value of the runtime parameter. If not set and
    // the runtime parameter value is not provided during runtime, an error
    // will be raised.
    Value default_value = 2;
  }

  // The map of name to definition of all components used in this pipeline.
  map<string, ComponentSpec> components = 8;

  // The definition of the main pipeline. Execution of the pipeline is
  // completed upon the completion of this component.
  ComponentSpec root = 9;
}
// Definition of a component: its input/output interface plus exactly one
// implementation — either a nested DAG or a single executor.
message ComponentSpec {
  // Definition of the input parameters and artifacts of the component.
  ComponentInputsSpec input_definitions = 1;

  // Definition of the output parameters and artifacts of the component.
  ComponentOutputsSpec output_definitions = 2;

  // Either a DAG or a single execution.
  oneof implementation {
    // The component is implemented as a DAG of sub-tasks.
    DagSpec dag = 3;
    // The component is implemented by a single executor; the label is a key
    // into [PipelineDeploymentConfig.executors][].
    string executor_label = 4;
  }
}
// A DAG contains multiple tasks.
message DagSpec {
  // The tasks inside the dag, keyed by task name.
  map<string, PipelineTaskSpec> tasks = 1;

  // Defines how the outputs of the dag are linked to the sub tasks.
  DagOutputsSpec outputs = 2;
}
// Definition of the output artifacts and parameters of the DAG component.
message DagOutputsSpec {
  // Selects a defined output artifact from a sub task of the DAG.
  message ArtifactSelectorSpec {
    // The name of the sub task which produces the output that matches with
    // the `output_artifact_key`.
    string producer_subtask = 1;

    // The key of [ComponentOutputsSpec.artifacts][] map of the producer task.
    string output_artifact_key = 2;
  }

  // Selects a list of output artifacts that will be aggregated to the single
  // output artifact channel of the DAG.
  message DagOutputArtifactSpec {
    // The selected artifacts will be aggregated as a single
    // output channel of the DAG.
    repeated ArtifactSelectorSpec artifact_selectors = 1;
  }

  // Name to the output artifact channel of the DAG.
  map<string, DagOutputArtifactSpec> artifacts = 1;

  // Selects a defined output parameter from a sub task of the DAG.
  message ParameterSelectorSpec {
    // The name of the sub task which produces the output that matches with
    // the `output_parameter_key`.
    string producer_subtask = 1;

    // The key of [ComponentOutputsSpec.parameters][] map of the producer task.
    string output_parameter_key = 2;
  }

  // Aggregates output parameters from sub tasks into a list object.
  message ParameterSelectorsSpec {
    repeated ParameterSelectorSpec parameter_selectors = 1;
  }

  // Aggregates output parameters from sub tasks into a map object.
  message MapParameterSelectorsSpec {
    // NOTE(review): numbering starts at 2; field 1 appears unused but is not
    // `reserved` — confirm its history before reusing the number.
    map<string, ParameterSelectorSpec> mapped_parameters = 2;
  }

  // We support four ways to fan-in output parameters from sub tasks to the
  // DAG parent task:
  // 1. Directly expose a single output parameter from a sub task.
  // 2. (Conditional flow) Expose a list of outputs from multiple tasks
  //    (some might be skipped) but allow only one of the outputs to be
  //    generated.
  // 3. Expose a list of outputs from multiple tasks (e.g. iterator flow).
  // 4. Expose the aggregation of output parameters as a name-value map.
  // NOTE(review): only kinds 1 and 2 are currently representable here; the
  // list/map aggregation kinds have no oneof member (field 3 is reserved).
  message DagOutputParameterSpec {
    oneof kind {
      // Returns the sub-task parameter as a DAG parameter. The selected
      // parameter must have the same type as the DAG parameter type.
      ParameterSelectorSpec value_from_parameter = 1;

      // Returns one of the sub-task parameters as a DAG parameter. If there
      // are multiple values available to select, the DAG will fail. All the
      // selected parameters must have the same type as the DAG parameter
      // type.
      ParameterSelectorsSpec value_from_oneof = 2;
    }

    reserved 3;
  }

  // The name to the output parameter.
  map<string, DagOutputParameterSpec> parameters = 2;
}
// Definition specification of the component input parameters and artifacts.
message ComponentInputsSpec {
  // Definition of an artifact input.
  message ArtifactSpec {
    // The type of the input artifact.
    ArtifactTypeSchema artifact_type = 1;
  }

  // Definition of a parameter input.
  message ParameterSpec {
    // The primitive type of the input parameter.
    PrimitiveType.PrimitiveTypeEnum type = 1;
  }

  // Name to artifact input.
  map<string, ArtifactSpec> artifacts = 1;

  // Name to parameter input.
  map<string, ParameterSpec> parameters = 2;
}
// Definition specification of the component output parameters and artifacts.
message ComponentOutputsSpec {
  // Definition of an artifact output.
  message ArtifactSpec {
    // The type of the output artifact.
    ArtifactTypeSchema artifact_type = 1;

    // Deprecated. Use [ArtifactSpec.metadata][] instead.
    map<string, ValueOrRuntimeParameter> properties = 2 [deprecated = true];

    // Deprecated. Use [ArtifactSpec.metadata][] instead.
    map<string, ValueOrRuntimeParameter> custom_properties = 3
        [deprecated = true];

    // Properties of the Artifact.
    google.protobuf.Struct metadata = 4;
  }

  // Definition of a parameter output.
  message ParameterSpec {
    // The primitive type of the output parameter.
    PrimitiveType.PrimitiveTypeEnum type = 1;
  }

  // Name to artifact output.
  map<string, ArtifactSpec> artifacts = 1;

  // Name to parameter output.
  map<string, ParameterSpec> parameters = 2;
}
// The spec of task inputs.
message TaskInputsSpec {
  // The specification of a task input artifact.
  // NOTE(review): field numbers 1 and 2 are unused but not `reserved` —
  // confirm their history before reusing them.
  message InputArtifactSpec {
    // Selects an output artifact of an upstream task as this input.
    message TaskOutputArtifactSpec {
      // The name of the upstream task which produces the output that matches
      // with the `output_artifact_key`.
      string producer_task = 1;

      // The key of [TaskOutputsSpec.artifacts][] map of the producer task.
      string output_artifact_key = 2;
    }

    oneof kind {
      // Pass the input artifact from another task within the same parent
      // component.
      TaskOutputArtifactSpec task_output_artifact = 3;

      // Pass the input artifact from parent component input artifact.
      string component_input_artifact = 4;
    }

    reserved 5;
  }

  // Represents an input parameter. The value can be taken from an upstream
  // task's output parameter (if specifying `producer_task` and
  // `output_parameter_key`), or it can be a runtime value, which can either
  // be determined at compile-time, or from a pipeline parameter.
  message InputParameterSpec {
    // Represents an upstream task's output parameter.
    message TaskOutputParameterSpec {
      // The name of the upstream task which produces the output parameter
      // that matches with the `output_parameter_key`.
      string producer_task = 1;

      // The key of [TaskOutputsSpec.parameters][] map of the producer task.
      string output_parameter_key = 2;
    }

    // Represents an upstream task's final status. The field can only be set
    // if the schema version is `2.0.0`. The resolved input parameter will be
    // a json payload in string type.
    message TaskFinalStatus {
      // The name of the upstream task where the final status is coming from.
      string producer_task = 1;
    }

    oneof kind {
      // Output parameter from an upstream task.
      TaskOutputParameterSpec task_output_parameter = 1;

      // A constant value or runtime parameter.
      ValueOrRuntimeParameter runtime_value = 2;

      // Pass the input parameter from parent component input parameter.
      string component_input_parameter = 3;

      // The final status of an upstream task.
      TaskFinalStatus task_final_status = 5;
    }

    // Selector expression of Common Expression Language (CEL)
    // that applies to the parameter found from above kind.
    //
    // The expression is applied to the Value type
    // [Value][]. For example,
    // 'size(string_value)' will return the size of the Value.string_value.
    //
    // After applying the selection, the parameter will be returned as a
    // [Value][]. The type of the Value is either inferred from the input
    // definition in the corresponding
    // [ComponentSpec.input_definitions.parameters][], or if not found,
    // automatically inferred as either string value or double value.
    //
    // In addition to the builtin functions in CEL, the value.string_value can
    // be treated as a json string and parsed to the [google.protobuf.Value][]
    // proto message. Then, the CEL expression provided in this field will be
    // used to get the requested field. For examples,
    // - if Value.string_value is a json array of "[1.1, 2.2, 3.3]",
    //   'parseJson(string_value)[i]' will pass the ith parameter from the
    //   list to the current task, or
    // - if the Value.string_value is a json map of "{"a": 1.1, "b": 2.2,
    //   "c": 3.3}", 'parseJson(string_value)[key]' will pass the map value
    //   from the struct map to the current task.
    //
    // If unset, the value will be passed directly to the current task.
    string parameter_expression_selector = 4;
  }

  // A map of input parameters which are small values, stored by the system
  // and can be queryable.
  map<string, InputParameterSpec> parameters = 1;

  // A map of input artifacts.
  map<string, InputArtifactSpec> artifacts = 2;
}
// The spec of task outputs.
message TaskOutputsSpec {
  // The specification of a task output artifact.
  message OutputArtifactSpec {
    // The type of the artifact.
    ArtifactTypeSchema artifact_type = 1;

    // The properties of the artifact, which are determined either at
    // compile-time, or at pipeline submission time through runtime parameters.
    map<string, ValueOrRuntimeParameter> properties = 2;

    // The custom properties of the artifact, which are determined either at
    // compile-time, or at pipeline submission time through runtime parameters.
    map<string, ValueOrRuntimeParameter> custom_properties = 3;
  }

  // Specification for output parameters produced by the task.
  message OutputParameterSpec {
    // Required field. The type of the output parameter.
    PrimitiveType.PrimitiveTypeEnum type = 1;
  }

  // A map of output parameters which are small values, stored by the system
  // and can be queryable. The output key is used
  // by [TaskInputsSpec.InputParameterSpec][] of the downstream task to specify
  // the data dependency. The same key will also be used by
  // [ExecutorInput.Inputs][] to reference the output parameter.
  map<string, OutputParameterSpec> parameters = 1;

  // A map of output artifacts. Keyed by output key. The output key is used
  // by [TaskInputsSpec.InputArtifactSpec][] of the downstream task to specify
  // the data dependency. The same key will also be used by
  // [ExecutorInput.Inputs][] to reference the output artifact.
  map<string, OutputArtifactSpec> artifacts = 2;
}
// Represents primitive types. The wrapper is needed to give a namespace to
// the enum values so we don't need to add a `PRIMITIVE_TYPE_` prefix to each
// enum value.
message PrimitiveType {
  // The primitive types.
  enum PrimitiveTypeEnum {
    // Default; treated as "type not set".
    PRIMITIVE_TYPE_UNSPECIFIED = 0;
    // A 64-bit signed integer.
    INT = 1;
    // A double-precision floating point number.
    DOUBLE = 2;
    // A UTF-8 string.
    STRING = 3;
  }
}
// The spec of a pipeline task.
// NOTE(review): field numbers 3 and 4 are unused but not `reserved` —
// confirm they were never released before reusing them.
message PipelineTaskSpec {
  // Basic info of a pipeline task.
  PipelineTaskInfo task_info = 1;

  // Specification for task inputs which contains parameters and artifacts.
  TaskInputsSpec inputs = 2;

  // A list of names of upstream tasks that do not provide input
  // artifacts for this task, but nonetheless whose completion this task
  // depends on.
  repeated string dependent_tasks = 5;

  // Options controlling result caching for this task.
  message CachingOptions {
    // Whether or not to enable cache for this task. Defaults to false.
    bool enable_cache = 1;
  }

  // Caching options of the task.
  CachingOptions caching_options = 6;

  // Reference to a component. Use this field to define either a DAG or an
  // executor.
  ComponentRef component_ref = 7;

  // Trigger policy defines how the task gets triggered. If a task is not
  // triggered, it will run into SKIPPED state.
  message TriggerPolicy {
    // An expression which will be evaluated into a boolean value. True to
    // trigger the task to run. The expression follows the language of
    // [CEL Spec](https://github.com/google/cel-spec). It can access the data
    // from [ExecutorInput][] message of the task.
    // For example:
    // - `inputs.artifacts['model'][0].properties['accuracy']*100 > 90`
    // - `inputs.parameters['type'] == 'foo' && inputs.parameters['num'] == 1`
    string condition = 1;

    // An enum that defines the trigger strategy for when the task will be
    // ready to be triggered.
    // ALL_UPSTREAM_TASKS_SUCCEEDED - all upstream tasks in succeeded state.
    // ALL_UPSTREAM_TASKS_COMPLETED - all upstream tasks in any final state.
    // (Note that CANCELLED is also a final state but the job will not trigger
    // new tasks when the job is in CANCELLING state, so a task with the
    // trigger policy ALL_UPSTREAM_TASKS_COMPLETED will not start when job
    // cancellation is in progress.)
    enum TriggerStrategy {
      // Unspecified. Behaves the same as ALL_UPSTREAM_TASKS_SUCCEEDED.
      TRIGGER_STRATEGY_UNSPECIFIED = 0;
      // Specifies that all upstream tasks are in succeeded state.
      ALL_UPSTREAM_TASKS_SUCCEEDED = 1;
      // Specifies that all upstream tasks are in any final state.
      ALL_UPSTREAM_TASKS_COMPLETED = 2;
    }

    // The trigger strategy of this task. The `strategy` and `condition` are
    // in logic "AND", as a task will only be tested for the `condition` when
    // the `strategy` is met.
    // Unset or set to the default value of TRIGGER_STRATEGY_UNSPECIFIED
    // behaves the same as ALL_UPSTREAM_TASKS_SUCCEEDED.
    TriggerStrategy strategy = 2;
  }

  // Trigger policy of the task.
  TriggerPolicy trigger_policy = 8;

  // Iterator supports fanning out the task into multiple sub-tasks based on
  // the values of input artifact or parameter. The current task will become
  // the parent of all the fan-out tasks. The output of the current task
  // follows the following conventions:
  // * Output artifacts with the same name of each iteration will be merged
  //   into one output artifact channel of the parent iterator task.
  // * Output parameters with the same name of each iteration will be merged
  //   into a string output parameter with the same name with content being a
  //   JSON array.
  //
  // For example, if an iterator starts two sub-tasks (t1 and t2) with the
  // following outputs.
  //
  // t1.outputs.parameters = { 'p': 'v1' }
  // t1.outputs.artifacts = { 'a': [a1] }
  // t2.outputs.parameters = { 'p': 'v2' }
  // t2.outputs.artifacts = { 'a': [a2] }
  // parent_task.outputs.parameters = { 'p': '["v1", "v2"]' }
  // parent_task.outputs.artifacts = { 'a': [a1, a2] }
  oneof iterator {
    // Iterator to iterate over an artifact input.
    ArtifactIteratorSpec artifact_iterator = 9;

    // Iterator to iterate over a parameter input.
    ParameterIteratorSpec parameter_iterator = 10;
  }
}
// The spec of an artifact iterator. It supports fanning out a workflow from a
// list of artifacts.
message ArtifactIteratorSpec {
  // Specifies the name of the artifact channel which contains the collection
  // of items to iterate. The iterator will create a sub-task for each item of
  // the collection and pass the item as a new input artifact channel as
  // specified by [item_input][].
  message ItemsSpec {
    // The name of the input artifact.
    string input_artifact = 1;
  }

  // The items to iterate.
  ItemsSpec items = 1;

  // The name of the input artifact channel which has the artifact item from
  // the [items][] collection.
  string item_input = 2;
}
// The spec of a parameter iterator. It supports fanning out a workflow from a
// string parameter which contains a JSON array.
message ParameterIteratorSpec {
  // Specifies the spec to describe the parameter items to iterate.
  message ItemsSpec {
    // Specifies where to get the collection of items to iterate. The iterator
    // will create a sub-task for each item of the collection and pass the
    // item as a new input parameter as specified by [item_input][].
    oneof kind {
      // The raw JSON array.
      string raw = 1;

      // The name of the input parameter whose value has the items collection.
      // The parameter must be in STRING type and its content can be parsed
      // as a JSON array.
      string input_parameter = 2;
    }
  }

  // The items to iterate.
  ItemsSpec items = 1;

  // The name of the input parameter which has the item value from the
  // [items][] collection.
  string item_input = 2;
}
// A reference to a component defined in the [PipelineSpec.components][] map.
message ComponentRef {
  // The name of a component. Refer to the key of the
  // [PipelineSpec.components][] map.
  string name = 1;
}
// Basic info of a pipeline.
message PipelineInfo {
  // Required field. The name of the pipeline.
  // The name will be used to create or find pipeline context in MLMD.
  string name = 1;
}
// The definition of an artifact type in MLMD. Exactly one of the three ways
// of identifying the type schema must be set.
message ArtifactTypeSchema {
  oneof kind {
    // The name of the type. The format of the title must be:
    // `<namespace>.<title>.<version>`.
    // Examples:
    //  - `aiplatform.Model.v1`
    //  - `acme.CustomModel.v2`
    // When this field is set, the type must be pre-registered in the MLMD
    // store.
    string schema_title = 1;

    // Points to a YAML file stored on Google Cloud Storage describing the
    // format.
    string schema_uri = 2;

    // Contains a raw YAML string, describing the format of
    // the properties of the type.
    string instance_schema = 3;
  }
}
// The basic info of a task.
message PipelineTaskInfo {
  // The unique name of the task within the pipeline definition. This name
  // will be used in downstream tasks to indicate task and data dependencies.
  string name = 1;
}
// Definition for a value or reference to a runtime parameter. A
// ValueOrRuntimeParameter instance can be either a field value that is
// determined during compilation time, or a runtime parameter which will be
// determined during runtime.
message ValueOrRuntimeParameter {
  oneof value {
    // Constant value which is determined at compile time.
    Value constant_value = 1;

    // The runtime parameter refers to the parent component input parameter.
    string runtime_parameter = 2;
  }
}
// The definition of the deployment config of the pipeline. It contains the
// platform specific executor configs for KFP OSS.
message PipelineDeploymentConfig {
  // The specification on a container invocation.
  // The string fields of the message support string based placeholder contract
  // defined in [ExecutorInput](). The output of the container follows the
  // contract of [ExecutorOutput]().
  message PipelineContainerSpec {
    // The image uri of the container.
    string image = 1;

    // The main entrypoint commands of the container to run. If not provided,
    // fallback to use the entry point command defined in the container image.
    repeated string command = 2;

    // The arguments to pass into the main entrypoint of the container.
    repeated string args = 3;

    // The lifecycle hooks of the container.
    // Each hook follows the same I/O contract as the main container
    // entrypoint.
    // See [ExecutorInput]() and [ExecutorOutput]() for details.
    // (-- TODO(b/165323565): add more documentation on caching and lifecycle
    // hooks. --)
    message Lifecycle {
      // The command and args to execute a program.
      // NOTE(review): field numbering starts at 2 (mirroring
      // PipelineContainerSpec); field 1 is unused but not `reserved` —
      // confirm before reusing it.
      message Exec {
        // The command of the exec program.
        repeated string command = 2;

        // The args of the exec program.
        repeated string args = 3;
      }

      // This hook is invoked before the caching check. It can change the
      // properties of the execution and output artifacts before they are
      // used to compute the cache key. The updated metadata will be passed
      // into the main container entrypoint.
      Exec pre_cache_check = 1;
    }

    // The lifecycle hooks of the container executor.
    Lifecycle lifecycle = 4;

    // The specification on the resource requirements of a container
    // execution. This can include specification of vCPU, memory requirements,
    // as well as accelerator types and counts.
    message ResourceSpec {
      // The limit of the number of vCPU cores. This container execution needs
      // at most cpu_limit vCPU to run.
      double cpu_limit = 1;

      // The memory limit in GB. This container execution needs at most
      // memory_limit RAM to run.
      double memory_limit = 2;

      // The specification on the accelerators being attached to this
      // container.
      message AcceleratorConfig {
        // The type of accelerators.
        string type = 1;

        // The number of accelerators.
        int64 count = 2;
      }

      // The accelerators attached to the container.
      AcceleratorConfig accelerator = 3;
    }

    // The resource requirements of the container execution.
    ResourceSpec resources = 5;
  }

  // The specification to import or reimport a new artifact to the pipeline.
  message ImporterSpec {
    // The URI of the artifact.
    ValueOrRuntimeParameter artifact_uri = 1;

    // The type of the artifact.
    ArtifactTypeSchema type_schema = 2;

    // The properties of the artifact.
    // Deprecated. Use [ImporterSpec.metadata][] instead.
    map<string, ValueOrRuntimeParameter> properties = 3 [deprecated = true];

    // The custom properties of the artifact.
    // Deprecated. Use [ImporterSpec.metadata][] instead.
    map<string, ValueOrRuntimeParameter> custom_properties = 4
        [deprecated = true];

    // Properties of the Artifact.
    google.protobuf.Struct metadata = 6;

    // Whether to import the artifact regardless of whether it has been
    // imported before.
    bool reimport = 5;
  }

  // ResolverSpec resolves artifacts from historical metadata and returns them
  // to the pipeline as output artifacts of the resolver task. The downstream
  // tasks can consume them as their input artifacts.
  message ResolverSpec {
    // The query to fetch artifacts.
    message ArtifactQuerySpec {
      // The filter of the artifact query. The supported syntax are:
      // - `in_context("<context name>")`
      // - `artifact_type='<artifact type name>'`
      // - `uri='<uri>'`
      // - `state=<state>`
      // - `name='value'`
      // - `and` to combine two conditions and returns when both are true.
      // If no `in_context` filter is set, the query will be scoped to the
      // current pipeline context.
      string filter = 1;

      // The maximum number of the artifacts to be returned from the
      // query. If not defined, the default limit is `1`.
      int32 limit = 2;
    }

    // A list of resolver output definitions. The
    // key of the map must be exactly the same as
    // the keys in the [PipelineTaskOutputsSpec.artifacts][] map.
    // At least one output must be defined.
    map<string, ArtifactQuerySpec> output_artifact_queries = 1;
  }

  // Deprecated executor spec for invoking a Google Cloud AI Platform
  // CustomJob.
  message AIPlatformCustomJobSpec {
    option deprecated = true;

    // API Specification for invoking a Google Cloud AI Platform CustomJob.
    // The fields must match the field names and structures of CustomJob
    // defined in
    // https://cloud.google.com/ai-platform-unified/docs/reference/rest/v1beta1/projects.locations.customJobs.
    // The field types must be either the same, or be a string containing the
    // string based placeholder contract defined in [ExecutorInput](). The
    // placeholders will be replaced with the actual value during the runtime
    // before the job is launched.
    google.protobuf.Struct custom_job = 1;
  }

  // The specification of the executor.
  message ExecutorSpec {
    oneof spec {
      // Starts a container.
      PipelineContainerSpec container = 1;

      // Import an artifact.
      ImporterSpec importer = 2;

      // Resolves an existing artifact.
      ResolverSpec resolver = 3;

      // Starts a Google Cloud AI Platform CustomJob.
      AIPlatformCustomJobSpec custom_job = 4 [deprecated = true];
    }
  }

  // Map from executor label (see [ComponentSpec.executor_label][]) to
  // executor spec.
  map<string, ExecutorSpec> executors = 1;
}
// Value is the value of the field. Exactly one of the primitive kinds is set.
message Value {
  oneof value {
    // An integer value.
    int64 int_value = 1;

    // A double value.
    double double_value = 2;

    // A string value.
    string string_value = 3;
  }
}
// The definition of a runtime artifact.
message RuntimeArtifact {
  // The name of an artifact.
  string name = 1;

  // The type of the artifact.
  ArtifactTypeSchema type = 2;

  // The URI of the artifact.
  string uri = 3;

  // The properties of the artifact.
  // Deprecated. Use [RuntimeArtifact.metadata][] instead.
  map<string, Value> properties = 4 [deprecated = true];

  // The custom properties of the artifact.
  // Deprecated. Use [RuntimeArtifact.metadata][] instead.
  map<string, Value> custom_properties = 5 [deprecated = true];

  // Properties of the Artifact.
  google.protobuf.Struct metadata = 6;
}
// Message that represents a list of artifacts.
message ArtifactList {
  // A list of artifacts.
  repeated RuntimeArtifact artifacts = 1;
}
// The input of an executor, which includes all the data that
// can be passed into the executor spec by a string based placeholder.
//
// The string based placeholder uses a JSON path to reference to the data
// in the [ExecutionInput]().
//
// `{{$}}`: prints the full [ExecutorInput]() as a JSON string.
// `{{$.inputs.artifacts['<name>'].uri}}`: prints the URI of an input
// artifact.
// `{{$.inputs.artifacts['<name>'].properties['<property name>']}}`: prints
// the property of an input artifact.
// `{{$.inputs.parameters['<name>']}}`: prints the value of an input
// parameter.
// `{{$.outputs.artifacts['<name>'].uri}}`: prints the URI of an output
// artifact.
// `{{$.outputs.artifacts['<name>'].properties['<property name>']}}`: prints
// the property of an output artifact.
// `{{$.outputs.parameters['<name>'].output_file}}`: prints a file path which
// points to a file and container can write to it to return the value of the
// parameter.
// `{{$.outputs.output_file}}`: prints a file path of the output metadata file
// which is used to send output metadata from executor to orchestrator. The
// contract of the output metadata is [ExecutorOutput](). When both parameter
// output file and executor output metadata files are set by the container,
// the output metadata file will have higher precedence to set output
// parameters.
message ExecutorInput {
  // The runtime inputs data of the execution.
  message Inputs {
    // Input parameters of the execution.
    map<string, Value> parameters = 1;

    // Input artifacts of the execution.
    map<string, ArtifactList> artifacts = 2;
  }

  // The runtime input artifacts of the task invocation.
  Inputs inputs = 1;

  // The runtime output parameter.
  message OutputParameter {
    // The file path which is used by the executor to pass the parameter value
    // to the system.
    string output_file = 1;
  }

  // The runtime outputs data of the execution.
  message Outputs {
    // The runtime output parameters.
    map<string, OutputParameter> parameters = 1;

    // The runtime output artifacts.
    map<string, ArtifactList> artifacts = 2;

    // The file path of the full output metadata JSON. The schema of the
    // output file is [ExecutorOutput][].
    //
    // When the full output metadata file is set by the container, the output
    // parameter files will be ignored.
    string output_file = 3;
  }

  // The runtime output artifacts of the task invocation.
  Outputs outputs = 2;
}
// The schema of the output metadata of an execution. It will be used to parse
// the output metadata file.
message ExecutorOutput {
  // The values for output parameters.
  map<string, Value> parameters = 1;

  // The updated metadata for output artifacts.
  map<string, ArtifactList> artifacts = 2;
}