diff --git a/cqproto/grpc.go b/cqproto/grpc.go index f8bb0954..0c9df090 100644 --- a/cqproto/grpc.go +++ b/cqproto/grpc.go @@ -26,7 +26,7 @@ func (g GRPCClient) GetProviderSchema(ctx context.Context, _ *GetProviderSchemaR Name: res.GetName(), Version: res.GetVersion(), ResourceTables: tablesFromProto(res.GetResourceTables()), - Migrations: res.Migrations, + Migrations: migrationsFromProto(res.GetMigrations()), } return resp, nil @@ -116,7 +116,7 @@ func (g *GRPCServer) GetProviderSchema(ctx context.Context, _ *internal.GetProvi Name: resp.Name, Version: resp.Version, ResourceTables: tablesToProto(resp.ResourceTables), - Migrations: resp.Migrations, + Migrations: migrationsToProto(resp.Migrations), }, nil } @@ -369,3 +369,21 @@ func PartialFetchToCQProto(in []schema.ResourceFetchError) []*FailedResourceFetc } return failedResources } + +func migrationsFromProto(in map[string]*internal.DialectMigration) map[string]map[string][]byte { + ret := make(map[string]map[string][]byte, len(in)) + for k := range in { + ret[k] = in[k].Migrations + } + return ret +} + +func migrationsToProto(in map[string]map[string][]byte) map[string]*internal.DialectMigration { + ret := make(map[string]*internal.DialectMigration, len(in)) + for k := range in { + ret[k] = &internal.DialectMigration{ + Migrations: in[k], + } + } + return ret +} diff --git a/cqproto/internal/plugin.pb.go b/cqproto/internal/plugin.pb.go index b5ece841..4d3ac8f6 100644 --- a/cqproto/internal/plugin.pb.go +++ b/cqproto/internal/plugin.pb.go @@ -1,17 +1,16 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.26.0 -// protoc v3.15.7 +// protoc v3.17.3 // source: internal/plugin.proto package internal import ( - reflect "reflect" - sync "sync" - protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" + reflect "reflect" + sync "sync" ) const ( @@ -623,6 +622,53 @@ func (x *Diagnostic) GetResource() string { return "" } +type DialectMigration struct { + state protoimpl.MessageState + sizeCache protoimpl.SizeCache + unknownFields protoimpl.UnknownFields + + Migrations map[string][]byte `protobuf:"bytes,1,rep,name=migrations,proto3" json:"migrations,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` +} + +func (x *DialectMigration) Reset() { + *x = DialectMigration{} + if protoimpl.UnsafeEnabled { + mi := &file_internal_plugin_proto_msgTypes[5] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) + } +} + +func (x *DialectMigration) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*DialectMigration) ProtoMessage() {} + +func (x *DialectMigration) ProtoReflect() protoreflect.Message { + mi := &file_internal_plugin_proto_msgTypes[5] + if protoimpl.UnsafeEnabled && x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use DialectMigration.ProtoReflect.Descriptor instead. 
+func (*DialectMigration) Descriptor() ([]byte, []int) { + return file_internal_plugin_proto_rawDescGZIP(), []int{5} +} + +func (x *DialectMigration) GetMigrations() map[string][]byte { + if x != nil { + return x.Migrations + } + return nil +} + type GetProviderSchema struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -632,7 +678,7 @@ type GetProviderSchema struct { func (x *GetProviderSchema) Reset() { *x = GetProviderSchema{} if protoimpl.UnsafeEnabled { - mi := &file_internal_plugin_proto_msgTypes[5] + mi := &file_internal_plugin_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -645,7 +691,7 @@ func (x *GetProviderSchema) String() string { func (*GetProviderSchema) ProtoMessage() {} func (x *GetProviderSchema) ProtoReflect() protoreflect.Message { - mi := &file_internal_plugin_proto_msgTypes[5] + mi := &file_internal_plugin_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -658,7 +704,7 @@ func (x *GetProviderSchema) ProtoReflect() protoreflect.Message { // Deprecated: Use GetProviderSchema.ProtoReflect.Descriptor instead. func (*GetProviderSchema) Descriptor() ([]byte, []int) { - return file_internal_plugin_proto_rawDescGZIP(), []int{5} + return file_internal_plugin_proto_rawDescGZIP(), []int{6} } type GetProviderConfig struct { @@ -670,7 +716,7 @@ type GetProviderConfig struct { func (x *GetProviderConfig) Reset() { *x = GetProviderConfig{} if protoimpl.UnsafeEnabled { - mi := &file_internal_plugin_proto_msgTypes[6] + mi := &file_internal_plugin_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -683,7 +729,7 @@ func (x *GetProviderConfig) String() string { func (*GetProviderConfig) ProtoMessage() {} func (x *GetProviderConfig) ProtoReflect() protoreflect.Message { - mi := &file_internal_plugin_proto_msgTypes[6] + mi := &file_internal_plugin_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -696,7 +742,7 @@ func (x *GetProviderConfig) ProtoReflect() protoreflect.Message { // Deprecated: Use GetProviderConfig.ProtoReflect.Descriptor instead. func (*GetProviderConfig) Descriptor() ([]byte, []int) { - return file_internal_plugin_proto_rawDescGZIP(), []int{6} + return file_internal_plugin_proto_rawDescGZIP(), []int{7} } // Table is the definition of how a table is defined in a provider @@ -715,7 +761,7 @@ type Table struct { func (x *Table) Reset() { *x = Table{} if protoimpl.UnsafeEnabled { - mi := &file_internal_plugin_proto_msgTypes[7] + mi := &file_internal_plugin_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -728,7 +774,7 @@ func (x *Table) String() string { func (*Table) ProtoMessage() {} func (x *Table) ProtoReflect() protoreflect.Message { - mi := &file_internal_plugin_proto_msgTypes[7] + mi := &file_internal_plugin_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -741,7 +787,7 @@ func (x *Table) ProtoReflect() protoreflect.Message { // Deprecated: Use Table.ProtoReflect.Descriptor instead. 
func (*Table) Descriptor() ([]byte, []int) { - return file_internal_plugin_proto_rawDescGZIP(), []int{7} + return file_internal_plugin_proto_rawDescGZIP(), []int{8} } func (x *Table) GetName() string { @@ -793,7 +839,7 @@ type Column struct { func (x *Column) Reset() { *x = Column{} if protoimpl.UnsafeEnabled { - mi := &file_internal_plugin_proto_msgTypes[8] + mi := &file_internal_plugin_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -806,7 +852,7 @@ func (x *Column) String() string { func (*Column) ProtoMessage() {} func (x *Column) ProtoReflect() protoreflect.Message { - mi := &file_internal_plugin_proto_msgTypes[8] + mi := &file_internal_plugin_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -819,7 +865,7 @@ func (x *Column) ProtoReflect() protoreflect.Message { // Deprecated: Use Column.ProtoReflect.Descriptor instead. func (*Column) Descriptor() ([]byte, []int) { - return file_internal_plugin_proto_rawDescGZIP(), []int{8} + return file_internal_plugin_proto_rawDescGZIP(), []int{9} } func (x *Column) GetName() string { @@ -862,7 +908,7 @@ type ColumnMeta struct { func (x *ColumnMeta) Reset() { *x = ColumnMeta{} if protoimpl.UnsafeEnabled { - mi := &file_internal_plugin_proto_msgTypes[9] + mi := &file_internal_plugin_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -875,7 +921,7 @@ func (x *ColumnMeta) String() string { func (*ColumnMeta) ProtoMessage() {} func (x *ColumnMeta) ProtoReflect() protoreflect.Message { - mi := &file_internal_plugin_proto_msgTypes[9] + mi := &file_internal_plugin_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -888,7 +934,7 @@ func (x *ColumnMeta) ProtoReflect() protoreflect.Message { // Deprecated: Use ColumnMeta.ProtoReflect.Descriptor instead. func (*ColumnMeta) Descriptor() ([]byte, []int) { - return file_internal_plugin_proto_rawDescGZIP(), []int{9} + return file_internal_plugin_proto_rawDescGZIP(), []int{10} } func (x *ColumnMeta) GetResolver() *ResolverMeta { @@ -917,7 +963,7 @@ type ResolverMeta struct { func (x *ResolverMeta) Reset() { *x = ResolverMeta{} if protoimpl.UnsafeEnabled { - mi := &file_internal_plugin_proto_msgTypes[10] + mi := &file_internal_plugin_proto_msgTypes[11] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -930,7 +976,7 @@ func (x *ResolverMeta) String() string { func (*ResolverMeta) ProtoMessage() {} func (x *ResolverMeta) ProtoReflect() protoreflect.Message { - mi := &file_internal_plugin_proto_msgTypes[10] + mi := &file_internal_plugin_proto_msgTypes[11] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -943,7 +989,7 @@ func (x *ResolverMeta) ProtoReflect() protoreflect.Message { // Deprecated: Use ResolverMeta.ProtoReflect.Descriptor instead. 
func (*ResolverMeta) Descriptor() ([]byte, []int) { - return file_internal_plugin_proto_rawDescGZIP(), []int{10} + return file_internal_plugin_proto_rawDescGZIP(), []int{11} } func (x *ResolverMeta) GetName() string { @@ -971,7 +1017,7 @@ type TableCreationOptions struct { func (x *TableCreationOptions) Reset() { *x = TableCreationOptions{} if protoimpl.UnsafeEnabled { - mi := &file_internal_plugin_proto_msgTypes[11] + mi := &file_internal_plugin_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -984,7 +1030,7 @@ func (x *TableCreationOptions) String() string { func (*TableCreationOptions) ProtoMessage() {} func (x *TableCreationOptions) ProtoReflect() protoreflect.Message { - mi := &file_internal_plugin_proto_msgTypes[11] + mi := &file_internal_plugin_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -997,7 +1043,7 @@ func (x *TableCreationOptions) ProtoReflect() protoreflect.Message { // Deprecated: Use TableCreationOptions.ProtoReflect.Descriptor instead. func (*TableCreationOptions) Descriptor() ([]byte, []int) { - return file_internal_plugin_proto_rawDescGZIP(), []int{11} + return file_internal_plugin_proto_rawDescGZIP(), []int{12} } func (x *TableCreationOptions) GetPrimaryKeys() []string { @@ -1019,7 +1065,7 @@ type ConnectionDetails struct { func (x *ConnectionDetails) Reset() { *x = ConnectionDetails{} if protoimpl.UnsafeEnabled { - mi := &file_internal_plugin_proto_msgTypes[12] + mi := &file_internal_plugin_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1032,7 +1078,7 @@ func (x *ConnectionDetails) String() string { func (*ConnectionDetails) ProtoMessage() {} func (x *ConnectionDetails) ProtoReflect() protoreflect.Message { - mi := &file_internal_plugin_proto_msgTypes[12] + mi := &file_internal_plugin_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1045,7 +1091,7 @@ func (x *ConnectionDetails) ProtoReflect() protoreflect.Message { // Deprecated: Use ConnectionDetails.ProtoReflect.Descriptor instead. 
func (*ConnectionDetails) Descriptor() ([]byte, []int) { - return file_internal_plugin_proto_rawDescGZIP(), []int{12} + return file_internal_plugin_proto_rawDescGZIP(), []int{13} } func (x *ConnectionDetails) GetType() ConnectionType { @@ -1082,7 +1128,7 @@ type ConfigureProvider_Request struct { func (x *ConfigureProvider_Request) Reset() { *x = ConfigureProvider_Request{} if protoimpl.UnsafeEnabled { - mi := &file_internal_plugin_proto_msgTypes[13] + mi := &file_internal_plugin_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1095,7 +1141,7 @@ func (x *ConfigureProvider_Request) String() string { func (*ConfigureProvider_Request) ProtoMessage() {} func (x *ConfigureProvider_Request) ProtoReflect() protoreflect.Message { - mi := &file_internal_plugin_proto_msgTypes[13] + mi := &file_internal_plugin_proto_msgTypes[14] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1157,7 +1203,7 @@ type ConfigureProvider_Response struct { func (x *ConfigureProvider_Response) Reset() { *x = ConfigureProvider_Response{} if protoimpl.UnsafeEnabled { - mi := &file_internal_plugin_proto_msgTypes[14] + mi := &file_internal_plugin_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1170,7 +1216,7 @@ func (x *ConfigureProvider_Response) String() string { func (*ConfigureProvider_Response) ProtoMessage() {} func (x *ConfigureProvider_Response) ProtoReflect() protoreflect.Message { - mi := &file_internal_plugin_proto_msgTypes[14] + mi := &file_internal_plugin_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1209,7 +1255,7 @@ type FetchResources_Request struct { func (x *FetchResources_Request) Reset() { *x = FetchResources_Request{} if protoimpl.UnsafeEnabled { - mi := &file_internal_plugin_proto_msgTypes[15] + mi := &file_internal_plugin_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1222,7 +1268,7 @@ func (x *FetchResources_Request) String() string { func (*FetchResources_Request) ProtoMessage() {} func (x *FetchResources_Request) ProtoReflect() protoreflect.Message { - mi := &file_internal_plugin_proto_msgTypes[15] + mi := &file_internal_plugin_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1281,7 +1327,7 @@ type FetchResources_Response struct { func (x *FetchResources_Response) Reset() { *x = FetchResources_Response{} if protoimpl.UnsafeEnabled { - mi := &file_internal_plugin_proto_msgTypes[16] + mi := &file_internal_plugin_proto_msgTypes[17] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1294,7 +1340,7 @@ func (x *FetchResources_Response) String() string { func (*FetchResources_Response) ProtoMessage() {} func (x *FetchResources_Response) ProtoReflect() protoreflect.Message { - mi := &file_internal_plugin_proto_msgTypes[16] + mi := &file_internal_plugin_proto_msgTypes[17] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1361,7 +1407,7 @@ type GetProviderSchema_Request struct { func (x *GetProviderSchema_Request) Reset() { *x = GetProviderSchema_Request{} if protoimpl.UnsafeEnabled { - mi := &file_internal_plugin_proto_msgTypes[18] + mi := 
&file_internal_plugin_proto_msgTypes[20] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1374,7 +1420,7 @@ func (x *GetProviderSchema_Request) String() string { func (*GetProviderSchema_Request) ProtoMessage() {} func (x *GetProviderSchema_Request) ProtoReflect() protoreflect.Message { - mi := &file_internal_plugin_proto_msgTypes[18] + mi := &file_internal_plugin_proto_msgTypes[20] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1387,7 +1433,7 @@ func (x *GetProviderSchema_Request) ProtoReflect() protoreflect.Message { // Deprecated: Use GetProviderSchema_Request.ProtoReflect.Descriptor instead. func (*GetProviderSchema_Request) Descriptor() ([]byte, []int) { - return file_internal_plugin_proto_rawDescGZIP(), []int{5, 0} + return file_internal_plugin_proto_rawDescGZIP(), []int{6, 0} } type GetProviderSchema_Response struct { @@ -1395,16 +1441,16 @@ type GetProviderSchema_Response struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` - Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` - ResourceTables map[string]*Table `protobuf:"bytes,3,rep,name=resource_tables,json=resourceTables,proto3" json:"resource_tables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - Migrations map[string][]byte `protobuf:"bytes,4,rep,name=migrations,proto3" json:"migrations,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` + Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` + ResourceTables map[string]*Table `protobuf:"bytes,3,rep,name=resource_tables,json=resourceTables,proto3" json:"resource_tables,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + Migrations map[string]*DialectMigration `protobuf:"bytes,5,rep,name=migrations,proto3" json:"migrations,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` } func (x *GetProviderSchema_Response) Reset() { *x = GetProviderSchema_Response{} if protoimpl.UnsafeEnabled { - mi := &file_internal_plugin_proto_msgTypes[19] + mi := &file_internal_plugin_proto_msgTypes[21] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1417,7 +1463,7 @@ func (x *GetProviderSchema_Response) String() string { func (*GetProviderSchema_Response) ProtoMessage() {} func (x *GetProviderSchema_Response) ProtoReflect() protoreflect.Message { - mi := &file_internal_plugin_proto_msgTypes[19] + mi := &file_internal_plugin_proto_msgTypes[21] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1430,7 +1476,7 @@ func (x *GetProviderSchema_Response) ProtoReflect() protoreflect.Message { // Deprecated: Use GetProviderSchema_Response.ProtoReflect.Descriptor instead. 
func (*GetProviderSchema_Response) Descriptor() ([]byte, []int) { - return file_internal_plugin_proto_rawDescGZIP(), []int{5, 1} + return file_internal_plugin_proto_rawDescGZIP(), []int{6, 1} } func (x *GetProviderSchema_Response) GetName() string { @@ -1454,7 +1500,7 @@ func (x *GetProviderSchema_Response) GetResourceTables() map[string]*Table { return nil } -func (x *GetProviderSchema_Response) GetMigrations() map[string][]byte { +func (x *GetProviderSchema_Response) GetMigrations() map[string]*DialectMigration { if x != nil { return x.Migrations } @@ -1470,7 +1516,7 @@ type GetProviderConfig_Request struct { func (x *GetProviderConfig_Request) Reset() { *x = GetProviderConfig_Request{} if protoimpl.UnsafeEnabled { - mi := &file_internal_plugin_proto_msgTypes[22] + mi := &file_internal_plugin_proto_msgTypes[24] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1483,7 +1529,7 @@ func (x *GetProviderConfig_Request) String() string { func (*GetProviderConfig_Request) ProtoMessage() {} func (x *GetProviderConfig_Request) ProtoReflect() protoreflect.Message { - mi := &file_internal_plugin_proto_msgTypes[22] + mi := &file_internal_plugin_proto_msgTypes[24] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1496,7 +1542,7 @@ func (x *GetProviderConfig_Request) ProtoReflect() protoreflect.Message { // Deprecated: Use GetProviderConfig_Request.ProtoReflect.Descriptor instead. func (*GetProviderConfig_Request) Descriptor() ([]byte, []int) { - return file_internal_plugin_proto_rawDescGZIP(), []int{6, 0} + return file_internal_plugin_proto_rawDescGZIP(), []int{7, 0} } type GetProviderConfig_Response struct { @@ -1512,7 +1558,7 @@ type GetProviderConfig_Response struct { func (x *GetProviderConfig_Response) Reset() { *x = GetProviderConfig_Response{} if protoimpl.UnsafeEnabled { - mi := &file_internal_plugin_proto_msgTypes[23] + mi := &file_internal_plugin_proto_msgTypes[25] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1525,7 +1571,7 @@ func (x *GetProviderConfig_Response) String() string { func (*GetProviderConfig_Response) ProtoMessage() {} func (x *GetProviderConfig_Response) ProtoReflect() protoreflect.Message { - mi := &file_internal_plugin_proto_msgTypes[23] + mi := &file_internal_plugin_proto_msgTypes[25] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1538,7 +1584,7 @@ func (x *GetProviderConfig_Response) ProtoReflect() protoreflect.Message { // Deprecated: Use GetProviderConfig_Response.ProtoReflect.Descriptor instead. 
func (*GetProviderConfig_Response) Descriptor() ([]byte, []int) { - return file_internal_plugin_proto_rawDescGZIP(), []int{6, 1} + return file_internal_plugin_proto_rawDescGZIP(), []int{7, 1} } func (x *GetProviderConfig_Response) GetName() string { @@ -1667,126 +1713,137 @@ var file_internal_plugin_proto_rawDesc = []byte{ 0x08, 0x44, 0x41, 0x54, 0x41, 0x42, 0x41, 0x53, 0x45, 0x10, 0x04, 0x22, 0x2f, 0x0a, 0x08, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x0b, 0x0a, 0x07, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x52, 0x52, 0x4f, 0x52, 0x10, 0x01, 0x12, - 0x0b, 0x0a, 0x07, 0x57, 0x41, 0x52, 0x4e, 0x49, 0x4e, 0x47, 0x10, 0x02, 0x22, 0x9c, 0x03, 0x0a, - 0x11, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x1a, 0x09, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0xfb, 0x02, - 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, - 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, - 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x5e, 0x0a, 0x0f, 0x72, 0x65, 0x73, 0x6f, - 0x75, 0x72, 0x63, 0x65, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, - 0x0b, 0x32, 0x35, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, - 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x61, 0x62, - 0x6c, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x12, 0x51, 0x0a, 0x0a, 0x6d, 0x69, 0x67, 0x72, - 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, - 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, - 0x4d, 0x69, 0x67, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, - 0x0a, 0x6d, 0x69, 0x67, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x4f, 0x0a, 0x13, 0x52, - 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x45, 0x6e, 0x74, - 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, - 0x03, 0x6b, 0x65, 0x79, 0x12, 0x22, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x54, 0x61, 0x62, 0x6c, - 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x3d, 0x0a, 0x0f, - 0x4d, 0x69, 0x67, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, - 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, - 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, - 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x70, 0x0a, 0x11, 0x47, - 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, - 0x1a, 0x09, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x50, 0x0a, 0x08, 0x52, + 0x0b, 0x0a, 0x07, 0x57, 0x41, 0x52, 0x4e, 0x49, 0x4e, 0x47, 0x10, 0x02, 0x22, 0x9a, 0x01, 0x0a, + 
0x10, 0x44, 0x69, 0x61, 0x6c, 0x65, 0x63, 0x74, 0x4d, 0x69, 0x67, 0x72, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x12, 0x47, 0x0a, 0x0a, 0x6d, 0x69, 0x67, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, + 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x44, 0x69, + 0x61, 0x6c, 0x65, 0x63, 0x74, 0x4d, 0x69, 0x67, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x4d, + 0x69, 0x67, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, + 0x6d, 0x69, 0x67, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x3d, 0x0a, 0x0f, 0x4d, 0x69, + 0x67, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, + 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, + 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0xb5, 0x03, 0x0a, 0x11, 0x47, 0x65, + 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x1a, + 0x09, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x94, 0x03, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, - 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, - 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x22, 0xda, 0x01, - 0x0a, 0x05, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, - 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x27, 0x0a, - 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0d, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x52, 0x07, 0x63, - 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x2a, 0x0a, 0x09, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, - 0x6f, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x09, 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x12, 0x3a, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x54, 0x61, 0x62, 0x6c, - 0x65, 0x43, 0x72, 0x65, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, - 0x48, 0x00, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x88, 0x01, 0x01, 0x42, 0x0a, - 0x0a, 0x08, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0x8c, 0x01, 0x0a, 0x06, 0x43, - 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, - 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, - 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x25, 0x0a, 0x04, 0x74, - 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x11, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x2e, 0x43, 0x6f, 0x6c, 
0x75, 0x6d, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, - 0x70, 0x65, 0x12, 0x25, 0x0a, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x11, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4d, - 0x65, 0x74, 0x61, 0x52, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x22, 0x61, 0x0a, 0x0a, 0x43, 0x6f, 0x6c, - 0x75, 0x6d, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x2f, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x6c, - 0x76, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x74, - 0x6f, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x52, 0x08, - 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x72, 0x12, 0x22, 0x0a, 0x0c, 0x49, 0x67, 0x6e, 0x6f, - 0x72, 0x65, 0x45, 0x78, 0x69, 0x73, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0c, - 0x49, 0x67, 0x6e, 0x6f, 0x72, 0x65, 0x45, 0x78, 0x69, 0x73, 0x74, 0x73, 0x22, 0x3c, 0x0a, 0x0c, - 0x52, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x72, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x12, 0x0a, 0x04, - 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, - 0x12, 0x18, 0x0a, 0x07, 0x62, 0x75, 0x69, 0x6c, 0x74, 0x69, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x08, 0x52, 0x07, 0x62, 0x75, 0x69, 0x6c, 0x74, 0x69, 0x6e, 0x22, 0x38, 0x0a, 0x14, 0x54, 0x61, - 0x62, 0x6c, 0x65, 0x43, 0x72, 0x65, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4f, 0x70, 0x74, 0x69, 0x6f, - 0x6e, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x50, 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, 0x4b, 0x65, 0x79, - 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0b, 0x50, 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, - 0x4b, 0x65, 0x79, 0x73, 0x22, 0x50, 0x0a, 0x11, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, - 0x6f, 0x6e, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x29, 0x0a, 0x04, 0x74, 0x79, 0x70, - 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, - 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, - 0x74, 0x79, 0x70, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x64, 0x73, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x09, 0x52, 0x03, 0x64, 0x73, 0x6e, 0x2a, 0x97, 0x02, 0x0a, 0x0a, 0x43, 0x6f, 0x6c, 0x75, 0x6d, - 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, - 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x42, 0x4f, 0x4f, 0x4c, 0x10, 0x01, 0x12, 0x0c, 0x0a, 0x08, - 0x53, 0x4d, 0x41, 0x4c, 0x4c, 0x49, 0x4e, 0x54, 0x10, 0x02, 0x12, 0x07, 0x0a, 0x03, 0x49, 0x4e, - 0x54, 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x42, 0x49, 0x47, 0x49, 0x4e, 0x54, 0x10, 0x04, 0x12, - 0x09, 0x0a, 0x05, 0x46, 0x4c, 0x4f, 0x41, 0x54, 0x10, 0x05, 0x12, 0x08, 0x0a, 0x04, 0x55, 0x55, - 0x49, 0x44, 0x10, 0x06, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x54, 0x52, 0x49, 0x4e, 0x47, 0x10, 0x07, - 0x12, 0x0e, 0x0a, 0x0a, 0x42, 0x59, 0x54, 0x45, 0x5f, 0x41, 0x52, 0x52, 0x41, 0x59, 0x10, 0x08, - 0x12, 0x10, 0x0a, 0x0c, 0x53, 0x54, 0x52, 0x49, 0x4e, 0x47, 0x5f, 0x41, 0x52, 0x52, 0x41, 0x59, - 0x10, 0x09, 0x12, 0x0d, 0x0a, 0x09, 0x49, 0x4e, 0x54, 0x5f, 0x41, 0x52, 0x52, 0x41, 0x59, 0x10, - 0x0a, 0x12, 0x0d, 0x0a, 0x09, 0x54, 0x49, 0x4d, 0x45, 0x53, 0x54, 0x41, 0x4d, 0x50, 0x10, 0x0b, - 0x12, 0x08, 0x0a, 0x04, 0x4a, 0x53, 0x4f, 0x4e, 0x10, 0x0c, 0x12, 0x0e, 0x0a, 0x0a, 0x55, 0x55, - 0x49, 0x44, 0x5f, 0x41, 0x52, 0x52, 0x41, 0x59, 0x10, 0x0d, 0x12, 0x08, 0x0a, 0x04, 0x49, 0x4e, - 0x45, 0x54, 0x10, 0x0e, 0x12, 0x0e, 0x0a, 0x0a, 0x49, 0x4e, 0x45, 0x54, 0x5f, 0x41, 0x52, 0x52, - 0x41, 0x59, 0x10, 0x0f, 0x12, 0x08, 0x0a, 0x04, 0x43, 
0x49, 0x44, 0x52, 0x10, 0x10, 0x12, 0x0e, - 0x0a, 0x0a, 0x43, 0x49, 0x44, 0x52, 0x5f, 0x41, 0x52, 0x52, 0x41, 0x59, 0x10, 0x11, 0x12, 0x0c, - 0x0a, 0x08, 0x4d, 0x41, 0x43, 0x5f, 0x41, 0x44, 0x44, 0x52, 0x10, 0x12, 0x12, 0x12, 0x0a, 0x0e, - 0x4d, 0x41, 0x43, 0x5f, 0x41, 0x44, 0x44, 0x52, 0x5f, 0x41, 0x52, 0x52, 0x41, 0x59, 0x10, 0x13, - 0x2a, 0x1e, 0x0a, 0x0e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, - 0x70, 0x65, 0x12, 0x0c, 0x0a, 0x08, 0x50, 0x4f, 0x53, 0x54, 0x47, 0x52, 0x45, 0x53, 0x10, 0x00, - 0x32, 0xeb, 0x02, 0x0a, 0x08, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x58, 0x0a, - 0x11, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, - 0x6d, 0x61, 0x12, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, - 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, - 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, - 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x58, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x50, 0x72, - 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x20, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, - 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, + 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x5e, 0x0a, 0x0f, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, + 0x65, 0x5f, 0x74, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, - 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x12, 0x58, 0x0a, 0x11, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x65, 0x50, 0x72, - 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, - 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, - 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, - 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x51, 0x0a, 0x0e, 0x46, - 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x12, 0x1d, 0x2e, - 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x6f, 0x75, - 0x72, 0x63, 0x65, 0x73, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, - 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, - 0x63, 0x65, 0x73, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x30, 0x01, 0x42, 0x0b, - 0x5a, 0x09, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x62, 0x06, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x33, + 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x73, + 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0e, 0x72, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x54, + 0x61, 0x62, 0x6c, 0x65, 0x73, 0x12, 0x51, 0x0a, 0x0a, 0x6d, 0x69, 0x67, 0x72, 0x61, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 
0x31, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, + 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x2e, 0x4d, 0x69, 0x67, + 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x6d, 0x69, + 0x67, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x1a, 0x4f, 0x0a, 0x13, 0x52, 0x65, 0x73, 0x6f, + 0x75, 0x72, 0x63, 0x65, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, + 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, + 0x79, 0x12, 0x22, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, + 0x32, 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x56, 0x0a, 0x0f, 0x4d, 0x69, 0x67, + 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, + 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x2d, + 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x17, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x44, 0x69, 0x61, 0x6c, 0x65, 0x63, 0x74, 0x4d, 0x69, 0x67, + 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, + 0x01, 0x22, 0x70, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, + 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x1a, 0x09, 0x0a, 0x07, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, + 0x74, 0x1a, 0x50, 0x0a, 0x08, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x12, 0x0a, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x12, 0x18, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, + 0x28, 0x09, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x16, 0x0a, 0x06, 0x63, + 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x63, 0x6f, 0x6e, + 0x66, 0x69, 0x67, 0x22, 0xda, 0x01, 0x0a, 0x05, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x12, 0x12, 0x0a, + 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, + 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, + 0x69, 0x6f, 0x6e, 0x12, 0x27, 0x0a, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x18, 0x03, + 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6c, + 0x75, 0x6d, 0x6e, 0x52, 0x07, 0x63, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x73, 0x12, 0x2a, 0x0a, 0x09, + 0x72, 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x0c, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x52, 0x09, 0x72, + 0x65, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x3a, 0x0a, 0x07, 0x6f, 0x70, 0x74, 0x69, + 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x2e, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x72, 0x65, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4f, + 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x00, 0x52, 0x07, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, + 0x73, 0x88, 0x01, 0x01, 0x42, 0x0a, 0x0a, 0x08, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, + 0x22, 0x8c, 0x01, 0x0a, 0x06, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x12, 0x12, 0x0a, 
0x04, 0x6e, + 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, + 0x20, 0x0a, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, 0x74, 0x69, 0x6f, + 0x6e, 0x12, 0x25, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0e, 0x32, + 0x11, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x54, 0x79, + 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x25, 0x0a, 0x04, 0x6d, 0x65, 0x74, 0x61, + 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, + 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x52, 0x04, 0x6d, 0x65, 0x74, 0x61, 0x22, + 0x61, 0x0a, 0x0a, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x4d, 0x65, 0x74, 0x61, 0x12, 0x2f, 0x0a, + 0x08, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x13, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x72, + 0x4d, 0x65, 0x74, 0x61, 0x52, 0x08, 0x72, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x72, 0x12, 0x22, + 0x0a, 0x0c, 0x49, 0x67, 0x6e, 0x6f, 0x72, 0x65, 0x45, 0x78, 0x69, 0x73, 0x74, 0x73, 0x18, 0x02, + 0x20, 0x01, 0x28, 0x08, 0x52, 0x0c, 0x49, 0x67, 0x6e, 0x6f, 0x72, 0x65, 0x45, 0x78, 0x69, 0x73, + 0x74, 0x73, 0x22, 0x3c, 0x0a, 0x0c, 0x52, 0x65, 0x73, 0x6f, 0x6c, 0x76, 0x65, 0x72, 0x4d, 0x65, + 0x74, 0x61, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, + 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x62, 0x75, 0x69, 0x6c, 0x74, 0x69, + 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x62, 0x75, 0x69, 0x6c, 0x74, 0x69, 0x6e, + 0x22, 0x38, 0x0a, 0x14, 0x54, 0x61, 0x62, 0x6c, 0x65, 0x43, 0x72, 0x65, 0x61, 0x74, 0x69, 0x6f, + 0x6e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x20, 0x0a, 0x0b, 0x50, 0x72, 0x69, 0x6d, + 0x61, 0x72, 0x79, 0x4b, 0x65, 0x79, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x09, 0x52, 0x0b, 0x50, + 0x72, 0x69, 0x6d, 0x61, 0x72, 0x79, 0x4b, 0x65, 0x79, 0x73, 0x22, 0x50, 0x0a, 0x11, 0x43, 0x6f, + 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, + 0x29, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x15, 0x2e, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, 0x74, 0x69, 0x6f, 0x6e, + 0x54, 0x79, 0x70, 0x65, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x64, 0x73, + 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x64, 0x73, 0x6e, 0x2a, 0x97, 0x02, 0x0a, + 0x0a, 0x43, 0x6f, 0x6c, 0x75, 0x6d, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0b, 0x0a, 0x07, 0x49, + 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x42, 0x4f, 0x4f, 0x4c, + 0x10, 0x01, 0x12, 0x0c, 0x0a, 0x08, 0x53, 0x4d, 0x41, 0x4c, 0x4c, 0x49, 0x4e, 0x54, 0x10, 0x02, + 0x12, 0x07, 0x0a, 0x03, 0x49, 0x4e, 0x54, 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x42, 0x49, 0x47, + 0x49, 0x4e, 0x54, 0x10, 0x04, 0x12, 0x09, 0x0a, 0x05, 0x46, 0x4c, 0x4f, 0x41, 0x54, 0x10, 0x05, + 0x12, 0x08, 0x0a, 0x04, 0x55, 0x55, 0x49, 0x44, 0x10, 0x06, 0x12, 0x0a, 0x0a, 0x06, 0x53, 0x54, + 0x52, 0x49, 0x4e, 0x47, 0x10, 0x07, 0x12, 0x0e, 0x0a, 0x0a, 0x42, 0x59, 0x54, 0x45, 0x5f, 0x41, + 0x52, 0x52, 0x41, 0x59, 0x10, 0x08, 0x12, 0x10, 0x0a, 0x0c, 0x53, 0x54, 0x52, 0x49, 0x4e, 0x47, + 0x5f, 0x41, 0x52, 0x52, 0x41, 0x59, 0x10, 0x09, 0x12, 0x0d, 0x0a, 0x09, 0x49, 0x4e, 0x54, 0x5f, + 0x41, 0x52, 
0x52, 0x41, 0x59, 0x10, 0x0a, 0x12, 0x0d, 0x0a, 0x09, 0x54, 0x49, 0x4d, 0x45, 0x53, + 0x54, 0x41, 0x4d, 0x50, 0x10, 0x0b, 0x12, 0x08, 0x0a, 0x04, 0x4a, 0x53, 0x4f, 0x4e, 0x10, 0x0c, + 0x12, 0x0e, 0x0a, 0x0a, 0x55, 0x55, 0x49, 0x44, 0x5f, 0x41, 0x52, 0x52, 0x41, 0x59, 0x10, 0x0d, + 0x12, 0x08, 0x0a, 0x04, 0x49, 0x4e, 0x45, 0x54, 0x10, 0x0e, 0x12, 0x0e, 0x0a, 0x0a, 0x49, 0x4e, + 0x45, 0x54, 0x5f, 0x41, 0x52, 0x52, 0x41, 0x59, 0x10, 0x0f, 0x12, 0x08, 0x0a, 0x04, 0x43, 0x49, + 0x44, 0x52, 0x10, 0x10, 0x12, 0x0e, 0x0a, 0x0a, 0x43, 0x49, 0x44, 0x52, 0x5f, 0x41, 0x52, 0x52, + 0x41, 0x59, 0x10, 0x11, 0x12, 0x0c, 0x0a, 0x08, 0x4d, 0x41, 0x43, 0x5f, 0x41, 0x44, 0x44, 0x52, + 0x10, 0x12, 0x12, 0x12, 0x0a, 0x0e, 0x4d, 0x41, 0x43, 0x5f, 0x41, 0x44, 0x44, 0x52, 0x5f, 0x41, + 0x52, 0x52, 0x41, 0x59, 0x10, 0x13, 0x2a, 0x1e, 0x0a, 0x0e, 0x43, 0x6f, 0x6e, 0x6e, 0x65, 0x63, + 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x79, 0x70, 0x65, 0x12, 0x0c, 0x0a, 0x08, 0x50, 0x4f, 0x53, 0x54, + 0x47, 0x52, 0x45, 0x53, 0x10, 0x00, 0x32, 0xeb, 0x02, 0x0a, 0x08, 0x50, 0x72, 0x6f, 0x76, 0x69, + 0x64, 0x65, 0x72, 0x12, 0x58, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, + 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, 0x6d, 0x61, 0x12, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, + 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, 0x68, 0x65, + 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x53, 0x63, + 0x68, 0x65, 0x6d, 0x61, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x58, 0x0a, + 0x11, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, + 0x69, 0x67, 0x12, 0x20, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, + 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x47, 0x65, 0x74, + 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x58, 0x0a, 0x11, 0x43, 0x6f, 0x6e, 0x66, 0x69, + 0x67, 0x75, 0x72, 0x65, 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x12, 0x20, 0x2e, 0x70, + 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x65, 0x50, 0x72, + 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x21, + 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x65, + 0x50, 0x72, 0x6f, 0x76, 0x69, 0x64, 0x65, 0x72, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x12, 0x51, 0x0a, 0x0e, 0x46, 0x65, 0x74, 0x63, 0x68, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, + 0x63, 0x65, 0x73, 0x12, 0x1d, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x46, 0x65, 0x74, 0x63, + 0x68, 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x65, + 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x46, 0x65, 0x74, 0x63, 0x68, + 0x52, 0x65, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x2e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x30, 0x01, 0x42, 0x0b, 0x5a, 0x09, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, + 0x6c, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1802,7 +1859,7 @@ func file_internal_plugin_proto_rawDescGZIP() []byte { } var file_internal_plugin_proto_enumTypes = make([]protoimpl.EnumInfo, 5) -var 
file_internal_plugin_proto_msgTypes = make([]protoimpl.MessageInfo, 24) +var file_internal_plugin_proto_msgTypes = make([]protoimpl.MessageInfo, 26) var file_internal_plugin_proto_goTypes = []interface{}{ (ColumnType)(0), // 0: proto.ColumnType (ConnectionType)(0), // 1: proto.ConnectionType @@ -1814,58 +1871,62 @@ var file_internal_plugin_proto_goTypes = []interface{}{ (*ResourceFetchSummary)(nil), // 7: proto.ResourceFetchSummary (*PartialFetchFailedResource)(nil), // 8: proto.PartialFetchFailedResource (*Diagnostic)(nil), // 9: proto.Diagnostic - (*GetProviderSchema)(nil), // 10: proto.GetProviderSchema - (*GetProviderConfig)(nil), // 11: proto.GetProviderConfig - (*Table)(nil), // 12: proto.Table - (*Column)(nil), // 13: proto.Column - (*ColumnMeta)(nil), // 14: proto.ColumnMeta - (*ResolverMeta)(nil), // 15: proto.ResolverMeta - (*TableCreationOptions)(nil), // 16: proto.TableCreationOptions - (*ConnectionDetails)(nil), // 17: proto.ConnectionDetails - (*ConfigureProvider_Request)(nil), // 18: proto.ConfigureProvider.Request - (*ConfigureProvider_Response)(nil), // 19: proto.ConfigureProvider.Response - (*FetchResources_Request)(nil), // 20: proto.FetchResources.Request - (*FetchResources_Response)(nil), // 21: proto.FetchResources.Response - nil, // 22: proto.FetchResources.Response.FinishedResourcesEntry - (*GetProviderSchema_Request)(nil), // 23: proto.GetProviderSchema.Request - (*GetProviderSchema_Response)(nil), // 24: proto.GetProviderSchema.Response - nil, // 25: proto.GetProviderSchema.Response.ResourceTablesEntry - nil, // 26: proto.GetProviderSchema.Response.MigrationsEntry - (*GetProviderConfig_Request)(nil), // 27: proto.GetProviderConfig.Request - (*GetProviderConfig_Response)(nil), // 28: proto.GetProviderConfig.Response + (*DialectMigration)(nil), // 10: proto.DialectMigration + (*GetProviderSchema)(nil), // 11: proto.GetProviderSchema + (*GetProviderConfig)(nil), // 12: proto.GetProviderConfig + (*Table)(nil), // 13: proto.Table + (*Column)(nil), // 14: proto.Column + (*ColumnMeta)(nil), // 15: proto.ColumnMeta + (*ResolverMeta)(nil), // 16: proto.ResolverMeta + (*TableCreationOptions)(nil), // 17: proto.TableCreationOptions + (*ConnectionDetails)(nil), // 18: proto.ConnectionDetails + (*ConfigureProvider_Request)(nil), // 19: proto.ConfigureProvider.Request + (*ConfigureProvider_Response)(nil), // 20: proto.ConfigureProvider.Response + (*FetchResources_Request)(nil), // 21: proto.FetchResources.Request + (*FetchResources_Response)(nil), // 22: proto.FetchResources.Response + nil, // 23: proto.FetchResources.Response.FinishedResourcesEntry + nil, // 24: proto.DialectMigration.MigrationsEntry + (*GetProviderSchema_Request)(nil), // 25: proto.GetProviderSchema.Request + (*GetProviderSchema_Response)(nil), // 26: proto.GetProviderSchema.Response + nil, // 27: proto.GetProviderSchema.Response.ResourceTablesEntry + nil, // 28: proto.GetProviderSchema.Response.MigrationsEntry + (*GetProviderConfig_Request)(nil), // 29: proto.GetProviderConfig.Request + (*GetProviderConfig_Response)(nil), // 30: proto.GetProviderConfig.Response } var file_internal_plugin_proto_depIdxs = []int32{ 2, // 0: proto.ResourceFetchSummary.status:type_name -> proto.ResourceFetchSummary.Status 9, // 1: proto.ResourceFetchSummary.diagnostics:type_name -> proto.Diagnostic 3, // 2: proto.Diagnostic.type:type_name -> proto.Diagnostic.Type 4, // 3: proto.Diagnostic.severity:type_name -> proto.Diagnostic.Severity - 13, // 4: proto.Table.columns:type_name -> proto.Column - 12, // 5: 
proto.Table.relations:type_name -> proto.Table - 16, // 6: proto.Table.options:type_name -> proto.TableCreationOptions - 0, // 7: proto.Column.type:type_name -> proto.ColumnType - 14, // 8: proto.Column.meta:type_name -> proto.ColumnMeta - 15, // 9: proto.ColumnMeta.resolver:type_name -> proto.ResolverMeta - 1, // 10: proto.ConnectionDetails.type:type_name -> proto.ConnectionType - 17, // 11: proto.ConfigureProvider.Request.connection:type_name -> proto.ConnectionDetails - 22, // 12: proto.FetchResources.Response.finished_resources:type_name -> proto.FetchResources.Response.FinishedResourcesEntry - 8, // 13: proto.FetchResources.Response.partial_fetch_failed_resources:type_name -> proto.PartialFetchFailedResource - 7, // 14: proto.FetchResources.Response.summary:type_name -> proto.ResourceFetchSummary - 25, // 15: proto.GetProviderSchema.Response.resource_tables:type_name -> proto.GetProviderSchema.Response.ResourceTablesEntry - 26, // 16: proto.GetProviderSchema.Response.migrations:type_name -> proto.GetProviderSchema.Response.MigrationsEntry - 12, // 17: proto.GetProviderSchema.Response.ResourceTablesEntry.value:type_name -> proto.Table - 23, // 18: proto.Provider.GetProviderSchema:input_type -> proto.GetProviderSchema.Request - 27, // 19: proto.Provider.GetProviderConfig:input_type -> proto.GetProviderConfig.Request - 18, // 20: proto.Provider.ConfigureProvider:input_type -> proto.ConfigureProvider.Request - 20, // 21: proto.Provider.FetchResources:input_type -> proto.FetchResources.Request - 24, // 22: proto.Provider.GetProviderSchema:output_type -> proto.GetProviderSchema.Response - 28, // 23: proto.Provider.GetProviderConfig:output_type -> proto.GetProviderConfig.Response - 19, // 24: proto.Provider.ConfigureProvider:output_type -> proto.ConfigureProvider.Response - 21, // 25: proto.Provider.FetchResources:output_type -> proto.FetchResources.Response - 22, // [22:26] is the sub-list for method output_type - 18, // [18:22] is the sub-list for method input_type - 18, // [18:18] is the sub-list for extension type_name - 18, // [18:18] is the sub-list for extension extendee - 0, // [0:18] is the sub-list for field type_name + 24, // 4: proto.DialectMigration.migrations:type_name -> proto.DialectMigration.MigrationsEntry + 14, // 5: proto.Table.columns:type_name -> proto.Column + 13, // 6: proto.Table.relations:type_name -> proto.Table + 17, // 7: proto.Table.options:type_name -> proto.TableCreationOptions + 0, // 8: proto.Column.type:type_name -> proto.ColumnType + 15, // 9: proto.Column.meta:type_name -> proto.ColumnMeta + 16, // 10: proto.ColumnMeta.resolver:type_name -> proto.ResolverMeta + 1, // 11: proto.ConnectionDetails.type:type_name -> proto.ConnectionType + 18, // 12: proto.ConfigureProvider.Request.connection:type_name -> proto.ConnectionDetails + 23, // 13: proto.FetchResources.Response.finished_resources:type_name -> proto.FetchResources.Response.FinishedResourcesEntry + 8, // 14: proto.FetchResources.Response.partial_fetch_failed_resources:type_name -> proto.PartialFetchFailedResource + 7, // 15: proto.FetchResources.Response.summary:type_name -> proto.ResourceFetchSummary + 27, // 16: proto.GetProviderSchema.Response.resource_tables:type_name -> proto.GetProviderSchema.Response.ResourceTablesEntry + 28, // 17: proto.GetProviderSchema.Response.migrations:type_name -> proto.GetProviderSchema.Response.MigrationsEntry + 13, // 18: proto.GetProviderSchema.Response.ResourceTablesEntry.value:type_name -> proto.Table + 10, // 19: 
proto.GetProviderSchema.Response.MigrationsEntry.value:type_name -> proto.DialectMigration + 25, // 20: proto.Provider.GetProviderSchema:input_type -> proto.GetProviderSchema.Request + 29, // 21: proto.Provider.GetProviderConfig:input_type -> proto.GetProviderConfig.Request + 19, // 22: proto.Provider.ConfigureProvider:input_type -> proto.ConfigureProvider.Request + 21, // 23: proto.Provider.FetchResources:input_type -> proto.FetchResources.Request + 26, // 24: proto.Provider.GetProviderSchema:output_type -> proto.GetProviderSchema.Response + 30, // 25: proto.Provider.GetProviderConfig:output_type -> proto.GetProviderConfig.Response + 20, // 26: proto.Provider.ConfigureProvider:output_type -> proto.ConfigureProvider.Response + 22, // 27: proto.Provider.FetchResources:output_type -> proto.FetchResources.Response + 24, // [24:28] is the sub-list for method output_type + 20, // [20:24] is the sub-list for method input_type + 20, // [20:20] is the sub-list for extension type_name + 20, // [20:20] is the sub-list for extension extendee + 0, // [0:20] is the sub-list for field type_name } func init() { file_internal_plugin_proto_init() } @@ -1935,7 +1996,7 @@ func file_internal_plugin_proto_init() { } } file_internal_plugin_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetProviderSchema); i { + switch v := v.(*DialectMigration); i { case 0: return &v.state case 1: @@ -1947,7 +2008,7 @@ func file_internal_plugin_proto_init() { } } file_internal_plugin_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetProviderConfig); i { + switch v := v.(*GetProviderSchema); i { case 0: return &v.state case 1: @@ -1959,7 +2020,7 @@ func file_internal_plugin_proto_init() { } } file_internal_plugin_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Table); i { + switch v := v.(*GetProviderConfig); i { case 0: return &v.state case 1: @@ -1971,7 +2032,7 @@ func file_internal_plugin_proto_init() { } } file_internal_plugin_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Column); i { + switch v := v.(*Table); i { case 0: return &v.state case 1: @@ -1983,7 +2044,7 @@ func file_internal_plugin_proto_init() { } } file_internal_plugin_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ColumnMeta); i { + switch v := v.(*Column); i { case 0: return &v.state case 1: @@ -1995,7 +2056,7 @@ func file_internal_plugin_proto_init() { } } file_internal_plugin_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ResolverMeta); i { + switch v := v.(*ColumnMeta); i { case 0: return &v.state case 1: @@ -2007,7 +2068,7 @@ func file_internal_plugin_proto_init() { } } file_internal_plugin_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*TableCreationOptions); i { + switch v := v.(*ResolverMeta); i { case 0: return &v.state case 1: @@ -2019,7 +2080,7 @@ func file_internal_plugin_proto_init() { } } file_internal_plugin_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ConnectionDetails); i { + switch v := v.(*TableCreationOptions); i { case 0: return &v.state case 1: @@ -2031,7 +2092,7 @@ func file_internal_plugin_proto_init() { } } file_internal_plugin_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ConfigureProvider_Request); i { + switch v := v.(*ConnectionDetails); i { case 0: return &v.state 
case 1: @@ -2043,7 +2104,7 @@ func file_internal_plugin_proto_init() { } } file_internal_plugin_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*ConfigureProvider_Response); i { + switch v := v.(*ConfigureProvider_Request); i { case 0: return &v.state case 1: @@ -2055,7 +2116,7 @@ func file_internal_plugin_proto_init() { } } file_internal_plugin_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*FetchResources_Request); i { + switch v := v.(*ConfigureProvider_Response); i { case 0: return &v.state case 1: @@ -2067,6 +2128,18 @@ func file_internal_plugin_proto_init() { } } file_internal_plugin_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + switch v := v.(*FetchResources_Request); i { + case 0: + return &v.state + case 1: + return &v.sizeCache + case 2: + return &v.unknownFields + default: + return nil + } + } + file_internal_plugin_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*FetchResources_Response); i { case 0: return &v.state @@ -2078,7 +2151,7 @@ func file_internal_plugin_proto_init() { return nil } } - file_internal_plugin_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + file_internal_plugin_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetProviderSchema_Request); i { case 0: return &v.state @@ -2090,7 +2163,7 @@ func file_internal_plugin_proto_init() { return nil } } - file_internal_plugin_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { + file_internal_plugin_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetProviderSchema_Response); i { case 0: return &v.state @@ -2102,7 +2175,7 @@ func file_internal_plugin_proto_init() { return nil } } - file_internal_plugin_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { + file_internal_plugin_proto_msgTypes[24].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetProviderConfig_Request); i { case 0: return &v.state @@ -2114,7 +2187,7 @@ func file_internal_plugin_proto_init() { return nil } } - file_internal_plugin_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { + file_internal_plugin_proto_msgTypes[25].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetProviderConfig_Response); i { case 0: return &v.state @@ -2127,14 +2200,14 @@ func file_internal_plugin_proto_init() { } } } - file_internal_plugin_proto_msgTypes[7].OneofWrappers = []interface{}{} + file_internal_plugin_proto_msgTypes[8].OneofWrappers = []interface{}{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_internal_plugin_proto_rawDesc, NumEnums: 5, - NumMessages: 24, + NumMessages: 26, NumExtensions: 0, NumServices: 1, }, diff --git a/cqproto/internal/plugin.proto b/cqproto/internal/plugin.proto index f0dafc74..71af436a 100644 --- a/cqproto/internal/plugin.proto +++ b/cqproto/internal/plugin.proto @@ -112,6 +112,9 @@ message Diagnostic { string resource = 5; } +message DialectMigration { + map<string, bytes> migrations = 1; +} message GetProviderSchema { message Request {} @@ -119,7 +122,7 @@ message GetProviderSchema { string name = 1; string version = 2; map<string, Table> resource_tables = 3; - map<string, bytes> migrations = 4; + map<string, DialectMigration> migrations = 5; } } diff --git a/cqproto/internal/plugin_grpc.pb.go b/cqproto/internal/plugin_grpc.pb.go index b831cb0a..188be5cb 100644 ---
a/cqproto/internal/plugin_grpc.pb.go +++ b/cqproto/internal/plugin_grpc.pb.go @@ -4,7 +4,6 @@ package internal import ( context "context" - grpc "google.golang.org/grpc" codes "google.golang.org/grpc/codes" status "google.golang.org/grpc/status" diff --git a/cqproto/plugin.go b/cqproto/plugin.go index a60fe8b4..6037de97 100644 --- a/cqproto/plugin.go +++ b/cqproto/plugin.go @@ -8,6 +8,12 @@ import ( "google.golang.org/grpc" ) +const ( + V4 = 4 + + Vunmanaged = -1 +) + // CQPlugin This is the implementation of plugin.GRPCServer so we can serve/consume this. type CQPlugin struct { // GRPCPlugin must still implement the Stub interface diff --git a/cqproto/provider.go b/cqproto/provider.go index d3ed10e0..a21a878b 100644 --- a/cqproto/provider.go +++ b/cqproto/provider.go @@ -53,8 +53,8 @@ type GetProviderSchemaResponse struct { Version string // ResourceTables is a map of tables this provider creates ResourceTables map[string]*schema.Table - // Migrations scripts available for the provider - Migrations map[string][]byte + // Migrations scripts available for the provider, for all dialects + Migrations map[string]map[string][]byte } // GetProviderConfigRequest represents a CloudQuery RPC request for provider's config diff --git a/database/database.go b/database/database.go new file mode 100644 index 00000000..e694edf4 --- /dev/null +++ b/database/database.go @@ -0,0 +1,44 @@ +package database + +import ( + "context" + + "github.com/cloudquery/cq-provider-sdk/database/postgres" + "github.com/cloudquery/cq-provider-sdk/provider/schema" + "github.com/hashicorp/go-hclog" +) + +// DB encapsulates a schema.Storage and the (auto-detected) dialect it was configured with +type DB struct { + schema.Storage + + dialectType schema.DialectType +} + +// New creates a new DB using the provided DSN. It will auto detect the dialect based on the DSN and pass that info to NewPgDatabase +func New(ctx context.Context, logger hclog.Logger, dsn string) (*DB, error) { + dType, newDSN, err := ParseDialectDSN(dsn) + if err != nil { + return nil, err + } + + dialect, err := schema.GetDialect(dType) + if err != nil { + return nil, err + } + + db, err := postgres.NewPgDatabase(ctx, logger, newDSN, dialect) + if err != nil { + return nil, err + } + + return &DB{ + Storage: db, + dialectType: dType, + }, nil +} + +// DialectType returns the dialect type the DB was configured with +func (d *DB) DialectType() schema.DialectType { + return d.dialectType +} diff --git a/database/dialect.go b/database/dialect.go new file mode 100644 index 00000000..ecd2526f --- /dev/null +++ b/database/dialect.go @@ -0,0 +1,27 @@ +package database + +import ( + "strings" + + "github.com/cloudquery/cq-provider-sdk/database/dsn" + "github.com/cloudquery/cq-provider-sdk/provider/schema" +) + +// ParseDialectDSN parses a DSN and returns the suggested DialectType, as well as a new version of the DSN if applicable. +// The DSN change is done to support protocol-compatible databases without needing to add support for custom URL schemes to 3rd party packages. 
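A minimal caller-side sketch of the dialect detection implemented by the function just below (the DSN literal is invented, and the import paths assume this module's layout): a tsdb:// scheme is reported as schema.TSDB and the DSN is rewritten to a postgres:// URL so postgres-protocol tools such as pgx and go-migrate can use it, while plain postgres DSNs pass through unchanged.

package main

import (
	"fmt"
	"strings"

	"github.com/cloudquery/cq-provider-sdk/database"
	"github.com/cloudquery/cq-provider-sdk/provider/schema"
)

func main() {
	// Invented TimescaleDB connection string.
	dt, fixedDSN, err := database.ParseDialectDSN("tsdb://postgres:pass@localhost:5432/postgres")
	if err != nil {
		panic(err)
	}
	fmt.Println(dt == schema.TSDB)                          // true
	fmt.Println(strings.HasPrefix(fixedDSN, "postgres://")) // true: scheme rewritten for postgres-protocol tooling
}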
+func ParseDialectDSN(inputDSN string) (d schema.DialectType, newDSN string, err error) { + u, err := dsn.ParseConnectionString(inputDSN) + if err != nil { + return schema.Postgres, inputDSN, err + } + + switch u.Scheme { + case "timescaledb", "tsdb", "timescale": + // Replace tsdb schemes to look like postgres, so that postgres-protocol compatible tools (like go-migrate) work + // Keep/return the DialectType separately from the DSN so we can refer to it later + fixedDSN := strings.Replace(u.String(), u.Scheme+"://", "postgres://", 1) + return schema.TSDB, fixedDSN, nil + default: + return schema.Postgres, inputDSN, nil + } +} diff --git a/helpers/dsn.go b/database/dsn/dsn.go similarity index 73% rename from helpers/dsn.go rename to database/dsn/dsn.go index dc69f7ab..3d87dfbe 100644 --- a/helpers/dsn.go +++ b/database/dsn/dsn.go @@ -1,4 +1,4 @@ -package helpers +package dsn import ( "errors" @@ -8,16 +8,45 @@ import ( "github.com/xo/dburl" ) +func init() { + dburl.Register(dburl.Scheme{ + Driver: "timescale", + Generator: dburl.GenPostgres, + Transport: dburl.TransportTCP | dburl.TransportUnix, + Opaque: false, + Aliases: []string{"timescaledb", "tsdb", "ts"}, + Override: "", + }) +} + +// ParseConnectionString will try and parse any type of connection string and return a dburl func ParseConnectionString(connString string) (*dburl.URL, error) { - var err error - // connString may be a database URL or a DSN - if !(strings.HasPrefix(connString, "postgres://") || strings.HasPrefix(connString, "postgresql://")) { + u, err := dburl.Parse(connString) + if err == dburl.ErrInvalidDatabaseScheme { + // connString may be a database URL or a DSN connString, err = convertDSNToURL(connString) if err != nil { - return nil, fmt.Errorf("failed to parse dsn string, %w", err) + return nil, fmt.Errorf("failed to parse dsn string: %w", err) } + u, err = dburl.Parse(connString) + } + + return u, err +} + +// SetDSNElement parses the given DSN and sets/adds the given map values as query parameters, returning a URI DSN +func SetDSNElement(dsn string, elems map[string]string) (string, error) { + u, err := ParseConnectionString(dsn) + if err != nil { + return "", err + } + + vals := u.Query() + for k, v := range elems { + vals.Set(k, v) } - return dburl.Parse(connString) + u.RawQuery = vals.Encode() + return u.String(), nil } var asciiSpace = [256]uint8{'\t': 1, '\n': 1, '\v': 1, '\f': 1, '\r': 1, ' ': 1} diff --git a/database/dsn/dsn_test.go b/database/dsn/dsn_test.go new file mode 100644 index 00000000..ae9a5dac --- /dev/null +++ b/database/dsn/dsn_test.go @@ -0,0 +1,84 @@ +package dsn + +import ( + "net/url" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestParseConnectionString(t *testing.T) { + tbl := []struct { + input string + mod map[string]string + expected string + expectError bool + }{ + { + input: "postgres://a:b@c.d?x=y&z=f", + expected: "postgres://a:b@c.d?x=y&z=f", + }, + { + input: "host=localhost user=postgres password=pass database=postgres port=5432 sslmode=disable", + expected: "postgres://postgres:pass@localhost:5432/postgres?sslmode=disable", + }, + { + input: "tsdb://a:b@c.d?x=y&z=f", + expected: "tsdb://a:b@c.d?x=y&z=f", + }, + } + for _, tc := range tbl { + out, err := ParseConnectionString(tc.input) + if tc.expectError { + assert.Error(t, err) + } else { + assert.NoError(t, err) + } + if err != nil { + continue + } + + u, err := url.Parse(tc.expected) + assert.NoError(t, err) + assert.EqualValues(t, u.Scheme, out.Scheme) + assert.EqualValues(t, u.Host, out.Host) + 
assert.EqualValues(t, u.Path, out.Path) + assert.EqualValues(t, u.Query(), out.Query()) + } +} + +func TestDSNElement(t *testing.T) { + tbl := []struct { + input string + mod map[string]string + expected string + }{ + { + input: "postgres://a:b@c.d?x=y&z=f", + mod: map[string]string{"ADD": "THIS"}, + expected: "postgres://a:b@c.d?x=y&z=f&ADD=THIS", + }, + { + input: "host=localhost user=postgres password=pass database=postgres port=5432 sslmode=disable", + mod: map[string]string{"ADD": "THIS"}, + expected: "postgres://postgres:pass@localhost:5432/postgres?ADD=THIS&sslmode=disable", + }, + { + input: "tsdb://a:b@c.d?x=y&z=f", + mod: map[string]string{"ADD": "THIS"}, + expected: "tsdb://a:b@c.d?x=y&z=f&ADD=THIS", + }, + } + for _, tc := range tbl { + out, err := SetDSNElement(tc.input, tc.mod) + assert.NoError(t, err) + u1, err := url.Parse(tc.expected) + assert.NoError(t, err) + u2, err := url.Parse(out) + assert.NoError(t, err) + assert.EqualValues(t, u1.Scheme, u2.Scheme) + assert.EqualValues(t, u1.Host, u2.Host) + assert.EqualValues(t, u1.Path, u2.Path) + assert.EqualValues(t, u1.Query(), u2.Query()) + } +} diff --git a/database/postgres/connection.go b/database/postgres/connection.go new file mode 100644 index 00000000..e8b83439 --- /dev/null +++ b/database/postgres/connection.go @@ -0,0 +1,29 @@ +package postgres + +import ( + "context" + + "github.com/jackc/pgtype" + "github.com/jackc/pgx/v4" + "github.com/jackc/pgx/v4/pgxpool" +) + +// Connect connects to the given DSN and returns a pgxpool +func Connect(ctx context.Context, dsn string) (*pgxpool.Pool, error) { + poolCfg, err := pgxpool.ParseConfig(dsn) + if err != nil { + return nil, err + } + poolCfg.AfterConnect = func(ctx context.Context, conn *pgx.Conn) error { + UUIDType := pgtype.DataType{ + Value: &UUID{}, + Name: "uuid", + OID: pgtype.UUIDOID, + } + + conn.ConnInfo().RegisterDataType(UUIDType) + return nil + } + poolCfg.LazyConnect = true + return pgxpool.ConnectConfig(ctx, poolCfg) +} diff --git a/provider/schema/database.go b/database/postgres/pgdatabase.go similarity index 55% rename from provider/schema/database.go rename to database/postgres/pgdatabase.go index 44d6609b..e7000070 100644 --- a/provider/schema/database.go +++ b/database/postgres/pgdatabase.go @@ -1,76 +1,56 @@ -package schema +package postgres import ( "context" - "encoding/json" "fmt" - "reflect" "strconv" "time" + sq "github.com/Masterminds/squirrel" + "github.com/cloudquery/cq-provider-sdk/provider/schema" "github.com/cloudquery/cq-provider-sdk/provider/schema/diag" + "github.com/doug-martin/goqu/v9" + _ "github.com/doug-martin/goqu/v9/dialect/postgres" + "github.com/hashicorp/go-hclog" "github.com/jackc/pgconn" "github.com/jackc/pgerrcode" - - "github.com/spf13/cast" - - "github.com/hashicorp/go-hclog" - "github.com/modern-go/reflect2" - - "github.com/doug-martin/goqu/v9" - "github.com/jackc/pgx/v4" - - sq "github.com/Masterminds/squirrel" - _ "github.com/doug-martin/goqu/v9/dialect/postgres" "github.com/jackc/pgx/v4/pgxpool" + "github.com/spf13/cast" ) -const ( - // MaxTableLength in postgres is 63 when building _fk or _pk we want to truncate the name to 60 chars max - maxTableNamePKConstraint = 60 -) - -//go:generate mockgen -package=mock -destination=./mocks/mock_database.go . 
Database -type Database interface { - Insert(ctx context.Context, t *Table, instance Resources) error - Exec(ctx context.Context, query string, args ...interface{}) error - Delete(ctx context.Context, t *Table, kvFilters []interface{}) error - Query(ctx context.Context, query string, args ...interface{}) (pgx.Rows, error) - RemoveStaleData(ctx context.Context, t *Table, executionStart time.Time, kvFilters []interface{}) error - CopyFrom(ctx context.Context, resources Resources, shouldCascade bool, CascadeDeleteFilters map[string]interface{}) error - Close() -} - type PgDatabase struct { pool *pgxpool.Pool log hclog.Logger + sd schema.Dialect } -func NewPgDatabase(ctx context.Context, logger hclog.Logger, dsn string) (*PgDatabase, error) { - cfg, err := pgxpool.ParseConfig(dsn) +func NewPgDatabase(ctx context.Context, logger hclog.Logger, dsn string, sd schema.Dialect) (*PgDatabase, error) { + pool, err := Connect(ctx, dsn) if err != nil { return nil, err } - pool, err := pgxpool.ConnectConfig(ctx, cfg) - if err != nil { - return nil, err - } - return &PgDatabase{pool: pool, log: logger}, nil + return &PgDatabase{ + pool: pool, + log: logger, + sd: sd, + }, nil } +var _ schema.Storage = (*PgDatabase)(nil) + // Insert inserts all resources to given table, table and resources are assumed from same table. -func (p PgDatabase) Insert(ctx context.Context, t *Table, resources Resources) error { +func (p PgDatabase) Insert(ctx context.Context, t *schema.Table, resources schema.Resources) error { if len(resources) == 0 { return nil } // It is safe to assume that all resources have the same columns - cols := quoteColumns(resources[0].columns) + cols := quoteColumns(resources.ColumnNames()) psql := sq.StatementBuilder.PlaceholderFormat(sq.Dollar) sqlStmt := psql.Insert(t.Name).Columns(cols...) for _, res := range resources { - if res.table != t { - return fmt.Errorf("resource table expected %s got %s", t.Name, res.table.Name) + if res.TableName() != t.Name { + return fmt.Errorf("resource table expected %s got %s", t.Name, res.TableName()) } values, err := res.Values() if err != nil { @@ -101,7 +81,7 @@ func (p PgDatabase) Insert(ctx context.Context, t *Table, resources Resources) e } // CopyFrom copies all resources from []*Resource -func (p PgDatabase) CopyFrom(ctx context.Context, resources Resources, shouldCascade bool, cascadeDeleteFilters map[string]interface{}) error { +func (p PgDatabase) CopyFrom(ctx context.Context, resources schema.Resources, shouldCascade bool, cascadeDeleteFilters map[string]interface{}) error { if len(resources) == 0 { return nil } @@ -128,7 +108,7 @@ func (p PgDatabase) CopyFrom(ctx context.Context, resources Resources, shouldCas ctx, pgx.Identifier{resources.TableName()}, resources.ColumnNames(), pgx.CopyFromSlice(len(resources), func(i int) ([]interface{}, error) { // use getResourceValues instead of Resource.Values since values require some special encoding for CopyFrom - return getResourceValues(resources[i]) + return p.sd.GetResourceValues(resources[i]) })) if err != nil { return err @@ -159,7 +139,7 @@ func (p PgDatabase) QueryOne(ctx context.Context, query string, args ...interfac return row } -func (p PgDatabase) Delete(ctx context.Context, t *Table, kvFilters []interface{}) error { +func (p PgDatabase) Delete(ctx context.Context, t *schema.Table, kvFilters []interface{}) error { nc := len(kvFilters) if nc%2 != 0 { return fmt.Errorf("number of args to delete should be even. 
Got %d", nc) @@ -178,8 +158,8 @@ func (p PgDatabase) Delete(ctx context.Context, t *Table, kvFilters []interface{ return err } -func (p PgDatabase) RemoveStaleData(ctx context.Context, t *Table, executionStart time.Time, kvFilters []interface{}) error { - q := goqu.Delete(t.Name).WithDialect("postgres").Where(goqu.L(`extract(epoch from (meta->>'last_updated')::timestamp)`).Lt(executionStart.Unix())) +func (p PgDatabase) RemoveStaleData(ctx context.Context, t *schema.Table, executionStart time.Time, kvFilters []interface{}) error { + q := goqu.Delete(t.Name).WithDialect("postgres").Where(goqu.L(`extract(epoch from (cq_meta->>'last_updated')::timestamp)`).Lt(executionStart.Unix())) if len(kvFilters)%2 != 0 { return fmt.Errorf("expected even number of k,v delete filters received %s", kvFilters) } @@ -198,49 +178,8 @@ func (p PgDatabase) Close() { p.pool.Close() } -func GetPgTypeFromType(v ValueType) string { - switch v { - case TypeBool: - return "boolean" - case TypeInt: - return "integer" - case TypeBigInt: - return "bigint" - case TypeSmallInt: - return "smallint" - case TypeFloat: - return "float" - case TypeUUID: - return "uuid" - case TypeString: - return "text" - case TypeJSON: - return "jsonb" - case TypeIntArray: - return "integer[]" - case TypeStringArray: - return "text[]" - case TypeTimestamp: - return "timestamp without time zone" - case TypeByteArray: - return "bytea" - case TypeInvalid: - fallthrough - case TypeInet: - return "inet" - case TypeMacAddr: - return "mac" - case TypeInetArray: - return "inet[]" - case TypeMacAddrArray: - return "mac[]" - case TypeCIDR: - return "cidr" - case TypeCIDRArray: - return "cidr[]" - default: - panic("invalid type") - } +func (p PgDatabase) Dialect() schema.Dialect { + return p.sd } func quoteColumns(columns []string) []string { @@ -249,72 +188,3 @@ func quoteColumns(columns []string) []string { } return columns } - -func TruncateTableConstraint(name string) string { - if len(name) > maxTableNamePKConstraint { - return name[:maxTableNamePKConstraint] - } - return name -} - -func getResourceValues(r *Resource) ([]interface{}, error) { - values := make([]interface{}, 0) - for _, c := range append(r.table.Columns, GetDefaultSDKColumns()...) 
{ - v := r.Get(c.Name) - if err := c.ValidateType(v); err != nil { - return nil, err - } - if c.Type == TypeJSON { - if v == nil { - values = append(values, v) - continue - } - if reflect2.TypeOf(v).Kind() == reflect.Map { - values = append(values, v) - continue - } - switch data := v.(type) { - case map[string]interface{}: - values = append(values, data) - case string: - newV := make(map[string]interface{}) - err := json.Unmarshal([]byte(data), &newV) - if err != nil { - return nil, err - } - values = append(values, newV) - case *string: - var newV interface{} - err := json.Unmarshal([]byte(*data), &newV) - if err != nil { - return nil, err - } - values = append(values, newV) - case []byte: - var newV interface{} - err := json.Unmarshal(data, &newV) - if err != nil { - return nil, err - } - values = append(values, newV) - default: - d, err := json.Marshal(data) - if err != nil { - return nil, err - } - var newV interface{} - err = json.Unmarshal(d, &newV) - if err != nil { - return nil, err - } - values = append(values, newV) - } - } else { - values = append(values, v) - } - } - for _, v := range r.extraFields { - values = append(values, v) - } - return values, nil -} diff --git a/database/postgres/uuid.go b/database/postgres/uuid.go new file mode 100644 index 00000000..2e60a116 --- /dev/null +++ b/database/postgres/uuid.go @@ -0,0 +1,38 @@ +package postgres + +import ( + "encoding/hex" + + "github.com/jackc/pgtype" +) + +type UUID struct { + pgtype.UUID +} + +func (dst UUID) Get() interface{} { + switch dst.Status { + case pgtype.Present: + // CQ-Change: Return entire object, not just Bytes + return dst + case pgtype.Null: + return nil + default: + return dst.Status + } +} + +func (u UUID) String() string { + buf := make([]byte, 36) + + hex.Encode(buf[0:8], u.Bytes[0:4]) + buf[8] = '-' + hex.Encode(buf[9:13], u.Bytes[4:6]) + buf[13] = '-' + hex.Encode(buf[14:18], u.Bytes[6:8]) + buf[18] = '-' + hex.Encode(buf[19:23], u.Bytes[8:10]) + buf[23] = '-' + hex.Encode(buf[24:], u.Bytes[10:]) + return string(buf) +} diff --git a/go.mod b/go.mod index 051cbabb..8becf813 100644 --- a/go.mod +++ b/go.mod @@ -16,10 +16,10 @@ require ( github.com/hashicorp/go-plugin v1.4.3 github.com/hashicorp/go-version v1.3.0 github.com/hashicorp/hcl/v2 v2.10.1 - github.com/huandu/go-sqlbuilder v1.13.0 github.com/iancoleman/strcase v0.2.0 github.com/jackc/pgconn v1.10.0 github.com/jackc/pgerrcode v0.0.0-20201024163028-a0d42d470451 + github.com/jackc/pgtype v1.8.1 github.com/jackc/pgx/v4 v4.13.0 github.com/mitchellh/hashstructure v1.1.0 github.com/modern-go/reflect2 v1.0.2 @@ -47,13 +47,11 @@ require ( github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-multierror v1.1.1 // indirect github.com/hashicorp/yamux v0.0.0-20210826001029-26ff87cf9493 // indirect - github.com/huandu/xstrings v1.3.2 // indirect github.com/jackc/chunkreader/v2 v2.0.1 // indirect github.com/jackc/pgio v1.0.0 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect github.com/jackc/pgproto3/v2 v2.1.1 // indirect github.com/jackc/pgservicefile v0.0.0-20200714003250-2b9c44734f2b // indirect - github.com/jackc/pgtype v1.8.1 // indirect github.com/jackc/puddle v1.1.4 // indirect github.com/kr/pretty v0.2.1 // indirect github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect diff --git a/go.sum b/go.sum index 336eebc9..ca4576f3 100644 --- a/go.sum +++ b/go.sum @@ -568,12 +568,6 @@ github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKe github.com/hashicorp/yamux 
v0.0.0-20210826001029-26ff87cf9493 h1:brI5vBRUlAlM34VFmnLPwjnCL/FxAJp9XvOdX6Zt+XE= github.com/hashicorp/yamux v0.0.0-20210826001029-26ff87cf9493/go.mod h1:CtWFDAQgb7dxtzFs4tWbplKIe2jSi3+5vKbgIO0SLnQ= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/huandu/go-assert v1.1.5 h1:fjemmA7sSfYHJD7CUqs9qTwwfdNAx7/j2/ZlHXzNB3c= -github.com/huandu/go-assert v1.1.5/go.mod h1:yOLvuqZwmcHIC5rIzrBhT7D3Q9c3GFnd0JrPVhn/06U= -github.com/huandu/go-sqlbuilder v1.13.0 h1:IN1VRzcyQ+Kx74L0g5ZAY5qDaRJjwMWVmb6GrFAF8Jc= -github.com/huandu/go-sqlbuilder v1.13.0/go.mod h1:LILlbQo0MOYjlIiGgOSR3UcWQpd5Y/oZ7HLNGyAUz0E= -github.com/huandu/xstrings v1.3.2 h1:L18LIDzqlW6xN2rEkpdV8+oL/IXWJ1APd+vsdYy4Wdw= -github.com/huandu/xstrings v1.3.2/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/iancoleman/strcase v0.2.0 h1:05I4QRnGpI0m37iZQRuskXh+w77mr6Z41lwQzuHLwW0= github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= diff --git a/migration/cli.go b/migration/cli.go new file mode 100644 index 00000000..a7c1992e --- /dev/null +++ b/migration/cli.go @@ -0,0 +1,117 @@ +package migration + +import ( + "context" + "flag" + "fmt" + "os" + + "github.com/cloudquery/cq-provider-sdk/database" + "github.com/cloudquery/cq-provider-sdk/provider" + "github.com/cloudquery/cq-provider-sdk/provider/schema" + "github.com/hashicorp/go-hclog" + "github.com/jackc/pgx/v4/pgxpool" +) + +const defaultPath = "./resources/provider/migrations" + +// Run is the main entry point for CLI usage. +func Run(ctx context.Context, p *provider.Provider, outputPath string) error { + const defaultPrefix = "unreleased" + + if outputPath == "" { + outputPath = defaultPath + } + + outputPathParam := flag.String("path", outputPath, "Path to migrations directory") + prefixParam := flag.String("prefix", defaultPrefix, "Prefix for files") + doFullParam := flag.Bool("full", false, "Generate initial migrations (prefix will be 'init')") + dialectParam := flag.String("dialect", "", "Dialect to generate initial migrations (empty: all)") + dsnParam := flag.String("dsn", os.Getenv("CQ_DSN"), "DSN to compare changes against in upgrade mode") + schemaName := flag.String("schema", "public", "Schema to compare tables from in upgrade mode") + flag.Parse() + if flag.NArg() > 0 { + flag.Usage() + return fmt.Errorf("more args than necessary") + } + + if *doFullParam && *prefixParam == defaultPrefix { + *prefixParam = "init" + } + + if *prefixParam != "" { + // Add the first "." in .up.sql, only if we have a prefix + *prefixParam += "." 
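	// With the trailing dot added here, the generated files are named e.g. "unreleased.up.sql" /
	// "unreleased.down.sql" (or "init.up.sql" / "init.down.sql" when -full is used); with -prefix ""
	// they are simply "up.sql" / "down.sql". The files land in per-dialect subdirectories of -path,
	// the same kind of per-dialect layout that migrator.ReadMigrationFiles later expects from the
	// provider's embedded migrations.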
+ } + + if *doFullParam { + dialects, err := parseInputDialect(dialectParam) + if err != nil { + return err + } + + if err := GenerateFull(ctx, hclog.L(), p, dialects, *outputPathParam, *prefixParam); err != nil { + return fmt.Errorf("failed to generate migrations: %w", err) + } + return nil + } + + if *dsnParam == "" { + return fmt.Errorf("DSN not specified: Use -dsn or set CQ_DSN") + } + + pool, dialectType, err := connect(ctx, *dsnParam) + if err != nil { + return err + } + conn, err := pool.Acquire(ctx) + if err != nil { + return err + } + defer conn.Release() + + if err := GenerateDiff(ctx, hclog.L(), conn, *schemaName, *dialectType, p, *outputPathParam, *prefixParam); err != nil { + return fmt.Errorf("failed to generate migrations: %w", err) + } + + return nil +} + +func connect(ctx context.Context, dsn string) (*pgxpool.Pool, *schema.DialectType, error) { + detectedDialect, newDSN, err := database.ParseDialectDSN(dsn) + if err != nil { + return nil, nil, fmt.Errorf("could not parse dsn: %w", err) + } + + poolCfg, err := pgxpool.ParseConfig(newDSN) + if err != nil { + return nil, nil, err + } + poolCfg.LazyConnect = true + pool, err := pgxpool.ConnectConfig(ctx, poolCfg) + return pool, &detectedDialect, err +} + +func parseInputDialect(inputDialect *string) ([]schema.DialectType, error) { + defaultDialectsFullMode := []schema.DialectType{ + schema.Postgres, + schema.TSDB, + } + + var dialects []schema.DialectType + if *inputDialect == "" { + dialects = defaultDialectsFullMode + } else { + for _, d := range defaultDialectsFullMode { + if string(d) == *inputDialect { + dialects = append(dialects, d) + break + } + } + if len(dialects) == 0 { + return nil, fmt.Errorf("invalid dialect %q", *inputDialect) + } + } + + return dialects, nil +} diff --git a/migration/longestcommon/lc.go b/migration/longestcommon/lc.go new file mode 100644 index 00000000..8cf37b40 --- /dev/null +++ b/migration/longestcommon/lc.go @@ -0,0 +1,84 @@ +// https://github.com/jpillora/longestcommon +// Doesn't have go.mod file +package longestcommon + +import "strings" + +// TrimPrefix removes the longest common prefix from all provided strings +func TrimPrefix(strs []string) { + p := Prefix(strs) + if p == "" { + return + } + for i, s := range strs { + strs[i] = strings.TrimPrefix(s, p) + } +} + +// TrimSuffix removes the longest common suffix from all provided strings +func TrimSuffix(strs []string) { + p := Suffix(strs) + if p == "" { + return + } + for i, s := range strs { + strs[i] = strings.TrimSuffix(s, p) + } +} + +// Prefix returns the longest common prefix of the provided strings +func Prefix(strs []string) string { + return longestCommonXfix(strs, true) +} + +// Suffix returns the longest common suffix of the provided strings +func Suffix(strs []string) string { + return longestCommonXfix(strs, false) +} + +func longestCommonXfix(strs []string, pre bool) string { + // short-circuit empty list + if len(strs) == 0 { + return "" + } + xfix := strs[0] + // short-circuit single-element list + if len(strs) == 1 { + return xfix + } + // compare first to rest + for _, str := range strs[1:] { + xfixl := len(xfix) + strl := len(str) + // short-circuit empty strings + if xfixl == 0 || strl == 0 { + return "" + } + // maximum possible length + maxl := xfixl + if strl < maxl { + maxl = strl + } + // compare letters + if pre { + // prefix, iterate left to right + for i := 0; i < maxl; i++ { + if xfix[i] != str[i] { + xfix = xfix[:i] + break + } + } + } else { + // suffix, iternate right to left + for i := 0; i < maxl; 
i++ { + xi := xfixl - i - 1 + si := strl - i - 1 + if xfix[xi] != str[si] { + xfix = xfix[xi+1:] + break + } + } + } + } + return xfix +} diff --git a/migration/longestcommon/lc_test.go b/migration/longestcommon/lc_test.go new file mode 100644 index 00000000..cc4b8ab9 --- /dev/null +++ b/migration/longestcommon/lc_test.go @@ -0,0 +1,112 @@ +// https://github.com/jpillora/longestcommon +package longestcommon + +import ( + "strings" + "testing" +) + +func doTest(t *testing.T, lines, pre, suf string) { + strs := []string{} + if lines != "" { + strs = strings.Split(lines, "\n") + } + p := Prefix(strs) + if p != pre { + t.Fatalf("fail: expected prefix '%s', got '%s'", pre, p) + } + s := Suffix(strs) + if s != suf { + t.Fatalf("fail: expected suffix '%s', got '%s'", suf, s) + } +} + +func TestXFix1(t *testing.T) { + doTest(t, ``, "", "") +} + +func TestXFix2(t *testing.T) { + doTest(t, `single`, "single", "single") +} + +func TestXFix3(t *testing.T) { + doTest(t, "single\ndouble", "", "le") +} + +func TestXFix4(t *testing.T) { + doTest(t, "flower\nflow\nfleet", "fl", "") +} + +func TestXFix5(t *testing.T) { + doTest(t, `My Awesome Album - 01.mp3 +My Awesome Album - 11.mp3 +My Awesome Album - 03.mp3 +My Awesome Album - 04.mp3 +My Awesome Album - 05.mp3 +My Awesome Album - 06.mp3 +My Awesome Album - 07.mp3 +My Awesome Album - 08.mp3 +My Awesome Album - 09.mp3 +My Awesome Album - 10.mp3 +My Awesome Album - 11.mp3 +My Awesome Album - 12.mp3 +My Awesome Album - 13.mp3 +My Awesome Album - 14.mp3 +My Awesome Album - 15.mp3 +My Awesome Album - 16.mp3 +My Awesome Album - 17.mp3 +My Awesome Album - 18.mp3 +My Awesome Album - 19.mp3 +My Awesome Album - 20.mp3 +My Awesome Album - 21.mp3 +My Awesome Album - 22.mp3 +My Awesome Album - 23.mp3 +My Awesome Album - 24.mp3 +My Awesome Album - 25.mp3 +My Awesome Album - 26.mp3 +My Awesome Album - 27.mp3 +My Awesome Album - 28.mp3 +My Awesome Album - 29.mp3 +My Awesome Album - 30.mp3 +My Awesome Album - 31.mp3 +My Awesome Album - 32.mp3 +My Awesome Album - 33.mp3 +My Awesome Album - 34.mp3 +My Awesome Album - 35.mp3 +My Awesome Album - 36.mp3 +My Awesome Album - 37.mp3 +My Awesome Album - 38.mp3 +My Awesome Album - 39.mp3`, "My Awesome Album - ", ".mp3") +} + +func TestTrimPrefix1(t *testing.T) { + strs := []string{"flower", "flow", "fleet"} + TrimPrefix(strs) + if strs[0] != "ower" { + t.Fatalf("fail: expected result string to be 'ower', got '%s'", strs[0]) + } +} + +func TestTrimPrefix2(t *testing.T) { + strs := []string{"flower", "tree"} + TrimPrefix(strs) //no common prefix + if strs[0] != "flower" { + t.Fatalf("fail: expected result string to be 'flower', got '%s'", strs[0]) + } +} + +func TestTrimSuffix1(t *testing.T) { + strs := []string{"flower", "power"} + TrimSuffix(strs) + if strs[0] != "fl" { + t.Fatalf("fail: expected result string to be 'fl', got '%s'", strs[0]) + } +} + +func TestTrimSuffix2(t *testing.T) { + strs := []string{"flower", "tree"} + TrimSuffix(strs) //no common suffix + if strs[0] != "flower" { + t.Fatalf("fail: expected result string to be 'flower', got '%s'", strs[0]) + } +} diff --git a/migration/migration.go b/migration/migration.go new file mode 100644 index 00000000..14469217 --- /dev/null +++ b/migration/migration.go @@ -0,0 +1,212 @@ +package migration + +import ( + "context" + "fmt" + "os" + "path/filepath" + "sort" + "time" + + "github.com/cloudquery/cq-provider-sdk/provider" + "github.com/cloudquery/cq-provider-sdk/provider/schema" + "github.com/hashicorp/go-hclog" + "github.com/jackc/pgx/v4/pgxpool" +) + +// GenerateFull 
creates initial table migrations for the provider based on its ResourceMap +func GenerateFull(ctx context.Context, logger hclog.Logger, p *provider.Provider, dialects []schema.DialectType, outputPath, prefix string) error { + for _, d := range dialects { + dialect, err := schema.GetDialect(d) + if err != nil { + return err + } + if err := generateFullForDialect(ctx, logger, p, dialect, filepath.Join(outputPath, string(d)), prefix); err != nil { + return fmt.Errorf("failed for %v: %w", d, err) + } + } + return nil +} + +// GenerateDiff creates incremental table migrations for the provider based on its ResourceMap. Entities are compared to a given conn. +func GenerateDiff(ctx context.Context, logger hclog.Logger, conn *pgxpool.Conn, schemaName string, dialectType schema.DialectType, p *provider.Provider, outputPath, prefix string) error { + dialect, err := schema.GetDialect(dialectType) + if err != nil { + return err + } + return generateDiffForDialect(ctx, logger, conn, schemaName, p, dialect, filepath.Join(outputPath, dialectType.MigrationDirectory()), prefix) +} + +func generateFullForDialect(ctx context.Context, logger hclog.Logger, p *provider.Provider, dialect schema.Dialect, outputPath, prefix string) (retErr error) { + if err := os.MkdirAll(outputPath, 0755); err != nil { + return err + } + + cName, dName := filepath.Join(outputPath, prefix+"up.sql"), filepath.Join(outputPath, prefix+"down.sql") + + defer func() { + if retErr != nil { + _ = os.Remove(cName) + _ = os.Remove(dName) + return + } + + logger.Info("Generated up migrations", "filename", cName) + logger.Info("Generated down migrations", "filename", dName) + }() + + tc := NewTableCreator(logger, dialect) + + safeClose := func(f *os.File) { + err := f.Close() + if retErr == nil { + retErr = err + } + } + + cf, err := os.Create(cName) + if err != nil { + return err + } + defer safeClose(cf) + + df, err := os.Create(dName) + if err != nil { + return err + } + defer safeClose(df) + + writeBoth := func(line string) { + _, _ = cf.WriteString(line) + _, _ = df.WriteString(line) + } + + writeBoth(fmt.Sprintf("-- Autogenerated by migration tool on %s\n", time.Now().UTC().Format("2006-01-02 15:04:05"))) + + for _, resName := range resourceKeys(p.ResourceMap) { + table := p.ResourceMap[resName] + + writeBoth("\n-- Resource: " + resName + "\n") + ups, downs, err := tc.CreateTableDefinitions(ctx, table, nil) + if err != nil { + return fmt.Errorf("CreateTable failed for %s: %w", table.Name, err) + } + + for _, s := range ups { + if _, err := cf.WriteString(s); err != nil { + return err + } + _, _ = cf.Write([]byte{'\n'}) + } + + for _, s := range downs { + if _, err := df.WriteString(s); err != nil { + return err + } + _, _ = df.Write([]byte{'\n'}) + } + } + + return nil +} + +func generateDiffForDialect(ctx context.Context, logger hclog.Logger, conn *pgxpool.Conn, schemaName string, p *provider.Provider, dialect schema.Dialect, outputPath, prefix string) (retErr error) { + cName, dName := filepath.Join(outputPath, prefix+"up.sql"), filepath.Join(outputPath, prefix+"down.sql") + + var errNoChange = fmt.Errorf("no change") + + defer func() { + if retErr == nil { + logger.Info("Generated up migrations", "filename", cName) + logger.Info("Generated down migrations", "filename", dName) + return + } + + _ = os.Remove(cName) + _ = os.Remove(dName) + + if retErr == errNoChange { + retErr = nil + logger.Info("Did not generate up migration (no change)") + logger.Info("Did not generate down migration (no change)") + } + }() + + tc = 
NewTableCreator(logger, dialect) + + safeClose := func(f *os.File) { + err := f.Close() + if retErr == nil { + retErr = err + } + } + + cf, err := os.Create(cName) + if err != nil { + return err + } + defer safeClose(cf) + + df, err := os.Create(dName) + if err != nil { + return err + } + defer safeClose(df) + + writeBoth := func(line string) { + _, _ = cf.WriteString(line) + _, _ = df.WriteString(line) + } + + writeBoth(fmt.Sprintf("-- Autogenerated by migration tool on %s\n", time.Now().UTC().Format("2006-01-02 15:04:05"))) + writeBoth("-- CHANGEME: Verify or edit this file before proceeding\n") + + changed := false + for _, resName := range resourceKeys(p.ResourceMap) { + table := p.ResourceMap[resName] + + ups, downs, err := tc.DiffTable(ctx, conn, schemaName, table, nil) + if err != nil { + return fmt.Errorf("DiffTable failed for %s: %w", table.Name, err) + } + + if len(ups)+len(downs) == 0 { + continue + } + + changed = true + writeBoth("\n-- Resource: " + resName + "\n") + + for _, s := range ups { + if _, err := cf.WriteString(s); err != nil { + return err + } + _, _ = cf.Write([]byte{'\n'}) + } + + for _, s := range downs { + if _, err := df.WriteString(s); err != nil { + return err + } + _, _ = df.Write([]byte{'\n'}) + } + } + + if !changed { + return errNoChange + } + + return nil +} + +// resourceKeys gets the keys from the resourceMap and sorts them +func resourceKeys(res map[string]*schema.Table) []string { + ret := make([]string, len(res)) + i := 0 + for k := range res { + ret[i] = k + i++ + } + sort.Strings(ret) + return ret +} diff --git a/provider/migrations.go b/migration/migrator/migrator.go similarity index 68% rename from provider/migrations.go rename to migration/migrator/migrator.go index 9abe8b31..4663b677 100644 --- a/provider/migrations.go +++ b/migration/migrator/migrator.go @@ -1,4 +1,4 @@ -package provider +package migrator import ( "context" @@ -9,16 +9,15 @@ import ( "strconv" "strings" - "github.com/cloudquery/cq-provider-sdk/helpers" - - "github.com/hashicorp/go-version" - + "github.com/cloudquery/cq-provider-sdk/database/dsn" "github.com/cloudquery/cq-provider-sdk/provider/schema" + "github.com/golang-migrate/migrate/v4" _ "github.com/golang-migrate/migrate/v4/database/postgres" "github.com/golang-migrate/migrate/v4/source" "github.com/golang-migrate/migrate/v4/source/iofs" "github.com/hashicorp/go-hclog" + "github.com/hashicorp/go-version" "github.com/jackc/pgx/v4" "github.com/spf13/afero" "github.com/spf13/cast" @@ -26,35 +25,56 @@ import ( ) const ( + Latest = "latest" + Initial = "initial" + Down = "down_testing" // used in testing + migrationsEmbeddedDirectoryPath = "migrations" dropTableSQL = "DROP TABLE IF EXISTS %s CASCADE" ) -func ReadMigrationFiles(log hclog.Logger, migrationFiles embed.FS) (map[string][]byte, error) { - var ( - err error - migrations = make(map[string][]byte) - ) - files, err := migrationFiles.ReadDir(migrationsEmbeddedDirectoryPath) +// ReadMigrationFiles reads the given embed.FS for the migration files and returns a map of dialect directories vs. filenames vs. 
data +func ReadMigrationFiles(log hclog.Logger, migrationFiles embed.FS) (map[string]map[string][]byte, error) { + dirs, err := migrationFiles.ReadDir(migrationsEmbeddedDirectoryPath) if err != nil { log.Info("Provider doesn't define any migration files") - return migrations, nil + return nil, nil } - for _, m := range files { - f, err := migrationFiles.Open(path.Join(migrationsEmbeddedDirectoryPath, m.Name())) - if err != nil { - return nil, err + + migrations := make(map[string]map[string][]byte) + + for _, d := range dirs { + if !d.IsDir() { + return nil, fmt.Errorf("bad migrations structure: missing dialect directories") } - info, _ := m.Info() - if info.Size() == 0 { - migrations[m.Name()] = []byte("") + + dialectMigrations := make(map[string][]byte) + + basePath := path.Join(migrationsEmbeddedDirectoryPath, d.Name()) + files, err := migrationFiles.ReadDir(basePath) + if err != nil { + log.Info("Provider doesn't define any migration files for dialect") continue } - data := make([]byte, info.Size()) - if _, err := f.Read(data); err != nil { - return nil, err + for _, m := range files { + f, err := migrationFiles.Open(path.Join(basePath, m.Name())) + if err != nil { + return nil, err + } + + info, _ := m.Info() + if info.Size() == 0 { + dialectMigrations[m.Name()] = []byte("") + continue + } + data := make([]byte, info.Size()) + if _, err := f.Read(data); err != nil { + return nil, err + } + dialectMigrations[m.Name()] = data } - migrations[m.Name()] = data + + migrations[d.Name()] = dialectMigrations } return migrations, nil } @@ -69,14 +89,17 @@ type Migrator struct { // maps between semantic version to the timestamp it was created at versionMapper map[string]uint versions version.Collection + + postHook func(context.Context) error } -func NewMigrator(log hclog.Logger, migrationFiles map[string][]byte, dsn string, providerName string) (*Migrator, error) { +func New(log hclog.Logger, dt schema.DialectType, migrationFiles map[string]map[string][]byte, dsnURI, providerName string, postHook func(context.Context) error) (*Migrator, error) { versionMapper := make(map[string]uint) versions := make(version.Collection, 0) mm := afero.NewMemMapFs() _ = mm.Mkdir("migrations", 0755) - for k, data := range migrationFiles { + + for k, data := range migrationFiles[dt.MigrationDirectory()] { log.Debug("adding migration file", "file", k) if err := afero.WriteFile(mm, path.Join(migrationsEmbeddedDirectoryPath, k), data, 0644); err != nil { return nil, err @@ -97,7 +120,7 @@ func NewMigrator(log hclog.Logger, migrationFiles map[string][]byte, dsn string, if err != nil { return nil, err } - u, err := helpers.ParseConnectionString(dsn) + u, err := dsn.ParseConnectionString(dsnURI) if err != nil { return nil, err } @@ -114,24 +137,40 @@ func NewMigrator(log hclog.Logger, migrationFiles map[string][]byte, dsn string, return &Migrator{ log: log, provider: providerName, - dsn: dsn, + dsn: dsnURI, migratorUrl: u, m: m, driver: driver, versionMapper: versionMapper, versions: versions, + postHook: postHook, }, nil } +func (m *Migrator) callPostHook(ctx context.Context) error { + if m.postHook == nil { + return nil + } + return m.postHook(ctx) +} + func (m *Migrator) Close() error { _, dbErr := m.m.Close() return dbErr } -func (m *Migrator) UpgradeProvider(version string) error { - if version == "latest" { +func (m *Migrator) UpgradeProvider(version string) (retErr error) { + defer func() { + if retErr != nil { + return + } + retErr = m.callPostHook(context.Background()) + }() + + if version == Latest { return 
m.m.Up() } + mv, err := m.FindLatestMigration(version) if err != nil { return fmt.Errorf("version %s upgrade doesn't exist", version) @@ -140,16 +179,35 @@ func (m *Migrator) UpgradeProvider(version string) error { return m.m.Migrate(mv) } -func (m *Migrator) DowngradeProvider(version string) error { +func (m *Migrator) DowngradeProvider(version string) (retErr error) { + defer func() { + if retErr != nil { + return + } + retErr = m.callPostHook(context.Background()) + }() + + if version == Down { // Used in testing + return m.m.Down() + } + mv, err := m.FindLatestMigration(version) if err != nil { return fmt.Errorf("version %s upgrade doesn't exist", version) } m.log.Debug("downgrading provider version", "version", version, "migrator_version", mv) + return m.m.Migrate(mv) } -func (m *Migrator) DropProvider(ctx context.Context, schema map[string]*schema.Table) error { +func (m *Migrator) DropProvider(ctx context.Context, schema map[string]*schema.Table) (retErr error) { + defer func() { + if retErr != nil { + return + } + retErr = m.callPostHook(context.Background()) + }() + // we don't use go-migrate's drop since its too violent and it will remove all tables of other providers, // instead we will only drop the migration table and all schema's tables // we additionally don't use a transaction since this results quite often in out of shared memory errors @@ -157,6 +215,8 @@ func (m *Migrator) DropProvider(ctx context.Context, schema map[string]*schema.T if err != nil { return err } + defer conn.Close(ctx) + q := fmt.Sprintf(dropTableSQL, strconv.Quote(fmt.Sprintf("%s_schema_migrations", m.provider))) if _, err := conn.Exec(ctx, q); err != nil { return err @@ -186,7 +246,14 @@ func (m *Migrator) Version() (string, bool, error) { return "v0.0.0", dirty, err } -func (m *Migrator) SetVersion(requestedVersion string) error { +func (m *Migrator) SetVersion(requestedVersion string) (retErr error) { + defer func() { + if retErr != nil { + return + } + retErr = m.callPostHook(context.Background()) + }() + mv, err := m.FindLatestMigration(requestedVersion) if err != nil { return err @@ -201,9 +268,12 @@ func (m *Migrator) SetVersion(requestedVersion string) error { // if we ask for 004 we get 001 // if we ask for 005 we get 005 func (m *Migrator) FindLatestMigration(requestedVersion string) (uint, error) { - if requestedVersion == "latest" { + if requestedVersion == Latest { mv := m.versionMapper[m.versions[len(m.versions)-1].Original()] return mv, nil + } else if requestedVersion == Initial { + mv := m.versionMapper[m.versions[0].Original()] + return mv, nil } // if we have a migration for specific version return that mv number mv, ok := m.versionMapper[requestedVersion] diff --git a/provider/migrations_test.go b/migration/migrator/migrator_test.go similarity index 55% rename from provider/migrations_test.go rename to migration/migrator/migrator_test.go index e64df99b..c4dce1c0 100644 --- a/provider/migrations_test.go +++ b/migration/migrator/migrator_test.go @@ -1,10 +1,10 @@ -package provider +package migrator import ( "context" "testing" - "github.com/cloudquery/cq-provider-sdk/helpers" + "github.com/cloudquery/cq-provider-sdk/provider/schema" "github.com/golang-migrate/migrate/v4" "github.com/hashicorp/go-hclog" @@ -17,44 +17,48 @@ const ( ) var ( - simpleMigrations = map[string][]byte{ - "1_v0.0.1.up.sql": []byte(defaultQuery), - "1_v0.0.1.down.sql": []byte(defaultQuery), - "3_v0.0.2.up.sql": []byte(defaultQuery), - "3_v0.0.2.down.sql": []byte(defaultQuery), - "2_v0.0.2-beta.up.sql": 
[]byte(defaultQuery), - "2_v0.0.2-beta.down.sql": []byte(defaultQuery), - "4_v0.0.3.up.sql": []byte(defaultQuery), - "4_v0.0.3.down.sql": []byte(defaultQuery), - "5_v0.0.4.up.sql": []byte(emptyQuery), - "5_v0.0.4.down.sql": []byte(defaultQuery), + simpleMigrations = map[string]map[string][]byte{ + "postgres": { + "1_v0.0.1.up.sql": []byte(defaultQuery), + "1_v0.0.1.down.sql": []byte(defaultQuery), + "3_v0.0.2.up.sql": []byte(defaultQuery), + "3_v0.0.2.down.sql": []byte(defaultQuery), + "2_v0.0.2-beta.up.sql": []byte(defaultQuery), + "2_v0.0.2-beta.down.sql": []byte(defaultQuery), + "4_v0.0.3.up.sql": []byte(defaultQuery), + "4_v0.0.3.down.sql": []byte(defaultQuery), + "5_v0.0.4.up.sql": []byte(emptyQuery), + "5_v0.0.4.down.sql": []byte(defaultQuery), + }, } - complexMigrations = map[string][]byte{ - "1_v0.0.2.up.sql": []byte(defaultQuery), - "1_v0.0.2.down.sql": []byte(defaultQuery), - "2_v0.0.3-beta.up.sql": []byte(defaultQuery), - "2_v0.0.3-beta.down.sql": []byte(defaultQuery), - "3_v0.0.3.up.sql": []byte(defaultQuery), - "3_v0.0.3.down.sql": []byte(defaultQuery), - "4_v0.0.6.up.sql": []byte(defaultQuery), - "4_v0.0.6.down.sql": []byte(defaultQuery), - "5_v0.1.4.up.sql": []byte(emptyQuery), - "5_v0.1.4.down.sql": []byte(defaultQuery), + complexMigrations = map[string]map[string][]byte{ + "postgres": { + "1_v0.0.2.up.sql": []byte(defaultQuery), + "1_v0.0.2.down.sql": []byte(defaultQuery), + "2_v0.0.3-beta.up.sql": []byte(defaultQuery), + "2_v0.0.3-beta.down.sql": []byte(defaultQuery), + "3_v0.0.3.up.sql": []byte(defaultQuery), + "3_v0.0.3.down.sql": []byte(defaultQuery), + "4_v0.0.6.up.sql": []byte(defaultQuery), + "4_v0.0.6.down.sql": []byte(defaultQuery), + "5_v0.1.4.up.sql": []byte(emptyQuery), + "5_v0.1.4.down.sql": []byte(defaultQuery), + }, } ) func TestMigrations(t *testing.T) { - m, err := NewMigrator(hclog.Default(), simpleMigrations, "postgres://postgres:pass@localhost:5432/postgres?sslmode=disable", "test") + m, err := New(hclog.Default(), schema.Postgres, simpleMigrations, "postgres://postgres:pass@localhost:5432/postgres?sslmode=disable", "test", nil) assert.Nil(t, err) err = m.DropProvider(context.Background(), nil) assert.Nil(t, err) - err = m.UpgradeProvider("latest") + err = m.UpgradeProvider(Latest) assert.Nil(t, err) - err = m.UpgradeProvider("latest") + err = m.UpgradeProvider(Latest) assert.Equal(t, err, migrate.ErrNoChange) err = m.DowngradeProvider("v0.0.2-beta") @@ -66,7 +70,7 @@ func TestMigrations(t *testing.T) { version, dirty, err := m.Version() assert.Equal(t, []interface{}{"v0.0.3", false, nil}, []interface{}{version, dirty, err}) - err = m.UpgradeProvider("latest") + err = m.UpgradeProvider(Latest) assert.Nil(t, err) version, dirty, err = m.Version() @@ -81,7 +85,7 @@ func TestMigrations(t *testing.T) { // TestMigrationJumps tests an edge case we request a higher version but latest migration is a previous version func TestMigrationJumps(t *testing.T) { - m, err := NewMigrator(hclog.Default(), complexMigrations, "postgres://postgres:pass@localhost:5432/postgres?sslmode=disable", "test") + m, err := New(hclog.Default(), schema.Postgres, complexMigrations, "postgres://postgres:pass@localhost:5432/postgres?sslmode=disable", "test", nil) assert.Nil(t, err) err = m.DropProvider(context.Background(), nil) @@ -95,10 +99,10 @@ func TestMigrationJumps(t *testing.T) { } func TestMultiProviderMigrations(t *testing.T) { - mtest, err := NewMigrator(hclog.Default(), simpleMigrations, "postgres://postgres:pass@localhost:5432/postgres?sslmode=disable", "test") + mtest, err 
:= New(hclog.Default(), schema.Postgres, simpleMigrations, "postgres://postgres:pass@localhost:5432/postgres?sslmode=disable", "test", nil) assert.Nil(t, err) - mtest2, err := NewMigrator(hclog.Default(), simpleMigrations, "postgres://postgres:pass@localhost:5432/postgres?sslmode=disable", "test2") + mtest2, err := New(hclog.Default(), schema.Postgres, simpleMigrations, "postgres://postgres:pass@localhost:5432/postgres?sslmode=disable", "test2", nil) assert.Nil(t, err) err = mtest.DropProvider(context.Background(), nil) @@ -106,9 +110,9 @@ func TestMultiProviderMigrations(t *testing.T) { err = mtest2.DropProvider(context.Background(), nil) assert.Nil(t, err) - err = mtest.UpgradeProvider("latest") + err = mtest.UpgradeProvider(Latest) assert.Nil(t, err) - err = mtest.UpgradeProvider("latest") + err = mtest.UpgradeProvider(Latest) assert.Equal(t, err, migrate.ErrNoChange) version, dirty, err := mtest.Version() assert.Equal(t, []interface{}{"v0.0.4", false, nil}, []interface{}{version, dirty, err}) @@ -130,7 +134,7 @@ func TestMultiProviderMigrations(t *testing.T) { } func TestFindLatestMigration(t *testing.T) { - mtest, err := NewMigrator(hclog.Default(), complexMigrations, "postgres://postgres:pass@localhost:5432/postgres?sslmode=disable", "test") + mtest, err := New(hclog.Default(), schema.Postgres, complexMigrations, "postgres://postgres:pass@localhost:5432/postgres?sslmode=disable", "test", nil) assert.Nil(t, err) mv, err := mtest.FindLatestMigration("v0.0.3") assert.Nil(t, err) @@ -152,16 +156,7 @@ func TestFindLatestMigration(t *testing.T) { assert.Nil(t, err) assert.Equal(t, uint(0), mv) - mv, err = mtest.FindLatestMigration("latest") + mv, err = mtest.FindLatestMigration(Latest) assert.Nil(t, err) assert.Equal(t, uint(5), mv) } - -func TestParseConnectionString(t *testing.T) { - url, err := helpers.ParseConnectionString("postgres://postgres:pass@localhost:5432/postgres?sslmode=disable") - assert.Nil(t, err) - assert.Equal(t, "postgres://postgres:pass@localhost:5432/postgres?sslmode=disable", url.String()) - url, err = helpers.ParseConnectionString("host=localhost user=postgres password=pass database=postgres port=5432 sslmode=disable") - assert.Nil(t, err) - assert.Equal(t, "postgres://postgres:pass@localhost:5432/postgres?sslmode=disable", url.String()) -} diff --git a/migration/table.go b/migration/table.go new file mode 100644 index 00000000..a212ba44 --- /dev/null +++ b/migration/table.go @@ -0,0 +1,250 @@ +package migration + +import ( + "context" + "fmt" + "strconv" + "strings" + + "github.com/cloudquery/cq-provider-sdk/migration/longestcommon" + "github.com/cloudquery/cq-provider-sdk/provider/schema" + "github.com/georgysavva/scany/pgxscan" + "github.com/hashicorp/go-hclog" + "github.com/jackc/pgx/v4/pgxpool" + "github.com/thoas/go-funk" +) + +const ( + queryTableColumns = `SELECT array_agg(column_name::text) AS columns, array_agg(data_type::text) AS types FROM information_schema.columns WHERE table_name = $1 AND table_schema = $2` + addColumnToTable = `ALTER TABLE IF EXISTS %s ADD COLUMN IF NOT EXISTS %v %v;` + dropColumnFromTable = `ALTER TABLE IF EXISTS %s DROP COLUMN IF EXISTS %v;` + renameColumnInTable = `-- ALTER TABLE %s RENAME COLUMN %v TO %v; -- uncomment to activate, remove ADD/DROP COLUMN above and below` // Can't have IF EXISTS here + + dropTable = `DROP TABLE IF EXISTS %s;` +) + +// TableCreator handles creation of schema.Table in database as SQL strings +type TableCreator struct { + log hclog.Logger + dialect schema.Dialect +} + +func NewTableCreator(log 
hclog.Logger, dialect schema.Dialect) *TableCreator { + return &TableCreator{ + log: log, + dialect: dialect, + } +} + +// CreateTable generates CREATE TABLE definitions for the given table and runs them on the given conn +func (m TableCreator) CreateTable(ctx context.Context, conn schema.QueryExecer, t, p *schema.Table) error { + ups, _, err := m.CreateTableDefinitions(ctx, t, p) + if err != nil { + return err + } + for _, sql := range ups { + if err := conn.Exec(ctx, sql); err != nil { + return err + } + } + return nil +} + +// CreateTableDefinitions reads schema.Table and builds the CREATE TABLE and DROP TABLE statements for it, also processing and returning subrelation tables +func (m TableCreator) CreateTableDefinitions(ctx context.Context, t *schema.Table, parent *schema.Table) (up, down []string, err error) { + b := &strings.Builder{} + + // Build a SQL to create a table + b.WriteString("CREATE TABLE IF NOT EXISTS " + strconv.Quote(t.Name) + " (\n") + + for _, c := range m.dialect.Columns(t) { + b.WriteByte('\t') + b.WriteString(strconv.Quote(c.Name) + " " + m.dialect.DBTypeFromType(c.Type)) + if c.CreationOptions.NotNull { + b.WriteString(" NOT NULL") + } + // c.CreationOptions.Unique is handled in the Constraints() call below + b.WriteString(",\n") + } + + cons := m.dialect.Constraints(t, parent) + for i, cn := range cons { + b.WriteByte('\t') + b.WriteString(cn) + + if i < len(cons)-1 { + b.WriteByte(',') + } + + b.WriteByte('\n') + } + + b.WriteString(");") + + up, down = make([]string, 0, 1+len(t.Relations)), make([]string, 0, 1+len(t.Relations)) + up = append(up, b.String()) + up = append(up, m.dialect.Extra(t, parent)...) + + // Create relation tables + for _, r := range t.Relations { + if cr, dr, err := m.CreateTableDefinitions(ctx, r, t); err != nil { + return nil, nil, err + } else { + up = append(up, cr...) + down = append(down, dr...) + } + } + + down = append(down, fmt.Sprintf(dropTable, t.Name)) + + return up, down, nil +} + +// DiffTable reads current table info from the given conn for the given table, and returns ALTER TABLE ADD COLUMN statements for the missing columns. +// Newly appearing tables will return a CREATE TABLE statement. +// Column renames are detected (best effort) and ALTER TABLE RENAME COLUMN statements are generated as comments. +// Table renames or removals are not detected. +// FK changes are not detected. 
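// For illustration (table and column names are invented, and the postgres dialect is assumed to map the
// column type to "text"): if the database still has a column "old_name" while the provider now defines
// "new_name" of the same type, the generated up statements would look roughly like:
//
//	ALTER TABLE IF EXISTS "example_table" ADD COLUMN IF NOT EXISTS "new_name" text; -- could this be "old_name" ?
//	-- ALTER TABLE "example_table" RENAME COLUMN "old_name" TO "new_name"; -- uncomment to activate, remove ADD/DROP COLUMN above and below
//	ALTER TABLE IF EXISTS "example_table" DROP COLUMN IF EXISTS "old_name"; -- could this be "new_name" ? Check the RENAME COLUMN statement above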
+func (m TableCreator) DiffTable(ctx context.Context, conn *pgxpool.Conn, schemaName string, t, parent *schema.Table) (up, down []string, err error) { + rows, err := conn.Query(ctx, queryTableColumns, t.Name, schemaName) + if err != nil { + return nil, nil, err + } + + var existingColumns struct { + Columns []string + Types []string + } + + if err := pgxscan.ScanOne(&existingColumns, rows); err != nil { + return nil, nil, err + } + + if len(existingColumns.Columns) == 0 { + // Table does not exist, CREATE TABLE instead + u, d, err := m.CreateTableDefinitions(ctx, t, parent) + if err != nil { + return nil, nil, fmt.Errorf("CreateTable: %w", err) + } + return u, d, nil + } + + dbColTypes := make(map[string]string, len(existingColumns.Columns)) + for i := range existingColumns.Columns { + dbColTypes[existingColumns.Columns[i]] = strings.ToLower(existingColumns.Types[i]) + } + + columnsToAdd, columnsToRemove := funk.DifferenceString(m.dialect.Columns(t).Names(), existingColumns.Columns) + similars := getSimilars(m.dialect, t, columnsToAdd, columnsToRemove, dbColTypes) + + capSize := len(columnsToAdd) + len(columnsToRemove) // relations not included... + up, down = make([]string, 0, capSize), make([]string, 0, capSize) + downLast := make([]string, 0, capSize) + + for _, d := range columnsToAdd { + m.log.Debug("adding column", "column", d) + col := t.Column(d) + if col == nil { + m.log.Warn("column missing from table, not adding it", "table", t.Name, "column", d) + continue + } + + var notice string + if v, ok := similars[d]; ok { + notice = " -- could this be " + strconv.Quote(v) + " ?" + } + + up = append(up, fmt.Sprintf(addColumnToTable, strconv.Quote(t.Name), strconv.Quote(d), m.dialect.DBTypeFromType(col.Type))+notice) + downLast = append(downLast, fmt.Sprintf(dropColumnFromTable, strconv.Quote(t.Name), strconv.Quote(d))+notice) + + if v, ok := similars[d]; ok { + up = append(up, fmt.Sprintf(renameColumnInTable, strconv.Quote(t.Name), strconv.Quote(v), strconv.Quote(d))) + downLast = append(downLast, fmt.Sprintf(renameColumnInTable, strconv.Quote(t.Name), strconv.Quote(d), strconv.Quote(v))) + } + } + + for _, d := range columnsToRemove { + m.log.Debug("removing column", "column", d) + if col := t.Column(d); col != nil { + m.log.Warn("column still in table, not removing it", "table", t.Name, "column", d) + continue + } + + var notice string + if v, ok := similars[d]; ok { + notice = " -- could this be " + strconv.Quote(v) + " ? Check the RENAME COLUMN statement above" + } + + up = append(up, fmt.Sprintf(dropColumnFromTable, strconv.Quote(t.Name), strconv.Quote(d))+notice) + downLast = append(downLast, fmt.Sprintf(addColumnToTable, strconv.Quote(t.Name), strconv.Quote(d), dbColTypes[d])+notice) + } + + // Do relation tables + for _, r := range t.Relations { + if cr, dr, err := m.DiffTable(ctx, conn, schemaName, r, t); err != nil { + return nil, nil, err + } else { + up = append(up, cr...) + down = append(down, dr...) + } + } + + down = append(down, downLast...) 
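	// The reversal statements for this table's own columns (collected in downLast above) are appended
	// only after the relation tables' statements, so the down script ends up mirroring the up script in
	// reverse order: relation (child) tables are reverted before the parent's column changes.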
+ + return up, down, nil +} + +func getSimilars(dialect schema.Dialect, t *schema.Table, columnsToAdd, columnsToRemove []string, existingColsTypes map[string]string) map[string]string { + upColsByType, downColsByType := make(map[string][]string), make(map[string][]string) + + for _, d := range columnsToAdd { + col := t.Column(d) + if col == nil { + continue + } + upColsByType[dialect.DBTypeFromType(col.Type)] = append(upColsByType[dialect.DBTypeFromType(col.Type)], d) + } + for _, d := range columnsToRemove { + if col := t.Column(d); col != nil { + continue + } + downColsByType[existingColsTypes[d]] = append(downColsByType[existingColsTypes[d]], d) + } + + return findSimilarColumnsWithSameType(upColsByType, downColsByType) +} + +func findSimilarColumnsWithSameType(setA, setB map[string][]string) map[string]string { + const threshold = 4 // minimum common prefix/suffix length + + ret := make(map[string]string) + + for typeKey, alist := range setA { + blist, ok := setB[typeKey] + if !ok { + continue + } + + for _, A := range alist { + for _, B := range blist { + if A == B { + panic("passed equal sets") // should not happen + } + + pref := longestcommon.Prefix([]string{A, B}) + suf := longestcommon.Suffix([]string{A, B}) + if len(suf) > len(pref) { + pref = suf + } + if len(pref) < threshold { + continue + } + + ret[A] = B + ret[B] = A + } + } + } + + return ret +} diff --git a/migration/testbuilder.go b/migration/testbuilder.go new file mode 100644 index 00000000..d608eacc --- /dev/null +++ b/migration/testbuilder.go @@ -0,0 +1,113 @@ +package migration + +import ( + "context" + "os" + "testing" + + "github.com/cloudquery/cq-provider-sdk/database" + "github.com/cloudquery/cq-provider-sdk/migration/migrator" + "github.com/cloudquery/cq-provider-sdk/provider" + "github.com/cloudquery/cq-provider-sdk/provider/schema" + "github.com/golang-migrate/migrate/v4" + "github.com/hashicorp/go-hclog" + "github.com/stretchr/testify/assert" +) + +// RunMigrationsTest helper tests the migration files of the provider using the database (and dialect) specified in CQ_MIGRATION_TEST_DSN +func RunMigrationsTest(t *testing.T, prov *provider.Provider, additionalVersionsToTest []string) { + dsn := os.Getenv("CQ_MIGRATION_TEST_DSN") + if dsn == "" { + t.Skip("CQ_MIGRATION_TEST_DSN not set") + return + } + + doMigrationsTest(t, context.Background(), dsn, prov, additionalVersionsToTest) +} + +func doMigrationsTest(t *testing.T, ctx context.Context, dsn string, prov *provider.Provider, additionalVersionsToTest []string) { + var dialect schema.DialectType + + const ( + setupTSDBChildFnMock = `CREATE OR REPLACE FUNCTION setup_tsdb_child(_table_name text, _column_name text, _parent_table_name text, _parent_column_name text) + RETURNS integer + LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE + AS $BODY$ + BEGIN + return 0; + END; + $BODY$;` + setupTSDBParentFnMock = `CREATE OR REPLACE FUNCTION setup_tsdb_parent(_table_name text) + RETURNS integer + LANGUAGE 'plpgsql' + COST 100 + VOLATILE PARALLEL UNSAFE + AS $BODY$ + DECLARE + result integer; + BEGIN + return 0; + END; + $BODY$;` + ) + + t.Run("Setup", func(t *testing.T) { + pool, _, err := connect(ctx, dsn) + assert.NoError(t, err) + + dialect, dsn, err = database.ParseDialectDSN(dsn) + assert.Nil(t, err) + + conn, err := pool.Acquire(ctx) + assert.NoError(t, err) + defer conn.Release() + + if dialect == schema.TSDB { + // mock history functions... 
in the default schema + for _, sql := range []string{ + setupTSDBChildFnMock, + setupTSDBParentFnMock, + } { + _, err := conn.Exec(ctx, sql) + assert.NoError(t, err) + } + } + assert.NoError(t, err) + }) + + migFiles, err := migrator.ReadMigrationFiles(hclog.L(), prov.Migrations) + assert.NoError(t, err) + + mig, err := migrator.New(hclog.L(), dialect, migFiles, dsn, prov.Name, nil) + assert.NoError(t, err) + + // clean up first... just as a precaution + assert.NoError(t, mig.DropProvider(ctx, prov.ResourceMap)) + + t.Run("Up", func(t *testing.T) { + assert.NoError(t, mig.UpgradeProvider(migrator.Latest)) + }) + t.Run("DowngradeToOldest", func(t *testing.T) { + err := mig.DowngradeProvider(migrator.Initial) + if err == migrate.ErrNoChange { + err = nil + } + assert.NoError(t, err) + }) + t.Run("Down", func(t *testing.T) { + assert.NoError(t, mig.DowngradeProvider(migrator.Down)) + }) + + // Run user supplied versions + for _, v := range additionalVersionsToTest { + t.Run("Version "+v, func(t *testing.T) { + assert.NoError(t, mig.UpgradeProvider(v)) + }) + } + + t.Run("Drop", func(t *testing.T) { + assert.NoError(t, mig.DropProvider(ctx, prov.ResourceMap)) + }) +} diff --git a/provider/docs/doc.go b/provider/docs/doc.go index ddde62f3..b3824902 100644 --- a/provider/docs/doc.go +++ b/provider/docs/doc.go @@ -73,7 +73,7 @@ func renderAllTables(t *schema.Table, outputPath string) error { func renderTable(table *schema.Table, path string) error { t := template.New("").Funcs(map[string]interface{}{ - "pgType": schema.GetPgTypeFromType, + "pgType": schema.PostgresDialect{}.DBTypeFromType, "removeLineBreaks": func(text string) string { return strings.ReplaceAll(text, "\n", " ") }, diff --git a/provider/provider.go b/provider/provider.go index 0fa4b112..2ba336ca 100644 --- a/provider/provider.go +++ b/provider/provider.go @@ -8,6 +8,8 @@ import ( "sync" "sync/atomic" + "github.com/cloudquery/cq-provider-sdk/database" + "github.com/cloudquery/cq-provider-sdk/migration/migrator" "github.com/cloudquery/cq-provider-sdk/provider/schema/diag" "github.com/thoas/go-funk" @@ -58,12 +60,12 @@ type Provider struct { disableDelete bool // Add extra fields to all resources, these fields don't show up in documentation and are used for internal CQ testing. 
extraFields map[string]interface{} - // databaseCreator creates a database based on requested engine - databaseCreator func(ctx context.Context, logger hclog.Logger, dbURL string) (schema.Database, error) + // storageCreator creates a database based on requested engine + storageCreator func(ctx context.Context, logger hclog.Logger, dbURL string) (schema.Storage, error) } func (p *Provider) GetProviderSchema(_ context.Context, _ *cqproto.GetProviderSchemaRequest) (*cqproto.GetProviderSchemaResponse, error) { - m, err := ReadMigrationFiles(p.Logger, p.Migrations) + m, err := migrator.ReadMigrationFiles(p.Logger, p.Migrations) if err != nil { return nil, err } @@ -100,9 +102,9 @@ func (p *Provider) ConfigureProvider(_ context.Context, request *cqproto.Configu return &cqproto.ConfigureProviderResponse{Error: fmt.Sprintf("provider %s logger not defined, make sure to run it with serve", p.Name)}, fmt.Errorf("provider %s logger not defined, make sure to run it with serve", p.Name) } // set database creator - if p.databaseCreator == nil { - p.databaseCreator = func(ctx context.Context, logger hclog.Logger, dbURL string) (schema.Database, error) { - return schema.NewPgDatabase(ctx, logger, dbURL) + if p.storageCreator == nil { + p.storageCreator = func(ctx context.Context, logger hclog.Logger, dbURL string) (schema.Storage, error) { + return database.New(ctx, logger, dbURL) } } @@ -156,7 +158,7 @@ func (p *Provider) FetchResources(ctx context.Context, request *cqproto.FetchRes return err } - conn, err := p.databaseCreator(ctx, p.Logger, p.dbURL) + conn, err := p.storageCreator(ctx, p.Logger, p.dbURL) if err != nil { return fmt.Errorf("failed to connect to database. %w", err) } diff --git a/provider/provider_test.go b/provider/provider_test.go index 38c91d71..e9d93f03 100644 --- a/provider/provider_test.go +++ b/provider/provider_test.go @@ -6,7 +6,7 @@ import ( "testing" "time" - "github.com/cloudquery/cq-provider-sdk/provider/schema/mocks" + "github.com/cloudquery/cq-provider-sdk/provider/schema/mock" "github.com/golang/mock/gomock" "github.com/cloudquery/cq-provider-sdk/cqproto" @@ -283,7 +283,7 @@ type FetchResourceTableTest struct { Name string ExpectedFetchResponses []*cqproto.FetchResourcesResponse ExpectedError error - MockDBFunc func(ctrl *gomock.Controller) *mocks.MockDatabase + MockStorageFunc func(ctrl *gomock.Controller) *mock.MockStorage PartialFetch bool ResourcesToFetch []string } @@ -297,8 +297,8 @@ var fetchCases = []FetchResourceTableTest{ Error: "", }}, ExpectedError: nil, - MockDBFunc: func(ctrl *gomock.Controller) *mocks.MockDatabase { - mockDB := mocks.NewMockDatabase(ctrl) + MockStorageFunc: func(ctrl *gomock.Controller) *mock.MockStorage { + mockDB := mock.NewMockStorage(ctrl) //mockDB.EXPECT().Insert(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) mockDB.EXPECT().Close() return mockDB @@ -314,8 +314,8 @@ var fetchCases = []FetchResourceTableTest{ Error: "bad error", }}, ExpectedError: nil, - MockDBFunc: func(ctrl *gomock.Controller) *mocks.MockDatabase { - mockDB := mocks.NewMockDatabase(ctrl) + MockStorageFunc: func(ctrl *gomock.Controller) *mock.MockStorage { + mockDB := mock.NewMockStorage(ctrl) //mockDB.EXPECT().Insert(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil) mockDB.EXPECT().Close() return mockDB @@ -343,8 +343,8 @@ func TestProvider_FetchResources(t *testing.T) { ctrl := gomock.NewController(t) for _, tt := range fetchCases { t.Run(tt.Name, func(t *testing.T) { - tp.databaseCreator = func(ctx context.Context, logger hclog.Logger, dbURL string) 
(schema.Database, error) { - return tt.MockDBFunc(ctrl), nil + tp.storageCreator = func(ctx context.Context, logger hclog.Logger, dbURL string) (schema.Storage, error) { + return tt.MockStorageFunc(ctrl), nil } err = tp.FetchResources(context.Background(), &cqproto.FetchResourcesRequest{ Resources: tt.ResourcesToFetch, diff --git a/provider/schema/column.go b/provider/schema/column.go index 84f65872..8173add4 100644 --- a/provider/schema/column.go +++ b/provider/schema/column.go @@ -141,8 +141,8 @@ type ColumnResolver func(ctx context.Context, meta ClientMeta, resource *Resourc // ColumnCreationOptions allow modification of how column is defined when table is created type ColumnCreationOptions struct { - Nullable bool - Unique bool + Unique bool + NotNull bool } // Column definition for Table @@ -165,6 +165,8 @@ type Column struct { // to create a reproducible test environment with this column being non nill. For example various error columns and so on IgnoreInTests bool + // internal is true if this column is managed by the SDK + internal bool // meta holds serializable information about the column's resolvers and functions meta *ColumnMeta } @@ -321,3 +323,13 @@ func SetColumnMeta(c Column, m *ColumnMeta) Column { c.meta = m return c } + +type ColumnList []Column + +func (c ColumnList) Names() []string { + ret := make([]string, len(c)) + for i := range c { + ret[i] = c[i].Name + } + return ret +} diff --git a/provider/schema/dialect.go b/provider/schema/dialect.go new file mode 100644 index 00000000..3e9af6ff --- /dev/null +++ b/provider/schema/dialect.go @@ -0,0 +1,279 @@ +package schema + +import ( + "encoding/json" + "fmt" + "reflect" + "strings" + + "github.com/modern-go/reflect2" +) + +type DialectType string + +const ( + Postgres = DialectType("postgres") + TSDB = DialectType("timescale") +) + +func (t DialectType) MigrationDirectory() string { + return string(t) +} + +type Dialect interface { + // PrimaryKeys returns the primary keys of table according to dialect + PrimaryKeys(t *Table) []string + + // Columns returns the columns of table according to dialect + Columns(t *Table) ColumnList + + // Constraints returns constraint definitions for table, according to dialect + Constraints(t, parent *Table) []string + + // Extra returns additional definitions for table outside the CREATE TABLE statement, according to dialect + Extra(t, parent *Table) []string + + // DBTypeFromType returns the database type from the given ValueType. Always lowercase. + DBTypeFromType(v ValueType) string + + // GetResourceValues will return column values from the resource, ready to go in pgx.CopyFromSlice + GetResourceValues(r *Resource) ([]interface{}, error) +} + +var ( + _ Dialect = (*PostgresDialect)(nil) + _ Dialect = (*TSDBDialect)(nil) +) + +// GetDialect creates and returns a dialect specified by the DialectType +func GetDialect(t DialectType) (Dialect, error) { + switch t { + case Postgres: + return PostgresDialect{}, nil + case TSDB: + return TSDBDialect{}, nil + default: + return nil, fmt.Errorf("unknown dialect %q", t) + } +} + +type PostgresDialect struct{} + +func (d PostgresDialect) PrimaryKeys(t *Table) []string { + if len(t.Options.PrimaryKeys) > 0 { + return t.Options.PrimaryKeys + } + return []string{cqIdColumn.Name} +} + +func (d PostgresDialect) Columns(t *Table) ColumnList { + return append([]Column{cqIdColumn, cqMeta}, t.Columns...) 
+} + +func (d PostgresDialect) Constraints(t, parent *Table) []string { + ret := make([]string, 0, len(t.Columns)) + + ret = append(ret, fmt.Sprintf("CONSTRAINT %s_pk PRIMARY KEY(%s)", truncatePKConstraint(t.Name), strings.Join(d.PrimaryKeys(t), ","))) + + for _, c := range d.Columns(t) { + if !c.CreationOptions.Unique { + continue + } + + ret = append(ret, fmt.Sprintf("UNIQUE(%s)", c.Name)) + } + + if parent != nil { + pc := findParentIdColumn(t) + if pc != nil { + ret = append(ret, fmt.Sprintf("FOREIGN KEY (%s) REFERENCES %s(%s) ON DELETE CASCADE", pc.Name, parent.Name, cqIdColumn.Name)) + } + } + + return ret +} + +func (d PostgresDialect) Extra(_, _ *Table) []string { + return nil +} + +func (d PostgresDialect) DBTypeFromType(v ValueType) string { + switch v { + case TypeBool: + return "boolean" + case TypeInt: + return "integer" + case TypeBigInt: + return "bigint" + case TypeSmallInt: + return "smallint" + case TypeFloat: + return "float" + case TypeUUID: + return "uuid" + case TypeString: + return "text" + case TypeJSON: + return "jsonb" + case TypeIntArray: + return "integer[]" + case TypeStringArray: + return "text[]" + case TypeTimestamp: + return "timestamp without time zone" + case TypeByteArray: + return "bytea" + case TypeInvalid: + fallthrough + case TypeInet: + return "inet" + case TypeMacAddr: + return "mac" + case TypeInetArray: + return "inet[]" + case TypeMacAddrArray: + return "mac[]" + case TypeCIDR: + return "cidr" + case TypeCIDRArray: + return "cidr[]" + default: + panic("invalid type") + } +} + +func (d PostgresDialect) GetResourceValues(r *Resource) ([]interface{}, error) { + return doResourceValues(d, r) +} + +type TSDBDialect struct { + pg PostgresDialect +} + +func (d TSDBDialect) PrimaryKeys(t *Table) []string { + return append([]string{cqFetchDateColumn.Name}, d.pg.PrimaryKeys(t)...) +} + +func (d TSDBDialect) Columns(t *Table) ColumnList { + return append([]Column{cqIdColumn, cqMeta, cqFetchDateColumn}, t.Columns...) 
+}
+
+func (d TSDBDialect) Constraints(t, _ *Table) []string {
+	ret := make([]string, 0, len(t.Columns))
+
+	ret = append(ret, fmt.Sprintf("CONSTRAINT %s_pk PRIMARY KEY(%s)", truncatePKConstraint(t.Name), strings.Join(d.PrimaryKeys(t), ",")))
+
+	for _, c := range d.Columns(t) {
+		if !c.CreationOptions.Unique {
+			continue
+		}
+
+		ret = append(ret, fmt.Sprintf("UNIQUE(%s,%s)", cqFetchDateColumn.Name, c.Name))
+	}
+
+	return ret
+}
+
+func (d TSDBDialect) Extra(t, parent *Table) []string {
+	pc := findParentIdColumn(t)
+
+	if parent == nil || pc == nil {
+		return []string{
+			fmt.Sprintf("SELECT setup_tsdb_parent('%s');", t.Name),
+		}
+	}
+
+	return []string{
+		fmt.Sprintf("CREATE INDEX ON %s (%s, %s);", t.Name, cqFetchDateColumn.Name, pc.Name),
+		fmt.Sprintf("SELECT setup_tsdb_child('%s', '%s', '%s', '%s');", t.Name, pc.Name, parent.Name, cqIdColumn.Name),
+	}
+}
+
+func (d TSDBDialect) DBTypeFromType(v ValueType) string {
+	return d.pg.DBTypeFromType(v)
+}
+
+func (d TSDBDialect) GetResourceValues(r *Resource) ([]interface{}, error) {
+	return doResourceValues(d, r)
+}
+
+// doResourceValues collects the resource's values for the dialect's column set, normalizing JSON columns along the way.
+func doResourceValues(dialect Dialect, r *Resource) ([]interface{}, error) {
+	values := make([]interface{}, 0)
+	for _, c := range dialect.Columns(r.table) {
+		v := r.Get(c.Name)
+		if err := c.ValidateType(v); err != nil {
+			return nil, err
+		}
+		if c.Type == TypeJSON {
+			if v == nil {
+				values = append(values, v)
+				continue
+			}
+			if reflect2.TypeOf(v).Kind() == reflect.Map {
+				values = append(values, v)
+				continue
+			}
+			switch data := v.(type) {
+			case map[string]interface{}:
+				values = append(values, data)
+			case string:
+				newV := make(map[string]interface{})
+				err := json.Unmarshal([]byte(data), &newV)
+				if err != nil {
+					return nil, err
+				}
+				values = append(values, newV)
+			case *string:
+				var newV interface{}
+				err := json.Unmarshal([]byte(*data), &newV)
+				if err != nil {
+					return nil, err
+				}
+				values = append(values, newV)
+			case []byte:
+				var newV interface{}
+				err := json.Unmarshal(data, &newV)
+				if err != nil {
+					return nil, err
+				}
+				values = append(values, newV)
+			default:
+				d, err := json.Marshal(data)
+				if err != nil {
+					return nil, err
+				}
+				var newV interface{}
+				err = json.Unmarshal(d, &newV)
+				if err != nil {
+					return nil, err
+				}
+				values = append(values, newV)
+			}
+		} else {
+			values = append(values, v)
+		}
+	}
+	return values, nil
+}
+
+// findParentIdColumn returns the column resolved by schema.ParentIdResolver, if the table has one.
+func findParentIdColumn(t *Table) (ret *Column) {
+	for _, c := range t.Columns {
+		if c.Meta().Resolver != nil && c.Meta().Resolver.Name == "schema.ParentIdResolver" {
+			return &c
+		}
+	}
+
+	return nil
+}
+
+func truncatePKConstraint(name string) string {
+	const (
+		// PostgreSQL limits identifiers to 63 bytes; truncate to 60 so the "_pk"/"_fk" suffix still fits
+		maxTableNamePKConstraint = 60
+	)
+
+	if len(name) > maxTableNamePKConstraint {
+		return name[:maxTableNamePKConstraint]
+	}
+	return name
+}
diff --git a/provider/schema/database_test.go b/provider/schema/dialect_test.go
similarity index 93%
rename from provider/schema/database_test.go
rename to provider/schema/dialect_test.go
index 056da435..13ae4a29 100644
--- a/provider/schema/database_test.go
+++ b/provider/schema/dialect_test.go
@@ -32,8 +32,8 @@ var (
 	resources = []Resource{
 		{
 			data: map[string]interface{}{
-				"test": stringJson,
-				"meta": make(map[string]string),
+				"test":    stringJson,
+				"cq_meta": make(map[string]string),
 			},
 			table: &jsonTestTable,
 		},
@@ -141,12 +141,12 @@ var (
 func TestJsonColumn(t *testing.T) {
 	for _, r := range resources {
-		_, err := getResourceValues(&r)
+		_, err :=
PostgresDialect{}.GetResourceValues(&r) assert.Nil(t, err) } for _, r := range failResources { - _, err := getResourceValues(&r) + _, err := PostgresDialect{}.GetResourceValues(&r) assert.Error(t, err) } } diff --git a/provider/schema/execution.go b/provider/schema/execution.go index 661e8e2e..b6e8aa90 100644 --- a/provider/schema/execution.go +++ b/provider/schema/execution.go @@ -10,6 +10,7 @@ import ( "time" "github.com/cloudquery/cq-provider-sdk/provider/schema/diag" + "github.com/georgysavva/scany/pgxscan" "github.com/modern-go/reflect2" _ "github.com/doug-martin/goqu/v9/dialect/postgres" @@ -24,6 +25,24 @@ import ( // faster than the <1s it won't be deleted by remove stale. const executionJitter = -1 * time.Minute +//go:generate mockgen -package=mock -destination=./mock/mock_storage.go . Storage +type Storage interface { + QueryExecer + + Insert(ctx context.Context, t *Table, instance Resources) error + Delete(ctx context.Context, t *Table, kvFilters []interface{}) error + RemoveStaleData(ctx context.Context, t *Table, executionStart time.Time, kvFilters []interface{}) error + CopyFrom(ctx context.Context, resources Resources, shouldCascade bool, CascadeDeleteFilters map[string]interface{}) error + Close() + Dialect() Dialect +} + +type QueryExecer interface { + pgxscan.Querier + + Exec(ctx context.Context, query string, args ...interface{}) error +} + type ClientMeta interface { Logger() hclog.Logger } @@ -35,7 +54,7 @@ type ExecutionData struct { // Table this execution is associated with Table *Table // Database connection to insert data into - Db Database + Db Storage // Logger associated with this execution Logger hclog.Logger // disableDelete allows disabling deletion of table data for this execution @@ -77,7 +96,7 @@ const ( ) // NewExecutionData Create a new execution data -func NewExecutionData(db Database, logger hclog.Logger, table *Table, disableDelete bool, extraFields map[string]interface{}, partialFetch bool) ExecutionData { +func NewExecutionData(db Storage, logger hclog.Logger, table *Table, disableDelete bool, extraFields map[string]interface{}, partialFetch bool) ExecutionData { return ExecutionData{ Table: table, Db: db, @@ -240,7 +259,7 @@ func (e ExecutionData) callTableResolve(ctx context.Context, client ClientMeta, func (e *ExecutionData) resolveResources(ctx context.Context, meta ClientMeta, parent *Resource, objects []interface{}) error { var resources = make(Resources, 0, len(objects)) for _, o := range objects { - resource := NewResourceData(e.Table, parent, o, e.extraFields) + resource := NewResourceData(e.Db.Dialect(), e.Table, parent, o, e.extraFields, e.executionStart) // Before inserting resolve all table column resolvers if err := e.resolveResourceValues(ctx, meta, resource); err != nil { if partialFetchErr := e.checkPartialFetchError(err, resource, "failed to resolve resource"); partialFetchErr != nil { @@ -316,7 +335,10 @@ func (e *ExecutionData) resolveResourceValues(ctx context.Context, meta ClientMe err = fmt.Errorf("recovered from panic: %s", r) } }() - if err = e.resolveColumns(ctx, meta, resource, resource.table.Columns); err != nil { + + providerCols, internalCols := siftColumns(e.Db.Dialect().Columns(resource.table)) + + if err = e.resolveColumns(ctx, meta, resource, providerCols); err != nil { return fmt.Errorf("resolve columns error: %w", err) } // call PostRowResolver if defined after columns have been resolved @@ -325,8 +347,8 @@ func (e *ExecutionData) resolveResourceValues(ctx context.Context, meta ClientMe return fmt.Errorf("post 
resource resolver failed: %w", err) } } - // Finally, resolve default SDK columns resource - for _, c := range GetDefaultSDKColumns() { + // Finally, resolve columns internal to the SDK + for _, c := range internalCols { if err = c.Resolver(ctx, meta, resource, c); err != nil { return fmt.Errorf("default column %s resolver execution failed: %w", c.Name, err) } @@ -438,3 +460,28 @@ func (e *ExecutionData) checkPartialFetchError(err error, res *Resource, customM e.partialFetchChan <- partialFetchFailure return nil } + +// siftColumns gets a column list and returns a list of provider columns, and another list of internal columns, cqId column being the very last one +func siftColumns(cols []Column) ([]Column, []Column) { + providerCols, internalCols := make([]Column, 0, len(cols)), make([]Column, 0, len(cols)) + + cqIdColIndex := -1 + for i := range cols { + if cols[i].internal { + if cols[i].Name == cqIdColumn.Name { + cqIdColIndex = len(internalCols) + } + + internalCols = append(internalCols, cols[i]) + } else { + providerCols = append(providerCols, cols[i]) + } + } + + // resolve cqId last, as it would need other PKs to be resolved, some might be internal (cq_fetch_date) + if lastIndex := len(internalCols) - 1; cqIdColIndex > -1 && cqIdColIndex != lastIndex { + internalCols[cqIdColIndex], internalCols[lastIndex] = internalCols[lastIndex], internalCols[cqIdColIndex] + } + + return providerCols, internalCols +} diff --git a/provider/schema/execution_test.go b/provider/schema/execution_test.go index 7b936a0e..2b19ec86 100644 --- a/provider/schema/execution_test.go +++ b/provider/schema/execution_test.go @@ -191,6 +191,7 @@ func TestExecutionData_ResolveTable(t *testing.T) { t.Run("failed table resolver", func(t *testing.T) { testTable.Resolver = failingTableResolver mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) exec := NewExecutionData(mockDb, logger, testTable, false, nil, false) _, err := exec.ResolveTable(context.Background(), mockedClient, nil) assert.Error(t, err) @@ -198,6 +199,7 @@ func TestExecutionData_ResolveTable(t *testing.T) { t.Run("failing table column resolver", func(t *testing.T) { mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) execFailing := NewExecutionData(mockDb, logger, testBadColumnResolverTable, false, nil, false) _, err := execFailing.ResolveTable(context.Background(), mockedClient, nil) assert.Error(t, err) @@ -205,6 +207,7 @@ func TestExecutionData_ResolveTable(t *testing.T) { t.Run("ignore error table column resolver w/partialFetch", func(t *testing.T) { mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) mockDb.On("CopyFrom", mock.Anything, mock.Anything, false, mock.Anything).Return(nil) exec := NewExecutionData(mockDb, logger, testIgnoreErrorColumnResolverTable, false, nil, true) var expectedResource *Resource @@ -226,6 +229,7 @@ func TestExecutionData_ResolveTable(t *testing.T) { return nil } mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) mockDb.On("CopyFrom", mock.Anything, mock.Anything, false, mock.Anything).Return(nil) exec := NewExecutionData(mockDb, logger, testBadColumnResolverTable, false, nil, true) _, err := exec.ResolveTable(context.Background(), mockedClient, nil) @@ -235,6 +239,7 @@ func TestExecutionData_ResolveTable(t *testing.T) { t.Run("doing nothing resolver", func(t *testing.T) { mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) exec := NewExecutionData(mockDb, logger, testTable, false, nil, false) 
testTable.Resolver = doNothingResolver _, err := exec.ResolveTable(context.Background(), mockedClient, nil) @@ -243,6 +248,7 @@ func TestExecutionData_ResolveTable(t *testing.T) { t.Run("simple returning resources insert", func(t *testing.T) { mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) mockDb.On("CopyFrom", mock.Anything, mock.Anything, false, mock.Anything).Return(nil) exec := NewExecutionData(mockDb, logger, testTable, false, nil, false) testTable.Resolver = dataReturningResolver @@ -253,6 +259,7 @@ func TestExecutionData_ResolveTable(t *testing.T) { t.Run("simple returning resources insert w/disable_delete", func(t *testing.T) { mockDb := new(DatabaseMock) exec := NewExecutionData(mockDb, logger, testTable, false, nil, false) + mockDb.On("Dialect").Return(PostgresDialect{}) mockDb.On("CopyFrom", mock.Anything, mock.Anything, true, mock.Anything).Return(nil) mockDb.On("RemoveStaleData", mock.Anything, testTable, exec.executionStart, mock.Anything).Return(nil) testTable.Resolver = dataReturningResolver @@ -264,6 +271,7 @@ func TestExecutionData_ResolveTable(t *testing.T) { }) t.Run("simple returning single resources insert", func(t *testing.T) { mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) exec := NewExecutionData(mockDb, logger, testTable, false, nil, false) mockDb.On("CopyFrom", mock.Anything, mock.Anything, false, mock.Anything).Return(nil) testTable.Resolver = dataReturningSingleResolver @@ -272,6 +280,7 @@ func TestExecutionData_ResolveTable(t *testing.T) { }) t.Run("simple returning nil resources insert", func(t *testing.T) { mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) exec := NewExecutionData(mockDb, logger, testTable, false, nil, false) testTable.Resolver = passingNilResolver _, err := exec.ResolveTable(context.Background(), mockedClient, nil) @@ -280,6 +289,7 @@ func TestExecutionData_ResolveTable(t *testing.T) { }) t.Run("check post row resolver", func(t *testing.T) { mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) exec := NewExecutionData(mockDb, logger, testTable, false, nil, false) testTable.Resolver = dataReturningSingleResolver var expectedResource *Resource @@ -302,6 +312,7 @@ func TestExecutionData_ResolveTable(t *testing.T) { t.Run("test resolving with default column values", func(t *testing.T) { mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) execDefault := NewExecutionData(mockDb, logger, testDefaultsTable, false, nil, false) mockDb.On("CopyFrom", mock.Anything, mock.Anything, false, mock.Anything).Return(nil) testDefaultsTable.Resolver = func(ctx context.Context, meta ClientMeta, parent *Resource, res chan<- interface{}) error { @@ -320,6 +331,7 @@ func TestExecutionData_ResolveTable(t *testing.T) { t.Run("disable delete", func(t *testing.T) { mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) exec := NewExecutionData(mockDb, logger, testTable, true, nil, false) //mockDb.On("CopyFrom", mock.Anything, mock.Anything, true, mock.Anything).Return(nil) testTable.Resolver = dataReturningSingleResolver @@ -351,6 +363,7 @@ func TestExecutionData_ResolveTable(t *testing.T) { }) t.Run("disable delete w/deleteFilter", func(t *testing.T) { mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) exec := NewExecutionData(mockDb, logger, testTable, true, map[string]interface{}{"test": 1}, false) //mockDb.On("CopyFrom", mock.Anything, mock.Anything, true, mock.Anything).Return(nil) 
testTable.Resolver = dataReturningSingleResolver @@ -383,6 +396,7 @@ func TestExecutionData_ResolveTable(t *testing.T) { t.Run("disable delete failed copy from", func(t *testing.T) { mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) exec := NewExecutionData(mockDb, logger, testTable, true, nil, false) testTable.Resolver = dataReturningSingleResolver testTable.DeleteFilter = func(meta ClientMeta, r *Resource) []interface{} { @@ -410,6 +424,7 @@ func TestExecutionData_ResolveTable(t *testing.T) { t.Run("always delete with disable delete", func(t *testing.T) { mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) exec := NewExecutionData(mockDb, logger, alwaysDeleteTable, true, nil, false) alwaysDeleteTable.Resolver = dataReturningSingleResolver alwaysDeleteTable.DeleteFilter = func(meta ClientMeta, r *Resource) []interface{} { @@ -432,32 +447,9 @@ func TestExecutionData_ResolveTable(t *testing.T) { assert.Nil(t, err) }) - t.Run("inject fields into execution", func(t *testing.T) { - mockDb := new(DatabaseMock) - exec := NewExecutionData(mockDb, logger, testTable, false, map[string]interface{}{"injected_field": 1}, false) - testTable.Resolver = dataReturningSingleResolver - testTable.DeleteFilter = nil - var expectedResource *Resource - testTable.PostResourceResolver = func(ctx context.Context, meta ClientMeta, parent *Resource) error { - err := parent.Set("name", "other") - assert.Nil(t, err) - expectedResource = parent - return nil - } - mockDb.On("CopyFrom", mock.Anything, mock.Anything, false, map[string]interface{}{"injected_field": 1}).Return(nil) - count, err := exec.ResolveTable(context.Background(), mockedClient, nil) - assert.Equal(t, count, uint64(1)) - assert.Nil(t, err) - assert.Equal(t, expectedResource.data["name"], "other") - assert.Equal(t, 1, expectedResource.extraFields["injected_field"]) - values, err := expectedResource.Values() - assert.Nil(t, err) - assert.Equal(t, []string{"name", "name_no_prefix", "prefix_name", "cq_id", "meta", "injected_field"}, expectedResource.columns) - assert.Equal(t, []interface{}{"other", "name_no_prefix", "prefix_name", expectedResource.cqId, expectedResource.Get("meta"), 1}, values) - }) - t.Run("test partial fetch post resource resolver", func(t *testing.T) { mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) execDefault := NewExecutionData(mockDb, logger, testDefaultsTable, false, nil, true) mockDb.On("CopyFrom", mock.Anything, mock.Anything, false, mock.Anything).Return(nil) testDefaultsTable.Resolver = func(ctx context.Context, meta ClientMeta, parent *Resource, res chan<- interface{}) error { @@ -478,6 +470,7 @@ func TestExecutionData_ResolveTable(t *testing.T) { t.Run("test partial fetch resolver", func(t *testing.T) { mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) execDefault := NewExecutionData(mockDb, logger, testDefaultsTable, false, nil, true) mockDb.On("CopyFrom", mock.Anything, mock.Anything, false, mock.Anything).Return(nil) testDefaultsTable.Resolver = func(ctx context.Context, meta ClientMeta, parent *Resource, res chan<- interface{}) error { @@ -498,6 +491,7 @@ func TestExecutionData_ResolveTable(t *testing.T) { t.Run("test partial fetch resolver panic", func(t *testing.T) { mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) execDefault := NewExecutionData(mockDb, logger, testDefaultsTable, false, nil, true) mockDb.On("CopyFrom", mock.Anything, mock.Anything, false, mock.Anything).Return(nil) 
 		testDefaultsTable.Resolver = func(ctx context.Context, meta ClientMeta, parent *Resource, res chan<- interface{}) error {
@@ -518,6 +512,7 @@ func TestExecutionData_ResolveTable(t *testing.T) {
 
 	t.Run("test partial fetch post resource resolver panic", func(t *testing.T) {
 		mockDb := new(DatabaseMock)
+		mockDb.On("Dialect").Return(PostgresDialect{})
 		execDefault := NewExecutionData(mockDb, logger, testDefaultsTable, false, nil, true)
 		mockDb.On("CopyFrom", mock.Anything, mock.Anything, false, mock.Anything).Return(nil)
 		testDefaultsTable.Resolver = func(ctx context.Context, meta ClientMeta, parent *Resource, res chan<- interface{}) error {
@@ -538,6 +533,7 @@ func TestExecutionData_ResolveTable(t *testing.T) {
 
 	t.Run("test table with multiplex", func(t *testing.T) {
 		mockDb := new(DatabaseMock)
+		mockDb.On("Dialect").Return(PostgresDialect{})
 		execDefault := NewExecutionData(mockDb, logger, testMultiplexTable, false, nil, true)
 		var parentMultiplexCalled, relationMultiplexCalled = false, false
 		testMultiplexTable.Multiplex = func(meta ClientMeta) []ClientMeta {
diff --git a/provider/schema/filters_test.go b/provider/schema/filters_test.go
index 98eb849c..db7bbc15 100644
--- a/provider/schema/filters_test.go
+++ b/provider/schema/filters_test.go
@@ -2,6 +2,7 @@ package schema
 
 import (
 	"testing"
+	"time"
 
 	"github.com/cloudquery/cq-provider-sdk/logging"
 	"github.com/hashicorp/go-hclog"
@@ -20,7 +21,7 @@ func TestDeleteParentId(t *testing.T) {
 	mockedClient.On("Logger", mock.Anything).Return(logger)
 
 	object := testTableStruct{}
-	r := NewResourceData(testTable, nil, object, nil)
+	r := NewResourceData(PostgresDialect{}, testTable, nil, object, nil, time.Now())
 	_ = r.Set("name", "test")
 	assert.Equal(t, []interface{}{"name", r.Id()}, f(mockedClient, r))
diff --git a/provider/schema/meta.go b/provider/schema/meta.go
index dc33c41a..13638db0 100644
--- a/provider/schema/meta.go
+++ b/provider/schema/meta.go
@@ -2,17 +2,32 @@ package schema
 
 import (
 	"context"
+	"encoding/json"
+	"fmt"
 	"time"
 )
 
+// Meta is the JSON payload stored in the cq_meta column.
+type Meta struct {
+	LastUpdate time.Time `json:"last_updated"`
+	FetchId    string    `json:"fetch_id,omitempty"`
+}
+
 var (
-	meta = Column{
-		Name:        "meta",
+	cqMeta = Column{
+		Name:        "cq_meta",
 		Type:        TypeJSON,
 		Description: "Meta column holds fetch information",
 		Resolver: func(ctx context.Context, meta ClientMeta, resource *Resource, c Column) error {
-			return resource.Set(c.Name, map[string]interface{}{"last_updated": time.Now().UTC().Format(time.RFC3339)})
+			mi := Meta{
+				LastUpdate: time.Now().UTC(),
+			}
+			if s, ok := resource.metadata["cq_fetch_id"].(string); ok {
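+				// cq_fetch_id is optional: it is read from the resource metadata (the extra
+				// fields passed to NewResourceData); when absent, FetchId stays empty and is
+				// dropped from the JSON by the omitempty tag.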
+ mi.FetchId = s + } + b, _ := json.Marshal(mi) + return resource.Set(c.Name, b) }, + internal: true, } cqIdColumn = Column{ Name: "cq_id", @@ -29,12 +44,28 @@ var ( return resource.Set(c.Name, resource.Id()) }, CreationOptions: ColumnCreationOptions{ - Unique: true, + Unique: true, + NotNull: true, }, + internal: true, + } + cqFetchDateColumn = Column{ + Name: "cq_fetch_date", + Type: TypeTimestamp, + Description: "Time of fetch for this resource", + Resolver: func(ctx context.Context, meta ClientMeta, resource *Resource, c Column) error { + val, ok := resource.metadata["cq_fetch_date"] + if !ok && !resource.executionStart.IsZero() { + val = resource.executionStart + } + if val == nil { + return fmt.Errorf("zero cq_fetch date") + } + return resource.Set(c.Name, val) + }, + CreationOptions: ColumnCreationOptions{ + NotNull: true, + }, + internal: true, } ) - -// GetDefaultSDKColumns Default columns of the SDK, these columns are added to each table by default -func GetDefaultSDKColumns() []Column { - return []Column{cqIdColumn, meta} -} diff --git a/provider/schema/mocks/mock_database.go b/provider/schema/mock/mock_storage.go similarity index 50% rename from provider/schema/mocks/mock_database.go rename to provider/schema/mock/mock_storage.go index 2cf9a5af..3c606965 100644 --- a/provider/schema/mocks/mock_database.go +++ b/provider/schema/mock/mock_storage.go @@ -1,8 +1,8 @@ // Code generated by MockGen. DO NOT EDIT. -// Source: github.com/cloudquery/cq-provider-sdk/provider/schema (interfaces: Database) +// Source: github.com/cloudquery/cq-provider-sdk/provider/schema (interfaces: Storage) -// Package mocks is a generated GoMock package. -package mocks +// Package mock is a generated GoMock package. +package mock import ( context "context" @@ -14,43 +14,43 @@ import ( pgx "github.com/jackc/pgx/v4" ) -// MockDatabase is a mock of Database interface. -type MockDatabase struct { +// MockStorage is a mock of Storage interface. +type MockStorage struct { ctrl *gomock.Controller - recorder *MockDatabaseMockRecorder + recorder *MockStorageMockRecorder } -// MockDatabaseMockRecorder is the mock recorder for MockDatabase. -type MockDatabaseMockRecorder struct { - mock *MockDatabase +// MockStorageMockRecorder is the mock recorder for MockStorage. +type MockStorageMockRecorder struct { + mock *MockStorage } -// NewMockDatabase creates a new mock instance. -func NewMockDatabase(ctrl *gomock.Controller) *MockDatabase { - mock := &MockDatabase{ctrl: ctrl} - mock.recorder = &MockDatabaseMockRecorder{mock} +// NewMockStorage creates a new mock instance. +func NewMockStorage(ctrl *gomock.Controller) *MockStorage { + mock := &MockStorage{ctrl: ctrl} + mock.recorder = &MockStorageMockRecorder{mock} return mock } // EXPECT returns an object that allows the caller to indicate expected use. -func (m *MockDatabase) EXPECT() *MockDatabaseMockRecorder { +func (m *MockStorage) EXPECT() *MockStorageMockRecorder { return m.recorder } // Close mocks base method. -func (m *MockDatabase) Close() { +func (m *MockStorage) Close() { m.ctrl.T.Helper() m.ctrl.Call(m, "Close") } // Close indicates an expected call of Close. -func (mr *MockDatabaseMockRecorder) Close() *gomock.Call { +func (mr *MockStorageMockRecorder) Close() *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*MockDatabase)(nil).Close)) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Close", reflect.TypeOf((*MockStorage)(nil).Close)) } // CopyFrom mocks base method. 
-func (m *MockDatabase) CopyFrom(arg0 context.Context, arg1 schema.Resources, arg2 bool, arg3 map[string]interface{}) error { +func (m *MockStorage) CopyFrom(arg0 context.Context, arg1 schema.Resources, arg2 bool, arg3 map[string]interface{}) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "CopyFrom", arg0, arg1, arg2, arg3) ret0, _ := ret[0].(error) @@ -58,13 +58,13 @@ func (m *MockDatabase) CopyFrom(arg0 context.Context, arg1 schema.Resources, arg } // CopyFrom indicates an expected call of CopyFrom. -func (mr *MockDatabaseMockRecorder) CopyFrom(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { +func (mr *MockStorageMockRecorder) CopyFrom(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CopyFrom", reflect.TypeOf((*MockDatabase)(nil).CopyFrom), arg0, arg1, arg2, arg3) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "CopyFrom", reflect.TypeOf((*MockStorage)(nil).CopyFrom), arg0, arg1, arg2, arg3) } // Delete mocks base method. -func (m *MockDatabase) Delete(arg0 context.Context, arg1 *schema.Table, arg2 []interface{}) error { +func (m *MockStorage) Delete(arg0 context.Context, arg1 *schema.Table, arg2 []interface{}) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Delete", arg0, arg1, arg2) ret0, _ := ret[0].(error) @@ -72,13 +72,27 @@ func (m *MockDatabase) Delete(arg0 context.Context, arg1 *schema.Table, arg2 []i } // Delete indicates an expected call of Delete. -func (mr *MockDatabaseMockRecorder) Delete(arg0, arg1, arg2 interface{}) *gomock.Call { +func (mr *MockStorageMockRecorder) Delete(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockDatabase)(nil).Delete), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Delete", reflect.TypeOf((*MockStorage)(nil).Delete), arg0, arg1, arg2) +} + +// Dialect mocks base method. +func (m *MockStorage) Dialect() schema.Dialect { + m.ctrl.T.Helper() + ret := m.ctrl.Call(m, "Dialect") + ret0, _ := ret[0].(schema.Dialect) + return ret0 +} + +// Dialect indicates an expected call of Dialect. +func (mr *MockStorageMockRecorder) Dialect() *gomock.Call { + mr.mock.ctrl.T.Helper() + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Dialect", reflect.TypeOf((*MockStorage)(nil).Dialect)) } // Exec mocks base method. -func (m *MockDatabase) Exec(arg0 context.Context, arg1 string, arg2 ...interface{}) error { +func (m *MockStorage) Exec(arg0 context.Context, arg1 string, arg2 ...interface{}) error { m.ctrl.T.Helper() varargs := []interface{}{arg0, arg1} for _, a := range arg2 { @@ -90,14 +104,14 @@ func (m *MockDatabase) Exec(arg0 context.Context, arg1 string, arg2 ...interface } // Exec indicates an expected call of Exec. -func (mr *MockDatabaseMockRecorder) Exec(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { +func (mr *MockStorageMockRecorder) Exec(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() varargs := append([]interface{}{arg0, arg1}, arg2...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Exec", reflect.TypeOf((*MockDatabase)(nil).Exec), varargs...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Exec", reflect.TypeOf((*MockStorage)(nil).Exec), varargs...) } // Insert mocks base method. 
-func (m *MockDatabase) Insert(arg0 context.Context, arg1 *schema.Table, arg2 schema.Resources) error { +func (m *MockStorage) Insert(arg0 context.Context, arg1 *schema.Table, arg2 schema.Resources) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "Insert", arg0, arg1, arg2) ret0, _ := ret[0].(error) @@ -105,13 +119,13 @@ func (m *MockDatabase) Insert(arg0 context.Context, arg1 *schema.Table, arg2 sch } // Insert indicates an expected call of Insert. -func (mr *MockDatabaseMockRecorder) Insert(arg0, arg1, arg2 interface{}) *gomock.Call { +func (mr *MockStorageMockRecorder) Insert(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Insert", reflect.TypeOf((*MockDatabase)(nil).Insert), arg0, arg1, arg2) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Insert", reflect.TypeOf((*MockStorage)(nil).Insert), arg0, arg1, arg2) } // Query mocks base method. -func (m *MockDatabase) Query(arg0 context.Context, arg1 string, arg2 ...interface{}) (pgx.Rows, error) { +func (m *MockStorage) Query(arg0 context.Context, arg1 string, arg2 ...interface{}) (pgx.Rows, error) { m.ctrl.T.Helper() varargs := []interface{}{arg0, arg1} for _, a := range arg2 { @@ -124,14 +138,14 @@ func (m *MockDatabase) Query(arg0 context.Context, arg1 string, arg2 ...interfac } // Query indicates an expected call of Query. -func (mr *MockDatabaseMockRecorder) Query(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { +func (mr *MockStorageMockRecorder) Query(arg0, arg1 interface{}, arg2 ...interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() varargs := append([]interface{}{arg0, arg1}, arg2...) - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Query", reflect.TypeOf((*MockDatabase)(nil).Query), varargs...) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "Query", reflect.TypeOf((*MockStorage)(nil).Query), varargs...) } // RemoveStaleData mocks base method. -func (m *MockDatabase) RemoveStaleData(arg0 context.Context, arg1 *schema.Table, arg2 time.Time, arg3 []interface{}) error { +func (m *MockStorage) RemoveStaleData(arg0 context.Context, arg1 *schema.Table, arg2 time.Time, arg3 []interface{}) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "RemoveStaleData", arg0, arg1, arg2, arg3) ret0, _ := ret[0].(error) @@ -139,7 +153,7 @@ func (m *MockDatabase) RemoveStaleData(arg0 context.Context, arg1 *schema.Table, } // RemoveStaleData indicates an expected call of RemoveStaleData. 
-func (mr *MockDatabaseMockRecorder) RemoveStaleData(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { +func (mr *MockStorageMockRecorder) RemoveStaleData(arg0, arg1, arg2, arg3 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RemoveStaleData", reflect.TypeOf((*MockDatabase)(nil).RemoveStaleData), arg0, arg1, arg2, arg3) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "RemoveStaleData", reflect.TypeOf((*MockStorage)(nil).RemoveStaleData), arg0, arg1, arg2, arg3) } diff --git a/provider/schema/mocks_test.go b/provider/schema/mocks_test.go index 03dbeaaf..57181503 100644 --- a/provider/schema/mocks_test.go +++ b/provider/schema/mocks_test.go @@ -8,7 +8,7 @@ import ( "github.com/stretchr/testify/mock" ) -// DatabaseMock is an autogenerated mock type for the Database type +// DatabaseMock is an autogenerated mock type for the Storage type type DatabaseMock struct { mock.Mock } @@ -116,3 +116,19 @@ func (_m *DatabaseMock) RemoveStaleData(ctx context.Context, t *Table, execution return r0 } + +// Dialect mocks base method. +func (_m *DatabaseMock) Dialect() Dialect { + ret := _m.Called() + + var r0 Dialect + if rf, ok := ret.Get(0).(func() Dialect); ok { + r0 = rf() + } else { + if ret.Get(0) != nil { + r0 = ret.Get(0).(Dialect) + } + } + + return r0 +} diff --git a/provider/schema/resolvers_test.go b/provider/schema/resolvers_test.go index 807adf25..15a08958 100644 --- a/provider/schema/resolvers_test.go +++ b/provider/schema/resolvers_test.go @@ -40,7 +40,7 @@ func TestPathResolver(t *testing.T) { r1 := PathResolver("Inner.Value") r2 := PathResolver("Value") r3 := PathResolver("unexported") - resource := NewResourceData(pathTestTable, nil, testStruct{Inner: innerStruct{Value: "bla"}, Value: 5, unexported: false}, nil) + resource := NewResourceData(PostgresDialect{}, pathTestTable, nil, testStruct{Inner: innerStruct{Value: "bla"}, Value: 5, unexported: false}, nil, time.Now()) err := r1(context.TODO(), nil, resource, Column{Name: "test"}) assert.Nil(t, err) @@ -86,7 +86,7 @@ type testDateStruct struct { func TestDateTimeResolver(t *testing.T) { r1 := DateResolver("Date") - resource := NewResourceData(dateTestTable, nil, testDateStruct{Date: "2011-10-05T14:48:00.000Z"}, nil) + resource := NewResourceData(PostgresDialect{}, dateTestTable, nil, testDateStruct{Date: "2011-10-05T14:48:00.000Z"}, nil, time.Now()) err := r1(context.TODO(), nil, resource, Column{Name: "date"}) assert.Nil(t, err) @@ -94,12 +94,12 @@ func TestDateTimeResolver(t *testing.T) { assert.Equal(t, resource.Get("date"), &t1) r2 := DateResolver("Date", time.RFC822) - resource = NewResourceData(dateTestTable, nil, testDateStruct{Date: "2011-10-05T14:48:00.000Z"}, nil) + resource = NewResourceData(PostgresDialect{}, dateTestTable, nil, testDateStruct{Date: "2011-10-05T14:48:00.000Z"}, nil, time.Now()) err = r2(context.TODO(), nil, resource, Column{Name: "date"}) assert.Error(t, err) - resource = NewResourceData(dateTestTable, nil, testDateStruct{Date: "03 Jan 06 15:04 EST"}, nil) + resource = NewResourceData(PostgresDialect{}, dateTestTable, nil, testDateStruct{Date: "03 Jan 06 15:04 EST"}, nil, time.Now()) err = r2(context.TODO(), nil, resource, Column{Name: "date"}) assert.Nil(t, err) @@ -107,7 +107,7 @@ func TestDateTimeResolver(t *testing.T) { assert.Equal(t, t2.Unix(), resource.Get("date").(*time.Time).UTC().Unix()) r3 := DateResolver("Date", time.RFC822, "2006-01-02") - resource = NewResourceData(dateTestTable, nil, testDateStruct{Date: "2011-10-05"}, nil) + 
resource = NewResourceData(PostgresDialect{}, dateTestTable, nil, testDateStruct{Date: "2011-10-05"}, nil, time.Now()) err = r3(context.TODO(), nil, resource, Column{Name: "date"}) assert.Nil(t, err) @@ -159,7 +159,7 @@ func TestNetResolvers(t *testing.T) { r3 := IPNetResolver("Net") r4 := IPAddressesResolver("IPS") for _, r := range netTests { - resource := NewResourceData(networkTestTable, nil, r, nil) + resource := NewResourceData(PostgresDialect{}, networkTestTable, nil, r, nil, time.Now()) err := r1(context.TODO(), nil, resource, Column{Name: "ip"}) assert.Nil(t, err) err = r2(context.TODO(), nil, resource, Column{Name: "mac"}) @@ -170,7 +170,7 @@ func TestNetResolvers(t *testing.T) { assert.Nil(t, err) } for _, r := range netTestsFails { - resource := NewResourceData(networkTestTable, nil, r, nil) + resource := NewResourceData(PostgresDialect{}, networkTestTable, nil, r, nil, time.Now()) err := r1(context.TODO(), nil, resource, Column{Name: "ip"}) assert.Error(t, err) err = r2(context.TODO(), nil, resource, Column{Name: "mac"}) @@ -216,7 +216,7 @@ func TestTransformersResolvers(t *testing.T) { r3 := IntResolver("String") r4 := IntResolver("Float") r5 := IntResolver("BadFloat") - resource := NewResourceData(TransformersTestTable, nil, testTransformersStruct{Int: 10, Float: 10.2, String: "123", BadFloat: "10,1"}, nil) + resource := NewResourceData(PostgresDialect{}, TransformersTestTable, nil, testTransformersStruct{Int: 10, Float: 10.2, String: "123", BadFloat: "10,1"}, nil, time.Now()) err := r1(context.TODO(), nil, resource, Column{Name: "int_to_string"}) assert.Nil(t, err) assert.Equal(t, resource.Get("int_to_string"), "10") @@ -254,7 +254,7 @@ type testUUIDStruct struct { func TestUUIDResolver(t *testing.T) { r1 := UUIDResolver("UUID") r2 := UUIDResolver("BadUUID") - resource := NewResourceData(UUIDTestTable, nil, testUUIDStruct{UUID: "123e4567-e89b-12d3-a456-426614174000", BadUUID: "123e4567-e89b-12d3-a456-4266141740001"}, nil) + resource := NewResourceData(PostgresDialect{}, UUIDTestTable, nil, testUUIDStruct{UUID: "123e4567-e89b-12d3-a456-426614174000", BadUUID: "123e4567-e89b-12d3-a456-4266141740001"}, nil, time.Now()) err := r1(context.TODO(), nil, resource, Column{Name: "uuid"}) assert.Nil(t, err) diff --git a/provider/schema/resource.go b/provider/schema/resource.go index 93160e4c..2fbacf98 100644 --- a/provider/schema/resource.go +++ b/provider/schema/resource.go @@ -4,11 +4,11 @@ import ( "crypto" "fmt" "strings" + "time" + "github.com/google/uuid" "github.com/mitchellh/hashstructure" "github.com/thoas/go-funk" - - "github.com/google/uuid" ) type Resources []*Resource @@ -21,26 +21,30 @@ type Resource struct { // Set if this is an embedded table Parent *Resource // internal fields - table *Table - data map[string]interface{} - cqId uuid.UUID - extraFields map[string]interface{} - columns []string + table *Table + data map[string]interface{} + cqId uuid.UUID + metadata map[string]interface{} + columns []string + dialect Dialect + executionStart time.Time } -func NewResourceData(t *Table, parent *Resource, item interface{}, extraFields map[string]interface{}) *Resource { +func NewResourceData(dialect Dialect, t *Table, parent *Resource, item interface{}, metadata map[string]interface{}, startTime time.Time) *Resource { return &Resource{ - Item: item, - Parent: parent, - table: t, - data: make(map[string]interface{}), - cqId: uuid.New(), - columns: getResourceColumns(t, extraFields), - extraFields: extraFields, + Item: item, + Parent: parent, + table: t, + data: 
make(map[string]interface{}), + cqId: uuid.New(), + columns: dialect.Columns(t).Names(), + metadata: metadata, + dialect: dialect, + executionStart: startTime, } } func (r *Resource) Keys() []string { - tablePrimKeys := r.table.PrimaryKeys() + tablePrimKeys := r.dialect.PrimaryKeys(r.table) if len(tablePrimKeys) == 0 { return []string{} } @@ -73,16 +77,13 @@ func (r *Resource) Id() uuid.UUID { func (r *Resource) Values() ([]interface{}, error) { values := make([]interface{}, 0) - for _, c := range append(r.table.Columns, GetDefaultSDKColumns()...) { + for _, c := range r.dialect.Columns(r.table) { v := r.Get(c.Name) if err := c.ValidateType(v); err != nil { return nil, err } values = append(values, v) } - for _, v := range r.extraFields { - values = append(values, v) - } return values, nil } @@ -90,13 +91,20 @@ func (r *Resource) GenerateCQId() error { if len(r.table.Options.PrimaryKeys) == 0 { return nil } - objs := make([]interface{}, len(r.table.PrimaryKeys())) - for i, pk := range r.table.PrimaryKeys() { + pks := r.dialect.PrimaryKeys(r.table) + objs := make([]interface{}, 0, len(pks)) + for _, pk := range pks { + if col := r.getColumnByName(pk); col == nil { + return fmt.Errorf("failed to generate cq_id for %s, pk column missing %s", r.table.Name, pk) + } else if col.internal { + continue + } + value := r.Get(pk) if value == nil { return fmt.Errorf("failed to generate cq_id for %s, pk field missing %s", r.table.Name, pk) } - objs[i] = value + objs = append(objs, value) } id, err := hashUUID(objs) if err != nil { @@ -106,8 +114,15 @@ func (r *Resource) GenerateCQId() error { return nil } +func (r *Resource) TableName() string { + if r.table == nil { + return "" + } + return r.table.Name +} + func (r Resource) getColumnByName(column string) *Column { - for _, c := range r.table.Columns { + for _, c := range r.dialect.Columns(r.table) { if strings.Compare(column, c.Name) == 0 { return &c } @@ -130,14 +145,6 @@ func hashUUID(objs interface{}) (uuid.UUID, error) { return uuid.NewSHA1(uuid.Nil, data), nil } -func getResourceColumns(t *Table, fields map[string]interface{}) []string { - columns := t.ColumnNames() - for k := range fields { - columns = append(columns, k) - } - return columns -} - func (rr Resources) GetIds() []uuid.UUID { rids := make([]uuid.UUID, len(rr)) for i, r := range rr { diff --git a/provider/schema/resources_test.go b/provider/schema/resources_test.go index c1e4022b..a40ba546 100644 --- a/provider/schema/resources_test.go +++ b/provider/schema/resources_test.go @@ -3,6 +3,7 @@ package schema import ( "context" "testing" + "time" "github.com/google/uuid" @@ -83,7 +84,7 @@ type zeroValuedStruct struct { // TestResourcePrimaryKey checks resource id generation when primary key is set on table func TestResourcePrimaryKey(t *testing.T) { - r := NewResourceData(testPrimaryKeyTable, nil, nil, nil) + r := NewResourceData(PostgresDialect{}, testPrimaryKeyTable, nil, nil, nil, time.Now()) // save random id randomId := r.cqId // test primary table no pk @@ -103,10 +104,10 @@ func TestResourcePrimaryKey(t *testing.T) { } func TestRelationResourcePrimaryKey(t *testing.T) { - r := NewResourceData(testPrimaryKeyTable, nil, nil, nil) - r2 := NewResourceData(r.table.Relations[0], r, map[string]interface{}{ + r := NewResourceData(PostgresDialect{}, testPrimaryKeyTable, nil, nil, nil, time.Now()) + r2 := NewResourceData(PostgresDialect{}, r.table.Relations[0], r, map[string]interface{}{ "rel_key_str": "test", - }, nil) + }, nil, time.Now()) mockedClient := new(mockedClientMeta) logger := 
logging.New(&hclog.LoggerOptions{ @@ -116,28 +117,31 @@ func TestRelationResourcePrimaryKey(t *testing.T) { }) mockedClient.On("Logger", mock.Anything).Return(logger) - exec := NewExecutionData(nil, logger, r2.table, false, nil, false) + mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) + + exec := NewExecutionData(mockDb, logger, r2.table, false, nil, false) err := exec.resolveResourceValues(context.TODO(), mockedClient, r2) assert.Nil(t, err) v, err := r2.Values() assert.Nil(t, err) - assert.Equal(t, v[1], r2.cqId) + assert.Equal(t, r2.cqId, v[0]) } // TestResourcePrimaryKey checks resource id generation when primary key is set on table func TestResourceAddColumns(t *testing.T) { - r := NewResourceData(testPrimaryKeyTable, nil, nil, map[string]interface{}{"new_field": 1}) - assert.Equal(t, []string{"primary_key_str", "cq_id", "meta", "new_field"}, r.columns) + r := NewResourceData(PostgresDialect{}, testPrimaryKeyTable, nil, nil, nil, time.Now()) + assert.Equal(t, []string{"cq_id", "cq_meta", "primary_key_str"}, r.columns) } func TestResourceColumns(t *testing.T) { - r := NewResourceData(testTable, nil, nil, nil) + r := NewResourceData(PostgresDialect{}, testTable, nil, nil, nil, time.Now()) errf := r.Set("name", "test") assert.Nil(t, errf) assert.Equal(t, r.Get("name"), "test") v, err := r.Values() assert.Nil(t, err) - assert.Equal(t, v, []interface{}{"test", nil, nil, nil, nil}) + assert.Equal(t, []interface{}{nil, nil, "test", nil, nil}, v) // Set invalid type to resource errf = r.Set("name", 5) assert.Nil(t, errf) @@ -154,7 +158,7 @@ func TestResourceColumns(t *testing.T) { assert.Nil(t, errf) v, err = r.Values() assert.Nil(t, err) - assert.Equal(t, v, []interface{}{"test", "name_no_prefix", "prefix_name", nil, nil}) + assert.Equal(t, []interface{}{nil, nil, "test", "name_no_prefix", "prefix_name"}, v) // check non existing col err = r.Set("non_exist_col", "test") @@ -173,62 +177,70 @@ func TestResourceResolveColumns(t *testing.T) { t.Run("test resolve column normal", func(t *testing.T) { object := testTableStruct{} _ = defaults.Set(&object) - r := NewResourceData(testTable, nil, object, nil) - assert.Equal(t, r.cqId, r.Id()) - // columns should be resolved from ColumnResolver functions or default functions logger := logging.New(&hclog.LoggerOptions{ Name: "test_log", Level: hclog.Error, Output: nil, }) - exec := NewExecutionData(nil, logger, testTable, false, nil, false) + + mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) + + exec := NewExecutionData(mockDb, logger, testTable, false, nil, false) + r := NewResourceData(PostgresDialect{}, testTable, nil, object, nil, exec.executionStart) + assert.Equal(t, r.cqId, r.Id()) + // columns should be resolved from ColumnResolver functions or default functions err := exec.resolveColumns(context.TODO(), mockedClient, r, testTable.Columns) assert.Nil(t, err) v, err := r.Values() assert.Nil(t, err) - assert.Equal(t, v, []interface{}{"test", "name_no_prefix", "prefix_name", nil, nil}) + assert.Equal(t, []interface{}{nil, nil, "test", "name_no_prefix", "prefix_name"}, v) }) t.Run("test resolve zero columns", func(t *testing.T) { object := zeroValuedStruct{} _ = defaults.Set(&object) - r := NewResourceData(testZeroTable, nil, object, nil) - assert.Equal(t, r.cqId, r.Id()) - // columns should be resolved from ColumnResolver functions or default functions logger := logging.New(&hclog.LoggerOptions{ Name: "test_log", Level: hclog.Error, Output: nil, }) - exec := NewExecutionData(nil, logger, 
testZeroTable, false, nil, false) + + mockDb := new(DatabaseMock) + mockDb.On("Dialect").Return(PostgresDialect{}) + + exec := NewExecutionData(mockDb, logger, testZeroTable, false, nil, false) + + r := NewResourceData(PostgresDialect{}, testZeroTable, nil, object, nil, exec.executionStart) + assert.Equal(t, r.cqId, r.Id()) + // columns should be resolved from ColumnResolver functions or default functions err := exec.resolveColumns(context.TODO(), mockedClient, r, testZeroTable.Columns) assert.Nil(t, err) v, err := r.Values() assert.Nil(t, err) - assert.Equal(t, []interface{}{false, 0, true}, v[:3]) - assert.Equal(t, 0, *v[4].(*int)) - assert.Equal(t, 5, *v[5].(*int)) - assert.Equal(t, "", v[6].(string)) - assert.Equal(t, nil, v[7]) - assert.Equal(t, nil, v[8]) + assert.Equal(t, nil, v[0]) + assert.Equal(t, nil, v[1]) + assert.Equal(t, []interface{}{false, 0, true}, v[2:5]) + assert.Equal(t, 0, *v[6].(*int)) + assert.Equal(t, 5, *v[7].(*int)) object.ZeroIntPtr = nil - r = NewResourceData(testZeroTable, nil, object, nil) + r = NewResourceData(PostgresDialect{}, testZeroTable, nil, object, nil, time.Now()) err = exec.resolveColumns(context.TODO(), mockedClient, r, testZeroTable.Columns) assert.Nil(t, err) v, _ = r.Values() - assert.Equal(t, nil, v[4]) + assert.Equal(t, nil, v[6]) }) } func TestResources(t *testing.T) { - r1 := NewResourceData(testPrimaryKeyTable, nil, nil, map[string]interface{}{"new_field": 1}) - r2 := NewResourceData(testPrimaryKeyTable, nil, nil, map[string]interface{}{"new_field": 1}) - assert.Equal(t, []string{"primary_key_str", "cq_id", "meta", "new_field"}, r1.columns) - assert.Equal(t, []string{"primary_key_str", "cq_id", "meta", "new_field"}, r2.columns) + r1 := NewResourceData(PostgresDialect{}, testPrimaryKeyTable, nil, nil, nil, time.Now()) + r2 := NewResourceData(PostgresDialect{}, testPrimaryKeyTable, nil, nil, nil, time.Now()) + assert.Equal(t, []string{"cq_id", "cq_meta", "primary_key_str"}, r1.columns) + assert.Equal(t, []string{"cq_id", "cq_meta", "primary_key_str"}, r2.columns) rr := Resources{r1, r2} - assert.Equal(t, []string{"primary_key_str", "cq_id", "meta", "new_field"}, rr.ColumnNames()) + assert.Equal(t, []string{"cq_id", "cq_meta", "primary_key_str"}, rr.ColumnNames()) assert.Equal(t, testPrimaryKeyTable.Name, rr.TableName()) _ = r1.Set("primary_key_str", "test") _ = r2.Set("primary_key_str", "test2") diff --git a/provider/schema/table.go b/provider/schema/table.go index b45f1fad..f394a58c 100644 --- a/provider/schema/table.go +++ b/provider/schema/table.go @@ -24,7 +24,7 @@ type Table struct { // table description Description string // Columns are the set of fields that are part of this table - Columns []Column + Columns ColumnList // Relations are a set of related tables defines Relations []*Table // Resolver is the main entry point to fetching table data and @@ -48,18 +48,6 @@ type Table struct { IgnoreInTests bool } -// ColumnNames returns all collected columns name of table (including all inner embedded columns) -func (t Table) ColumnNames() []string { - var cn = make([]string, len(t.Columns)) - for i, c := range t.Columns { - cn[i] = c.Name - } - for _, c := range GetDefaultSDKColumns() { - cn = append(cn, c.Name) - } - return cn -} - func (t Table) Column(name string) *Column { for _, c := range t.Columns { if c.Name == name { @@ -69,13 +57,6 @@ func (t Table) Column(name string) *Column { return nil } -func (t Table) PrimaryKeys() []string { - if len(t.Options.PrimaryKeys) > 0 { - return t.Options.PrimaryKeys - } - return []string{"cq_id"} 
-} - // TableCreationOptions allow modifying how table is created such as defining primary keys, indices, foreign keys and constraints. type TableCreationOptions struct { // List of columns to set as primary keys. If this is empty, a random unique ID is generated. diff --git a/provider/schema/table_test.go b/provider/schema/table_test.go index 300cf52e..eb486f41 100644 --- a/provider/schema/table_test.go +++ b/provider/schema/table_test.go @@ -18,7 +18,7 @@ var tableDefinitionTestCases = []tableTestCase{ }, }, }, - ExpectedColumnNames: []string{"some_string", "cq_id", "meta"}, + ExpectedColumnNames: []string{"cq_id", "cq_meta", "some_string"}, ExpectedHasId: false, }, { @@ -36,7 +36,7 @@ var tableDefinitionTestCases = []tableTestCase{ }, }, }, - ExpectedColumnNames: []string{"some_string", "some_int", "cq_id", "meta"}, + ExpectedColumnNames: []string{"cq_id", "cq_meta", "some_string", "some_int"}, ExpectedHasId: true, }, { @@ -62,7 +62,7 @@ var tableDefinitionTestCases = []tableTestCase{ }, }, }, - ExpectedColumnNames: []string{"some_string", "some_int", "embedded_some_string", "embedded_some_int", "cq_id", "meta"}, + ExpectedColumnNames: []string{"cq_id", "cq_meta", "some_string", "some_int", "embedded_some_string", "embedded_some_int"}, }, { @@ -84,7 +84,7 @@ var tableDefinitionTestCases = []tableTestCase{ }, }, }, - ExpectedColumnNames: []string{"some_int", "embedded_some_string", "embedded_inner_some_int", "cq_id", "meta"}, + ExpectedColumnNames: []string{"cq_id", "cq_meta", "some_int", "embedded_some_string", "embedded_inner_some_int"}, }, { Name: "simpleTableWithEmbedded", @@ -109,7 +109,7 @@ var tableDefinitionTestCases = []tableTestCase{ }, }, }, - ExpectedColumnNames: []string{"some_string", "some_int", "some_string_no_prefix", "some_int_no_prefix", "cq_id", "meta"}, + ExpectedColumnNames: []string{"cq_id", "cq_meta", "some_string", "some_int", "some_string_no_prefix", "some_int_no_prefix"}, }, } @@ -122,6 +122,6 @@ type tableTestCase struct { func TestTableDefinitionUseCases(t *testing.T) { for _, c := range tableDefinitionTestCases { - assert.Equal(t, c.ExpectedColumnNames, c.Table.ColumnNames(), "failed case %s", c.Name) + assert.Equal(t, c.ExpectedColumnNames, PostgresDialect{}.Columns(c.Table).Names(), "failed case %s", c.Name) } } diff --git a/provider/schema/validators.go b/provider/schema/validators.go index 9d5c8434..3fbca9b3 100644 --- a/provider/schema/validators.go +++ b/provider/schema/validators.go @@ -34,7 +34,7 @@ func validateTableAttributesNameLength(t *Table) error { } // validate table columns - for _, col := range t.ColumnNames() { + for _, col := range t.Columns.Names() { if len(col) > maxColumnName { return fmt.Errorf("column name %s has exceeded max length", col) } diff --git a/provider/table.go b/provider/table.go deleted file mode 100644 index 6e9278a6..00000000 --- a/provider/table.go +++ /dev/null @@ -1,117 +0,0 @@ -package provider - -import ( - "context" - "fmt" - "strconv" - "strings" - - "github.com/huandu/go-sqlbuilder" - - "github.com/cloudquery/cq-provider-sdk/provider/schema" - "github.com/georgysavva/scany/pgxscan" - "github.com/hashicorp/go-hclog" - "github.com/jackc/pgx/v4/pgxpool" - "github.com/thoas/go-funk" -) - -const ( - queryTableColumns = `SELECT array_agg(column_name::text) as columns FROM information_schema.columns WHERE table_name = $1` - addColumnToTable = `ALTER TABLE %s ADD COLUMN IF NOT EXISTS %v %v;` -) - -// TableCreator handles creation of schema.Table in database if they don't exist, and migration of tables if provider was 
upgraded. -type TableCreator struct { - log hclog.Logger -} - -func NewTableCreator(log hclog.Logger) *TableCreator { - return &TableCreator{ - log, - } -} - -func (m TableCreator) CreateTable(ctx context.Context, conn *pgxpool.Conn, t *schema.Table, parent *schema.Table) error { - // Build a SQL to create a table. - ctb := sqlbuilder.CreateTable(t.Name).IfNotExists() - for _, c := range schema.GetDefaultSDKColumns() { - if c.CreationOptions.Unique { - ctb.Define(c.Name, schema.GetPgTypeFromType(c.Type), "unique") - } else { - ctb.Define(c.Name, schema.GetPgTypeFromType(c.Type)) - } - - } - - m.buildColumns(ctb, t.Columns, parent) - ctb.Define(fmt.Sprintf("constraint %s_pk primary key(%s)", schema.TruncateTableConstraint(t.Name), strings.Join(t.PrimaryKeys(), ","))) - sql, _ := ctb.BuildWithFlavor(sqlbuilder.PostgreSQL) - - m.log.Debug("creating table if not exists", "table", t.Name) - if _, err := conn.Exec(ctx, sql); err != nil { - return err - } - - m.log.Debug("migrating table columns if required", "table", t.Name) - if err := m.upgradeTable(ctx, conn, t); err != nil { - return err - } - - if t.Relations == nil { - return nil - } - - m.log.Debug("creating table relations", "table", t.Name) - // Create relation tables - for _, r := range t.Relations { - m.log.Debug("creating table relation", "table", r.Name) - if err := m.CreateTable(ctx, conn, r, t); err != nil { - return err - } - } - return nil -} - -func (m TableCreator) upgradeTable(ctx context.Context, conn *pgxpool.Conn, t *schema.Table) error { - rows, err := conn.Query(ctx, queryTableColumns, t.Name) - if err != nil { - return err - } - - var existingColumns struct { - Columns []string - } - - if err := pgxscan.ScanOne(&existingColumns, rows); err != nil { - return err - } - - columnsToAdd, _ := funk.DifferenceString(t.ColumnNames(), existingColumns.Columns) - for _, d := range columnsToAdd { - m.log.Debug("adding column", "column", d) - col := t.Column(d) - if col == nil { - m.log.Warn("column missing from table, not adding it", "table", t.Name, "column", d) - continue - } - sql, _ := sqlbuilder.Buildf(addColumnToTable, sqlbuilder.Raw(t.Name), sqlbuilder.Raw(d), sqlbuilder.Raw(schema.GetPgTypeFromType(col.Type))).BuildWithFlavor(sqlbuilder.PostgreSQL) - if _, err := conn.Exec(ctx, sql); err != nil { - return err - } - } - return nil - -} - -func (m TableCreator) buildColumns(ctb *sqlbuilder.CreateTableBuilder, cc []schema.Column, parent *schema.Table) { - for _, c := range cc { - defs := []string{strconv.Quote(c.Name), schema.GetPgTypeFromType(c.Type)} - if c.CreationOptions.Unique { - defs = []string{strconv.Quote(c.Name), schema.GetPgTypeFromType(c.Type), "unique"} - } - if strings.HasSuffix(c.Name, "cq_id") && c.Name != "cq_id" { - defs = append(defs, "REFERENCES", fmt.Sprintf("%s(cq_id)", parent.Name), "ON DELETE CASCADE") - } - ctb.Define(defs...) 
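The deleted provider/table.go above hard-codes Postgres behaviour (GetDefaultSDKColumns, GetPgTypeFromType, the cq_id fallback primary key). Its replacement lives in the new migration package and takes the dialect as an explicit argument, as the testing-harness change further below shows. A sketch of the new call pattern only, with parameter types inferred from that usage rather than from the migration package itself:

import (
	"context"

	"github.com/cloudquery/cq-provider-sdk/migration"
	"github.com/cloudquery/cq-provider-sdk/provider/schema"
	"github.com/hashicorp/go-hclog"
)

// createTable is illustrative; the exact signature of
// migration.TableCreator.CreateTable may differ from what is assumed here.
func createTable(ctx context.Context, logger hclog.Logger, conn schema.QueryExecer, t *schema.Table) error {
	tc := migration.NewTableCreator(logger, schema.PostgresDialect{})
	return tc.CreateTable(ctx, conn, t, nil) // nil parent: t is a top-level table
}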
- } -} diff --git a/provider/testing/resource.go b/provider/testing/resource.go index 6f1d7561..12ca87dd 100644 --- a/provider/testing/resource.go +++ b/provider/testing/resource.go @@ -11,13 +11,14 @@ import ( sq "github.com/Masterminds/squirrel" "github.com/cloudquery/cq-provider-sdk/cqproto" + "github.com/cloudquery/cq-provider-sdk/database" + "github.com/cloudquery/cq-provider-sdk/migration" "github.com/cloudquery/cq-provider-sdk/provider" "github.com/cloudquery/cq-provider-sdk/provider/schema" "github.com/cloudquery/cq-provider-sdk/testlog" "github.com/cloudquery/faker/v3" "github.com/georgysavva/scany/pgxscan" "github.com/hashicorp/go-hclog" - "github.com/jackc/pgx/v4/pgxpool" "github.com/stretchr/testify/assert" ) @@ -46,21 +47,15 @@ func TestResource(t *testing.T, resource ResourceTestCase) { // No need for configuration or db connection, get it out of the way first // testTableIdentifiersForProvider(t, resource.Provider) - pool, err := setupDatabase() + conn, err := setupDatabase() if err != nil { t.Fatal(err) } - ctx := context.Background() - conn, err := pool.Acquire(ctx) - if err != nil { - t.Fatal(err) - } - defer conn.Release() l := testlog.New(t) l.SetLevel(hclog.Debug) resource.Provider.Logger = l - tableCreator := provider.NewTableCreator(l) + tableCreator := migration.NewTableCreator(l, schema.PostgresDialect{}) if err := tableCreator.CreateTable(context.Background(), conn, resource.Table, nil); err != nil { assert.FailNow(t, fmt.Sprintf("failed to create tables %s", resource.Table.Name), err) } @@ -74,11 +69,6 @@ func TestResource(t *testing.T, resource ResourceTestCase) { } verifyNoEmptyColumns(t, resource, conn) - - if err := conn.Conn().Close(ctx); err != nil { - t.Fatal(err) - } - } // fetch - fetches resources from the cloud and puts them into database. database config can be specified via DATABASE_URL env variable @@ -116,18 +106,14 @@ func fetch(t *testing.T, resource *ResourceTestCase) error { return nil } -func deleteTables(conn *pgxpool.Conn, table *schema.Table) error { +func deleteTables(conn schema.QueryExecer, table *schema.Table) error { s := sq.Delete(table.Name) sql, args, err := s.ToSql() if err != nil { return err } - _, err = conn.Exec(context.TODO(), sql, args...) - if err != nil { - return err - } - return nil + return conn.Exec(context.TODO(), sql, args...) 
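deleteTables above and setupDatabase below now program against schema.QueryExecer rather than concrete pgx types, and its Exec returns only an error. The interface is defined elsewhere in the SDK; inferred from how this file uses it (the same connection is also handed to verifyNoEmptyColumns as a pgxscan.Querier), it is roughly:

import (
	"context"

	"github.com/georgysavva/scany/pgxscan"
)

// Assumed minimal shape of schema.QueryExecer; the real interface likely
// carries more methods (for example helpers used by the provider's storage layer).
type QueryExecer interface {
	pgxscan.Querier
	Exec(ctx context.Context, query string, args ...interface{}) error
}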
} func verifyNoEmptyColumns(t *testing.T, tc ResourceTestCase, conn pgxscan.Querier) { @@ -221,24 +207,18 @@ func (f *fakeResourceSender) Send(r *cqproto.FetchResourcesResponse) error { var ( dbConnOnce sync.Once - pool *pgxpool.Pool + pool schema.QueryExecer dbErr error ) -func setupDatabase() (*pgxpool.Pool, error) { +func setupDatabase() (schema.QueryExecer, error) { dbConnOnce.Do(func() { - var dbCfg *pgxpool.Config - dbCfg, dbErr = pgxpool.ParseConfig(getEnv("DATABASE_URL", "host=localhost user=postgres password=pass DB.name=postgres port=5432")) + pool, dbErr = database.New(context.Background(), hclog.NewNullLogger(), getEnv("DATABASE_URL", "host=localhost user=postgres password=pass DB.name=postgres port=5432")) if dbErr != nil { return } - ctx := context.Background() - dbCfg.MaxConns = 15 - dbCfg.LazyConnect = true - pool, dbErr = pgxpool.ConnectConfig(ctx, dbCfg) }) return pool, dbErr - } func getEnv(key, fallback string) string { diff --git a/serve/serve.go b/serve/serve.go index 05d16989..7d204c61 100644 --- a/serve/serve.go +++ b/serve/serve.go @@ -6,13 +6,12 @@ import ( "log" "os" - "github.com/cloudquery/cq-provider-sdk/provider" - "github.com/cloudquery/cq-provider-sdk/cqproto" - "google.golang.org/grpc" + "github.com/cloudquery/cq-provider-sdk/provider" "github.com/hashicorp/go-hclog" "github.com/hashicorp/go-plugin" + "google.golang.org/grpc" ) var Handshake = plugin.HandshakeConfig{ @@ -26,7 +25,7 @@ Set CQ_PROVIDER_DEBUG=1 to run plugin in debug mode, for additional info see htt ` type Options struct { - // Required: Name of provider. + // Required: Name of provider Name string // Required: Provider is the actual provider that will be served. @@ -117,10 +116,7 @@ func serve(opts *Options) { plugin.Serve(&plugin.ServeConfig{ HandshakeConfig: Handshake, VersionedPlugins: map[int]plugin.PluginSet{ - 3: { - "provider": &cqproto.CQPlugin{Impl: opts.Provider}, - }, - 2: { + cqproto.V4: { "provider": &cqproto.CQPlugin{Impl: opts.Provider}, }}, GRPCServer: func(opts []grpc.ServerOption) *grpc.Server {
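Keying the versioned plugin set on cqproto.V4 instead of the literals 2 and 3 keeps the protocol version in one place. For context, a provider binary reaches this code through the package's exported entrypoint; the Serve wrapper name and the placeholder provider below are assumptions, as neither is shown in this diff:

package main

import (
	"github.com/cloudquery/cq-provider-sdk/provider"
	"github.com/cloudquery/cq-provider-sdk/serve"
)

func main() {
	// Stand-in for a fully configured provider (resource map, config func, etc.).
	p := &provider.Provider{Name: "example"}
	serve.Serve(&serve.Options{
		Name:     "example", // Required: name of provider
		Provider: p,
	})
}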