diff --git a/handler/account/account.go b/handler/account/account.go index 640cdf2..e86b988 100644 --- a/handler/account/account.go +++ b/handler/account/account.go @@ -543,14 +543,15 @@ type EventOut struct { type MemberType string const ( - MemberTypeAdmin MemberType = "admin" - MemberTypeDeveloper MemberType = "developer" - MemberTypeOperator MemberType = "operator" - MemberTypeReadOnly MemberType = "read_only" + MemberTypeAdmin MemberType = "admin" + MemberTypeDeveloper MemberType = "developer" + MemberTypeOperator MemberType = "operator" + MemberTypeProjectreadPermissions MemberType = "project:read_permissions" + MemberTypeReadOnly MemberType = "read_only" ) func MemberTypeChoices() []string { - return []string{"admin", "developer", "operator", "read_only"} + return []string{"admin", "developer", "operator", "project:read_permissions", "read_only"} } type OrderByType string @@ -632,14 +633,15 @@ type TeamOutAlt struct { type TeamType string const ( - TeamTypeAdmin TeamType = "admin" - TeamTypeOperator TeamType = "operator" - TeamTypeDeveloper TeamType = "developer" - TeamTypeReadOnly TeamType = "read_only" + TeamTypeAdmin TeamType = "admin" + TeamTypeOperator TeamType = "operator" + TeamTypeDeveloper TeamType = "developer" + TeamTypeReadOnly TeamType = "read_only" + TeamTypeProjectreadPermissions TeamType = "project:read_permissions" ) func TeamTypeChoices() []string { - return []string{"admin", "operator", "developer", "read_only"} + return []string{"admin", "operator", "developer", "read_only", "project:read_permissions"} } type TechEmailOut struct { diff --git a/handler/accountteam/accountteam.go b/handler/accountteam/accountteam.go index 2616fba..f4b1e6c 100644 --- a/handler/accountteam/accountteam.go +++ b/handler/accountteam/accountteam.go @@ -244,14 +244,15 @@ type TeamOut struct { type TeamType string const ( - TeamTypeAdmin TeamType = "admin" - TeamTypeOperator TeamType = "operator" - TeamTypeDeveloper TeamType = "developer" - TeamTypeReadOnly 
TeamType = "read_only" + TeamTypeAdmin TeamType = "admin" + TeamTypeOperator TeamType = "operator" + TeamTypeDeveloper TeamType = "developer" + TeamTypeReadOnly TeamType = "read_only" + TeamTypeProjectreadPermissions TeamType = "project:read_permissions" ) func TeamTypeChoices() []string { - return []string{"admin", "operator", "developer", "read_only"} + return []string{"admin", "operator", "developer", "read_only", "project:read_permissions"} } // accountTeamCreateOut AccountTeamCreateResponse diff --git a/handler/billinggroup/billinggroup.go b/handler/billinggroup/billinggroup.go index 39ce55f..33b3d98 100644 --- a/handler/billinggroup/billinggroup.go +++ b/handler/billinggroup/billinggroup.go @@ -623,6 +623,7 @@ const ( ServiceTypeThanoscompactor ServiceType = "thanoscompactor" ServiceTypeThanosquery ServiceType = "thanosquery" ServiceTypeThanosreceiver ServiceType = "thanosreceiver" + ServiceTypeThanosruler ServiceType = "thanosruler" ServiceTypeThanosstore ServiceType = "thanosstore" ServiceTypeValkey ServiceType = "valkey" ServiceTypeVector ServiceType = "vector" @@ -631,7 +632,7 @@ const ( ) func ServiceTypeChoices() []string { - return []string{"alertmanager", "cassandra", "clickhouse", "dragonfly", "elasticsearch", "flink", "grafana", "influxdb", "kafka", "kafka_connect", "kafka_mirrormaker", "m3aggregator", "m3db", "mysql", "opensearch", "parca", "pg", "redis", "stresstester", "sw", "thanos", "thanoscompactor", "thanosquery", "thanosreceiver", "thanosstore", "valkey", "vector", "vmalert", "warpstream"} + return []string{"alertmanager", "cassandra", "clickhouse", "dragonfly", "elasticsearch", "flink", "grafana", "influxdb", "kafka", "kafka_connect", "kafka_mirrormaker", "m3aggregator", "m3db", "mysql", "opensearch", "parca", "pg", "redis", "stresstester", "sw", "thanos", "thanoscompactor", "thanosquery", "thanosreceiver", "thanosruler", "thanosstore", "valkey", "vector", "vmalert", "warpstream"} } // billingGroupCreateOut BillingGroupCreateResponse diff 
--git a/handler/kafkamirrormaker/kafkamirrormaker.go b/handler/kafkamirrormaker/kafkamirrormaker.go index 9504b46..75430b5 100644 --- a/handler/kafkamirrormaker/kafkamirrormaker.go +++ b/handler/kafkamirrormaker/kafkamirrormaker.go @@ -124,8 +124,9 @@ type ReplicationFlowOut struct { SyncGroupOffsetsEnabled *bool `json:"sync_group_offsets_enabled,omitempty"` // Sync consumer group offsets SyncGroupOffsetsIntervalSeconds *int `json:"sync_group_offsets_interval_seconds,omitempty"` // Frequency of consumer group offset sync TargetCluster string `json:"target_cluster"` // Target cluster alias - Topics []string `json:"topics,omitempty"` // List of topics and/or regular expressions to replicate. Topic names and regular expressions that match topic names that should be replicated. MirrorMaker will replicate these topics if they are not matched by "topics.blacklist". Currently defaults to [".*"]. + Topics []string `json:"topics,omitempty"` // List of topics and/or regular expressions to replicate. Topic names and regular expressions that match topic names that should be replicated. MirrorMaker will replicate these topics if they are not matched by "topics.exclude". Currently defaults to [".*"]. TopicsBlacklist []string `json:"topics.blacklist,omitempty"` // Topic or topic regular expression matching topic + TopicsExclude []string `json:"topics.exclude,omitempty"` // Topic or topic regular expression matching topic } type ReplicationPolicyClassType string @@ -152,8 +153,9 @@ type ServiceKafkaMirrorMakerCreateReplicationFlowIn struct { SyncGroupOffsetsEnabled *bool `json:"sync_group_offsets_enabled,omitempty"` // Sync consumer group offsets SyncGroupOffsetsIntervalSeconds *int `json:"sync_group_offsets_interval_seconds,omitempty"` // Frequency of consumer group offset sync TargetCluster string `json:"target_cluster"` // Target cluster alias - Topics *[]string `json:"topics,omitempty"` // List of topics and/or regular expressions to replicate. 
Topic names and regular expressions that match topic names that should be replicated. MirrorMaker will replicate these topics if they are not matched by "topics.blacklist". Currently defaults to [".*"]. + Topics *[]string `json:"topics,omitempty"` // List of topics and/or regular expressions to replicate. Topic names and regular expressions that match topic names that should be replicated. MirrorMaker will replicate these topics if they are not matched by "topics.exclude". Currently defaults to [".*"]. TopicsBlacklist *[]string `json:"topics.blacklist,omitempty"` // Topic or topic regular expression matching topic + TopicsExclude *[]string `json:"topics.exclude,omitempty"` // Topic or topic regular expression matching topic } // ServiceKafkaMirrorMakerGetReplicationFlowOut Replication flow @@ -171,8 +173,9 @@ type ServiceKafkaMirrorMakerGetReplicationFlowOut struct { SyncGroupOffsetsEnabled *bool `json:"sync_group_offsets_enabled,omitempty"` // Sync consumer group offsets SyncGroupOffsetsIntervalSeconds *int `json:"sync_group_offsets_interval_seconds,omitempty"` // Frequency of consumer group offset sync TargetCluster string `json:"target_cluster"` // Target cluster alias - Topics []string `json:"topics,omitempty"` // List of topics and/or regular expressions to replicate. Topic names and regular expressions that match topic names that should be replicated. MirrorMaker will replicate these topics if they are not matched by "topics.blacklist". Currently defaults to [".*"]. + Topics []string `json:"topics,omitempty"` // List of topics and/or regular expressions to replicate. Topic names and regular expressions that match topic names that should be replicated. MirrorMaker will replicate these topics if they are not matched by "topics.exclude". Currently defaults to [".*"]. 
TopicsBlacklist []string `json:"topics.blacklist,omitempty"` // Topic or topic regular expression matching topic + TopicsExclude []string `json:"topics.exclude,omitempty"` // Topic or topic regular expression matching topic } // ServiceKafkaMirrorMakerPatchReplicationFlowIn ServiceKafkaMirrorMakerPatchReplicationFlowRequestBody @@ -187,8 +190,9 @@ type ServiceKafkaMirrorMakerPatchReplicationFlowIn struct { ReplicationPolicyClass ReplicationPolicyClassType `json:"replication_policy_class,omitempty"` // Replication policy class SyncGroupOffsetsEnabled *bool `json:"sync_group_offsets_enabled,omitempty"` // Sync consumer group offsets SyncGroupOffsetsIntervalSeconds *int `json:"sync_group_offsets_interval_seconds,omitempty"` // Frequency of consumer group offset sync - Topics *[]string `json:"topics,omitempty"` // List of topics and/or regular expressions to replicate. Topic names and regular expressions that match topic names that should be replicated. MirrorMaker will replicate these topics if they are not matched by "topics.blacklist". Currently defaults to [".*"]. + Topics *[]string `json:"topics,omitempty"` // List of topics and/or regular expressions to replicate. Topic names and regular expressions that match topic names that should be replicated. MirrorMaker will replicate these topics if they are not matched by "topics.exclude". Currently defaults to [".*"]. 
TopicsBlacklist *[]string `json:"topics.blacklist,omitempty"` // Topic or topic regular expression matching topic + TopicsExclude *[]string `json:"topics.exclude,omitempty"` // Topic or topic regular expression matching topic } // ServiceKafkaMirrorMakerPatchReplicationFlowOut Replication flow @@ -206,8 +210,9 @@ type ServiceKafkaMirrorMakerPatchReplicationFlowOut struct { SyncGroupOffsetsEnabled *bool `json:"sync_group_offsets_enabled,omitempty"` // Sync consumer group offsets SyncGroupOffsetsIntervalSeconds *int `json:"sync_group_offsets_interval_seconds,omitempty"` // Frequency of consumer group offset sync TargetCluster string `json:"target_cluster"` // Target cluster alias - Topics []string `json:"topics,omitempty"` // List of topics and/or regular expressions to replicate. Topic names and regular expressions that match topic names that should be replicated. MirrorMaker will replicate these topics if they are not matched by "topics.blacklist". Currently defaults to [".*"]. + Topics []string `json:"topics,omitempty"` // List of topics and/or regular expressions to replicate. Topic names and regular expressions that match topic names that should be replicated. MirrorMaker will replicate these topics if they are not matched by "topics.exclude". Currently defaults to [".*"]. 
TopicsBlacklist []string `json:"topics.blacklist,omitempty"` // Topic or topic regular expression matching topic + TopicsExclude []string `json:"topics.exclude,omitempty"` // Topic or topic regular expression matching topic } // serviceKafkaMirrorMakerGetReplicationFlowOut ServiceKafkaMirrorMakerGetReplicationFlowResponse diff --git a/handler/kafkatopic/kafkatopic.go b/handler/kafkatopic/kafkatopic.go index ca0eb71..d86b614 100644 --- a/handler/kafkatopic/kafkatopic.go +++ b/handler/kafkatopic/kafkatopic.go @@ -129,9 +129,9 @@ func (h *KafkaTopicHandler) ServiceKafkaTopicUpdate(ctx context.Context, project // CleanupPolicyOut cleanup.policy value, source and synonyms type CleanupPolicyOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *string `json:"value,omitempty"` // cleanup.policy + Value string `json:"value"` // cleanup.policy } type CleanupPolicyType string @@ -162,9 +162,9 @@ func CompressionTypeChoices() []string { // CompressionTypeOut compression.type value, source and synonyms type CompressionTypeOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value CompressionTypeValue `json:"value,omitempty"` // compression.type + Value CompressionTypeValue `json:"value"` // compression.type } type CompressionTypeValue string @@ -249,30 +249,30 @@ type ConsumerGroupOut struct { // DeleteRetentionMsOut delete.retention.ms value, source and synonyms type DeleteRetentionMsOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` //
Configuration synonyms - Value *int `json:"value,omitempty"` // delete.retention.ms + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value int `json:"value"` // delete.retention.ms } // FileDeleteDelayMsOut file.delete.delay.ms value, source and synonyms type FileDeleteDelayMsOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *int `json:"value,omitempty"` // file.delete.delay.ms + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value int `json:"value"` // file.delete.delay.ms } // FlushMessagesOut flush.messages value, source and synonyms type FlushMessagesOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *int `json:"value,omitempty"` // flush.messages + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value int `json:"value"` // flush.messages } // FlushMsOut flush.ms value, source and synonyms type FlushMsOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *int `json:"value,omitempty"` // flush.ms + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value int `json:"value"` // flush.ms } type FormatType string @@ -290,51 +290,51 @@ func FormatTypeChoices() []string { // 
IndexIntervalBytesOut index.interval.bytes value, source and synonyms type IndexIntervalBytesOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *int `json:"value,omitempty"` // index.interval.bytes + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value int `json:"value"` // index.interval.bytes } // LocalRetentionBytesOut local.retention.bytes value, source and synonyms type LocalRetentionBytesOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *int `json:"value,omitempty"` // local.retention.ms + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value int `json:"value"` // local.retention.bytes } // LocalRetentionMsOut local.retention.ms value, source and synonyms type LocalRetentionMsOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *int `json:"value,omitempty"` // local.retention.bytes + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value int `json:"value"` // local.retention.ms } // MaxCompactionLagMsOut max.compaction.lag.ms value, source and synonyms type MaxCompactionLagMsOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *int 
`json:"value,omitempty"` // max.compaction.lag.ms + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value int `json:"value"` // max.compaction.lag.ms } // MaxMessageBytesOut max.message.bytes value, source and synonyms type MaxMessageBytesOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *int `json:"value,omitempty"` // max.message.bytes + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value int `json:"value"` // max.message.bytes } // MessageDownconversionEnableOut message.downconversion.enable value, source and synonyms type MessageDownconversionEnableOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *bool `json:"value,omitempty"` // message.downconversion.enable + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAltAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value bool `json:"value"` // message.downconversion.enable } // MessageFormatVersionOut message.format.version value, source and synonyms type MessageFormatVersionOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value MessageFormatVersionType `json:"value,omitempty"` // message.format.version + Value MessageFormatVersionType `json:"value"` // message.format.version } type MessageFormatVersionType string 
@@ -436,9 +436,9 @@ type MessageOut struct { // MessageTimestampDifferenceMaxMsOut message.timestamp.difference.max.ms value, source and synonyms type MessageTimestampDifferenceMaxMsOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *int `json:"value,omitempty"` // message.timestamp.difference.max.ms + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value int `json:"value"` // message.timestamp.difference.max.ms } type MessageTimestampType string @@ -453,9 +453,9 @@ func MessageTimestampTypeChoices() []string { // MessageTimestampTypeOut message.timestamp.type value, source and synonyms type MessageTimestampTypeOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value MessageTimestampTypeValue `json:"value,omitempty"` // message.timestamp.type + Value MessageTimestampTypeValue `json:"value"` // message.timestamp.type } type MessageTimestampTypeValue string @@ -470,23 +470,23 @@ func MessageTimestampTypeValueChoices() []string { // MinCleanableDirtyRatioOut min.cleanable.dirty.ratio value, source and synonyms type MinCleanableDirtyRatioOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *float64 `json:"value,omitempty"` // min.cleanable.dirty.ratio + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAltAltAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value float64 `json:"value"` // 
min.cleanable.dirty.ratio } // MinCompactionLagMsOut min.compaction.lag.ms value, source and synonyms type MinCompactionLagMsOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *int `json:"value,omitempty"` // min.compaction.lag.ms + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value int `json:"value"` // min.compaction.lag.ms } // MinInsyncReplicasOut min.insync.replicas value, source and synonyms type MinInsyncReplicasOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *int `json:"value,omitempty"` // min.insync.replicas + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value int `json:"value"` // min.insync.replicas } type OffsetOut struct { Error *string `json:"error,omitempty"` // An error message describing why the operation failed, or null if it succeeded @@ -506,9 +506,9 @@ type PartitionOut struct { // PreallocateOut preallocate value, source and synonyms type PreallocateOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *bool `json:"value,omitempty"` // preallocate + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAltAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value bool `json:"value"` // preallocate } type RecordIn struct { Key *map[string]any `json:"key,omitempty"` // Key for the produced record @@ -518,51 +518,51 @@ type 
RecordIn struct { // RemoteStorageEnableOut remote.storage.enable value, source and synonyms type RemoteStorageEnableOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *bool `json:"value,omitempty"` // remote.storage.enable + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAltAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value bool `json:"value"` // remote.storage.enable } // RetentionBytesOut retention.bytes value, source and synonyms type RetentionBytesOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *int `json:"value,omitempty"` // retention.bytes + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value int `json:"value"` // retention.bytes } // RetentionMsOut retention.ms value, source and synonyms type RetentionMsOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *int `json:"value,omitempty"` // retention.ms + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value int `json:"value"` // retention.ms } // SegmentBytesOut segment.bytes value, source and synonyms type SegmentBytesOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *int `json:"value,omitempty"` // segment.bytes + Source SourceType 
`json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value int `json:"value"` // segment.bytes } // SegmentIndexBytesOut segment.index.bytes value, source and synonyms type SegmentIndexBytesOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *int `json:"value,omitempty"` // segment.index.bytes + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value int `json:"value"` // segment.index.bytes } // SegmentJitterMsOut segment.jitter.ms value, source and synonyms type SegmentJitterMsOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *int `json:"value,omitempty"` // segment.jitter.ms + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value int `json:"value"` // segment.jitter.ms } // SegmentMsOut segment.ms value, source and synonyms type SegmentMsOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *int `json:"value,omitempty"` // segment.ms + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value int `json:"value"` // segment.ms } // ServiceKafkaTopicCreateIn ServiceKafkaTopicCreateRequestBody @@ -650,9 +650,24 @@ func SourceTypeChoices() []string { } type SynonymOut struct { - Name *string `json:"name,omitempty"` 
// Synonym name - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Value *bool `json:"value,omitempty"` // Synonym value + Name string `json:"name"` // Synonym name + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Value string `json:"value"` // Synonym value +} +type SynonymOutAlt struct { + Name string `json:"name"` // Synonym name + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Value int `json:"value"` // Synonym value +} +type SynonymOutAltAlt struct { + Name string `json:"name"` // Synonym name + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Value bool `json:"value"` // Synonym value +} +type SynonymOutAltAltAlt struct { + Name string `json:"name"` // Synonym name + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Value float64 `json:"value"` // Synonym value } type TagIn struct { Key string `json:"key"` // Tag key @@ -690,9 +705,9 @@ func TopicStateTypeChoices() []string { // UncleanLeaderElectionEnableOut unclean.leader.election.enable value, source and synonyms type UncleanLeaderElectionEnableOut struct { - Source SourceType `json:"source,omitempty"` // Source of the Kafka topic configuration entry - Synonyms []SynonymOut `json:"synonyms,omitempty"` // Configuration synonyms - Value *bool `json:"value,omitempty"` // unclean.leader.election.enable + Source SourceType `json:"source"` // Source of the Kafka topic configuration entry + Synonyms []SynonymOutAltAlt `json:"synonyms,omitempty"` // Configuration synonyms + Value bool `json:"value"` // unclean.leader.election.enable } // serviceKafkaTopicGetOut ServiceKafkaTopicGetResponse diff --git a/handler/organization/organization.go b/handler/organization/organization.go index b588ad6..3c84ca6 100644 --- a/handler/organization/organization.go +++ b/handler/organization/organization.go @@ -327,12 +327,11 @@ func 
PrincipalTypeChoices() []string { type ResourceType string const ( - ResourceTypeAccount ResourceType = "account" ResourceTypeProject ResourceType = "project" ) func ResourceTypeChoices() []string { - return []string{"account", "project"} + return []string{"project"} } type TierType string diff --git a/handler/project/project.go b/handler/project/project.go index b168a26..11b2b3b 100644 --- a/handler/project/project.go +++ b/handler/project/project.go @@ -439,14 +439,15 @@ type AlertOut struct { type AnyType string const ( - AnyTypeAdmin AnyType = "admin" - AnyTypeDeveloper AnyType = "developer" - AnyTypeOperator AnyType = "operator" - AnyTypeReadOnly AnyType = "read_only" + AnyTypeAdmin AnyType = "admin" + AnyTypeDeveloper AnyType = "developer" + AnyTypeOperator AnyType = "operator" + AnyTypeProjectreadPermissions AnyType = "project:read_permissions" + AnyTypeReadOnly AnyType = "read_only" ) func AnyTypeChoices() []string { - return []string{"admin", "developer", "operator", "read_only"} + return []string{"admin", "developer", "operator", "project:read_permissions", "read_only"} } // BackupConfigOut Backup configuration for this service plan @@ -621,14 +622,15 @@ type KafkaOut struct { type MemberType string const ( - MemberTypeAdmin MemberType = "admin" - MemberTypeDeveloper MemberType = "developer" - MemberTypeOperator MemberType = "operator" - MemberTypeReadOnly MemberType = "read_only" + MemberTypeAdmin MemberType = "admin" + MemberTypeDeveloper MemberType = "developer" + MemberTypeOperator MemberType = "operator" + MemberTypeProjectreadPermissions MemberType = "project:read_permissions" + MemberTypeReadOnly MemberType = "read_only" ) func MemberTypeChoices() []string { - return []string{"admin", "developer", "operator", "read_only"} + return []string{"admin", "developer", "operator", "project:read_permissions", "read_only"} } // MysqlOut Service type information diff --git a/handler/service/service.go b/handler/service/service.go index a5fa53e..18040ac 100644 
--- a/handler/service/service.go +++ b/handler/service/service.go @@ -1079,6 +1079,7 @@ const ( IntegrationTypeDashboard IntegrationType = "dashboard" IntegrationTypeDatadog IntegrationType = "datadog" IntegrationTypeDatasource IntegrationType = "datasource" + IntegrationTypeDisasterRecovery IntegrationType = "disaster_recovery" IntegrationTypeExternalAwsCloudwatchLogs IntegrationType = "external_aws_cloudwatch_logs" IntegrationTypeExternalAwsCloudwatchMetrics IntegrationType = "external_aws_cloudwatch_metrics" IntegrationTypeExternalElasticsearchLogs IntegrationType = "external_elasticsearch_logs" @@ -1109,13 +1110,14 @@ const ( IntegrationTypeThanosMigrate IntegrationType = "thanos_migrate" IntegrationTypeThanoscompactor IntegrationType = "thanoscompactor" IntegrationTypeThanosquery IntegrationType = "thanosquery" + IntegrationTypeThanosruler IntegrationType = "thanosruler" IntegrationTypeThanosstore IntegrationType = "thanosstore" IntegrationTypeVector IntegrationType = "vector" IntegrationTypeVmalert IntegrationType = "vmalert" ) func IntegrationTypeChoices() []string { - return []string{"alertmanager", "autoscaler", "caching", "cassandra_cross_service_cluster", "clickhouse_credentials", "clickhouse_kafka", "clickhouse_postgresql", "dashboard", "datadog", "datasource", "external_aws_cloudwatch_logs", "external_aws_cloudwatch_metrics", "external_elasticsearch_logs", "external_google_cloud_logging", "external_opensearch_logs", "flink", "flink_external_bigquery", "flink_external_kafka", "flink_external_postgresql", "internal_connectivity", "jolokia", "kafka_connect", "kafka_connect_postgresql", "kafka_logs", "kafka_mirrormaker", "logs", "m3aggregator", "m3coordinator", "metrics", "opensearch_cross_cluster_replication", "opensearch_cross_cluster_search", "prometheus", "read_replica", "rsyslog", "schema_registry_proxy", "stresstester", "thanos_distributed_query", "thanos_migrate", "thanoscompactor", "thanosquery", "thanosstore", "vector", "vmalert"} + return 
[]string{"alertmanager", "autoscaler", "caching", "cassandra_cross_service_cluster", "clickhouse_credentials", "clickhouse_kafka", "clickhouse_postgresql", "dashboard", "datadog", "datasource", "disaster_recovery", "external_aws_cloudwatch_logs", "external_aws_cloudwatch_metrics", "external_elasticsearch_logs", "external_google_cloud_logging", "external_opensearch_logs", "flink", "flink_external_bigquery", "flink_external_kafka", "flink_external_postgresql", "internal_connectivity", "jolokia", "kafka_connect", "kafka_connect_postgresql", "kafka_logs", "kafka_mirrormaker", "logs", "m3aggregator", "m3coordinator", "metrics", "opensearch_cross_cluster_replication", "opensearch_cross_cluster_search", "prometheus", "read_replica", "rsyslog", "schema_registry_proxy", "stresstester", "thanos_distributed_query", "thanos_migrate", "thanoscompactor", "thanosquery", "thanosruler", "thanosstore", "vector", "vmalert"} } type IntegrationTypeOut struct { diff --git a/handler/user/user.go b/handler/user/user.go index 05247c1..386bc16 100644 --- a/handler/user/user.go +++ b/handler/user/user.go @@ -515,14 +515,15 @@ func ActionTypeChoices() []string { type AnyType string const ( - AnyTypeAdmin AnyType = "admin" - AnyTypeDeveloper AnyType = "developer" - AnyTypeOperator AnyType = "operator" - AnyTypeReadOnly AnyType = "read_only" + AnyTypeAdmin AnyType = "admin" + AnyTypeDeveloper AnyType = "developer" + AnyTypeOperator AnyType = "operator" + AnyTypeProjectreadPermissions AnyType = "project:read_permissions" + AnyTypeReadOnly AnyType = "read_only" ) func AnyTypeChoices() []string { - return []string{"admin", "developer", "operator", "read_only"} + return []string{"admin", "developer", "operator", "project:read_permissions", "read_only"} } type AuthenticationMethodOut struct {