diff --git a/api/src/main/java/com/github/streamshub/console/api/model/KafkaCluster.java b/api/src/main/java/com/github/streamshub/console/api/model/KafkaCluster.java
index 70da51971..64795ff0d 100644
--- a/api/src/main/java/com/github/streamshub/console/api/model/KafkaCluster.java
+++ b/api/src/main/java/com/github/streamshub/console/api/model/KafkaCluster.java
@@ -1,16 +1,11 @@
 package com.github.streamshub.console.api.model;
 
-import java.nio.charset.StandardCharsets;
-import java.util.Base64;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Map;
 
-import jakarta.json.Json;
 import jakarta.json.JsonObject;
-import jakarta.json.JsonObjectBuilder;
-import jakarta.json.JsonValue;
 
 import org.eclipse.microprofile.openapi.annotations.media.Schema;
 
@@ -24,7 +19,7 @@ import static java.util.Comparator.nullsLast;
 
 @Schema(name = "KafkaCluster")
-public class KafkaCluster extends Resource {
+public class KafkaCluster extends Resource implements PaginatedKubeResource {
 
     public static class Fields {
         public static final String NAME = "name";
@@ -166,58 +161,35 @@ public KafkaCluster(String id, List<Node> nodes, Node controller, List<String> a
      * of Topic fields used to compare entities for pagination/sorting.
      */
     public static KafkaCluster fromCursor(JsonObject cursor) {
-        if (cursor == null) {
-            return null;
-        }
-
-        KafkaCluster cluster = new KafkaCluster(cursor.getString("id"), null, null, null);
-        JsonObject attr = cursor.getJsonObject("attributes");
-        cluster.name(attr.getString(Fields.NAME, null));
-        cluster.namespace(attr.getString(Fields.NAMESPACE, null));
-        cluster.creationTimestamp(attr.getString(Fields.CREATION_TIMESTAMP, null));
-
-        return cluster;
-    }
-
-    public String toCursor(List<String> sortFields) {
-        JsonObjectBuilder cursor = Json.createObjectBuilder()
-                .add("id", id == null ? Json.createValue("") : Json.createValue(id));
-
-        JsonObjectBuilder attrBuilder = Json.createObjectBuilder();
-        maybeAddAttribute(attrBuilder, sortFields, Fields.NAME, attributes.name);
-        maybeAddAttribute(attrBuilder, sortFields, Fields.NAMESPACE, attributes.namespace);
-        maybeAddAttribute(attrBuilder, sortFields, Fields.CREATION_TIMESTAMP, attributes.creationTimestamp);
-        cursor.add("attributes", attrBuilder.build());
-
-        return Base64.getUrlEncoder().encodeToString(cursor.build().toString().getBytes(StandardCharsets.UTF_8));
-    }
-
-    static void maybeAddAttribute(JsonObjectBuilder attrBuilder, List<String> sortFields, String key, String value) {
-        if (sortFields.contains(key)) {
-            attrBuilder.add(key, value != null ? Json.createValue(value) : JsonValue.NULL);
-        }
+        return PaginatedKubeResource.fromCursor(cursor, id -> new KafkaCluster(id, null, null, null));
     }
 
+    @Override
     public String name() {
         return attributes.name;
     }
 
+    @Override
     public void name(String name) {
         attributes.name = name;
     }
 
+    @Override
     public String namespace() {
         return attributes.namespace;
     }
 
+    @Override
     public void namespace(String namespace) {
         attributes.namespace = namespace;
     }
 
+    @Override
     public String creationTimestamp() {
         return attributes.creationTimestamp;
     }
 
+    @Override
     public void creationTimestamp(String creationTimestamp) {
         attributes.creationTimestamp = creationTimestamp;
     }
diff --git a/api/src/main/java/com/github/streamshub/console/api/model/KafkaRebalance.java b/api/src/main/java/com/github/streamshub/console/api/model/KafkaRebalance.java
index a444eaa4a..80ef38296 100644
--- a/api/src/main/java/com/github/streamshub/console/api/model/KafkaRebalance.java
+++ b/api/src/main/java/com/github/streamshub/console/api/model/KafkaRebalance.java
@@ -1,17 +1,12 @@
 package com.github.streamshub.console.api.model;
 
-import java.nio.charset.StandardCharsets;
-import java.util.Base64;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import jakarta.json.Json;
 import jakarta.json.JsonObject;
-import jakarta.json.JsonObjectBuilder;
-import jakarta.json.JsonValue;
 
 import org.eclipse.microprofile.openapi.annotations.media.Schema;
 import org.eclipse.microprofile.openapi.annotations.media.SchemaProperty;
@@ -19,8 +14,8 @@ import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonFilter;
 import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonInclude.Include;
+import com.fasterxml.jackson.annotation.JsonProperty;
 
 import com.github.streamshub.console.api.support.ComparatorBuilder;
 import com.github.streamshub.console.api.support.ErrorCategory;
 import com.github.streamshub.console.api.support.ListRequestContext;
@@ -48,7 +43,7 @@
         message = "resource type conflicts with operation",
         node = "type",
         payload = ErrorCategory.ResourceConflict.class)
-public class KafkaRebalance extends Resource {
+public class KafkaRebalance extends Resource implements PaginatedKubeResource {
 
     public static final String API_TYPE = "kafkaRebalances";
     public static final String FIELDS_PARAM = "fields[" + API_TYPE + "]";
@@ -253,62 +248,39 @@ public KafkaRebalance(String id, String type, KafkaRebalance.Attributes attribut
      * of KafkaRebalance fields used to compare entities for pagination/sorting.
      */
     public static KafkaRebalance fromCursor(JsonObject cursor) {
-        if (cursor == null) {
-            return null;
-        }
-
-        KafkaRebalance rebalance = new KafkaRebalance(cursor.getString("id"));
-        JsonObject attr = cursor.getJsonObject("attributes");
-        rebalance.name(attr.getString(Fields.NAME, null));
-        rebalance.namespace(attr.getString(Fields.NAMESPACE, null));
-        rebalance.creationTimestamp(attr.getString(Fields.CREATION_TIMESTAMP, null));
-
-        return rebalance;
-    }
-
-    public String toCursor(List<String> sortFields) {
-        JsonObjectBuilder cursor = Json.createObjectBuilder()
-                .add("id", id == null ? Json.createValue("") : Json.createValue(id));
-
-        JsonObjectBuilder attrBuilder = Json.createObjectBuilder();
-        maybeAddAttribute(attrBuilder, sortFields, Fields.NAME, attributes.name);
-        maybeAddAttribute(attrBuilder, sortFields, Fields.NAMESPACE, attributes.namespace);
-        maybeAddAttribute(attrBuilder, sortFields, Fields.CREATION_TIMESTAMP, attributes.creationTimestamp);
-        cursor.add("attributes", attrBuilder.build());
-
-        return Base64.getUrlEncoder().encodeToString(cursor.build().toString().getBytes(StandardCharsets.UTF_8));
-    }
-
-    static void maybeAddAttribute(JsonObjectBuilder attrBuilder, List<String> sortFields, String key, String value) {
-        if (sortFields.contains(key)) {
-            attrBuilder.add(key, value != null ? Json.createValue(value) : JsonValue.NULL);
-        }
+        return PaginatedKubeResource.fromCursor(cursor, KafkaRebalance::new);
     }
 
     public String action() {
         return ((Meta) super.getMeta()).action();
     }
 
+    @Override
     public String name() {
         return attributes.name;
     }
 
+    @Override
     public void name(String name) {
         attributes.name = name;
     }
 
+    @Override
     public String namespace() {
         return attributes.namespace;
     }
 
+    @Override
     public void namespace(String namespace) {
         attributes.namespace = namespace;
     }
 
+    @Override
     public String creationTimestamp() {
         return attributes.creationTimestamp;
     }
 
+    @Override
     public void creationTimestamp(String creationTimestamp) {
         attributes.creationTimestamp = creationTimestamp;
     }
@@ -333,82 +305,42 @@ public void mode(String mode) {
         attributes.mode = mode;
     }
 
-    public List<Integer> brokers() {
-        return attributes.brokers;
-    }
-
     public void brokers(List<Integer> brokers) {
         attributes.brokers = brokers;
     }
 
-    public List<String> goals() {
-        return attributes.goals;
-    }
-
     public void goals(List<String> goals) {
         attributes.goals = goals;
     }
 
-    public boolean skipHardGoalCheck() {
-        return attributes.skipHardGoalCheck;
-    }
-
     public void skipHardGoalCheck(boolean skipHardGoalCheck) {
         attributes.skipHardGoalCheck = skipHardGoalCheck;
     }
 
-    public boolean rebalanceDisk() {
-        return attributes.rebalanceDisk;
-    }
-
     public void rebalanceDisk(boolean rebalanceDisk) {
         attributes.rebalanceDisk = rebalanceDisk;
     }
 
-    public String excludedTopics() {
-        return attributes.excludedTopics;
-    }
-
     public void excludedTopics(String excludedTopics) {
         attributes.excludedTopics = excludedTopics;
     }
 
-    public int concurrentPartitionMovementsPerBroker() {
-        return attributes.concurrentPartitionMovementsPerBroker;
-    }
-
     public void concurrentPartitionMovementsPerBroker(int concurrentPartitionMovementsPerBroker) {
         attributes.concurrentPartitionMovementsPerBroker = concurrentPartitionMovementsPerBroker;
     }
 
-    public int concurrentIntraBrokerPartitionMovements() {
-        return attributes.concurrentIntraBrokerPartitionMovements;
-    }
-
     public void concurrentIntraBrokerPartitionMovements(int concurrentIntraBrokerPartitionMovements) {
         attributes.concurrentIntraBrokerPartitionMovements = concurrentIntraBrokerPartitionMovements;
     }
 
-    public int concurrentLeaderMovements() {
-        return attributes.concurrentLeaderMovements;
-    }
-
     public void concurrentLeaderMovements(int concurrentLeaderMovements) {
         attributes.concurrentLeaderMovements = concurrentLeaderMovements;
     }
 
-    public long replicationThrottle() {
-        return attributes.replicationThrottle;
-    }
-
     public void replicationThrottle(long replicationThrottle) {
         attributes.replicationThrottle = replicationThrottle;
     }
 
-    public List<String> replicaMovementStrategies() {
-        return attributes.replicaMovementStrategies;
-    }
-
     public void replicaMovementStrategies(List<String> replicaMovementStrategies) {
         attributes.replicaMovementStrategies = replicaMovementStrategies;
     }
@@ -417,10 +349,6 @@ public Map<String, Object> optimizationResult() {
         return attributes.optimizationResult;
     }
 
-    public List<Condition> conditions() {
-        return attributes.conditions;
-    }
-
     public void conditions(List<Condition> conditions) {
         attributes.conditions = conditions;
     }
diff --git a/api/src/main/java/com/github/streamshub/console/api/model/KafkaRebalanceFilterParams.java b/api/src/main/java/com/github/streamshub/console/api/model/KafkaRebalanceFilterParams.java
index aadd379eb..19f5889d5 100644
--- a/api/src/main/java/com/github/streamshub/console/api/model/KafkaRebalanceFilterParams.java
+++ b/api/src/main/java/com/github/streamshub/console/api/model/KafkaRebalanceFilterParams.java
@@ -17,25 +17,6 @@ public class KafkaRebalanceFilterParams {
 
-    @QueryParam("filter[id]")
-    @Parameter(
-            description = "Retrieve only rebalances with an ID matching this parameter",
-            schema = @Schema(implementation = String[].class, minItems = 2),
-            explode = Explode.FALSE)
-    @Expression(
-            when = "self != null",
-            value = "self.operator == 'eq' || self.operator == 'in'",
-            message = "unsupported filter operator, supported values: [ 'eq', 'in' ]",
-            payload = ErrorCategory.InvalidQueryParameter.class,
-            node = "filter[id]")
-    @Expression(
-            when = "self != null",
-            value = "self.operands.size() >= 1",
-            message = "at least 1 operand is required",
-            payload = ErrorCategory.InvalidQueryParameter.class,
-            node = "filter[id]")
-    FetchFilter idFilter;
-
     @QueryParam("filter[name]")
     @Parameter(
             description = "Retrieve only rebalances with a name matching this parameter",
@@ -100,10 +81,6 @@ public List<Predicate<KafkaRebalance>> buildPredicates() {
             predicates.add(new FetchFilterPredicate<>(nameFilter, KafkaRebalance::name));
         }
 
-        if (idFilter != null) {
-            predicates.add(new FetchFilterPredicate<>(idFilter, KafkaRebalance::getId));
-        }
-
         if (statusFilter != null) {
             predicates.add(new FetchFilterPredicate<>(statusFilter, KafkaRebalance::status));
         }
diff --git a/api/src/main/java/com/github/streamshub/console/api/model/PaginatedKubeResource.java b/api/src/main/java/com/github/streamshub/console/api/model/PaginatedKubeResource.java
new file mode 100644
index 000000000..40eee49bb
--- /dev/null
+++ b/api/src/main/java/com/github/streamshub/console/api/model/PaginatedKubeResource.java
@@ -0,0 +1,73 @@
+package com.github.streamshub.console.api.model;
+
+import java.nio.charset.StandardCharsets;
+import java.util.Base64;
+import java.util.List;
+import java.util.function.Function;
+
+import jakarta.json.Json;
+import jakarta.json.JsonObject;
+import jakarta.json.JsonObjectBuilder;
+import jakarta.json.JsonValue;
+
+import com.github.streamshub.console.api.model.KafkaRebalance.Fields;
+
+/**
+ * Common interface to parse and generate pagination cursors for Kube resources
+ * having the common fields id, name, namespace, and creationTimestamp.
+ */
+interface PaginatedKubeResource {
+
+    // Use getId to match Resource method.
+    // May rename later, see https://github.com/streamshub/console/issues/43
+    String getId();
+
+    String name();
+
+    void name(String name);
+
+    String namespace();
+
+    void namespace(String namespace);
+
+    String creationTimestamp();
+
+    void creationTimestamp(String creationTimestamp);
+
+    static <T extends PaginatedKubeResource> T fromCursor(JsonObject cursor, Function<String, T> resourceFactory) {
+        if (cursor == null) {
+            return null;
+        }
+
+        JsonObject attr = cursor.getJsonObject("attributes");
+
+        T resource = resourceFactory.apply(cursor.getString("id"));
+        resource.name(attr.getString(Fields.NAME, null));
+        resource.namespace(attr.getString(Fields.NAMESPACE, null));
+        resource.creationTimestamp(attr.getString(Fields.CREATION_TIMESTAMP, null));
+
+        return resource;
+    }
+
+    default String toCursor(List<String> sortFields) {
+        String id = getId();
+
+        JsonObjectBuilder cursor = Json.createObjectBuilder()
+                .add("id", id == null ? Json.createValue("") : Json.createValue(id));
+
+        JsonObjectBuilder attrBuilder = Json.createObjectBuilder();
+        maybeAddAttribute(attrBuilder, sortFields, Fields.NAME, name());
+        maybeAddAttribute(attrBuilder, sortFields, Fields.NAMESPACE, namespace());
+        maybeAddAttribute(attrBuilder, sortFields, Fields.CREATION_TIMESTAMP, creationTimestamp());
+        cursor.add("attributes", attrBuilder.build());
+
+        return Base64.getUrlEncoder().encodeToString(cursor.build().toString().getBytes(StandardCharsets.UTF_8));
+    }
+
+    static void maybeAddAttribute(JsonObjectBuilder attrBuilder, List<String> sortFields, String key, String value) {
+        if (sortFields.contains(key)) {
+            attrBuilder.add(key, value != null ? Json.createValue(value) : JsonValue.NULL);
+        }
+    }
+
+}
diff --git a/api/src/test/java/com/github/streamshub/console/api/KafkaRebalancesResourceIT.java b/api/src/test/java/com/github/streamshub/console/api/KafkaRebalancesResourceIT.java
index c2dacecfe..913ac5002 100644
--- a/api/src/test/java/com/github/streamshub/console/api/KafkaRebalancesResourceIT.java
+++ b/api/src/test/java/com/github/streamshub/console/api/KafkaRebalancesResourceIT.java
@@ -220,6 +220,7 @@ void testPatchRebalanceWithStatusProposalReady() {
         String rebalanceId = whenRequesting(req -> req
                 .param("filter[mode]", KafkaRebalanceMode.FULL.toValue())
                 .param("filter[status]", KafkaRebalanceState.ProposalReady.name())
+                .param("filter[name]", "like,rebalance-*")
                 .get("", clusterId1))
             .assertThat()
             .statusCode(is(Status.OK.getStatusCode()))
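Illustrative sketch, not part of the patch above: the cursor that PaginatedKubeResource#toCursor emits and fromCursor consumes is a URL-safe Base64 encoding of a small JSON document of the shape {"id": ..., "attributes": {...}}, where "attributes" carries only the fields requested for sorting. The standalone class below (the class name CursorFormatDemo and the sample values are hypothetical) round-trips that encoding with the same jakarta.json and java.util.Base64 calls the patch uses, assuming a jakarta.json implementation such as Eclipse Parsson is on the classpath.

import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import java.util.Base64;

import jakarta.json.Json;
import jakarta.json.JsonObject;
import jakarta.json.JsonReader;

// Sketch only: demonstrates the cursor wire format, not code from the patch.
public class CursorFormatDemo {
    public static void main(String[] args) {
        // Build the same JSON shape toCursor produces: an id plus the sorted attributes
        String json = Json.createObjectBuilder()
                .add("id", "example-id")
                .add("attributes", Json.createObjectBuilder()
                        .add("name", "my-cluster")
                        .add("namespace", "my-namespace"))
                .build()
                .toString();

        // Encode the way toCursor does: URL-safe Base64 over the UTF-8 JSON text
        String cursor = Base64.getUrlEncoder()
                .encodeToString(json.getBytes(StandardCharsets.UTF_8));
        System.out.println(cursor);

        // Decode the way a caller of fromCursor would, recovering the JsonObject
        String decodedJson = new String(Base64.getUrlDecoder().decode(cursor), StandardCharsets.UTF_8);
        try (JsonReader reader = Json.createReader(new StringReader(decodedJson))) {
            JsonObject decoded = reader.readObject();
            System.out.println(decoded.getString("id"));                               // example-id
            System.out.println(decoded.getJsonObject("attributes").getString("name")); // my-cluster
        }
    }
}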