Skip to content

Commit

Permalink
Improve catalog information synchronisation with GraphQL (#507)
Browse files Browse the repository at this point in the history
* GraphQL

* rename method + make graphQLquery method private + add documentation

* rename test method names

* Reword doc + add link
  • Loading branch information
ThomasCAI-mlv authored Feb 26, 2025
1 parent c51cfc6 commit 11b759d
Show file tree
Hide file tree
Showing 8 changed files with 702 additions and 96 deletions.
11 changes: 8 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -295,18 +295,23 @@ The configuration will depend on the authentication method selected for your bro

### Stream Catalog

For Confluent Cloud only, the Stream Catalog API is used to retrieve the list of topics with their tags and their description. Reminder that the `config.cluster.id` parameter from [managed cluster properties](#managed-clusters) must be set to use Confluent Cloud.
For Confluent Cloud only, topic tags and descriptions can be synchronized with Ns4kafka.

You can configure the page size of the response of the API using the following properties:
The synchronization is done with the [Confluent Stream Catalog GraphQL API](https://docs.confluent.io/cloud/current/stream-governance/graphql-apis.html) if you have the appropriate Stream Governance package on Confluent, otherwise with the [Confluent Stream Catalog REST API](https://docs.confluent.io/cloud/current/stream-governance/stream-catalog-rest-apis.html#list-all-topics).

You can configure the synchronization using the following properties:

```yaml
ns4kafka:
confluent-cloud:
stream-catalog:
page-size: 500
sync-catalog: true
```

The max page size is at 500 as described in the [Confluent Cloud documentation](https://docs.confluent.io/cloud/current/stream-governance/stream-catalog-rest-apis.html#list-all-topics).
The page size is used for the Stream Catalog REST API and is capped at 500 as described in the [Confluent Cloud documentation](https://docs.confluent.io/cloud/current/stream-governance/stream-catalog-rest-apis.html#limits-on-topic-listings).

Reminder that the `config.cluster.id` parameter from [managed cluster properties](#managed-clusters) must be set to use Confluent Cloud.

### Managed Kafka Clusters

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,5 +40,6 @@ public class ConfluentCloudProperties {
@ConfigurationProperties("stream-catalog")
public static class StreamCatalogProperties {
// Page size for Stream Catalog REST API topic listings; Confluent Cloud caps it at 500.
private int pageSize = 500;
// Enables synchronization of topic tags and descriptions with the Confluent Stream Catalog
// (boolean default: false, i.e. synchronization is opt-in).
private boolean syncCatalog;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
package com.michelin.ns4kafka.service.client.schema;

import com.michelin.ns4kafka.property.ManagedClusterProperties;
import com.michelin.ns4kafka.service.client.schema.entities.GraphQueryResponse;
import com.michelin.ns4kafka.service.client.schema.entities.SchemaCompatibilityCheckResponse;
import com.michelin.ns4kafka.service.client.schema.entities.SchemaCompatibilityRequest;
import com.michelin.ns4kafka.service.client.schema.entities.SchemaCompatibilityResponse;
Expand Down Expand Up @@ -47,6 +48,7 @@
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import lombok.extern.slf4j.Slf4j;
import reactor.core.publisher.Flux;
Expand Down Expand Up @@ -283,11 +285,28 @@ public Mono<SchemaCompatibilityResponse> deleteCurrentCompatibilityBySubject(Str
}

/**
* Add a tag to a topic.
* List tags.
*
* @param kafkaCluster The Kafka cluster
* @param tagSpecs Tags to add
* @return Information about added tags
* @return List of existing tags
*/
public Mono<List<TagInfo>> listTags(String kafkaCluster) {
    // Resolve the schema registry connection details for the target cluster.
    ManagedClusterProperties.SchemaRegistryProperties registry = getSchemaRegistry(kafkaCluster);

    // GET the tag definitions from the Stream Catalog, authenticated with basic auth.
    URI tagDefsUri = URI.create(StringUtils.prependUri(registry.getUrl(), "/catalog/v1/types/tagdefs"));
    HttpRequest<?> getTagDefs = HttpRequest.GET(tagDefsUri)
        .basicAuth(registry.getBasicAuthUsername(), registry.getBasicAuthPassword());

    return Mono.from(httpClient.retrieve(getTagDefs, Argument.listOf(TagInfo.class)));
}

/**
* Add tags to a topic.
*
* @param kafkaCluster The Kafka cluster
* @param tagSpecs The tags to add
* @return List of associated tags
*/
public Mono<List<TagTopicInfo>> associateTags(String kafkaCluster, List<TagTopicInfo> tagSpecs) {
ManagedClusterProperties.SchemaRegistryProperties config = getSchemaRegistry(kafkaCluster);
Expand All @@ -305,7 +324,7 @@ public Mono<List<TagTopicInfo>> associateTags(String kafkaCluster, List<TagTopic
*
* @param tags The list of tags to create
* @param kafkaCluster The Kafka cluster
* @return Information about created tags
* @return List of created tags
*/
public Mono<List<TagInfo>> createTags(String kafkaCluster, List<TagInfo> tags) {
ManagedClusterProperties.SchemaRegistryProperties config = getSchemaRegistry(kafkaCluster);
Expand All @@ -319,10 +338,10 @@ public Mono<List<TagInfo>> createTags(String kafkaCluster, List<TagInfo> tags) {
}

/**
* Delete a tag to a topic.
* Delete a tag from a topic.
*
* @param kafkaCluster The Kafka cluster
* @param entityName The topic's name
* @param entityName The topic name
* @param tagName The tag to delete
* @return The resume response
*/
Expand All @@ -338,12 +357,12 @@ public Mono<HttpResponse<Void>> dissociateTag(String kafkaCluster, String entity
}

/**
* List topics with catalog info, including tag & description.
* List topics with catalog info including tags & description, using Stream Catalog API.
*
* @param kafkaCluster The Kafka cluster
* @return A list of description
* @return The topics list with their catalog information
*/
public Mono<TopicListResponse> getTopicWithCatalogInfo(String kafkaCluster, int limit, int offset) {
public Mono<TopicListResponse> getTopicsWithStreamCatalog(String kafkaCluster, int limit, int offset) {
ManagedClusterProperties.SchemaRegistryProperties config = getSchemaRegistry(kafkaCluster);

HttpRequest<?> request = HttpRequest.GET(
Expand All @@ -354,11 +373,53 @@ public Mono<TopicListResponse> getTopicWithCatalogInfo(String kafkaCluster, int
return Mono.from(httpClient.retrieve(request, TopicListResponse.class));
}

/**
* Query Stream Catalog information, using GraphQL.
*
* @param kafkaCluster The Kafka cluster
* @param query The GraphQL query
* @return The GraphQL response
*/
private Mono<GraphQueryResponse> queryWithGraphQl(String kafkaCluster, String query) {
    // Resolve the schema registry connection details for the target cluster.
    ManagedClusterProperties.SchemaRegistryProperties registry = getSchemaRegistry(kafkaCluster);

    // GraphQL queries are sent as a POST with a JSON body of the form {"query": "..."}.
    URI graphQlUri = URI.create(StringUtils.prependUri(registry.getUrl(), "/catalog/graphql"));
    HttpRequest<?> graphQlRequest = HttpRequest.POST(graphQlUri, Map.of("query", query))
        .basicAuth(registry.getBasicAuthUsername(), registry.getBasicAuthPassword());

    return Mono.from(httpClient.retrieve(graphQlRequest, GraphQueryResponse.class));
}

/**
 * List topics holding any of the given tags, with their tags, using the Stream Catalog GraphQL API.
 *
 * @param kafkaCluster The Kafka cluster
 * @param tagsNames The tag names to filter topics on
 * @return The GraphQL response containing the topics list with their tags
 */
public Mono<GraphQueryResponse> getTopicsWithTagsWithGraphQl(String kafkaCluster, List<String> tagsNames) {
// NOTE(review): tag names are interpolated verbatim into the GraphQL list literal. GraphQL string
// values must be double-quoted, so callers presumably pass pre-quoted names (e.g. "\"PII\"") —
// confirm against the caller; otherwise the generated query would be malformed.
String query = "query { kafka_topic(tags: [" + String.join(",", tagsNames) + "]) { nameLower tags } }";

return queryWithGraphQl(kafkaCluster, query);
}

/**
 * List topics along with their description, using the Stream Catalog GraphQL API.
 *
 * @param kafkaCluster The Kafka cluster
 * @return The GraphQL query response containing the topics list with their descriptions
 */
public Mono<GraphQueryResponse> getTopicsWithDescriptionWithGraphQl(String kafkaCluster) {
    // The "_gte: null" predicate presumably restricts results to topics that have a
    // description set — confirm against the Stream Catalog GraphQL filter semantics.
    return queryWithGraphQl(kafkaCluster,
        "query { kafka_topic(where: {description: {_gte: null}}) { nameLower description } }");
}

/**
* Update a topic description.
*
* @param kafkaCluster The Kafka cluster
* @param body The body passed to the request
* @param body The body given to the request
* @return Information about description
*/
public Mono<HttpResponse<TopicDescriptionUpdateResponse>> updateDescription(String kafkaCluster,
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package com.michelin.ns4kafka.service.client.schema.entities;

import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
import lombok.Builder;

/**
 * GraphQL query data, i.e. the "data" payload of a Stream Catalog GraphQL response.
 *
 * @param kafkaTopic the list of queried Kafka topics, deserialized from the "kafka_topic" JSON field
 */
@Builder
public record GraphQueryData(@JsonProperty("kafka_topic") List<GraphQueryTopic> kafkaTopic) {
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package com.michelin.ns4kafka.service.client.schema.entities;

import lombok.Builder;

/**
 * GraphQL query response, the top-level envelope returned by the Stream Catalog GraphQL endpoint.
 *
 * @param data the response data holding the queried topics
 */
@Builder
public record GraphQueryResponse(GraphQueryData data) {
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package com.michelin.ns4kafka.service.client.schema.entities;

import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
import lombok.Builder;

/**
 * GraphQL query Kafka topic, one entry of the "kafka_topic" list in a GraphQL response.
 *
 * @param name the topic name, deserialized from the "nameLower" JSON field
 * @param description the topic description (may be absent depending on the query's selection set)
 * @param tags the topic tags (may be absent depending on the query's selection set)
 */
@Builder
public record GraphQueryTopic(@JsonProperty("nameLower") String name, String description, List<String> tags) {
}
Loading

0 comments on commit 11b759d

Please sign in to comment.