Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
42 commits
Select commit Hold shift + click to select a range
8a03bfd
Use typespec contracts
germanosin Jun 9, 2025
7d5af90
Merge branch 'main' into typespec
germanosin Jun 9, 2025
b8b91fc
Fixed styling
germanosin Jun 10, 2025
155b038
Merge remote-tracking branch 'origin/typespec' into typespec
germanosin Jun 10, 2025
a2175a1
Fixed styling
germanosin Jun 10, 2025
5d4f419
enable typespec by default
germanosin Jun 11, 2025
8cd0a0a
Added info
germanosin Jun 11, 2025
6ce97f8
Frontend use new contracts
germanosin Jun 11, 2025
a7a59d8
Fixes in contracts for backward compatibility
germanosin Jun 11, 2025
d0ef634
use typespec build
germanosin Jun 12, 2025
c29091f
Install pnpm dependencies
germanosin Jun 12, 2025
d67068c
Merged main
germanosin Aug 7, 2025
f114d44
Actualize typespec
germanosin Aug 7, 2025
4279d6c
Actualize typespec
germanosin Aug 7, 2025
a1c261b
fixed frontend build
germanosin Aug 7, 2025
ea75152
fixed frontend build
germanosin Aug 7, 2025
5341c20
fixed frontend build
germanosin Aug 7, 2025
c61da13
fixed frontend build
germanosin Aug 7, 2025
2c670f6
fixed frontend build
germanosin Aug 7, 2025
4bf3779
fixed frontend build
germanosin Aug 7, 2025
fa8b03b
fixed frontend build
germanosin Aug 7, 2025
bcae096
Enabled contract validation
germanosin Aug 7, 2025
db746fa
Removed frontend test report
germanosin Aug 7, 2025
a42e81e
Merge branch 'main' into typespec
germanosin Aug 12, 2025
38ee6c9
Synced with main
germanosin Aug 12, 2025
0474fd7
Synced with main
germanosin Aug 12, 2025
c27b32d
Added lucene
germanosin Aug 13, 2025
153c44d
Close stats
germanosin Aug 13, 2025
c9a6d94
fts
germanosin Aug 14, 2025
3706fb2
Merge branch 'main' into issues/1087-fts
germanosin Aug 14, 2025
d887668
Merge branch 'main' into issues/1087-fts
germanosin Aug 14, 2025
56889da
Merge branch 'main' into issues/1087-fts
germanosin Aug 15, 2025
bb49399
Merge branch 'main' into issues/1087-fts
germanosin Aug 18, 2025
6da3269
Ngram config and topics optimizations
germanosin Aug 21, 2025
41a4b52
Merge branch 'main' into issues/1087-fts
germanosin Aug 21, 2025
fd28e66
Merge main
germanosin Aug 29, 2025
df6a364
Refactoring
germanosin Aug 29, 2025
5f73e9c
Fixes
germanosin Aug 29, 2025
82e6847
Fixes
germanosin Aug 29, 2025
0111aa5
Fixed schemas sorting
germanosin Aug 29, 2025
c283f04
Fixed checkstyle
germanosin Aug 29, 2025
6df0461
Added range query support
germanosin Aug 29, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion api/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,10 @@ dependencies {
antlr libs.antlr
implementation libs.antlr.runtime

implementation libs.lucene
implementation libs.lucene.queryparser
implementation libs.lucene.analysis.common

implementation libs.opendatadiscovery.oddrn
implementation(libs.opendatadiscovery.client) {
exclude group: 'org.springframework.boot', module: 'spring-boot-starter-webflux'
Expand All @@ -68,7 +72,6 @@ dependencies {
// CVE Fixes
implementation libs.apache.commons.compress
implementation libs.okhttp3.logging.intercepter
// CVE Fixes End

implementation libs.modelcontextprotocol.spring.webflux
implementation libs.victools.jsonschema.generator
Expand Down
22 changes: 22 additions & 0 deletions api/src/main/java/io/kafbat/ui/config/ClustersProperties.java
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ public class ClustersProperties {
MetricsStorage defaultMetricsStorage = new MetricsStorage();

CacheProperties cache = new CacheProperties();
ClusterFtsProperties fts = new ClusterFtsProperties();

@Data
public static class Cluster {
Expand Down Expand Up @@ -217,6 +218,27 @@ public static class CacheProperties {
Duration connectClusterCacheExpiry = Duration.ofHours(24);
}

@Data
@NoArgsConstructor
@AllArgsConstructor
public static class FtsProperties {
  // Per-entity full-text-search tuning. Instances are created either with the
  // defaults below (no-args constructor) or via the all-args constructor in
  // ClusterFtsProperties, which overrides all three values per entity kind.
  // NOTE(review): presumably toggles n-gram tokenization for the Lucene index
  // (see the lucene / lucene-analysis-common dependencies added in build.gradle)
  // — confirm against the index/analyzer implementation.
  boolean ngram = true;
  // Minimum n-gram length used when ngram indexing is enabled — TODO confirm.
  int ngramMin = 1;
  // Maximum n-gram length used when ngram indexing is enabled — TODO confirm.
  int ngramMax = 4;
}

@Data
@NoArgsConstructor
@AllArgsConstructor
public static class ClusterFtsProperties {
  // Master switch for full-text search; disabled by default, so existing
  // deployments keep the previous (substring) filtering behavior.
  boolean enabled = false;
  // Entity-specific defaults. Topics use a different configuration
  // (ngram=false, min=3, max=5) than the other entity kinds (ngram=true,
  // min=1, max=4) — the rationale is not visible here; see FtsProperties.
  FtsProperties topics = new FtsProperties(false, 3, 5);
  FtsProperties schemas = new FtsProperties(true, 1, 4);
  FtsProperties consumers = new FtsProperties(true, 1, 4);
  FtsProperties connect = new FtsProperties(true, 1, 4);
  FtsProperties acl = new FtsProperties(true, 1, 4);
}

@PostConstruct
public void validateAndSetDefaults() {
if (clusters != null) {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
package io.kafbat.ui.controller;

import static org.apache.commons.lang3.Strings.CI;

import io.kafbat.ui.api.SchemasApi;
import io.kafbat.ui.config.ClustersProperties;
import io.kafbat.ui.exception.ValidationException;
import io.kafbat.ui.mapper.KafkaSrMapper;
import io.kafbat.ui.mapper.KafkaSrMapperImpl;
Expand All @@ -15,13 +14,13 @@
import io.kafbat.ui.model.rbac.AccessContext;
import io.kafbat.ui.model.rbac.permission.SchemaAction;
import io.kafbat.ui.service.SchemaRegistryService;
import io.kafbat.ui.service.index.SchemasFilter;
import io.kafbat.ui.service.mcp.McpTool;
import java.util.List;
import java.util.Map;
import javax.validation.Valid;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.server.ServerWebExchange;
Expand All @@ -38,6 +37,7 @@ public class SchemasController extends AbstractController implements SchemasApi,
private final KafkaSrMapper kafkaSrMapper = new KafkaSrMapperImpl();

private final SchemaRegistryService schemaRegistryService;
private final ClustersProperties clustersProperties;

@Override
protected KafkaCluster getCluster(String clusterName) {
Expand Down Expand Up @@ -214,6 +214,8 @@ public Mono<ResponseEntity<SchemaSubjectsResponseDTO>> getSchemas(String cluster
.operationName("getSchemas")
.build();

ClustersProperties.ClusterFtsProperties fts = clustersProperties.getFts();

return schemaRegistryService
.getAllSubjectNames(getCluster(clusterName))
.flatMapIterable(l -> l)
Expand All @@ -222,10 +224,10 @@ public Mono<ResponseEntity<SchemaSubjectsResponseDTO>> getSchemas(String cluster
.flatMap(subjects -> {
int pageSize = perPage != null && perPage > 0 ? perPage : DEFAULT_PAGE_SIZE;
int subjectToSkip = ((pageNum != null && pageNum > 0 ? pageNum : 1) - 1) * pageSize;
List<String> filteredSubjects = subjects
.stream()
.filter(subj -> search == null || CI.contains(subj, search))
.sorted().toList();

SchemasFilter filter = new SchemasFilter(subjects, fts.isEnabled(), fts.getSchemas());
List<String> filteredSubjects = filter.find(search);

var totalPages = (filteredSubjects.size() / pageSize)
+ (filteredSubjects.size() % pageSize == 0 ? 0 : 1);
List<String> subjectsToRender = filteredSubjects.stream()
Expand Down
26 changes: 15 additions & 11 deletions api/src/main/java/io/kafbat/ui/controller/TopicsController.java
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import static org.apache.commons.lang3.Strings.CI;

import io.kafbat.ui.api.TopicsApi;
import io.kafbat.ui.config.ClustersProperties;
import io.kafbat.ui.mapper.ClusterMapper;
import io.kafbat.ui.model.InternalTopic;
import io.kafbat.ui.model.InternalTopicConfig;
Expand Down Expand Up @@ -37,7 +38,6 @@
import javax.validation.Valid;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RestController;
Expand All @@ -55,6 +55,7 @@ public class TopicsController extends AbstractController implements TopicsApi, M
private final TopicsService topicsService;
private final TopicAnalysisService topicAnalysisService;
private final ClusterMapper clusterMapper;
private final ClustersProperties clustersProperties;

@Override
public Mono<ResponseEntity<TopicDTO>> createTopic(
Expand Down Expand Up @@ -181,23 +182,23 @@ public Mono<ResponseEntity<TopicsResponseDTO>> getTopics(String clusterName,
.operationName("getTopics")
.build();

return topicsService.getTopicsForPagination(getCluster(clusterName))
return topicsService.getTopicsForPagination(getCluster(clusterName), search, showInternal)
.flatMap(topics -> accessControlService.filterViewableTopics(topics, clusterName))
.flatMap(topics -> {
int pageSize = perPage != null && perPage > 0 ? perPage : DEFAULT_PAGE_SIZE;
var topicsToSkip = ((page != null && page > 0 ? page : 1) - 1) * pageSize;
ClustersProperties.ClusterFtsProperties fts = clustersProperties.getFts();
Comparator<InternalTopic> comparatorForTopic = getComparatorForTopic(orderBy, fts.isEnabled());
var comparator = sortOrder == null || !sortOrder.equals(SortOrderDTO.DESC)
? getComparatorForTopic(orderBy) : getComparatorForTopic(orderBy).reversed();
List<InternalTopic> filtered = topics.stream()
.filter(topic -> !topic.isInternal()
|| showInternal != null && showInternal)
.filter(topic -> search == null || CI.contains(topic.getName(), search))
.sorted(comparator)
.toList();
? comparatorForTopic : comparatorForTopic.reversed();

List<InternalTopic> filtered = topics.stream().sorted(comparator).toList();

var totalPages = (filtered.size() / pageSize)
+ (filtered.size() % pageSize == 0 ? 0 : 1);

List<String> topicsPage = filtered.stream()
.filter(t -> !t.isInternal() || showInternal != null && showInternal)
.skip(topicsToSkip)
.limit(pageSize)
.map(InternalTopic::getName)
Expand Down Expand Up @@ -348,9 +349,12 @@ public Mono<ResponseEntity<Flux<TopicProducerStateDTO>>> getActiveProducerStates
}

private Comparator<InternalTopic> getComparatorForTopic(
TopicColumnsToSortDTO orderBy) {
TopicColumnsToSortDTO orderBy,
boolean ftsEnabled) {
var defaultComparator = Comparator.comparing(InternalTopic::getName);
if (orderBy == null) {
if (orderBy == null && ftsEnabled) {
return (o1, o2) -> 0;
} else if (orderBy == null) {
return defaultComparator;
}
return switch (orderBy) {
Expand Down
16 changes: 14 additions & 2 deletions api/src/main/java/io/kafbat/ui/model/InternalTopic.java
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,16 @@ public class InternalTopic {
private final long segmentSize;
private final long segmentCount;


public InternalTopic withMetrics(Metrics metrics) {
var builder = toBuilder();
if (metrics != null) {
builder.bytesInPerSec(metrics.getIoRates().topicBytesInPerSec().get(this.name));
builder.bytesOutPerSec(metrics.getIoRates().topicBytesOutPerSec().get(this.name));
}
return builder.build();
}

public static InternalTopic from(TopicDescription topicDescription,
List<ConfigEntry> configs,
InternalPartitionsOffsets partitionsOffsets,
Expand Down Expand Up @@ -113,8 +123,10 @@ public static InternalTopic from(TopicDescription topicDescription,
topic.segmentSize(stats.getSegmentSize());
});

topic.bytesInPerSec(metrics.getIoRates().topicBytesInPerSec().get(topicDescription.name()));
topic.bytesOutPerSec(metrics.getIoRates().topicBytesOutPerSec().get(topicDescription.name()));
if (metrics != null) {
topic.bytesInPerSec(metrics.getIoRates().topicBytesInPerSec().get(topicDescription.name()));
topic.bytesOutPerSec(metrics.getIoRates().topicBytesOutPerSec().get(topicDescription.name()));
}

topic.topicConfigs(
configs.stream().map(InternalTopicConfig::from).collect(Collectors.toList()));
Expand Down
9 changes: 8 additions & 1 deletion api/src/main/java/io/kafbat/ui/model/Statistics.java
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@

@Value
@Builder(toBuilder = true)
public class Statistics {
public class Statistics implements AutoCloseable {
ServerStatusDTO status;
Throwable lastKafkaException;
String version;
Expand Down Expand Up @@ -46,4 +46,11 @@ public Stream<TopicDescription> topicDescriptions() {
public Statistics withClusterState(UnaryOperator<ScrapedClusterState> stateUpdate) {
return toBuilder().clusterState(stateUpdate.apply(clusterState)).build();
}

@Override
public void close() throws Exception {
  // Release resources held by the scraped cluster state (e.g. search indexes);
  // clusterState may be null for a Statistics instance that never scraped.
  if (clusterState != null) {
    clusterState.close();
  }
}
}
Original file line number Diff line number Diff line change
@@ -1,15 +1,15 @@
package io.kafbat.ui.service;

import static org.apache.commons.lang3.Strings.CI;

import com.google.common.collect.Streams;
import com.google.common.collect.Table;
import io.kafbat.ui.config.ClustersProperties;
import io.kafbat.ui.emitter.EnhancedConsumer;
import io.kafbat.ui.model.ConsumerGroupOrderingDTO;
import io.kafbat.ui.model.InternalConsumerGroup;
import io.kafbat.ui.model.InternalTopicConsumerGroup;
import io.kafbat.ui.model.KafkaCluster;
import io.kafbat.ui.model.SortOrderDTO;
import io.kafbat.ui.service.index.ConsumerGroupFilter;
import io.kafbat.ui.service.rbac.AccessControlService;
import io.kafbat.ui.util.ApplicationMetrics;
import io.kafbat.ui.util.KafkaClientSslPropertiesUtil;
Expand All @@ -25,7 +25,6 @@
import java.util.stream.Stream;
import javax.annotation.Nullable;
import lombok.RequiredArgsConstructor;
import org.apache.commons.lang3.StringUtils;
import org.apache.kafka.clients.admin.ConsumerGroupDescription;
import org.apache.kafka.clients.admin.ConsumerGroupListing;
import org.apache.kafka.clients.admin.OffsetSpec;
Expand All @@ -41,6 +40,7 @@ public class ConsumerGroupService {

private final AdminClientService adminClientService;
private final AccessControlService accessControlService;
private final ClustersProperties clustersProperties;

private Mono<List<InternalConsumerGroup>> getConsumerGroups(
ReactiveAdminClient ac,
Expand Down Expand Up @@ -114,11 +114,7 @@ public Mono<ConsumerGroupsPage> getConsumerGroupsPage(
SortOrderDTO sortOrderDto) {
return adminClientService.get(cluster).flatMap(ac ->
ac.listConsumerGroups()
.map(listing -> search == null
? listing
: listing.stream()
.filter(g -> CI.contains(g.groupId(), search))
.toList()
.map(listing -> filterGroups(listing, search)
)
.flatMapIterable(lst -> lst)
.filterWhen(cg -> accessControlService.isConsumerGroupAccessible(cg.groupId(), cluster.getName()))
Expand All @@ -131,6 +127,12 @@ public Mono<ConsumerGroupsPage> getConsumerGroupsPage(
(allGroups.size() / perPage) + (allGroups.size() % perPage == 0 ? 0 : 1))))));
}

/**
 * Narrows {@code groups} to those matching {@code search}, using the
 * cluster-level FTS configuration (enabled flag and consumer-group n-gram
 * settings) to decide how matching is performed.
 */
private Collection<ConsumerGroupListing> filterGroups(Collection<ConsumerGroupListing> groups, String search) {
  var ftsProperties = clustersProperties.getFts();
  return new ConsumerGroupFilter(groups, ftsProperties.isEnabled(), ftsProperties.getConsumers())
      .find(search, false);
}

private Mono<List<ConsumerGroupDescription>> loadSortedDescriptions(ReactiveAdminClient ac,
List<ConsumerGroupListing> groups,
int pageNum,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
import io.kafbat.ui.model.NewConnectorDTO;
import io.kafbat.ui.model.TaskDTO;
import io.kafbat.ui.model.connect.InternalConnectorInfo;
import io.kafbat.ui.service.index.KafkaConnectNgramFilter;
import io.kafbat.ui.util.ReactiveFailover;
import jakarta.validation.Valid;
import java.util.List;
Expand Down Expand Up @@ -151,15 +152,16 @@ public Flux<FullConnectorInfoDTO> getAllConnectors(final KafkaCluster cluster,
.topics(tuple.getT4().getTopics())
.build())))
.map(kafkaConnectMapper::fullConnectorInfo)
.filter(matchesSearchTerm(search));
.collectList()
.map(lst -> filterConnectors(lst, search))
.flatMapMany(Flux::fromIterable);
}

private Predicate<FullConnectorInfoDTO> matchesSearchTerm(@Nullable final String search) {
if (search == null) {
return c -> true;
}
return connector -> getStringsForSearch(connector)
.anyMatch(string -> CI.contains(string, search));
/**
 * Narrows {@code connectors} to those matching {@code search}, using the
 * cluster-level FTS configuration (enabled flag and connect n-gram settings)
 * to decide how matching is performed.
 */
private List<FullConnectorInfoDTO> filterConnectors(List<FullConnectorInfoDTO> connectors, String search) {
  var ftsProperties = clustersProperties.getFts();
  var ngramFilter = new KafkaConnectNgramFilter(connectors, ftsProperties.isEnabled(), ftsProperties.getConnect());
  return ngramFilter.find(search);
}

private Stream<String> getStringsForSearch(FullConnectorInfoDTO fullConnectorInfo) {
Expand Down
18 changes: 16 additions & 2 deletions api/src/main/java/io/kafbat/ui/service/StatisticsCache.java
Original file line number Diff line number Diff line change
@@ -1,16 +1,20 @@
package io.kafbat.ui.service;

import io.kafbat.ui.config.ClustersProperties;
import io.kafbat.ui.model.InternalPartitionsOffsets;
import io.kafbat.ui.model.KafkaCluster;
import io.kafbat.ui.model.ServerStatusDTO;
import io.kafbat.ui.model.Statistics;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.admin.ConfigEntry;
import org.apache.kafka.clients.admin.TopicDescription;
import org.springframework.stereotype.Component;

@Slf4j
@Component
public class StatisticsCache {

Expand All @@ -28,12 +32,22 @@ public synchronized void replace(KafkaCluster c, Statistics stats) {
public synchronized void update(KafkaCluster c,
Map<String, TopicDescription> descriptions,
Map<String, List<ConfigEntry>> configs,
InternalPartitionsOffsets partitionsOffsets) {
InternalPartitionsOffsets partitionsOffsets,
ClustersProperties clustersProperties) {
var stats = get(c);
replace(
c,
stats.withClusterState(s -> s.updateTopics(descriptions, configs, partitionsOffsets))
stats.withClusterState(s ->
s.updateTopics(descriptions, configs, partitionsOffsets, clustersProperties)
)
);
try {
if (!stats.getStatus().equals(ServerStatusDTO.INITIALIZING)) {
stats.close();
}
} catch (Exception e) {
log.error("Error closing cluster {} stats", c.getName(), e);
}
}

public synchronized void onTopicDelete(KafkaCluster c, String topic) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import static io.kafbat.ui.service.ReactiveAdminClient.ClusterDescription;

import io.kafbat.ui.config.ClustersProperties;
import io.kafbat.ui.model.ClusterFeature;
import io.kafbat.ui.model.KafkaCluster;
import io.kafbat.ui.model.Metrics;
Expand All @@ -22,6 +23,7 @@ public class StatisticsService {
private final AdminClientService adminClientService;
private final FeatureService featureService;
private final StatisticsCache cache;
private final ClustersProperties clustersProperties;

public Mono<Statistics> updateCache(KafkaCluster c) {
return getStatistics(c).doOnSuccess(m -> cache.replace(c, m));
Expand Down Expand Up @@ -62,7 +64,7 @@ private Statistics createStats(ClusterDescription description,

private Mono<ScrapedClusterState> loadClusterState(ClusterDescription clusterDescription,
ReactiveAdminClient ac) {
return ScrapedClusterState.scrape(clusterDescription, ac);
return ScrapedClusterState.scrape(clusterDescription, ac, clustersProperties);
}

private Mono<Metrics> scrapeMetrics(KafkaCluster cluster,
Expand Down
Loading
Loading