diff --git a/api/src/main/java/io/kafbat/ui/config/ReadOnlyModeFilter.java b/api/src/main/java/io/kafbat/ui/config/ReadOnlyModeFilter.java index 184eb8725..81165e988 100644 --- a/api/src/main/java/io/kafbat/ui/config/ReadOnlyModeFilter.java +++ b/api/src/main/java/io/kafbat/ui/config/ReadOnlyModeFilter.java @@ -5,6 +5,7 @@ import io.kafbat.ui.service.ClustersStorage; import java.net.URLDecoder; import java.nio.charset.StandardCharsets; +import java.util.Set; import java.util.regex.Pattern; import lombok.RequiredArgsConstructor; import org.jetbrains.annotations.NotNull; @@ -23,6 +24,10 @@ public class ReadOnlyModeFilter implements WebFilter { private static final Pattern CLUSTER_NAME_REGEX = Pattern.compile("/api/clusters/(?<clusterName>[^/]++)"); + private static final Set<Pattern> SAFE_ENDPOINTS = Set.of( + Pattern.compile("/api/clusters/[^/]+/topics/[^/]+/(smartfilters)$") + ); + private final ClustersStorage clustersStorage; @NotNull @@ -35,10 +40,12 @@ public Mono<Void> filter(ServerWebExchange exchange, @NotNull WebFilterChain cha var path = exchange.getRequest().getPath().pathWithinApplication().value(); var decodedPath = URLDecoder.decode(path, StandardCharsets.UTF_8); + var matcher = CLUSTER_NAME_REGEX.matcher(decodedPath); if (!matcher.find()) { return chain.filter(exchange); } + var clusterName = matcher.group("clusterName"); var kafkaCluster = clustersStorage.getClusterByName(clusterName) .orElseThrow( @@ -49,6 +56,15 @@ public Mono<Void> filter(ServerWebExchange exchange, @NotNull WebFilterChain cha return chain.filter(exchange); } + var isSafeEndpoint = SAFE_ENDPOINTS + .stream() + .parallel() + .anyMatch(endpoint -> endpoint.matcher(decodedPath).matches()); + + if (isSafeEndpoint) { + return chain.filter(exchange); + } + return Mono.error(ReadOnlyModeException::new); } } diff --git a/api/src/main/java/io/kafbat/ui/service/KafkaConfigSanitizer.java b/api/src/main/java/io/kafbat/ui/service/KafkaConfigSanitizer.java index f2630175a..6cbf44e44 100644 --- 
a/api/src/main/java/io/kafbat/ui/service/KafkaConfigSanitizer.java +++ b/api/src/main/java/io/kafbat/ui/service/KafkaConfigSanitizer.java @@ -30,7 +30,8 @@ class KafkaConfigSanitizer { .add( "basic.auth.user.info", /* For Schema Registry credentials */ "password", "secret", "token", "key", ".*credentials.*", /* General credential patterns */ - "aws.access.*", "aws.secret.*", "aws.session.*" /* AWS-related credential patterns */ + "aws.access.*", "aws.secret.*", "aws.session.*", /* AWS-related credential patterns */ + "connection.uri" /* mongo credential patterns */ ) .build(); diff --git a/api/src/test/java/io/kafbat/ui/service/KafkaConfigSanitizerTest.java b/api/src/test/java/io/kafbat/ui/service/KafkaConfigSanitizerTest.java index e1cacaf1b..34f293220 100644 --- a/api/src/test/java/io/kafbat/ui/service/KafkaConfigSanitizerTest.java +++ b/api/src/test/java/io/kafbat/ui/service/KafkaConfigSanitizerTest.java @@ -34,6 +34,9 @@ void obfuscateCredentials() { assertThat(sanitizer.sanitize("aws.secret.access.key", "secret")).isEqualTo("******"); assertThat(sanitizer.sanitize("aws.secretAccessKey", "secret")).isEqualTo("******"); assertThat(sanitizer.sanitize("aws.sessionToken", "secret")).isEqualTo("******"); + + //Mongo var sanitizing + assertThat(sanitizer.sanitize("connection.uri", "secret")).isEqualTo("******"); } @Test