From bd2c6fdbb32df6a4c87e6340ba70ab95e4c0d192 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fran=C3=A7ois?= Date: Wed, 6 Nov 2024 13:54:36 +0100 Subject: [PATCH 01/84] Issues/300: rbac now supports regex for values --- .../extractor/OauthAuthorityExtractor.java | 4 +- .../ProviderAuthorityExtractorTest.java | 68 +++++++++++++++++++ api/src/test/resources/roles_definition.yaml | 34 ++++++++++ 3 files changed, 104 insertions(+), 2 deletions(-) create mode 100644 api/src/test/java/io/kafbat/ui/config/ProviderAuthorityExtractorTest.java create mode 100644 api/src/test/resources/roles_definition.yaml diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java index 6d14ab870..1712b6410 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java @@ -60,7 +60,7 @@ private Set extractUsernameRoles(AccessControlService acs, DefaultOAuth2 .filter(s -> s.getType().equals("user")) .peek(s -> log.trace("[{}] matches [{}]? [{}]", s.getValue(), principalName, s.getValue().equalsIgnoreCase(principalName))) - .anyMatch(s -> s.getValue().equalsIgnoreCase(principalName))) + .anyMatch(s -> principalName.matches(s.getValue()))) .map(Role::getName) .collect(Collectors.toSet()); @@ -96,7 +96,7 @@ private Set extractRoles(AccessControlService acs, DefaultOAuth2User pri .filter(s -> s.getType().equals("role")) .anyMatch(subject -> { var roleName = subject.getValue(); - return principalRoles.contains(roleName); + return principalRoles.stream().anyMatch(s -> s.matches(subject.getValue())); }) ) .map(Role::getName) diff --git a/api/src/test/java/io/kafbat/ui/config/ProviderAuthorityExtractorTest.java b/api/src/test/java/io/kafbat/ui/config/ProviderAuthorityExtractorTest.java new file mode 100644 index 000000000..7ce5af2a7 --- /dev/null +++ b/api/src/test/java/io/kafbat/ui/config/ProviderAuthorityExtractorTest.java @@ -0,0 +1,68 @@ +package io.kafbat.ui.config; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.when; + +import io.kafbat.ui.config.auth.OAuthProperties; +import io.kafbat.ui.model.rbac.Role; +import io.kafbat.ui.service.rbac.AccessControlService; +import io.kafbat.ui.service.rbac.extractor.OauthAuthorityExtractor; +import io.kafbat.ui.service.rbac.extractor.ProviderAuthorityExtractor; +import io.kafbat.ui.util.AccessControlServiceMock; +import java.io.InputStream; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import lombok.SneakyThrows; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.security.core.authority.AuthorityUtils; +import org.springframework.security.oauth2.core.user.DefaultOAuth2User; +import org.springframework.security.oauth2.core.user.OAuth2User; +import org.yaml.snakeyaml.Yaml; +import org.yaml.snakeyaml.introspector.BeanAccess; + +public class ProviderAuthorityExtractorTest { + + + private final AccessControlService accessControlService = new AccessControlServiceMock().getMock(); + Yaml yaml; + ProviderAuthorityExtractor extractor; + + @BeforeEach + void setUp() { + yaml = new Yaml(); + yaml.setBeanAccess(BeanAccess.FIELD); + extractor = new OauthAuthorityExtractor(); + + InputStream rolesFile = this.getClass() + .getClassLoader() + .getResourceAsStream("roles_definition.yaml"); + + Role[] 
roleArray = yaml.loadAs(rolesFile, Role[].class); + when(accessControlService.getRoles()).thenReturn(List.of(roleArray)); + + } + + @SneakyThrows + @Test + void ExtractAuthoritiesFromRegex() { + + OAuth2User oauth2User = new DefaultOAuth2User( + AuthorityUtils.createAuthorityList("SCOPE_message:read"), + Map.of("role_definition", Set.of("ROLE-ADMIN", "ANOTHER-ROLE"), "user_name", "john@kafka.com"), + "user_name"); + + HashMap additionalParams = new HashMap<>(); + OAuthProperties.OAuth2Provider oAuth2Provider = new OAuthProperties.OAuth2Provider(); + oAuth2Provider.setCustomParams(Map.of("roles-field", "role_definition")); + additionalParams.put("provider", oAuth2Provider); + + Set roles = extractor.extract(accessControlService, oauth2User, additionalParams).block(); + + assertEquals(Set.of("viewer", "admin"), roles); + + } + +} diff --git a/api/src/test/resources/roles_definition.yaml b/api/src/test/resources/roles_definition.yaml new file mode 100644 index 000000000..9428df22f --- /dev/null +++ b/api/src/test/resources/roles_definition.yaml @@ -0,0 +1,34 @@ +- name: 'admin' + subjects: + - provider: 'OAUTH' + value: 'ROLE-[A-Z]+' + type: 'role' + clusters: + - local + - remote + permissions: + - resource: APPLICATIONCONFIG + actions: [ all ] +- name: 'viewer' + subjects: + - provider: 'LDAP' + value: 'CS-XXX' + type: 'kafka-viewer' + - provider: 'OAUTH' + value: '.*@kafka.com' + type: 'user' + clusters: + - remote + permissions: + - resource: APPLICATIONCONFIG + actions: [ all ] +- name: 'editor' + subjects: + - provider: 'OAUTH' + value: 'ROLE_EDITOR' + type: 'role' + clusters: + - local + permissions: + - resource: APPLICATIONCONFIG + actions: [ all ] From 33b4a54c0c384daf10ba34cb47df0161ec53ffc6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fran=C3=A7ois?= Date: Wed, 6 Nov 2024 15:23:48 +0100 Subject: [PATCH 02/84] Issues/300: fix checkstyle --- .../kafbat/ui/config/ProviderAuthorityExtractorTest.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/api/src/test/java/io/kafbat/ui/config/ProviderAuthorityExtractorTest.java b/api/src/test/java/io/kafbat/ui/config/ProviderAuthorityExtractorTest.java index 7ce5af2a7..39a0e8b07 100644 --- a/api/src/test/java/io/kafbat/ui/config/ProviderAuthorityExtractorTest.java +++ b/api/src/test/java/io/kafbat/ui/config/ProviderAuthorityExtractorTest.java @@ -47,7 +47,7 @@ void setUp() { @SneakyThrows @Test - void ExtractAuthoritiesFromRegex() { + void extractAuthoritiesFromRegex() { OAuth2User oauth2User = new DefaultOAuth2User( AuthorityUtils.createAuthorityList("SCOPE_message:read"), @@ -55,9 +55,9 @@ void ExtractAuthoritiesFromRegex() { "user_name"); HashMap additionalParams = new HashMap<>(); - OAuthProperties.OAuth2Provider oAuth2Provider = new OAuthProperties.OAuth2Provider(); - oAuth2Provider.setCustomParams(Map.of("roles-field", "role_definition")); - additionalParams.put("provider", oAuth2Provider); + OAuthProperties.OAuth2Provider provider = new OAuthProperties.OAuth2Provider(); + provider.setCustomParams(Map.of("roles-field", "role_definition")); + additionalParams.put("provider", provider); Set roles = extractor.extract(accessControlService, oauth2User, additionalParams).block(); From 10d4a11e8a7a19f15b875ff5bc0cb51bd00406c4 Mon Sep 17 00:00:00 2001 From: Roman Zabaluev Date: Mon, 11 Nov 2024 19:18:27 +0400 Subject: [PATCH 03/84] Infra: Fix e2e compose (#655) --- documentation/compose/e2e-tests.yaml | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/documentation/compose/e2e-tests.yaml 
b/documentation/compose/e2e-tests.yaml index b8e746867..126c5aa29 100644 --- a/documentation/compose/e2e-tests.yaml +++ b/documentation/compose/e2e-tests.yaml @@ -29,7 +29,8 @@ services: KAFKA_CLUSTERS_0_KSQLDBSERVER: http://ksqldb:8088 kafka0: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.6.0 + user: "0:0" hostname: kafka0 container_name: kafka0 healthcheck: @@ -58,12 +59,10 @@ services: KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT' KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' - volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' schemaregistry0: - image: confluentinc/cp-schema-registry:7.2.1 + image: confluentinc/cp-schema-registry:7.6.0 ports: - 8085:8085 depends_on: @@ -88,7 +87,7 @@ services: build: context: ./kafka-connect args: - image: confluentinc/cp-kafka-connect:6.0.1 + image: confluentinc/cp-kafka-connect:7.6.0 ports: - 8083:8083 depends_on: @@ -122,7 +121,7 @@ services: # AWS_SECRET_ACCESS_KEY: "" kafka-init-topics: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.6.0 volumes: - ./data/message.json:/data/message.json depends_on: @@ -162,7 +161,7 @@ services: command: bash -c '/connectors/start.sh' ksqldb: - image: confluentinc/ksqldb-server:0.18.0 + image: confluentinc/cp-ksqldb-server:7.6.0 healthcheck: test: [ "CMD", "timeout", "1", "curl", "--silent", "--fail", "http://localhost:8088/info" ] interval: 30s From f15fc925364efb98b1d82b01223a153bac3b39a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fran=C3=A7ois?= Date: Tue, 12 Nov 2024 14:54:58 +0100 Subject: [PATCH 04/84] Issues/300: Other extractors handle regex as values. 
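
Editorial note (not part of the original commit message): a minimal, self-contained sketch of the matching strategy this series switches to. As in PATCH 01, a subject's value from the RBAC role definition is now treated as a regular expression and compared against the principal attribute with String.matches instead of equalsIgnoreCase. The Subject/Role records and the rolesFor helper below are simplified stand-ins for the project's model classes and extractor logic; the pattern and user name are illustrative only.

    import java.util.List;
    import java.util.Set;
    import java.util.stream.Collectors;

    class RegexSubjectMatchSketch {
      // simplified stand-ins for io.kafbat.ui.model.rbac.Role and its subjects
      record Subject(String type, String value) {}
      record Role(String name, List<Subject> subjects) {}

      static Set<String> rolesFor(String principalName, List<Role> roles) {
        return roles.stream()
            .filter(r -> r.subjects().stream()
                .filter(s -> s.type().equals("user"))
                // regex match: a value of ".*@kafka.com" now matches "john@kafka.com"
                .anyMatch(s -> principalName != null && principalName.matches(s.value())))
            .map(Role::name)
            .collect(Collectors.toSet());
      }

      public static void main(String[] args) {
        var viewer = new Role("viewer", List.of(new Subject("user", ".*@kafka.com")));
        System.out.println(rolesFor("john@kafka.com", List.of(viewer))); // prints [viewer]
      }
    }
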
--- .../extractor/CognitoAuthorityExtractor.java | 4 +- .../extractor/GithubAuthorityExtractor.java | 6 +- .../extractor/GoogleAuthorityExtractor.java | 7 +- .../extractor/OauthAuthorityExtractor.java | 8 +- .../ProviderAuthorityExtractorTest.java | 68 -------- ...exBasedProviderAuthorityExtractorTest.java | 159 ++++++++++++++++++ api/src/test/resources/roles_definition.yaml | 15 ++ 7 files changed, 186 insertions(+), 81 deletions(-) delete mode 100644 api/src/test/java/io/kafbat/ui/config/ProviderAuthorityExtractorTest.java create mode 100644 api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/CognitoAuthorityExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/CognitoAuthorityExtractor.java index a246b8910..e75666d83 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/CognitoAuthorityExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/CognitoAuthorityExtractor.java @@ -50,7 +50,7 @@ private Set extractUsernameRoles(AccessControlService acs, DefaultOAuth2 .stream() .filter(s -> s.getProvider().equals(Provider.OAUTH_COGNITO)) .filter(s -> s.getType().equals("user")) - .anyMatch(s -> s.getValue().equalsIgnoreCase(principal.getName()))) + .anyMatch(s -> principal.getName() != null && principal.getName().matches(s.getValue()))) .map(Role::getName) .collect(Collectors.toSet()); @@ -76,7 +76,7 @@ private Set extractGroupRoles(AccessControlService acs, DefaultOAuth2Use .filter(s -> s.getType().equals("group")) .anyMatch(subject -> groups .stream() - .anyMatch(cognitoGroup -> cognitoGroup.equalsIgnoreCase(subject.getValue())) + .anyMatch(cognitoGroup -> cognitoGroup.matches(subject.getValue())) )) .map(Role::getName) .collect(Collectors.toSet()); diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GithubAuthorityExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GithubAuthorityExtractor.java index b50e76a16..f08e266d3 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GithubAuthorityExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GithubAuthorityExtractor.java @@ -90,7 +90,7 @@ private Set extractUsernameRoles(DefaultOAuth2User principal, AccessCont .stream() .filter(s -> s.getProvider().equals(Provider.OAUTH_GITHUB)) .filter(s -> s.getType().equals("user")) - .anyMatch(s -> s.getValue().equals(username))) + .anyMatch(s -> username.matches(s.getValue()))) .map(Role::getName) .collect(Collectors.toSet()); @@ -131,7 +131,7 @@ private Mono> getOrganizationRoles(DefaultOAuth2User principal, Map< .filter(s -> s.getType().equals(ORGANIZATION)) .anyMatch(subject -> orgsMap.stream() .map(org -> org.get(ORGANIZATION_NAME).toString()) - .anyMatch(orgName -> orgName.equalsIgnoreCase(subject.getValue())) + .anyMatch(orgName -> orgName.matches(subject.getValue())) )) .map(Role::getName) .collect(Collectors.toSet())); @@ -189,7 +189,7 @@ private Mono> getTeamRoles(WebClient webClient, Map .filter(s -> s.getProvider().equals(Provider.OAUTH_GITHUB)) .filter(s -> s.getType().equals("team")) .anyMatch(subject -> teams.stream() - .anyMatch(teamName -> teamName.equalsIgnoreCase(subject.getValue())) + .anyMatch(teamName -> teamName.matches(subject.getValue())) )) .map(Role::getName) .collect(Collectors.toSet())); diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GoogleAuthorityExtractor.java 
b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GoogleAuthorityExtractor.java index 8ea6d2108..c323e7ffd 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GoogleAuthorityExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GoogleAuthorityExtractor.java @@ -50,7 +50,10 @@ private Set extractUsernameRoles(AccessControlService acs, DefaultOAuth2 .stream() .filter(s -> s.getProvider().equals(Provider.OAUTH_GOOGLE)) .filter(s -> s.getType().equals("user")) - .anyMatch(s -> s.getValue().equalsIgnoreCase(principal.getAttribute(EMAIL_ATTRIBUTE_NAME)))) + .anyMatch(s -> { + String email = principal.getAttribute(EMAIL_ATTRIBUTE_NAME); + return email != null && email.matches(s.getValue()); + })) .map(Role::getName) .collect(Collectors.toSet()); } @@ -68,7 +71,7 @@ private Set extractDomainRoles(AccessControlService acs, DefaultOAuth2Us .stream() .filter(s -> s.getProvider().equals(Provider.OAUTH_GOOGLE)) .filter(s -> s.getType().equals("domain")) - .anyMatch(s -> s.getValue().equals(domain))) + .anyMatch(s -> domain.matches(s.getValue()))) .map(Role::getName) .collect(Collectors.toSet()); } diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java index 1712b6410..7bf19c61a 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java @@ -60,7 +60,7 @@ private Set extractUsernameRoles(AccessControlService acs, DefaultOAuth2 .filter(s -> s.getType().equals("user")) .peek(s -> log.trace("[{}] matches [{}]? [{}]", s.getValue(), principalName, s.getValue().equalsIgnoreCase(principalName))) - .anyMatch(s -> principalName.matches(s.getValue()))) + .anyMatch(s -> principalName != null && principalName.matches(s.getValue()))) .map(Role::getName) .collect(Collectors.toSet()); @@ -94,11 +94,7 @@ private Set extractRoles(AccessControlService acs, DefaultOAuth2User pri .stream() .filter(s -> s.getProvider().equals(Provider.OAUTH)) .filter(s -> s.getType().equals("role")) - .anyMatch(subject -> { - var roleName = subject.getValue(); - return principalRoles.stream().anyMatch(s -> s.matches(subject.getValue())); - }) - ) + .anyMatch(subject -> principalRoles.stream().anyMatch(s -> s.matches(subject.getValue())))) .map(Role::getName) .collect(Collectors.toSet()); diff --git a/api/src/test/java/io/kafbat/ui/config/ProviderAuthorityExtractorTest.java b/api/src/test/java/io/kafbat/ui/config/ProviderAuthorityExtractorTest.java deleted file mode 100644 index 39a0e8b07..000000000 --- a/api/src/test/java/io/kafbat/ui/config/ProviderAuthorityExtractorTest.java +++ /dev/null @@ -1,68 +0,0 @@ -package io.kafbat.ui.config; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.when; - -import io.kafbat.ui.config.auth.OAuthProperties; -import io.kafbat.ui.model.rbac.Role; -import io.kafbat.ui.service.rbac.AccessControlService; -import io.kafbat.ui.service.rbac.extractor.OauthAuthorityExtractor; -import io.kafbat.ui.service.rbac.extractor.ProviderAuthorityExtractor; -import io.kafbat.ui.util.AccessControlServiceMock; -import java.io.InputStream; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Set; -import lombok.SneakyThrows; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; -import 
org.springframework.security.core.authority.AuthorityUtils; -import org.springframework.security.oauth2.core.user.DefaultOAuth2User; -import org.springframework.security.oauth2.core.user.OAuth2User; -import org.yaml.snakeyaml.Yaml; -import org.yaml.snakeyaml.introspector.BeanAccess; - -public class ProviderAuthorityExtractorTest { - - - private final AccessControlService accessControlService = new AccessControlServiceMock().getMock(); - Yaml yaml; - ProviderAuthorityExtractor extractor; - - @BeforeEach - void setUp() { - yaml = new Yaml(); - yaml.setBeanAccess(BeanAccess.FIELD); - extractor = new OauthAuthorityExtractor(); - - InputStream rolesFile = this.getClass() - .getClassLoader() - .getResourceAsStream("roles_definition.yaml"); - - Role[] roleArray = yaml.loadAs(rolesFile, Role[].class); - when(accessControlService.getRoles()).thenReturn(List.of(roleArray)); - - } - - @SneakyThrows - @Test - void extractAuthoritiesFromRegex() { - - OAuth2User oauth2User = new DefaultOAuth2User( - AuthorityUtils.createAuthorityList("SCOPE_message:read"), - Map.of("role_definition", Set.of("ROLE-ADMIN", "ANOTHER-ROLE"), "user_name", "john@kafka.com"), - "user_name"); - - HashMap additionalParams = new HashMap<>(); - OAuthProperties.OAuth2Provider provider = new OAuthProperties.OAuth2Provider(); - provider.setCustomParams(Map.of("roles-field", "role_definition")); - additionalParams.put("provider", provider); - - Set roles = extractor.extract(accessControlService, oauth2User, additionalParams).block(); - - assertEquals(Set.of("viewer", "admin"), roles); - - } - -} diff --git a/api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java b/api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java new file mode 100644 index 000000000..7eb8c8bf1 --- /dev/null +++ b/api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java @@ -0,0 +1,159 @@ +package io.kafbat.ui.config; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.when; +import static org.springframework.security.oauth2.client.registration.ClientRegistration.withRegistrationId; + +import io.kafbat.ui.config.auth.OAuthProperties; +import io.kafbat.ui.model.rbac.Role; +import io.kafbat.ui.service.rbac.AccessControlService; +import io.kafbat.ui.service.rbac.extractor.CognitoAuthorityExtractor; +import io.kafbat.ui.service.rbac.extractor.GithubAuthorityExtractor; +import io.kafbat.ui.service.rbac.extractor.GoogleAuthorityExtractor; +import io.kafbat.ui.service.rbac.extractor.OauthAuthorityExtractor; +import io.kafbat.ui.service.rbac.extractor.ProviderAuthorityExtractor; +import io.kafbat.ui.util.AccessControlServiceMock; +import java.io.InputStream; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import lombok.SneakyThrows; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.security.core.authority.AuthorityUtils; +import org.springframework.security.oauth2.client.userinfo.OAuth2UserRequest; +import org.springframework.security.oauth2.core.AuthorizationGrantType; +import org.springframework.security.oauth2.core.OAuth2AccessToken; +import org.springframework.security.oauth2.core.user.DefaultOAuth2User; +import org.springframework.security.oauth2.core.user.OAuth2User; +import org.yaml.snakeyaml.Yaml; +import org.yaml.snakeyaml.introspector.BeanAccess; + +public class 
RegexBasedProviderAuthorityExtractorTest { + + + private final AccessControlService accessControlService = new AccessControlServiceMock().getMock(); + Yaml yaml; + ProviderAuthorityExtractor extractor; + + @BeforeEach + void setUp() { + yaml = new Yaml(); + yaml.setBeanAccess(BeanAccess.FIELD); + + InputStream rolesFile = this.getClass() + .getClassLoader() + .getResourceAsStream("roles_definition.yaml"); + + Role[] roleArray = yaml.loadAs(rolesFile, Role[].class); + when(accessControlService.getRoles()).thenReturn(List.of(roleArray)); + + } + + @SneakyThrows + @Test + void extractOauth2Authorities() { + + extractor = new OauthAuthorityExtractor(); + + OAuth2User oauth2User = new DefaultOAuth2User( + AuthorityUtils.createAuthorityList("SCOPE_message:read"), + Map.of("role_definition", Set.of("ROLE-ADMIN", "ANOTHER-ROLE"), "user_name", "john@kafka.com"), + "user_name"); + + HashMap additionalParams = new HashMap<>(); + OAuthProperties.OAuth2Provider provider = new OAuthProperties.OAuth2Provider(); + provider.setCustomParams(Map.of("roles-field", "role_definition")); + additionalParams.put("provider", provider); + + Set roles = extractor.extract(accessControlService, oauth2User, additionalParams).block(); + + assertEquals(Set.of("viewer", "admin"), roles); + + } + + @SneakyThrows + @Test + void extractCognitoAuthorities() { + + extractor = new CognitoAuthorityExtractor(); + + OAuth2User oauth2User = new DefaultOAuth2User( + AuthorityUtils.createAuthorityList("SCOPE_message:read"), + Map.of("cognito:groups", List.of("ROLE-ADMIN", "ANOTHER-ROLE"), "user_name", "john@kafka.com"), + "user_name"); + + HashMap additionalParams = new HashMap<>(); + + OAuthProperties.OAuth2Provider provider = new OAuthProperties.OAuth2Provider(); + provider.setCustomParams(Map.of("roles-field", "role_definition")); + additionalParams.put("provider", provider); + + Set roles = extractor.extract(accessControlService, oauth2User, additionalParams).block(); + + assertEquals(Set.of("viewer", "admin"), roles); + + } + + @SneakyThrows + @Test + void extractGithubAuthorities() { + + extractor = new GithubAuthorityExtractor(); + + OAuth2User oauth2User = new DefaultOAuth2User( + AuthorityUtils.createAuthorityList("SCOPE_message:read"), + Map.of("login", "john@kafka.com"), + "login"); + + HashMap additionalParams = new HashMap<>(); + + OAuthProperties.OAuth2Provider provider = new OAuthProperties.OAuth2Provider(); + additionalParams.put("provider", provider); + + additionalParams.put("request", new OAuth2UserRequest( + withRegistrationId("registration-1") + .clientId("client-1") + .clientSecret("secret") + .redirectUri("https://client.com") + .authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE) + .authorizationUri("https://provider.com/oauth2/authorization") + .tokenUri("https://provider.com/oauth2/token") + .clientName("Client 1") + .build(), + new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "XXXX", Instant.now(), + Instant.now().plus(10, ChronoUnit.HOURS)))); + + Set roles = extractor.extract(accessControlService, oauth2User, additionalParams).block(); + + assertEquals(Set.of("viewer"), roles); + + } + + @SneakyThrows + @Test + void extractGoogleAuthorities() { + + extractor = new GoogleAuthorityExtractor(); + + OAuth2User oauth2User = new DefaultOAuth2User( + AuthorityUtils.createAuthorityList("SCOPE_message:read"), + Map.of("hd", "test.domain.com", "email", "john@kafka.com"), + "email"); + + HashMap additionalParams = new HashMap<>(); + + OAuthProperties.OAuth2Provider provider = new 
OAuthProperties.OAuth2Provider(); + provider.setCustomParams(Map.of("roles-field", "role_definition")); + additionalParams.put("provider", provider); + + Set roles = extractor.extract(accessControlService, oauth2User, additionalParams).block(); + + assertEquals(Set.of("viewer", "admin"), roles); + + } + +} diff --git a/api/src/test/resources/roles_definition.yaml b/api/src/test/resources/roles_definition.yaml index 9428df22f..25e22b8a1 100644 --- a/api/src/test/resources/roles_definition.yaml +++ b/api/src/test/resources/roles_definition.yaml @@ -3,6 +3,12 @@ - provider: 'OAUTH' value: 'ROLE-[A-Z]+' type: 'role' + - provider: 'OAUTH_COGNITO' + value: 'ROLE-[A-Z]+' + type: 'group' + - provider: 'OAUTH_GOOGLE' + value: '.*.domain.com' + type: 'domain' clusters: - local - remote @@ -17,6 +23,15 @@ - provider: 'OAUTH' value: '.*@kafka.com' type: 'user' + - provider: 'OAUTH_COGNITO' + value: '.*@kafka.com' + type: 'user' + - provider: 'OAUTH_GITHUB' + value: '.*@kafka.com' + type: 'user' + - provider: 'OAUTH_GOOGLE' + value: '.*@kafka.com' + type: 'user' clusters: - remote permissions: From 1c47f7c097c88c31cd1fcfdb7141a0c49b90c361 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fran=C3=A7ois?= Date: Tue, 12 Nov 2024 15:52:54 +0100 Subject: [PATCH 05/84] Issues/300: Log message now reflects the new matching strategy --- .../ui/service/rbac/extractor/OauthAuthorityExtractor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java index 7bf19c61a..8812301a1 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java @@ -59,7 +59,7 @@ private Set extractUsernameRoles(AccessControlService acs, DefaultOAuth2 .filter(s -> s.getProvider().equals(Provider.OAUTH)) .filter(s -> s.getType().equals("user")) .peek(s -> log.trace("[{}] matches [{}]? 
[{}]", s.getValue(), principalName, - s.getValue().equalsIgnoreCase(principalName))) + principalName != null && principalName.matches(s.getValue()))) .anyMatch(s -> principalName != null && principalName.matches(s.getValue()))) .map(Role::getName) .collect(Collectors.toSet()); From 5e510f98947c620a6c0333a8b6724b1d92838bd8 Mon Sep 17 00:00:00 2001 From: Yeikel Date: Fri, 15 Nov 2024 05:09:10 -0500 Subject: [PATCH 06/84] Chore: Deps: Use latest 17 jre image (#639) Co-authored-by: Roman Zabaluev --- .github/dependabot.yml | 17 +++++++++++++++++ api/Dockerfile | 5 ++++- 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 17e8484c9..ba0fe794a 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -14,6 +14,23 @@ updates: - "type/dependencies" - "scope/backend" +- package-ecosystem: docker + directory: "/api" + schedule: + interval: weekly + time: "10:00" + timezone: Europe/London + reviewers: + - "kafbat/backend" + open-pull-requests-limit: 10 + ignore: + - dependency-name: "azul/zulu-openjdk-alpine" + # Limit dependabot pull requests to minor Java upgrades + update-types: ["version-update:semver-major"] + labels: + - "type/dependencies" + - "scope/backend" + - package-ecosystem: npm directory: "/frontend" schedule: diff --git a/api/Dockerfile b/api/Dockerfile index 9e1173250..82ac8c023 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,4 +1,7 @@ -FROM azul/zulu-openjdk-alpine:17.0.11-jre-headless +# The tag is ignored when a sha is included but the reason to add it are: +# 1. Self Documentation: It is difficult to find out what the expected tag is given a sha alone +# 2. Helps dependabot during discovery of upgrades +FROM azul/zulu-openjdk-alpine:17-jre-headless-latest@sha256:af4df00adaec356d092651af50d9e80fd179f96722d267e79acb564aede10fda RUN apk add --no-cache \ # snappy codec From 8f9c63480a0f30d23345e3bfd060a398cc054c92 Mon Sep 17 00:00:00 2001 From: Dmitry Werner Date: Sun, 17 Nov 2024 15:09:27 +0500 Subject: [PATCH 07/84] BE: Set default message polling mode (#649) Co-authored-by: Roman Zabaluev --- .../main/java/io/kafbat/ui/controller/MessagesController.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/api/src/main/java/io/kafbat/ui/controller/MessagesController.java b/api/src/main/java/io/kafbat/ui/controller/MessagesController.java index 62189b04a..b88e2d566 100644 --- a/api/src/main/java/io/kafbat/ui/controller/MessagesController.java +++ b/api/src/main/java/io/kafbat/ui/controller/MessagesController.java @@ -118,10 +118,11 @@ public Mono>> getTopicMessagesV2(Strin if (cursor != null) { messagesFlux = messagesService.loadMessages(getCluster(clusterName), topicName, cursor); } else { + var pollingMode = mode == null ? 
PollingModeDTO.LATEST : mode; messagesFlux = messagesService.loadMessages( getCluster(clusterName), topicName, - ConsumerPosition.create(checkNotNull(mode), checkNotNull(topicName), partitions, timestamp, offset), + ConsumerPosition.create(pollingMode, checkNotNull(topicName), partitions, timestamp, offset), stringFilter, smartFilterId, limit, From 01aa8ab36387c5f1d66d098e71488bfb0eb5f39c Mon Sep 17 00:00:00 2001 From: AkashDeep <57340046+AkashDeepSinghJassal@users.noreply.github.com> Date: Mon, 18 Nov 2024 00:03:01 +0530 Subject: [PATCH 08/84] BE: Support sending null headers (#651) Co-authored-by: Roman Zabaluev --- .../ui/serdes/ProducerRecordCreator.java | 2 +- .../kafbat/ui/service/SendAndReadTests.java | 20 +++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/api/src/main/java/io/kafbat/ui/serdes/ProducerRecordCreator.java b/api/src/main/java/io/kafbat/ui/serdes/ProducerRecordCreator.java index 359c871d6..e7abbad90 100644 --- a/api/src/main/java/io/kafbat/ui/serdes/ProducerRecordCreator.java +++ b/api/src/main/java/io/kafbat/ui/serdes/ProducerRecordCreator.java @@ -31,7 +31,7 @@ public ProducerRecord create(String topic, private Iterable
createHeaders(Map clientHeaders) { RecordHeaders headers = new RecordHeaders(); - clientHeaders.forEach((k, v) -> headers.add(new RecordHeader(k, v.getBytes()))); + clientHeaders.forEach((k, v) -> headers.add(new RecordHeader(k, v == null ? null : v.getBytes()))); return headers; } diff --git a/api/src/test/java/io/kafbat/ui/service/SendAndReadTests.java b/api/src/test/java/io/kafbat/ui/service/SendAndReadTests.java index cdf304509..fac5a5f59 100644 --- a/api/src/test/java/io/kafbat/ui/service/SendAndReadTests.java +++ b/api/src/test/java/io/kafbat/ui/service/SendAndReadTests.java @@ -19,6 +19,7 @@ import io.kafbat.ui.serdes.builtin.StringSerde; import io.kafbat.ui.serdes.builtin.sr.SchemaRegistrySerde; import java.time.Duration; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; @@ -425,6 +426,25 @@ void topicMessageMetadataJson() { }); } + @Test + void headerValueNullPresentTest() { + new SendAndReadSpec() + .withKeySchema(JSON_SCHEMA) + .withValueSchema(JSON_SCHEMA) + .withMsgToSend( + new CreateTopicMessageDTO() + .key(JSON_SCHEMA_RECORD) + .keySerde(SchemaRegistrySerde.name()) + .content(JSON_SCHEMA_RECORD) + .valueSerde(SchemaRegistrySerde.name()) + .headers(Collections.singletonMap("header123", null)) + ) + .doAssert(polled -> { + assertThat(polled.getHeaders().get("header123")).isNull(); + }); + } + + @Test void noKeyAndNoContentPresentTest() { new SendAndReadSpec() From c138f8039fcc8ae82f288e16affc74fbf60487cf Mon Sep 17 00:00:00 2001 From: Anton Patsev <10828883+patsevanton@users.noreply.github.com> Date: Sun, 24 Nov 2024 13:47:11 +0600 Subject: [PATCH 09/84] Docs: Fix typo (#670) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2b21a0b99..0f6e2fd73 100644 --- a/README.md +++ b/README.md @@ -50,7 +50,7 @@ We extend our gratitude to Provectus for their past support in groundbreaking wo * **View Consumer Groups** — view per-partition parked offsets, combined and per-partition lag * **Browse Messages** — browse messages with JSON, plain text, and Avro encoding * **Dynamic Topic Configuration** — create and configure new topics with dynamic configuration -* **Configurable Authentification** — [secure](https://ui.docs.kafbat.io/configuration/authentication) your installation with optional Github/Gitlab/Google OAuth 2.0 +* **Configurable Authentication** — [secure](https://ui.docs.kafbat.io/configuration/authentication) your installation with optional Github/Gitlab/Google OAuth 2.0 * **Custom serialization/deserialization plugins** - [use](https://ui.docs.kafbat.io/configuration/serialization-serde) a ready-to-go serde for your data like AWS Glue or Smile, or code your own! 
* **Role based access control** - [manage permissions](https://ui.docs.kafbat.io/configuration/rbac-role-based-access-control) to access the UI with granular precision * **Data masking** - [obfuscate](https://ui.docs.kafbat.io/configuration/data-masking) sensitive data in topic messages From 9c028edcafb4aa1e94c21e9d554951e55035009f Mon Sep 17 00:00:00 2001 From: Shubham Pisal <90398735+pisal-shubham@users.noreply.github.com> Date: Sun, 24 Nov 2024 13:51:48 +0530 Subject: [PATCH 10/84] FE: Filters: Display CEL help when editing filter (#646) Co-authored-by: Roman Zabaluev --- .../Messages/Filters/AddEditFilterContainer.tsx | 2 +- .../Topics/Topic/Messages/Filters/Filters.styled.ts | 2 +- .../__tests__/AddEditFilterContainer.spec.tsx | 12 ++++++++++++ 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/frontend/src/components/Topics/Topic/Messages/Filters/AddEditFilterContainer.tsx b/frontend/src/components/Topics/Topic/Messages/Filters/AddEditFilterContainer.tsx index 4054ca51c..c05602990 100644 --- a/frontend/src/components/Topics/Topic/Messages/Filters/AddEditFilterContainer.tsx +++ b/frontend/src/components/Topics/Topic/Messages/Filters/AddEditFilterContainer.tsx @@ -196,7 +196,7 @@ const AddEditFilterContainer: React.FC = ({ - {!isEdit && } + \n" - + " \n"; - } - - private static String csrfToken(CsrfToken token) { - return " \n"; - } - - private static String createError(boolean isError) { - return isError - ? "
<div class=\"alert alert-danger\" role=\"alert\">Invalid credentials</div>
" - : ""; - } - - private static String createLogoutSuccess(boolean isLogoutSuccess) { - return isLogoutSuccess - ? "
<div class=\"alert alert-success\" role=\"alert\">You have been signed out</div>
" - : ""; - } -} diff --git a/api/src/main/java/io/kafbat/ui/controller/AuthenticationController.java b/api/src/main/java/io/kafbat/ui/controller/AuthenticationController.java new file mode 100644 index 000000000..c94c344c9 --- /dev/null +++ b/api/src/main/java/io/kafbat/ui/controller/AuthenticationController.java @@ -0,0 +1,22 @@ +package io.kafbat.ui.controller; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.core.io.ClassPathResource; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RestController; +import reactor.core.publisher.Mono; + +@RestController +@RequiredArgsConstructor +@Slf4j +public class AuthenticationController { + + private static final String INDEX_HTML = "/static/index.html"; + + @GetMapping(value = "/login", produces = {"text/html"}) + public Mono getLoginPage() { + return Mono.just(new ClassPathResource(INDEX_HTML)); + } + +} diff --git a/api/src/main/java/io/kafbat/ui/controller/AccessController.java b/api/src/main/java/io/kafbat/ui/controller/AuthorizationController.java similarity index 97% rename from api/src/main/java/io/kafbat/ui/controller/AccessController.java rename to api/src/main/java/io/kafbat/ui/controller/AuthorizationController.java index e5b1ea438..1ac0aeb85 100644 --- a/api/src/main/java/io/kafbat/ui/controller/AccessController.java +++ b/api/src/main/java/io/kafbat/ui/controller/AuthorizationController.java @@ -26,7 +26,7 @@ @RestController @RequiredArgsConstructor @Slf4j -public class AccessController implements AuthorizationApi { +public class AuthorizationController implements AuthorizationApi { private final AccessControlService accessControlService; diff --git a/api/src/main/java/io/kafbat/ui/service/ApplicationInfoService.java b/api/src/main/java/io/kafbat/ui/service/ApplicationInfoService.java index 7d380036c..7ee28b62d 100644 --- a/api/src/main/java/io/kafbat/ui/service/ApplicationInfoService.java +++ b/api/src/main/java/io/kafbat/ui/service/ApplicationInfoService.java @@ -1,16 +1,23 @@ package io.kafbat.ui.service; +import static io.kafbat.ui.api.model.AuthType.DISABLED; +import static io.kafbat.ui.api.model.AuthType.OAUTH2; import static io.kafbat.ui.model.ApplicationInfoDTO.EnabledFeaturesEnum; import static io.kafbat.ui.util.GithubReleaseInfo.GITHUB_RELEASE_INFO_TIMEOUT; import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.Streams; +import io.kafbat.ui.model.AppAuthenticationSettingsDTO; import io.kafbat.ui.model.ApplicationInfoBuildDTO; import io.kafbat.ui.model.ApplicationInfoDTO; import io.kafbat.ui.model.ApplicationInfoLatestReleaseDTO; +import io.kafbat.ui.model.AuthTypeDTO; +import io.kafbat.ui.model.OAuthProviderDTO; import io.kafbat.ui.util.DynamicConfigOperations; import io.kafbat.ui.util.GithubReleaseInfo; import java.time.format.DateTimeFormatter; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.Properties; @@ -18,20 +25,27 @@ import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.info.BuildProperties; import org.springframework.boot.info.GitProperties; +import org.springframework.context.ApplicationContext; +import org.springframework.core.ResolvableType; import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.security.oauth2.client.registration.ClientRegistration; +import 
org.springframework.security.oauth2.core.AuthorizationGrantType; import org.springframework.stereotype.Service; @Service public class ApplicationInfoService { private final GithubReleaseInfo githubReleaseInfo; + private final ApplicationContext applicationContext; private final DynamicConfigOperations dynamicConfigOperations; private final BuildProperties buildProperties; private final GitProperties gitProperties; public ApplicationInfoService(DynamicConfigOperations dynamicConfigOperations, + ApplicationContext applicationContext, @Autowired(required = false) BuildProperties buildProperties, @Autowired(required = false) GitProperties gitProperties, @Value("${" + GITHUB_RELEASE_INFO_TIMEOUT + ":10}") int githubApiMaxWaitTime) { + this.applicationContext = applicationContext; this.dynamicConfigOperations = dynamicConfigOperations; this.buildProperties = Optional.ofNullable(buildProperties).orElse(new BuildProperties(new Properties())); this.gitProperties = Optional.ofNullable(gitProperties).orElse(new GitProperties(new Properties())); @@ -70,6 +84,38 @@ private List getEnabledFeatures() { return enabledFeatures; } + public AppAuthenticationSettingsDTO getAuthenticationProperties() { + return new AppAuthenticationSettingsDTO() + .authType(AuthTypeDTO.fromValue(getAuthType())) + .oAuthProviders(getOAuthProviders()); + } + + private String getAuthType() { + return Optional.ofNullable(applicationContext.getEnvironment().getProperty("auth.type")) + .orElse(DISABLED.getValue()); + } + + @SuppressWarnings("unchecked") + private List getOAuthProviders() { + if (!getAuthType().equalsIgnoreCase(OAUTH2.getValue())) { + return Collections.emptyList(); + } + var type = ResolvableType.forClassWithGenerics(Iterable.class, ClientRegistration.class); + String[] names = this.applicationContext.getBeanNamesForType(type); + var bean = (Iterable) (names.length == 1 ? 
this.applicationContext.getBean(names[0]) : null); + + if (bean == null) { + return Collections.emptyList(); + } + + return Streams.stream(bean.iterator()) + .filter(r -> AuthorizationGrantType.AUTHORIZATION_CODE.equals(r.getAuthorizationGrantType())) + .map(r -> new OAuthProviderDTO() + .clientName(r.getClientName()) + .authorizationUri("/oauth2/authorization/" + r.getRegistrationId())) + .toList(); + } + // updating on startup and every hour @Scheduled(fixedRateString = "${github-release-info-update-rate:3600000}") public void updateGithubReleaseInfo() { diff --git a/api/src/main/java/io/kafbat/ui/util/StaticFileWebFilter.java b/api/src/main/java/io/kafbat/ui/util/StaticFileWebFilter.java new file mode 100644 index 000000000..1b74bd374 --- /dev/null +++ b/api/src/main/java/io/kafbat/ui/util/StaticFileWebFilter.java @@ -0,0 +1,61 @@ +package io.kafbat.ui.util; + +import java.io.IOException; +import org.jetbrains.annotations.NotNull; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.buffer.DataBufferFactory; +import org.springframework.http.HttpMethod; +import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; +import org.springframework.http.server.reactive.ServerHttpResponse; +import org.springframework.security.web.server.util.matcher.ServerWebExchangeMatcher; +import org.springframework.security.web.server.util.matcher.ServerWebExchangeMatchers; +import org.springframework.web.server.ServerWebExchange; +import org.springframework.web.server.WebFilter; +import org.springframework.web.server.WebFilterChain; +import reactor.core.publisher.Mono; + +public class StaticFileWebFilter implements WebFilter { + + private static final String INDEX_HTML = "/static/index.html"; + + private final ServerWebExchangeMatcher matcher; + private final String contents; + + public StaticFileWebFilter() { + this("/login", new ClassPathResource(INDEX_HTML)); + } + + public StaticFileWebFilter(String path, ClassPathResource resource) { + this.matcher = ServerWebExchangeMatchers.pathMatchers(HttpMethod.GET, path); + + try { + this.contents = ResourceUtil.readAsString(resource); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + @Override + public @NotNull Mono filter(@NotNull ServerWebExchange exchange, WebFilterChain chain) { + return this.matcher.matches(exchange) + .filter(ServerWebExchangeMatcher.MatchResult::isMatch) + .switchIfEmpty(chain.filter(exchange).then(Mono.empty())) + .flatMap((matchResult) -> this.render(exchange)); + } + + private Mono render(ServerWebExchange exchange) { + String contextPath = exchange.getRequest().getPath().contextPath().value(); + + String contentBody = contents + .replace("\"assets/", "\"" + contextPath + "/assets/") + .replace("PUBLIC-PATH-VARIABLE", contextPath); + + ServerHttpResponse result = exchange.getResponse(); + result.setStatusCode(HttpStatus.OK); + result.getHeaders().setContentType(MediaType.TEXT_HTML); + DataBufferFactory bufferFactory = exchange.getResponse().bufferFactory(); + return result.writeWith(Mono.just(bufferFactory.wrap(contentBody.getBytes()))); + } + +} diff --git a/contract/src/main/resources/swagger/kafbat-ui-api.yaml b/contract/src/main/resources/swagger/kafbat-ui-api.yaml index 04cc17514..dff80b4ce 100644 --- a/contract/src/main/resources/swagger/kafbat-ui-api.yaml +++ b/contract/src/main/resources/swagger/kafbat-ui-api.yaml @@ -31,7 +31,6 @@ paths: items: $ref: '#/components/schemas/Cluster' - /api/clusters/{clusterName}/cache: post: tags: @@ -54,7 
+53,6 @@ paths: 404: description: Not found - /api/clusters/{clusterName}/brokers: get: tags: @@ -432,7 +430,6 @@ paths: 404: description: Not found - /api/clusters/{clusterName}/topics/{topicName}: get: tags: @@ -2150,7 +2147,7 @@ paths: get: tags: - Authorization - summary: Get user authentication related info + summary: Get user authorization related info operationId: getUserAuthInfo responses: 200: @@ -2220,7 +2217,6 @@ paths: schema: $ref: '#/components/schemas/ApplicationConfigValidation' - /api/config/relatedfiles: post: tags: @@ -2244,6 +2240,40 @@ paths: schema: $ref: '#/components/schemas/UploadedFileInfo' + /api/config/authentication: + get: + tags: + - ApplicationConfig + summary: Get authentication methods enabled for the app and other related settings + operationId: getAuthenticationSettings + responses: + 200: + description: OK + content: + application/json: + schema: + $ref: '#/components/schemas/AppAuthenticationSettings' + + /login: + post: + summary: Authenticate + requestBody: + required: true + content: + application/x-www-form-urlencoded: + schema: + type: object + properties: + username: + type: string + password: + type: string + responses: + '200': + description: OK + '401': + description: Unauthorized + components: schemas: TopicSerdeSuggestion: @@ -2354,6 +2384,32 @@ components: htmlUrl: type: string + AppAuthenticationSettings: + type: object + properties: + authType: + $ref: '#/components/schemas/AuthType' + oAuthProviders: + type: array + items: + $ref: '#/components/schemas/OAuthProvider' + + OAuthProvider: + type: object + properties: + clientName: + type: string + authorizationUri: + type: string + + AuthType: + type: string + enum: + - DISABLED + - OAUTH2 + - LOGIN_FORM + - LDAP + Cluster: type: object properties: From 21edbf7f3227a77a71c77396f4fa7730582ef0a7 Mon Sep 17 00:00:00 2001 From: Renat Kalimulin <103274228+Nilumilak@users.noreply.github.com> Date: Sat, 28 Dec 2024 07:19:29 +0300 Subject: [PATCH 30/84] FE: Impl custom auth page (#402) Co-authored-by: Roman Zabaluev --- .../main/resources/swagger/kafbat-ui-api.yaml | 3 + frontend/public/serviceImage.png | Bin 0 -> 7361 bytes frontend/src/components/App.tsx | 94 ++++++++-------- .../components/AuthPage/AuthPage.styled.tsx | 14 +++ frontend/src/components/AuthPage/AuthPage.tsx | 21 ++++ .../AuthPage/Header/Header.styled.tsx | 33 ++++++ .../src/components/AuthPage/Header/Header.tsx | 81 ++++++++++++++ .../components/AuthPage/Header/HeaderLogo.tsx | 29 +++++ .../SignIn/BasicSignIn/BasicSignIn.styled.tsx | 56 ++++++++++ .../SignIn/BasicSignIn/BasicSignIn.tsx | 101 ++++++++++++++++++ .../OAuthSignIn/AuthCard/AuthCard.styled.tsx | 66 ++++++++++++ .../SignIn/OAuthSignIn/AuthCard/AuthCard.tsx | 41 +++++++ .../SignIn/OAuthSignIn/OAuthSignIn.styled.tsx | 25 +++++ .../SignIn/OAuthSignIn/OAuthSignIn.tsx | 55 ++++++++++ .../AuthPage/SignIn/SignIn.styled.tsx | 19 ++++ .../src/components/AuthPage/SignIn/SignIn.tsx | 27 +++++ .../components/NavBar/UserInfo/UserInfo.tsx | 2 +- .../src/components/common/Button/Button.tsx | 3 +- .../src/components/common/Icons/AlertIcon.tsx | 22 ++++ .../components/common/Icons/CognitoIcon.tsx | 49 +++++++++ .../components/common/Icons/GoogleIcon.tsx | 32 ++++++ .../components/common/Icons/KeycloakIcon.tsx | 21 ++++ .../src/components/common/Icons/OktaIcon.tsx | 20 ++++ .../components/common/Icons/ServiceImage.tsx | 11 ++ .../contexts/GlobalSettingsContext.tsx | 28 +++-- frontend/src/lib/api.ts | 2 + frontend/src/lib/hooks/api/appConfig.ts | 47 ++++++-- frontend/src/theme/theme.ts | 
72 +++++++++++++ frontend/vite.config.ts | 16 +++ 29 files changed, 929 insertions(+), 61 deletions(-) create mode 100644 frontend/public/serviceImage.png create mode 100644 frontend/src/components/AuthPage/AuthPage.styled.tsx create mode 100644 frontend/src/components/AuthPage/AuthPage.tsx create mode 100644 frontend/src/components/AuthPage/Header/Header.styled.tsx create mode 100644 frontend/src/components/AuthPage/Header/Header.tsx create mode 100644 frontend/src/components/AuthPage/Header/HeaderLogo.tsx create mode 100644 frontend/src/components/AuthPage/SignIn/BasicSignIn/BasicSignIn.styled.tsx create mode 100644 frontend/src/components/AuthPage/SignIn/BasicSignIn/BasicSignIn.tsx create mode 100644 frontend/src/components/AuthPage/SignIn/OAuthSignIn/AuthCard/AuthCard.styled.tsx create mode 100644 frontend/src/components/AuthPage/SignIn/OAuthSignIn/AuthCard/AuthCard.tsx create mode 100644 frontend/src/components/AuthPage/SignIn/OAuthSignIn/OAuthSignIn.styled.tsx create mode 100644 frontend/src/components/AuthPage/SignIn/OAuthSignIn/OAuthSignIn.tsx create mode 100644 frontend/src/components/AuthPage/SignIn/SignIn.styled.tsx create mode 100644 frontend/src/components/AuthPage/SignIn/SignIn.tsx create mode 100644 frontend/src/components/common/Icons/AlertIcon.tsx create mode 100644 frontend/src/components/common/Icons/CognitoIcon.tsx create mode 100644 frontend/src/components/common/Icons/GoogleIcon.tsx create mode 100644 frontend/src/components/common/Icons/KeycloakIcon.tsx create mode 100644 frontend/src/components/common/Icons/OktaIcon.tsx create mode 100644 frontend/src/components/common/Icons/ServiceImage.tsx diff --git a/contract/src/main/resources/swagger/kafbat-ui-api.yaml b/contract/src/main/resources/swagger/kafbat-ui-api.yaml index dff80b4ce..97d0e5b11 100644 --- a/contract/src/main/resources/swagger/kafbat-ui-api.yaml +++ b/contract/src/main/resources/swagger/kafbat-ui-api.yaml @@ -2256,7 +2256,10 @@ paths: /login: post: + tags: + - Unmapped summary: Authenticate + operationId: authenticate requestBody: required: true content: diff --git a/frontend/public/serviceImage.png b/frontend/public/serviceImage.png new file mode 100644 index 0000000000000000000000000000000000000000..8006b13f5c09b4cee6289790a38df545177b25dc GIT binary patch literal 7361 zcmV;y96sZTP)Py6dr3q=RCodHoe6lB#ku$SzJBL?XSu$fo>Q_XBt(|TZrNRGL{UMCT18P%st8ua zR_keNTdQ#cX^&b3WDg->2?>x5v+oJn$iDCUzOV20EWhuX_sy%3iUjh4FVW3*iZvW4O|M8rB{;#*%CU`Bj zPk#Rnhp{5=iD$9fm;6|y6?sJ70Iym9EXuOdW?E0R1W0KgE&tabFV7zdAjg7hiMSvy z!OQbUX|FE-SJtijf%UZfTdauyUG&n}?RWp!5+Gkm8DgaT!;&FX%eMjgr3ex#tLoEL z|My$$i`V|u=H7lS)XSxSz0&*Vy>7J?qvG15ah=}~_1Ag-{&f@Ql>&6V(tl@&5UR$S zd;H}Hy+PU!o~ zRrI_5{99W$2;2U<4(@o{m%V@CzkRi9UgN9p!T+i!RuFaFNcdju{5|;Qk#fz|k(hAJ zFZ(|HdLp=16SK9MngzbkP3_7$aA#xczD21!S2|ithsbz;sh!15RI$jiCOf|~{@vC3 z=#5o!#KWo*@t8bv-Ycr|+|HY=g#@FCYRSc#HLY;#LVveR8?>@e_MeC9KO4n=E>gfE z`V!y$DDI;*PS!$#STS3~782Wp*3u|ZkHP3yeGJEDqK4cCQEMUMew2WD7(!ML_hbjI zv!=REcv7}DyNHKnv%PZ2t&sI=DD;B7KcFT)fplsy!&$rht%-mM&j-Q{YfTO6 zPy6(@hR{{0nOmX32ik+7IRMIcqC~7jAOD-7+Sr5htf|goyYs44k+rFw!s9Qv56Ay8 zR*GJOTCo#)<6$Hh1~Z^F03~ZP(z%D}Ej+!=ng|ee5l3ULm;`IGdPE~SV%~m47qMwj z$$s7lIJ#m`8o*Eu18Jz0?;}V53{Cn+iP=6c-(*cB2&?Fd2(Y%^FTfm1d~1&(;z87k z_naB=v8d&5VTgE$q0GOXur>mW6Y_Z02G-d7I}>-T(?@MU$$trYqQFoN zM)3QAUehQTzJS`e4>{oxv`N2hDe&Dk)!InN_8N!R)Y^J~_TSb`8H#%?L`r!QwdO+@ zxd2KN&<6|vf`m@cmqAZBYW|BDq8_6sYtI&IBjB=gL4ud(zgSzP+hSh#)WpBoD(7s4 z){~$w1+{$?_`y*UdO)g!!F1H>_mC4GVJLma>5NZTIa^zMzR@Cz^t#XLj*<+ z(o-mv??PYv=;YfyN`M#vLi#{$#b_C|9z)LEj4tV={-&hAthP1+#F^BuwTQ&3*V-zZ 
z_R$)rk>p<&8`3wTHhaTJwn^U$y>C=KV+b%##?{ahjZ*O@`uN8fNZp3^{M+sD<00{fNo-R~vzjQ57Usz?YIX6IuPy z(C|L$g{`Wg_>+$+PXEE9$p7V=O7^dtlJ?OGr_`OdeXHA;F~7g(KfbVr!nfaHGtnL* zfXmyv-mNO&wOREsuiw%f`_A&VgdLupsUNHw%=qBJf%EUIA58z#rjd+4zc`%r=4&Gt z-t^LB{b`5p!doBf^4{L9%YXNfA^%-pec}6oa^d^YYVNCwTJtC9j|D^NsQT#|B|w;= zX1SOM;UB68Z4Pw$K+%h+Ij{CBr61&}W!qDwqIYAYf_Fm<`EUE{^WHkD%YSdLF6ZsP zY4hHBeYzzcQG$eM^7Vj>r4bx3BFv|L(nA@!Rig3j5QN+OXfx&pWc-ZK7C- z{845HotM|7|2>fcGY_tt+7bQo+nShXEA)xa^~fo|)F`RXOG?TXl;q7QNxwi5c6|JL zl=#O{;vPkbe*`7|QIy2TP?8@WuInm3^9IOqyNsWDBkMhi%g zAS@vv=C}yWB1TCw$d#Z;gN}Vr`zDmVhFY`@HE%0w&a`24Jav3 zpd>$ulC;4j$xopqZ!)$)PJULElb$u`VmI{;#lDo(5d7*r6Twa7HO>R#M-&q&s44u_ zZ9U=7Xoo@`M0@sjr0~0uV;(?Fd>A$PQPkA+sOLAJragt4u?Z#P8KduB$$kMP_hr<= z*HBAdM=ke4t=W#+_zAQfg1%tTW`itrzfeDIqbf)Me6g4b5ute2Y8nM{1!&TtKMXpL znaXYc6t&?)<0P$Czl~b{25RZ+sKu|L7XBJFe;aD<3#i#!P_s6pW;}(OvK~2hEr!qU zMsxN~1_ORd_n8NCt3rNr%S5md^vJ zBME{Cvj~@jS;W1uabYnvfLaYw8R!d5*+e}NniA13>WM`TYnU(BLt-<616WD2I62K5eAcBFxeym00D#mMCfaMJ{XEkU@4WL z3bj)Y2*+FV=o}zGHH#o*Z2aPD@l50sqeqZfYbsN%0#UwPZX#JK0z-j`99_1lKAH>| zPD33^M(&G6-w}eQavuX(f21qq9@>0n((FBlhSM7+w9-!9dE5V+G*SOgQ_Qygt>>Op z+X8Q=Gx8z&3jT+oh5)oZu^0x=qi8OmiXLqc02hFv5DcQx0_+ly%RngywGt3LUZ@5r$s^)y|T`)Nu4 zjS>9XkX6)#KB}t>-}>GK@1MI*r01LSifzhC<_k-Ob#cFXy(wl>Yjea}T4NukGxyK* z)*NQ2JqT@YB+_sKvNi=(p9Y2ukTOBa0yzhiTu=lV1)vs!S_G;X8G;lM3b4e6%vgfV zM47|UG$I`$Psx{|u^dsR5K+F6d=xncNtcdpIFX^w5PBPYX)kz>rle=63%!?`;8j$H zKAvBCX8psK!pU5Y#R(A0{F-ym|GXjg>5j&T2Wg3Wh>pw`>8;+&V5>jc-p?@%#UN`F zQT54SI1h$2kTO8d1X*l33cr6YsChC6j*Do@IX35av1ehyVd}hny`nb*E zLU%Srt)?w?1Kovh(cgH4k&Zz01Lu%NqEWQ*pi2OQAS1;@h?EBMC?QHFC|O3ZPz5nL zqu`AV#mEsNKsDdbHW8@^LWPJ?CJ4>|B^^~dkE|0$LJXSTPzIWg(p~l!TFyU1eZ*>N zf>ux&yh^GJe(ZyceQTzh+hytSnh_u>n(wn|c3te2;D)GmN@LV&T9O{2v+#HH)$e7v z!yjE=5b|(1sx}IAv7nC!Ln0WGOk_wYAe{$U5Rwjxd6Wnc1(;@X1VN+MM&d*|q29zE zLb|C8QI3=fqD(^)iY^{$Boa-}83vlY=`8v)&8ZuyJ9iJ&L7r3u-QQCd{K(sB@2$9| zuIx#cnuIUaGZJ>La;}bf;pK+7$NL&0SJNE7hW5;->8X65q1MCb`%WPboki7#fi4{M zQJ{|jgK)ydffNr?B1lQ2k_?g%K~6Hr=Ie_?kzV8zqe~DQXA*r3s!q&Bp=f(g(O>s5 zop~?O8213RVRunw1bBD%Tfz(-PlYO?wr*`me5$n}aurSStLeyi zoW9am(6sJB>hnV#4gk$r(1n0bknuU_1tDT9Qk(^f215);W5_Xs`b8*66VGEnA3Z7} ze}su#QKl{oWh5AVuP?)m@6(n03z`$xP#bn9RY7-BarXYy0^gsl7*Crfn#O2`TEZ1Q z-iqX(eEOF9q^B|(qaRRPWA33V<$i_>)}ycg9ZJtW7(50er(oo?i3~xARuB>j`Y_O+ z8x=v$s2Ik?&CiVlhMLIK3a4HWXwQNs5QYw;^z1}i|1twb8)!>jMSa8yszdM5Rh(UY zF#Tgsi@2M|gGrj#^8cE!D`Gq^8k?ppVAH~exQ%|TQ7h<-xr3p!yU`Ush}`%BYWI6E zBsO4f?zLIkKmpc@U1_snTdgI16j2pU1?DHuKi13RJXRg{(& z&{k}u|H98`iCals;;k5pn`Ff}F8(;>%~ow8vmr zsI?=ggP)-Gy@lHSDr(ouC~aGj>o=k;Ttk1xeY7P%SX~qP&=Z%c>$SXZbjD589euSG z%=+-BE*CqYfQL9omaJ_z`N~JE*>Dt)C1=u3p7SztosGZNCwr@ghe;TE2BTCD9 zl=_EJ%I-sd;Z}w-J!wl=yXk7%FwuO*4Vj5j;B%DMfwdWZBL-I3PK-4 zX?X;t=^>QrRVXE%7%~^3Ilq|h#FZ2B>0fR8MZPbNrd%&mbZli?PyOV+p>wl(be~T} zikXg_z5u!KcI4`NQR-HsG^|Bw6l93G;SrSPbto+Y$j4Ax*P|MN72u8n+`0~>RS@?G zO3Tktnjb=G68YDlG(LdRunMJaB}%0yO3`wp^abcr<}i?aTUOqYmDg;gJGRYZ<3?As z`B+5u2lrg}FI{y7&UtM(bcU)w=Y|wL4e9(`^H7HF#Lup!z(!ACbZx+|1KsT;JX}lk$;Xai5l_>RhqtxAvQhO&#^&Kc>%TWpz zA*atnpL{bz$xFKG0&n*m3(ob3i{Dy@DK)N7C!ffQcIVP}*79bbRM-o!F z0A0a+H2L!x&YjOt&H{#V=QENwA8p}$^u-G>lq^6hS%6$J54m^_Qqdfw!r4d#HzVhH zAZH7rXCSA}KuVl}F2RGrghlnjEWTX5E%n^E0{%k(j7F~KdZ6>sVqKAvrXrJ`i!Ya8E(iKu1IOFNU1JJ$>JR@NO7)6vF_-jZek>EHl3d@ed%(*S?c-UsUI^5 z*Fz0e>{~K_;LMCt%_$f3fsPo?IA92}#}HtcqLwZh2xTaJoi!(h-Bd zJqCX}3<0(n&f1_4wm}zSi#F7r;b1!k&f3!*Xh+*gI~x7$sP(a@!rOt;Lyi=CJ5zkr zMJ+n&ijZR8DU|uTP<_Igh5#qpf*j}xvu7y60d15$`e=I$QFcg?wn*W27(RDEf6fKn z=k932r!f$eFf8{F(fyYw+JI##W{~VD66L0C#l9?J=CNLw~{+ z{Rta%rzWF2Wy8oR8wO9=(0js$&SN$-AGM|4+m5P3_LLoTpyaSK#YbEz^u4K~z|X_0 
z=!D17V*eT1;*&EdK0b}oV{TOVO`-O<6V3jPbe^%JKgbqMs4cosTMVH#7(#8)huEPD zc0?QMh9=CNfrwdy%>fG^7+Y6M$GG6P2^)8jLl4)4h{AF`qTfGt(~?I_>xKH0-sZ<}+I=_l|;p$c56QQu)8gD0>eC+A;wWaTv4THyR7&eL>!>61WIz5H{;AwQ8 znH73*xt6*fSMN8I5%t%5Cv;-^QkAYfyl8E&uNwo0>=-y?OaB2Iy7x_{#$xzAL!?D+Ji65lz$%>3(;%brHdKd^XqNywsIu`dK3N16`V(SF#LZf{$9kJ!=Y=Sbg47rF!7=s4}se67|4PRq||3B<%g$GeZY~r{q{8Ox2NTRJ?-8Oboe;Z z?&m^_|8%K*-z^qD|8qsX-%Ns~Eh1s!)g8F$WaAz?8h4pMul>}9>Q8N{*kw=o9tX?sxX*f)ie{T>!|%zee{X$E$@VL-s7A6RNa&iru6jM80B zIi;W2VU#_Nl+TY7_v&)r=~BgUkM4?-v!hG==59#;c-eJ3 zi(@7v{o`dfmzwdQ`}{-LGZ5yle_7Xx{<*J5p8DXXa9;Jg*3 zzO!E`^;z&h-nRg}nE-Q`wrkR}1>Z`7Ajfm>d6H z3@xJfF9zRy{r%_bzy6c;V>?pz)=B?l#_O7dmYS5uR{FaVx7eh6Ip(gFtKvo`Vq9ND zyd1Ds^89-ezEnro9eguU->aiL9Il(%F@t__C?;l0nW#6C3KAZIYd+y#4ECk2g^Bu) zM8Y*SH7^D7L|Hc)5k?DVJfE7k1UT2T3~MJs^muVB z$Eqh;OAvO%%ky8ve;1mlZnji@aa#EsKPzvEMq6VUVzL&aBF*die>Pu${twaH$4k7K z3}HPbczI5?RPQ#Ile_&68}YSTLHKuG#f%s9Z%QXX5=Uc1FBOX)p|ZqC5kP+3K>=~k n^vrXzNGtM)yh62DGvNOpcNnhV7f#5V00000NkvXXu0mjfcl}=D literal 0 HcmV?d00001 diff --git a/frontend/src/components/App.tsx b/frontend/src/components/App.tsx index 16dd1305d..d15cb95a0 100644 --- a/frontend/src/components/App.tsx +++ b/frontend/src/components/App.tsx @@ -1,5 +1,5 @@ import React, { Suspense, useContext } from 'react'; -import { Routes, Route, Navigate } from 'react-router-dom'; +import { Routes, Route, Navigate, useMatch } from 'react-router-dom'; import { accessErrorPage, clusterPath, @@ -24,6 +24,7 @@ import { GlobalSettingsProvider } from './contexts/GlobalSettingsContext'; import { UserInfoRolesAccessProvider } from './contexts/UserInfoRolesAccessContext'; import PageContainer from './PageContainer/PageContainer'; +const AuthPage = React.lazy(() => import('components/AuthPage/AuthPage')); const Dashboard = React.lazy(() => import('components/Dashboard/Dashboard')); const ClusterPage = React.lazy( () => import('components/ClusterPage/ClusterPage') @@ -49,54 +50,59 @@ const queryClient = new QueryClient({ }); const App: React.FC = () => { const { isDarkMode } = useContext(ThemeModeContext); + const isAuthRoute = useMatch('/login'); return ( - - - }> - - - - - - - {['/', '/ui', '/ui/clusters'].map((path) => ( + + {isAuthRoute ? 
( + + ) : ( + + }> + + + + + + + {['/', '/ui', '/ui/clusters'].map((path) => ( + } + /> + ))} } + path={getNonExactPath(clusterNewConfigPath)} + element={} /> - ))} - } - /> - } - /> - - } - /> - } /> - } - /> - - - - - - - - - - + } + /> + + } + /> + } /> + } + /> + + + + + + + + + + )} + ); diff --git a/frontend/src/components/AuthPage/AuthPage.styled.tsx b/frontend/src/components/AuthPage/AuthPage.styled.tsx new file mode 100644 index 000000000..16f86f714 --- /dev/null +++ b/frontend/src/components/AuthPage/AuthPage.styled.tsx @@ -0,0 +1,14 @@ +import styled, { css } from 'styled-components'; + +export const AuthPageStyled = styled.div( + ({ theme }) => css` + display: flex; + flex-direction: column; + align-items: center; + justify-content: space-between; + min-height: 100vh; + background-color: ${theme.auth_page.backgroundColor}; + font-family: ${theme.auth_page.fontFamily}; + overflow-x: hidden; + ` +); diff --git a/frontend/src/components/AuthPage/AuthPage.tsx b/frontend/src/components/AuthPage/AuthPage.tsx new file mode 100644 index 000000000..ceae3069a --- /dev/null +++ b/frontend/src/components/AuthPage/AuthPage.tsx @@ -0,0 +1,21 @@ +import React from 'react'; +import { useAuthSettings } from 'lib/hooks/api/appConfig'; + +import Header from './Header/Header'; +import SignIn from './SignIn/SignIn'; +import * as S from './AuthPage.styled'; + +function AuthPage() { + const { data } = useAuthSettings(); + + return ( + +
+ {data && ( + + )} + + ); +} + +export default AuthPage; diff --git a/frontend/src/components/AuthPage/Header/Header.styled.tsx b/frontend/src/components/AuthPage/Header/Header.styled.tsx new file mode 100644 index 000000000..4ba86f2bc --- /dev/null +++ b/frontend/src/components/AuthPage/Header/Header.styled.tsx @@ -0,0 +1,33 @@ +import styled, { css } from 'styled-components'; + +export const HeaderStyled = styled.div` + display: grid; + grid-template-columns: repeat(47, 41.11px); + grid-template-rows: repeat(4, 41.11px); + justify-content: center; + margin-bottom: 13.5px; +`; + +export const HeaderCell = styled.div<{ $sections?: number }>( + ({ theme, $sections }) => css` + border: 1.23px solid ${theme.auth_page.header.cellBorderColor}; + border-radius: 75.98px; + ${$sections && `grid-column: span ${$sections};`} + ` +); + +export const StyledSVG = styled.svg` + grid-column: span 3; +`; + +export const StyledRect = styled.rect( + ({ theme }) => css` + fill: ${theme.auth_page.header.LogoBgColor}; + ` +); + +export const StyledPath = styled.path( + ({ theme }) => css` + fill: ${theme.auth_page.header.LogoTextColor}; + ` +); diff --git a/frontend/src/components/AuthPage/Header/Header.tsx b/frontend/src/components/AuthPage/Header/Header.tsx new file mode 100644 index 000000000..16980af29 --- /dev/null +++ b/frontend/src/components/AuthPage/Header/Header.tsx @@ -0,0 +1,81 @@ +import React from 'react'; + +import * as S from './Header.styled'; +import HeaderLogo from './HeaderLogo'; + +function Header() { + return ( + + + {Array(2).fill()} + + {Array(2).fill()} + + {Array(2).fill()} + {Array(4).fill()} + {Array(2).fill()} + + {Array(2).fill()} + + {Array(3).fill()} + + {Array(2).fill()} + {Array(2).fill()} + {Array(2).fill()} + + + {Array(3).fill()} + {Array(8).fill()} + + {Array(2).fill()} + + {Array(3).fill()} + + {Array(6).fill()} + {Array(3).fill()} + + + {Array(2).fill()} + + + + + + + + {Array(2).fill()} + + + + {Array(3).fill()} + + + {Array(3).fill()} + + {Array(3).fill()} + {Array(3).fill()} + + + + + + {Array(2).fill()} + + {Array(2).fill()} + {Array(5).fill()} + {Array(2).fill()} + + + + {Array(5).fill()} + {Array(2).fill()} + + {Array(2).fill()} + + + + + ); +} + +export default Header; diff --git a/frontend/src/components/AuthPage/Header/HeaderLogo.tsx b/frontend/src/components/AuthPage/Header/HeaderLogo.tsx new file mode 100644 index 000000000..e5d9ca12d --- /dev/null +++ b/frontend/src/components/AuthPage/Header/HeaderLogo.tsx @@ -0,0 +1,29 @@ +import React from 'react'; + +import * as S from './Header.styled'; + +const HeaderLogo = () => ( + + + + + + + + + + +); + +export default HeaderLogo; diff --git a/frontend/src/components/AuthPage/SignIn/BasicSignIn/BasicSignIn.styled.tsx b/frontend/src/components/AuthPage/SignIn/BasicSignIn/BasicSignIn.styled.tsx new file mode 100644 index 000000000..da1388b0a --- /dev/null +++ b/frontend/src/components/AuthPage/SignIn/BasicSignIn/BasicSignIn.styled.tsx @@ -0,0 +1,56 @@ +import styled from 'styled-components'; + +export const Fieldset = styled.fieldset` + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + gap: 16px; + border: none; + width: 100%; +`; + +export const Form = styled.form` + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + gap: 40px; + width: 100%; + + ${Fieldset} div { + width: 100%; + } +`; + +export const Field = styled.div` + ${({ theme }) => theme.auth_page.signIn.label}; + display: flex; + flex-direction: column; + justify-content: 
flex-start; + align-items: flex-start; + gap: 4px; +`; + +export const Label = styled.label` + font-size: 12px; + font-weight: 500; + line-height: 16px; +`; + +export const ErrorMessage = styled.div` + display: flex; + column-gap: 2px; + align-items: center; + justify-content: center; + font-weight: 400; + font-size: 14px; + line-height: 20px; +`; + +export const ErrorMessageText = styled.span` + ${({ theme }) => theme.auth_page.signIn.errorMessage}; + font-weight: 400; + font-size: 14px; + line-height: 20px; +`; diff --git a/frontend/src/components/AuthPage/SignIn/BasicSignIn/BasicSignIn.tsx b/frontend/src/components/AuthPage/SignIn/BasicSignIn/BasicSignIn.tsx new file mode 100644 index 000000000..044f4781b --- /dev/null +++ b/frontend/src/components/AuthPage/SignIn/BasicSignIn/BasicSignIn.tsx @@ -0,0 +1,101 @@ +import React from 'react'; +import { Button } from 'components/common/Button/Button'; +import Input from 'components/common/Input/Input'; +import { Controller, FormProvider, useForm } from 'react-hook-form'; +import { useAuthenticate } from 'lib/hooks/api/appConfig'; +import AlertIcon from 'components/common/Icons/AlertIcon'; +import { useNavigate } from 'react-router-dom'; +import { useQueryClient } from '@tanstack/react-query'; + +import * as S from './BasicSignIn.styled'; + +interface FormValues { + username: string; + password: string; +} + +function BasicSignIn() { + const methods = useForm({ + defaultValues: { username: '', password: '' }, + }); + const navigate = useNavigate(); + const { mutateAsync, isLoading } = useAuthenticate(); + const client = useQueryClient(); + + const onSubmit = async (data: FormValues) => { + await mutateAsync(data, { + onSuccess: async (response) => { + if (response.raw.url.includes('error')) { + methods.setError('root', { message: 'error' }); + } else { + await client.invalidateQueries({ queryKey: ['app', 'info'] }); + navigate('/'); + } + }, + }); + }; + + return ( + + + + {methods.formState.errors.root && ( + + + + Username or password entered incorrectly + + + )} + ( + + Username + + + )} + /> + ( + + Password + + + )} + /> + + + + + ); +} + +export default BasicSignIn; diff --git a/frontend/src/components/AuthPage/SignIn/OAuthSignIn/AuthCard/AuthCard.styled.tsx b/frontend/src/components/AuthPage/SignIn/OAuthSignIn/AuthCard/AuthCard.styled.tsx new file mode 100644 index 000000000..d1eae050f --- /dev/null +++ b/frontend/src/components/AuthPage/SignIn/OAuthSignIn/AuthCard/AuthCard.styled.tsx @@ -0,0 +1,66 @@ +import styled, { css } from 'styled-components'; +import GitHubIcon from 'components/common/Icons/GitHubIcon'; +import { Button } from 'components/common/Button/Button'; + +export const AuthCardStyled = styled.div( + ({ theme }) => css` + display: flex; + flex-direction: column; + gap: 16px; + padding: 16px; + width: 400px; + border: 1px solid black; + border: 1px solid ${theme.auth_page.signIn.authCard.borderColor}; + border-radius: ${theme.auth_page.signIn.authCard.borderRadius}; + background-color: ${theme.auth_page.signIn.authCard.backgroundColor}; + ` +); + +export const ServiceData = styled.div( + ({ theme }) => css` + display: flex; + gap: 8px; + align-items: center; + + svg, + img { + margin: 8px; + width: 48px; + height: 48px; + } + + ${GitHubIcon} { + fill: ${theme.auth_page.icons.githubColor}; + } + ` +); + +export const ServiceDataTextContainer = styled.div` + display: flex; + flex-direction: column; +`; + +export const ServiceNameStyled = styled.span( + ({ theme }) => css` + color: 
${theme.auth_page.signIn.authCard.serviceNamecolor}; + font-size: 16px; + font-weight: 500; + line-height: 24px; + ` +); + +export const ServiceTextStyled = styled.span( + ({ theme }) => css` + color: ${theme.auth_page.signIn.authCard.serviceTextColor}; + font-size: 12px; + font-weight: 500; + line-height: 16px; + ` +); + +export const ServiceButton = styled(Button)` + width: 100%; + border-radius: 8px; + font-size: 14px; + text-decoration: none; +`; diff --git a/frontend/src/components/AuthPage/SignIn/OAuthSignIn/AuthCard/AuthCard.tsx b/frontend/src/components/AuthPage/SignIn/OAuthSignIn/AuthCard/AuthCard.tsx new file mode 100644 index 000000000..b9a09812b --- /dev/null +++ b/frontend/src/components/AuthPage/SignIn/OAuthSignIn/AuthCard/AuthCard.tsx @@ -0,0 +1,41 @@ +import React, { ElementType, useState } from 'react'; +import ServiceImage from 'components/common/Icons/ServiceImage'; + +import * as S from './AuthCard.styled'; + +interface Props { + serviceName: string; + authPath: string | undefined; + Icon?: ElementType; +} + +function AuthCard({ serviceName, authPath, Icon = ServiceImage }: Props) { + const [isLoading, setIsLoading] = useState(false); + + return ( + + + + + {serviceName} + + Use an account issued by the organization + + + + { + setIsLoading(true); + window.location.replace(`${window.basePath}${authPath}`); + }} + inProgress={isLoading} + > + {!isLoading && `Log in with ${serviceName}`} + + + ); +} + +export default AuthCard; diff --git a/frontend/src/components/AuthPage/SignIn/OAuthSignIn/OAuthSignIn.styled.tsx b/frontend/src/components/AuthPage/SignIn/OAuthSignIn/OAuthSignIn.styled.tsx new file mode 100644 index 000000000..bf238e9b2 --- /dev/null +++ b/frontend/src/components/AuthPage/SignIn/OAuthSignIn/OAuthSignIn.styled.tsx @@ -0,0 +1,25 @@ +import styled from 'styled-components'; + +export const OAuthSignInStyled = styled.div` + display: flex; + flex-direction: column; + gap: 8px; +`; + +export const ErrorMessage = styled.div` + display: flex; + column-gap: 2px; + align-items: center; + justify-content: center; + font-weight: 400; + font-size: 14px; + line-height: 20px; + margin-bottom: 8px; +`; + +export const ErrorMessageText = styled.span` + ${({ theme }) => theme.auth_page.signIn.errorMessage}; + font-weight: 400; + font-size: 14px; + line-height: 20px; +`; diff --git a/frontend/src/components/AuthPage/SignIn/OAuthSignIn/OAuthSignIn.tsx b/frontend/src/components/AuthPage/SignIn/OAuthSignIn/OAuthSignIn.tsx new file mode 100644 index 000000000..fca5b4925 --- /dev/null +++ b/frontend/src/components/AuthPage/SignIn/OAuthSignIn/OAuthSignIn.tsx @@ -0,0 +1,55 @@ +import React, { ElementType } from 'react'; +import GitHubIcon from 'components/common/Icons/GitHubIcon'; +import GoogleIcon from 'components/common/Icons/GoogleIcon'; +import CognitoIcon from 'components/common/Icons/CognitoIcon'; +import OktaIcon from 'components/common/Icons/OktaIcon'; +import KeycloakIcon from 'components/common/Icons/KeycloakIcon'; +import ServiceImage from 'components/common/Icons/ServiceImage'; +import { OAuthProvider } from 'generated-sources'; +import { useLocation } from 'react-router-dom'; +import AlertIcon from 'components/common/Icons/AlertIcon'; + +import * as S from './OAuthSignIn.styled'; +import AuthCard from './AuthCard/AuthCard'; + +interface Props { + oAuthProviders: OAuthProvider[] | undefined; +} + +const ServiceIconMap: Record = { + github: GitHubIcon, + google: GoogleIcon, + cognito: CognitoIcon, + keycloak: KeycloakIcon, + okta: OktaIcon, + unknownService: 
ServiceImage, +}; + +function OAuthSignIn({ oAuthProviders }: Props) { + const { search } = useLocation(); + + return ( + + {search.includes('error') && ( + + + Invalid credentials + + )} + {oAuthProviders?.map((provider) => ( + + ))} + + ); +} + +export default OAuthSignIn; diff --git a/frontend/src/components/AuthPage/SignIn/SignIn.styled.tsx b/frontend/src/components/AuthPage/SignIn/SignIn.styled.tsx new file mode 100644 index 000000000..0f24b45fd --- /dev/null +++ b/frontend/src/components/AuthPage/SignIn/SignIn.styled.tsx @@ -0,0 +1,19 @@ +import styled, { css } from 'styled-components'; + +export const SignInStyled = styled.div` + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + width: 320px; + gap: 56px; + flex-grow: 1; +`; + +export const SignInTitle = styled.span( + ({ theme }) => css` + color: ${theme.auth_page.signIn.titleColor}; + font-size: 24px; + font-weight: 600; + ` +); diff --git a/frontend/src/components/AuthPage/SignIn/SignIn.tsx b/frontend/src/components/AuthPage/SignIn/SignIn.tsx new file mode 100644 index 000000000..987ee5ebf --- /dev/null +++ b/frontend/src/components/AuthPage/SignIn/SignIn.tsx @@ -0,0 +1,27 @@ +import React from 'react'; +import { AuthType, OAuthProvider } from 'generated-sources'; + +import BasicSignIn from './BasicSignIn/BasicSignIn'; +import * as S from './SignIn.styled'; +import OAuthSignIn from './OAuthSignIn/OAuthSignIn'; + +interface Props { + authType?: AuthType; + oAuthProviders?: OAuthProvider[]; +} + +function SignInForm({ authType, oAuthProviders }: Props) { + return ( + + Sign in + {(authType === AuthType.LDAP || authType === AuthType.LOGIN_FORM) && ( + + )} + {authType === AuthType.OAUTH2 && ( + + )} + + ); +} + +export default SignInForm; diff --git a/frontend/src/components/NavBar/UserInfo/UserInfo.tsx b/frontend/src/components/NavBar/UserInfo/UserInfo.tsx index dae43364c..b52cc7631 100644 --- a/frontend/src/components/NavBar/UserInfo/UserInfo.tsx +++ b/frontend/src/components/NavBar/UserInfo/UserInfo.tsx @@ -19,7 +19,7 @@ const UserInfo = () => { } > - + Log out diff --git a/frontend/src/components/common/Button/Button.tsx b/frontend/src/components/common/Button/Button.tsx index 828b5d301..8964b6e17 100644 --- a/frontend/src/components/common/Button/Button.tsx +++ b/frontend/src/components/common/Button/Button.tsx @@ -9,6 +9,7 @@ export interface Props ButtonProps { to?: string | object; inProgress?: boolean; + className?: string; } export const Button: FC = ({ @@ -20,7 +21,7 @@ export const Button: FC = ({ }) => { if (to) { return ( - + {children} diff --git a/frontend/src/components/common/Icons/AlertIcon.tsx b/frontend/src/components/common/Icons/AlertIcon.tsx new file mode 100644 index 000000000..3c79f78e6 --- /dev/null +++ b/frontend/src/components/common/Icons/AlertIcon.tsx @@ -0,0 +1,22 @@ +import React from 'react'; + +const AlertIcon: React.FC = () => { + return ( + + + + ); +}; + +export default AlertIcon; diff --git a/frontend/src/components/common/Icons/CognitoIcon.tsx b/frontend/src/components/common/Icons/CognitoIcon.tsx new file mode 100644 index 000000000..2d0b0d38a --- /dev/null +++ b/frontend/src/components/common/Icons/CognitoIcon.tsx @@ -0,0 +1,49 @@ +import React from 'react'; +import styled from 'styled-components'; + +function CognitoIcon() { + return ( + + + + + + + + + + + + + + + ); +} + +export default styled(CognitoIcon)``; diff --git a/frontend/src/components/common/Icons/GoogleIcon.tsx b/frontend/src/components/common/Icons/GoogleIcon.tsx new file mode 100644 
index 000000000..2e569dbfe --- /dev/null +++ b/frontend/src/components/common/Icons/GoogleIcon.tsx @@ -0,0 +1,32 @@ +import React from 'react'; +import styled from 'styled-components'; + +function GoogleIcon() { + return ( + + + + + + + ); +} + +export default styled(GoogleIcon)``; diff --git a/frontend/src/components/common/Icons/KeycloakIcon.tsx b/frontend/src/components/common/Icons/KeycloakIcon.tsx new file mode 100644 index 000000000..e6b45ef69 --- /dev/null +++ b/frontend/src/components/common/Icons/KeycloakIcon.tsx @@ -0,0 +1,21 @@ +import React from 'react'; +import styled from 'styled-components'; + +function KeycloakIcon() { + return ( + + + + + ); +} + +export default styled(KeycloakIcon)``; diff --git a/frontend/src/components/common/Icons/OktaIcon.tsx b/frontend/src/components/common/Icons/OktaIcon.tsx new file mode 100644 index 000000000..a9d6871b0 --- /dev/null +++ b/frontend/src/components/common/Icons/OktaIcon.tsx @@ -0,0 +1,20 @@ +import React from 'react'; +import styled from 'styled-components'; + +function OktaIcon() { + return ( + + + + ); +} + +export default styled(OktaIcon)``; diff --git a/frontend/src/components/common/Icons/ServiceImage.tsx b/frontend/src/components/common/Icons/ServiceImage.tsx new file mode 100644 index 000000000..9311334f1 --- /dev/null +++ b/frontend/src/components/common/Icons/ServiceImage.tsx @@ -0,0 +1,11 @@ +import React from 'react'; + +interface Props { + serviceName: string; +} + +function ServiceImage({ serviceName }: Props) { + return {serviceName}; +} + +export default ServiceImage; diff --git a/frontend/src/components/contexts/GlobalSettingsContext.tsx b/frontend/src/components/contexts/GlobalSettingsContext.tsx index 4de05307b..5e906c292 100644 --- a/frontend/src/components/contexts/GlobalSettingsContext.tsx +++ b/frontend/src/components/contexts/GlobalSettingsContext.tsx @@ -1,6 +1,7 @@ import { useAppInfo } from 'lib/hooks/api/appConfig'; import React from 'react'; import { ApplicationInfoEnabledFeaturesEnum } from 'generated-sources'; +import { useNavigate } from 'react-router-dom'; interface GlobalSettingsContextProps { hasDynamicConfig: boolean; @@ -15,13 +16,26 @@ export const GlobalSettingsProvider: React.FC< React.PropsWithChildren > = ({ children }) => { const info = useAppInfo(); - const value = React.useMemo(() => { - const features = info.data?.enabledFeatures || []; - return { - hasDynamicConfig: features.includes( - ApplicationInfoEnabledFeaturesEnum.DYNAMIC_CONFIG - ), - }; + const navigate = useNavigate(); + const [value, setValue] = React.useState({ + hasDynamicConfig: false, + }); + + React.useEffect(() => { + if (info.data?.redirect && !info.isFetching) { + navigate('login'); + return; + } + + const features = info?.data?.response?.enabledFeatures; + + if (features) { + setValue({ + hasDynamicConfig: features.includes( + ApplicationInfoEnabledFeaturesEnum.DYNAMIC_CONFIG + ), + }); + } }, [info.data]); return ( diff --git a/frontend/src/lib/api.ts b/frontend/src/lib/api.ts index 19423d2ac..d6f409ea2 100644 --- a/frontend/src/lib/api.ts +++ b/frontend/src/lib/api.ts @@ -11,6 +11,7 @@ import { AuthorizationApi, ApplicationConfigApi, AclsApi, + UnmappedApi, } from 'generated-sources'; import { BASE_PARAMS } from 'lib/constants'; @@ -27,3 +28,4 @@ export const consumerGroupsApiClient = new ConsumerGroupsApi(apiClientConf); export const authApiClient = new AuthorizationApi(apiClientConf); export const appConfigApiClient = new ApplicationConfigApi(apiClientConf); export const aclApiClient = new AclsApi(apiClientConf); 
+export const internalApiClient = new UnmappedApi(apiClientConf); diff --git a/frontend/src/lib/hooks/api/appConfig.ts b/frontend/src/lib/hooks/api/appConfig.ts index e3ee0fdcb..a91c6eb4b 100644 --- a/frontend/src/lib/hooks/api/appConfig.ts +++ b/frontend/src/lib/hooks/api/appConfig.ts @@ -1,21 +1,52 @@ -import { appConfigApiClient as api } from 'lib/api'; +import { + appConfigApiClient as appConfig, + internalApiClient as internalApi, +} from 'lib/api'; import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; import { ApplicationConfig, ApplicationConfigPropertiesKafkaClusters, + ApplicationInfo, } from 'generated-sources'; import { QUERY_REFETCH_OFF_OPTIONS } from 'lib/constants'; -export function useAppInfo() { +export function useAuthSettings() { return useQuery( - ['app', 'info'], - () => api.getApplicationInfo(), + ['app', 'authSettings'], + () => appConfig.getAuthenticationSettings(), QUERY_REFETCH_OFF_OPTIONS ); } +export function useAuthenticate() { + return useMutation({ + mutationFn: (params: { username: string; password: string }) => + internalApi.authenticateRaw(params, { + headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, + }), + }); +} + +export function useAppInfo() { + return useQuery(['app', 'info'], async () => { + const data = await appConfig.getApplicationInfoRaw(); + + let response: ApplicationInfo = {}; + try { + response = await data.value(); + } catch { + response = {}; + } + + return { + redirect: data.raw.url.includes('auth'), + response, + }; + }); +} + export function useAppConfig() { - return useQuery(['app', 'config'], () => api.getCurrentConfig()); + return useQuery(['app', 'config'], () => appConfig.getCurrentConfig()); } function aggregateClusters( @@ -47,7 +78,7 @@ export function useUpdateAppConfig({ const client = useQueryClient(); return useMutation( async (cluster: ApplicationConfigPropertiesKafkaClusters) => { - const existingConfig = await api.getCurrentConfig(); + const existingConfig = await appConfig.getCurrentConfig(); const clusters = aggregateClusters( cluster, @@ -63,7 +94,7 @@ export function useUpdateAppConfig({ kafka: { clusters }, }, }; - return api.restartWithConfig({ restartRequest: { config } }); + return appConfig.restartWithConfig({ restartRequest: { config } }); }, { onSuccess: () => client.invalidateQueries(['app', 'config']), @@ -82,7 +113,7 @@ export function useAppConfigFilesUpload() { export function useValidateAppConfig() { return useMutation((config: ApplicationConfigPropertiesKafkaClusters) => - api.validateConfig({ + appConfig.validateConfig({ applicationConfig: { properties: { kafka: { clusters: [config] } } }, }) ); diff --git a/frontend/src/theme/theme.ts b/frontend/src/theme/theme.ts index f6cd2bacc..bdfe93271 100644 --- a/frontend/src/theme/theme.ts +++ b/frontend/src/theme/theme.ts @@ -57,6 +57,7 @@ const Colors = { '10': '#FAD1D1', '20': '#F5A3A3', '50': '#E51A1A', + '52': '#E63B19', '55': '#CF1717', '60': '#B81414', }, @@ -79,6 +80,45 @@ const Colors = { const baseTheme = { defaultIconColor: Colors.neutral[50], + auth_page: { + backgroundColor: Colors.brand[0], + fontFamily: 'Inter, sans-serif', + header: { + cellBorderColor: Colors.brand[10], + LogoBgColor: Colors.brand[90], + LogoTextColor: Colors.brand[0], + }, + signIn: { + titleColor: Colors.brand[90], + errorMessage: { + color: Colors.red[52], + }, + label: { + color: Colors.brand[70], + }, + authCard: { + borderRadius: '16px', + borderColor: Colors.brand[10], + backgroundColor: Colors.brand[0], + serviceNamecolor: 
Colors.brand[90], + serviceTextColor: Colors.brand[50], + }, + }, + footer: { + fontSize: '12px', + span: { + color: Colors.brand[70], + fontWeight: 500, + }, + p: { + color: Colors.brand[50], + fontWeight: 400, + }, + }, + icons: { + githubColor: Colors.brand[90], + }, + }, heading: { h1: { color: Colors.neutral[90], @@ -821,6 +861,38 @@ export type ThemeType = typeof theme; export const darkTheme: ThemeType = { ...baseTheme, + auth_page: { + backgroundColor: Colors.neutral[90], + fontFamily: baseTheme.auth_page.fontFamily, + header: { + cellBorderColor: Colors.brand[80], + LogoBgColor: Colors.brand[0], + LogoTextColor: Colors.brand[90], + }, + signIn: { + ...baseTheme.auth_page.signIn, + titleColor: Colors.brand[0], + label: { + color: Colors.brand[30], + }, + authCard: { + ...baseTheme.auth_page.signIn.authCard, + borderColor: Colors.brand[80], + backgroundColor: Colors.brand[85], + serviceNamecolor: Colors.brand[0], + }, + }, + footer: { + ...baseTheme.auth_page.footer, + span: { + color: Colors.brand[10], + fontWeight: 500, + }, + }, + icons: { + githubColor: Colors.brand[0], + }, + }, logo: { color: '#FDFDFD', }, diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts index 3a4e861e9..455ef39ae 100644 --- a/frontend/vite.config.ts +++ b/frontend/vite.config.ts @@ -3,6 +3,7 @@ import react from '@vitejs/plugin-react-swc'; import tsconfigPaths from 'vite-tsconfig-paths'; import { ViteEjsPlugin } from 'vite-plugin-ejs'; import checker from 'vite-plugin-checker'; +import { IncomingMessage } from 'http'; export default defineConfig(({ mode }) => { process.env = { ...process.env, ...loadEnv(mode, process.cwd()) }; @@ -87,6 +88,21 @@ export default defineConfig(({ mode }) => { ...defaultConfig.server, open: true, proxy: { + '/login': { + target: isProxy, + changeOrigin: true, + secure: false, + bypass: (req: IncomingMessage) => { + if (req.method === 'GET') { + return req.url; + } + }, + }, + '/logout': { + target: isProxy, + changeOrigin: true, + secure: false, + }, '/api': { target: isProxy, changeOrigin: true, From 64f63d102a33dc0fc886fe67526d48178f115a19 Mon Sep 17 00:00:00 2001 From: Yeikel Date: Fri, 27 Dec 2024 23:54:26 -0500 Subject: [PATCH 31/84] BE: Chore: standarize Protobuf import paths (#723) --- .../ui/serdes/builtin/ProtobufFileSerde.java | 24 ++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/api/src/main/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerde.java b/api/src/main/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerde.java index 723474cae..2c0939c03 100644 --- a/api/src/main/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerde.java +++ b/api/src/main/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerde.java @@ -64,6 +64,7 @@ import javax.annotation.Nullable; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.SystemUtils; import org.jetbrains.annotations.NotNull; @Slf4j @@ -416,7 +417,7 @@ private Map loadFilesWithLocations() { files.filter(p -> !Files.isDirectory(p) && p.toString().endsWith(".proto")) .forEach(path -> { // relative path will be used as "import" statement - String relativePath = baseLocation.relativize(path).toString(); + String relativePath = removeBackSlashes(baseLocation.relativize(path).toString()); var protoFileElement = ProtoParser.Companion.parse( Location.get(baseLocation.toString(), relativePath), readFileAsString(path) @@ -426,6 +427,27 @@ private Map loadFilesWithLocations() { } return filesByLocations; } + + /** + * Replaces backslashes in the given file path with 
forward slashes if the operating system is Windows. + * + * <p>This method is designed to standardize file paths by converting Windows-style backslashes (`\`) + * to Linux/Unix-style forward slashes (`/`) when the application is running on a Windows OS. + * On other operating systems, the input path is returned unchanged.</p> + * + * <p>This is needed because imports in Protobuf use forward slashes (`/`), + * which causes a conflict with Windows paths. For example, `language/language.proto` + * would be converted to `language\language.proto` on Windows, causing a resolution exception.</p>
+ * + * @param path the file path to standardize; must not be {@code null}. + * @return the standardized file path with forward slashes if running on Windows, or the original path otherwise. + */ + private @NotNull String removeBackSlashes(@NotNull final String path) { + if (SystemUtils.IS_OS_WINDOWS) { + return path.replace("\\", "/"); + } + return path; + } } } From 0f0e2a99e0ad5e85b60a014be74237e3819f6948 Mon Sep 17 00:00:00 2001 From: Dmitry Werner Date: Sat, 28 Dec 2024 12:55:02 +0500 Subject: [PATCH 32/84] BE: RBAC: Ignore values for non-applicable resources (#503) --- .../main/java/io/kafbat/ui/model/rbac/AccessContext.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/api/src/main/java/io/kafbat/ui/model/rbac/AccessContext.java b/api/src/main/java/io/kafbat/ui/model/rbac/AccessContext.java index 9ccc10ccf..dbf5c456b 100644 --- a/api/src/main/java/io/kafbat/ui/model/rbac/AccessContext.java +++ b/api/src/main/java/io/kafbat/ui/model/rbac/AccessContext.java @@ -69,8 +69,10 @@ public boolean isAccessible(List userPermissions) throws AccessDenie if (name == null && permission.getCompiledValuePattern() == null) { return true; } - Preconditions.checkState(permission.getCompiledValuePattern() != null && name != null); - return permission.getCompiledValuePattern().matcher(name).matches(); + if (permission.getCompiledValuePattern() != null && name != null) { + return permission.getCompiledValuePattern().matcher(name).matches(); + } + return false; }) .flatMap(p -> p.getParsedActions().stream()) .collect(Collectors.toSet()); From d093752b7df445625c24daae57727ac1fb93ce10 Mon Sep 17 00:00:00 2001 From: Roman Zabaluev Date: Mon, 30 Dec 2024 06:22:25 +0400 Subject: [PATCH 33/84] BE: Implement a mechanism to skip SSL verification (#422) --- .../kafbat/ui/config/ClustersProperties.java | 39 ++++++++++++------- .../ui/service/AdminClientServiceImpl.java | 4 +- .../ui/service/ConsumerGroupService.java | 4 +- .../io/kafbat/ui/service/MessagesService.java | 4 +- .../kafbat/ui/service/ksql/KsqlApiClient.java | 4 +- .../ui/util/KafkaClientSslPropertiesUtil.java | 35 +++++++++++++++++ .../ui/util/KafkaServicesValidation.java | 2 +- .../io/kafbat/ui/util/SslPropertiesUtil.java | 23 ----------- .../kafbat/ui/util/WebClientConfigurator.java | 16 ++++++++ .../main/resources/swagger/kafbat-ui-api.yaml | 4 ++ 10 files changed, 89 insertions(+), 46 deletions(-) create mode 100644 api/src/main/java/io/kafbat/ui/util/KafkaClientSslPropertiesUtil.java delete mode 100644 api/src/main/java/io/kafbat/ui/util/SslPropertiesUtil.java diff --git a/api/src/main/java/io/kafbat/ui/config/ClustersProperties.java b/api/src/main/java/io/kafbat/ui/config/ClustersProperties.java index e91a5bc9a..5931602b2 100644 --- a/api/src/main/java/io/kafbat/ui/config/ClustersProperties.java +++ b/api/src/main/java/io/kafbat/ui/config/ClustersProperties.java @@ -35,22 +35,31 @@ public class ClustersProperties { public static class Cluster { String name; String bootstrapServers; + + TruststoreConfig ssl; + String schemaRegistry; SchemaRegistryAuth schemaRegistryAuth; KeystoreConfig schemaRegistrySsl; + String ksqldbServer; KsqldbServerAuth ksqldbServerAuth; KeystoreConfig ksqldbServerSsl; + List kafkaConnect; - MetricsConfigData metrics; - Map properties; - boolean readOnly = false; + List serde; String defaultKeySerde; String defaultValueSerde; - List masking; + + MetricsConfigData metrics; + Map properties; + boolean readOnly = false; + Long pollingThrottleRate; - TruststoreConfig ssl; + + List masking; + 
AuditProperties audit; } @@ -99,6 +108,16 @@ public static class SchemaRegistryAuth { public static class TruststoreConfig { String truststoreLocation; String truststorePassword; + boolean verifySsl = true; + } + + @Data + @NoArgsConstructor + @AllArgsConstructor + @ToString(exclude = {"keystorePassword"}) + public static class KeystoreConfig { + String keystoreLocation; + String keystorePassword; } @Data @@ -118,15 +137,6 @@ public static class KsqldbServerAuth { String password; } - @Data - @NoArgsConstructor - @AllArgsConstructor - @ToString(exclude = {"keystorePassword"}) - public static class KeystoreConfig { - String keystoreLocation; - String keystorePassword; - } - @Data public static class Masking { Type type; @@ -182,6 +192,7 @@ private void flattenClusterProperties() { } } + @SuppressWarnings("unchecked") private Map flattenClusterProperties(@Nullable String prefix, @Nullable Map propertiesMap) { Map flattened = new HashMap<>(); diff --git a/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java b/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java index bc175b980..e3613c94e 100644 --- a/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java +++ b/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java @@ -2,7 +2,7 @@ import io.kafbat.ui.config.ClustersProperties; import io.kafbat.ui.model.KafkaCluster; -import io.kafbat.ui.util.SslPropertiesUtil; +import io.kafbat.ui.util.KafkaClientSslPropertiesUtil; import java.io.Closeable; import java.time.Instant; import java.util.Map; @@ -42,7 +42,7 @@ public Mono get(KafkaCluster cluster) { private Mono createAdminClient(KafkaCluster cluster) { return Mono.fromSupplier(() -> { Properties properties = new Properties(); - SslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), properties); + KafkaClientSslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), properties); properties.putAll(cluster.getProperties()); properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.getBootstrapServers()); properties.putIfAbsent(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, clientTimeout); diff --git a/api/src/main/java/io/kafbat/ui/service/ConsumerGroupService.java b/api/src/main/java/io/kafbat/ui/service/ConsumerGroupService.java index b2d6bd20f..282bdc5b6 100644 --- a/api/src/main/java/io/kafbat/ui/service/ConsumerGroupService.java +++ b/api/src/main/java/io/kafbat/ui/service/ConsumerGroupService.java @@ -10,7 +10,7 @@ import io.kafbat.ui.model.SortOrderDTO; import io.kafbat.ui.service.rbac.AccessControlService; import io.kafbat.ui.util.ApplicationMetrics; -import io.kafbat.ui.util.SslPropertiesUtil; +import io.kafbat.ui.util.KafkaClientSslPropertiesUtil; import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; @@ -264,7 +264,7 @@ public EnhancedConsumer createConsumer(KafkaCluster cluster) { public EnhancedConsumer createConsumer(KafkaCluster cluster, Map properties) { Properties props = new Properties(); - SslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), props); + KafkaClientSslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), props); props.putAll(cluster.getProperties()); props.put(ConsumerConfig.CLIENT_ID_CONFIG, "kafbat-ui-consumer-" + System.currentTimeMillis()); props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.getBootstrapServers()); diff --git a/api/src/main/java/io/kafbat/ui/service/MessagesService.java 
b/api/src/main/java/io/kafbat/ui/service/MessagesService.java index 2f6192e11..c94472d56 100644 --- a/api/src/main/java/io/kafbat/ui/service/MessagesService.java +++ b/api/src/main/java/io/kafbat/ui/service/MessagesService.java @@ -23,7 +23,7 @@ import io.kafbat.ui.model.TopicMessageEventDTO; import io.kafbat.ui.serdes.ConsumerRecordDeserializer; import io.kafbat.ui.serdes.ProducerRecordCreator; -import io.kafbat.ui.util.SslPropertiesUtil; +import io.kafbat.ui.util.KafkaClientSslPropertiesUtil; import java.time.Instant; import java.time.OffsetDateTime; import java.time.ZoneOffset; @@ -199,7 +199,7 @@ private Mono sendMessageImpl(KafkaCluster cluster, public static KafkaProducer createProducer(KafkaCluster cluster, Map additionalProps) { Properties properties = new Properties(); - SslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), properties); + KafkaClientSslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), properties); properties.putAll(cluster.getProperties()); properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.getBootstrapServers()); properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class); diff --git a/api/src/main/java/io/kafbat/ui/service/ksql/KsqlApiClient.java b/api/src/main/java/io/kafbat/ui/service/ksql/KsqlApiClient.java index 3a0b46c81..90192eb2d 100644 --- a/api/src/main/java/io/kafbat/ui/service/ksql/KsqlApiClient.java +++ b/api/src/main/java/io/kafbat/ui/service/ksql/KsqlApiClient.java @@ -130,8 +130,8 @@ private Flux executeSelect(String ksql, Map s * Some version of ksqldb (?..0.24) can cut off json streaming without respect proper array ending like

* [{"header":{"queryId":"...","schema":"..."}}, ] * which will cause json parsing error and will be propagated to UI. - * This is a know issue(https://github.com/confluentinc/ksql/issues/8746), but we don't know when it will be fixed. - * To workaround this we need to check DecodingException err msg. + * This is a known issue(...), but we don't know when it will be fixed. + * To work around this we need to check DecodingException err msg. */ private boolean isUnexpectedJsonArrayEndCharException(Throwable th) { return th instanceof DecodingException diff --git a/api/src/main/java/io/kafbat/ui/util/KafkaClientSslPropertiesUtil.java b/api/src/main/java/io/kafbat/ui/util/KafkaClientSslPropertiesUtil.java new file mode 100644 index 000000000..324e2e4d0 --- /dev/null +++ b/api/src/main/java/io/kafbat/ui/util/KafkaClientSslPropertiesUtil.java @@ -0,0 +1,35 @@ +package io.kafbat.ui.util; + +import io.kafbat.ui.config.ClustersProperties; +import java.util.Properties; +import javax.annotation.Nullable; +import org.apache.kafka.common.config.SslConfigs; + +public final class KafkaClientSslPropertiesUtil { + + private KafkaClientSslPropertiesUtil() { + } + + public static void addKafkaSslProperties(@Nullable ClustersProperties.TruststoreConfig truststoreConfig, + Properties sink) { + if (truststoreConfig == null) { + return; + } + + if (!truststoreConfig.isVerifySsl()) { + sink.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, ""); + } + + if (truststoreConfig.getTruststoreLocation() == null) { + return; + } + + sink.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, truststoreConfig.getTruststoreLocation()); + + if (truststoreConfig.getTruststorePassword() != null) { + sink.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, truststoreConfig.getTruststorePassword()); + } + + } + +} diff --git a/api/src/main/java/io/kafbat/ui/util/KafkaServicesValidation.java b/api/src/main/java/io/kafbat/ui/util/KafkaServicesValidation.java index 629d0f339..397fa3839 100644 --- a/api/src/main/java/io/kafbat/ui/util/KafkaServicesValidation.java +++ b/api/src/main/java/io/kafbat/ui/util/KafkaServicesValidation.java @@ -65,7 +65,7 @@ public static Mono validateClusterConnection(S @Nullable TruststoreConfig ssl) { Properties properties = new Properties(); - SslPropertiesUtil.addKafkaSslProperties(ssl, properties); + KafkaClientSslPropertiesUtil.addKafkaSslProperties(ssl, properties); properties.putAll(clusterProps); properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); // editing properties to make validation faster diff --git a/api/src/main/java/io/kafbat/ui/util/SslPropertiesUtil.java b/api/src/main/java/io/kafbat/ui/util/SslPropertiesUtil.java deleted file mode 100644 index fda959a2b..000000000 --- a/api/src/main/java/io/kafbat/ui/util/SslPropertiesUtil.java +++ /dev/null @@ -1,23 +0,0 @@ -package io.kafbat.ui.util; - -import io.kafbat.ui.config.ClustersProperties; -import java.util.Properties; -import javax.annotation.Nullable; -import org.apache.kafka.common.config.SslConfigs; - -public final class SslPropertiesUtil { - - private SslPropertiesUtil() { - } - - public static void addKafkaSslProperties(@Nullable ClustersProperties.TruststoreConfig truststoreConfig, - Properties sink) { - if (truststoreConfig != null && truststoreConfig.getTruststoreLocation() != null) { - sink.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, truststoreConfig.getTruststoreLocation()); - if (truststoreConfig.getTruststorePassword() != null) { - sink.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, 
truststoreConfig.getTruststorePassword()); - } - } - } - -} diff --git a/api/src/main/java/io/kafbat/ui/util/WebClientConfigurator.java b/api/src/main/java/io/kafbat/ui/util/WebClientConfigurator.java index 5d364f6dc..1c289f54f 100644 --- a/api/src/main/java/io/kafbat/ui/util/WebClientConfigurator.java +++ b/api/src/main/java/io/kafbat/ui/util/WebClientConfigurator.java @@ -7,6 +7,7 @@ import io.kafbat.ui.exception.ValidationException; import io.netty.handler.ssl.SslContext; import io.netty.handler.ssl.SslContextBuilder; +import io.netty.handler.ssl.util.InsecureTrustManagerFactory; import java.io.FileInputStream; import java.security.KeyStore; import java.util.function.Consumer; @@ -45,6 +46,10 @@ private static ObjectMapper defaultOM() { public WebClientConfigurator configureSsl(@Nullable ClustersProperties.TruststoreConfig truststoreConfig, @Nullable ClustersProperties.KeystoreConfig keystoreConfig) { + if (truststoreConfig != null && !truststoreConfig.isVerifySsl()) { + return configureNoSsl(); + } + return configureSsl( keystoreConfig != null ? keystoreConfig.getKeystoreLocation() : null, keystoreConfig != null ? keystoreConfig.getKeystorePassword() : null, @@ -97,6 +102,17 @@ private WebClientConfigurator configureSsl( return this; } + @SneakyThrows + public WebClientConfigurator configureNoSsl() { + var contextBuilder = SslContextBuilder.forClient(); + contextBuilder.trustManager(InsecureTrustManagerFactory.INSTANCE); + + SslContext context = contextBuilder.build(); + + httpClient = httpClient.secure(t -> t.sslContext(context)); + return this; + } + public WebClientConfigurator configureBasicAuth(@Nullable String username, @Nullable String password) { if (username != null && password != null) { builder.defaultHeaders(httpHeaders -> httpHeaders.setBasicAuth(username, password)); diff --git a/contract/src/main/resources/swagger/kafbat-ui-api.yaml b/contract/src/main/resources/swagger/kafbat-ui-api.yaml index 97d0e5b11..315c4a17e 100644 --- a/contract/src/main/resources/swagger/kafbat-ui-api.yaml +++ b/contract/src/main/resources/swagger/kafbat-ui-api.yaml @@ -4242,6 +4242,10 @@ components: type: string truststorePassword: type: string + verifySsl: + type: boolean + description: Skip SSL verification for the host. + default: true schemaRegistry: type: string schemaRegistryAuth: From 9f79a56d42f88feae7f1d3b020b598427d3ea1b2 Mon Sep 17 00:00:00 2001 From: Roman Zabaluev Date: Tue, 31 Dec 2024 08:55:25 +0400 Subject: [PATCH 34/84] BE: RBAC: Impl Active Directory populator (#717) + BE: RBAC: LDAP: Implement user subject type for LDAP & AD. 
Resolves #54, resolves #730 --- .../ui/config/auth/LdapSecurityConfig.java | 93 +++++++++++-------- .../ui/service/AdminClientServiceImpl.java | 2 +- ...acActiveDirectoryAuthoritiesExtractor.java | 50 ++++++++++ .../RbacLdapAuthoritiesExtractor.java | 10 +- 4 files changed, 113 insertions(+), 42 deletions(-) create mode 100644 api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacActiveDirectoryAuthoritiesExtractor.java diff --git a/api/src/main/java/io/kafbat/ui/config/auth/LdapSecurityConfig.java b/api/src/main/java/io/kafbat/ui/config/auth/LdapSecurityConfig.java index 4d89a9568..9b1445507 100644 --- a/api/src/main/java/io/kafbat/ui/config/auth/LdapSecurityConfig.java +++ b/api/src/main/java/io/kafbat/ui/config/auth/LdapSecurityConfig.java @@ -1,6 +1,7 @@ package io.kafbat.ui.config.auth; import io.kafbat.ui.service.rbac.AccessControlService; +import io.kafbat.ui.service.rbac.extractor.RbacActiveDirectoryAuthoritiesExtractor; import io.kafbat.ui.service.rbac.extractor.RbacLdapAuthoritiesExtractor; import io.kafbat.ui.util.StaticFileWebFilter; import java.util.Collection; @@ -8,6 +9,7 @@ import java.util.Optional; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.ApplicationContext; @@ -17,7 +19,6 @@ import org.springframework.ldap.core.DirContextOperations; import org.springframework.ldap.core.support.BaseLdapPathContextSource; import org.springframework.ldap.core.support.LdapContextSource; -import org.springframework.security.authentication.AuthenticationManager; import org.springframework.security.authentication.ProviderManager; import org.springframework.security.authentication.ReactiveAuthenticationManager; import org.springframework.security.authentication.ReactiveAuthenticationManagerAdapter; @@ -29,10 +30,11 @@ import org.springframework.security.ldap.authentication.AbstractLdapAuthenticationProvider; import org.springframework.security.ldap.authentication.BindAuthenticator; import org.springframework.security.ldap.authentication.LdapAuthenticationProvider; +import org.springframework.security.ldap.authentication.NullLdapAuthoritiesPopulator; import org.springframework.security.ldap.authentication.ad.ActiveDirectoryLdapAuthenticationProvider; +import org.springframework.security.ldap.authentication.ad.DefaultActiveDirectoryAuthoritiesPopulator; import org.springframework.security.ldap.search.FilterBasedLdapUserSearch; import org.springframework.security.ldap.search.LdapUserSearch; -import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator; import org.springframework.security.ldap.userdetails.LdapAuthoritiesPopulator; import org.springframework.security.ldap.userdetails.LdapUserDetailsMapper; import org.springframework.security.web.server.SecurityWebFilterChain; @@ -49,14 +51,43 @@ public class LdapSecurityConfig extends AbstractAuthSecurityConfig { private final LdapProperties props; @Bean - public ReactiveAuthenticationManager authenticationManager(LdapContextSource ldapContextSource, - LdapAuthoritiesPopulator authoritiesExtractor, - AccessControlService acs) { + public ReactiveAuthenticationManager authenticationManager(AbstractLdapAuthenticationProvider authProvider) { + return new ReactiveAuthenticationManagerAdapter(new ProviderManager(List.of(authProvider))); + } + 
+ @Bean + public AbstractLdapAuthenticationProvider authenticationProvider(LdapAuthoritiesPopulator authoritiesExtractor, + @Autowired(required = false) BindAuthenticator ba, + AccessControlService acs) { var rbacEnabled = acs.isRbacEnabled(); + + AbstractLdapAuthenticationProvider authProvider; + + if (!props.isActiveDirectory()) { + authProvider = new LdapAuthenticationProvider(ba, authoritiesExtractor); + } else { + authProvider = new ActiveDirectoryLdapAuthenticationProvider(props.getActiveDirectoryDomain(), + props.getUrls()); + authProvider.setUseAuthenticationRequestCredentials(true); + ((ActiveDirectoryLdapAuthenticationProvider) authProvider).setAuthoritiesPopulator(authoritiesExtractor); + } + + if (rbacEnabled) { + authProvider.setUserDetailsContextMapper(new RbacUserDetailsMapper()); + } + + return authProvider; + } + + @Bean + @ConditionalOnProperty(value = "oauth2.ldap.activeDirectory", havingValue = "false") + public BindAuthenticator ldapBindAuthentication(LdapContextSource ldapContextSource) { BindAuthenticator ba = new BindAuthenticator(ldapContextSource); + if (props.getBase() != null) { ba.setUserDnPatterns(new String[] {props.getBase()}); } + if (props.getUserFilterSearchFilter() != null) { LdapUserSearch userSearch = new FilterBasedLdapUserSearch(props.getUserFilterSearchBase(), props.getUserFilterSearchFilter(), @@ -64,24 +95,7 @@ public ReactiveAuthenticationManager authenticationManager(LdapContextSource lda ba.setUserSearch(userSearch); } - AbstractLdapAuthenticationProvider authenticationProvider; - if (!props.isActiveDirectory()) { - authenticationProvider = rbacEnabled - ? new LdapAuthenticationProvider(ba, authoritiesExtractor) - : new LdapAuthenticationProvider(ba); - } else { - authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider(props.getActiveDirectoryDomain(), - props.getUrls()); // TODO Issue #3741 - authenticationProvider.setUseAuthenticationRequestCredentials(true); - } - - if (rbacEnabled) { - authenticationProvider.setUserDetailsContextMapper(new UserDetailsMapper()); - } - - AuthenticationManager am = new ProviderManager(List.of(authenticationProvider)); - - return new ReactiveAuthenticationManagerAdapter(am); + return ba; } @Bean @@ -95,24 +109,27 @@ public LdapContextSource ldapContextSource() { } @Bean - public DefaultLdapAuthoritiesPopulator ldapAuthoritiesExtractor(ApplicationContext context, - BaseLdapPathContextSource contextSource, - AccessControlService acs) { - var rbacEnabled = acs != null && acs.isRbacEnabled(); + public LdapAuthoritiesPopulator authoritiesExtractor(ApplicationContext ctx, + BaseLdapPathContextSource ldapCtx, + AccessControlService acs) { + if (!props.isActiveDirectory()) { + if (!acs.isRbacEnabled()) { + return new NullLdapAuthoritiesPopulator(); + } - DefaultLdapAuthoritiesPopulator extractor; + var extractor = new RbacLdapAuthoritiesExtractor(ctx, ldapCtx, props.getGroupFilterSearchBase()); - if (rbacEnabled) { - extractor = new RbacLdapAuthoritiesExtractor(context, contextSource, props.getGroupFilterSearchBase()); + Optional.ofNullable(props.getGroupFilterSearchFilter()).ifPresent(extractor::setGroupSearchFilter); + extractor.setRolePrefix(""); + extractor.setConvertToUpperCase(false); + extractor.setSearchSubtree(true); + + return extractor; } else { - extractor = new DefaultLdapAuthoritiesPopulator(contextSource, props.getGroupFilterSearchBase()); + return acs.isRbacEnabled() + ? 
new RbacActiveDirectoryAuthoritiesExtractor(ctx) + : new DefaultActiveDirectoryAuthoritiesPopulator(); } - - Optional.ofNullable(props.getGroupFilterSearchFilter()).ifPresent(extractor::setGroupSearchFilter); - extractor.setRolePrefix(""); - extractor.setConvertToUpperCase(false); - extractor.setSearchSubtree(true); - return extractor; } @Bean @@ -142,7 +159,7 @@ public SecurityWebFilterChain configureLdap(ServerHttpSecurity http) { return builder.build(); } - private static class UserDetailsMapper extends LdapUserDetailsMapper { + private static class RbacUserDetailsMapper extends LdapUserDetailsMapper { @Override public UserDetails mapUserFromContext(DirContextOperations ctx, String username, Collection authorities) { diff --git a/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java b/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java index e3613c94e..6c018ba31 100644 --- a/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java +++ b/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java @@ -53,7 +53,7 @@ private Mono createAdminClient(KafkaCluster cluster) { return AdminClient.create(properties); }).flatMap(ac -> ReactiveAdminClient.create(ac).doOnError(th -> ac.close())) .onErrorMap(th -> new IllegalStateException( - "Error while creating AdminClient for Cluster " + cluster.getName(), th)); + "Error while creating AdminClient for the cluster " + cluster.getName(), th)); } @Override diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacActiveDirectoryAuthoritiesExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacActiveDirectoryAuthoritiesExtractor.java new file mode 100644 index 000000000..cefef5a7e --- /dev/null +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacActiveDirectoryAuthoritiesExtractor.java @@ -0,0 +1,50 @@ +package io.kafbat.ui.service.rbac.extractor; + +import io.kafbat.ui.model.rbac.Role; +import io.kafbat.ui.model.rbac.provider.Provider; +import io.kafbat.ui.service.rbac.AccessControlService; +import java.util.Collection; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; +import org.springframework.context.ApplicationContext; +import org.springframework.ldap.core.DirContextOperations; +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.authority.SimpleGrantedAuthority; +import org.springframework.security.ldap.authentication.ad.DefaultActiveDirectoryAuthoritiesPopulator; +import org.springframework.security.ldap.userdetails.LdapAuthoritiesPopulator; + +@Slf4j +public class RbacActiveDirectoryAuthoritiesExtractor implements LdapAuthoritiesPopulator { + + private final DefaultActiveDirectoryAuthoritiesPopulator populator = new DefaultActiveDirectoryAuthoritiesPopulator(); + private final AccessControlService acs; + + public RbacActiveDirectoryAuthoritiesExtractor(ApplicationContext context) { + this.acs = context.getBean(AccessControlService.class); + } + + @Override + public Collection getGrantedAuthorities(DirContextOperations userData, String username) { + var adGroups = populator.getGrantedAuthorities(userData, username) + .stream() + .map(GrantedAuthority::getAuthority) + .peek(group -> log.trace("Found AD group [{}] for user [{}]", group, username)) + .collect(Collectors.toSet()); + + return acs.getRoles() + .stream() + .filter(r -> r.getSubjects() + .stream() + .filter(subject -> subject.getProvider().equals(Provider.LDAP_AD)) + .anyMatch(subject -> switch (subject.getType()) { + case 
"user" -> username.equalsIgnoreCase(subject.getValue()); + case "group" -> adGroups.contains(subject.getValue()); + default -> false; + }) + ) + .map(Role::getName) + .peek(role -> log.trace("Mapped role [{}] for user [{}]", role, username)) + .map(SimpleGrantedAuthority::new) + .collect(Collectors.toSet()); + } +} diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java index 3282ab1e2..261b30cfe 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java @@ -19,7 +19,8 @@ public class RbacLdapAuthoritiesExtractor extends NestedLdapAuthoritiesPopulator private final AccessControlService acs; public RbacLdapAuthoritiesExtractor(ApplicationContext context, - BaseLdapPathContextSource contextSource, String groupFilterSearchBase) { + BaseLdapPathContextSource contextSource, + String groupFilterSearchBase) { super(contextSource, groupFilterSearchBase); this.acs = context.getBean(AccessControlService.class); } @@ -37,8 +38,11 @@ protected Set getAdditionalRoles(DirContextOperations user, St .filter(r -> r.getSubjects() .stream() .filter(subject -> subject.getProvider().equals(Provider.LDAP)) - .filter(subject -> subject.getType().equals("group")) - .anyMatch(subject -> ldapGroups.contains(subject.getValue())) + .anyMatch(subject -> switch (subject.getType()) { + case "user" -> username.equalsIgnoreCase(subject.getValue()); + case "group" -> ldapGroups.contains(subject.getValue()); + default -> false; + }) ) .map(Role::getName) .peek(role -> log.trace("Mapped role [{}] for user [{}]", role, username)) From 582e9a9902cb3114d079a2d9ad0a5ed495ceadb2 Mon Sep 17 00:00:00 2001 From: Roman Zabaluev Date: Wed, 1 Jan 2025 17:18:16 +0400 Subject: [PATCH 35/84] Update copyright year --- LICENSE | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/LICENSE b/LICENSE index bef1db1d9..bedb0fad0 100644 --- a/LICENSE +++ b/LICENSE @@ -187,7 +187,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2020 CloudHut + Copyright 2025 Kafbat Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -199,4 +199,4 @@ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file + limitations under the License. 
From 7a1a6fa475bd492601011b9c125cd488b5b34b81 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 2 Jan 2025 19:36:28 +0400 Subject: [PATCH 36/84] Bump jsonpath-plus from 10.0.7 to 10.1.0 in /frontend in the npm_and_yarn group (#725) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- frontend/package.json | 2 +- frontend/pnpm-lock.yaml | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/frontend/package.json b/frontend/package.json index 249c91a8f..a0bb2dcab 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -16,7 +16,7 @@ "ajv": "8.8.2", "ajv-formats": "2.1.1", "json-schema-faker": "0.5.6", - "jsonpath-plus": "10.0.7", + "jsonpath-plus": "10.1.0", "lossless-json": "2.0.11", "pretty-ms": "7.0.1", "react": "18.2.0", diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml index 29518a5cf..0d66036d8 100644 --- a/frontend/pnpm-lock.yaml +++ b/frontend/pnpm-lock.yaml @@ -57,8 +57,8 @@ importers: specifier: 0.5.6 version: 0.5.6 jsonpath-plus: - specifier: 10.0.7 - version: 10.0.7 + specifier: 10.1.0 + version: 10.1.0 lossless-json: specifier: 2.0.11 version: 2.0.11 @@ -2946,8 +2946,8 @@ packages: jsonfile@6.1.0: resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} - jsonpath-plus@10.0.7: - resolution: {integrity: sha512-GDA8d8fu9+s4QzAzo5LMGiLL/9YjecAX+ytlnqdeXYpU55qME57StDgaHt9R2pA7Dr8U31nwzxNJMJiHkrkRgw==} + jsonpath-plus@10.1.0: + resolution: {integrity: sha512-gHfV1IYqH8uJHYVTs8BJX1XKy2/rR93+f8QQi0xhx95aCiXn1ettYAd5T+7FU6wfqyDoX/wy0pm/fL3jOKJ9Lg==} engines: {node: '>=18.0.0'} hasBin: true @@ -7560,7 +7560,7 @@ snapshots: optionalDependencies: graceful-fs: 4.2.10 - jsonpath-plus@10.0.7: + jsonpath-plus@10.1.0: dependencies: '@jsep-plugin/assignment': 1.3.0(jsep@1.4.0) '@jsep-plugin/regex': 1.0.4(jsep@1.4.0) From 911271ad363c5cea7e88b789c3c2ca062d90a9a3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 2 Jan 2025 19:39:51 +0400 Subject: [PATCH 37/84] Bump aquasecurity/trivy-action from 0.19.0 to 0.29.0 (#672) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/cve_checks.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/cve_checks.yml b/.github/workflows/cve_checks.yml index 2bb98dd5a..bb3dc467e 100644 --- a/.github/workflows/cve_checks.yml +++ b/.github/workflows/cve_checks.yml @@ -62,7 +62,7 @@ jobs: cache-to: type=local,dest=/tmp/.buildx-cache - name: Run CVE checks - uses: aquasecurity/trivy-action@0.19.0 + uses: aquasecurity/trivy-action@0.29.0 with: image-ref: "ghcr.io/kafbat/kafka-ui:${{ steps.build.outputs.version }}" format: "table" From 02932fc3c27f513789cbdb34e28746d39d54c1f3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 2 Jan 2025 19:55:22 +0400 Subject: [PATCH 38/84] Bump jsonpath-plus from 10.0.7 to 10.2.0 in /frontend (#734) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- frontend/package.json | 2 +- frontend/pnpm-lock.yaml | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/frontend/package.json b/frontend/package.json index a0bb2dcab..c05150b42 100644 --- a/frontend/package.json +++ 
b/frontend/package.json @@ -16,7 +16,7 @@ "ajv": "8.8.2", "ajv-formats": "2.1.1", "json-schema-faker": "0.5.6", - "jsonpath-plus": "10.1.0", + "jsonpath-plus": "10.2.0", "lossless-json": "2.0.11", "pretty-ms": "7.0.1", "react": "18.2.0", diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml index 0d66036d8..07a2d9c19 100644 --- a/frontend/pnpm-lock.yaml +++ b/frontend/pnpm-lock.yaml @@ -57,8 +57,8 @@ importers: specifier: 0.5.6 version: 0.5.6 jsonpath-plus: - specifier: 10.1.0 - version: 10.1.0 + specifier: 10.2.0 + version: 10.2.0 lossless-json: specifier: 2.0.11 version: 2.0.11 @@ -2946,8 +2946,8 @@ packages: jsonfile@6.1.0: resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} - jsonpath-plus@10.1.0: - resolution: {integrity: sha512-gHfV1IYqH8uJHYVTs8BJX1XKy2/rR93+f8QQi0xhx95aCiXn1ettYAd5T+7FU6wfqyDoX/wy0pm/fL3jOKJ9Lg==} + jsonpath-plus@10.2.0: + resolution: {integrity: sha512-T9V+8iNYKFL2n2rF+w02LBOT2JjDnTjioaNFrxRy0Bv1y/hNsqR/EBK7Ojy2ythRHwmz2cRIls+9JitQGZC/sw==} engines: {node: '>=18.0.0'} hasBin: true @@ -7560,7 +7560,7 @@ snapshots: optionalDependencies: graceful-fs: 4.2.10 - jsonpath-plus@10.1.0: + jsonpath-plus@10.2.0: dependencies: '@jsep-plugin/assignment': 1.3.0(jsep@1.4.0) '@jsep-plugin/regex': 1.0.4(jsep@1.4.0) From a8811d1be3a0db2b02afa744c404ac961572b0d1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 2 Jan 2025 20:03:36 +0400 Subject: [PATCH 39/84] Bump spring-boot.version from 3.3.6 to 3.4.1 (#742) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 43427b3f1..05f68f576 100644 --- a/pom.xml +++ b/pom.xml @@ -44,7 +44,7 @@ 3.25.5 2.13.9 2.3 - 3.3.6 + 3.4.1 1.0.0 0.1.17 0.1.39 From 713932af9379354e2dc3746c200a5a71b786cc9b Mon Sep 17 00:00:00 2001 From: Roman Zabaluev Date: Fri, 3 Jan 2025 15:37:31 +0400 Subject: [PATCH 40/84] Infra: Discord hook for failed CVE runs (#219) --- .github/workflows/cve_checks.yml | 10 +++++++++ .github/workflows/infra_discord_hook.yml | 27 ++++++++++++++++++++++++ 2 files changed, 37 insertions(+) create mode 100644 .github/workflows/infra_discord_hook.yml diff --git a/.github/workflows/cve_checks.yml b/.github/workflows/cve_checks.yml index bb3dc467e..da8019760 100644 --- a/.github/workflows/cve_checks.yml +++ b/.github/workflows/cve_checks.yml @@ -9,6 +9,7 @@ permissions: contents: read jobs: + build-and-test: runs-on: ubuntu-latest @@ -67,3 +68,12 @@ jobs: image-ref: "ghcr.io/kafbat/kafka-ui:${{ steps.build.outputs.version }}" format: "table" exit-code: "1" + + notify: + needs: build-and-test + if: ${{ always() && needs.build-and-test.result == 'failure' }} + uses: ./.github/workflows/infra_discord_hook.yml + with: + message: "Attention! CVE checks run failed! 
Please fix them CVEs :(" + secrets: + DISCORD_WEBHOOK_URL: ${{ secrets.DISCORD_WEBHOOK_URL_CVE }} diff --git a/.github/workflows/infra_discord_hook.yml b/.github/workflows/infra_discord_hook.yml new file mode 100644 index 000000000..929a19cd4 --- /dev/null +++ b/.github/workflows/infra_discord_hook.yml @@ -0,0 +1,27 @@ +name: 'Discord hook' + +on: + workflow_call: + inputs: + message: + description: 'Message text' + required: true + type: string + secrets: + DISCORD_WEBHOOK_URL: + required: true + +permissions: + contents: read + +jobs: + + hook: + runs-on: ubuntu-latest + steps: + - name: Notify Discord on Failure + uses: Ilshidur/action-discord@0.3.2 + with: + args: ${{ inputs.message }} + env: + DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_URL }} From 0674286137ba417299eba61108617c883e9be089 Mon Sep 17 00:00:00 2001 From: Roman Zabaluev Date: Sat, 4 Jan 2025 21:41:24 +0800 Subject: [PATCH 41/84] Update workflow jobs' naming --- .github/workflows/backend_main.yml | 2 +- .github/workflows/backend_pr.yml | 2 +- .github/workflows/cve_checks.yml | 4 ++-- .github/workflows/frontend_main.yml | 2 +- .github/workflows/frontend_pr.yml | 2 +- .github/workflows/md-links.yml | 2 +- .github/workflows/pr_linter.yml | 3 +-- .github/workflows/workflow_linter.yml | 2 +- 8 files changed, 9 insertions(+), 10 deletions(-) diff --git a/.github/workflows/backend_main.yml b/.github/workflows/backend_main.yml index b0c7f8ab4..a6c24cc22 100644 --- a/.github/workflows/backend_main.yml +++ b/.github/workflows/backend_main.yml @@ -19,7 +19,7 @@ concurrency: cancel-in-progress: true jobs: - build: + build-and-test: uses: ./.github/workflows/backend_tests.yml with: event_name: ${{ github.event_name }} diff --git a/.github/workflows/backend_pr.yml b/.github/workflows/backend_pr.yml index 3570dfa97..5812a14c5 100644 --- a/.github/workflows/backend_pr.yml +++ b/.github/workflows/backend_pr.yml @@ -20,7 +20,7 @@ concurrency: cancel-in-progress: true jobs: - build: + build-and-test: uses: ./.github/workflows/backend_tests.yml with: event_name: ${{ github.event_name }} diff --git a/.github/workflows/cve_checks.yml b/.github/workflows/cve_checks.yml index da8019760..e9c90ac14 100644 --- a/.github/workflows/cve_checks.yml +++ b/.github/workflows/cve_checks.yml @@ -10,7 +10,7 @@ permissions: jobs: - build-and-test: + check-cves: runs-on: ubuntu-latest steps: @@ -70,7 +70,7 @@ jobs: exit-code: "1" notify: - needs: build-and-test + needs: check-cves if: ${{ always() && needs.build-and-test.result == 'failure' }} uses: ./.github/workflows/infra_discord_hook.yml with: diff --git a/.github/workflows/frontend_main.yml b/.github/workflows/frontend_main.yml index b680a7334..008517f7a 100644 --- a/.github/workflows/frontend_main.yml +++ b/.github/workflows/frontend_main.yml @@ -15,5 +15,5 @@ concurrency: cancel-in-progress: true jobs: - build: + build-and-test: uses: ./.github/workflows/frontend_tests.yml diff --git a/.github/workflows/frontend_pr.yml b/.github/workflows/frontend_pr.yml index 22a4d6f5f..a49b3193a 100644 --- a/.github/workflows/frontend_pr.yml +++ b/.github/workflows/frontend_pr.yml @@ -16,5 +16,5 @@ concurrency: cancel-in-progress: true jobs: - build: + build-and-test: uses: ./.github/workflows/frontend_tests.yml diff --git a/.github/workflows/md-links.yml b/.github/workflows/md-links.yml index b885e014d..45a8e920a 100644 --- a/.github/workflows/md-links.yml +++ b/.github/workflows/md-links.yml @@ -13,7 +13,7 @@ permissions: contents: read jobs: - build-and-test: + lint-md: runs-on: ubuntu-latest steps: diff --git 
a/.github/workflows/pr_linter.yml b/.github/workflows/pr_linter.yml index f4562345f..703dccd4f 100644 --- a/.github/workflows/pr_linter.yml +++ b/.github/workflows/pr_linter.yml @@ -5,13 +5,12 @@ on: permissions: checks: write jobs: - task-check: + check-tasks: runs-on: ubuntu-latest steps: - uses: kentaro-m/task-completed-checker-action@v0.1.2 with: repo-token: "${{ secrets.GITHUB_TOKEN }}" - uses: dekinderfiets/pr-description-enforcer@0.0.1 - if: false # TODO remove when public with: repo-token: "${{ secrets.GITHUB_TOKEN }}" diff --git a/.github/workflows/workflow_linter.yml b/.github/workflows/workflow_linter.yml index 728aaa251..12dc3656c 100644 --- a/.github/workflows/workflow_linter.yml +++ b/.github/workflows/workflow_linter.yml @@ -9,7 +9,7 @@ permissions: contents: read jobs: - build-and-test: + lint-workflows: runs-on: ubuntu-latest steps: From 00ebb0da0fb16f187c1e562c2a6a7202d5c8e7c4 Mon Sep 17 00:00:00 2001 From: "H@di" Date: Mon, 6 Jan 2025 15:18:18 +0330 Subject: [PATCH 42/84] BE: Chore: CORS config on error handling (#555) Co-authored-by: Roman Zabaluev --- .../kafbat/ui/config/CorsGlobalConfiguration.java | 12 ++++++++---- .../io/kafbat/ui/config/ReadOnlyModeFilter.java | 3 ++- .../exception/GlobalErrorWebExceptionHandler.java | 13 +++++++++++++ 3 files changed, 23 insertions(+), 5 deletions(-) diff --git a/api/src/main/java/io/kafbat/ui/config/CorsGlobalConfiguration.java b/api/src/main/java/io/kafbat/ui/config/CorsGlobalConfiguration.java index 4713dfd37..d39fda91d 100644 --- a/api/src/main/java/io/kafbat/ui/config/CorsGlobalConfiguration.java +++ b/api/src/main/java/io/kafbat/ui/config/CorsGlobalConfiguration.java @@ -22,10 +22,7 @@ public WebFilter corsFilter() { final ServerHttpResponse response = ctx.getResponse(); final HttpHeaders headers = response.getHeaders(); - headers.add("Access-Control-Allow-Origin", "*"); - headers.add("Access-Control-Allow-Methods", "GET, PUT, POST, DELETE, OPTIONS"); - headers.add("Access-Control-Max-Age", "3600"); - headers.add("Access-Control-Allow-Headers", "Content-Type"); + fillCorsHeader(headers, request); if (request.getMethod() == HttpMethod.OPTIONS) { response.setStatusCode(HttpStatus.OK); @@ -36,4 +33,11 @@ public WebFilter corsFilter() { }; } + public static void fillCorsHeader(HttpHeaders responseHeaders, ServerHttpRequest request) { + responseHeaders.add("Access-Control-Allow-Origin", request.getHeaders().getOrigin()); + responseHeaders.add("Access-Control-Allow-Credentials", "true"); + responseHeaders.add("Access-Control-Allow-Methods", "GET, PUT, POST, DELETE, OPTIONS"); + responseHeaders.add("Access-Control-Max-Age", "3600"); + responseHeaders.add("Access-Control-Allow-Headers", "Content-Type"); + } } diff --git a/api/src/main/java/io/kafbat/ui/config/ReadOnlyModeFilter.java b/api/src/main/java/io/kafbat/ui/config/ReadOnlyModeFilter.java index ac7c6747f..acfe1929c 100644 --- a/api/src/main/java/io/kafbat/ui/config/ReadOnlyModeFilter.java +++ b/api/src/main/java/io/kafbat/ui/config/ReadOnlyModeFilter.java @@ -33,7 +33,8 @@ public class ReadOnlyModeFilter implements WebFilter { @NotNull @Override public Mono filter(ServerWebExchange exchange, @NotNull WebFilterChain chain) { - var isSafeMethod = exchange.getRequest().getMethod() == HttpMethod.GET; + var isSafeMethod = + exchange.getRequest().getMethod() == HttpMethod.GET || exchange.getRequest().getMethod() == HttpMethod.OPTIONS; if (isSafeMethod) { return chain.filter(exchange); } diff --git a/api/src/main/java/io/kafbat/ui/exception/GlobalErrorWebExceptionHandler.java 
b/api/src/main/java/io/kafbat/ui/exception/GlobalErrorWebExceptionHandler.java index b4c978ac2..61236f801 100644 --- a/api/src/main/java/io/kafbat/ui/exception/GlobalErrorWebExceptionHandler.java +++ b/api/src/main/java/io/kafbat/ui/exception/GlobalErrorWebExceptionHandler.java @@ -2,12 +2,14 @@ import com.google.common.base.Throwables; import com.google.common.collect.Sets; +import io.kafbat.ui.config.CorsGlobalConfiguration; import io.kafbat.ui.model.ErrorResponseDTO; import java.math.BigDecimal; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.Consumer; import java.util.stream.Collectors; import java.util.stream.Stream; import org.springframework.boot.autoconfigure.web.WebProperties; @@ -16,6 +18,7 @@ import org.springframework.context.ApplicationContext; import org.springframework.core.Ordered; import org.springframework.core.annotation.Order; +import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.http.codec.ServerCodecConfigurer; @@ -78,6 +81,7 @@ private Mono renderDefault(Throwable throwable, ServerRequest re return ServerResponse .status(ErrorCode.UNEXPECTED.httpStatus()) .contentType(MediaType.APPLICATION_JSON) + .headers(headers(request)) .bodyValue(response); } @@ -92,6 +96,7 @@ private Mono render(CustomBaseException baseException, ServerReq return ServerResponse .status(errorCode.httpStatus()) .contentType(MediaType.APPLICATION_JSON) + .headers(headers(request)) .bodyValue(response); } @@ -122,6 +127,7 @@ private Mono render(WebExchangeBindException exception, ServerRe return ServerResponse .status(HttpStatus.BAD_REQUEST) .contentType(MediaType.APPLICATION_JSON) + .headers(headers(request)) .bodyValue(response); } @@ -136,6 +142,7 @@ private Mono render(ResponseStatusException exception, ServerReq return ServerResponse .status(exception.getStatusCode()) .contentType(MediaType.APPLICATION_JSON) + .headers(headers(request)) .bodyValue(response); } @@ -143,6 +150,12 @@ private String requestId(ServerRequest request) { return request.exchange().getRequest().getId(); } + private Consumer headers(ServerRequest request) { + return (HttpHeaders headers) -> { + CorsGlobalConfiguration.fillCorsHeader(headers, request.exchange().getRequest()); + }; + } + private BigDecimal currentTimestamp() { return BigDecimal.valueOf(System.currentTimeMillis()); } From 91f95f6ccaff3640dd6b444b44f7fa6c74fb3c62 Mon Sep 17 00:00:00 2001 From: DoHyeon Kim Date: Mon, 6 Jan 2025 20:50:42 +0900 Subject: [PATCH 43/84] FE: Consumers: Fix lag is displayed as 'N/A' in case of null value (#720) --- .../src/components/ConsumerGroups/List.tsx | 2 +- .../ConsumerGroups/__test__/List.spec.tsx | 55 +++++++++++++++++++ 2 files changed, 56 insertions(+), 1 deletion(-) create mode 100644 frontend/src/components/ConsumerGroups/__test__/List.spec.tsx diff --git a/frontend/src/components/ConsumerGroups/List.tsx b/frontend/src/components/ConsumerGroups/List.tsx index 54d3ebf4b..683f8b27b 100644 --- a/frontend/src/components/ConsumerGroups/List.tsx +++ b/frontend/src/components/ConsumerGroups/List.tsx @@ -62,7 +62,7 @@ const List = () => { header: 'Consumer Lag', accessorKey: 'consumerLag', cell: (args) => { - return args.getValue() || 'N/A'; + return args.getValue() ?? 
'N/A'; }, }, { diff --git a/frontend/src/components/ConsumerGroups/__test__/List.spec.tsx b/frontend/src/components/ConsumerGroups/__test__/List.spec.tsx new file mode 100644 index 000000000..ad79c05f2 --- /dev/null +++ b/frontend/src/components/ConsumerGroups/__test__/List.spec.tsx @@ -0,0 +1,55 @@ +import React from 'react'; +import { screen } from '@testing-library/react'; +import { render } from 'lib/testHelpers'; +import { useConsumerGroups } from 'lib/hooks/api/consumers'; +import List from 'components/ConsumerGroups/List'; + +// Mock hooks +jest.mock('lib/hooks/api/consumers', () => ({ + useConsumerGroups: jest.fn(), +})); + +jest.mock('react-router-dom', () => ({ + ...jest.requireActual('react-router-dom'), + useSearchParams: () => [new URLSearchParams(), jest.fn()], + useNavigate: () => jest.fn(), +})); + +const mockUseConsumerGroups = useConsumerGroups as jest.Mock; + +describe('ConsumerGroups List', () => { + beforeEach(() => { + mockUseConsumerGroups.mockImplementation(() => ({ + data: { + consumerGroups: [ + { + groupId: 'group1', + consumerLag: 0, + members: 1, + topics: 1, + coordinator: { id: 1 }, + state: 'STABLE', + }, + { + groupId: 'group2', + consumerLag: null, + members: 1, + topics: 1, + coordinator: { id: 2 }, + state: 'STABLE', + }, + ], + pageCount: 1, + }, + isSuccess: true, + isFetching: false, + })); + }); + + it('renders consumer lag values correctly', () => { + render(); + const tableRows = screen.getAllByRole('row'); + expect(tableRows[1]).toHaveTextContent('0'); + expect(tableRows[2]).toHaveTextContent('N/A'); + }); +}); From 10c4f7ad972d9cbe829016868053f26cfa59df17 Mon Sep 17 00:00:00 2001 From: DimaVilda Date: Mon, 6 Jan 2025 12:52:23 +0100 Subject: [PATCH 44/84] BE: Serde: Fix avro schema serde for nullable enums (#685) --- .../jsonschema/AvroJsonSchemaConverter.java | 12 +++--- .../AvroJsonSchemaConverterTest.java | 42 +++++++++++++++++++ .../jsonschema/JsonAvroConversionTest.java | 37 ++++++++++++++++ 3 files changed, 84 insertions(+), 7 deletions(-) diff --git a/api/src/main/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverter.java b/api/src/main/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverter.java index 55c75c950..b5640eb06 100644 --- a/api/src/main/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverter.java +++ b/api/src/main/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverter.java @@ -80,14 +80,12 @@ private FieldSchema createUnionSchema(Schema schema, Map de final Map fields = schema.getTypes().stream() .filter(t -> !t.getType().equals(Schema.Type.NULL)) .map(f -> { - String oneOfFieldName; - if (f.getType().equals(Schema.Type.RECORD)) { - // for records using full record name - oneOfFieldName = f.getFullName(); - } else { + String oneOfFieldName = switch (f.getType()) { + case RECORD -> f.getFullName(); + case ENUM -> f.getName(); // for primitive types - using type name - oneOfFieldName = f.getType().getName().toLowerCase(); - } + default -> f.getType().getName().toLowerCase(); + }; return Tuples.of(oneOfFieldName, convertSchema(f, definitions, false)); }).collect(Collectors.toMap( Tuple2::getT1, diff --git a/api/src/test/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverterTest.java b/api/src/test/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverterTest.java index 299283aed..35c230f56 100644 --- a/api/src/test/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverterTest.java +++ b/api/src/test/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverterTest.java @@ -244,6 +244,48 @@ void testRecordReferences() { 
convertAndCompare(expectedJsonSchema, avroSchema); } + @Test + void testNullableUnionEnum() { + String avroSchema = + " {" + + " \"type\": \"record\"," + + " \"name\": \"Message\"," + + " \"namespace\": \"com.provectus.kafka\"," + + " \"fields\": [" + + " {" + + " \"name\": \"enum_nullable_union\"," + + " \"type\": [\"null\", {" + + " \"type\": \"enum\"," + + " \"name\": \"Suit\"," + + " \"symbols\": [\"SPADES\",\"HEARTS\",\"DIAMONDS\",\"CLUBS\"]" + + " }]" + + " }" + + " ]" + + " }"; + + String expectedJsonSchema = + "{\"$id\":\"http://example.com/Message\"," + + "\"$schema\":\"https://json-schema.org/draft/2020-12/schema\"," + + "\"type\":\"object\"," + + "\"properties\":{" + + "\"enum_nullable_union\":{" + + "\"oneOf\":[" + + "{\"type\":\"null\"}," + + "{\"type\":\"object\"," + + "\"properties\":{" + + "\"Suit\":{" + + "\"type\":\"string\"," + + "\"enum\":[\"SPADES\",\"HEARTS\",\"DIAMONDS\",\"CLUBS\"]" + + "}}}" + + "]" + + "}}," + + "\"definitions\":{" + + "\"com.provectus.kafka.Message\":{\"$ref\":\"#\"}" + + "}}"; + + convertAndCompare(expectedJsonSchema, avroSchema); + } + @SneakyThrows private void convertAndCompare(String expectedJsonSchema, String sourceAvroSchema) { var parseAvroSchema = new Schema.Parser().parse(sourceAvroSchema); diff --git a/api/src/test/java/io/kafbat/ui/util/jsonschema/JsonAvroConversionTest.java b/api/src/test/java/io/kafbat/ui/util/jsonschema/JsonAvroConversionTest.java index 03b690ff1..01e31875e 100644 --- a/api/src/test/java/io/kafbat/ui/util/jsonschema/JsonAvroConversionTest.java +++ b/api/src/test/java/io/kafbat/ui/util/jsonschema/JsonAvroConversionTest.java @@ -700,6 +700,43 @@ void unionFieldWithInnerTypesNamesClash() { } + @Test + void unionNullableEnumField() { + var schema = createSchema( + """ + { + "type": "record", + "namespace": "com.test", + "name": "TestAvroRecord", + "fields": [ + { + "name": "enum_nullable_union", + "type" : [ "null", { + "type" : "enum", + "name" : "Suit", + "symbols" : ["SPADES", "HEARTS", "DIAMONDS", "CLUBS"] + } ] + } + ] + }""" + ); + + GenericData.Record inputRecord = new GenericData.Record(schema); + inputRecord.put("enum_nullable_union", + new GenericData.EnumSymbol( + schema.getField("enum_nullable_union").schema().getTypes().get(1), "SPADES")); + String expectedJsonWithEnum = """ + { + "enum_nullable_union": { "Suit": "SPADES"}\s + } + \s"""; + assertJsonsEqual(expectedJsonWithEnum, convertAvroToJson(inputRecord, schema)); + + GenericData.Record inputNullRecord = new GenericData.Record(schema); + inputNullRecord.put("enum_nullable_union", null); + assertJsonsEqual("{}", convertAvroToJson(inputNullRecord, schema)); + } + private Schema createSchema(String schema) { return new AvroSchema(schema).rawSchema(); } From d40fc30db6905a36276bc545bcc88c766183cd50 Mon Sep 17 00:00:00 2001 From: Roman Zabaluev Date: Mon, 6 Jan 2025 16:12:55 +0400 Subject: [PATCH 45/84] FE: Deps: Bump transitive axios (#749) --- frontend/package.json | 1 + frontend/pnpm-lock.yaml | 17 +++++++++-------- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/frontend/package.json b/frontend/package.json index c05150b42..49f9bc732 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -117,6 +117,7 @@ "json5@>=2.0.0 <2.2.2": ">=2.2.2", "semver@>=7.0.0 <7.5.2": ">=7.5.2", "axios@>=0.8.1 <0.28.0": ">=0.28.0", + "axios@>=1.3.2 <=1.7.3": ">=1.7.4", "braces": "3.0.3" } } diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml index 07a2d9c19..d23c01a88 100644 --- a/frontend/pnpm-lock.yaml +++ b/frontend/pnpm-lock.yaml @@ 
-14,6 +14,7 @@ overrides: json5@>=2.0.0 <2.2.2: '>=2.2.2' semver@>=7.0.0 <7.5.2: '>=7.5.2' axios@>=0.8.1 <0.28.0: '>=0.28.0' + axios@>=1.3.2 <=1.7.3: '>=1.7.4' braces: 3.0.3 importers: @@ -815,7 +816,7 @@ packages: resolution: {integrity: sha512-Z6GuOUdNQjP7FX+OuV2Ybyamse+/e0BFdTWBX5JxpBDKA+YkdLynDgG6HTF04zy6e9zPa19UX0WA2VDoehwhXQ==} peerDependencies: '@nestjs/common': ^7.0.0 || ^8.0.0 || ^9.0.0 || ^10.0.0 - axios: ^1.3.1 + axios: '>=1.7.4' rxjs: ^6.0.0 || ^7.0.0 '@nestjs/common@10.3.0': @@ -1511,8 +1512,8 @@ packages: resolution: {integrity: sha512-M0JtH+hlOL5pLQwHOLNYZaXuhqmvS8oExsqB1SBYgA4Dk7u/xx+YdGHXaK5pyUfed5mYXdlYiphWq3G8cRi5JQ==} engines: {node: '>=4'} - axios@1.6.8: - resolution: {integrity: sha512-v/ZHtJDU39mDpyBoFVkETcd/uNdxrWRrg3bKpOKzXFA6Bvqopts6ALSMU3y6ijYxbw2B+wPrIv46egTzJXCLGQ==} + axios@1.7.9: + resolution: {integrity: sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==} axobject-query@3.2.1: resolution: {integrity: sha512-jsyHu61e6N4Vbz/v18DHwWYKK0bSWLqn47eeDSKPB7m8tqMHF9YJ+mhIk2lVteyZrY8tnSj/jHOv4YiTCuCJgg==} @@ -4891,10 +4892,10 @@ snapshots: '@microsoft/fetch-event-source@2.0.1': {} - '@nestjs/axios@3.0.2(@nestjs/common@10.3.0(reflect-metadata@0.1.13)(rxjs@7.8.1))(axios@1.6.8)(rxjs@7.8.1)': + '@nestjs/axios@3.0.2(@nestjs/common@10.3.0(reflect-metadata@0.1.13)(rxjs@7.8.1))(axios@1.7.9)(rxjs@7.8.1)': dependencies: '@nestjs/common': 10.3.0(reflect-metadata@0.1.13)(rxjs@7.8.1) - axios: 1.6.8 + axios: 1.7.9 rxjs: 7.8.1 '@nestjs/common@10.3.0(reflect-metadata@0.1.13)(rxjs@7.8.1)': @@ -4941,11 +4942,11 @@ snapshots: '@openapitools/openapi-generator-cli@2.13.4': dependencies: - '@nestjs/axios': 3.0.2(@nestjs/common@10.3.0(reflect-metadata@0.1.13)(rxjs@7.8.1))(axios@1.6.8)(rxjs@7.8.1) + '@nestjs/axios': 3.0.2(@nestjs/common@10.3.0(reflect-metadata@0.1.13)(rxjs@7.8.1))(axios@1.7.9)(rxjs@7.8.1) '@nestjs/common': 10.3.0(reflect-metadata@0.1.13)(rxjs@7.8.1) '@nestjs/core': 10.3.0(@nestjs/common@10.3.0(reflect-metadata@0.1.13)(rxjs@7.8.1))(reflect-metadata@0.1.13)(rxjs@7.8.1) '@nuxtjs/opencollective': 0.3.2 - axios: 1.6.8 + axios: 1.7.9 chalk: 4.1.2 commander: 8.3.0 compare-versions: 4.1.4 @@ -5623,7 +5624,7 @@ snapshots: axe-core@4.7.0: {} - axios@1.6.8: + axios@1.7.9: dependencies: follow-redirects: 1.15.6 form-data: 4.0.0 From 1710e2a23df7c7c78093c349ca917c40a14c6f48 Mon Sep 17 00:00:00 2001 From: Renat Kalimulin <103274228+Nilumilak@users.noreply.github.com> Date: Wed, 8 Jan 2025 19:38:23 +0300 Subject: [PATCH 46/84] UX: SR: Adjust subjects name list max width (#755) --- frontend/src/components/common/NewTable/Table.styled.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/frontend/src/components/common/NewTable/Table.styled.ts b/frontend/src/components/common/NewTable/Table.styled.ts index 3cb247654..b43972eac 100644 --- a/frontend/src/components/common/NewTable/Table.styled.ts +++ b/frontend/src/components/common/NewTable/Table.styled.ts @@ -157,7 +157,6 @@ export const Table = styled.table( & a { color: ${table.td.color.normal}; font-weight: 500; - max-width: 450px; white-space: nowrap; overflow: hidden; text-overflow: ellipsis; From 8ddda3df0b2aa1564f1d1ff5cd63fc30a221d5e6 Mon Sep 17 00:00:00 2001 From: Roman Zabaluev Date: Fri, 10 Jan 2025 20:44:40 +0400 Subject: [PATCH 47/84] BE: Auth: Fix startup error on missing index (#758) --- .../main/java/io/kafbat/ui/util/StaticFileWebFilter.java | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/api/src/main/java/io/kafbat/ui/util/StaticFileWebFilter.java 
b/api/src/main/java/io/kafbat/ui/util/StaticFileWebFilter.java index 1b74bd374..f76ffad6b 100644 --- a/api/src/main/java/io/kafbat/ui/util/StaticFileWebFilter.java +++ b/api/src/main/java/io/kafbat/ui/util/StaticFileWebFilter.java @@ -1,6 +1,7 @@ package io.kafbat.ui.util; import java.io.IOException; +import lombok.extern.slf4j.Slf4j; import org.jetbrains.annotations.NotNull; import org.springframework.core.io.ClassPathResource; import org.springframework.core.io.buffer.DataBufferFactory; @@ -15,6 +16,7 @@ import org.springframework.web.server.WebFilterChain; import reactor.core.publisher.Mono; +@Slf4j public class StaticFileWebFilter implements WebFilter { private static final String INDEX_HTML = "/static/index.html"; @@ -29,6 +31,12 @@ public StaticFileWebFilter() { public StaticFileWebFilter(String path, ClassPathResource resource) { this.matcher = ServerWebExchangeMatchers.pathMatchers(HttpMethod.GET, path); + if (!resource.exists()) { + log.warn("Resource [{}] does not exist. Frontend might not be available.", resource.getPath()); + contents = "Missing index.html. Make sure the app has been built with a correct (prod) profile."; + return; + } + try { this.contents = ResourceUtil.readAsString(resource); } catch (IOException e) { From 4cf17a0b2b17bdebd533caffd06978180b29c0ab Mon Sep 17 00:00:00 2001 From: Yeikel Date: Sat, 11 Jan 2025 09:41:42 -0500 Subject: [PATCH 48/84] Infra: FE: Enforce pnpm using a range (#747) --- frontend/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/package.json b/frontend/package.json index 49f9bc732..b1133c3a7 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -104,7 +104,7 @@ }, "engines": { "node": "18.17.1", - "pnpm": "9.15.0" + "pnpm": "^9.15.0" }, "pnpm": { "overrides": { From 2b3abd212d79f6c1e99c9f19893ee8b90ad4c48c Mon Sep 17 00:00:00 2001 From: Yeikel Date: Mon, 13 Jan 2025 13:21:04 -0500 Subject: [PATCH 49/84] BE: Chore: remove unnecessary architecture check in unit test (#704) --- api/src/test/java/io/kafbat/ui/AbstractIntegrationTest.java | 5 +---- pom.xml | 2 +- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/api/src/test/java/io/kafbat/ui/AbstractIntegrationTest.java b/api/src/test/java/io/kafbat/ui/AbstractIntegrationTest.java index 7224649b1..9e9e903cc 100644 --- a/api/src/test/java/io/kafbat/ui/AbstractIntegrationTest.java +++ b/api/src/test/java/io/kafbat/ui/AbstractIntegrationTest.java @@ -37,10 +37,7 @@ public abstract class AbstractIntegrationTest { public static final String LOCAL = "local"; public static final String SECOND_LOCAL = "secondLocal"; - private static final boolean IS_ARM = - System.getProperty("os.arch").contains("arm") || System.getProperty("os.arch").contains("aarch64"); - - private static final String CONFLUENT_PLATFORM_VERSION = IS_ARM ? 
"7.8.0.arm64" : "7.8.0"; + private static final String CONFLUENT_PLATFORM_VERSION = "7.8.0"; public static final KafkaContainer kafka = new KafkaContainer( DockerImageName.parse("confluentinc/cp-kafka").withTag(CONFLUENT_PLATFORM_VERSION)) diff --git a/pom.xml b/pom.xml index 05f68f576..a66cd8b83 100644 --- a/pom.xml +++ b/pom.xml @@ -55,7 +55,7 @@ 5.11.2 5.14.2 4.12.0 - 1.20.2 + 1.20.4 v18.17.1 From c1f4424eac95f869102add67eb649523b13c4381 Mon Sep 17 00:00:00 2001 From: Yeikel Date: Tue, 14 Jan 2025 02:22:09 -0500 Subject: [PATCH 50/84] Docs: Update confluent version in the examples (#772) --- documentation/compose/auth-context.yaml | 2 +- documentation/compose/cluster-sr-auth.yaml | 6 +++--- documentation/compose/kafbat-ui.yaml | 12 ++++++------ documentation/compose/kafka-ssl-components.yaml | 6 +++--- documentation/compose/kafka-ssl.yml | 2 +- documentation/compose/kafka-zookeeper.yaml | 6 +++--- documentation/compose/ui-acl-with-zk.yaml | 2 +- documentation/compose/ui-connectors-auth.yaml | 8 ++++---- documentation/compose/ui-jmx-secured.yml | 2 +- documentation/compose/ui-ldap.yaml | 4 ++-- documentation/compose/ui-sasl.yaml | 2 +- documentation/compose/ui-serdes.yaml | 4 ++-- documentation/compose/ui-with-jmx-exporter.yaml | 2 +- 13 files changed, 29 insertions(+), 29 deletions(-) diff --git a/documentation/compose/auth-context.yaml b/documentation/compose/auth-context.yaml index f2587255c..dd2755786 100644 --- a/documentation/compose/auth-context.yaml +++ b/documentation/compose/auth-context.yaml @@ -19,7 +19,7 @@ services: SPRING_SECURITY_USER_PASSWORD: pass kafka: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka container_name: kafka ports: diff --git a/documentation/compose/cluster-sr-auth.yaml b/documentation/compose/cluster-sr-auth.yaml index 5845d1e66..83f81da52 100644 --- a/documentation/compose/cluster-sr-auth.yaml +++ b/documentation/compose/cluster-sr-auth.yaml @@ -3,7 +3,7 @@ version: '2' services: kafka1: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka1 container_name: kafka1 ports: @@ -31,7 +31,7 @@ services: command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" schemaregistry1: - image: confluentinc/cp-schema-registry:7.2.1 + image: confluentinc/cp-schema-registry:7.8.0 ports: - 18085:8085 depends_on: @@ -55,7 +55,7 @@ services: SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas kafka-init-topics: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 volumes: - ./data/message.json:/data/message.json depends_on: diff --git a/documentation/compose/kafbat-ui.yaml b/documentation/compose/kafbat-ui.yaml index 8848d6e5f..c6df25737 100644 --- a/documentation/compose/kafbat-ui.yaml +++ b/documentation/compose/kafbat-ui.yaml @@ -27,7 +27,7 @@ services: DYNAMIC_CONFIG_ENABLED: 'true' kafka0: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka0 container_name: kafka0 ports: @@ -55,7 +55,7 @@ services: command: "bash -c 'if [ ! 
-f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" kafka1: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka1 container_name: kafka1 ports: @@ -83,7 +83,7 @@ services: command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" schemaregistry0: - image: confluentinc/cp-schema-registry:7.2.1 + image: confluentinc/cp-schema-registry:7.8.0 ports: - 8085:8085 depends_on: @@ -99,7 +99,7 @@ services: SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas schemaregistry1: - image: confluentinc/cp-schema-registry:7.2.1 + image: confluentinc/cp-schema-registry:7.8.0 ports: - 18085:8085 depends_on: @@ -115,7 +115,7 @@ services: SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas kafka-connect0: - image: confluentinc/cp-kafka-connect:7.2.1 + image: confluentinc/cp-kafka-connect:7.8.0 ports: - 8083:8083 depends_on: @@ -140,7 +140,7 @@ services: CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components" kafka-init-topics: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 volumes: - ./data/message.json:/data/message.json depends_on: diff --git a/documentation/compose/kafka-ssl-components.yaml b/documentation/compose/kafka-ssl-components.yaml index e1be6999d..c95639902 100644 --- a/documentation/compose/kafka-ssl-components.yaml +++ b/documentation/compose/kafka-ssl-components.yaml @@ -39,7 +39,7 @@ services: - ./ssl/kafka.keystore.jks:/kafka.keystore.jks kafka0: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka0 container_name: kafka0 ports: @@ -80,7 +80,7 @@ services: command: "bash -c 'if [ ! 
-f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" schemaregistry0: - image: confluentinc/cp-schema-registry:7.2.1 + image: confluentinc/cp-schema-registry:7.8.0 depends_on: - kafka0 environment: @@ -111,7 +111,7 @@ services: - ./ssl/kafka.keystore.jks:/kafka.keystore.jks kafka-connect0: - image: confluentinc/cp-kafka-connect:7.2.1 + image: confluentinc/cp-kafka-connect:7.8.0 ports: - 8083:8083 depends_on: diff --git a/documentation/compose/kafka-ssl.yml b/documentation/compose/kafka-ssl.yml index 2adb0c12b..c427ed66d 100644 --- a/documentation/compose/kafka-ssl.yml +++ b/documentation/compose/kafka-ssl.yml @@ -22,7 +22,7 @@ services: - ./ssl/kafka.keystore.jks:/kafka.keystore.jks kafka: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka container_name: kafka ports: diff --git a/documentation/compose/kafka-zookeeper.yaml b/documentation/compose/kafka-zookeeper.yaml index 7342a9763..20015f19f 100644 --- a/documentation/compose/kafka-zookeeper.yaml +++ b/documentation/compose/kafka-zookeeper.yaml @@ -3,7 +3,7 @@ version: '2' services: zookeeper: - image: confluentinc/cp-zookeeper:7.2.1 + image: confluentinc/cp-zookeeper:7.8.0 hostname: zookeeper container_name: zookeeper ports: @@ -13,7 +13,7 @@ services: ZOOKEEPER_TICK_TIME: 2000 kafka: - image: confluentinc/cp-server:7.2.1 + image: confluentinc/cp-server:7.8.0 hostname: kafka container_name: kafka depends_on: @@ -36,7 +36,7 @@ services: KAFKA_JMX_HOSTNAME: kafka kafka-init-topics: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 volumes: - ./data/message.json:/data/message.json depends_on: diff --git a/documentation/compose/ui-acl-with-zk.yaml b/documentation/compose/ui-acl-with-zk.yaml index 97aad1791..14e433485 100644 --- a/documentation/compose/ui-acl-with-zk.yaml +++ b/documentation/compose/ui-acl-with-zk.yaml @@ -27,7 +27,7 @@ services: - 2181:2181 kafka: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka container_name: kafka ports: diff --git a/documentation/compose/ui-connectors-auth.yaml b/documentation/compose/ui-connectors-auth.yaml index a4c51f10f..b86c1d3f8 100644 --- a/documentation/compose/ui-connectors-auth.yaml +++ b/documentation/compose/ui-connectors-auth.yaml @@ -21,7 +21,7 @@ services: KAFKA_CLUSTERS_0_KAFKACONNECT_0_PASSWORD: admin-secret kafka0: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka0 container_name: kafka0 ports: @@ -50,7 +50,7 @@ services: command: 'bash -c ''if [ ! -f /tmp/update_run.sh ]; then echo "ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?" 
&& exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi''' schemaregistry0: - image: confluentinc/cp-schema-registry:7.2.1 + image: confluentinc/cp-schema-registry:7.8.0 ports: - 8085:8085 depends_on: @@ -69,7 +69,7 @@ services: build: context: ./kafka-connect args: - image: confluentinc/cp-kafka-connect:7.2.1 + image: confluentinc/cp-kafka-connect:7.8.0 ports: - 8083:8083 depends_on: @@ -102,7 +102,7 @@ services: # AWS_SECRET_ACCESS_KEY: "" kafka-init-topics: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 volumes: - ./data/message.json:/data/message.json depends_on: diff --git a/documentation/compose/ui-jmx-secured.yml b/documentation/compose/ui-jmx-secured.yml index edb2439b1..5d7ab8a00 100644 --- a/documentation/compose/ui-jmx-secured.yml +++ b/documentation/compose/ui-jmx-secured.yml @@ -27,7 +27,7 @@ services: - ./jmx/clientkeystore:/jmx/clientkeystore kafka0: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka0 container_name: kafka0 ports: diff --git a/documentation/compose/ui-ldap.yaml b/documentation/compose/ui-ldap.yaml index b7855dfdb..bf0d80f74 100644 --- a/documentation/compose/ui-ldap.yaml +++ b/documentation/compose/ui-ldap.yaml @@ -34,7 +34,7 @@ services: - 10389:10389 kafka0: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka0 container_name: kafka0 ports: @@ -63,7 +63,7 @@ services: command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" schemaregistry0: - image: confluentinc/cp-schema-registry:7.2.1 + image: confluentinc/cp-schema-registry:7.8.0 ports: - 8085:8085 depends_on: diff --git a/documentation/compose/ui-sasl.yaml b/documentation/compose/ui-sasl.yaml index 8512ff9a1..3653ea332 100644 --- a/documentation/compose/ui-sasl.yaml +++ b/documentation/compose/ui-sasl.yaml @@ -18,7 +18,7 @@ services: DYNAMIC_CONFIG_ENABLED: true # not necessary for sasl auth, added for tests kafka: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka container_name: kafka ports: diff --git a/documentation/compose/ui-serdes.yaml b/documentation/compose/ui-serdes.yaml index b4060059a..6b6a32995 100644 --- a/documentation/compose/ui-serdes.yaml +++ b/documentation/compose/ui-serdes.yaml @@ -68,7 +68,7 @@ services: - ./proto:/protofiles kafka0: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka0 container_name: kafka0 ports: @@ -97,7 +97,7 @@ services: command: "bash -c 'if [ ! 
-f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" schemaregistry0: - image: confluentinc/cp-schema-registry:7.2.1 + image: confluentinc/cp-schema-registry:7.8.0 ports: - 8085:8085 depends_on: diff --git a/documentation/compose/ui-with-jmx-exporter.yaml b/documentation/compose/ui-with-jmx-exporter.yaml index 3283e76e3..eeb164830 100644 --- a/documentation/compose/ui-with-jmx-exporter.yaml +++ b/documentation/compose/ui-with-jmx-exporter.yaml @@ -3,7 +3,7 @@ version: '2' services: kafka0: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka0 container_name: kafka0 ports: From 79e39101fd620985bc4c811cacadbaca8e23f76e Mon Sep 17 00:00:00 2001 From: Dmitry Werner Date: Tue, 14 Jan 2025 12:26:55 +0500 Subject: [PATCH 51/84] BE: Chore: Cleanup model package (#763) --- .../java/io/kafbat/ui/model/BrokerMetrics.java | 1 - .../java/io/kafbat/ui/model/CleanupPolicy.java | 12 ++---------- .../kafbat/ui/model/InternalBrokerConfig.java | 7 +++---- .../kafbat/ui/model/InternalClusterMetrics.java | 3 --- .../kafbat/ui/model/InternalClusterState.java | 3 --- .../io/kafbat/ui/model/InternalPartition.java | 1 - .../ui/model/InternalPartitionsOffsets.java | 1 - .../kafbat/ui/model/InternalSegmentSizeDto.java | 13 ------------- .../java/io/kafbat/ui/model/InternalTopic.java | 3 --- .../io/kafbat/ui/model/InternalTopicConfig.java | 1 - .../main/java/io/kafbat/ui/model/Metrics.java | 1 - .../java/io/kafbat/ui/model/Statistics.java | 1 - .../ui/model/schemaregistry/ErrorResponse.java | 14 -------------- .../InternalCompatibilityCheck.java | 10 ---------- .../InternalCompatibilityLevel.java | 8 -------- .../model/schemaregistry/InternalNewSchema.java | 17 ----------------- .../model/schemaregistry/SubjectIdResponse.java | 8 -------- 17 files changed, 5 insertions(+), 99 deletions(-) delete mode 100644 api/src/main/java/io/kafbat/ui/model/InternalSegmentSizeDto.java delete mode 100644 api/src/main/java/io/kafbat/ui/model/schemaregistry/ErrorResponse.java delete mode 100644 api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalCompatibilityCheck.java delete mode 100644 api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalCompatibilityLevel.java delete mode 100644 api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalNewSchema.java delete mode 100644 api/src/main/java/io/kafbat/ui/model/schemaregistry/SubjectIdResponse.java diff --git a/api/src/main/java/io/kafbat/ui/model/BrokerMetrics.java b/api/src/main/java/io/kafbat/ui/model/BrokerMetrics.java index 31d4ad7e7..dbd57c9c1 100644 --- a/api/src/main/java/io/kafbat/ui/model/BrokerMetrics.java +++ b/api/src/main/java/io/kafbat/ui/model/BrokerMetrics.java @@ -1,6 +1,5 @@ package io.kafbat.ui.model; -import io.kafbat.ui.model.MetricDTO; import java.util.List; import lombok.Builder; import lombok.Data; diff --git a/api/src/main/java/io/kafbat/ui/model/CleanupPolicy.java b/api/src/main/java/io/kafbat/ui/model/CleanupPolicy.java index 81186059f..815a8549d 100644 --- a/api/src/main/java/io/kafbat/ui/model/CleanupPolicy.java +++ b/api/src/main/java/io/kafbat/ui/model/CleanupPolicy.java @@ -20,18 +20,10 @@ public enum CleanupPolicy { this.policies = policies; } - public String getPolicy() { - return policies.get(0); - } - public static CleanupPolicy fromString(String string) { return Arrays.stream(CleanupPolicy.values()) - .filter(v -> - v.policies.stream().anyMatch( - s 
-> s.equals(string.replace(" ", "") - ) - ) - ).findFirst() + .filter(v -> v.policies.stream().anyMatch(s -> s.equals(string.replace(" ", "")))) + .findFirst() .orElse(UNKNOWN); } } diff --git a/api/src/main/java/io/kafbat/ui/model/InternalBrokerConfig.java b/api/src/main/java/io/kafbat/ui/model/InternalBrokerConfig.java index 5f87b0487..eb37270ed 100644 --- a/api/src/main/java/io/kafbat/ui/model/InternalBrokerConfig.java +++ b/api/src/main/java/io/kafbat/ui/model/InternalBrokerConfig.java @@ -1,6 +1,5 @@ package io.kafbat.ui.model; - import java.util.List; import lombok.Builder; import lombok.Data; @@ -17,13 +16,13 @@ public class InternalBrokerConfig { private final List synonyms; public static InternalBrokerConfig from(ConfigEntry configEntry, boolean readOnlyCluster) { - InternalBrokerConfig.InternalBrokerConfigBuilder builder = InternalBrokerConfig.builder() + return InternalBrokerConfig.builder() .name(configEntry.name()) .value(configEntry.value()) .source(configEntry.source()) .isReadOnly(readOnlyCluster || configEntry.isReadOnly()) .isSensitive(configEntry.isSensitive()) - .synonyms(configEntry.synonyms()); - return builder.build(); + .synonyms(configEntry.synonyms()) + .build(); } } diff --git a/api/src/main/java/io/kafbat/ui/model/InternalClusterMetrics.java b/api/src/main/java/io/kafbat/ui/model/InternalClusterMetrics.java index df59cf007..6c04fadeb 100644 --- a/api/src/main/java/io/kafbat/ui/model/InternalClusterMetrics.java +++ b/api/src/main/java/io/kafbat/ui/model/InternalClusterMetrics.java @@ -1,7 +1,5 @@ package io.kafbat.ui.model; -import io.kafbat.ui.model.MetricDTO; -import io.kafbat.ui.model.ServerStatusDTO; import java.math.BigDecimal; import java.util.List; import java.util.Map; @@ -9,7 +7,6 @@ import lombok.Builder; import lombok.Data; - @Data @Builder(toBuilder = true) public class InternalClusterMetrics { diff --git a/api/src/main/java/io/kafbat/ui/model/InternalClusterState.java b/api/src/main/java/io/kafbat/ui/model/InternalClusterState.java index 5e1874d10..5f3c1f308 100644 --- a/api/src/main/java/io/kafbat/ui/model/InternalClusterState.java +++ b/api/src/main/java/io/kafbat/ui/model/InternalClusterState.java @@ -1,9 +1,6 @@ package io.kafbat.ui.model; import com.google.common.base.Throwables; -import io.kafbat.ui.model.BrokerDiskUsageDTO; -import io.kafbat.ui.model.MetricsCollectionErrorDTO; -import io.kafbat.ui.model.ServerStatusDTO; import java.math.BigDecimal; import java.util.List; import java.util.Optional; diff --git a/api/src/main/java/io/kafbat/ui/model/InternalPartition.java b/api/src/main/java/io/kafbat/ui/model/InternalPartition.java index 61646207e..0b7c46ae3 100644 --- a/api/src/main/java/io/kafbat/ui/model/InternalPartition.java +++ b/api/src/main/java/io/kafbat/ui/model/InternalPartition.java @@ -20,5 +20,4 @@ public class InternalPartition { private final Long segmentSize; private final Integer segmentCount; - } diff --git a/api/src/main/java/io/kafbat/ui/model/InternalPartitionsOffsets.java b/api/src/main/java/io/kafbat/ui/model/InternalPartitionsOffsets.java index 3084be5c3..b4febb56d 100644 --- a/api/src/main/java/io/kafbat/ui/model/InternalPartitionsOffsets.java +++ b/api/src/main/java/io/kafbat/ui/model/InternalPartitionsOffsets.java @@ -7,7 +7,6 @@ import lombok.Value; import org.apache.kafka.common.TopicPartition; - public class InternalPartitionsOffsets { @Value diff --git a/api/src/main/java/io/kafbat/ui/model/InternalSegmentSizeDto.java b/api/src/main/java/io/kafbat/ui/model/InternalSegmentSizeDto.java deleted file mode 100644 index 
8ce8014ae..000000000 --- a/api/src/main/java/io/kafbat/ui/model/InternalSegmentSizeDto.java +++ /dev/null @@ -1,13 +0,0 @@ -package io.kafbat.ui.model; - -import java.util.Map; -import lombok.Builder; -import lombok.Data; - -@Data -@Builder(toBuilder = true) -public class InternalSegmentSizeDto { - - private final Map internalTopicWithSegmentSize; - private final InternalClusterMetrics clusterMetricsWithSegmentSize; -} diff --git a/api/src/main/java/io/kafbat/ui/model/InternalTopic.java b/api/src/main/java/io/kafbat/ui/model/InternalTopic.java index 3a6134fa0..6aa3a0a1a 100644 --- a/api/src/main/java/io/kafbat/ui/model/InternalTopic.java +++ b/api/src/main/java/io/kafbat/ui/model/InternalTopic.java @@ -1,6 +1,5 @@ package io.kafbat.ui.model; -import io.kafbat.ui.config.ClustersProperties; import java.math.BigDecimal; import java.util.List; import java.util.Map; @@ -16,8 +15,6 @@ @Builder(toBuilder = true) public class InternalTopic { - ClustersProperties clustersProperties; - // from TopicDescription private final String name; private final boolean internal; diff --git a/api/src/main/java/io/kafbat/ui/model/InternalTopicConfig.java b/api/src/main/java/io/kafbat/ui/model/InternalTopicConfig.java index b68348418..84620e2e4 100644 --- a/api/src/main/java/io/kafbat/ui/model/InternalTopicConfig.java +++ b/api/src/main/java/io/kafbat/ui/model/InternalTopicConfig.java @@ -5,7 +5,6 @@ import lombok.Data; import org.apache.kafka.clients.admin.ConfigEntry; - @Data @Builder public class InternalTopicConfig { diff --git a/api/src/main/java/io/kafbat/ui/model/Metrics.java b/api/src/main/java/io/kafbat/ui/model/Metrics.java index e9b7c9f0e..bb6d2ff0c 100644 --- a/api/src/main/java/io/kafbat/ui/model/Metrics.java +++ b/api/src/main/java/io/kafbat/ui/model/Metrics.java @@ -11,7 +11,6 @@ import lombok.Builder; import lombok.Value; - @Builder @Value public class Metrics { diff --git a/api/src/main/java/io/kafbat/ui/model/Statistics.java b/api/src/main/java/io/kafbat/ui/model/Statistics.java index 35de5e251..43e6a26e0 100644 --- a/api/src/main/java/io/kafbat/ui/model/Statistics.java +++ b/api/src/main/java/io/kafbat/ui/model/Statistics.java @@ -1,6 +1,5 @@ package io.kafbat.ui.model; -import io.kafbat.ui.model.ServerStatusDTO; import io.kafbat.ui.service.ReactiveAdminClient; import java.util.List; import java.util.Map; diff --git a/api/src/main/java/io/kafbat/ui/model/schemaregistry/ErrorResponse.java b/api/src/main/java/io/kafbat/ui/model/schemaregistry/ErrorResponse.java deleted file mode 100644 index 00bc760b9..000000000 --- a/api/src/main/java/io/kafbat/ui/model/schemaregistry/ErrorResponse.java +++ /dev/null @@ -1,14 +0,0 @@ -package io.kafbat.ui.model.schemaregistry; - -import com.fasterxml.jackson.annotation.JsonProperty; -import lombok.Data; - -@Data -public class ErrorResponse { - - @JsonProperty("error_code") - private int errorCode; - - private String message; - -} diff --git a/api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalCompatibilityCheck.java b/api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalCompatibilityCheck.java deleted file mode 100644 index bd59b6f6e..000000000 --- a/api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalCompatibilityCheck.java +++ /dev/null @@ -1,10 +0,0 @@ -package io.kafbat.ui.model.schemaregistry; - -import com.fasterxml.jackson.annotation.JsonProperty; -import lombok.Data; - -@Data -public class InternalCompatibilityCheck { - @JsonProperty("is_compatible") - private boolean isCompatible; -} diff --git 
a/api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalCompatibilityLevel.java b/api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalCompatibilityLevel.java deleted file mode 100644 index af4acf8f7..000000000 --- a/api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalCompatibilityLevel.java +++ /dev/null @@ -1,8 +0,0 @@ -package io.kafbat.ui.model.schemaregistry; - -import lombok.Data; - -@Data -public class InternalCompatibilityLevel { - private String compatibilityLevel; -} diff --git a/api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalNewSchema.java b/api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalNewSchema.java deleted file mode 100644 index 9c09eccff..000000000 --- a/api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalNewSchema.java +++ /dev/null @@ -1,17 +0,0 @@ -package io.kafbat.ui.model.schemaregistry; - -import com.fasterxml.jackson.annotation.JsonInclude; -import io.kafbat.ui.model.SchemaTypeDTO; -import lombok.Data; - -@Data -public class InternalNewSchema { - private String schema; - @JsonInclude(JsonInclude.Include.NON_NULL) - private SchemaTypeDTO schemaType; - - public InternalNewSchema(String schema, SchemaTypeDTO schemaType) { - this.schema = schema; - this.schemaType = schemaType; - } -} diff --git a/api/src/main/java/io/kafbat/ui/model/schemaregistry/SubjectIdResponse.java b/api/src/main/java/io/kafbat/ui/model/schemaregistry/SubjectIdResponse.java deleted file mode 100644 index 951477676..000000000 --- a/api/src/main/java/io/kafbat/ui/model/schemaregistry/SubjectIdResponse.java +++ /dev/null @@ -1,8 +0,0 @@ -package io.kafbat.ui.model.schemaregistry; - -import lombok.Data; - -@Data -public class SubjectIdResponse { - private Integer id; -} From aaac4d7c87765303b6b39c2a2a43ee307ce7ab2a Mon Sep 17 00:00:00 2001 From: Yeikel Date: Tue, 14 Jan 2025 02:30:56 -0500 Subject: [PATCH 52/84] Infra: FE: migrate to node 22 (#748) --- .github/workflows/frontend_tests.yml | 2 +- frontend/.nvmrc | 2 +- frontend/package.json | 2 +- pom.xml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/frontend_tests.yml b/.github/workflows/frontend_tests.yml index 7c4955373..a48708123 100644 --- a/.github/workflows/frontend_tests.yml +++ b/.github/workflows/frontend_tests.yml @@ -28,7 +28,7 @@ jobs: - name: Install node uses: actions/setup-node@v4.0.2 with: - node-version: "18.17.1" + node-version: "22.12.0" cache: "pnpm" cache-dependency-path: "./frontend/pnpm-lock.yaml" diff --git a/frontend/.nvmrc b/frontend/.nvmrc index 860cc5000..dc0bb0f43 100644 --- a/frontend/.nvmrc +++ b/frontend/.nvmrc @@ -1 +1 @@ -v18.17.1 +v22.12.0 diff --git a/frontend/package.json b/frontend/package.json index b1133c3a7..b42a0d12e 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -103,7 +103,7 @@ "whatwg-fetch": "3.6.20" }, "engines": { - "node": "18.17.1", + "node": "^22.12.0", "pnpm": "^9.15.0" }, "pnpm": { diff --git a/pom.xml b/pom.xml index a66cd8b83..64b10b982 100644 --- a/pom.xml +++ b/pom.xml @@ -58,7 +58,7 @@ 1.20.4 - v18.17.1 + v22.12.0 v9.15.0 From a159ef63a0471ec83970550b7e5fa4b6846abd61 Mon Sep 17 00:00:00 2001 From: Dugong Date: Tue, 14 Jan 2025 12:41:38 +0100 Subject: [PATCH 53/84] KC: Stop Connectors and Reset Connector Offsets (#573) Co-authored-by: NOZAIS Julien Co-authored-by: Roman Zabaluev --- .mvn/jvm.config | 1 + api/pom.xml | 1 + .../ui/client/RetryingKafkaConnectClient.java | 22 +++++ .../ui/controller/KafkaConnectController.java | 21 +++++ .../ConnectorOffsetsResetException.java | 13 +++ 
.../io/kafbat/ui/exception/ErrorCode.java | 1 + .../model/rbac/permission/ConnectAction.java | 5 +- .../ui/service/KafkaConnectService.java | 18 ++++ .../kafbat/ui/KafkaConnectServiceTests.java | 60 ++++++++++++- contract/pom.xml | 1 + .../main/resources/swagger/kafbat-ui-api.yaml | 39 +++++++- .../resources/swagger/kafka-connect-api.yaml | 60 +++++++++++++ .../Connect/Details/Actions/Actions.tsx | 52 +++++++++-- .../Actions/__tests__/Actions.spec.tsx | 89 ++++++++++++++++++- .../components/Connect/List/ActionsCell.tsx | 80 +++++++++++++++-- .../Connect/List/__tests__/List.spec.tsx | 52 +++++++++-- frontend/src/lib/fixtures/kafkaConnect.ts | 12 +++ frontend/src/lib/hooks/api/kafkaConnect.ts | 13 ++- pom.xml | 16 ++-- 19 files changed, 517 insertions(+), 39 deletions(-) create mode 100644 .mvn/jvm.config create mode 100644 api/src/main/java/io/kafbat/ui/exception/ConnectorOffsetsResetException.java diff --git a/.mvn/jvm.config b/.mvn/jvm.config new file mode 100644 index 000000000..2bf66750a --- /dev/null +++ b/.mvn/jvm.config @@ -0,0 +1 @@ +-Djava.net.useSystemProxies=true \ No newline at end of file diff --git a/api/pom.xml b/api/pom.xml index dc774c09a..bbeb9dff8 100644 --- a/api/pom.xml +++ b/api/pom.xml @@ -492,6 +492,7 @@ build + false diff --git a/api/src/main/java/io/kafbat/ui/client/RetryingKafkaConnectClient.java b/api/src/main/java/io/kafbat/ui/client/RetryingKafkaConnectClient.java index 474a0c159..df2da3e55 100644 --- a/api/src/main/java/io/kafbat/ui/client/RetryingKafkaConnectClient.java +++ b/api/src/main/java/io/kafbat/ui/client/RetryingKafkaConnectClient.java @@ -238,6 +238,16 @@ public Mono> pauseConnectorWithHttpInfo(String connectorNam return withRetryOnConflictOrRebalance(super.pauseConnectorWithHttpInfo(connectorName)); } + @Override + public Mono stopConnector(String connectorName) throws WebClientResponseException { + return withRetryOnConflictOrRebalance(super.stopConnector(connectorName)); + } + + @Override + public Mono> stopConnectorWithHttpInfo(String connectorName) throws WebClientResponseException { + return withRetryOnConflictOrRebalance(super.stopConnectorWithHttpInfo(connectorName)); + } + @Override public Mono restartConnector(String connectorName, Boolean includeTasks, Boolean onlyFailed) throws WebClientResponseException { @@ -261,6 +271,18 @@ public Mono> restartConnectorTaskWithHttpInfo(String connec return withRetryOnConflictOrRebalance(super.restartConnectorTaskWithHttpInfo(connectorName, taskId)); } + @Override + public Mono resetConnectorOffsets(String connectorName) + throws WebClientResponseException { + return withRetryOnConflictOrRebalance(super.resetConnectorOffsets(connectorName)); + } + + @Override + public Mono> resetConnectorOffsetsWithHttpInfo(String connectorName) + throws WebClientResponseException { + return withRetryOnConflictOrRebalance(super.resetConnectorOffsetsWithHttpInfo(connectorName)); + } + @Override public Mono resumeConnector(String connectorName) throws WebClientResponseException { return withRetryOnRebalance(super.resumeConnector(connectorName)); diff --git a/api/src/main/java/io/kafbat/ui/controller/KafkaConnectController.java b/api/src/main/java/io/kafbat/ui/controller/KafkaConnectController.java index 08eb304c0..328a7353e 100644 --- a/api/src/main/java/io/kafbat/ui/controller/KafkaConnectController.java +++ b/api/src/main/java/io/kafbat/ui/controller/KafkaConnectController.java @@ -3,6 +3,8 @@ import static io.kafbat.ui.model.ConnectorActionDTO.RESTART; import static 
io.kafbat.ui.model.ConnectorActionDTO.RESTART_ALL_TASKS; import static io.kafbat.ui.model.ConnectorActionDTO.RESTART_FAILED_TASKS; +import static io.kafbat.ui.model.rbac.permission.ConnectAction.RESET_OFFSETS; +import static io.kafbat.ui.model.rbac.permission.ConnectAction.VIEW; import io.kafbat.ui.api.KafkaConnectApi; import io.kafbat.ui.model.ConnectDTO; @@ -285,4 +287,23 @@ private Comparator getConnectorsComparator(ConnectorColumn default -> defaultComparator; }; } + + @Override + public Mono> resetConnectorOffsets(String clusterName, String connectName, + String connectorName, + ServerWebExchange exchange) { + + var context = AccessContext.builder() + .cluster(clusterName) + .connectActions(connectName, VIEW, RESET_OFFSETS) + .operationName("resetConnectorOffsets") + .operationParams(Map.of(CONNECTOR_NAME, connectorName)) + .build(); + + return validateAccess(context).then( + kafkaConnectService + .resetConnectorOffsets(getCluster(clusterName), connectName, connectorName) + .map(ResponseEntity::ok)) + .doOnEach(sig -> audit(context, sig)); + } } diff --git a/api/src/main/java/io/kafbat/ui/exception/ConnectorOffsetsResetException.java b/api/src/main/java/io/kafbat/ui/exception/ConnectorOffsetsResetException.java new file mode 100644 index 000000000..f76feddc3 --- /dev/null +++ b/api/src/main/java/io/kafbat/ui/exception/ConnectorOffsetsResetException.java @@ -0,0 +1,13 @@ +package io.kafbat.ui.exception; + +public class ConnectorOffsetsResetException extends CustomBaseException { + + public ConnectorOffsetsResetException(String message) { + super(message); + } + + @Override + public ErrorCode getErrorCode() { + return ErrorCode.CONNECTOR_OFFSETS_RESET_ERROR; + } +} diff --git a/api/src/main/java/io/kafbat/ui/exception/ErrorCode.java b/api/src/main/java/io/kafbat/ui/exception/ErrorCode.java index a1b499aff..6d4a732e3 100644 --- a/api/src/main/java/io/kafbat/ui/exception/ErrorCode.java +++ b/api/src/main/java/io/kafbat/ui/exception/ErrorCode.java @@ -32,6 +32,7 @@ public enum ErrorCode { TOPIC_ANALYSIS_ERROR(4018, HttpStatus.BAD_REQUEST), FILE_UPLOAD_EXCEPTION(4019, HttpStatus.INTERNAL_SERVER_ERROR), CEL_ERROR(4020, HttpStatus.BAD_REQUEST), + CONNECTOR_OFFSETS_RESET_ERROR(4021, HttpStatus.BAD_REQUEST), ; static { diff --git a/api/src/main/java/io/kafbat/ui/model/rbac/permission/ConnectAction.java b/api/src/main/java/io/kafbat/ui/model/rbac/permission/ConnectAction.java index 7634e89c0..a357245bc 100644 --- a/api/src/main/java/io/kafbat/ui/model/rbac/permission/ConnectAction.java +++ b/api/src/main/java/io/kafbat/ui/model/rbac/permission/ConnectAction.java @@ -10,7 +10,8 @@ public enum ConnectAction implements PermissibleAction { EDIT(VIEW), CREATE(VIEW), RESTART(VIEW), - DELETE(VIEW) + DELETE(VIEW), + RESET_OFFSETS(VIEW) ; @@ -20,7 +21,7 @@ public enum ConnectAction implements PermissibleAction { this.dependantActions = dependantActions; } - public static final Set ALTER_ACTIONS = Set.of(CREATE, EDIT, DELETE, RESTART); + public static final Set ALTER_ACTIONS = Set.of(CREATE, EDIT, DELETE, RESTART, RESET_OFFSETS); @Nullable public static ConnectAction fromString(String name) { diff --git a/api/src/main/java/io/kafbat/ui/service/KafkaConnectService.java b/api/src/main/java/io/kafbat/ui/service/KafkaConnectService.java index 815069d07..31e4268a0 100644 --- a/api/src/main/java/io/kafbat/ui/service/KafkaConnectService.java +++ b/api/src/main/java/io/kafbat/ui/service/KafkaConnectService.java @@ -6,6 +6,7 @@ import io.kafbat.ui.connect.model.ConnectorStatusConnector; import 
io.kafbat.ui.connect.model.ConnectorTopics; import io.kafbat.ui.connect.model.TaskStatus; +import io.kafbat.ui.exception.ConnectorOffsetsResetException; import io.kafbat.ui.exception.NotFoundException; import io.kafbat.ui.exception.ValidationException; import io.kafbat.ui.mapper.ClusterMapper; @@ -213,6 +214,7 @@ public Mono updateConnectorState(KafkaCluster cluster, String connectName, case RESTART_FAILED_TASKS -> restartTasks(cluster, connectName, connectorName, t -> t.getStatus().getState() == ConnectorTaskStatusDTO.FAILED); case PAUSE -> client.pauseConnector(connectorName); + case STOP -> client.stopConnector(connectorName); case RESUME -> client.resumeConnector(connectorName); }); } @@ -272,4 +274,20 @@ private ReactiveFailover api(KafkaCluster cluster, String } return client; } + + public Mono resetConnectorOffsets(KafkaCluster cluster, String connectName, + String connectorName) { + return api(cluster, connectName) + .mono(client -> client.resetConnectorOffsets(connectorName)) + .onErrorResume(WebClientResponseException.NotFound.class, + e -> { + throw new NotFoundException("Connector %s not found in %s".formatted(connectorName, connectName)); + }) + .onErrorResume(WebClientResponseException.BadRequest.class, + e -> { + throw new ConnectorOffsetsResetException( + "Failed to reset offsets of connector %s of %s. Make sure it is STOPPED first." + .formatted(connectorName, connectName)); + }); + } } diff --git a/api/src/test/java/io/kafbat/ui/KafkaConnectServiceTests.java b/api/src/test/java/io/kafbat/ui/KafkaConnectServiceTests.java index 26d55b5ad..c5fbb14b4 100644 --- a/api/src/test/java/io/kafbat/ui/KafkaConnectServiceTests.java +++ b/api/src/test/java/io/kafbat/ui/KafkaConnectServiceTests.java @@ -24,6 +24,8 @@ import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.core.ParameterizedTypeReference; +import org.springframework.http.HttpStatus; +import org.springframework.test.web.reactive.server.ExchangeResult; import org.springframework.test.web.reactive.server.WebTestClient; @Slf4j @@ -45,6 +47,7 @@ public class KafkaConnectServiceTests extends AbstractIntegrationTest { @BeforeEach public void setUp() { + webTestClient.post() .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors", LOCAL, connectName) .bodyValue(new NewConnectorDTO() @@ -54,11 +57,10 @@ public void setUp() { "tasks.max", "1", "topics", "output-topic", "file", "/tmp/test", - "test.password", "test-credentials" - )) - ) + "test.password", "test-credentials"))) .exchange() .expectStatus().isOk(); + } @AfterEach @@ -418,4 +420,56 @@ public void shouldReturn400WhenTryingToCreateConnectorWithExistingName() { .expectStatus() .isBadRequest(); } + + @Test + public void shouldResetConnectorWhenInStoppedState() { + + webTestClient.get() + .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}", + LOCAL, connectName, connectorName) + .exchange() + .expectStatus().isOk() + .expectBody(ConnectorDTO.class) + .value(connector -> assertThat(connector.getStatus().getState()).isEqualTo(ConnectorStateDTO.RUNNING)); + + webTestClient.post() + .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}/action/STOP", + LOCAL, connectName, connectorName) + .exchange() + .expectStatus().isOk(); + + webTestClient.get() + .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}", + LOCAL, connectName, connectorName) + .exchange() + .expectStatus().isOk() + 
.expectBody(ConnectorDTO.class) + .value(connector -> assertThat(connector.getStatus().getState()).isEqualTo(ConnectorStateDTO.STOPPED)); + + webTestClient.delete() + .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}/offsets", + LOCAL, connectName, connectorName) + .exchange() + .expectStatus().isOk(); + + } + + @Test + public void shouldReturn400WhenResettingConnectorInRunningState() { + + webTestClient.get() + .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}", + LOCAL, connectName, connectorName) + .exchange() + .expectStatus().isOk() + .expectBody(ConnectorDTO.class) + .value(connector -> assertThat(connector.getStatus().getState()).isEqualTo(ConnectorStateDTO.RUNNING)); + + webTestClient.delete() + .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}/offsets", LOCAL, + connectName, connectorName) + .exchange() + .expectStatus().isBadRequest(); + + } } diff --git a/contract/pom.xml b/contract/pom.xml index 8d7e76cea..55d86d40b 100644 --- a/contract/pom.xml +++ b/contract/pom.xml @@ -201,6 +201,7 @@ gen:sources + false diff --git a/contract/src/main/resources/swagger/kafbat-ui-api.yaml b/contract/src/main/resources/swagger/kafbat-ui-api.yaml index 315c4a17e..24530bcb4 100644 --- a/contract/src/main/resources/swagger/kafbat-ui-api.yaml +++ b/contract/src/main/resources/swagger/kafbat-ui-api.yaml @@ -1565,7 +1565,7 @@ paths: post: tags: - Kafka Connect - summary: update connector state (restart, pause or resume) + summary: update connector state (restart, pause, stop or resume) operationId: updateConnectorState parameters: - name: clusterName @@ -1722,6 +1722,31 @@ paths: 200: description: OK + /api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}/offsets: + delete: + tags: + - Kafka Connect + summary: reset the offsets for the specified connector + operationId: resetConnectorOffsets + parameters: + - name: clusterName + in: path + required: true + schema: + type: string + - name: connectName + in: path + required: true + schema: + type: string + - name: connectorName + in: path + required: true + schema: + type: string + responses: + 200: + description: OK /api/clusters/{clusterName}/ksql/v2: post: @@ -3567,6 +3592,7 @@ components: - RESTART_FAILED_TASKS - PAUSE - RESUME + - STOP TaskAction: type: string @@ -3953,7 +3979,16 @@ components: KafkaAcl: type: object - required: [resourceType, resourceName, namePatternType, principal, host, operation, permission] + required: + [ + resourceType, + resourceName, + namePatternType, + principal, + host, + operation, + permission, + ] properties: resourceType: $ref: '#/components/schemas/KafkaAclResourceType' diff --git a/contract/src/main/resources/swagger/kafka-connect-api.yaml b/contract/src/main/resources/swagger/kafka-connect-api.yaml index e014d5529..5fa8dc230 100644 --- a/contract/src/main/resources/swagger/kafka-connect-api.yaml +++ b/contract/src/main/resources/swagger/kafka-connect-api.yaml @@ -144,6 +144,42 @@ paths: 500: description: Internal server error + /connectors/{connector}/offsets: + delete: + tags: + - KafkaConnectClient + summary: Reset the offsets for the specified connector + operationId: resetConnectorOffsets + parameters: + - in: path + name: connector + required: true + schema: + type: string + responses: + 200: + description: OK + 400: + description: Bad request + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectorOffsetsError' + + get: + tags: + - KafkaConnectClient + 
summary: Get the offsets for the specified connector + operationId: getConnectorOffsets + parameters: + - in: path + name: connector + required: true + schema: + type: string + responses: + 200: + description: OK /connectors/{connectorName}/status: get: @@ -230,6 +266,22 @@ paths: 202: description: Accepted + /connectors/{connectorName}/stop: + put: + tags: + - KafkaConnectClient + summary: stop the connector + operationId: stopConnector + parameters: + - name: connectorName + in: path + required: true + schema: + type: string + responses: + 204: + description: No Content + /connectors/{connectorName}/tasks: get: tags: @@ -432,6 +484,14 @@ components: trace: type: string + ConnectorOffsetsError: + type: object + properties: + error_code: + type: number + message: + type: string + ConnectorStatus: type: object properties: diff --git a/frontend/src/components/Connect/Details/Actions/Actions.tsx b/frontend/src/components/Connect/Details/Actions/Actions.tsx index 61eeabda4..6909b4617 100644 --- a/frontend/src/components/Connect/Details/Actions/Actions.tsx +++ b/frontend/src/components/Connect/Details/Actions/Actions.tsx @@ -11,6 +11,7 @@ import useAppParams from 'lib/hooks/useAppParams'; import { useConnector, useDeleteConnector, + useResetConnectorOffsets, useUpdateConnectorState, } from 'lib/hooks/api/kafkaConnect'; import { @@ -37,7 +38,7 @@ const Actions: React.FC = () => { const deleteConnectorHandler = () => confirm( <> - Are you sure you want to remove {routerProps.connectorName}{' '} + Are you sure you want to remove the {routerProps.connectorName}{' '} connector? , async () => { @@ -59,11 +60,25 @@ const Actions: React.FC = () => { stateMutation.mutateAsync(ConnectorAction.RESTART_FAILED_TASKS); const pauseConnectorHandler = () => stateMutation.mutateAsync(ConnectorAction.PAUSE); + const stopConnectorHandler = () => + stateMutation.mutateAsync(ConnectorAction.STOP); const resumeConnectorHandler = () => stateMutation.mutateAsync(ConnectorAction.RESUME); + + const resetConnectorOffsetsMutation = useResetConnectorOffsets(routerProps); + const resetConnectorOffsetsHandler = () => + confirm( + <> + Are you sure you want to reset the {routerProps.connectorName}{' '} + connector offsets? 
+ , + () => resetConnectorOffsetsMutation.mutateAsync() + ); + return ( Restart @@ -74,7 +89,6 @@ const Actions: React.FC = () => { {connector?.status.state === ConnectorState.RUNNING && ( { Pause )} - {connector?.status.state === ConnectorState.PAUSED && ( + {connector?.status.state === ConnectorState.RUNNING && ( + + Stop + + )} + {(connector?.status.state === ConnectorState.PAUSED || + connector?.status.state === ConnectorState.STOPPED) && ( { )} { { { + + Reset Offsets + ({ @@ -34,12 +35,14 @@ jest.mock('lib/hooks/api/kafkaConnect', () => ({ useConnector: jest.fn(), useDeleteConnector: jest.fn(), useUpdateConnectorState: jest.fn(), + useResetConnectorOffsets: jest.fn(), })); const expectActionButtonsExists = () => { expect(screen.getByText('Restart Connector')).toBeInTheDocument(); expect(screen.getByText('Restart All Tasks')).toBeInTheDocument(); expect(screen.getByText('Restart Failed Tasks')).toBeInTheDocument(); + expect(screen.getByText('Reset Offsets')).toBeInTheDocument(); expect(screen.getByText('Delete')).toBeInTheDocument(); }; const afterClickDropDownButton = async () => { @@ -55,6 +58,7 @@ describe('Actions', () => { mockHistoryPush.mockClear(); deleteConnector.mockClear(); cancelMock.mockClear(); + resetConnectorOffsets.mockClear(); }); describe('view', () => { @@ -82,6 +86,30 @@ describe('Actions', () => { expect(screen.getAllByRole('menuitem').length).toEqual(4); expect(screen.getByText('Resume')).toBeInTheDocument(); expect(screen.queryByText('Pause')).not.toBeInTheDocument(); + expect(screen.queryByText('Stop')).not.toBeInTheDocument(); + await afterClickDropDownButton(); + expect(screen.getByText('Reset Offsets')).toBeInTheDocument(); + expect( + screen.getByRole('menuitem', { name: 'Reset Offsets' }) + ).toHaveAttribute('aria-disabled'); + expectActionButtonsExists(); + }); + + it('renders buttons when stopped', async () => { + (useConnector as jest.Mock).mockImplementation(() => ({ + data: setConnectorStatus(connector, ConnectorState.STOPPED), + })); + renderComponent(); + await afterClickRestartButton(); + expect(screen.getAllByRole('menuitem').length).toEqual(4); + expect(screen.getByText('Resume')).toBeInTheDocument(); + expect(screen.queryByText('Pause')).not.toBeInTheDocument(); + expect(screen.queryByText('Stop')).not.toBeInTheDocument(); + await afterClickDropDownButton(); + expect(screen.getByText('Reset Offsets')).toBeInTheDocument(); + expect( + screen.getByRole('menuitem', { name: 'Reset Offsets' }) + ).not.toHaveAttribute('aria-disabled'); expectActionButtonsExists(); }); @@ -94,6 +122,12 @@ describe('Actions', () => { expect(screen.getAllByRole('menuitem').length).toEqual(3); expect(screen.queryByText('Resume')).not.toBeInTheDocument(); expect(screen.queryByText('Pause')).not.toBeInTheDocument(); + expect(screen.queryByText('Stop')).not.toBeInTheDocument(); + await afterClickDropDownButton(); + expect(screen.getByText('Reset Offsets')).toBeInTheDocument(); + expect( + screen.getByRole('menuitem', { name: 'Reset Offsets' }) + ).toHaveAttribute('aria-disabled'); expectActionButtonsExists(); }); @@ -106,6 +140,12 @@ describe('Actions', () => { expect(screen.getAllByRole('menuitem').length).toEqual(3); expect(screen.queryByText('Resume')).not.toBeInTheDocument(); expect(screen.queryByText('Pause')).not.toBeInTheDocument(); + expect(screen.queryByText('Stop')).not.toBeInTheDocument(); + await afterClickDropDownButton(); + expect(screen.getByText('Reset Offsets')).toBeInTheDocument(); + expect( + screen.getByRole('menuitem', { name: 'Reset Offsets' }) + 
).toHaveAttribute('aria-disabled'); expectActionButtonsExists(); }); @@ -115,9 +155,15 @@ describe('Actions', () => { })); renderComponent(); await afterClickRestartButton(); - expect(screen.getAllByRole('menuitem').length).toEqual(4); + expect(screen.getAllByRole('menuitem').length).toEqual(5); expect(screen.queryByText('Resume')).not.toBeInTheDocument(); expect(screen.getByText('Pause')).toBeInTheDocument(); + expect(screen.getByText('Stop')).toBeInTheDocument(); + await afterClickDropDownButton(); + expect(screen.getByText('Reset Offsets')).toBeInTheDocument(); + expect( + screen.getByRole('menuitem', { name: 'Reset Offsets' }) + ).toHaveAttribute('aria-disabled'); expectActionButtonsExists(); }); @@ -137,6 +183,20 @@ describe('Actions', () => { expect(screen.getByRole('dialog')).toBeInTheDocument(); }); + it('opens confirmation modal when reset offsets button clicked on a STOPPED connector', async () => { + (useConnector as jest.Mock).mockImplementation(() => ({ + data: setConnectorStatus(connector, ConnectorState.STOPPED), + })); + renderComponent(); + await afterClickDropDownButton(); + await waitFor(async () => + userEvent.click( + screen.getByRole('menuitem', { name: 'Reset Offsets' }) + ) + ); + expect(screen.getByRole('dialog')).toBeInTheDocument(); + }); + it('calls restartConnector when restart button clicked', async () => { const restartConnector = jest.fn(); (useUpdateConnectorState as jest.Mock).mockImplementation(() => ({ @@ -191,7 +251,18 @@ describe('Actions', () => { expect(pauseConnector).toHaveBeenCalledWith(ConnectorAction.PAUSE); }); - it('calls resumeConnector when resume button clicked', async () => { + it('calls stopConnector when stop button clicked', async () => { + const stopConnector = jest.fn(); + (useUpdateConnectorState as jest.Mock).mockImplementation(() => ({ + mutateAsync: stopConnector, + })); + renderComponent(); + await afterClickRestartButton(); + await userEvent.click(screen.getByRole('menuitem', { name: 'Stop' })); + expect(stopConnector).toHaveBeenCalledWith(ConnectorAction.STOP); + }); + + it('calls resumeConnector when resume button clicked from PAUSED state', async () => { const resumeConnector = jest.fn(); (useConnector as jest.Mock).mockImplementation(() => ({ data: setConnectorStatus(connector, ConnectorState.PAUSED), @@ -204,6 +275,20 @@ describe('Actions', () => { await userEvent.click(screen.getByRole('menuitem', { name: 'Resume' })); expect(resumeConnector).toHaveBeenCalledWith(ConnectorAction.RESUME); }); + + it('calls resumeConnector when resume button clicked from STOPPED state', async () => { + const resumeConnector = jest.fn(); + (useConnector as jest.Mock).mockImplementation(() => ({ + data: setConnectorStatus(connector, ConnectorState.STOPPED), + })); + (useUpdateConnectorState as jest.Mock).mockImplementation(() => ({ + mutateAsync: resumeConnector, + })); + renderComponent(); + await afterClickRestartButton(); + await userEvent.click(screen.getByRole('menuitem', { name: 'Resume' })); + expect(resumeConnector).toHaveBeenCalledWith(ConnectorAction.RESUME); + }); }); }); }); diff --git a/frontend/src/components/Connect/List/ActionsCell.tsx b/frontend/src/components/Connect/List/ActionsCell.tsx index 9b219f20e..4ad50c00e 100644 --- a/frontend/src/components/Connect/List/ActionsCell.tsx +++ b/frontend/src/components/Connect/List/ActionsCell.tsx @@ -9,9 +9,10 @@ import { import { CellContext } from '@tanstack/react-table'; import { ClusterNameRoute } from 'lib/paths'; import useAppParams from 'lib/hooks/useAppParams'; -import { 
Dropdown, DropdownItem } from 'components/common/Dropdown'; +import { Dropdown } from 'components/common/Dropdown'; import { useDeleteConnector, + useResetConnectorOffsets, useUpdateConnectorState, } from 'lib/hooks/api/kafkaConnect'; import { useConfirm } from 'lib/hooks/useConfirm'; @@ -36,10 +37,15 @@ const ActionsCell: React.FC> = ({ connectName: connect, connectorName: name, }); + const resetConnectorOffsetsMutation = useResetConnectorOffsets({ + clusterName, + connectName: connect, + connectorName: name, + }); const handleDelete = () => { confirm( <> - Are you sure want to remove {name} connector? + Are you sure you want to remove the {name} connector? , async () => { await deleteMutation.mutateAsync(); @@ -58,9 +64,25 @@ const ActionsCell: React.FC> = ({ const restartFailedTasksHandler = () => stateMutation.mutateAsync(ConnectorAction.RESTART_FAILED_TASKS); + const pauseConnectorHandler = () => + stateMutation.mutateAsync(ConnectorAction.PAUSE); + + const stopConnectorHandler = () => + stateMutation.mutateAsync(ConnectorAction.STOP); + + const resetOffsetsHandler = () => { + confirm( + <> + Are you sure you want to reset the {name} connector offsets? + , + () => resetConnectorOffsetsMutation.mutateAsync() + ); + }; + return ( - {status.state === ConnectorState.PAUSED && ( + {(status.state === ConnectorState.PAUSED || + status.state === ConnectorState.STOPPED) && ( > = ({ Resume )} + {status.state === ConnectorState.RUNNING && ( + + Pause + + )} + {status.state === ConnectorState.RUNNING && ( + + Stop + + )} > = ({ > Restart Failed Tasks - - Remove Connector - + + Reset Offsets + + + Delete + ); }; diff --git a/frontend/src/components/Connect/List/__tests__/List.spec.tsx b/frontend/src/components/Connect/List/__tests__/List.spec.tsx index 82b4aab21..194d97246 100644 --- a/frontend/src/components/Connect/List/__tests__/List.spec.tsx +++ b/frontend/src/components/Connect/List/__tests__/List.spec.tsx @@ -12,11 +12,13 @@ import { clusterConnectConnectorPath, clusterConnectorsPath } from 'lib/paths'; import { useConnectors, useDeleteConnector, + useResetConnectorOffsets, useUpdateConnectorState, } from 'lib/hooks/api/kafkaConnect'; const mockedUsedNavigate = jest.fn(); const mockDelete = jest.fn(); +const mockResetOffsets = jest.fn(); jest.mock('react-router-dom', () => ({ ...jest.requireActual('react-router-dom'), @@ -27,6 +29,7 @@ jest.mock('lib/hooks/api/kafkaConnect', () => ({ useConnectors: jest.fn(), useDeleteConnector: jest.fn(), useUpdateConnectorState: jest.fn(), + useResetConnectorOffsets: jest.fn(), })); const clusterName = 'local'; @@ -56,7 +59,7 @@ describe('Connectors List', () => { it('renders', async () => { renderComponent(); expect(screen.getByRole('table')).toBeInTheDocument(); - expect(screen.getAllByRole('row').length).toEqual(3); + expect(screen.getAllByRole('row').length).toEqual(4); }); it('opens broker when row clicked', async () => { @@ -94,7 +97,7 @@ describe('Connectors List', () => { }); }); - describe('when remove connector modal is open', () => { + describe('when delete modal is open', () => { beforeEach(() => { (useConnectors as jest.Mock).mockImplementation(() => ({ data: connectors, @@ -104,10 +107,10 @@ describe('Connectors List', () => { })); }); - it('calls removeConnector on confirm', async () => { + it('calls deleteConnector on confirm', async () => { renderComponent(); - const removeButton = screen.getAllByText('Remove Connector')[0]; - await waitFor(() => userEvent.click(removeButton)); + const deleteButton = screen.getAllByText('Delete')[0]; + 
await waitFor(() => userEvent.click(deleteButton)); const submitButton = screen.getAllByRole('button', { name: 'Confirm', @@ -118,8 +121,43 @@ describe('Connectors List', () => { it('closes the modal when cancel button is clicked', async () => { renderComponent(); - const removeButton = screen.getAllByText('Remove Connector')[0]; - await waitFor(() => userEvent.click(removeButton)); + const deleteButton = screen.getAllByText('Delete')[0]; + await waitFor(() => userEvent.click(deleteButton)); + + const cancelButton = screen.getAllByRole('button', { + name: 'Cancel', + })[0]; + await waitFor(() => userEvent.click(cancelButton)); + expect(cancelButton).not.toBeInTheDocument(); + }); + }); + + describe('when reset connector offsets modal is open', () => { + beforeEach(() => { + (useConnectors as jest.Mock).mockImplementation(() => ({ + data: connectors, + })); + (useResetConnectorOffsets as jest.Mock).mockImplementation(() => ({ + mutateAsync: mockResetOffsets, + })); + }); + + it('calls resetConnectorOffsets on confirm', async () => { + renderComponent(); + const resetButton = screen.getAllByText('Reset Offsets')[2]; + await waitFor(() => userEvent.click(resetButton)); + + const submitButton = screen.getAllByRole('button', { + name: 'Confirm', + })[0]; + await userEvent.click(submitButton); + expect(mockResetOffsets).toHaveBeenCalledWith(); + }); + + it('closes the modal when cancel button is clicked', async () => { + renderComponent(); + const resetButton = screen.getAllByText('Reset Offsets')[2]; + await waitFor(() => userEvent.click(resetButton)); const cancelButton = screen.getAllByRole('button', { name: 'Cancel', diff --git a/frontend/src/lib/fixtures/kafkaConnect.ts b/frontend/src/lib/fixtures/kafkaConnect.ts index 8a79760e6..4185695ee 100644 --- a/frontend/src/lib/fixtures/kafkaConnect.ts +++ b/frontend/src/lib/fixtures/kafkaConnect.ts @@ -38,6 +38,18 @@ export const connectors: FullConnectorInfo[] = [ tasksCount: 3, failedTasksCount: 1, }, + { + connect: 'third', + name: 'hdfs3-source-connector', + connectorClass: 'FileStreamSource', + type: ConnectorType.SINK, + topics: ['test-topic'], + status: { + state: ConnectorState.STOPPED, + }, + tasksCount: 0, + failedTasksCount: 0, + }, ]; export const connector: Connector = { diff --git a/frontend/src/lib/hooks/api/kafkaConnect.ts b/frontend/src/lib/hooks/api/kafkaConnect.ts index 225e72165..743d78307 100644 --- a/frontend/src/lib/hooks/api/kafkaConnect.ts +++ b/frontend/src/lib/hooks/api/kafkaConnect.ts @@ -98,7 +98,10 @@ export function useUpdateConnectorState(props: UseConnectorProps) { (action: ConnectorAction) => api.updateConnectorState({ ...props, action }), { onSuccess: () => - client.invalidateQueries(['clusters', props.clusterName, 'connectors']), + Promise.all([ + client.invalidateQueries(connectorsKey(props.clusterName)), + client.invalidateQueries(connectorKey(props)), + ]), } ); } @@ -161,3 +164,11 @@ export function useDeleteConnector(props: UseConnectorProps) { onSuccess: () => client.invalidateQueries(connectorsKey(props.clusterName)), }); } + +export function useResetConnectorOffsets(props: UseConnectorProps) { + const client = useQueryClient(); + + return useMutation(() => api.resetConnectorOffsets(props), { + onSuccess: () => client.invalidateQueries(connectorKey(props)), + }); +} diff --git a/pom.xml b/pom.xml index 64b10b982..5fe46b1fe 100644 --- a/pom.xml +++ b/pom.xml @@ -73,10 +73,6 @@ - - confluent - https://packages.confluent.io/maven/ - central Central Repository @@ -86,13 +82,13 @@ false + + confluent + 
https://packages.confluent.io/maven/ + - - confluent - https://packages.confluent.io/maven/ - central Central Repository @@ -105,6 +101,10 @@ never + + confluent + https://packages.confluent.io/maven/ + From 38fccefd572086bd416f412f6563363605a47b8a Mon Sep 17 00:00:00 2001 From: Yeikel Date: Tue, 14 Jan 2025 10:35:08 -0500 Subject: [PATCH 54/84] Docs: Remove references to arm-specific local setup (#771) --- .dev/{dev_arm64.yaml => dev.yaml} | 16 +++++----------- documentation/compose/DOCKER_COMPOSE.md | 2 +- 2 files changed, 6 insertions(+), 12 deletions(-) rename .dev/{dev_arm64.yaml => dev.yaml} (91%) diff --git a/.dev/dev_arm64.yaml b/.dev/dev.yaml similarity index 91% rename from .dev/dev_arm64.yaml rename to .dev/dev.yaml index dc1a8726e..8c2ba5e74 100644 --- a/.dev/dev_arm64.yaml +++ b/.dev/dev.yaml @@ -1,9 +1,3 @@ -# This is a compose file designed for arm64/Apple Silicon systems -# To adapt this to x86 please find and replace ".arm64" with empty - -# ARM64 supported images for kafka can be found here -# https://hub.docker.com/r/confluentinc/cp-kafka/tags?page=1&name=arm64 ---- version: '3.8' name: "kafbat-ui-dev" @@ -32,7 +26,7 @@ services: KAFKA_CLUSTERS_0_AUDIT_CONSOLEAUDITENABLED: 'true' kafka0: - image: confluentinc/cp-kafka:7.8.0.arm64 + image: confluentinc/cp-kafka:7.8.0 user: "0:0" hostname: kafka0 container_name: kafka0 @@ -60,7 +54,7 @@ services: CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' schema-registry0: - image: confluentinc/cp-schema-registry:7.8.0.arm64 + image: confluentinc/cp-schema-registry:7.8.0 ports: - 8085:8085 depends_on: @@ -76,7 +70,7 @@ services: SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas kafka-connect0: - image: confluentinc/cp-kafka-connect:7.8.0.arm64 + image: confluentinc/cp-kafka-connect:7.8.0 ports: - 8083:8083 depends_on: @@ -101,7 +95,7 @@ services: CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components,/usr/local/share/kafka/plugins,/usr/share/filestream-connectors" ksqldb0: - image: confluentinc/cp-ksqldb-server:7.8.0.arm64 + image: confluentinc/cp-ksqldb-server:7.8.0 depends_on: - kafka0 - kafka-connect0 @@ -119,7 +113,7 @@ services: KSQL_CACHE_MAX_BYTES_BUFFERING: 0 kafka-init-topics: - image: confluentinc/cp-kafka:7.8.0.arm64 + image: confluentinc/cp-kafka:7.8.0 volumes: - ../documentation/compose/data/message.json:/data/message.json depends_on: diff --git a/documentation/compose/DOCKER_COMPOSE.md b/documentation/compose/DOCKER_COMPOSE.md index 57a5cf4d0..186c15c45 100644 --- a/documentation/compose/DOCKER_COMPOSE.md +++ b/documentation/compose/DOCKER_COMPOSE.md @@ -1,7 +1,7 @@ # Descriptions of docker-compose configurations (*.yaml) 1. [kafka-ui.yaml](./kafbat-ui.yaml) - Default configuration with 2 kafka clusters with two nodes of Schema Registry, one kafka-connect and a few dummy topics. -2. [kafka-ui-arm64.yaml](../../.dev/dev_arm64.yaml) - Default configuration for ARM64(Mac M1) architecture with 1 kafka cluster without zookeeper with one node of Schema Registry, one kafka-connect and a few dummy topics. +2. [kafka-ui.yaml](../../.dev/dev_arm64.yaml) - Default configuration with 1 kafka cluster without zookeeper with one node of Schema Registry, one kafka-connect and a few dummy topics. 3. [kafka-ui-ssl.yml](./kafka-ssl.yml) - Connect to Kafka via TLS/SSL 4. [kafka-cluster-sr-auth.yaml](./cluster-sr-auth.yaml) - Schema registry with authentication. 5. [kafka-ui-auth-context.yaml](./auth-context.yaml) - Basic (username/password) authentication with custom path (URL) (issue 861). 
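For readers skimming the compose catalogue above, a minimal service definition in the same style is sketched below. It is illustrative only and is not part of any patch in this series: the KAFKA_CLUSTERS_0_* environment keys follow the naming visible in the dev.yaml file edited just above, while the image reference, cluster name, bootstrap address and host port are assumptions for a local setup rather than values taken from the repository.

    # Illustrative sketch only — not taken from the repository.
    # The KAFKA_CLUSTERS_0_* keys mirror the naming used in the project's own compose files;
    # the image tag, bootstrap address and host port below are assumed values.
    services:
      kafbat-ui:
        container_name: kafbat-ui
        image: ghcr.io/kafbat/kafka-ui:latest
        ports:
          - "8080:8080"
        environment:
          KAFKA_CLUSTERS_0_NAME: local
          KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka0:29092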
From 6f89bfacc6bd7966437da3aaa6267fa086b80bd2 Mon Sep 17 00:00:00 2001 From: Roman Zabaluev Date: Wed, 15 Jan 2025 12:56:04 +0400 Subject: [PATCH 55/84] LDAP: Fix property nullability condition (#779) --- .../main/java/io/kafbat/ui/config/auth/LdapSecurityConfig.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/src/main/java/io/kafbat/ui/config/auth/LdapSecurityConfig.java b/api/src/main/java/io/kafbat/ui/config/auth/LdapSecurityConfig.java index 9b1445507..4b7473942 100644 --- a/api/src/main/java/io/kafbat/ui/config/auth/LdapSecurityConfig.java +++ b/api/src/main/java/io/kafbat/ui/config/auth/LdapSecurityConfig.java @@ -80,7 +80,7 @@ public AbstractLdapAuthenticationProvider authenticationProvider(LdapAuthorities } @Bean - @ConditionalOnProperty(value = "oauth2.ldap.activeDirectory", havingValue = "false") + @ConditionalOnProperty(value = "oauth2.ldap.activeDirectory", havingValue = "false", matchIfMissing = true) public BindAuthenticator ldapBindAuthentication(LdapContextSource ldapContextSource) { BindAuthenticator ba = new BindAuthenticator(ldapContextSource); From b918ab000ff66e022ea379d82360b63f87105f06 Mon Sep 17 00:00:00 2001 From: Yeikel Date: Wed, 15 Jan 2025 04:33:17 -0500 Subject: [PATCH 56/84] Infra: Run cve checks workflow on pull requests and pushes to main (#745) --- .github/workflows/cve_checks.yml | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/cve_checks.yml b/.github/workflows/cve_checks.yml index e9c90ac14..cfdc40a60 100644 --- a/.github/workflows/cve_checks.yml +++ b/.github/workflows/cve_checks.yml @@ -1,5 +1,9 @@ name: "Infra: CVE checks" on: + pull_request: + types: [ "opened", "reopened", "synchronize" ] + push: + branches: [ "main" ] workflow_dispatch: schedule: # * is a special character in YAML so you have to quote this string @@ -71,7 +75,7 @@ jobs: notify: needs: check-cves - if: ${{ always() && needs.build-and-test.result == 'failure' }} + if: ${{ always() && needs.build-and-test.result == 'failure' && github.event_name == 'schedule' }} uses: ./.github/workflows/infra_discord_hook.yml with: message: "Attention! CVE checks run failed! 
Please fix them CVEs :(" From ed49499241b429da5c8372e8cd4b100f22005e88 Mon Sep 17 00:00:00 2001 From: Yeikel Date: Wed, 15 Jan 2025 04:34:32 -0500 Subject: [PATCH 57/84] Infra: FE: limit pnpm and node version enforcement to the major versions (#777) --- frontend/package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/package.json b/frontend/package.json index b42a0d12e..6c55031f5 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -103,8 +103,8 @@ "whatwg-fetch": "3.6.20" }, "engines": { - "node": "^22.12.0", - "pnpm": "^9.15.0" + "node": "^22", + "pnpm": "^9" }, "pnpm": { "overrides": { From 1c5242f9bb4e7ec714afeae6f0bfd6b97fb1b9ad Mon Sep 17 00:00:00 2001 From: Dmitry Werner Date: Thu, 16 Jan 2025 19:10:00 +0500 Subject: [PATCH 58/84] BE: RBAC: Subject type/value is unintended to be optional (#719) Co-authored-by: Roman Zabaluev --- api/src/main/java/io/kafbat/ui/model/rbac/Role.java | 7 +++++-- .../main/java/io/kafbat/ui/model/rbac/Subject.java | 11 +++++++++++ 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/api/src/main/java/io/kafbat/ui/model/rbac/Role.java b/api/src/main/java/io/kafbat/ui/model/rbac/Role.java index db97d7649..e01392cc6 100644 --- a/api/src/main/java/io/kafbat/ui/model/rbac/Role.java +++ b/api/src/main/java/io/kafbat/ui/model/rbac/Role.java @@ -1,6 +1,7 @@ package io.kafbat.ui.model.rbac; -import com.google.common.base.Preconditions; +import static com.google.common.base.Preconditions.checkArgument; + import java.util.List; import lombok.Data; @@ -13,9 +14,11 @@ public class Role { List permissions; public void validate() { - Preconditions.checkArgument(!clusters.isEmpty(), "Role clusters cannot be empty"); + checkArgument(!clusters.isEmpty(), "Role clusters cannot be empty"); + checkArgument(!subjects.isEmpty(), "Role subjects cannot be empty"); permissions.forEach(Permission::transform); permissions.forEach(Permission::validate); + subjects.forEach(Subject::validate); } } diff --git a/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java b/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java index a71cebea3..d653c1d0b 100644 --- a/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java +++ b/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java @@ -1,5 +1,8 @@ package io.kafbat.ui.model.rbac; +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkNotNull; + import io.kafbat.ui.model.rbac.provider.Provider; import lombok.Getter; @@ -21,4 +24,12 @@ public void setType(String type) { public void setValue(String value) { this.value = value; } + + public void validate() { + checkNotNull(type, "Subject type cannot be null"); + checkNotNull(value, "Subject value cannot be null"); + + checkArgument(!type.isEmpty(), "Subject type cannot be empty"); + checkArgument(!value.isEmpty(), "Subject value cannot be empty"); + } } From 96b2c8bd38bc97c973b9f462252e9004224e4ede Mon Sep 17 00:00:00 2001 From: Roman Zabaluev Date: Thu, 16 Jan 2025 18:12:28 +0400 Subject: [PATCH 59/84] BE: Chore: Dynamic config file checks adjustments (#229) --- .../ui/util/DynamicConfigOperations.java | 45 ++++++++++--------- 1 file changed, 25 insertions(+), 20 deletions(-) diff --git a/api/src/main/java/io/kafbat/ui/util/DynamicConfigOperations.java b/api/src/main/java/io/kafbat/ui/util/DynamicConfigOperations.java index 9ac25c517..6c3a99b04 100644 --- a/api/src/main/java/io/kafbat/ui/util/DynamicConfigOperations.java +++ 
b/api/src/main/java/io/kafbat/ui/util/DynamicConfigOperations.java @@ -38,6 +38,7 @@ import org.yaml.snakeyaml.nodes.Tag; import org.yaml.snakeyaml.representer.Representer; import reactor.core.publisher.Mono; +import reactor.core.scheduler.Schedulers; @Slf4j @RequiredArgsConstructor @@ -125,26 +126,30 @@ public Mono uploadConfigRelatedFile(FilePart file) { String targetDirStr = ctx.getEnvironment() .getProperty(CONFIG_RELATED_UPLOADS_DIR_PROPERTY, CONFIG_RELATED_UPLOADS_DIR_DEFAULT); - Path targetDir = Path.of(targetDirStr); - if (!Files.exists(targetDir)) { - try { - Files.createDirectories(targetDir); - } catch (IOException e) { - return Mono.error( - new FileUploadException("Error creating directory for uploads %s".formatted(targetDir), e)); + Mono directoryCreationMono = Mono.fromCallable(() -> { + Path targetDir = Path.of(targetDirStr); + if (!Files.exists(targetDir)) { + try { + Files.createDirectories(targetDir); + } catch (IOException e) { + throw new FileUploadException("Error creating directory for uploads %s".formatted(targetDir), e); + } + } + return targetDir; + }).subscribeOn(Schedulers.boundedElastic()); + + return directoryCreationMono.flatMap(dir -> { + Path targetFilePath = dir.resolve(file.filename() + "-" + Instant.now().getEpochSecond()); + log.info("Uploading config-related file {}", targetFilePath); + if (Files.exists(targetFilePath)) { + log.info("File {} already exists, it will be overwritten", targetFilePath); } - } - - Path targetFilePath = targetDir.resolve(file.filename() + "-" + Instant.now().getEpochSecond()); - log.info("Uploading config-related file {}", targetFilePath); - if (Files.exists(targetFilePath)) { - log.info("File {} already exists, it will be overwritten", targetFilePath); - } - return file.transferTo(targetFilePath) - .thenReturn(targetFilePath) - .doOnError(th -> log.error("Error uploading file {}", targetFilePath, th)) - .onErrorMap(th -> new FileUploadException(targetFilePath, th)); + return file.transferTo(targetFilePath) + .thenReturn(targetFilePath) + .doOnError(th -> log.error("Error uploading file {}", targetFilePath, th)) + .onErrorMap(th -> new FileUploadException(targetFilePath, th)); + }); } private void checkIfDynamicConfigEnabled() { @@ -163,8 +168,8 @@ private void writeYamlToFile(String yaml, Path path) { if (!Files.exists(path.getParent())) { Files.createDirectories(path.getParent()); } - if (Files.exists(path) && !Files.isWritable(path)) { - throw new ValidationException("File already exists and is not writable"); + if (Files.exists(path) && (!Files.isReadable(path) || !Files.isWritable(path))) { + throw new ValidationException("File already exists and is not readable or writable"); } try { Files.writeString( From ff64818699034055db0da49530025d2ad6c9296b Mon Sep 17 00:00:00 2001 From: Yeikel Date: Thu, 16 Jan 2025 09:16:44 -0500 Subject: [PATCH 60/84] Infra: FE: Bump pnpm to 9.15.4 (#776) --- .github/workflows/frontend_tests.yml | 2 +- pom.xml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/frontend_tests.yml b/.github/workflows/frontend_tests.yml index a48708123..00d9b619a 100644 --- a/.github/workflows/frontend_tests.yml +++ b/.github/workflows/frontend_tests.yml @@ -23,7 +23,7 @@ jobs: - uses: pnpm/action-setup@v4.0.0 with: - version: 9.15.0 + version: 9.15.4 - name: Install node uses: actions/setup-node@v4.0.2 diff --git a/pom.xml b/pom.xml index 5fe46b1fe..93e516712 100644 --- a/pom.xml +++ b/pom.xml @@ -59,7 +59,7 @@ v22.12.0 - v9.15.0 + v9.15.4 0.45.1 From 
a5d34a702017539ce10300628a508120aa372f58 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 17 Jan 2025 23:54:37 +0400 Subject: [PATCH 61/84] Bump net.bytebuddy:byte-buddy from 1.14.19 to 1.15.11 (#715) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 93e516712..3426dbf86 100644 --- a/pom.xml +++ b/pom.xml @@ -34,7 +34,7 @@ 2.12.0 3.25.3 1.11.4 - 1.14.19 + 1.15.11 7.8.0 3.1.0 3.0.13 From 840a5c2169ebc4ecde3889110d66c460577dd0aa Mon Sep 17 00:00:00 2001 From: "George L. Yermulnik" Date: Thu, 30 Jan 2025 14:07:41 +0200 Subject: [PATCH 62/84] Infra: Adjust publish action for AWS ECR (#794) --- .github/workflows/docker_publish.yml | 31 ++++++++++++++-------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/.github/workflows/docker_publish.yml b/.github/workflows/docker_publish.yml index e359ea740..1806dc017 100644 --- a/.github/workflows/docker_publish.yml +++ b/.github/workflows/docker_publish.yml @@ -20,7 +20,7 @@ jobs: strategy: fail-fast: false matrix: - registry: [ 'docker.io', 'ghcr.io', 'ecr' ] + registry: [ 'docker.io', 'ghcr.io', 'public.ecr.aws' ] runs-on: ubuntu-latest steps: @@ -31,7 +31,8 @@ jobs: name: image path: /tmp - # setup containerd to preserve provenance attestations :https://docs.docker.com/build/attestations/#creating-attestations + # setup containerd to preserve provenance attestations: + # https://docs.docker.com/build/attestations/#creating-attestations - name: Setup docker with containerd uses: crazy-max/ghaction-setup-docker@v3 with: @@ -63,33 +64,33 @@ jobs: password: ${{ secrets.GITHUB_TOKEN }} - name: Configure AWS credentials - if: matrix.registry == 'ecr' + if: matrix.registry == 'public.ecr.aws' uses: aws-actions/configure-aws-credentials@v4 with: aws-region: us-east-1 # This region only for public ECR role-to-assume: ${{ secrets.AWS_ROLE }} - name: Login to public ECR - if: matrix.registry == 'ecr' + if: matrix.registry == 'public.ecr.aws' id: login-ecr-public uses: aws-actions/amazon-ecr-login@v2 with: registry-type: public - - name: define env vars + - name: Define env vars for container registry URL run: | - if [ ${{matrix.registry }} == 'docker.io' ]; then - echo "REGISTRY=${{ matrix.registry }}" >> $GITHUB_ENV - echo "REPOSITORY=${{ github.repository }}" >> $GITHUB_ENV - elif [ ${{ matrix.registry }} == 'ghcr.io' ]; then - echo "REGISTRY=${{ matrix.registry }}" >> $GITHUB_ENV - echo "REPOSITORY=${{ github.repository }}" >> $GITHUB_ENV - elif [ ${{ matrix.registry }} == 'ecr' ]; then + if [ ${{ matrix.registry }} == 'public.ecr.aws' ]; then + # vars.ECR_REGISTRY value is expected to be of the `public.ecr.aws/` form + # The `public_ecr_id` must be a *default* alias associated with public regsitry (rather + # than a custom alias) echo "REGISTRY=${{ vars.ECR_REGISTRY }}" >> $GITHUB_ENV + # Trim GH Org name so that resulting Public ECR URL has no duplicate org name + # Public ECR default alias: public.ecr.aws//kafka-ui + # Public ECR custom alias: public.ecr.aws/kafbat/kafka-ui + echo "REPOSITORY=$(basename ${{ github.repository }})" >> $GITHUB_ENV + else # this covers the case of docker.io and ghcr.io + echo "REGISTRY=${{ matrix.registry }}" >> $GITHUB_ENV echo "REPOSITORY=${{ github.repository }}" >> $GITHUB_ENV - else - echo "REGISTRY=" >> $GITHUB_ENV - echo "REPOSITORY=notworking" >> $GITHUB_ENV fi - name: 
Push images to ${{ matrix.registry }} From ff17c7a5b6ebb5820de058a6dbe3c314d97fa216 Mon Sep 17 00:00:00 2001 From: Roman Zabaluev Date: Thu, 30 Jan 2025 16:12:57 +0400 Subject: [PATCH 63/84] SR: Fix API config path (#811) --- contract/src/main/resources/swagger/kafka-sr-api.yaml | 2 +- .../test/java/io/kafbat/ui/smokesuite/brokers/BrokersTest.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/contract/src/main/resources/swagger/kafka-sr-api.yaml b/contract/src/main/resources/swagger/kafka-sr-api.yaml index 0320e891e..2b082d689 100644 --- a/contract/src/main/resources/swagger/kafka-sr-api.yaml +++ b/contract/src/main/resources/swagger/kafka-sr-api.yaml @@ -165,7 +165,7 @@ paths: schema: $ref: '#/components/schemas/SubjectId' - /config/: + /config: get: tags: - KafkaSrClient diff --git a/e2e-tests/src/test/java/io/kafbat/ui/smokesuite/brokers/BrokersTest.java b/e2e-tests/src/test/java/io/kafbat/ui/smokesuite/brokers/BrokersTest.java index 64fb73cc4..ae5867bfc 100644 --- a/e2e-tests/src/test/java/io/kafbat/ui/smokesuite/brokers/BrokersTest.java +++ b/e2e-tests/src/test/java/io/kafbat/ui/smokesuite/brokers/BrokersTest.java @@ -139,7 +139,7 @@ public void brokersSourceInfoCheck() { Assert.assertEquals(sourceInfoTooltip, Common.BROKER_SOURCE_INFO_TOOLTIP, "getSourceInfoTooltipText()"); } - @Test + @Test(enabled = false) // flaky, TODO issues/322 public void brokersConfigEditCheck() { navigateToBrokersAndOpenDetails(DEFAULT_BROKER_ID); brokersDetails From 5a4011710cfc4df54e961e2a027fd329ecbf8b75 Mon Sep 17 00:00:00 2001 From: Dmitry Werner Date: Mon, 3 Feb 2025 10:38:42 +0500 Subject: [PATCH 64/84] Docs: Fix broken composes (#796) --- .dev/dev.yaml | 1 - documentation/compose/auth-context.yaml | 4 +--- documentation/compose/cluster-sr-auth.yaml | 4 +--- documentation/compose/e2e-tests.yaml | 1 - documentation/compose/kafbat-ui.yaml | 8 ++------ documentation/compose/kafka-ssl-components.yaml | 3 +-- documentation/compose/kafka-ssl.yml | 3 +-- documentation/compose/ui-acl-with-zk.yaml | 1 - documentation/compose/ui-connectors-auth.yaml | 4 +--- documentation/compose/ui-jmx-secured.yml | 3 +-- documentation/compose/ui-ldap.yaml | 4 +--- documentation/compose/ui-sasl.yaml | 3 +-- documentation/compose/ui-serdes.yaml | 4 +--- documentation/compose/ui-with-jmx-exporter.yaml | 4 ++-- 14 files changed, 13 insertions(+), 34 deletions(-) diff --git a/.dev/dev.yaml b/.dev/dev.yaml index 8c2ba5e74..47149ed92 100644 --- a/.dev/dev.yaml +++ b/.dev/dev.yaml @@ -27,7 +27,6 @@ services: kafka0: image: confluentinc/cp-kafka:7.8.0 - user: "0:0" hostname: kafka0 container_name: kafka0 ports: diff --git a/documentation/compose/auth-context.yaml b/documentation/compose/auth-context.yaml index dd2755786..d8d465e51 100644 --- a/documentation/compose/auth-context.yaml +++ b/documentation/compose/auth-context.yaml @@ -42,6 +42,4 @@ services: KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT' KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' - volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - command: "bash -c 'if [ ! 
-f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' diff --git a/documentation/compose/cluster-sr-auth.yaml b/documentation/compose/cluster-sr-auth.yaml index 83f81da52..78e80b4f1 100644 --- a/documentation/compose/cluster-sr-auth.yaml +++ b/documentation/compose/cluster-sr-auth.yaml @@ -26,9 +26,7 @@ services: KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT' KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' - volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' schemaregistry1: image: confluentinc/cp-schema-registry:7.8.0 diff --git a/documentation/compose/e2e-tests.yaml b/documentation/compose/e2e-tests.yaml index e18eb7a16..cdece30ce 100644 --- a/documentation/compose/e2e-tests.yaml +++ b/documentation/compose/e2e-tests.yaml @@ -30,7 +30,6 @@ services: kafka0: image: confluentinc/cp-kafka:7.8.0 - user: "0:0" hostname: kafka0 container_name: kafka0 healthcheck: diff --git a/documentation/compose/kafbat-ui.yaml b/documentation/compose/kafbat-ui.yaml index c6df25737..afbe0d26c 100644 --- a/documentation/compose/kafbat-ui.yaml +++ b/documentation/compose/kafbat-ui.yaml @@ -50,9 +50,7 @@ services: KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT' KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' - volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' kafka1: image: confluentinc/cp-kafka:7.8.0 @@ -78,9 +76,7 @@ services: KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT' KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' - volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" + CLUSTER_ID: 'zlFiTJelTOuhnklFwLWixw' schemaregistry0: image: confluentinc/cp-schema-registry:7.8.0 diff --git a/documentation/compose/kafka-ssl-components.yaml b/documentation/compose/kafka-ssl-components.yaml index c95639902..e616a52e1 100644 --- a/documentation/compose/kafka-ssl-components.yaml +++ b/documentation/compose/kafka-ssl-components.yaml @@ -72,12 +72,11 @@ services: #KAFKA_SSL_CLIENT_AUTH: 'required' KAFKA_SSL_CLIENT_AUTH: 'requested' KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: '' # COMMON NAME VERIFICATION IS DISABLED SERVER-SIDE + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - ./ssl/creds:/etc/kafka/secrets/creds - ./ssl/kafka.truststore.jks:/etc/kafka/secrets/kafka.truststore.jks - ./ssl/kafka.keystore.jks:/etc/kafka/secrets/kafka.keystore.jks - command: "bash -c 'if [ ! 
-f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" schemaregistry0: image: confluentinc/cp-schema-registry:7.8.0 diff --git a/documentation/compose/kafka-ssl.yml b/documentation/compose/kafka-ssl.yml index c427ed66d..5680a5cc6 100644 --- a/documentation/compose/kafka-ssl.yml +++ b/documentation/compose/kafka-ssl.yml @@ -55,9 +55,8 @@ services: #KAFKA_SSL_CLIENT_AUTH: 'required' KAFKA_SSL_CLIENT_AUTH: 'requested' KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: '' # COMMON NAME VERIFICATION IS DISABLED SERVER-SIDE + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - ./ssl/creds:/etc/kafka/secrets/creds - ./ssl/kafka.truststore.jks:/etc/kafka/secrets/kafka.truststore.jks - ./ssl/kafka.keystore.jks:/etc/kafka/secrets/kafka.keystore.jks - command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" diff --git a/documentation/compose/ui-acl-with-zk.yaml b/documentation/compose/ui-acl-with-zk.yaml index 14e433485..9572e0c8c 100644 --- a/documentation/compose/ui-acl-with-zk.yaml +++ b/documentation/compose/ui-acl-with-zk.yaml @@ -55,5 +55,4 @@ services: KAFKA_SECURITY_PROTOCOL: 'SASL_PLAINTEXT' KAFKA_SUPER_USERS: 'User:admin' volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - ./jaas:/etc/kafka/jaas diff --git a/documentation/compose/ui-connectors-auth.yaml b/documentation/compose/ui-connectors-auth.yaml index b86c1d3f8..723fa1534 100644 --- a/documentation/compose/ui-connectors-auth.yaml +++ b/documentation/compose/ui-connectors-auth.yaml @@ -45,9 +45,7 @@ services: KAFKA_INTER_BROKER_LISTENER_NAME: "PLAINTEXT" KAFKA_CONTROLLER_LISTENER_NAMES: "CONTROLLER" KAFKA_LOG_DIRS: "/tmp/kraft-combined-logs" - volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - command: 'bash -c ''if [ ! -f /tmp/update_run.sh ]; then echo "ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi''' + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' schemaregistry0: image: confluentinc/cp-schema-registry:7.8.0 diff --git a/documentation/compose/ui-jmx-secured.yml b/documentation/compose/ui-jmx-secured.yml index 5d7ab8a00..531ec4368 100644 --- a/documentation/compose/ui-jmx-secured.yml +++ b/documentation/compose/ui-jmx-secured.yml @@ -49,6 +49,7 @@ services: KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT' KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' # CHMOD 700 FOR JMXREMOTE.* FILES KAFKA_JMX_OPTS: >- -Dcom.sun.management.jmxremote @@ -69,5 +70,3 @@ services: - ./jmx/servertruststore:/jmx/servertruststore - ./jmx/jmxremote.password:/jmx/jmxremote.password - ./jmx/jmxremote.access:/jmx/jmxremote.access - - ./scripts/update_run.sh:/tmp/update_run.sh - command: "bash -c 'if [ ! 
-f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" diff --git a/documentation/compose/ui-ldap.yaml b/documentation/compose/ui-ldap.yaml index bf0d80f74..94a8200a2 100644 --- a/documentation/compose/ui-ldap.yaml +++ b/documentation/compose/ui-ldap.yaml @@ -58,9 +58,7 @@ services: KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT' KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' - volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' schemaregistry0: image: confluentinc/cp-schema-registry:7.8.0 diff --git a/documentation/compose/ui-sasl.yaml b/documentation/compose/ui-sasl.yaml index 3653ea332..89708a54c 100644 --- a/documentation/compose/ui-sasl.yaml +++ b/documentation/compose/ui-sasl.yaml @@ -46,7 +46,6 @@ services: KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' KAFKA_SECURITY_PROTOCOL: 'SASL_PLAINTEXT' KAFKA_SUPER_USERS: 'User:admin,User:enzo' + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - ./jaas:/etc/kafka/jaas - command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" diff --git a/documentation/compose/ui-serdes.yaml b/documentation/compose/ui-serdes.yaml index 6b6a32995..6ac5701de 100644 --- a/documentation/compose/ui-serdes.yaml +++ b/documentation/compose/ui-serdes.yaml @@ -92,9 +92,7 @@ services: KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT' KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' - volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' schemaregistry0: image: confluentinc/cp-schema-registry:7.8.0 diff --git a/documentation/compose/ui-with-jmx-exporter.yaml b/documentation/compose/ui-with-jmx-exporter.yaml index eeb164830..df781ca75 100644 --- a/documentation/compose/ui-with-jmx-exporter.yaml +++ b/documentation/compose/ui-with-jmx-exporter.yaml @@ -25,10 +25,10 @@ services: KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' KAFKA_OPTS: -javaagent:/usr/share/jmx_exporter/jmx_prometheus_javaagent.jar=11001:/usr/share/jmx_exporter/kafka-broker.yml + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' volumes: - ./jmx-exporter:/usr/share/jmx_exporter/ - - ./scripts/update_run.sh:/tmp/update_run.sh - command: "bash -c 'if [ ! 
-f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /usr/share/jmx_exporter/kafka-prepare-and-run ; fi'" + command: "bash -c /usr/share/jmx_exporter/kafka-prepare-and-run" kafbat-ui: container_name: kafbat-ui From d8cc886e63acbb372fb35a605a32dc2fabbcea0e Mon Sep 17 00:00:00 2001 From: SIX Douglas Date: Sat, 8 Feb 2025 19:23:38 +0100 Subject: [PATCH 65/84] BE: Chore: Upgrade spring boot to 3.4.2 (#830) --- pom.xml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index 3426dbf86..f3556c3e8 100644 --- a/pom.xml +++ b/pom.xml @@ -32,19 +32,19 @@ 4.12.0 2.12.0 - 3.25.3 + 3.26.3 1.11.4 1.15.11 7.8.0 3.1.0 3.0.13 - 2.14.0 + 2.18.2 1.6.2 1.18.34 3.25.5 2.13.9 2.3 - 3.4.1 + 3.4.2 1.0.0 0.1.17 0.1.39 @@ -52,7 +52,7 @@ 0.3.0 33.3.1-jre - 5.11.2 + 5.11.4 5.14.2 4.12.0 1.20.4 From cb52c0648d1549c67ff8b3df8204567dcde6a489 Mon Sep 17 00:00:00 2001 From: Roman Zabaluev Date: Mon, 10 Feb 2025 14:27:39 +0400 Subject: [PATCH 66/84] Update FUNDING.yml Add open collective --- .github/FUNDING.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml index d8a0e0375..e450aaedd 100644 --- a/.github/FUNDING.yml +++ b/.github/FUNDING.yml @@ -1 +1,2 @@ github: [kafbat] +open_collective: kafka-ui From 844fbc908c9c40ba5bf5e7a646f543f6a8986c0a Mon Sep 17 00:00:00 2001 From: SIX Douglas Date: Mon, 10 Feb 2025 11:45:26 +0100 Subject: [PATCH 67/84] BE: Clean some Sonar issues (#832) --- api/pom.xml | 2 +- .../io/kafbat/ui/exception/CelException.java | 5 +- .../GlobalErrorWebExceptionHandler.java | 16 +++--- .../ui/model/rbac/provider/Provider.java | 8 +-- .../kafbat/ui/serdes/builtin/Base64Serde.java | 2 +- .../io/kafbat/ui/serdes/builtin/HexSerde.java | 2 +- .../ui/serdes/builtin/ProtobufFileSerde.java | 4 +- .../kafbat/ui/service/audit/AuditRecord.java | 14 +++--- .../integration/odd/ConnectorInfo.java | 16 +++--- .../odd/schema/JsonSchemaExtractor.java | 30 ++++------- .../kafbat/ui/service/ksql/KsqlGrammar.java | 11 ++-- .../service/metrics/JmxMetricsFormatter.java | 8 +-- .../jsonschema/AvroJsonSchemaConverter.java | 3 +- .../kafbat/ui/KafkaConnectServiceTests.java | 50 +++++++++---------- .../io/kafbat/ui/KafkaConsumerGroupTests.java | 2 +- .../java/io/kafbat/ui/KafkaConsumerTests.java | 36 ++++++------- .../io/kafbat/ui/KafkaTopicCreateTests.java | 4 +- .../java/io/kafbat/ui/ReadOnlyModeTests.java | 10 ++-- .../kafbat/ui/SchemaRegistryServiceTests.java | 14 +++--- .../AzureEntraLoginCallbackHandlerTest.java | 18 +++---- .../azure/AzureEntraOAuthBearerTokenTest.java | 2 +- .../ApplicationConfigControllerTest.java | 2 +- .../kafbat/ui/emitter/MessageFiltersTest.java | 2 +- .../ui/serdes/builtin/HexSerdeTest.java | 2 +- .../serdes/builtin/ProtobufRawSerdeTest.java | 2 +- .../service/ApplicationInfoServiceTest.java | 4 +- .../java/io/kafbat/ui/service/ConfigTest.java | 6 +-- .../io/kafbat/ui/service/LogDirsTest.java | 22 ++++---- .../ui/service/OffsetsResetServiceTest.java | 2 +- .../service/SchemaRegistryPaginationTest.java | 4 +- .../kafbat/ui/service/SendAndReadTests.java | 22 ++++---- .../service/TopicsServicePaginationTest.java | 18 +++---- .../service/audit/AuditIntegrationTest.java | 2 +- .../ui/service/masking/DataMaskingTest.java | 11 ++-- 34 files changed, 172 insertions(+), 184 deletions(-) diff --git a/api/pom.xml b/api/pom.xml index bbeb9dff8..dd2c3a378 100644 --- a/api/pom.xml +++ b/api/pom.xml @@ -329,7 +329,7 
@@ org.apache.maven.plugins maven-surefire-plugin - @{argLine} --illegal-access=permit + @{argLine} diff --git a/api/src/main/java/io/kafbat/ui/exception/CelException.java b/api/src/main/java/io/kafbat/ui/exception/CelException.java index e904368f6..d71d0da20 100644 --- a/api/src/main/java/io/kafbat/ui/exception/CelException.java +++ b/api/src/main/java/io/kafbat/ui/exception/CelException.java @@ -1,7 +1,10 @@ package io.kafbat.ui.exception; +import lombok.Getter; + +@Getter public class CelException extends CustomBaseException { - private String celOriginalExpression; + private final String celOriginalExpression; public CelException(String celOriginalExpression, String errorMessage) { super("CEL error. Original expression: %s. Error message: %s".formatted(celOriginalExpression, errorMessage)); diff --git a/api/src/main/java/io/kafbat/ui/exception/GlobalErrorWebExceptionHandler.java b/api/src/main/java/io/kafbat/ui/exception/GlobalErrorWebExceptionHandler.java index 61236f801..e6c0c76a5 100644 --- a/api/src/main/java/io/kafbat/ui/exception/GlobalErrorWebExceptionHandler.java +++ b/api/src/main/java/io/kafbat/ui/exception/GlobalErrorWebExceptionHandler.java @@ -54,18 +54,18 @@ private Mono renderErrorResponse(ServerRequest request) { Throwable throwable = getError(request); // validation and params binding errors - if (throwable instanceof WebExchangeBindException) { - return render((WebExchangeBindException) throwable, request); + if (throwable instanceof WebExchangeBindException webExchangeBindException) { + return render(webExchangeBindException, request); } // requests mapping & access errors - if (throwable instanceof ResponseStatusException) { - return render((ResponseStatusException) throwable, request); + if (throwable instanceof ResponseStatusException responseStatusException) { + return render(responseStatusException, request); } // custom exceptions - if (throwable instanceof CustomBaseException) { - return render((CustomBaseException) throwable, request); + if (throwable instanceof CustomBaseException customBaseException) { + return render(customBaseException, request); } return renderDefault(throwable, request); @@ -151,9 +151,7 @@ private String requestId(ServerRequest request) { } private Consumer headers(ServerRequest request) { - return (HttpHeaders headers) -> { - CorsGlobalConfiguration.fillCorsHeader(headers, request.exchange().getRequest()); - }; + return (HttpHeaders headers) -> CorsGlobalConfiguration.fillCorsHeader(headers, request.exchange().getRequest()); } private BigDecimal currentTimestamp() { diff --git a/api/src/main/java/io/kafbat/ui/model/rbac/provider/Provider.java b/api/src/main/java/io/kafbat/ui/model/rbac/provider/Provider.java index 94b1cd039..3fbae2423 100644 --- a/api/src/main/java/io/kafbat/ui/model/rbac/provider/Provider.java +++ b/api/src/main/java/io/kafbat/ui/model/rbac/provider/Provider.java @@ -21,11 +21,11 @@ public static Provider fromString(String name) { } public static class Name { - public static String GOOGLE = "google"; - public static String GITHUB = "github"; - public static String COGNITO = "cognito"; + public static final String GOOGLE = "google"; + public static final String GITHUB = "github"; + public static final String COGNITO = "cognito"; - public static String OAUTH = "oauth"; + public static final String OAUTH = "oauth"; } } diff --git a/api/src/main/java/io/kafbat/ui/serdes/builtin/Base64Serde.java b/api/src/main/java/io/kafbat/ui/serdes/builtin/Base64Serde.java index 02e56ff22..515354695 100644 --- 
a/api/src/main/java/io/kafbat/ui/serdes/builtin/Base64Serde.java +++ b/api/src/main/java/io/kafbat/ui/serdes/builtin/Base64Serde.java @@ -40,7 +40,7 @@ public Serde.Serializer serializer(String topic, Serde.Target type) { return inputString -> { inputString = inputString.trim(); // it is actually a hack to provide ability to sent empty array as a key/value - if (inputString.length() == 0) { + if (inputString.isEmpty()) { return new byte[] {}; } return decoder.decode(inputString); diff --git a/api/src/main/java/io/kafbat/ui/serdes/builtin/HexSerde.java b/api/src/main/java/io/kafbat/ui/serdes/builtin/HexSerde.java index a3c958a06..ab7f66ebb 100644 --- a/api/src/main/java/io/kafbat/ui/serdes/builtin/HexSerde.java +++ b/api/src/main/java/io/kafbat/ui/serdes/builtin/HexSerde.java @@ -62,7 +62,7 @@ public Serializer serializer(String topic, Target type) { return input -> { input = input.trim(); // it is a hack to provide ability to sent empty array as a key/value - if (input.length() == 0) { + if (input.isEmpty()) { return new byte[] {}; } return HexFormat.of().parseHex(prepareInputForParse(input)); diff --git a/api/src/main/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerde.java b/api/src/main/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerde.java index 2c0939c03..20ea47a06 100644 --- a/api/src/main/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerde.java +++ b/api/src/main/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerde.java @@ -383,9 +383,9 @@ private ProtoFile loadKnownProtoFile(String path, Descriptors.FileDescriptor fil String protoFileString = null; // know type file contains either message or enum if (!fileDescriptor.getMessageTypes().isEmpty()) { - protoFileString = new ProtobufSchema(fileDescriptor.getMessageTypes().get(0)).canonicalString(); + protoFileString = new ProtobufSchema(fileDescriptor.getMessageTypes().getFirst()).canonicalString(); } else if (!fileDescriptor.getEnumTypes().isEmpty()) { - protoFileString = new ProtobufSchema(fileDescriptor.getEnumTypes().get(0)).canonicalString(); + protoFileString = new ProtobufSchema(fileDescriptor.getEnumTypes().getFirst()).canonicalString(); } else { throw new IllegalStateException(); } diff --git a/api/src/main/java/io/kafbat/ui/service/audit/AuditRecord.java b/api/src/main/java/io/kafbat/ui/service/audit/AuditRecord.java index d7ef659bf..2bb69c20f 100644 --- a/api/src/main/java/io/kafbat/ui/service/audit/AuditRecord.java +++ b/api/src/main/java/io/kafbat/ui/service/audit/AuditRecord.java @@ -53,14 +53,12 @@ static OperationResult successful() { } static OperationResult error(Throwable th) { - OperationError err = OperationError.UNRECOGNIZED_ERROR; - if (th instanceof AccessDeniedException) { - err = OperationError.ACCESS_DENIED; - } else if (th instanceof ValidationException) { - err = OperationError.VALIDATION_ERROR; - } else if (th instanceof CustomBaseException) { - err = OperationError.EXECUTION_ERROR; - } + OperationError err = switch (th) { + case AccessDeniedException ignored -> OperationError.ACCESS_DENIED; + case ValidationException ignored -> OperationError.VALIDATION_ERROR; + case CustomBaseException ignored -> OperationError.EXECUTION_ERROR; + case null, default -> OperationError.UNRECOGNIZED_ERROR; + }; return new OperationResult(false, err); } diff --git a/api/src/main/java/io/kafbat/ui/service/integration/odd/ConnectorInfo.java b/api/src/main/java/io/kafbat/ui/service/integration/odd/ConnectorInfo.java index c1703f0b1..186c4a8a1 100644 --- a/api/src/main/java/io/kafbat/ui/service/integration/odd/ConnectorInfo.java +++ 
b/api/src/main/java/io/kafbat/ui/service/integration/odd/ConnectorInfo.java @@ -5,7 +5,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.function.Function; +import java.util.function.UnaryOperator; import java.util.stream.Stream; import javax.annotation.Nullable; import org.apache.commons.collections.CollectionUtils; @@ -22,7 +22,7 @@ static ConnectorInfo extract(String className, ConnectorTypeDTO type, Map config, List topicsFromApi, // can be empty for old Connect API versions - Function topicOddrnBuilder) { + UnaryOperator topicOddrnBuilder) { return switch (className) { case "org.apache.kafka.connect.file.FileStreamSinkConnector", "org.apache.kafka.connect.file.FileStreamSourceConnector", @@ -43,7 +43,7 @@ static ConnectorInfo extract(String className, private static ConnectorInfo extractFileIoConnector(ConnectorTypeDTO type, List topics, Map config, - Function topicOddrnBuilder) { + UnaryOperator topicOddrnBuilder) { return new ConnectorInfo( extractInputs(type, topics, config, topicOddrnBuilder), extractOutputs(type, topics, config, topicOddrnBuilder) @@ -53,7 +53,7 @@ private static ConnectorInfo extractFileIoConnector(ConnectorTypeDTO type, private static ConnectorInfo extractJdbcSink(ConnectorTypeDTO type, List topics, Map config, - Function topicOddrnBuilder) { + UnaryOperator topicOddrnBuilder) { String tableNameFormat = (String) config.getOrDefault("table.name.format", "${topic}"); List targetTables = extractTopicNamesBestEffort(topics, config) .map(topic -> tableNameFormat.replace("${kafka}", topic)) @@ -106,7 +106,7 @@ private static ConnectorInfo extractDebeziumMysql(Map config) { private static ConnectorInfo extractS3Sink(ConnectorTypeDTO type, List topics, Map config, - Function topicOrrdnBuilder) { + UnaryOperator topicOrrdnBuilder) { String bucketName = (String) config.get("s3.bucket.name"); String topicsDir = (String) config.getOrDefault("topics.dir", "topics"); String directoryDelim = (String) config.getOrDefault("directory.delim", "/"); @@ -122,7 +122,7 @@ private static ConnectorInfo extractS3Sink(ConnectorTypeDTO type, private static List extractInputs(ConnectorTypeDTO type, List topicsFromApi, Map config, - Function topicOrrdnBuilder) { + UnaryOperator topicOrrdnBuilder) { return type == ConnectorTypeDTO.SINK ? extractTopicsOddrns(config, topicsFromApi, topicOrrdnBuilder) : List.of(); @@ -131,7 +131,7 @@ private static List extractInputs(ConnectorTypeDTO type, private static List extractOutputs(ConnectorTypeDTO type, List topicsFromApi, Map config, - Function topicOrrdnBuilder) { + UnaryOperator topicOrrdnBuilder) { return type == ConnectorTypeDTO.SOURCE ? 
extractTopicsOddrns(config, topicsFromApi, topicOrrdnBuilder) : List.of(); @@ -158,7 +158,7 @@ private static Stream extractTopicNamesBestEffort( private static List extractTopicsOddrns(Map config, List topicsFromApi, - Function topicOrrdnBuilder) { + UnaryOperator topicOrrdnBuilder) { return extractTopicNamesBestEffort(topicsFromApi, config) .map(topicOrrdnBuilder) .toList(); diff --git a/api/src/main/java/io/kafbat/ui/service/integration/odd/schema/JsonSchemaExtractor.java b/api/src/main/java/io/kafbat/ui/service/integration/odd/schema/JsonSchemaExtractor.java index 328fcda26..aadf93d7c 100644 --- a/api/src/main/java/io/kafbat/ui/service/integration/odd/schema/JsonSchemaExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/integration/odd/schema/JsonSchemaExtractor.java @@ -286,25 +286,17 @@ private static DataSetField createDataSetField(Schema schema, } private static DataSetFieldType.TypeEnum mapType(Schema type) { - if (type instanceof NumberSchema) { - return DataSetFieldType.TypeEnum.NUMBER; - } - if (type instanceof StringSchema) { - return DataSetFieldType.TypeEnum.STRING; - } - if (type instanceof BooleanSchema || type instanceof TrueSchema || type instanceof FalseSchema) { - return DataSetFieldType.TypeEnum.BOOLEAN; - } - if (type instanceof ObjectSchema) { - return DataSetFieldType.TypeEnum.STRUCT; - } - if (type instanceof ReferenceSchema s) { - return mapType(s.getReferredSchema()); - } - if (type instanceof CombinedSchema) { - return DataSetFieldType.TypeEnum.UNION; - } - return DataSetFieldType.TypeEnum.UNKNOWN; + return switch (type) { + case NumberSchema ignored -> DataSetFieldType.TypeEnum.NUMBER; + case StringSchema ignored -> DataSetFieldType.TypeEnum.STRING; + case BooleanSchema ignored -> DataSetFieldType.TypeEnum.BOOLEAN; + case TrueSchema ignored -> DataSetFieldType.TypeEnum.BOOLEAN; + case FalseSchema ignored -> DataSetFieldType.TypeEnum.BOOLEAN; + case ObjectSchema ignored -> DataSetFieldType.TypeEnum.STRUCT; + case ReferenceSchema referenceSchema -> mapType(referenceSchema.getReferredSchema()); + case CombinedSchema ignored -> DataSetFieldType.TypeEnum.UNION; + default -> DataSetFieldType.TypeEnum.UNKNOWN; + }; } } diff --git a/api/src/main/java/io/kafbat/ui/service/ksql/KsqlGrammar.java b/api/src/main/java/io/kafbat/ui/service/ksql/KsqlGrammar.java index 3243841eb..1068ac193 100644 --- a/api/src/main/java/io/kafbat/ui/service/ksql/KsqlGrammar.java +++ b/api/src/main/java/io/kafbat/ui/service/ksql/KsqlGrammar.java @@ -74,13 +74,10 @@ public static CaseInsensitiveStream from(CharStream stream) { @Override public int LA(final int i) { final int result = stream.LA(i); - switch (result) { - case 0: - case IntStream.EOF: - return result; - default: - return Character.toUpperCase(result); - } + return switch (result) { + case 0, IntStream.EOF -> result; + default -> Character.toUpperCase(result); + }; } }; } diff --git a/api/src/main/java/io/kafbat/ui/service/metrics/JmxMetricsFormatter.java b/api/src/main/java/io/kafbat/ui/service/metrics/JmxMetricsFormatter.java index e4eeed278..37323c7dd 100644 --- a/api/src/main/java/io/kafbat/ui/service/metrics/JmxMetricsFormatter.java +++ b/api/src/main/java/io/kafbat/ui/service/metrics/JmxMetricsFormatter.java @@ -51,10 +51,10 @@ private static Optional convertNumericValue(Object value) { return Optional.empty(); } try { - if (value instanceof Long) { - return Optional.of(new BigDecimal((Long) value)); - } else if (value instanceof Integer) { - return Optional.of(new BigDecimal((Integer) value)); + if (value instanceof 
Long longValue) { + return Optional.of(new BigDecimal(longValue)); + } else if (value instanceof Integer integerValue) { + return Optional.of(new BigDecimal(integerValue)); } return Optional.of(new BigDecimal(value.toString())); } catch (NumberFormatException nfe) { diff --git a/api/src/main/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverter.java b/api/src/main/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverter.java index b5640eb06..4756c36b0 100644 --- a/api/src/main/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverter.java +++ b/api/src/main/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverter.java @@ -153,12 +153,11 @@ private JsonType convertType(Schema schema) { case INT, LONG -> new SimpleJsonType(JsonType.Type.INTEGER); case MAP, RECORD -> new SimpleJsonType(JsonType.Type.OBJECT); case ENUM -> new EnumJsonType(schema.getEnumSymbols()); - case BYTES, STRING -> new SimpleJsonType(JsonType.Type.STRING); case NULL -> new SimpleJsonType(JsonType.Type.NULL); case ARRAY -> new SimpleJsonType(JsonType.Type.ARRAY); case FIXED, FLOAT, DOUBLE -> new SimpleJsonType(JsonType.Type.NUMBER); case BOOLEAN -> new SimpleJsonType(JsonType.Type.BOOLEAN); - default -> new SimpleJsonType(JsonType.Type.STRING); + default -> new SimpleJsonType(JsonType.Type.STRING); // BYTES, STRING and the remaining possibilities }; } } diff --git a/api/src/test/java/io/kafbat/ui/KafkaConnectServiceTests.java b/api/src/test/java/io/kafbat/ui/KafkaConnectServiceTests.java index c5fbb14b4..82d7a3041 100644 --- a/api/src/test/java/io/kafbat/ui/KafkaConnectServiceTests.java +++ b/api/src/test/java/io/kafbat/ui/KafkaConnectServiceTests.java @@ -24,12 +24,10 @@ import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.core.ParameterizedTypeReference; -import org.springframework.http.HttpStatus; -import org.springframework.test.web.reactive.server.ExchangeResult; import org.springframework.test.web.reactive.server.WebTestClient; @Slf4j -public class KafkaConnectServiceTests extends AbstractIntegrationTest { +class KafkaConnectServiceTests extends AbstractIntegrationTest { private final String connectName = "kafka-connect"; private final String connectorName = UUID.randomUUID().toString(); private final Map config = Map.of( @@ -46,7 +44,7 @@ public class KafkaConnectServiceTests extends AbstractIntegrationTest { @BeforeEach - public void setUp() { + void setUp() { webTestClient.post() .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors", LOCAL, connectName) @@ -64,7 +62,7 @@ public void setUp() { } @AfterEach - public void tearDown() { + void tearDown() { webTestClient.delete() .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}", LOCAL, connectName, connectorName) @@ -73,7 +71,7 @@ public void tearDown() { } @Test - public void shouldListAllConnectors() { + void shouldListAllConnectors() { webTestClient.get() .uri("/api/clusters/{clusterName}/connectors", LOCAL) .exchange() @@ -84,7 +82,7 @@ public void shouldListAllConnectors() { } @Test - public void shouldFilterByNameConnectors() { + void shouldFilterByNameConnectors() { webTestClient.get() .uri( "/api/clusters/{clusterName}/connectors?search={search}", @@ -98,7 +96,7 @@ public void shouldFilterByNameConnectors() { } @Test - public void shouldFilterByStatusConnectors() { + void shouldFilterByStatusConnectors() { webTestClient.get() .uri( "/api/clusters/{clusterName}/connectors?search={search}", @@ -112,7 +110,7 @@ public void 
shouldFilterByStatusConnectors() { } @Test - public void shouldFilterByTypeConnectors() { + void shouldFilterByTypeConnectors() { webTestClient.get() .uri( "/api/clusters/{clusterName}/connectors?search={search}", @@ -126,7 +124,7 @@ public void shouldFilterByTypeConnectors() { } @Test - public void shouldNotFilterConnectors() { + void shouldNotFilterConnectors() { webTestClient.get() .uri( "/api/clusters/{clusterName}/connectors?search={search}", @@ -140,7 +138,7 @@ public void shouldNotFilterConnectors() { } @Test - public void shouldListConnectors() { + void shouldListConnectors() { webTestClient.get() .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors", LOCAL, connectName) .exchange() @@ -150,7 +148,7 @@ public void shouldListConnectors() { } @Test - public void shouldReturnNotFoundForNonExistingCluster() { + void shouldReturnNotFoundForNonExistingCluster() { webTestClient.get() .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors", "nonExistingCluster", connectName) @@ -159,7 +157,7 @@ public void shouldReturnNotFoundForNonExistingCluster() { } @Test - public void shouldReturnNotFoundForNonExistingConnectName() { + void shouldReturnNotFoundForNonExistingConnectName() { webTestClient.get() .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors", LOCAL, "nonExistingConnect") @@ -168,7 +166,7 @@ public void shouldReturnNotFoundForNonExistingConnectName() { } @Test - public void shouldRetrieveConnector() { + void shouldRetrieveConnector() { ConnectorDTO expected = new ConnectorDTO() .connect(connectName) .status(new ConnectorStatusDTO() @@ -190,7 +188,7 @@ public void shouldRetrieveConnector() { } @Test - public void shouldUpdateConfig() { + void shouldUpdateConfig() { webTestClient.put() .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}/config", LOCAL, connectName, connectorName) @@ -221,7 +219,7 @@ public void shouldUpdateConfig() { } @Test - public void shouldReturn400WhenConnectReturns400ForInvalidConfigCreate() { + void shouldReturn400WhenConnectReturns400ForInvalidConfigCreate() { var connectorName = UUID.randomUUID().toString(); webTestClient.post() .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors", LOCAL, connectName) @@ -247,7 +245,7 @@ public void shouldReturn400WhenConnectReturns400ForInvalidConfigCreate() { } @Test - public void shouldReturn400WhenConnectReturns500ForInvalidConfigCreate() { + void shouldReturn400WhenConnectReturns500ForInvalidConfigCreate() { var connectorName = UUID.randomUUID().toString(); webTestClient.post() .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors", LOCAL, connectName) @@ -272,7 +270,7 @@ public void shouldReturn400WhenConnectReturns500ForInvalidConfigCreate() { @Test @SuppressWarnings("checkstyle:LineLength") - public void shouldReturn400WhenConnectReturns400ForInvalidConfigUpdate() { + void shouldReturn400WhenConnectReturns400ForInvalidConfigUpdate() { webTestClient.put() .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}/config", LOCAL, connectName, connectorName) @@ -312,7 +310,7 @@ public void shouldReturn400WhenConnectReturns400ForInvalidConfigUpdate() { } @Test - public void shouldReturn400WhenConnectReturns500ForInvalidConfigUpdate() { + void shouldReturn400WhenConnectReturns500ForInvalidConfigUpdate() { webTestClient.put() .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}/config", LOCAL, connectName, connectorName) @@ -341,7 +339,7 @@ public void 
shouldReturn400WhenConnectReturns500ForInvalidConfigUpdate() { } @Test - public void shouldRetrieveConnectorPlugins() { + void shouldRetrieveConnectorPlugins() { webTestClient.get() .uri("/api/clusters/{clusterName}/connects/{connectName}/plugins", LOCAL, connectName) .exchange() @@ -351,7 +349,7 @@ public void shouldRetrieveConnectorPlugins() { } @Test - public void shouldSuccessfullyValidateConnectorPluginConfiguration() { + void shouldSuccessfullyValidateConnectorPluginConfiguration() { var pluginName = "FileStreamSinkConnector"; var path = "/api/clusters/{clusterName}/connects/{connectName}/plugins/{pluginName}/config/validate"; @@ -372,7 +370,7 @@ public void shouldSuccessfullyValidateConnectorPluginConfiguration() { } @Test - public void shouldValidateAndReturnErrorsOfConnectorPluginConfiguration() { + void shouldValidateAndReturnErrorsOfConnectorPluginConfiguration() { var pluginName = "FileStreamSinkConnector"; var path = "/api/clusters/{clusterName}/connects/{connectName}/plugins/{pluginName}/config/validate"; @@ -398,13 +396,13 @@ public void shouldValidateAndReturnErrorsOfConnectorPluginConfiguration() { .findFirst().orElseThrow(); assertEquals( "Invalid value 0 for configuration tasks.max: Value must be at least 1", - error.get(0) + error.getFirst() ); }); } @Test - public void shouldReturn400WhenTryingToCreateConnectorWithExistingName() { + void shouldReturn400WhenTryingToCreateConnectorWithExistingName() { webTestClient.post() .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors", LOCAL, connectName) .bodyValue(new NewConnectorDTO() @@ -422,7 +420,7 @@ public void shouldReturn400WhenTryingToCreateConnectorWithExistingName() { } @Test - public void shouldResetConnectorWhenInStoppedState() { + void shouldResetConnectorWhenInStoppedState() { webTestClient.get() .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}", @@ -455,7 +453,7 @@ public void shouldResetConnectorWhenInStoppedState() { } @Test - public void shouldReturn400WhenResettingConnectorInRunningState() { + void shouldReturn400WhenResettingConnectorInRunningState() { webTestClient.get() .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}", diff --git a/api/src/test/java/io/kafbat/ui/KafkaConsumerGroupTests.java b/api/src/test/java/io/kafbat/ui/KafkaConsumerGroupTests.java index b1bf4baa7..c23ea5fb0 100644 --- a/api/src/test/java/io/kafbat/ui/KafkaConsumerGroupTests.java +++ b/api/src/test/java/io/kafbat/ui/KafkaConsumerGroupTests.java @@ -27,7 +27,7 @@ import reactor.core.publisher.Mono; @Slf4j -public class KafkaConsumerGroupTests extends AbstractIntegrationTest { +class KafkaConsumerGroupTests extends AbstractIntegrationTest { @Autowired WebTestClient webTestClient; diff --git a/api/src/test/java/io/kafbat/ui/KafkaConsumerTests.java b/api/src/test/java/io/kafbat/ui/KafkaConsumerTests.java index afe5f2d87..97ab381e6 100644 --- a/api/src/test/java/io/kafbat/ui/KafkaConsumerTests.java +++ b/api/src/test/java/io/kafbat/ui/KafkaConsumerTests.java @@ -25,14 +25,14 @@ import reactor.core.publisher.Mono; @Slf4j -public class KafkaConsumerTests extends AbstractIntegrationTest { +class KafkaConsumerTests extends AbstractIntegrationTest { @Autowired private WebTestClient webTestClient; @Test - public void shouldDeleteRecords() { + void shouldDeleteRecords() { var topicName = UUID.randomUUID().toString(); webTestClient.post() .uri("/api/clusters/{clusterName}/topics", LOCAL) @@ -97,7 +97,7 @@ public void shouldDeleteRecords() { } @Test - public void 
shouldIncreasePartitionsUpTo10() { + void shouldIncreasePartitionsUpTo10() { var topicName = UUID.randomUUID().toString(); webTestClient.post() .uri("/api/clusters/{clusterName}/topics", LOCAL) @@ -144,7 +144,7 @@ public void shouldIncreasePartitionsUpTo10() { } @Test - public void shouldReturn404ForNonExistingTopic() { + void shouldReturn404ForNonExistingTopic() { var topicName = UUID.randomUUID().toString(); webTestClient.delete() @@ -161,7 +161,7 @@ public void shouldReturn404ForNonExistingTopic() { } @Test - public void shouldReturnConfigsForBroker() { + void shouldReturnConfigsForBroker() { List configs = webTestClient.get() .uri("/api/clusters/{clusterName}/brokers/{id}/configs", LOCAL, @@ -175,15 +175,16 @@ public void shouldReturnConfigsForBroker() { Assertions.assertNotNull(configs); Assertions.assertFalse(configs.isEmpty()); - Assertions.assertNotNull(configs.get(0).getName()); - Assertions.assertNotNull(configs.get(0).getIsReadOnly()); - Assertions.assertNotNull(configs.get(0).getIsSensitive()); - Assertions.assertNotNull(configs.get(0).getSource()); - Assertions.assertNotNull(configs.get(0).getSynonyms()); + BrokerConfigDTO brokerConfigDto = configs.getFirst(); + Assertions.assertNotNull(brokerConfigDto.getName()); + Assertions.assertNotNull(brokerConfigDto.getIsReadOnly()); + Assertions.assertNotNull(brokerConfigDto.getIsSensitive()); + Assertions.assertNotNull(brokerConfigDto.getSource()); + Assertions.assertNotNull(brokerConfigDto.getSynonyms()); } @Test - public void shouldReturn404ForNonExistingBroker() { + void shouldReturn404ForNonExistingBroker() { webTestClient.get() .uri("/api/clusters/{clusterName}/brokers/{id}/configs", LOCAL, @@ -194,7 +195,7 @@ public void shouldReturn404ForNonExistingBroker() { } @Test - public void shouldRetrieveTopicConfig() { + void shouldRetrieveTopicConfig() { var topicName = UUID.randomUUID().toString(); webTestClient.post() @@ -220,10 +221,11 @@ public void shouldRetrieveTopicConfig() { Assertions.assertNotNull(configs); Assertions.assertFalse(configs.isEmpty()); - Assertions.assertNotNull(configs.get(0).getName()); - Assertions.assertNotNull(configs.get(0).getIsReadOnly()); - Assertions.assertNotNull(configs.get(0).getIsSensitive()); - Assertions.assertNotNull(configs.get(0).getSource()); - Assertions.assertNotNull(configs.get(0).getSynonyms()); + TopicConfigDTO topicConfigDto = configs.getFirst(); + Assertions.assertNotNull(topicConfigDto.getName()); + Assertions.assertNotNull(topicConfigDto.getIsReadOnly()); + Assertions.assertNotNull(topicConfigDto.getIsSensitive()); + Assertions.assertNotNull(topicConfigDto.getSource()); + Assertions.assertNotNull(topicConfigDto.getSynonyms()); } } diff --git a/api/src/test/java/io/kafbat/ui/KafkaTopicCreateTests.java b/api/src/test/java/io/kafbat/ui/KafkaTopicCreateTests.java index fcc5a96fb..f4c5ca0b8 100644 --- a/api/src/test/java/io/kafbat/ui/KafkaTopicCreateTests.java +++ b/api/src/test/java/io/kafbat/ui/KafkaTopicCreateTests.java @@ -7,13 +7,13 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.web.reactive.server.WebTestClient; -public class KafkaTopicCreateTests extends AbstractIntegrationTest { +class KafkaTopicCreateTests extends AbstractIntegrationTest { @Autowired private WebTestClient webTestClient; private TopicCreationDTO topicCreation; @BeforeEach - public void setUpBefore() { + void setUpBefore() { this.topicCreation = new TopicCreationDTO() .replicationFactor(1) .partitions(3) diff --git 
a/api/src/test/java/io/kafbat/ui/ReadOnlyModeTests.java b/api/src/test/java/io/kafbat/ui/ReadOnlyModeTests.java index e6525b14a..df59c6c35 100644 --- a/api/src/test/java/io/kafbat/ui/ReadOnlyModeTests.java +++ b/api/src/test/java/io/kafbat/ui/ReadOnlyModeTests.java @@ -9,13 +9,13 @@ import org.springframework.http.HttpStatus; import org.springframework.test.web.reactive.server.WebTestClient; -public class ReadOnlyModeTests extends AbstractIntegrationTest { +class ReadOnlyModeTests extends AbstractIntegrationTest { @Autowired private WebTestClient webTestClient; @Test - public void shouldCreateTopicForNonReadonlyCluster() { + void shouldCreateTopicForNonReadonlyCluster() { var topicName = UUID.randomUUID().toString(); webTestClient.post() .uri("/api/clusters/{clusterName}/topics", LOCAL) @@ -31,7 +31,7 @@ public void shouldCreateTopicForNonReadonlyCluster() { } @Test - public void shouldNotCreateTopicForReadonlyCluster() { + void shouldNotCreateTopicForReadonlyCluster() { var topicName = UUID.randomUUID().toString(); webTestClient.post() .uri("/api/clusters/{clusterName}/topics", SECOND_LOCAL) @@ -47,7 +47,7 @@ public void shouldNotCreateTopicForReadonlyCluster() { } @Test - public void shouldUpdateTopicForNonReadonlyCluster() { + void shouldUpdateTopicForNonReadonlyCluster() { var topicName = UUID.randomUUID().toString(); webTestClient.post() .uri("/api/clusters/{clusterName}/topics", LOCAL) @@ -73,7 +73,7 @@ public void shouldUpdateTopicForNonReadonlyCluster() { } @Test - public void shouldNotUpdateTopicForReadonlyCluster() { + void shouldNotUpdateTopicForReadonlyCluster() { var topicName = UUID.randomUUID().toString(); webTestClient.patch() .uri("/api/clusters/{clusterName}/topics/{topicName}", SECOND_LOCAL, topicName) diff --git a/api/src/test/java/io/kafbat/ui/SchemaRegistryServiceTests.java b/api/src/test/java/io/kafbat/ui/SchemaRegistryServiceTests.java index 2653b7bac..a63a61b60 100644 --- a/api/src/test/java/io/kafbat/ui/SchemaRegistryServiceTests.java +++ b/api/src/test/java/io/kafbat/ui/SchemaRegistryServiceTests.java @@ -32,12 +32,12 @@ class SchemaRegistryServiceTests extends AbstractIntegrationTest { String subject; @BeforeEach - public void setUpBefore() { + void setUpBefore() { this.subject = UUID.randomUUID().toString(); } @Test - public void should404WhenGetAllSchemasForUnknownCluster() { + void should404WhenGetAllSchemasForUnknownCluster() { webTestClient .get() .uri("/api/clusters/unknown-cluster/schemas") @@ -46,7 +46,7 @@ public void should404WhenGetAllSchemasForUnknownCluster() { } @Test - public void shouldReturn404WhenGetLatestSchemaByNonExistingSubject() { + void shouldReturn404WhenGetLatestSchemaByNonExistingSubject() { String unknownSchema = "unknown-schema"; webTestClient .get() @@ -244,7 +244,7 @@ void shouldCreateNewProtobufSchemaWithRefs() { } @Test - public void shouldReturnBackwardAsGlobalCompatibilityLevelByDefault() { + void shouldReturnBackwardAsGlobalCompatibilityLevelByDefault() { webTestClient .get() .uri("/api/clusters/{clusterName}/schemas/compatibility", LOCAL) @@ -260,7 +260,7 @@ public void shouldReturnBackwardAsGlobalCompatibilityLevelByDefault() { } @Test - public void shouldReturnNotEmptyResponseWhenGetAllSchemas() { + void shouldReturnNotEmptyResponseWhenGetAllSchemas() { createNewSubjectAndAssert(subject); webTestClient @@ -287,7 +287,7 @@ public void shouldReturnNotEmptyResponseWhenGetAllSchemas() { } @Test - public void shouldOkWhenCreateNewSchemaThenGetAndUpdateItsCompatibilityLevel() { + void 
shouldOkWhenCreateNewSchemaThenGetAndUpdateItsCompatibilityLevel() { createNewSubjectAndAssert(subject); //Get the created schema and check its items @@ -366,7 +366,7 @@ private void assertSchemaWhenGetLatest( List responseBody = listEntityExchangeResult.getResponseBody(); Assertions.assertNotNull(responseBody); Assertions.assertEquals(1, responseBody.size()); - SchemaSubjectDTO actualSchema = responseBody.get(0); + SchemaSubjectDTO actualSchema = responseBody.getFirst(); Assertions.assertNotNull(actualSchema); Assertions.assertEquals(subject, actualSchema.getSubject()); Assertions.assertEquals("\"string\"", actualSchema.getSchema()); diff --git a/api/src/test/java/io/kafbat/ui/config/auth/azure/AzureEntraLoginCallbackHandlerTest.java b/api/src/test/java/io/kafbat/ui/config/auth/azure/AzureEntraLoginCallbackHandlerTest.java index 16fff06b9..701f7cf3d 100644 --- a/api/src/test/java/io/kafbat/ui/config/auth/azure/AzureEntraLoginCallbackHandlerTest.java +++ b/api/src/test/java/io/kafbat/ui/config/auth/azure/AzureEntraLoginCallbackHandlerTest.java @@ -32,7 +32,7 @@ import reactor.core.publisher.Mono; @ExtendWith(MockitoExtension.class) -public class AzureEntraLoginCallbackHandlerTest { +class AzureEntraLoginCallbackHandlerTest { // These are not real tokens. It was generated using fake values with an invalid signature, // so it is safe to store here. @@ -67,13 +67,13 @@ public class AzureEntraLoginCallbackHandlerTest { private AzureEntraLoginCallbackHandler azureEntraLoginCallbackHandler; @BeforeEach - public void beforeEach() { + void beforeEach() { azureEntraLoginCallbackHandler = new AzureEntraLoginCallbackHandler(); azureEntraLoginCallbackHandler.setTokenCredential(tokenCredential); } @Test - public void shouldProvideTokenToCallbackWithSuccessfulTokenRequest() throws UnsupportedCallbackException { + void shouldProvideTokenToCallbackWithSuccessfulTokenRequest() throws UnsupportedCallbackException { Map configs = Map.of("bootstrap.servers", List.of("test-eh.servicebus.windows.net:9093")); when(tokenCredential.getToken(any(TokenRequestContext.class))).thenReturn(Mono.just(accessToken)); @@ -105,7 +105,7 @@ public void shouldProvideTokenToCallbackWithSuccessfulTokenRequest() throws Unsu } @Test - public void shouldProvideErrorToCallbackWithTokenError() throws UnsupportedCallbackException { + void shouldProvideErrorToCallbackWithTokenError() throws UnsupportedCallbackException { Map configs = Map.of("bootstrap.servers", List.of("test-eh.servicebus.windows.net:9093")); when(tokenCredential.getToken(any(TokenRequestContext.class))) @@ -124,13 +124,13 @@ public void shouldProvideErrorToCallbackWithTokenError() throws UnsupportedCallb } @Test - public void shouldThrowExceptionWithNullBootstrapServers() { + void shouldThrowExceptionWithNullBootstrapServers() { assertThrows(IllegalArgumentException.class, () -> azureEntraLoginCallbackHandler.configure( Map.of(), null, null)); } @Test - public void shouldThrowExceptionWithMultipleBootstrapServers() { + void shouldThrowExceptionWithMultipleBootstrapServers() { Map configs = Map.of("bootstrap.servers", List.of("server1", "server2")); assertThrows(IllegalArgumentException.class, () -> azureEntraLoginCallbackHandler.configure( @@ -138,18 +138,18 @@ public void shouldThrowExceptionWithMultipleBootstrapServers() { } @Test - public void shouldThrowExceptionWithUnsupportedCallback() { + void shouldThrowExceptionWithUnsupportedCallback() { assertThrows(UnsupportedCallbackException.class, () -> azureEntraLoginCallbackHandler.handle( new Callback[] 
{mock(Callback.class)})); } @Test - public void shouldDoNothingOnClose() { + void shouldDoNothingOnClose() { azureEntraLoginCallbackHandler.close(); } @Test - public void shouldSupportDefaultConstructor() { + void shouldSupportDefaultConstructor() { new AzureEntraLoginCallbackHandler(); } } diff --git a/api/src/test/java/io/kafbat/ui/config/auth/azure/AzureEntraOAuthBearerTokenTest.java b/api/src/test/java/io/kafbat/ui/config/auth/azure/AzureEntraOAuthBearerTokenTest.java index 6072de470..74dcfd077 100644 --- a/api/src/test/java/io/kafbat/ui/config/auth/azure/AzureEntraOAuthBearerTokenTest.java +++ b/api/src/test/java/io/kafbat/ui/config/auth/azure/AzureEntraOAuthBearerTokenTest.java @@ -12,7 +12,7 @@ import org.apache.kafka.common.errors.SaslAuthenticationException; import org.junit.jupiter.api.Test; -public class AzureEntraOAuthBearerTokenTest { +class AzureEntraOAuthBearerTokenTest { // These are not real tokens. It was generated using fake values with an invalid signature, // so it is safe to store here. diff --git a/api/src/test/java/io/kafbat/ui/controller/ApplicationConfigControllerTest.java b/api/src/test/java/io/kafbat/ui/controller/ApplicationConfigControllerTest.java index c95a34d26..9659b4ea6 100644 --- a/api/src/test/java/io/kafbat/ui/controller/ApplicationConfigControllerTest.java +++ b/api/src/test/java/io/kafbat/ui/controller/ApplicationConfigControllerTest.java @@ -20,7 +20,7 @@ class ApplicationConfigControllerTest extends AbstractIntegrationTest { private WebTestClient webTestClient; @Test - public void testUpload() throws IOException { + void testUpload() throws IOException { var fileToUpload = new ClassPathResource("/fileForUploadTest.txt", this.getClass()); UploadedFileInfoDTO result = webTestClient diff --git a/api/src/test/java/io/kafbat/ui/emitter/MessageFiltersTest.java b/api/src/test/java/io/kafbat/ui/emitter/MessageFiltersTest.java index c66251b25..7aafea5ab 100644 --- a/api/src/test/java/io/kafbat/ui/emitter/MessageFiltersTest.java +++ b/api/src/test/java/io/kafbat/ui/emitter/MessageFiltersTest.java @@ -199,7 +199,7 @@ void filterSpeedIsAtLeast5kPerSec() { long took = System.currentTimeMillis() - before; assertThat(took).isLessThan(1000); - assertThat(matched).isGreaterThan(0); + assertThat(matched).isPositive(); } } diff --git a/api/src/test/java/io/kafbat/ui/serdes/builtin/HexSerdeTest.java b/api/src/test/java/io/kafbat/ui/serdes/builtin/HexSerdeTest.java index a13690c1e..54cb63321 100644 --- a/api/src/test/java/io/kafbat/ui/serdes/builtin/HexSerdeTest.java +++ b/api/src/test/java/io/kafbat/ui/serdes/builtin/HexSerdeTest.java @@ -11,7 +11,7 @@ import org.junit.jupiter.params.provider.CsvSource; import org.junit.jupiter.params.provider.EnumSource; -public class HexSerdeTest { +class HexSerdeTest { private static final byte[] TEST_BYTES = "hello world".getBytes(); private static final String TEST_BYTES_HEX_ENCODED = "68 65 6C 6C 6F 20 77 6F 72 6C 64"; diff --git a/api/src/test/java/io/kafbat/ui/serdes/builtin/ProtobufRawSerdeTest.java b/api/src/test/java/io/kafbat/ui/serdes/builtin/ProtobufRawSerdeTest.java index ba88b2d37..8dd65123c 100644 --- a/api/src/test/java/io/kafbat/ui/serdes/builtin/ProtobufRawSerdeTest.java +++ b/api/src/test/java/io/kafbat/ui/serdes/builtin/ProtobufRawSerdeTest.java @@ -52,7 +52,7 @@ void deserializeSimpleMessage() { void deserializeEmptyMessage() { var deserialized = serde.deserializer(DUMMY_TOPIC, Serde.Target.VALUE) .deserialize(null, new byte[0]); - assertThat(deserialized.getResult()).isEqualTo(""); + 
assertThat(deserialized.getResult()).isEmpty(); } @Test diff --git a/api/src/test/java/io/kafbat/ui/service/ApplicationInfoServiceTest.java b/api/src/test/java/io/kafbat/ui/service/ApplicationInfoServiceTest.java index 64e6ed743..a4cae637b 100644 --- a/api/src/test/java/io/kafbat/ui/service/ApplicationInfoServiceTest.java +++ b/api/src/test/java/io/kafbat/ui/service/ApplicationInfoServiceTest.java @@ -6,12 +6,12 @@ import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; -public class ApplicationInfoServiceTest extends AbstractIntegrationTest { +class ApplicationInfoServiceTest extends AbstractIntegrationTest { @Autowired private ApplicationInfoService service; @Test - public void testCustomGithubReleaseInfoTimeout() { + void testCustomGithubReleaseInfoTimeout() { assertEquals(100, service.githubReleaseInfo().getGithubApiMaxWaitTime()); } } diff --git a/api/src/test/java/io/kafbat/ui/service/ConfigTest.java b/api/src/test/java/io/kafbat/ui/service/ConfigTest.java index 4df63e476..756bd5c91 100644 --- a/api/src/test/java/io/kafbat/ui/service/ConfigTest.java +++ b/api/src/test/java/io/kafbat/ui/service/ConfigTest.java @@ -17,7 +17,7 @@ import org.springframework.test.web.reactive.server.WebTestClient; import org.testcontainers.shaded.org.awaitility.Awaitility; -public class ConfigTest extends AbstractIntegrationTest { +class ConfigTest extends AbstractIntegrationTest { @Autowired private WebTestClient webTestClient; @@ -35,7 +35,7 @@ void waitUntilStatsInitialized() { } @Test - public void testAlterConfig() { + void testAlterConfig() { String name = "background.threads"; Optional bc = getConfig(name); @@ -65,7 +65,7 @@ public void testAlterConfig() { } @Test - public void testAlterReadonlyConfig() { + void testAlterReadonlyConfig() { String name = "log.dirs"; webTestClient.put() diff --git a/api/src/test/java/io/kafbat/ui/service/LogDirsTest.java b/api/src/test/java/io/kafbat/ui/service/LogDirsTest.java index 3914f35a1..718f2f49c 100644 --- a/api/src/test/java/io/kafbat/ui/service/LogDirsTest.java +++ b/api/src/test/java/io/kafbat/ui/service/LogDirsTest.java @@ -15,13 +15,13 @@ import org.springframework.core.ParameterizedTypeReference; import org.springframework.test.web.reactive.server.WebTestClient; -public class LogDirsTest extends AbstractIntegrationTest { +class LogDirsTest extends AbstractIntegrationTest { @Autowired private WebTestClient webTestClient; @Test - public void testAllBrokers() { + void testAllBrokers() { List dirs = webTestClient.get() .uri("/api/clusters/{clusterName}/brokers/logdirs", LOCAL) .exchange() @@ -31,7 +31,7 @@ public void testAllBrokers() { .getResponseBody(); assertThat(dirs).hasSize(1); - BrokersLogdirsDTO dir = dirs.get(0); + BrokersLogdirsDTO dir = dirs.getFirst(); assertThat(dir.getName()).isEqualTo("/var/lib/kafka/data"); assertThat(dir.getTopics().stream().anyMatch(t -> t.getName().equals("__consumer_offsets"))) .isTrue(); @@ -41,12 +41,12 @@ public void testAllBrokers() { .findAny().orElseThrow(); assertThat(topic.getPartitions()).hasSize(1); - assertThat(topic.getPartitions().get(0).getBroker()).isEqualTo(1); - assertThat(topic.getPartitions().get(0).getSize()).isPositive(); + assertThat(topic.getPartitions().getFirst().getBroker()).isEqualTo(1); + assertThat(topic.getPartitions().getFirst().getSize()).isPositive(); } @Test - public void testOneBrokers() { + void testOneBrokers() { List dirs = webTestClient.get() .uri("/api/clusters/{clusterName}/brokers/logdirs?broker=1", LOCAL) .exchange() @@ -56,7 +56,7 @@ 
public void testOneBrokers() { .getResponseBody(); assertThat(dirs).hasSize(1); - BrokersLogdirsDTO dir = dirs.get(0); + BrokersLogdirsDTO dir = dirs.getFirst(); assertThat(dir.getName()).isEqualTo("/var/lib/kafka/data"); assertThat(dir.getTopics().stream().anyMatch(t -> t.getName().equals("__consumer_offsets"))) .isTrue(); @@ -66,12 +66,12 @@ public void testOneBrokers() { .findAny().orElseThrow(); assertThat(topic.getPartitions()).hasSize(1); - assertThat(topic.getPartitions().get(0).getBroker()).isEqualTo(1); - assertThat(topic.getPartitions().get(0).getSize()).isPositive(); + assertThat(topic.getPartitions().getFirst().getBroker()).isEqualTo(1); + assertThat(topic.getPartitions().getFirst().getSize()).isPositive(); } @Test - public void testWrongBrokers() { + void testWrongBrokers() { List dirs = webTestClient.get() .uri("/api/clusters/{clusterName}/brokers/logdirs?broker=2", LOCAL) .exchange() @@ -84,7 +84,7 @@ public void testWrongBrokers() { } @Test - public void testChangeDirToWrongDir() { + void testChangeDirToWrongDir() { ErrorResponseDTO dirs = webTestClient.patch() .uri("/api/clusters/{clusterName}/brokers/{id}/logdirs", LOCAL, 1) .bodyValue(Map.of( diff --git a/api/src/test/java/io/kafbat/ui/service/OffsetsResetServiceTest.java b/api/src/test/java/io/kafbat/ui/service/OffsetsResetServiceTest.java index 2485c7140..8c873980e 100644 --- a/api/src/test/java/io/kafbat/ui/service/OffsetsResetServiceTest.java +++ b/api/src/test/java/io/kafbat/ui/service/OffsetsResetServiceTest.java @@ -33,7 +33,7 @@ import reactor.core.publisher.Mono; import reactor.test.StepVerifier; -public class OffsetsResetServiceTest extends AbstractIntegrationTest { +class OffsetsResetServiceTest extends AbstractIntegrationTest { private static final int PARTITIONS = 5; diff --git a/api/src/test/java/io/kafbat/ui/service/SchemaRegistryPaginationTest.java b/api/src/test/java/io/kafbat/ui/service/SchemaRegistryPaginationTest.java index 4f61351c2..43cb29382 100644 --- a/api/src/test/java/io/kafbat/ui/service/SchemaRegistryPaginationTest.java +++ b/api/src/test/java/io/kafbat/ui/service/SchemaRegistryPaginationTest.java @@ -22,7 +22,7 @@ import org.mockito.Mockito; import reactor.core.publisher.Mono; -public class SchemaRegistryPaginationTest { +class SchemaRegistryPaginationTest { private static final String LOCAL_KAFKA_CLUSTER_NAME = "local"; @@ -127,7 +127,7 @@ void shouldCalculateCorrectPageCountForNonDivisiblePageSize() { assertThat(schemas.getBody()).isNotNull(); assertThat(schemas.getBody().getPageCount()).isEqualTo(4); assertThat(schemas.getBody().getSchemas()).hasSize(1); - assertThat(schemas.getBody().getSchemas().get(0).getSubject()).isEqualTo("subject99"); + assertThat(schemas.getBody().getSchemas().getFirst().getSubject()).isEqualTo("subject99"); } @SuppressWarnings("unchecked") diff --git a/api/src/test/java/io/kafbat/ui/service/SendAndReadTests.java b/api/src/test/java/io/kafbat/ui/service/SendAndReadTests.java index 009a6f67b..9e0164540 100644 --- a/api/src/test/java/io/kafbat/ui/service/SendAndReadTests.java +++ b/api/src/test/java/io/kafbat/ui/service/SendAndReadTests.java @@ -32,7 +32,7 @@ import org.springframework.beans.factory.annotation.Autowired; import reactor.test.StepVerifier; -public class SendAndReadTests extends AbstractIntegrationTest { +class SendAndReadTests extends AbstractIntegrationTest { private static final AvroSchema AVRO_SCHEMA_1 = new AvroSchema( "{" @@ -81,14 +81,16 @@ public class SendAndReadTests extends AbstractIntegrationTest { private static final String 
AVRO_SCHEMA_2_JSON_RECORD = "{ \"f1\": 111, \"f2\": \"testStr\" }"; private static final ProtobufSchema PROTOBUF_SCHEMA = new ProtobufSchema( - "syntax = \"proto3\";\n" - + "package io.kafbat;\n" - + "\n" - + "message TestProtoRecord {\n" - + " string f1 = 1;\n" - + " int32 f2 = 2;\n" - + "}\n" - + "\n" + """ + syntax = "proto3"; + package io.kafbat; + + message TestProtoRecord { + string f1 = 1; + int32 f2 = 2; + } + + """ ); private static final String PROTOBUF_SCHEMA_JSON_RECORD @@ -528,7 +530,7 @@ public void doAssert(Consumer msgAssert) { .blockLast(Duration.ofSeconds(5000)); assertThat(polled).isNotNull(); - assertThat(polled.getPartition()).isEqualTo(0); + assertThat(polled.getPartition()).isZero(); assertThat(polled.getOffset()).isNotNull(); msgAssert.accept(polled); } finally { diff --git a/api/src/test/java/io/kafbat/ui/service/TopicsServicePaginationTest.java b/api/src/test/java/io/kafbat/ui/service/TopicsServicePaginationTest.java index 9091ebb3d..08b211b40 100644 --- a/api/src/test/java/io/kafbat/ui/service/TopicsServicePaginationTest.java +++ b/api/src/test/java/io/kafbat/ui/service/TopicsServicePaginationTest.java @@ -66,7 +66,7 @@ private void init(Map topicsInCache) { } @Test - public void shouldListFirst25Topics() { + void shouldListFirst25Topics() { init( IntStream.rangeClosed(1, 100).boxed() .map(Objects::toString) @@ -93,7 +93,7 @@ private KafkaCluster buildKafkaCluster(String clusterName) { } @Test - public void shouldListFirst25TopicsSortedByNameDescendingOrder() { + void shouldListFirst25TopicsSortedByNameDescendingOrder() { var internalTopics = IntStream.rangeClosed(1, 100).boxed() .map(Objects::toString) .map(name -> new TopicDescription(name, false, List.of())) @@ -119,7 +119,7 @@ public void shouldListFirst25TopicsSortedByNameDescendingOrder() { } @Test - public void shouldCalculateCorrectPageCountForNonDivisiblePageSize() { + void shouldCalculateCorrectPageCountForNonDivisiblePageSize() { init( IntStream.rangeClosed(1, 100).boxed() .map(Objects::toString) @@ -134,11 +134,11 @@ public void shouldCalculateCorrectPageCountForNonDivisiblePageSize() { assertThat(topics.getBody().getPageCount()).isEqualTo(4); assertThat(topics.getBody().getTopics()).hasSize(1); - assertThat(topics.getBody().getTopics().get(0).getName()).isEqualTo("99"); + assertThat(topics.getBody().getTopics().getFirst().getName()).isEqualTo("99"); } @Test - public void shouldCorrectlyHandleNonPositivePageNumberAndPageSize() { + void shouldCorrectlyHandleNonPositivePageNumberAndPageSize() { init( IntStream.rangeClosed(1, 100).boxed() .map(Objects::toString) @@ -157,7 +157,7 @@ public void shouldCorrectlyHandleNonPositivePageNumberAndPageSize() { } @Test - public void shouldListBotInternalAndNonInternalTopics() { + void shouldListBotInternalAndNonInternalTopics() { init( IntStream.rangeClosed(1, 100).boxed() .map(Objects::toString) @@ -177,7 +177,7 @@ public void shouldListBotInternalAndNonInternalTopics() { } @Test - public void shouldListOnlyNonInternalTopics() { + void shouldListOnlyNonInternalTopics() { init( IntStream.rangeClosed(1, 100).boxed() @@ -198,7 +198,7 @@ public void shouldListOnlyNonInternalTopics() { } @Test - public void shouldListOnlyTopicsContainingOne() { + void shouldListOnlyTopicsContainingOne() { init( IntStream.rangeClosed(1, 100).boxed() @@ -219,7 +219,7 @@ public void shouldListOnlyTopicsContainingOne() { } @Test - public void shouldListTopicsOrderedByPartitionsCount() { + void shouldListTopicsOrderedByPartitionsCount() { Map internalTopics = IntStream.rangeClosed(1, 
100).boxed() .map(i -> new TopicDescription(UUID.randomUUID().toString(), false, IntStream.range(0, i) diff --git a/api/src/test/java/io/kafbat/ui/service/audit/AuditIntegrationTest.java b/api/src/test/java/io/kafbat/ui/service/audit/AuditIntegrationTest.java index 8f3221289..ebff4bcd6 100644 --- a/api/src/test/java/io/kafbat/ui/service/audit/AuditIntegrationTest.java +++ b/api/src/test/java/io/kafbat/ui/service/audit/AuditIntegrationTest.java @@ -22,7 +22,7 @@ import org.springframework.test.web.reactive.server.WebTestClient; import org.testcontainers.shaded.org.awaitility.Awaitility; -public class AuditIntegrationTest extends AbstractIntegrationTest { +class AuditIntegrationTest extends AbstractIntegrationTest { @Autowired private WebTestClient webTestClient; diff --git a/api/src/test/java/io/kafbat/ui/service/masking/DataMaskingTest.java b/api/src/test/java/io/kafbat/ui/service/masking/DataMaskingTest.java index 8cc2e5c9d..cd0a6c02e 100644 --- a/api/src/test/java/io/kafbat/ui/service/masking/DataMaskingTest.java +++ b/api/src/test/java/io/kafbat/ui/service/masking/DataMaskingTest.java @@ -1,6 +1,5 @@ package io.kafbat.ui.service.masking; -import static org.mockito.Mockito.eq; import static org.mockito.Mockito.reset; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; @@ -57,14 +56,14 @@ void appliesMasksToJsonContainerArgsBasedOnTopicPatterns(String jsonObjOrArr) { var parsedJson = (ContainerNode) new JsonMapper().readTree(jsonObjOrArr); masking.getMaskingFunction(TOPIC, Serde.Target.KEY).apply(jsonObjOrArr); - verify(policy1).applyToJsonContainer(eq(parsedJson)); + verify(policy1).applyToJsonContainer(parsedJson); verifyNoInteractions(policy2, policy3); reset(policy1, policy2, policy3); masking.getMaskingFunction(TOPIC, Serde.Target.VALUE).apply(jsonObjOrArr); - verify(policy2).applyToJsonContainer(eq(parsedJson)); - verify(policy3).applyToJsonContainer(eq(policy2.applyToJsonContainer(parsedJson))); + verify(policy2).applyToJsonContainer(parsedJson); + verify(policy3).applyToJsonContainer(policy2.applyToJsonContainer(parsedJson)); verifyNoInteractions(policy1); } @@ -76,13 +75,13 @@ void appliesMasksToJsonContainerArgsBasedOnTopicPatterns(String jsonObjOrArr) { }) void appliesFirstFoundMaskToStringArgsBasedOnTopicPatterns(String nonJsonObjOrArrString) { masking.getMaskingFunction(TOPIC, Serde.Target.KEY).apply(nonJsonObjOrArrString); - verify(policy1).applyToString(eq(nonJsonObjOrArrString)); + verify(policy1).applyToString(nonJsonObjOrArrString); verifyNoInteractions(policy2, policy3); reset(policy1, policy2, policy3); masking.getMaskingFunction(TOPIC, Serde.Target.VALUE).apply(nonJsonObjOrArrString); - verify(policy2).applyToString(eq(nonJsonObjOrArrString)); + verify(policy2).applyToString(nonJsonObjOrArrString); verifyNoInteractions(policy1, policy3); } From eaeb4a41dbd3233ac65aa5123d800d0d14db0c5f Mon Sep 17 00:00:00 2001 From: Dmitry Werner Date: Tue, 11 Feb 2025 19:02:46 +0500 Subject: [PATCH 68/84] BE: Chore: Cleanup api module (#815) --- .../ui/client/RetryingKafkaConnectClient.java | 4 +- .../config/auth/BasicAuthSecurityConfig.java | 4 - .../logout/CognitoLogoutSuccessHandler.java | 2 +- .../io/kafbat/ui/emitter/OffsetsInfo.java | 2 +- .../kafbat/ui/emitter/ResultSizeLimiter.java | 23 ----- .../exception/ConnectNotFoundException.java | 13 --- .../exception/DuplicateEntityException.java | 13 --- .../io/kafbat/ui/exception/ErrorCode.java | 6 -- .../GlobalErrorWebExceptionHandler.java | 3 +- ...afkaConnectConflictResponseException.java} | 5 +- 
.../ui/exception/KsqlDbNotFoundException.java | 13 --- .../SchemaFailedToDeleteException.java | 13 --- .../UnprocessableEntityException.java | 14 --- .../kafbat/ui/mapper/ConsumerGroupMapper.java | 26 ++--- .../ui/mapper/DescribeLogDirsMapper.java | 5 +- .../io/kafbat/ui/serdes/SerdeInstance.java | 2 +- .../ui/serdes/builtin/ProtobufFileSerde.java | 2 +- .../ui/service/KafkaConnectService.java | 2 - .../ui/service/ReactiveAdminClient.java | 6 -- .../ui/service/SchemaRegistryService.java | 2 +- .../io/kafbat/ui/service/TopicsService.java | 12 +-- .../io/kafbat/ui/service/acl/AclsService.java | 4 +- .../kafbat/ui/service/ksql/KsqlApiClient.java | 2 +- .../service/ksql/response/ResponseParser.java | 97 ++++++++----------- .../ui/service/metrics/MetricsCollector.java | 9 +- .../kafbat/ui/util/EmptyRedirectStrategy.java | 22 +---- .../ui/util/KafkaServicesValidation.java | 5 +- .../io/kafbat/ui/util/ReactiveFailover.java | 4 +- .../util/jsonschema/JsonAvroConversion.java | 42 +++----- .../kafbat/ui/util/jsonschema/JsonSchema.java | 9 -- 30 files changed, 89 insertions(+), 277 deletions(-) delete mode 100644 api/src/main/java/io/kafbat/ui/emitter/ResultSizeLimiter.java delete mode 100644 api/src/main/java/io/kafbat/ui/exception/ConnectNotFoundException.java delete mode 100644 api/src/main/java/io/kafbat/ui/exception/DuplicateEntityException.java rename api/src/main/java/io/kafbat/ui/exception/{KafkaConnectConflictReponseException.java => KafkaConnectConflictResponseException.java} (67%) delete mode 100644 api/src/main/java/io/kafbat/ui/exception/KsqlDbNotFoundException.java delete mode 100644 api/src/main/java/io/kafbat/ui/exception/SchemaFailedToDeleteException.java delete mode 100644 api/src/main/java/io/kafbat/ui/exception/UnprocessableEntityException.java diff --git a/api/src/main/java/io/kafbat/ui/client/RetryingKafkaConnectClient.java b/api/src/main/java/io/kafbat/ui/client/RetryingKafkaConnectClient.java index df2da3e55..cdf5bce14 100644 --- a/api/src/main/java/io/kafbat/ui/client/RetryingKafkaConnectClient.java +++ b/api/src/main/java/io/kafbat/ui/client/RetryingKafkaConnectClient.java @@ -12,7 +12,7 @@ import io.kafbat.ui.connect.model.ConnectorTopics; import io.kafbat.ui.connect.model.NewConnector; import io.kafbat.ui.connect.model.TaskStatus; -import io.kafbat.ui.exception.KafkaConnectConflictReponseException; +import io.kafbat.ui.exception.KafkaConnectConflictResponseException; import io.kafbat.ui.exception.ValidationException; import io.kafbat.ui.util.WebClientConfigurator; import jakarta.validation.constraints.NotNull; @@ -48,7 +48,7 @@ private static Retry conflictCodeRetry() { .fixedDelay(MAX_RETRIES, RETRIES_DELAY) .filter(e -> e instanceof WebClientResponseException.Conflict) .onRetryExhaustedThrow((spec, signal) -> - new KafkaConnectConflictReponseException( + new KafkaConnectConflictResponseException( (WebClientResponseException.Conflict) signal.failure())); } diff --git a/api/src/main/java/io/kafbat/ui/config/auth/BasicAuthSecurityConfig.java b/api/src/main/java/io/kafbat/ui/config/auth/BasicAuthSecurityConfig.java index db8ef8153..788c33bdd 100644 --- a/api/src/main/java/io/kafbat/ui/config/auth/BasicAuthSecurityConfig.java +++ b/api/src/main/java/io/kafbat/ui/config/auth/BasicAuthSecurityConfig.java @@ -1,8 +1,6 @@ package io.kafbat.ui.config.auth; -import io.kafbat.ui.util.EmptyRedirectStrategy; import io.kafbat.ui.util.StaticFileWebFilter; -import java.net.URI; import lombok.extern.slf4j.Slf4j; import 
org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; @@ -12,8 +10,6 @@ import org.springframework.security.config.web.server.SecurityWebFiltersOrder; import org.springframework.security.config.web.server.ServerHttpSecurity; import org.springframework.security.web.server.SecurityWebFilterChain; -import org.springframework.security.web.server.authentication.RedirectServerAuthenticationSuccessHandler; -import org.springframework.security.web.server.authentication.logout.RedirectServerLogoutSuccessHandler; import org.springframework.security.web.server.util.matcher.ServerWebExchangeMatchers; @Configuration diff --git a/api/src/main/java/io/kafbat/ui/config/auth/logout/CognitoLogoutSuccessHandler.java b/api/src/main/java/io/kafbat/ui/config/auth/logout/CognitoLogoutSuccessHandler.java index d98ea22af..e58f51ab3 100644 --- a/api/src/main/java/io/kafbat/ui/config/auth/logout/CognitoLogoutSuccessHandler.java +++ b/api/src/main/java/io/kafbat/ui/config/auth/logout/CognitoLogoutSuccessHandler.java @@ -40,7 +40,7 @@ public Mono handle(WebFilterExchange exchange, Authentication authenticati requestUri.getPath(), requestUri.getQuery()); final UriComponents baseUrl = UriComponentsBuilder - .fromHttpUrl(fullUrl) + .fromUriString(fullUrl) .replacePath("/") .replaceQuery(null) .fragment(null) diff --git a/api/src/main/java/io/kafbat/ui/emitter/OffsetsInfo.java b/api/src/main/java/io/kafbat/ui/emitter/OffsetsInfo.java index a361834d0..7f34e1708 100644 --- a/api/src/main/java/io/kafbat/ui/emitter/OffsetsInfo.java +++ b/api/src/main/java/io/kafbat/ui/emitter/OffsetsInfo.java @@ -53,7 +53,7 @@ private Map firstOffsetsForPolling(Consumer consumer Collection partitions) { try { // we try to use offsetsForTimes() to find earliest offsets, since for - // some topics (like compacted) beginningOffsets() ruturning 0 offsets + // some topics (like compacted) beginningOffsets() returning 0 offsets // even when effectively first offset can be very high var offsets = consumer.offsetsForTimes( partitions.stream().collect(Collectors.toMap(p -> p, p -> 0L)) diff --git a/api/src/main/java/io/kafbat/ui/emitter/ResultSizeLimiter.java b/api/src/main/java/io/kafbat/ui/emitter/ResultSizeLimiter.java deleted file mode 100644 index 3e0ec2a43..000000000 --- a/api/src/main/java/io/kafbat/ui/emitter/ResultSizeLimiter.java +++ /dev/null @@ -1,23 +0,0 @@ -package io.kafbat.ui.emitter; - -import io.kafbat.ui.model.TopicMessageEventDTO; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Predicate; - -public class ResultSizeLimiter implements Predicate { - private final AtomicInteger processed = new AtomicInteger(); - private final int limit; - - public ResultSizeLimiter(int limit) { - this.limit = limit; - } - - @Override - public boolean test(TopicMessageEventDTO event) { - if (event.getType().equals(TopicMessageEventDTO.TypeEnum.MESSAGE)) { - final int i = processed.incrementAndGet(); - return i <= limit; - } - return true; - } -} diff --git a/api/src/main/java/io/kafbat/ui/exception/ConnectNotFoundException.java b/api/src/main/java/io/kafbat/ui/exception/ConnectNotFoundException.java deleted file mode 100644 index 5978c2e93..000000000 --- a/api/src/main/java/io/kafbat/ui/exception/ConnectNotFoundException.java +++ /dev/null @@ -1,13 +0,0 @@ -package io.kafbat.ui.exception; - -public class ConnectNotFoundException extends CustomBaseException { - - public ConnectNotFoundException() { - super("Connect not found"); - } - - @Override - public 
ErrorCode getErrorCode() { - return ErrorCode.CONNECT_NOT_FOUND; - } -} diff --git a/api/src/main/java/io/kafbat/ui/exception/DuplicateEntityException.java b/api/src/main/java/io/kafbat/ui/exception/DuplicateEntityException.java deleted file mode 100644 index 23ba0c5af..000000000 --- a/api/src/main/java/io/kafbat/ui/exception/DuplicateEntityException.java +++ /dev/null @@ -1,13 +0,0 @@ -package io.kafbat.ui.exception; - -public class DuplicateEntityException extends CustomBaseException { - - public DuplicateEntityException(String message) { - super(message); - } - - @Override - public ErrorCode getErrorCode() { - return ErrorCode.DUPLICATED_ENTITY; - } -} diff --git a/api/src/main/java/io/kafbat/ui/exception/ErrorCode.java b/api/src/main/java/io/kafbat/ui/exception/ErrorCode.java index 6d4a732e3..32cf5c5c8 100644 --- a/api/src/main/java/io/kafbat/ui/exception/ErrorCode.java +++ b/api/src/main/java/io/kafbat/ui/exception/ErrorCode.java @@ -4,11 +4,8 @@ import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; - public enum ErrorCode { - FORBIDDEN(403, HttpStatus.FORBIDDEN), - UNEXPECTED(5000, HttpStatus.INTERNAL_SERVER_ERROR), KSQL_API_ERROR(5001, HttpStatus.INTERNAL_SERVER_ERROR), BINDING_FAIL(4001, HttpStatus.BAD_REQUEST), @@ -16,13 +13,10 @@ public enum ErrorCode { VALIDATION_FAIL(4002, HttpStatus.BAD_REQUEST), READ_ONLY_MODE_ENABLE(4003, HttpStatus.METHOD_NOT_ALLOWED), CONNECT_CONFLICT_RESPONSE(4004, HttpStatus.CONFLICT), - DUPLICATED_ENTITY(4005, HttpStatus.CONFLICT), UNPROCESSABLE_ENTITY(4006, HttpStatus.UNPROCESSABLE_ENTITY), CLUSTER_NOT_FOUND(4007, HttpStatus.NOT_FOUND), TOPIC_NOT_FOUND(4008, HttpStatus.NOT_FOUND), SCHEMA_NOT_FOUND(4009, HttpStatus.NOT_FOUND), - CONNECT_NOT_FOUND(4010, HttpStatus.NOT_FOUND), - KSQLDB_NOT_FOUND(4011, HttpStatus.NOT_FOUND), DIR_NOT_FOUND(4012, HttpStatus.BAD_REQUEST), TOPIC_OR_PARTITION_NOT_FOUND(4013, HttpStatus.BAD_REQUEST), INVALID_REQUEST(4014, HttpStatus.BAD_REQUEST), diff --git a/api/src/main/java/io/kafbat/ui/exception/GlobalErrorWebExceptionHandler.java b/api/src/main/java/io/kafbat/ui/exception/GlobalErrorWebExceptionHandler.java index e6c0c76a5..482ced492 100644 --- a/api/src/main/java/io/kafbat/ui/exception/GlobalErrorWebExceptionHandler.java +++ b/api/src/main/java/io/kafbat/ui/exception/GlobalErrorWebExceptionHandler.java @@ -102,8 +102,7 @@ private Mono render(CustomBaseException baseException, ServerReq private Mono render(WebExchangeBindException exception, ServerRequest request) { Map> fieldErrorsMap = exception.getFieldErrors().stream() - .collect(Collectors - .toMap(FieldError::getField, f -> Set.of(extractFieldErrorMsg(f)), Sets::union)); + .collect(Collectors.toMap(FieldError::getField, f -> Set.of(extractFieldErrorMsg(f)), Sets::union)); var fieldsErrors = fieldErrorsMap.entrySet().stream() .map(e -> { diff --git a/api/src/main/java/io/kafbat/ui/exception/KafkaConnectConflictReponseException.java b/api/src/main/java/io/kafbat/ui/exception/KafkaConnectConflictResponseException.java similarity index 67% rename from api/src/main/java/io/kafbat/ui/exception/KafkaConnectConflictReponseException.java rename to api/src/main/java/io/kafbat/ui/exception/KafkaConnectConflictResponseException.java index 48376d1ac..ad356b7e5 100644 --- a/api/src/main/java/io/kafbat/ui/exception/KafkaConnectConflictReponseException.java +++ b/api/src/main/java/io/kafbat/ui/exception/KafkaConnectConflictResponseException.java @@ -1,11 +1,10 @@ package io.kafbat.ui.exception; - import 
org.springframework.web.reactive.function.client.WebClientResponseException; -public class KafkaConnectConflictReponseException extends CustomBaseException { +public class KafkaConnectConflictResponseException extends CustomBaseException { - public KafkaConnectConflictReponseException(WebClientResponseException.Conflict e) { + public KafkaConnectConflictResponseException(WebClientResponseException.Conflict e) { super("Kafka Connect responded with 409 (Conflict) code. Response body: " + e.getResponseBodyAsString()); } diff --git a/api/src/main/java/io/kafbat/ui/exception/KsqlDbNotFoundException.java b/api/src/main/java/io/kafbat/ui/exception/KsqlDbNotFoundException.java deleted file mode 100644 index 255ccec80..000000000 --- a/api/src/main/java/io/kafbat/ui/exception/KsqlDbNotFoundException.java +++ /dev/null @@ -1,13 +0,0 @@ -package io.kafbat.ui.exception; - -public class KsqlDbNotFoundException extends CustomBaseException { - - public KsqlDbNotFoundException() { - super("KSQL DB not found"); - } - - @Override - public ErrorCode getErrorCode() { - return ErrorCode.KSQLDB_NOT_FOUND; - } -} diff --git a/api/src/main/java/io/kafbat/ui/exception/SchemaFailedToDeleteException.java b/api/src/main/java/io/kafbat/ui/exception/SchemaFailedToDeleteException.java deleted file mode 100644 index 05ba55c70..000000000 --- a/api/src/main/java/io/kafbat/ui/exception/SchemaFailedToDeleteException.java +++ /dev/null @@ -1,13 +0,0 @@ -package io.kafbat.ui.exception; - -public class SchemaFailedToDeleteException extends CustomBaseException { - - public SchemaFailedToDeleteException(String schemaName) { - super(String.format("Unable to delete schema with name %s", schemaName)); - } - - @Override - public ErrorCode getErrorCode() { - return ErrorCode.SCHEMA_NOT_DELETED; - } -} diff --git a/api/src/main/java/io/kafbat/ui/exception/UnprocessableEntityException.java b/api/src/main/java/io/kafbat/ui/exception/UnprocessableEntityException.java deleted file mode 100644 index fcd9e41fd..000000000 --- a/api/src/main/java/io/kafbat/ui/exception/UnprocessableEntityException.java +++ /dev/null @@ -1,14 +0,0 @@ -package io.kafbat.ui.exception; - - -public class UnprocessableEntityException extends CustomBaseException { - - public UnprocessableEntityException(String message) { - super(message); - } - - @Override - public ErrorCode getErrorCode() { - return ErrorCode.UNPROCESSABLE_ENTITY; - } -} diff --git a/api/src/main/java/io/kafbat/ui/mapper/ConsumerGroupMapper.java b/api/src/main/java/io/kafbat/ui/mapper/ConsumerGroupMapper.java index 72b3d65b4..800eab757 100644 --- a/api/src/main/java/io/kafbat/ui/mapper/ConsumerGroupMapper.java +++ b/api/src/main/java/io/kafbat/ui/mapper/ConsumerGroupMapper.java @@ -97,23 +97,15 @@ private static BrokerDTO mapCoordinator(Node node) { return new BrokerDTO().host(node.host()).id(node.id()).port(node.port()); } - private static ConsumerGroupStateDTO mapConsumerGroupState( - org.apache.kafka.common.ConsumerGroupState state) { - switch (state) { - case DEAD: - return ConsumerGroupStateDTO.DEAD; - case EMPTY: - return ConsumerGroupStateDTO.EMPTY; - case STABLE: - return ConsumerGroupStateDTO.STABLE; - case PREPARING_REBALANCE: - return ConsumerGroupStateDTO.PREPARING_REBALANCE; - case COMPLETING_REBALANCE: - return ConsumerGroupStateDTO.COMPLETING_REBALANCE; - default: - return ConsumerGroupStateDTO.UNKNOWN; - } + private static ConsumerGroupStateDTO mapConsumerGroupState(org.apache.kafka.common.ConsumerGroupState state) { + return switch (state) { + case DEAD -> ConsumerGroupStateDTO.DEAD; 
+ case EMPTY -> ConsumerGroupStateDTO.EMPTY; + case STABLE -> ConsumerGroupStateDTO.STABLE; + case PREPARING_REBALANCE -> ConsumerGroupStateDTO.PREPARING_REBALANCE; + case COMPLETING_REBALANCE -> ConsumerGroupStateDTO.COMPLETING_REBALANCE; + default -> ConsumerGroupStateDTO.UNKNOWN; + }; } - } diff --git a/api/src/main/java/io/kafbat/ui/mapper/DescribeLogDirsMapper.java b/api/src/main/java/io/kafbat/ui/mapper/DescribeLogDirsMapper.java index 8a4e44e5c..bccd3a66b 100644 --- a/api/src/main/java/io/kafbat/ui/mapper/DescribeLogDirsMapper.java +++ b/api/src/main/java/io/kafbat/ui/mapper/DescribeLogDirsMapper.java @@ -42,7 +42,7 @@ private BrokersLogdirsDTO toBrokerLogDirs(Integer broker, String dirName, private BrokerTopicLogdirsDTO toTopicLogDirs(Integer broker, String name, List> partitions) { + DescribeLogDirsResponse.ReplicaInfo>> partitions) { BrokerTopicLogdirsDTO topic = new BrokerTopicLogdirsDTO(); topic.setName(name); topic.setPartitions( @@ -54,8 +54,7 @@ private BrokerTopicLogdirsDTO toTopicLogDirs(Integer broker, String name, } private BrokerTopicPartitionLogdirDTO topicPartitionLogDir(Integer broker, Integer partition, - DescribeLogDirsResponse.ReplicaInfo - replicaInfo) { + DescribeLogDirsResponse.ReplicaInfo replicaInfo) { BrokerTopicPartitionLogdirDTO logDir = new BrokerTopicPartitionLogdirDTO(); logDir.setBroker(broker); logDir.setPartition(partition); diff --git a/api/src/main/java/io/kafbat/ui/serdes/SerdeInstance.java b/api/src/main/java/io/kafbat/ui/serdes/SerdeInstance.java index 7c1826257..b0fdc9834 100644 --- a/api/src/main/java/io/kafbat/ui/serdes/SerdeInstance.java +++ b/api/src/main/java/io/kafbat/ui/serdes/SerdeInstance.java @@ -97,7 +97,7 @@ public void close() { try { serde.close(); } catch (Exception e) { - log.error("Error closing serde " + name, e); + log.error("Error closing serde {}", name, e); } return null; }); diff --git a/api/src/main/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerde.java b/api/src/main/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerde.java index 20ea47a06..618c711b1 100644 --- a/api/src/main/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerde.java +++ b/api/src/main/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerde.java @@ -380,7 +380,7 @@ private Map knownProtoFiles() { } private ProtoFile loadKnownProtoFile(String path, Descriptors.FileDescriptor fileDescriptor) { - String protoFileString = null; + String protoFileString; // know type file contains either message or enum if (!fileDescriptor.getMessageTypes().isEmpty()) { protoFileString = new ProtobufSchema(fileDescriptor.getMessageTypes().getFirst()).canonicalString(); diff --git a/api/src/main/java/io/kafbat/ui/service/KafkaConnectService.java b/api/src/main/java/io/kafbat/ui/service/KafkaConnectService.java index 31e4268a0..92bfc260b 100644 --- a/api/src/main/java/io/kafbat/ui/service/KafkaConnectService.java +++ b/api/src/main/java/io/kafbat/ui/service/KafkaConnectService.java @@ -1,6 +1,5 @@ package io.kafbat.ui.service; -import com.fasterxml.jackson.databind.ObjectMapper; import io.kafbat.ui.connect.api.KafkaConnectClientApi; import io.kafbat.ui.connect.model.ConnectorStatus; import io.kafbat.ui.connect.model.ConnectorStatusConnector; @@ -44,7 +43,6 @@ public class KafkaConnectService { private final ClusterMapper clusterMapper; private final KafkaConnectMapper kafkaConnectMapper; - private final ObjectMapper objectMapper; private final KafkaConfigSanitizer kafkaConfigSanitizer; public Flux getConnects(KafkaCluster cluster) { diff --git 
a/api/src/main/java/io/kafbat/ui/service/ReactiveAdminClient.java b/api/src/main/java/io/kafbat/ui/service/ReactiveAdminClient.java index 651f6d531..6aea290c3 100644 --- a/api/src/main/java/io/kafbat/ui/service/ReactiveAdminClient.java +++ b/api/src/main/java/io/kafbat/ui/service/ReactiveAdminClient.java @@ -389,12 +389,6 @@ static Mono> toMonoWithExceptionFilter(Map> v ); } - public Mono>> describeLogDirs() { - return describeCluster() - .map(d -> d.getNodes().stream().map(Node::id).collect(toList())) - .flatMap(this::describeLogDirs); - } - public Mono>> describeLogDirs( Collection brokerIds) { return toMono(client.describeLogDirs(brokerIds).all()) diff --git a/api/src/main/java/io/kafbat/ui/service/SchemaRegistryService.java b/api/src/main/java/io/kafbat/ui/service/SchemaRegistryService.java index 1bac22235..c725a787e 100644 --- a/api/src/main/java/io/kafbat/ui/service/SchemaRegistryService.java +++ b/api/src/main/java/io/kafbat/ui/service/SchemaRegistryService.java @@ -63,7 +63,7 @@ public Mono> getAllSubjectNames(KafkaCluster cluster) { @SneakyThrows private List parseSubjectListString(String subjectNamesStr) { //workaround for https://github.com/spring-projects/spring-framework/issues/24734 - return new JsonMapper().readValue(subjectNamesStr, new TypeReference>() { + return new JsonMapper().readValue(subjectNamesStr, new TypeReference<>() { }); } diff --git a/api/src/main/java/io/kafbat/ui/service/TopicsService.java b/api/src/main/java/io/kafbat/ui/service/TopicsService.java index 015a86838..95ad7bc5a 100644 --- a/api/src/main/java/io/kafbat/ui/service/TopicsService.java +++ b/api/src/main/java/io/kafbat/ui/service/TopicsService.java @@ -97,7 +97,7 @@ private Mono loadTopic(KafkaCluster c, String topicName) { /** * After creation topic can be invisible via API for some time. - * To workaround this, we retyring topic loading until it becomes visible. + * To workaround this, we're retrying topic loading until it becomes visible. 
*/ private Mono loadTopicAfterCreation(KafkaCluster c, String topicName) { return loadTopic(c, topicName) @@ -137,8 +137,7 @@ private List createList(List orderedNames, .collect(toList()); } - private Mono getPartitionOffsets(Map - descriptionsMap, + private Mono getPartitionOffsets(Map descriptionsMap, ReactiveAdminClient ac) { var descriptions = descriptionsMap.values(); return ac.listOffsets(descriptions, OffsetSpec.earliest()) @@ -225,8 +224,7 @@ private Mono updateTopic(KafkaCluster cluster, .then(loadTopic(cluster, topicName))); } - public Mono updateTopic(KafkaCluster cl, String topicName, - Mono topicUpdate) { + public Mono updateTopic(KafkaCluster cl, String topicName, Mono topicUpdate) { return topicUpdate .flatMap(t -> updateTopic(cl, topicName, t)); } @@ -298,7 +296,7 @@ private Map> getPartitionsRea var brokers = brokersUsage.entrySet().stream() .sorted(Map.Entry.comparingByValue()) .map(Map.Entry::getKey) - .collect(toList()); + .toList(); // Iterate brokers and try to add them in assignment // while partition replicas count != requested replication factor @@ -326,7 +324,7 @@ private Map> getPartitionsRea var brokersUsageList = brokersUsage.entrySet().stream() .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())) .map(Map.Entry::getKey) - .collect(toList()); + .toList(); // Iterate brokers and try to remove them from assignment // while partition replicas count != requested replication factor diff --git a/api/src/main/java/io/kafbat/ui/service/acl/AclsService.java b/api/src/main/java/io/kafbat/ui/service/acl/AclsService.java index b3877a336..30078d435 100644 --- a/api/src/main/java/io/kafbat/ui/service/acl/AclsService.java +++ b/api/src/main/java/io/kafbat/ui/service/acl/AclsService.java @@ -112,13 +112,13 @@ private void logAclSyncPlan(KafkaCluster cluster, Set toBeAdded, Set if (!toBeAdded.isEmpty()) { log.info("ACLs to be added ({}): ", toBeAdded.size()); for (AclBinding aclBinding : toBeAdded) { - log.info(" " + AclCsv.createAclString(aclBinding)); + log.info(" {}", AclCsv.createAclString(aclBinding)); } } if (!toBeDeleted.isEmpty()) { log.info("ACLs to be deleted ({}): ", toBeDeleted.size()); for (AclBinding aclBinding : toBeDeleted) { - log.info(" " + AclCsv.createAclString(aclBinding)); + log.info(" {}", AclCsv.createAclString(aclBinding)); } } } diff --git a/api/src/main/java/io/kafbat/ui/service/ksql/KsqlApiClient.java b/api/src/main/java/io/kafbat/ui/service/ksql/KsqlApiClient.java index 90192eb2d..13daee2bc 100644 --- a/api/src/main/java/io/kafbat/ui/service/ksql/KsqlApiClient.java +++ b/api/src/main/java/io/kafbat/ui/service/ksql/KsqlApiClient.java @@ -176,7 +176,7 @@ public Flux execute(String ksql, Map streamPr if (statements.size() > 1) { return errorTableFlux("Only single statement supported now"); } - if (statements.size() == 0) { + if (statements.isEmpty()) { return errorTableFlux("No valid ksql statement found"); } if (isUnsupportedStatementType(statements.get(0))) { diff --git a/api/src/main/java/io/kafbat/ui/service/ksql/response/ResponseParser.java b/api/src/main/java/io/kafbat/ui/service/ksql/response/ResponseParser.java index f353ea578..a5d54f369 100644 --- a/api/src/main/java/io/kafbat/ui/service/ksql/response/ResponseParser.java +++ b/api/src/main/java/io/kafbat/ui/service/ksql/response/ResponseParser.java @@ -99,64 +99,45 @@ public static List parseStatementResponse(JsonN .orElse("unknown"); // messages structure can be inferred from 
https://github.com/confluentinc/ksql/blob/master/ksqldb-rest-model/src/main/java/io/confluent/ksql/rest/entity/KsqlEntity.java - switch (type) { - case "currentStatus": - return parseObject( - "Status", - List.of("status", "message"), - jsonNode.get("commandStatus") - ); - case "properties": - return parseProperties(jsonNode); - case "queries": - return parseArray("Queries", "queries", jsonNode); - case "sourceDescription": - return parseObjectDynamically("Source Description", jsonNode.get("sourceDescription")); - case "queryDescription": - return parseObjectDynamically("Queries Description", jsonNode.get("queryDescription")); - case "topicDescription": - return parseObject( - "Topic Description", - List.of("name", "kafkaTopic", "format", "schemaString"), - jsonNode - ); - case "streams": - return parseArray("Streams", "streams", jsonNode); - case "tables": - return parseArray("Tables", "tables", jsonNode); - case "kafka_topics": - return parseArray("Topics", "topics", jsonNode); - case "kafka_topics_extended": - return parseArray("Topics extended", "topics", jsonNode); - case "executionPlan": - return parseObject("Execution plan", List.of("executionPlanText"), jsonNode); - case "source_descriptions": - return parseArray("Source descriptions", "sourceDescriptions", jsonNode); - case "query_descriptions": - return parseArray("Queries", "queryDescriptions", jsonNode); - case "describe_function": - return parseObject("Function description", - List.of("name", "author", "version", "description", "functions", "path", "type"), - jsonNode - ); - case "function_names": - return parseArray("Function Names", "functions", jsonNode); - case "connector_info": - return parseObjectDynamically("Connector Info", jsonNode.get("info")); - case "drop_connector": - return parseObject("Dropped connector", List.of("connectorName"), jsonNode); - case "connector_list": - return parseArray("Connectors", "connectors", jsonNode); - case "connector_plugins_list": - return parseArray("Connector Plugins", "connectorPlugins", jsonNode); - case "connector_description": - return parseObject("Connector Description", - List.of("connectorClass", "status", "sources", "topics"), - jsonNode - ); - default: - return parseUnknownResponse(jsonNode); - } + return switch (type) { + case "currentStatus" -> parseObject( + "Status", + List.of("status", "message"), + jsonNode.get("commandStatus") + ); + case "properties" -> parseProperties(jsonNode); + case "queries" -> parseArray("Queries", "queries", jsonNode); + case "sourceDescription" -> parseObjectDynamically("Source Description", jsonNode.get("sourceDescription")); + case "queryDescription" -> parseObjectDynamically("Queries Description", jsonNode.get("queryDescription")); + case "topicDescription" -> parseObject( + "Topic Description", + List.of("name", "kafkaTopic", "format", "schemaString"), + jsonNode + ); + case "streams" -> parseArray("Streams", "streams", jsonNode); + case "tables" -> parseArray("Tables", "tables", jsonNode); + case "kafka_topics" -> parseArray("Topics", "topics", jsonNode); + case "kafka_topics_extended" -> parseArray("Topics extended", "topics", jsonNode); + case "executionPlan" -> parseObject("Execution plan", List.of("executionPlanText"), jsonNode); + case "source_descriptions" -> parseArray("Source descriptions", "sourceDescriptions", jsonNode); + case "query_descriptions" -> parseArray("Queries", "queryDescriptions", jsonNode); + case "describe_function" -> parseObject( + "Function description", + List.of("name", "author", "version", "description", 
"functions", "path", "type"), + jsonNode + ); + case "function_names" -> parseArray("Function Names", "functions", jsonNode); + case "connector_info" -> parseObjectDynamically("Connector Info", jsonNode.get("info")); + case "drop_connector" -> parseObject("Dropped connector", List.of("connectorName"), jsonNode); + case "connector_list" -> parseArray("Connectors", "connectors", jsonNode); + case "connector_plugins_list" -> parseArray("Connector Plugins", "connectorPlugins", jsonNode); + case "connector_description" -> parseObject( + "Connector Description", + List.of("connectorClass", "status", "sources", "topics"), + jsonNode + ); + default -> parseUnknownResponse(jsonNode); + }; } private static List parseObjectDynamically( diff --git a/api/src/main/java/io/kafbat/ui/service/metrics/MetricsCollector.java b/api/src/main/java/io/kafbat/ui/service/metrics/MetricsCollector.java index b124feb8d..e9a08e8cb 100644 --- a/api/src/main/java/io/kafbat/ui/service/metrics/MetricsCollector.java +++ b/api/src/main/java/io/kafbat/ui/service/metrics/MetricsCollector.java @@ -28,7 +28,7 @@ public Mono getBrokerMetrics(KafkaCluster cluster, Collection nod return Flux.fromIterable(nodes) .flatMap(n -> getMetrics(cluster, n).map(lst -> Tuples.of(n, lst))) .collectMap(Tuple2::getT1, Tuple2::getT2) - .map(nodeMetrics -> collectMetrics(cluster, nodeMetrics)) + .map(this::collectMetrics) .defaultIfEmpty(Metrics.empty()); } @@ -45,20 +45,19 @@ private Mono> getMetrics(KafkaCluster kafkaCluster, Node node) { return metricFlux.collectList(); } - public Metrics collectMetrics(KafkaCluster cluster, Map> perBrokerMetrics) { + public Metrics collectMetrics(Map> perBrokerMetrics) { Metrics.MetricsBuilder builder = Metrics.builder() .perBrokerMetrics( perBrokerMetrics.entrySet() .stream() .collect(Collectors.toMap(e -> e.getKey().id(), Map.Entry::getValue))); - populateWellknowMetrics(cluster, perBrokerMetrics) - .apply(builder); + populateWellknowMetrics(perBrokerMetrics).apply(builder); return builder.build(); } - private WellKnownMetrics populateWellknowMetrics(KafkaCluster cluster, Map> perBrokerMetrics) { + private WellKnownMetrics populateWellknowMetrics(Map> perBrokerMetrics) { WellKnownMetrics wellKnownMetrics = new WellKnownMetrics(); perBrokerMetrics.forEach((node, metrics) -> metrics.forEach(metric -> diff --git a/api/src/main/java/io/kafbat/ui/util/EmptyRedirectStrategy.java b/api/src/main/java/io/kafbat/ui/util/EmptyRedirectStrategy.java index c6f80a113..9ebbba7f8 100644 --- a/api/src/main/java/io/kafbat/ui/util/EmptyRedirectStrategy.java +++ b/api/src/main/java/io/kafbat/ui/util/EmptyRedirectStrategy.java @@ -10,27 +10,18 @@ public class EmptyRedirectStrategy implements ServerRedirectStrategy { - private HttpStatus httpStatus = HttpStatus.FOUND; - - private boolean contextRelative = true; - public Mono sendRedirect(ServerWebExchange exchange, URI location) { Assert.notNull(exchange, "exchange cannot be null"); Assert.notNull(location, "location cannot be null"); return Mono.fromRunnable(() -> { ServerHttpResponse response = exchange.getResponse(); - response.setStatusCode(this.httpStatus); + response.setStatusCode(HttpStatus.FOUND); response.getHeaders().setLocation(createLocation(exchange, location)); }); } private URI createLocation(ServerWebExchange exchange, URI location) { - if (!this.contextRelative) { - return location; - } - - String url = location.getPath().isEmpty() ? "/" - : location.toASCIIString(); + String url = location.getPath().isEmpty() ? 
"/" : location.toASCIIString(); if (url.startsWith("/")) { String context = exchange.getRequest().getPath().contextPath().value(); @@ -38,13 +29,4 @@ private URI createLocation(ServerWebExchange exchange, URI location) { } return location; } - - public void setHttpStatus(HttpStatus httpStatus) { - Assert.notNull(httpStatus, "httpStatus cannot be null"); - this.httpStatus = httpStatus; - } - - public void setContextRelative(boolean contextRelative) { - this.contextRelative = contextRelative; - } } diff --git a/api/src/main/java/io/kafbat/ui/util/KafkaServicesValidation.java b/api/src/main/java/io/kafbat/ui/util/KafkaServicesValidation.java index 397fa3839..019a33543 100644 --- a/api/src/main/java/io/kafbat/ui/util/KafkaServicesValidation.java +++ b/api/src/main/java/io/kafbat/ui/util/KafkaServicesValidation.java @@ -62,8 +62,7 @@ public static Optional validateTruststore(TruststoreConfig truststoreCon public static Mono validateClusterConnection(String bootstrapServers, Properties clusterProps, - @Nullable - TruststoreConfig ssl) { + @Nullable TruststoreConfig ssl) { Properties properties = new Properties(); KafkaClientSslPropertiesUtil.addKafkaSslProperties(ssl, properties); properties.putAll(clusterProps); @@ -73,7 +72,7 @@ public static Mono validateClusterConnection(S properties.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, 5_000); properties.put(AdminClientConfig.DEFAULT_API_TIMEOUT_MS_CONFIG, 5_000); properties.put(AdminClientConfig.CLIENT_ID_CONFIG, "kui-admin-client-validation-" + System.currentTimeMillis()); - AdminClient adminClient = null; + AdminClient adminClient; try { adminClient = AdminClient.create(properties); } catch (Exception e) { diff --git a/api/src/main/java/io/kafbat/ui/util/ReactiveFailover.java b/api/src/main/java/io/kafbat/ui/util/ReactiveFailover.java index b46384d2e..872e9ddf9 100644 --- a/api/src/main/java/io/kafbat/ui/util/ReactiveFailover.java +++ b/api/src/main/java/io/kafbat/ui/util/ReactiveFailover.java @@ -59,8 +59,8 @@ public static ReactiveFailover create(List args, } private ReactiveFailover(List> publishers, - Predicate failoverExceptionsPredicate, - String noAvailablePublishersMsg) { + Predicate failoverExceptionsPredicate, + String noAvailablePublishersMsg) { Preconditions.checkArgument(!publishers.isEmpty()); this.publishers = publishers; this.failoverExceptionsPredicate = failoverExceptionsPredicate; diff --git a/api/src/main/java/io/kafbat/ui/util/jsonschema/JsonAvroConversion.java b/api/src/main/java/io/kafbat/ui/util/jsonschema/JsonAvroConversion.java index 52b6913f6..de23d40cd 100644 --- a/api/src/main/java/io/kafbat/ui/util/jsonschema/JsonAvroConversion.java +++ b/api/src/main/java/io/kafbat/ui/util/jsonschema/JsonAvroConversion.java @@ -49,7 +49,7 @@ public class JsonAvroConversion { // converts json into Object that is expected input for KafkaAvroSerializer // (with AVRO_USE_LOGICAL_TYPE_CONVERTERS flat enabled!) 
public static Object convertJsonToAvro(String jsonString, Schema avroSchema) { - JsonNode rootNode = null; + JsonNode rootNode; try { rootNode = MAPPER.readTree(jsonString); } catch (JsonProcessingException e) { @@ -221,9 +221,7 @@ public static JsonNode convertAvroToJson(Object obj, Schema avroSchema) { list.forEach(e -> node.add(convertAvroToJson(e, avroSchema.getElementType()))); yield node; } - case ENUM -> { - yield new TextNode(obj.toString()); - } + case ENUM -> new TextNode(obj.toString()); case UNION -> { ObjectNode node = MAPPER.createObjectNode(); int unionIdx = AvroData.getGenericData().resolveUnion(avroSchema, obj); @@ -343,9 +341,7 @@ enum LogicalTypeConversion { assertJsonType(node, JsonNodeType.STRING); return java.util.UUID.fromString(node.asText()); }, - (obj, schema) -> { - return new TextNode(obj.toString()); - }, + (obj, schema) -> new TextNode(obj.toString()), new SimpleFieldSchema( new SimpleJsonType( JsonType.Type.STRING, @@ -363,9 +359,7 @@ enum LogicalTypeConversion { "node '%s' can't be converted to decimal logical type" .formatted(node)); }, - (obj, schema) -> { - return new DecimalNode((BigDecimal) obj); - }, + (obj, schema) -> new DecimalNode((BigDecimal) obj), new SimpleFieldSchema(new SimpleJsonType(JsonType.Type.NUMBER)) ), @@ -381,9 +375,7 @@ enum LogicalTypeConversion { .formatted(node)); } }, - (obj, schema) -> { - return new TextNode(obj.toString()); - }, + (obj, schema) -> new TextNode(obj.toString()), new SimpleFieldSchema( new SimpleJsonType( JsonType.Type.STRING, @@ -402,9 +394,7 @@ enum LogicalTypeConversion { .formatted(node)); } }, - (obj, schema) -> { - return new TextNode(obj.toString()); - }, + (obj, schema) -> new TextNode(obj.toString()), new SimpleFieldSchema( new SimpleJsonType( JsonType.Type.STRING, @@ -423,9 +413,7 @@ enum LogicalTypeConversion { .formatted(node)); } }, - (obj, schema) -> { - return new TextNode(obj.toString()); - }, + (obj, schema) -> new TextNode(obj.toString()), new SimpleFieldSchema( new SimpleJsonType( JsonType.Type.STRING, @@ -444,9 +432,7 @@ enum LogicalTypeConversion { .formatted(node)); } }, - (obj, schema) -> { - return new TextNode(obj.toString()); - }, + (obj, schema) -> new TextNode(obj.toString()), new SimpleFieldSchema( new SimpleJsonType( JsonType.Type.STRING, @@ -469,9 +455,7 @@ enum LogicalTypeConversion { .formatted(node)); } }, - (obj, schema) -> { - return new TextNode(obj.toString()); - }, + (obj, schema) -> new TextNode(obj.toString()), new SimpleFieldSchema( new SimpleJsonType( JsonType.Type.STRING, @@ -487,9 +471,7 @@ enum LogicalTypeConversion { Instant instant = (Instant) TIMESTAMP_MILLIS.jsonToAvroConversion.apply(node, schema); return LocalDateTime.ofInstant(instant, ZoneOffset.UTC); }, - (obj, schema) -> { - return new TextNode(obj.toString()); - }, + (obj, schema) -> new TextNode(obj.toString()), new SimpleFieldSchema( new SimpleJsonType( JsonType.Type.STRING, @@ -504,9 +486,7 @@ enum LogicalTypeConversion { Instant instant = (Instant) TIMESTAMP_MICROS.jsonToAvroConversion.apply(node, schema); return LocalDateTime.ofInstant(instant, ZoneOffset.UTC); }, - (obj, schema) -> { - return new TextNode(obj.toString()); - }, + (obj, schema) -> new TextNode(obj.toString()), new SimpleFieldSchema( new SimpleJsonType( JsonType.Type.STRING, diff --git a/api/src/main/java/io/kafbat/ui/util/jsonschema/JsonSchema.java b/api/src/main/java/io/kafbat/ui/util/jsonschema/JsonSchema.java index 090010dac..491acebc7 100644 --- a/api/src/main/java/io/kafbat/ui/util/jsonschema/JsonSchema.java +++ 
b/api/src/main/java/io/kafbat/ui/util/jsonschema/JsonSchema.java @@ -9,7 +9,6 @@ import java.util.stream.Collectors; import lombok.Builder; import lombok.Data; -import lombok.SneakyThrows; import reactor.util.function.Tuple2; import reactor.util.function.Tuples; @@ -59,12 +58,4 @@ public String toJson() { } return objectNode.toString(); } - - @SneakyThrows - public static JsonSchema stringSchema() { - return JsonSchema.builder() - .id(new URI("http://unknown.unknown")) - .type(new SimpleJsonType(JsonType.Type.STRING)) - .build(); - } } From 6a22b4a868d014bc8095b50498ba1d57e4bc646f Mon Sep 17 00:00:00 2001 From: Dmitry Werner Date: Tue, 11 Feb 2025 19:11:20 +0500 Subject: [PATCH 69/84] BE: RBAC: Add integration tests for AD auth (#726) --- .../ui/ActiveDirectoryIntegrationTest.java | 120 ++++++++++++++++++ .../container/ActiveDirectoryContainer.java | 79 ++++++++++++ .../test/resources/application-rbac-ad.yml | 23 ++++ 3 files changed, 222 insertions(+) create mode 100644 api/src/test/java/io/kafbat/ui/ActiveDirectoryIntegrationTest.java create mode 100644 api/src/test/java/io/kafbat/ui/container/ActiveDirectoryContainer.java create mode 100644 api/src/test/resources/application-rbac-ad.yml diff --git a/api/src/test/java/io/kafbat/ui/ActiveDirectoryIntegrationTest.java b/api/src/test/java/io/kafbat/ui/ActiveDirectoryIntegrationTest.java new file mode 100644 index 000000000..80c3abe33 --- /dev/null +++ b/api/src/test/java/io/kafbat/ui/ActiveDirectoryIntegrationTest.java @@ -0,0 +1,120 @@ +package io.kafbat.ui; + +import static io.kafbat.ui.AbstractIntegrationTest.LOCAL; +import static io.kafbat.ui.container.ActiveDirectoryContainer.DOMAIN; +import static io.kafbat.ui.container.ActiveDirectoryContainer.EMPTY_PERMISSIONS_USER; +import static io.kafbat.ui.container.ActiveDirectoryContainer.FIRST_USER_WITH_GROUP; +import static io.kafbat.ui.container.ActiveDirectoryContainer.PASSWORD; +import static io.kafbat.ui.container.ActiveDirectoryContainer.SECOND_USER_WITH_GROUP; +import static io.kafbat.ui.container.ActiveDirectoryContainer.USER_WITHOUT_GROUP; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import io.kafbat.ui.container.ActiveDirectoryContainer; +import io.kafbat.ui.model.AuthenticationInfoDTO; +import io.kafbat.ui.model.ResourceTypeDTO; +import io.kafbat.ui.model.UserPermissionDTO; +import java.util.List; +import java.util.Objects; +import org.jetbrains.annotations.NotNull; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.context.ApplicationContextInitializer; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.http.MediaType; +import org.springframework.test.context.ActiveProfiles; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.web.reactive.server.WebTestClient; +import org.springframework.web.reactive.function.BodyInserters; + +@SpringBootTest +@ActiveProfiles("rbac-ad") +@AutoConfigureWebTestClient(timeout = "60000") +@ContextConfiguration(initializers = 
{ActiveDirectoryIntegrationTest.Initializer.class}) +public class ActiveDirectoryIntegrationTest { + private static final String SESSION = "SESSION"; + + private static final ActiveDirectoryContainer ACTIVE_DIRECTORY = new ActiveDirectoryContainer(); + + @Autowired + private WebTestClient webTestClient; + + @BeforeAll + public static void setup() { + ACTIVE_DIRECTORY.start(); + } + + @AfterAll + public static void shutdown() { + ACTIVE_DIRECTORY.stop(); + } + + @Test + public void testUserPermissions() { + AuthenticationInfoDTO info = authenticationInfo(FIRST_USER_WITH_GROUP); + + assertNotNull(info); + assertTrue(info.getRbacEnabled()); + + List permissions = info.getUserInfo().getPermissions(); + + assertFalse(permissions.isEmpty()); + assertTrue(permissions.stream().anyMatch(permission -> + permission.getClusters().contains(LOCAL) && permission.getResource() == ResourceTypeDTO.TOPIC)); + assertEquals(permissions, authenticationInfo(SECOND_USER_WITH_GROUP).getUserInfo().getPermissions()); + assertEquals(permissions, authenticationInfo(USER_WITHOUT_GROUP).getUserInfo().getPermissions()); + } + + @Test + public void testEmptyPermissions() { + assertTrue(Objects.requireNonNull(authenticationInfo(EMPTY_PERMISSIONS_USER)) + .getUserInfo() + .getPermissions() + .isEmpty() + ); + } + + private String session(String name) { + return Objects.requireNonNull( + webTestClient + .post() + .uri("/login") + .contentType(MediaType.APPLICATION_FORM_URLENCODED) + .body(BodyInserters.fromFormData("username", name).with("password", PASSWORD)) + .exchange() + .expectStatus() + .isFound() + .returnResult(String.class) + .getResponseCookies() + .getFirst(SESSION)) + .getValue(); + } + + private AuthenticationInfoDTO authenticationInfo(String name) { + return webTestClient + .get() + .uri("/api/authorization") + .cookie(SESSION, session(name)) + .exchange() + .expectStatus() + .isOk() + .returnResult(AuthenticationInfoDTO.class) + .getResponseBody() + .blockFirst(); + } + + public static class Initializer implements ApplicationContextInitializer { + @Override + public void initialize(@NotNull ConfigurableApplicationContext context) { + System.setProperty("spring.ldap.urls", ACTIVE_DIRECTORY.getLdapUrl()); + System.setProperty("oauth2.ldap.activeDirectory", "true"); + System.setProperty("oauth2.ldap.activeDirectory.domain", DOMAIN); + } + } +} diff --git a/api/src/test/java/io/kafbat/ui/container/ActiveDirectoryContainer.java b/api/src/test/java/io/kafbat/ui/container/ActiveDirectoryContainer.java new file mode 100644 index 000000000..55bc3a186 --- /dev/null +++ b/api/src/test/java/io/kafbat/ui/container/ActiveDirectoryContainer.java @@ -0,0 +1,79 @@ +package io.kafbat.ui.container; + +import com.github.dockerjava.api.command.InspectContainerResponse; +import java.io.IOException; +import lombok.extern.slf4j.Slf4j; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.utility.DockerImageName; + +@Slf4j +public class ActiveDirectoryContainer extends GenericContainer { + public static final String DOMAIN = "corp.kafbat.io"; + public static final String PASSWORD = "StrongPassword123"; + public static final String FIRST_USER_WITH_GROUP = "JohnDoe"; + public static final String SECOND_USER_WITH_GROUP = "JohnWick"; + public static final String USER_WITHOUT_GROUP = "JackSmith"; + public static final String EMPTY_PERMISSIONS_USER = "JohnJames"; + + private static final String DOMAIN_DC = "dc=corp,dc=kafbat,dc=io"; + private static final String GROUP = "group"; + private static final String 
FIRST_GROUP = "firstGroup"; + private static final String SECOND_GROUP = "secondGroup"; + private static final String DOMAIN_EMAIL = "kafbat.io"; + private static final String SAMBA_TOOL = "samba-tool"; + private static final int LDAP_PORT = 389; + private static final DockerImageName IMAGE_NAME = DockerImageName.parse("nowsci/samba-domain:latest"); + + public ActiveDirectoryContainer() { + super(IMAGE_NAME); + + withExposedPorts(LDAP_PORT); + + withEnv("DOMAIN", DOMAIN); + withEnv("DOMAIN_DC", DOMAIN_DC); + withEnv("DOMAIN_EMAIL", DOMAIN_EMAIL); + withEnv("DOMAINPASS", PASSWORD); + withEnv("NOCOMPLEXITY", "true"); + withEnv("INSECURELDAP", "true"); + + withPrivilegedMode(true); + } + + protected void containerIsStarted(InspectContainerResponse containerInfo) { + createUser(EMPTY_PERMISSIONS_USER); + createUser(USER_WITHOUT_GROUP); + createUser(FIRST_USER_WITH_GROUP); + createUser(SECOND_USER_WITH_GROUP); + + exec(SAMBA_TOOL, GROUP, "add", FIRST_GROUP); + exec(SAMBA_TOOL, GROUP, "add", SECOND_GROUP); + exec(SAMBA_TOOL, GROUP, "addmembers", FIRST_GROUP, FIRST_USER_WITH_GROUP); + exec(SAMBA_TOOL, GROUP, "addmembers", SECOND_GROUP, SECOND_USER_WITH_GROUP); + } + + public String getLdapUrl() { + return String.format("ldap://%s:%s", getHost(), getMappedPort(LDAP_PORT)); + } + + private void createUser(String name) { + exec(SAMBA_TOOL, "user", "create", name, PASSWORD, "--mail-address", name + '@' + DOMAIN_EMAIL); + exec(SAMBA_TOOL, "user", "setexpiry", name, "--noexpiry"); + } + + private void exec(String... cmd) { + ExecResult result; + try { + result = execInContainer(cmd); + } catch (IOException | InterruptedException e) { + throw new RuntimeException(e); + } + + if (result.getStdout() != null && !result.getStdout().isEmpty()) { + log.info("Output: {}", result.getStdout()); + } + + if (result.getExitCode() != 0) { + throw new IllegalStateException(result.toString()); + } + } +} diff --git a/api/src/test/resources/application-rbac-ad.yml b/api/src/test/resources/application-rbac-ad.yml new file mode 100644 index 000000000..3b97d185f --- /dev/null +++ b/api/src/test/resources/application-rbac-ad.yml @@ -0,0 +1,23 @@ +auth: + type: LDAP +rbac: + roles: + - name: "roleName" + clusters: + - local + subjects: + - provider: ldap_ad + type: group + value: firstGroup + - provider: ldap_ad + type: group + value: secondGroup + - provider: ldap_ad + type: user + value: JackSmith + permissions: + - resource: applicationconfig + actions: all + - resource: topic + value: ".*" + actions: all From 2601a9a8461b12f2ab13873a4694be4b21be1d79 Mon Sep 17 00:00:00 2001 From: Dmitry Werner Date: Wed, 12 Feb 2025 12:29:51 +0500 Subject: [PATCH 70/84] BE: Chore: Remove unused classes (#838) --- .../condition/ActiveDirectoryCondition.java | 21 -------- .../io/kafbat/ui/model/BrokerMetrics.java | 11 ---- .../ui/model/InternalClusterMetrics.java | 54 ------------------- 3 files changed, 86 deletions(-) delete mode 100644 api/src/main/java/io/kafbat/ui/config/auth/condition/ActiveDirectoryCondition.java delete mode 100644 api/src/main/java/io/kafbat/ui/model/BrokerMetrics.java delete mode 100644 api/src/main/java/io/kafbat/ui/model/InternalClusterMetrics.java diff --git a/api/src/main/java/io/kafbat/ui/config/auth/condition/ActiveDirectoryCondition.java b/api/src/main/java/io/kafbat/ui/config/auth/condition/ActiveDirectoryCondition.java deleted file mode 100644 index 944eff0d3..000000000 --- a/api/src/main/java/io/kafbat/ui/config/auth/condition/ActiveDirectoryCondition.java +++ /dev/null @@ -1,21 +0,0 @@ -package 
io.kafbat.ui.config.auth.condition; - -import org.springframework.boot.autoconfigure.condition.AllNestedConditions; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; - -public class ActiveDirectoryCondition extends AllNestedConditions { - - public ActiveDirectoryCondition() { - super(ConfigurationPhase.PARSE_CONFIGURATION); - } - - @ConditionalOnProperty(value = "auth.type", havingValue = "LDAP") - public static class OnAuthType { - - } - - @ConditionalOnProperty(value = "${oauth2.ldap.activeDirectory}:false", havingValue = "true", matchIfMissing = false) - public static class OnActiveDirectory { - - } -} diff --git a/api/src/main/java/io/kafbat/ui/model/BrokerMetrics.java b/api/src/main/java/io/kafbat/ui/model/BrokerMetrics.java deleted file mode 100644 index dbd57c9c1..000000000 --- a/api/src/main/java/io/kafbat/ui/model/BrokerMetrics.java +++ /dev/null @@ -1,11 +0,0 @@ -package io.kafbat.ui.model; - -import java.util.List; -import lombok.Builder; -import lombok.Data; - -@Data -@Builder(toBuilder = true) -public class BrokerMetrics { - private final List metrics; -} diff --git a/api/src/main/java/io/kafbat/ui/model/InternalClusterMetrics.java b/api/src/main/java/io/kafbat/ui/model/InternalClusterMetrics.java deleted file mode 100644 index 6c04fadeb..000000000 --- a/api/src/main/java/io/kafbat/ui/model/InternalClusterMetrics.java +++ /dev/null @@ -1,54 +0,0 @@ -package io.kafbat.ui.model; - -import java.math.BigDecimal; -import java.util.List; -import java.util.Map; -import javax.annotation.Nullable; -import lombok.Builder; -import lombok.Data; - -@Data -@Builder(toBuilder = true) -public class InternalClusterMetrics { - - public static InternalClusterMetrics empty() { - return InternalClusterMetrics.builder() - .brokers(List.of()) - .topics(Map.of()) - .status(ServerStatusDTO.OFFLINE) - .internalBrokerMetrics(Map.of()) - .metrics(List.of()) - .version("unknown") - .build(); - } - - private final String version; - - private final ServerStatusDTO status; - private final Throwable lastKafkaException; - - private final int brokerCount; - private final int activeControllers; - private final List brokers; - - private final int topicCount; - private final Map topics; - - // partitions stats - private final int underReplicatedPartitionCount; - private final int onlinePartitionCount; - private final int offlinePartitionCount; - private final int inSyncReplicasCount; - private final int outOfSyncReplicasCount; - - // log dir stats - @Nullable // will be null if log dir collection disabled - private final Map internalBrokerDiskUsage; - - // metrics from metrics collector - private final BigDecimal bytesInPerSec; - private final BigDecimal bytesOutPerSec; - private final Map internalBrokerMetrics; - private final List metrics; - -} From a05709fe6241896dbf4af2879800e44572ce93be Mon Sep 17 00:00:00 2001 From: Renat Kalimulin <103274228+Nilumilak@users.noreply.github.com> Date: Wed, 12 Feb 2025 11:43:09 +0300 Subject: [PATCH 71/84] FE: Topics: Save field previews into local storage (#449) --- .../Topics/Topic/Messages/MessagesTable.tsx | 61 ++++++++++++++++--- .../Messages/__test__/MessagesTable.spec.tsx | 40 ++++++++++++ frontend/src/lib/hooks/useLocalStorage.ts | 9 ++- 3 files changed, 98 insertions(+), 12 deletions(-) diff --git a/frontend/src/components/Topics/Topic/Messages/MessagesTable.tsx b/frontend/src/components/Topics/Topic/Messages/MessagesTable.tsx index 813cabcfa..1751a0e56 100644 --- a/frontend/src/components/Topics/Topic/Messages/MessagesTable.tsx +++ 
b/frontend/src/components/Topics/Topic/Messages/MessagesTable.tsx @@ -2,31 +2,78 @@ import PageLoader from 'components/common/PageLoader/PageLoader'; import { Table } from 'components/common/table/Table/Table.styled'; import TableHeaderCell from 'components/common/table/TableHeaderCell/TableHeaderCell'; import { TopicMessage } from 'generated-sources'; -import React, { useState } from 'react'; +import React, { useCallback, useEffect, useState } from 'react'; import { Button } from 'components/common/Button/Button'; import * as S from 'components/common/NewTable/Table.styled'; import { usePaginateTopics, useIsLiveMode } from 'lib/hooks/useMessagesFilters'; import { useMessageFiltersStore } from 'lib/hooks/useMessageFiltersStore'; +import useAppParams from 'lib/hooks/useAppParams'; +import { RouteParamsClusterTopic } from 'lib/paths'; +import { useLocalStorage } from 'lib/hooks/useLocalStorage'; -import PreviewModal from './PreviewModal'; import Message, { PreviewFilter } from './Message'; +import PreviewModal from './PreviewModal'; export interface MessagesTableProps { messages: TopicMessage[]; isFetching: boolean; } +interface MessagePreviewProps { + [key: string]: { + keyFilters: PreviewFilter[]; + contentFilters: PreviewFilter[]; + }; +} + const MessagesTable: React.FC = ({ messages, isFetching, }) => { const paginate = usePaginateTopics(); - const [previewFor, setPreviewFor] = useState(null); - + const [previewFor, setPreviewFor] = useState<'key' | 'content' | null>(null); const [keyFilters, setKeyFilters] = useState([]); const [contentFilters, setContentFilters] = useState([]); const nextCursor = useMessageFiltersStore((state) => state.nextCursor); const isLive = useIsLiveMode(); + const { topicName } = useAppParams(); + const [messagesPreview, setMessagesPreview] = + useLocalStorage('message-preview', { + [topicName]: { + keyFilters: [], + contentFilters: [], + }, + }); + + useEffect(() => { + setKeyFilters(messagesPreview[topicName]?.keyFilters || []); + setContentFilters(messagesPreview[topicName]?.contentFilters || []); + }, []); + + const setFilters = useCallback( + (payload: PreviewFilter[]) => { + if (previewFor === 'key') { + setKeyFilters(payload); + setMessagesPreview({ + ...messagesPreview, + [topicName]: { + ...messagesPreview[topicName], + keyFilters: payload, + }, + }); + } else { + setContentFilters(payload); + setMessagesPreview({ + ...messagesPreview, + [topicName]: { + ...messagesPreview[topicName], + contentFilters: payload, + }, + }); + } + }, + [previewFor, messagesPreview, topicName] + ); return (

@@ -34,11 +81,7 @@ const MessagesTable: React.FC = ({ setPreviewFor(null)} - setFilters={(payload: PreviewFilter[]) => - previewFor === 'key' - ? setKeyFilters(payload) - : setContentFilters(payload) - } + setFilters={setFilters} /> )} diff --git a/frontend/src/components/Topics/Topic/Messages/__test__/MessagesTable.spec.tsx b/frontend/src/components/Topics/Topic/Messages/__test__/MessagesTable.spec.tsx index 808dde9e4..77f31cebc 100644 --- a/frontend/src/components/Topics/Topic/Messages/__test__/MessagesTable.spec.tsx +++ b/frontend/src/components/Topics/Topic/Messages/__test__/MessagesTable.spec.tsx @@ -7,6 +7,8 @@ import MessagesTable, { } from 'components/Topics/Topic/Messages/MessagesTable'; import { TopicMessage, TopicMessageTimestampTypeEnum } from 'generated-sources'; import { useIsLiveMode } from 'lib/hooks/useMessagesFilters'; +import useAppParams from 'lib/hooks/useAppParams'; +import { LOCAL_STORAGE_KEY_PREFIX } from 'lib/constants'; export const topicMessagePayload: TopicMessage = { partition: 29, @@ -33,8 +35,16 @@ jest.mock('lib/hooks/useMessagesFilters', () => ({ usePaginateTopics: jest.fn(), })); +jest.mock('lib/hooks/useAppParams', () => ({ + __esModule: true, + default: jest.fn(), +})); + describe('MessagesTable', () => { const renderComponent = (props?: Partial) => { + (useAppParams as jest.Mock).mockImplementation(() => ({ + topicName: 'testTopic', + })); return render( ); @@ -99,4 +109,34 @@ describe('MessagesTable', () => { } }); }); + + describe('should save messages preview into localstorage', () => { + beforeEach(() => { + renderComponent({ messages: mockTopicsMessages, isFetching: false }); + }); + + it('should save messages preview into localstorage', async () => { + const previewButtons = screen.getAllByText('Preview'); + await userEvent.click(previewButtons[0]); + await userEvent.type(screen.getByPlaceholderText('Field'), 'test1'); + await userEvent.type(screen.getByPlaceholderText('Json Path'), 'test2'); + await userEvent.click(screen.getByText('Save')); + await userEvent.click(previewButtons[1]); + await userEvent.type(screen.getByPlaceholderText('Field'), 'test3'); + await userEvent.type(screen.getByPlaceholderText('Json Path'), 'test4'); + await userEvent.click(screen.getByText('Save')); + expect( + global.localStorage.getItem( + `${LOCAL_STORAGE_KEY_PREFIX}-message-preview` + ) + ).toEqual( + JSON.stringify({ + testTopic: { + keyFilters: [{ field: 'test1', path: 'test2' }], + contentFilters: [{ field: 'test3', path: 'test4' }], + }, + }) + ); + }); + }); }); diff --git a/frontend/src/lib/hooks/useLocalStorage.ts b/frontend/src/lib/hooks/useLocalStorage.ts index d8945620d..65215fd2c 100644 --- a/frontend/src/lib/hooks/useLocalStorage.ts +++ b/frontend/src/lib/hooks/useLocalStorage.ts @@ -1,9 +1,12 @@ import { LOCAL_STORAGE_KEY_PREFIX } from 'lib/constants'; -import { useState, useEffect } from 'react'; +import { useState, useEffect, Dispatch, SetStateAction } from 'react'; -export const useLocalStorage = (featureKey: string, defaultValue: string) => { +export const useLocalStorage = ( + featureKey: string, + defaultValue: T +): [T, Dispatch>] => { const key = `${LOCAL_STORAGE_KEY_PREFIX}-${featureKey}`; - const [value, setValue] = useState(() => { + const [value, setValue] = useState(() => { const saved = localStorage.getItem(key); if (saved !== null) { From b689446c85afd9a1b9e669620e7a964bd20981c3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fran=C3=A7ois?= Date: Wed, 12 Feb 2025 13:18:57 +0100 Subject: [PATCH 72/84] Issues/300: remove useless null check & 
add test scenarios --- .../extractor/CognitoAuthorityExtractor.java | 2 +- ...exBasedProviderAuthorityExtractorTest.java | 35 +++++++++++++++++++ api/src/test/resources/roles_definition.yaml | 14 ++++++++ 3 files changed, 50 insertions(+), 1 deletion(-) diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/CognitoAuthorityExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/CognitoAuthorityExtractor.java index e75666d83..23354b86c 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/CognitoAuthorityExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/CognitoAuthorityExtractor.java @@ -50,7 +50,7 @@ private Set extractUsernameRoles(AccessControlService acs, DefaultOAuth2 .stream() .filter(s -> s.getProvider().equals(Provider.OAUTH_COGNITO)) .filter(s -> s.getType().equals("user")) - .anyMatch(s -> principal.getName() != null && principal.getName().matches(s.getValue()))) + .anyMatch(s -> principal.getName().matches(s.getValue()))) .map(Role::getName) .collect(Collectors.toSet()); diff --git a/api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java b/api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java index 7eb8c8bf1..4623296e7 100644 --- a/api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java +++ b/api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java @@ -1,6 +1,9 @@ package io.kafbat.ui.config; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.when; import static org.springframework.security.oauth2.client.registration.ClientRegistration.withRegistrationId; @@ -71,7 +74,33 @@ void extractOauth2Authorities() { Set roles = extractor.extract(accessControlService, oauth2User, additionalParams).block(); + assertNotNull(roles); assertEquals(Set.of("viewer", "admin"), roles); + assertFalse(roles.contains("no one's role")); + + } + + @SneakyThrows + @Test() + void extractOauth2Authorities_blankEmail() { + + extractor = new OauthAuthorityExtractor(); + + OAuth2User oauth2User = new DefaultOAuth2User( + AuthorityUtils.createAuthorityList("SCOPE_message:read"), + Map.of("role_definition", Set.of("ROLE-ADMIN", "ANOTHER-ROLE"), "user_name", ""), + "user_name"); + + HashMap additionalParams = new HashMap<>(); + OAuthProperties.OAuth2Provider provider = new OAuthProperties.OAuth2Provider(); + provider.setCustomParams(Map.of("roles-field", "role_definition")); + additionalParams.put("provider", provider); + + Set roles = extractor.extract(accessControlService, oauth2User, additionalParams).block(); + + assertNotNull(roles); + assertFalse(roles.contains("viewer")); + assertTrue(roles.contains("admin")); } @@ -94,7 +123,9 @@ void extractCognitoAuthorities() { Set roles = extractor.extract(accessControlService, oauth2User, additionalParams).block(); + assertNotNull(roles); assertEquals(Set.of("viewer", "admin"), roles); + assertFalse(roles.contains("no one's role")); } @@ -129,7 +160,9 @@ void extractGithubAuthorities() { Set roles = extractor.extract(accessControlService, oauth2User, additionalParams).block(); + assertNotNull(roles); assertEquals(Set.of("viewer"), roles); + assertFalse(roles.contains("no one's role")); } @@ -152,7 +185,9 @@ void extractGoogleAuthorities() { Set roles = 
extractor.extract(accessControlService, oauth2User, additionalParams).block(); + assertNotNull(roles); assertEquals(Set.of("viewer", "admin"), roles); + assertFalse(roles.contains("no one's role")); } diff --git a/api/src/test/resources/roles_definition.yaml b/api/src/test/resources/roles_definition.yaml index 25e22b8a1..e1b370b5d 100644 --- a/api/src/test/resources/roles_definition.yaml +++ b/api/src/test/resources/roles_definition.yaml @@ -47,3 +47,17 @@ permissions: - resource: APPLICATIONCONFIG actions: [ all ] +- name: "no one's role" + subjects: + - provider: 'OAUTH' + value: '.*XXX' + type: 'role' + - provider: 'OAUTH_GITHUB' + value: '.*XXX' + type: 'user' + - provider: 'OAUTH_COGNITO' + value: '.*XXX' + type: 'user' + - provider: 'OAUTH_GOOGLE' + value: '.*XXX' + type: 'domain' From 708b463363cb5a711dff90fabb1d6c8ff31b8912 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fran=C3=A7ois?= Date: Wed, 19 Feb 2025 09:35:21 +0100 Subject: [PATCH 73/84] Issues/300: Remove useless null check --- .../ui/service/rbac/extractor/OauthAuthorityExtractor.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java index 8812301a1..0d556bc1a 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java @@ -59,8 +59,8 @@ private Set extractUsernameRoles(AccessControlService acs, DefaultOAuth2 .filter(s -> s.getProvider().equals(Provider.OAUTH)) .filter(s -> s.getType().equals("user")) .peek(s -> log.trace("[{}] matches [{}]? [{}]", s.getValue(), principalName, - principalName != null && principalName.matches(s.getValue()))) - .anyMatch(s -> principalName != null && principalName.matches(s.getValue()))) + principalName.matches(s.getValue()))) + .anyMatch(s -> principalName.matches(s.getValue()))) .map(Role::getName) .collect(Collectors.toSet()); From 49894b886dc97b67d49247dee26d3fe39bf262f3 Mon Sep 17 00:00:00 2001 From: Dmitry Werner Date: Wed, 19 Feb 2025 14:54:14 +0500 Subject: [PATCH 74/84] BE: Auth: Support LDAPS for AD (#840) --- api/pom.xml | 13 +++ .../ui/config/auth/LdapSecurityConfig.java | 31 ++++- .../ui/util/CustomSslSocketFactory.java | 69 +++++++++++ ...stractActiveDirectoryIntegrationTest.java} | 61 ++-------- .../io/kafbat/ui/ActiveDirectoryLdapTest.java | 51 ++++++++ .../kafbat/ui/ActiveDirectoryLdapsTest.java | 110 ++++++++++++++++++ .../container/ActiveDirectoryContainer.java | 44 ++++++- 7 files changed, 320 insertions(+), 59 deletions(-) create mode 100644 api/src/main/java/io/kafbat/ui/util/CustomSslSocketFactory.java rename api/src/test/java/io/kafbat/ui/{ActiveDirectoryIntegrationTest.java => AbstractActiveDirectoryIntegrationTest.java} (55%) create mode 100644 api/src/test/java/io/kafbat/ui/ActiveDirectoryLdapTest.java create mode 100644 api/src/test/java/io/kafbat/ui/ActiveDirectoryLdapsTest.java diff --git a/api/pom.xml b/api/pom.xml index dd2c3a378..fac019ab5 100644 --- a/api/pom.xml +++ b/api/pom.xml @@ -212,6 +212,19 @@ ${okhttp3.mockwebserver.version} test + + org.apache.kafka + kafka-clients + ${confluent.version}-ccs + test + test + + + org.bouncycastle + bcpkix-jdk18on + 1.80 + test + org.springframework.boot diff --git a/api/src/main/java/io/kafbat/ui/config/auth/LdapSecurityConfig.java b/api/src/main/java/io/kafbat/ui/config/auth/LdapSecurityConfig.java index 
4b7473942..4267a4b0e 100644 --- a/api/src/main/java/io/kafbat/ui/config/auth/LdapSecurityConfig.java +++ b/api/src/main/java/io/kafbat/ui/config/auth/LdapSecurityConfig.java @@ -3,10 +3,13 @@ import io.kafbat.ui.service.rbac.AccessControlService; import io.kafbat.ui.service.rbac.extractor.RbacActiveDirectoryAuthoritiesExtractor; import io.kafbat.ui.service.rbac.extractor.RbacLdapAuthoritiesExtractor; +import io.kafbat.ui.util.CustomSslSocketFactory; import io.kafbat.ui.util.StaticFileWebFilter; import java.util.Collection; import java.util.List; +import java.util.Map; import java.util.Optional; +import java.util.stream.Stream; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; @@ -47,6 +50,9 @@ @RequiredArgsConstructor @Slf4j public class LdapSecurityConfig extends AbstractAuthSecurityConfig { + private static final Map BASE_ENV_PROPS = Map.of( + "java.naming.ldap.factory.socket", CustomSslSocketFactory.class.getName() + ); private final LdapProperties props; @@ -63,13 +69,10 @@ public AbstractLdapAuthenticationProvider authenticationProvider(LdapAuthorities AbstractLdapAuthenticationProvider authProvider; - if (!props.isActiveDirectory()) { - authProvider = new LdapAuthenticationProvider(ba, authoritiesExtractor); + if (props.isActiveDirectory()) { + authProvider = activeDirectoryProvider(authoritiesExtractor); } else { - authProvider = new ActiveDirectoryLdapAuthenticationProvider(props.getActiveDirectoryDomain(), - props.getUrls()); - authProvider.setUseAuthenticationRequestCredentials(true); - ((ActiveDirectoryLdapAuthenticationProvider) authProvider).setAuthoritiesPopulator(authoritiesExtractor); + authProvider = new LdapAuthenticationProvider(ba, authoritiesExtractor); } if (rbacEnabled) { @@ -159,6 +162,22 @@ public SecurityWebFilterChain configureLdap(ServerHttpSecurity http) { return builder.build(); } + private ActiveDirectoryLdapAuthenticationProvider activeDirectoryProvider(LdapAuthoritiesPopulator populator) { + ActiveDirectoryLdapAuthenticationProvider provider = new ActiveDirectoryLdapAuthenticationProvider( + props.getActiveDirectoryDomain(), + props.getUrls() + ); + + provider.setUseAuthenticationRequestCredentials(true); + provider.setAuthoritiesPopulator(populator); + + if (Stream.of(props.getUrls().split(",")).anyMatch(url -> url.startsWith("ldaps://"))) { + provider.setContextEnvironmentProperties(BASE_ENV_PROPS); + } + + return provider; + } + private static class RbacUserDetailsMapper extends LdapUserDetailsMapper { @Override public UserDetails mapUserFromContext(DirContextOperations ctx, String username, diff --git a/api/src/main/java/io/kafbat/ui/util/CustomSslSocketFactory.java b/api/src/main/java/io/kafbat/ui/util/CustomSslSocketFactory.java new file mode 100644 index 000000000..b5fee8cae --- /dev/null +++ b/api/src/main/java/io/kafbat/ui/util/CustomSslSocketFactory.java @@ -0,0 +1,69 @@ +package io.kafbat.ui.util; + +import io.netty.handler.ssl.util.InsecureTrustManagerFactory; +import java.io.IOException; +import java.net.InetAddress; +import java.net.Socket; +import java.security.SecureRandom; +import javax.net.SocketFactory; +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLSocketFactory; + +public class CustomSslSocketFactory extends SSLSocketFactory { + private final SSLSocketFactory socketFactory; + + public CustomSslSocketFactory() { + try { + SSLContext sslContext = SSLContext.getInstance("TLS"); + sslContext.init(null, 
InsecureTrustManagerFactory.INSTANCE.getTrustManagers(), new SecureRandom()); + + socketFactory = sslContext.getSocketFactory(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + public static SocketFactory getDefault() { + return new CustomSslSocketFactory(); + } + + @Override + public String[] getDefaultCipherSuites() { + return socketFactory.getDefaultCipherSuites(); + } + + @Override + public String[] getSupportedCipherSuites() { + return socketFactory.getSupportedCipherSuites(); + } + + @Override + public Socket createSocket(Socket socket, String string, int i, boolean bln) throws IOException { + return socketFactory.createSocket(socket, string, i, bln); + } + + @Override + public Socket createSocket(String string, int i) throws IOException { + return socketFactory.createSocket(string, i); + } + + @Override + public Socket createSocket(String string, int i, InetAddress ia, int i1) throws IOException { + return socketFactory.createSocket(string, i, ia, i1); + } + + @Override + public Socket createSocket(InetAddress ia, int i) throws IOException { + return socketFactory.createSocket(ia, i); + } + + @Override + public Socket createSocket(InetAddress ia, int i, InetAddress ia1, int i1) throws IOException { + return socketFactory.createSocket(ia, i, ia1, i1); + } + + @Override + public Socket createSocket() throws IOException { + return socketFactory.createSocket(); + } +} diff --git a/api/src/test/java/io/kafbat/ui/ActiveDirectoryIntegrationTest.java b/api/src/test/java/io/kafbat/ui/AbstractActiveDirectoryIntegrationTest.java similarity index 55% rename from api/src/test/java/io/kafbat/ui/ActiveDirectoryIntegrationTest.java rename to api/src/test/java/io/kafbat/ui/AbstractActiveDirectoryIntegrationTest.java index 80c3abe33..098da29f9 100644 --- a/api/src/test/java/io/kafbat/ui/ActiveDirectoryIntegrationTest.java +++ b/api/src/test/java/io/kafbat/ui/AbstractActiveDirectoryIntegrationTest.java @@ -1,7 +1,6 @@ package io.kafbat.ui; import static io.kafbat.ui.AbstractIntegrationTest.LOCAL; -import static io.kafbat.ui.container.ActiveDirectoryContainer.DOMAIN; import static io.kafbat.ui.container.ActiveDirectoryContainer.EMPTY_PERMISSIONS_USER; import static io.kafbat.ui.container.ActiveDirectoryContainer.FIRST_USER_WITH_GROUP; import static io.kafbat.ui.container.ActiveDirectoryContainer.PASSWORD; @@ -12,52 +11,26 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; -import io.kafbat.ui.container.ActiveDirectoryContainer; import io.kafbat.ui.model.AuthenticationInfoDTO; import io.kafbat.ui.model.ResourceTypeDTO; import io.kafbat.ui.model.UserPermissionDTO; import java.util.List; import java.util.Objects; -import org.jetbrains.annotations.NotNull; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; -import org.junit.jupiter.api.Test; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient; import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.context.ApplicationContextInitializer; -import org.springframework.context.ConfigurableApplicationContext; import org.springframework.http.MediaType; import org.springframework.test.context.ActiveProfiles; -import org.springframework.test.context.ContextConfiguration; import org.springframework.test.web.reactive.server.WebTestClient; import org.springframework.web.reactive.function.BodyInserters; 
@SpringBootTest @ActiveProfiles("rbac-ad") @AutoConfigureWebTestClient(timeout = "60000") -@ContextConfiguration(initializers = {ActiveDirectoryIntegrationTest.Initializer.class}) -public class ActiveDirectoryIntegrationTest { +public abstract class AbstractActiveDirectoryIntegrationTest { private static final String SESSION = "SESSION"; - private static final ActiveDirectoryContainer ACTIVE_DIRECTORY = new ActiveDirectoryContainer(); - - @Autowired - private WebTestClient webTestClient; - - @BeforeAll - public static void setup() { - ACTIVE_DIRECTORY.start(); - } - - @AfterAll - public static void shutdown() { - ACTIVE_DIRECTORY.stop(); - } - - @Test - public void testUserPermissions() { - AuthenticationInfoDTO info = authenticationInfo(FIRST_USER_WITH_GROUP); + protected static void checkUserPermissions(WebTestClient client) { + AuthenticationInfoDTO info = authenticationInfo(client, FIRST_USER_WITH_GROUP); assertNotNull(info); assertTrue(info.getRbacEnabled()); @@ -67,22 +40,21 @@ public void testUserPermissions() { assertFalse(permissions.isEmpty()); assertTrue(permissions.stream().anyMatch(permission -> permission.getClusters().contains(LOCAL) && permission.getResource() == ResourceTypeDTO.TOPIC)); - assertEquals(permissions, authenticationInfo(SECOND_USER_WITH_GROUP).getUserInfo().getPermissions()); - assertEquals(permissions, authenticationInfo(USER_WITHOUT_GROUP).getUserInfo().getPermissions()); + assertEquals(permissions, authenticationInfo(client, SECOND_USER_WITH_GROUP).getUserInfo().getPermissions()); + assertEquals(permissions, authenticationInfo(client, USER_WITHOUT_GROUP).getUserInfo().getPermissions()); } - @Test - public void testEmptyPermissions() { - assertTrue(Objects.requireNonNull(authenticationInfo(EMPTY_PERMISSIONS_USER)) + protected static void checkEmptyPermissions(WebTestClient client) { + assertTrue(Objects.requireNonNull(authenticationInfo(client, EMPTY_PERMISSIONS_USER)) .getUserInfo() .getPermissions() .isEmpty() ); } - private String session(String name) { + protected static String session(WebTestClient client, String name) { return Objects.requireNonNull( - webTestClient + client .post() .uri("/login") .contentType(MediaType.APPLICATION_FORM_URLENCODED) @@ -96,11 +68,11 @@ private String session(String name) { .getValue(); } - private AuthenticationInfoDTO authenticationInfo(String name) { - return webTestClient + protected static AuthenticationInfoDTO authenticationInfo(WebTestClient client, String name) { + return client .get() .uri("/api/authorization") - .cookie(SESSION, session(name)) + .cookie(SESSION, session(client, name)) .exchange() .expectStatus() .isOk() @@ -108,13 +80,4 @@ private AuthenticationInfoDTO authenticationInfo(String name) { .getResponseBody() .blockFirst(); } - - public static class Initializer implements ApplicationContextInitializer { - @Override - public void initialize(@NotNull ConfigurableApplicationContext context) { - System.setProperty("spring.ldap.urls", ACTIVE_DIRECTORY.getLdapUrl()); - System.setProperty("oauth2.ldap.activeDirectory", "true"); - System.setProperty("oauth2.ldap.activeDirectory.domain", DOMAIN); - } - } } diff --git a/api/src/test/java/io/kafbat/ui/ActiveDirectoryLdapTest.java b/api/src/test/java/io/kafbat/ui/ActiveDirectoryLdapTest.java new file mode 100644 index 000000000..4d32513f7 --- /dev/null +++ b/api/src/test/java/io/kafbat/ui/ActiveDirectoryLdapTest.java @@ -0,0 +1,51 @@ +package io.kafbat.ui; + +import static io.kafbat.ui.container.ActiveDirectoryContainer.DOMAIN; + +import 
io.kafbat.ui.container.ActiveDirectoryContainer; +import org.jetbrains.annotations.NotNull; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContextInitializer; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.web.reactive.server.WebTestClient; + +@ContextConfiguration(initializers = {ActiveDirectoryLdapTest.Initializer.class}) +public class ActiveDirectoryLdapTest extends AbstractActiveDirectoryIntegrationTest { + private static final ActiveDirectoryContainer ACTIVE_DIRECTORY = new ActiveDirectoryContainer(false); + + @Autowired + private WebTestClient webTestClient; + + @BeforeAll + public static void setup() { + ACTIVE_DIRECTORY.start(); + } + + @AfterAll + public static void shutdown() { + ACTIVE_DIRECTORY.stop(); + } + + @Test + public void testUserPermissions() { + checkUserPermissions(webTestClient); + } + + @Test + public void testEmptyPermissions() { + checkEmptyPermissions(webTestClient); + } + + public static class Initializer implements ApplicationContextInitializer { + @Override + public void initialize(@NotNull ConfigurableApplicationContext context) { + System.setProperty("spring.ldap.urls", ACTIVE_DIRECTORY.getLdapUrl()); + System.setProperty("oauth2.ldap.activeDirectory", "true"); + System.setProperty("oauth2.ldap.activeDirectory.domain", DOMAIN); + } + } +} diff --git a/api/src/test/java/io/kafbat/ui/ActiveDirectoryLdapsTest.java b/api/src/test/java/io/kafbat/ui/ActiveDirectoryLdapsTest.java new file mode 100644 index 000000000..86d46961d --- /dev/null +++ b/api/src/test/java/io/kafbat/ui/ActiveDirectoryLdapsTest.java @@ -0,0 +1,110 @@ +package io.kafbat.ui; + +import static io.kafbat.ui.container.ActiveDirectoryContainer.CONTAINER_CERT_PATH; +import static io.kafbat.ui.container.ActiveDirectoryContainer.CONTAINER_KEY_PATH; +import static io.kafbat.ui.container.ActiveDirectoryContainer.DOMAIN; +import static io.kafbat.ui.container.ActiveDirectoryContainer.PASSWORD; +import static java.nio.file.Files.writeString; +import static org.testcontainers.utility.MountableFile.forHostPath; + +import io.kafbat.ui.container.ActiveDirectoryContainer; +import java.io.File; +import java.io.StringWriter; +import java.net.InetAddress; +import java.security.KeyPair; +import java.security.PrivateKey; +import java.security.cert.X509Certificate; +import java.util.Map; +import org.apache.kafka.common.config.types.Password; +import org.apache.kafka.test.TestSslUtils; +import org.jetbrains.annotations.NotNull; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.ApplicationContextInitializer; +import org.springframework.context.ConfigurableApplicationContext; +import org.springframework.test.context.ContextConfiguration; +import org.springframework.test.web.reactive.server.WebTestClient; +import org.testcontainers.shaded.org.bouncycastle.openssl.jcajce.JcaMiscPEMGenerator; +import org.testcontainers.shaded.org.bouncycastle.openssl.jcajce.JcaPKCS8Generator; +import org.testcontainers.shaded.org.bouncycastle.util.io.pem.PemWriter; + +@ContextConfiguration(initializers = {ActiveDirectoryLdapsTest.Initializer.class}) +public class 
ActiveDirectoryLdapsTest extends AbstractActiveDirectoryIntegrationTest { + private static final ActiveDirectoryContainer ACTIVE_DIRECTORY = new ActiveDirectoryContainer(true); + + private static File certPem = null; + private static File privateKeyPem = null; + + @Autowired + private WebTestClient webTestClient; + + @BeforeAll + public static void setup() throws Exception { + generateCerts(); + + ACTIVE_DIRECTORY.withCopyFileToContainer(forHostPath(certPem.getAbsolutePath()), CONTAINER_CERT_PATH); + ACTIVE_DIRECTORY.withCopyFileToContainer(forHostPath(privateKeyPem.getAbsolutePath()), CONTAINER_KEY_PATH); + + ACTIVE_DIRECTORY.start(); + } + + @AfterAll + public static void shutdown() { + ACTIVE_DIRECTORY.stop(); + } + + @Test + public void testUserPermissions() { + checkUserPermissions(webTestClient); + } + + @Test + public void testEmptyPermissions() { + checkEmptyPermissions(webTestClient); + } + + private static void generateCerts() throws Exception { + File truststore = File.createTempFile("truststore", ".jks"); + + truststore.deleteOnExit(); + + String host = "localhost"; + KeyPair clientKeyPair = TestSslUtils.generateKeyPair("RSA"); + + X509Certificate clientCert = new TestSslUtils.CertificateBuilder(365, "SHA256withRSA") + .sanDnsNames(host) + .sanIpAddress(InetAddress.getByName(host)) + .generate("O=Samba Administration, OU=Samba, CN=" + host, clientKeyPair); + + TestSslUtils.createTrustStore(truststore.getPath(), new Password(PASSWORD), Map.of("client", clientCert)); + + certPem = File.createTempFile("cert", ".pem"); + writeString(certPem.toPath(), certOrKeyToString(clientCert)); + + privateKeyPem = File.createTempFile("key", ".pem"); + writeString(privateKeyPem.toPath(), certOrKeyToString(clientKeyPair.getPrivate())); + } + + private static String certOrKeyToString(Object certOrKey) throws Exception { + StringWriter sw = new StringWriter(); + try (PemWriter pw = new PemWriter(sw)) { + if (certOrKey instanceof X509Certificate) { + pw.writeObject(new JcaMiscPEMGenerator(certOrKey)); + } else { + pw.writeObject(new JcaPKCS8Generator((PrivateKey) certOrKey, null)); + } + } + return sw.toString(); + } + + public static class Initializer implements ApplicationContextInitializer { + @Override + public void initialize(@NotNull ConfigurableApplicationContext context) { + System.setProperty("spring.ldap.urls", ACTIVE_DIRECTORY.getLdapUrl()); + System.setProperty("oauth2.ldap.activeDirectory", "true"); + System.setProperty("oauth2.ldap.activeDirectory.domain", DOMAIN); + } + } +} diff --git a/api/src/test/java/io/kafbat/ui/container/ActiveDirectoryContainer.java b/api/src/test/java/io/kafbat/ui/container/ActiveDirectoryContainer.java index 55bc3a186..af1f42a68 100644 --- a/api/src/test/java/io/kafbat/ui/container/ActiveDirectoryContainer.java +++ b/api/src/test/java/io/kafbat/ui/container/ActiveDirectoryContainer.java @@ -14,6 +14,8 @@ public class ActiveDirectoryContainer extends GenericContainer Date: Mon, 24 Feb 2025 14:32:36 -0500 Subject: [PATCH 75/84] Create Kafka Admin Client outside of the Parallel scheduler thread pool (#784) --- .../java/io/kafbat/ui/service/AdminClientServiceImpl.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java b/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java index 6c018ba31..68f461e7e 100644 --- a/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java +++ b/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java @@ -8,6 +8,7 
@@ import java.util.Map; import java.util.Optional; import java.util.Properties; +import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; import lombok.extern.slf4j.Slf4j; @@ -40,7 +41,7 @@ public Mono get(KafkaCluster cluster) { } private Mono createAdminClient(KafkaCluster cluster) { - return Mono.fromSupplier(() -> { + return Mono.fromFuture(CompletableFuture.supplyAsync(() -> { Properties properties = new Properties(); KafkaClientSslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), properties); properties.putAll(cluster.getProperties()); @@ -51,7 +52,7 @@ private Mono createAdminClient(KafkaCluster cluster) { "kafbat-ui-admin-" + Instant.now().getEpochSecond() + "-" + CLIENT_ID_SEQ.incrementAndGet() ); return AdminClient.create(properties); - }).flatMap(ac -> ReactiveAdminClient.create(ac).doOnError(th -> ac.close())) + })).flatMap(ac -> ReactiveAdminClient.create(ac).doOnError(th -> ac.close())) .onErrorMap(th -> new IllegalStateException( "Error while creating AdminClient for the cluster " + cluster.getName(), th)); } From 01c6b694fea4dbab83ccfd5a128a8f455d023f48 Mon Sep 17 00:00:00 2001 From: Roman Zabaluev Date: Mon, 24 Feb 2025 23:46:36 +0400 Subject: [PATCH 76/84] Docs: Get back AMI link (#859) --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index d6206100a..fd01edf39 100644 --- a/README.md +++ b/README.md @@ -18,6 +18,7 @@ Versatile, fast and lightweight web UI for managing Apache Kafka® clusters. Quick StartCommunity
+ AWS Marketplace
 ProductHunt

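The AdminClient change in PATCH 75/84 above takes the blocking AdminClient.create(...) call off the caller's thread by deferring it to a separate pool. A minimal, self-contained sketch of that general pattern follows, assuming only reactor-core on the classpath; HypotheticalClient and the property value are invented stand-ins, and boundedElastic is simply one scheduler suited to blocking work, not necessarily the exact mechanism the patch uses.

import java.time.Duration;
import java.util.Properties;
import reactor.core.publisher.Mono;
import reactor.core.scheduler.Schedulers;

public class BlockingClientCreationSketch {

  // Stand-in for a blocking factory such as AdminClient.create(properties);
  // the class name and the sleep are illustrative only.
  static final class HypotheticalClient {
    HypotheticalClient(Properties props) {
      try {
        Thread.sleep(200); // simulate DNS/socket work done during construction
      } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
      }
    }
  }

  public static void main(String[] args) {
    Properties props = new Properties();
    props.put("bootstrap.servers", "localhost:9092"); // placeholder value

    // Defer the blocking construction and run it on a scheduler intended for
    // blocking work, so the caller's reactive threads are never tied up.
    Mono<HypotheticalClient> clientMono =
        Mono.fromSupplier(() -> new HypotheticalClient(props))
            .subscribeOn(Schedulers.boundedElastic());

    HypotheticalClient client = clientMono.block(Duration.ofSeconds(5));
    System.out.println("created: " + client);
  }
}

The same shape applies to any expensive blocking factory: assembly stays cheap, and the scheduler chosen via subscribeOn decides which thread performs the blocking construction once the Mono is subscribed.
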
From 07b59049aefb096348d84c5517bb09cd81c5827e Mon Sep 17 00:00:00 2001 From: Roman Zabaluev Date: Tue, 25 Feb 2025 00:00:12 +0400 Subject: [PATCH 77/84] BE: SR: Fix HTTP 400 with slashes in schema name (#849) --- .../kafbat/ui/config/GeneralSecurityConfig.java | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 api/src/main/java/io/kafbat/ui/config/GeneralSecurityConfig.java diff --git a/api/src/main/java/io/kafbat/ui/config/GeneralSecurityConfig.java b/api/src/main/java/io/kafbat/ui/config/GeneralSecurityConfig.java new file mode 100644 index 000000000..75ef456e5 --- /dev/null +++ b/api/src/main/java/io/kafbat/ui/config/GeneralSecurityConfig.java @@ -0,0 +1,17 @@ +package io.kafbat.ui.config; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.security.web.server.firewall.StrictServerWebExchangeFirewall; + +@Configuration +public class GeneralSecurityConfig { + + @Bean + public StrictServerWebExchangeFirewall strictServerWebExchangeFirewall() { + StrictServerWebExchangeFirewall firewall = new StrictServerWebExchangeFirewall(); + firewall.setAllowUrlEncodedSlash(true); + return firewall; + } + +} From 1682872a1807f25c4b26497aae399a8240cd11b0 Mon Sep 17 00:00:00 2001 From: Yeikel Date: Tue, 25 Feb 2025 09:40:48 -0500 Subject: [PATCH 78/84] BE: overwrite json-smart (#851) --- pom.xml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pom.xml b/pom.xml index f3556c3e8..064e17f13 100644 --- a/pom.xml +++ b/pom.xml @@ -143,6 +143,11 @@ cel ${dev.cel.version}
+ + net.minidev + json-smart + 2.5.2 + com.google.guava guava From 57d1937fabb5073dc804d2d108862c289dff8682 Mon Sep 17 00:00:00 2001 From: Callaert Anthony Date: Thu, 27 Feb 2025 18:39:12 +0100 Subject: [PATCH 79/84] feat: introduce regex option --- .../java/io/kafbat/ui/model/rbac/Subject.java | 21 ++++++++++++------- .../extractor/CognitoAuthorityExtractor.java | 4 ++-- .../extractor/GithubAuthorityExtractor.java | 6 +++--- .../extractor/GoogleAuthorityExtractor.java | 4 ++-- .../extractor/OauthAuthorityExtractor.java | 6 ++---- ...acActiveDirectoryAuthoritiesExtractor.java | 4 ++-- .../RbacLdapAuthoritiesExtractor.java | 4 ++-- ...exBasedProviderAuthorityExtractorTest.java | 2 +- api/src/test/resources/roles_definition.yaml | 10 ++++++--- 9 files changed, 34 insertions(+), 27 deletions(-) diff --git a/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java b/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java index d653c1d0b..030e6ac7a 100644 --- a/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java +++ b/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java @@ -4,27 +4,25 @@ import static com.google.common.base.Preconditions.checkNotNull; import io.kafbat.ui.model.rbac.provider.Provider; +import java.util.Objects; import lombok.Getter; +import lombok.Setter; @Getter public class Subject { Provider provider; + @Setter String type; + @Setter String value; + @Setter + boolean isRegex; public void setProvider(String provider) { this.provider = Provider.fromString(provider.toUpperCase()); } - public void setType(String type) { - this.type = type; - } - - public void setValue(String value) { - this.value = value; - } - public void validate() { checkNotNull(type, "Subject type cannot be null"); checkNotNull(value, "Subject value cannot be null"); @@ -32,4 +30,11 @@ public void validate() { checkArgument(!type.isEmpty(), "Subject type cannot be empty"); checkArgument(!value.isEmpty(), "Subject value cannot be empty"); } + + public boolean matches(final String attribute) { + if (isRegex()) { + return Objects.nonNull(attribute) && attribute.matches(getValue()); + } + return getValue().equalsIgnoreCase(attribute); + } } diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/CognitoAuthorityExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/CognitoAuthorityExtractor.java index 23354b86c..cc0e419bf 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/CognitoAuthorityExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/CognitoAuthorityExtractor.java @@ -50,7 +50,7 @@ private Set extractUsernameRoles(AccessControlService acs, DefaultOAuth2 .stream() .filter(s -> s.getProvider().equals(Provider.OAUTH_COGNITO)) .filter(s -> s.getType().equals("user")) - .anyMatch(s -> principal.getName().matches(s.getValue()))) + .anyMatch(s -> s.matches(principal.getName()))) .map(Role::getName) .collect(Collectors.toSet()); @@ -76,7 +76,7 @@ private Set extractGroupRoles(AccessControlService acs, DefaultOAuth2Use .filter(s -> s.getType().equals("group")) .anyMatch(subject -> groups .stream() - .anyMatch(cognitoGroup -> cognitoGroup.matches(subject.getValue())) + .anyMatch(subject::matches) )) .map(Role::getName) .collect(Collectors.toSet()); diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GithubAuthorityExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GithubAuthorityExtractor.java index f08e266d3..79f4907fc 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GithubAuthorityExtractor.java 
+++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GithubAuthorityExtractor.java @@ -90,7 +90,7 @@ private Set extractUsernameRoles(DefaultOAuth2User principal, AccessCont .stream() .filter(s -> s.getProvider().equals(Provider.OAUTH_GITHUB)) .filter(s -> s.getType().equals("user")) - .anyMatch(s -> username.matches(s.getValue()))) + .anyMatch(s -> s.matches(username))) .map(Role::getName) .collect(Collectors.toSet()); @@ -131,7 +131,7 @@ private Mono> getOrganizationRoles(DefaultOAuth2User principal, Map< .filter(s -> s.getType().equals(ORGANIZATION)) .anyMatch(subject -> orgsMap.stream() .map(org -> org.get(ORGANIZATION_NAME).toString()) - .anyMatch(orgName -> orgName.matches(subject.getValue())) + .anyMatch(subject::matches) )) .map(Role::getName) .collect(Collectors.toSet())); @@ -189,7 +189,7 @@ private Mono> getTeamRoles(WebClient webClient, Map .filter(s -> s.getProvider().equals(Provider.OAUTH_GITHUB)) .filter(s -> s.getType().equals("team")) .anyMatch(subject -> teams.stream() - .anyMatch(teamName -> teamName.matches(subject.getValue())) + .anyMatch(subject::matches) )) .map(Role::getName) .collect(Collectors.toSet())); diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GoogleAuthorityExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GoogleAuthorityExtractor.java index c323e7ffd..a90ab50ef 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GoogleAuthorityExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GoogleAuthorityExtractor.java @@ -52,7 +52,7 @@ private Set extractUsernameRoles(AccessControlService acs, DefaultOAuth2 .filter(s -> s.getType().equals("user")) .anyMatch(s -> { String email = principal.getAttribute(EMAIL_ATTRIBUTE_NAME); - return email != null && email.matches(s.getValue()); + return s.matches(email); })) .map(Role::getName) .collect(Collectors.toSet()); @@ -71,7 +71,7 @@ private Set extractDomainRoles(AccessControlService acs, DefaultOAuth2Us .stream() .filter(s -> s.getProvider().equals(Provider.OAUTH_GOOGLE)) .filter(s -> s.getType().equals("domain")) - .anyMatch(s -> domain.matches(s.getValue()))) + .anyMatch(s -> s.matches(domain))) .map(Role::getName) .collect(Collectors.toSet()); } diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java index 0d556bc1a..20653953d 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java @@ -58,9 +58,7 @@ private Set extractUsernameRoles(AccessControlService acs, DefaultOAuth2 .stream() .filter(s -> s.getProvider().equals(Provider.OAUTH)) .filter(s -> s.getType().equals("user")) - .peek(s -> log.trace("[{}] matches [{}]? 
[{}]", s.getValue(), principalName, - principalName.matches(s.getValue()))) - .anyMatch(s -> principalName.matches(s.getValue()))) + .anyMatch(s -> s.matches(principalName))) .map(Role::getName) .collect(Collectors.toSet()); @@ -94,7 +92,7 @@ private Set extractRoles(AccessControlService acs, DefaultOAuth2User pri .stream() .filter(s -> s.getProvider().equals(Provider.OAUTH)) .filter(s -> s.getType().equals("role")) - .anyMatch(subject -> principalRoles.stream().anyMatch(s -> s.matches(subject.getValue())))) + .anyMatch(subject -> principalRoles.stream().anyMatch(subject::matches))) .map(Role::getName) .collect(Collectors.toSet()); diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacActiveDirectoryAuthoritiesExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacActiveDirectoryAuthoritiesExtractor.java index cefef5a7e..76feff063 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacActiveDirectoryAuthoritiesExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacActiveDirectoryAuthoritiesExtractor.java @@ -37,8 +37,8 @@ public Collection getGrantedAuthorities(DirContextOp .stream() .filter(subject -> subject.getProvider().equals(Provider.LDAP_AD)) .anyMatch(subject -> switch (subject.getType()) { - case "user" -> username.equalsIgnoreCase(subject.getValue()); - case "group" -> adGroups.contains(subject.getValue()); + case "user" -> subject.matches(username); + case "group" -> adGroups.stream().anyMatch(subject::matches); default -> false; }) ) diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java index 261b30cfe..78ec4ba19 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java @@ -39,8 +39,8 @@ protected Set getAdditionalRoles(DirContextOperations user, St .stream() .filter(subject -> subject.getProvider().equals(Provider.LDAP)) .anyMatch(subject -> switch (subject.getType()) { - case "user" -> username.equalsIgnoreCase(subject.getValue()); - case "group" -> ldapGroups.contains(subject.getValue()); + case "user" -> subject.matches(username); + case "group" -> ldapGroups.stream().anyMatch(subject::matches); default -> false; }) ) diff --git a/api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java b/api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java index 4623296e7..a2f704fae 100644 --- a/api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java +++ b/api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java @@ -174,7 +174,7 @@ void extractGoogleAuthorities() { OAuth2User oauth2User = new DefaultOAuth2User( AuthorityUtils.createAuthorityList("SCOPE_message:read"), - Map.of("hd", "test.domain.com", "email", "john@kafka.com"), + Map.of("hd", "memelord.lol", "email", "john@kafka.com"), "email"); HashMap additionalParams = new HashMap<>(); diff --git a/api/src/test/resources/roles_definition.yaml b/api/src/test/resources/roles_definition.yaml index e1b370b5d..f9af4f507 100644 --- a/api/src/test/resources/roles_definition.yaml +++ b/api/src/test/resources/roles_definition.yaml @@ -3,12 +3,13 @@ - provider: 'OAUTH' value: 'ROLE-[A-Z]+' type: 'role' + isRegex: 'true' - provider: 'OAUTH_COGNITO' - value: 'ROLE-[A-Z]+' + value: 
'ROLE-ADMIN' type: 'group' - provider: 'OAUTH_GOOGLE' - value: '.*.domain.com' type: 'domain' + value: 'memelord.lol' clusters: - local - remote @@ -23,14 +24,17 @@ - provider: 'OAUTH' value: '.*@kafka.com' type: 'user' + isRegex: 'true' - provider: 'OAUTH_COGNITO' value: '.*@kafka.com' type: 'user' + isRegex: 'true' - provider: 'OAUTH_GITHUB' value: '.*@kafka.com' type: 'user' + isRegex: 'true' - provider: 'OAUTH_GOOGLE' - value: '.*@kafka.com' + value: 'john@kafka.com' type: 'user' clusters: - remote From 5d57beceb228112e3f264f6df8e441ff2ed5a07f Mon Sep 17 00:00:00 2001 From: German Osin Date: Fri, 28 Feb 2025 12:53:04 +0100 Subject: [PATCH 80/84] Fix supply AdminClient creation (#875) --- .../java/io/kafbat/ui/service/AdminClientServiceImpl.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java b/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java index 68f461e7e..fd9735b87 100644 --- a/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java +++ b/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java @@ -16,6 +16,7 @@ import org.apache.kafka.clients.admin.AdminClientConfig; import org.springframework.stereotype.Service; import reactor.core.publisher.Mono; +import reactor.core.scheduler.Schedulers; @Service @Slf4j @@ -41,7 +42,7 @@ public Mono get(KafkaCluster cluster) { } private Mono createAdminClient(KafkaCluster cluster) { - return Mono.fromFuture(CompletableFuture.supplyAsync(() -> { + return Mono.fromSupplier(() -> { Properties properties = new Properties(); KafkaClientSslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), properties); properties.putAll(cluster.getProperties()); @@ -52,7 +53,8 @@ private Mono createAdminClient(KafkaCluster cluster) { "kafbat-ui-admin-" + Instant.now().getEpochSecond() + "-" + CLIENT_ID_SEQ.incrementAndGet() ); return AdminClient.create(properties); - })).flatMap(ac -> ReactiveAdminClient.create(ac).doOnError(th -> ac.close())) + }).subscribeOn(Schedulers.boundedElastic()) + .flatMap(ac -> ReactiveAdminClient.create(ac).doOnError(th -> ac.close())) .onErrorMap(th -> new IllegalStateException( "Error while creating AdminClient for the cluster " + cluster.getName(), th)); } From 997a7c411d77c52980854adfbb0471d47c6381cd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fran=C3=A7ois?= Date: Fri, 28 Feb 2025 16:01:49 +0100 Subject: [PATCH 81/84] Issues/300: Replace Lombok annotation & add log --- .../java/io/kafbat/ui/model/rbac/Subject.java | 20 ++++++------------- .../extractor/OauthAuthorityExtractor.java | 1 + 2 files changed, 7 insertions(+), 14 deletions(-) diff --git a/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java b/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java index 030e6ac7a..509500648 100644 --- a/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java +++ b/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java @@ -5,23 +5,15 @@ import io.kafbat.ui.model.rbac.provider.Provider; import java.util.Objects; -import lombok.Getter; -import lombok.Setter; +import lombok.Data; -@Getter +@Data public class Subject { Provider provider; - @Setter String type; - @Setter String value; - @Setter - boolean isRegex; - - public void setProvider(String provider) { - this.provider = Provider.fromString(provider.toUpperCase()); - } + boolean isRegex = false; public void validate() { checkNotNull(type, "Subject type cannot be null"); @@ -32,9 +24,9 @@ public void validate() { } public boolean 
matches(final String attribute) { - if (isRegex()) { - return Objects.nonNull(attribute) && attribute.matches(getValue()); + if (isRegex) { + return Objects.nonNull(attribute) && attribute.matches(this.value); } - return getValue().equalsIgnoreCase(attribute); + return this.value.equalsIgnoreCase(attribute); } } diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java index 20653953d..61748610e 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java @@ -58,6 +58,7 @@ private Set extractUsernameRoles(AccessControlService acs, DefaultOAuth2 .stream() .filter(s -> s.getProvider().equals(Provider.OAUTH)) .filter(s -> s.getType().equals("user")) + .peek(s -> log.trace("[{}] matches [{}]? [{}]", s.getValue(), principalName, s.matches(principalName))) .anyMatch(s -> s.matches(principalName))) .map(Role::getName) .collect(Collectors.toSet()); From 3121341348243827a70013760feb524f77d44710 Mon Sep 17 00:00:00 2001 From: German Osin Date: Fri, 28 Feb 2025 18:46:40 +0100 Subject: [PATCH 82/84] Validate config values before start (#876) --- api/pom.xml | 4 ++++ .../kafbat/ui/config/ClustersProperties.java | 21 ++++++++++++++----- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/api/pom.xml b/api/pom.xml index fac019ab5..0c44d462e 100644 --- a/api/pom.xml +++ b/api/pom.xml @@ -29,6 +29,10 @@ org.springframework.boot spring-boot-starter-security + + org.springframework.boot + spring-boot-starter-validation + org.springframework.boot spring-boot-actuator diff --git a/api/src/main/java/io/kafbat/ui/config/ClustersProperties.java b/api/src/main/java/io/kafbat/ui/config/ClustersProperties.java index 5931602b2..e73029c4f 100644 --- a/api/src/main/java/io/kafbat/ui/config/ClustersProperties.java +++ b/api/src/main/java/io/kafbat/ui/config/ClustersProperties.java @@ -2,6 +2,9 @@ import io.kafbat.ui.model.MetricsConfig; import jakarta.annotation.PostConstruct; +import jakarta.validation.Valid; +import jakarta.validation.constraints.NotBlank; +import jakarta.validation.constraints.NotNull; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; @@ -17,13 +20,15 @@ import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.context.annotation.Configuration; import org.springframework.util.StringUtils; +import org.springframework.validation.annotation.Validated; @Configuration @ConfigurationProperties("kafka") @Data +@Validated public class ClustersProperties { - List clusters = new ArrayList<>(); + List<@Valid Cluster> clusters = new ArrayList<>(); String internalTopicPrefix; @@ -33,7 +38,9 @@ public class ClustersProperties { @Data public static class Cluster { + @NotBlank(message = "field name for for cluster could not be blank") String name; + @NotBlank(message = "field bootstrapServers for for cluster could not be blank") String bootstrapServers; TruststoreConfig ssl; @@ -46,9 +53,9 @@ public static class Cluster { KsqldbServerAuth ksqldbServerAuth; KeystoreConfig ksqldbServerSsl; - List kafkaConnect; + List<@Valid ConnectCluster> kafkaConnect; - List serde; + List<@Valid SerdeConfig> serde; String defaultKeySerde; String defaultValueSerde; @@ -58,7 +65,7 @@ public static class Cluster { Long pollingThrottleRate; - List masking; + List<@Valid Masking> masking; AuditProperties audit; 
} @@ -88,7 +95,9 @@ public static class MetricsConfigData { @Builder(toBuilder = true) @ToString(exclude = {"password", "keystorePassword"}) public static class ConnectCluster { + @NotBlank String name; + @NotBlank String address; String username; String password; @@ -122,6 +131,7 @@ public static class KeystoreConfig { @Data public static class SerdeConfig { + @NotBlank String name; String className; String filePath; @@ -139,6 +149,7 @@ public static class KsqldbServerAuth { @Data public static class Masking { + @NotNull Type type; List fields; String fieldsNamePattern; @@ -160,7 +171,7 @@ public static class AuditProperties { Integer auditTopicsPartitions; Boolean topicAuditEnabled; Boolean consoleAuditEnabled; - LogLevel level; + LogLevel level = LogLevel.ALTER_ONLY; Map auditTopicProperties; public enum LogLevel { From 9fe46048ceaa02aa5c3fd352499a68544db9f32f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fran=C3=A7ois?= Date: Mon, 3 Mar 2025 15:40:00 +0100 Subject: [PATCH 83/84] Issues/300: Use Jackson to deserialize Roles[] --- .../java/io/kafbat/ui/model/rbac/Subject.java | 11 ++++++++--- ...gexBasedProviderAuthorityExtractorTest.java | 18 ++++++++++-------- 2 files changed, 18 insertions(+), 11 deletions(-) diff --git a/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java b/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java index 509500648..62c32f02c 100644 --- a/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java +++ b/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java @@ -3,17 +3,22 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; +import com.fasterxml.jackson.annotation.JsonProperty; import io.kafbat.ui.model.rbac.provider.Provider; import java.util.Objects; -import lombok.Data; +import lombok.Builder; +import lombok.Value; +import lombok.extern.jackson.Jacksonized; -@Data +@Value +@Jacksonized +@Builder public class Subject { Provider provider; String type; String value; - boolean isRegex = false; + boolean isRegex; public void validate() { checkNotNull(type, "Subject type cannot be null"); diff --git a/api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java b/api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java index a2f704fae..11eec0ea4 100644 --- a/api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java +++ b/api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java @@ -7,6 +7,9 @@ import static org.mockito.Mockito.when; import static org.springframework.security.oauth2.client.registration.ClientRegistration.withRegistrationId; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; +import com.fasterxml.jackson.dataformat.yaml.YAMLMapper; import io.kafbat.ui.config.auth.OAuthProperties; import io.kafbat.ui.model.rbac.Role; import io.kafbat.ui.service.rbac.AccessControlService; @@ -16,6 +19,7 @@ import io.kafbat.ui.service.rbac.extractor.OauthAuthorityExtractor; import io.kafbat.ui.service.rbac.extractor.ProviderAuthorityExtractor; import io.kafbat.ui.util.AccessControlServiceMock; +import java.io.IOException; import java.io.InputStream; import java.time.Instant; import java.time.temporal.ChronoUnit; @@ -32,27 +36,25 @@ import org.springframework.security.oauth2.core.OAuth2AccessToken; import org.springframework.security.oauth2.core.user.DefaultOAuth2User; import 
org.springframework.security.oauth2.core.user.OAuth2User; -import org.yaml.snakeyaml.Yaml; -import org.yaml.snakeyaml.introspector.BeanAccess; public class RegexBasedProviderAuthorityExtractorTest { private final AccessControlService accessControlService = new AccessControlServiceMock().getMock(); - Yaml yaml; ProviderAuthorityExtractor extractor; @BeforeEach - void setUp() { - yaml = new Yaml(); - yaml.setBeanAccess(BeanAccess.FIELD); + void setUp() throws IOException { + + YAMLMapper mapper = new YAMLMapper(); InputStream rolesFile = this.getClass() .getClassLoader() .getResourceAsStream("roles_definition.yaml"); - Role[] roleArray = yaml.loadAs(rolesFile, Role[].class); - when(accessControlService.getRoles()).thenReturn(List.of(roleArray)); + Role[] roles = mapper.readValue(rolesFile, Role[].class); + + when(accessControlService.getRoles()).thenReturn(List.of(roles)); } From 040361c3788e59ed8872f4c5ed451968b0299dd2 Mon Sep 17 00:00:00 2001 From: Callaert Anthony Date: Wed, 5 Mar 2025 20:01:00 +0100 Subject: [PATCH 84/84] fix: back to specific setters --- .../java/io/kafbat/ui/model/rbac/Subject.java | 25 +++++++++++++------ 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java b/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java index 62c32f02c..9781d69ca 100644 --- a/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java +++ b/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java @@ -3,16 +3,11 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; -import com.fasterxml.jackson.annotation.JsonProperty; import io.kafbat.ui.model.rbac.provider.Provider; import java.util.Objects; -import lombok.Builder; -import lombok.Value; -import lombok.extern.jackson.Jacksonized; +import lombok.Getter; -@Value -@Jacksonized -@Builder +@Getter public class Subject { Provider provider; @@ -20,6 +15,22 @@ public class Subject { String value; boolean isRegex; + public void setProvider(String provider) { + this.provider = Provider.fromString(provider.toUpperCase()); + } + + public void setType(String type) { + this.type = type; + } + + public void setValue(String value) { + this.value = value; + } + + public void setIsRegex(boolean isRegex) { + this.isRegex = isRegex; + } + public void validate() { checkNotNull(type, "Subject type cannot be null"); checkNotNull(value, "Subject value cannot be null");
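
Taken together, the Subject changes above let each subject in a role definition opt into regular-expression matching through the new isRegex flag, while subjects without it keep the case-insensitive literal comparison. A small illustrative role entry in the same shape as the roles_definition.yaml test fixture earlier in this series is sketched below; the role name and the permission block are invented for illustration only.

- name: 'example-role'
  subjects:
    - provider: 'OAUTH'
      type: 'user'
      value: '.*@kafka.com'   # evaluated as a regex because isRegex is set
      isRegex: 'true'
    - provider: 'OAUTH'
      type: 'role'
      value: 'ROLE_EDITOR'    # no isRegex: compared as a case-insensitive literal
  clusters:
    - local
  permissions:
    - resource: APPLICATIONCONFIG
      actions: [ all ]

Because the comparison uses String#matches, a regex subject must match the whole attribute value, so explicit ^ and $ anchors are redundant.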