diff --git a/.dev/dev_arm64.yaml b/.dev/dev.yaml similarity index 90% rename from .dev/dev_arm64.yaml rename to .dev/dev.yaml index 220140d3d..47149ed92 100644 --- a/.dev/dev_arm64.yaml +++ b/.dev/dev.yaml @@ -1,9 +1,3 @@ -# This is a compose file designed for arm64/Apple Silicon systems -# To adapt this to x86 please find and replace ".arm64" with empty - -# ARM64 supported images for kafka can be found here -# https://hub.docker.com/r/confluentinc/cp-kafka/tags?page=1&name=arm64 ---- version: '3.8' name: "kafbat-ui-dev" @@ -32,8 +26,7 @@ services: KAFKA_CLUSTERS_0_AUDIT_CONSOLEAUDITENABLED: 'true' kafka0: - image: confluentinc/cp-kafka:7.6.0.arm64 - user: "0:0" + image: confluentinc/cp-kafka:7.8.0 hostname: kafka0 container_name: kafka0 ports: @@ -60,7 +53,7 @@ services: CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' schema-registry0: - image: confluentinc/cp-schema-registry:7.6.0.arm64 + image: confluentinc/cp-schema-registry:7.8.0 ports: - 8085:8085 depends_on: @@ -76,7 +69,7 @@ services: SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas kafka-connect0: - image: confluentinc/cp-kafka-connect:7.6.0.arm64 + image: confluentinc/cp-kafka-connect:7.8.0 ports: - 8083:8083 depends_on: @@ -101,7 +94,7 @@ services: CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components,/usr/local/share/kafka/plugins,/usr/share/filestream-connectors" ksqldb0: - image: confluentinc/cp-ksqldb-server:7.6.0.arm64 + image: confluentinc/cp-ksqldb-server:7.8.0 depends_on: - kafka0 - kafka-connect0 @@ -119,7 +112,7 @@ services: KSQL_CACHE_MAX_BYTES_BUFFERING: 0 kafka-init-topics: - image: confluentinc/cp-kafka:7.6.0.arm64 + image: confluentinc/cp-kafka:7.8.0 volumes: - ../documentation/compose/data/message.json:/data/message.json depends_on: diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml index d8a0e0375..e450aaedd 100644 --- a/.github/FUNDING.yml +++ b/.github/FUNDING.yml @@ -1 +1,2 @@ github: [kafbat] +open_collective: kafka-ui diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 17e8484c9..ba0fe794a 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -14,6 +14,23 @@ updates: - "type/dependencies" - "scope/backend" +- package-ecosystem: docker + directory: "/api" + schedule: + interval: weekly + time: "10:00" + timezone: Europe/London + reviewers: + - "kafbat/backend" + open-pull-requests-limit: 10 + ignore: + - dependency-name: "azul/zulu-openjdk-alpine" + # Limit dependabot pull requests to minor Java upgrades + update-types: ["version-update:semver-major"] + labels: + - "type/dependencies" + - "scope/backend" + - package-ecosystem: npm directory: "/frontend" schedule: diff --git a/.github/workflows/backend_main.yml b/.github/workflows/backend_main.yml index b0c7f8ab4..a6c24cc22 100644 --- a/.github/workflows/backend_main.yml +++ b/.github/workflows/backend_main.yml @@ -19,7 +19,7 @@ concurrency: cancel-in-progress: true jobs: - build: + build-and-test: uses: ./.github/workflows/backend_tests.yml with: event_name: ${{ github.event_name }} diff --git a/.github/workflows/backend_pr.yml b/.github/workflows/backend_pr.yml index 3570dfa97..5812a14c5 100644 --- a/.github/workflows/backend_pr.yml +++ b/.github/workflows/backend_pr.yml @@ -20,7 +20,7 @@ concurrency: cancel-in-progress: true jobs: - build: + build-and-test: uses: ./.github/workflows/backend_tests.yml with: event_name: ${{ github.event_name }} diff --git a/.github/workflows/backend_tests.yml b/.github/workflows/backend_tests.yml index abb97d149..e9e6f975c 100644 --- a/.github/workflows/backend_tests.yml +++ 
b/.github/workflows/backend_tests.yml @@ -28,7 +28,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v4 with: - java-version: '17' + java-version: '21' distribution: 'zulu' cache: 'maven'
diff --git a/.github/workflows/branch-deploy.yml b/.github/workflows/branch-deploy.yml index 720ceee3f..998e09bfe 100644 --- a/.github/workflows/branch-deploy.yml +++ b/.github/workflows/branch-deploy.yml @@ -29,7 +29,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v4 with: - java-version: '17' + java-version: '21' distribution: 'zulu' cache: 'maven' - name: Build
diff --git a/.github/workflows/build-public-image.yml b/.github/workflows/build-public-image.yml index 3e364c992..4f37246bc 100644 --- a/.github/workflows/build-public-image.yml +++ b/.github/workflows/build-public-image.yml @@ -27,7 +27,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v4 with: - java-version: '17' + java-version: '21' distribution: 'zulu' cache: 'maven' - name: Build
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 40fc3fb3e..e73c9576b 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -45,7 +45,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v4 with: - java-version: '17' + java-version: '21' distribution: 'zulu' cache: 'maven'
diff --git a/.github/workflows/cve_checks.yml b/.github/workflows/cve_checks.yml index 5df2d33bf..cfdc40a60 100644 --- a/.github/workflows/cve_checks.yml +++ b/.github/workflows/cve_checks.yml @@ -1,5 +1,9 @@ name: "Infra: CVE checks" on: + pull_request: + types: [ "opened", "reopened", "synchronize" ] + push: + branches: [ "main" ] workflow_dispatch: schedule: # * is a special character in YAML so you have to quote this string @@ -9,7 +13,8 @@ permissions: contents: read jobs: - build-and-test: + + check-cves: runs-on: ubuntu-latest steps: @@ -20,7 +25,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v4 with: - java-version: '17' + java-version: '21' distribution: 'zulu' cache: 'maven' @@ -62,8 +67,17 @@ jobs: cache-to: type=local,dest=/tmp/.buildx-cache - name: Run CVE checks - uses: aquasecurity/trivy-action@0.19.0 + uses: aquasecurity/trivy-action@0.29.0 with: image-ref: "ghcr.io/kafbat/kafka-ui:${{ steps.build.outputs.version }}" format: "table" exit-code: "1" + + notify: + needs: check-cves + if: ${{ always() && needs.check-cves.result == 'failure' && github.event_name == 'schedule' }} + uses: ./.github/workflows/infra_discord_hook.yml + with: + message: "Attention! CVE checks run failed!
Please fix them CVEs :(" + secrets: + DISCORD_WEBHOOK_URL: ${{ secrets.DISCORD_WEBHOOK_URL_CVE }}
diff --git a/.github/workflows/docker_publish.yml b/.github/workflows/docker_publish.yml index e359ea740..1806dc017 100644 --- a/.github/workflows/docker_publish.yml +++ b/.github/workflows/docker_publish.yml @@ -20,7 +20,7 @@ jobs: strategy: fail-fast: false matrix: - registry: [ 'docker.io', 'ghcr.io', 'ecr' ] + registry: [ 'docker.io', 'ghcr.io', 'public.ecr.aws' ] runs-on: ubuntu-latest steps: @@ -31,7 +31,8 @@ jobs: name: image path: /tmp - # setup containerd to preserve provenance attestations :https://docs.docker.com/build/attestations/#creating-attestations + # setup containerd to preserve provenance attestations: + # https://docs.docker.com/build/attestations/#creating-attestations - name: Setup docker with containerd uses: crazy-max/ghaction-setup-docker@v3 with: @@ -63,33 +64,33 @@ password: ${{ secrets.GITHUB_TOKEN }} - name: Configure AWS credentials - if: matrix.registry == 'ecr' + if: matrix.registry == 'public.ecr.aws' uses: aws-actions/configure-aws-credentials@v4 with: aws-region: us-east-1 # This region only for public ECR role-to-assume: ${{ secrets.AWS_ROLE }} - name: Login to public ECR - if: matrix.registry == 'ecr' + if: matrix.registry == 'public.ecr.aws' id: login-ecr-public uses: aws-actions/amazon-ecr-login@v2 with: registry-type: public - - name: define env vars + - name: Define env vars for container registry URL run: | - if [ ${{matrix.registry }} == 'docker.io' ]; then - echo "REGISTRY=${{ matrix.registry }}" >> $GITHUB_ENV - echo "REPOSITORY=${{ github.repository }}" >> $GITHUB_ENV - elif [ ${{ matrix.registry }} == 'ghcr.io' ]; then - echo "REGISTRY=${{ matrix.registry }}" >> $GITHUB_ENV - echo "REPOSITORY=${{ github.repository }}" >> $GITHUB_ENV - elif [ ${{ matrix.registry }} == 'ecr' ]; then + if [ ${{ matrix.registry }} == 'public.ecr.aws' ]; then + # vars.ECR_REGISTRY value is expected to be of the `public.ecr.aws/<public_ecr_id>` form + # The `public_ecr_id` must be a *default* alias associated with the public registry (rather + # than a custom alias) echo "REGISTRY=${{ vars.ECR_REGISTRY }}" >> $GITHUB_ENV + # Trim GH Org name so that resulting Public ECR URL has no duplicate org name + # Public ECR default alias: public.ecr.aws/<public_ecr_id>/kafka-ui + # Public ECR custom alias: public.ecr.aws/kafbat/kafka-ui echo "REPOSITORY=$(basename ${{ github.repository }})" >> $GITHUB_ENV + else # this covers the case of docker.io and ghcr.io + echo "REGISTRY=${{ matrix.registry }}" >> $GITHUB_ENV echo "REPOSITORY=${{ github.repository }}" >> $GITHUB_ENV - else - echo "REGISTRY=" >> $GITHUB_ENV - echo "REPOSITORY=notworking" >> $GITHUB_ENV fi - name: Push images to ${{ matrix.registry }}
diff --git a/.github/workflows/e2e-run.yml b/.github/workflows/e2e-run.yml index b79e07897..b17335215 100644 --- a/.github/workflows/e2e-run.yml +++ b/.github/workflows/e2e-run.yml @@ -30,7 +30,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v3 with: - java-version: '17' + java-version: '21' distribution: 'zulu' cache: 'maven' @@ -72,7 +72,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v3 with: - java-version: '17' + java-version: '21' distribution: 'zulu' cache: 'maven'
diff --git a/.github/workflows/frontend_main.yml b/.github/workflows/frontend_main.yml index b680a7334..008517f7a 100644 --- a/.github/workflows/frontend_main.yml +++ b/.github/workflows/frontend_main.yml @@ -15,5 +15,5 @@ concurrency: cancel-in-progress: true jobs: - build: + build-and-test: uses:
./.github/workflows/frontend_tests.yml diff --git a/.github/workflows/frontend_pr.yml b/.github/workflows/frontend_pr.yml index 22a4d6f5f..a49b3193a 100644 --- a/.github/workflows/frontend_pr.yml +++ b/.github/workflows/frontend_pr.yml @@ -16,5 +16,5 @@ concurrency: cancel-in-progress: true jobs: - build: + build-and-test: uses: ./.github/workflows/frontend_tests.yml diff --git a/.github/workflows/frontend_tests.yml b/.github/workflows/frontend_tests.yml index 6755b1b61..00d9b619a 100644 --- a/.github/workflows/frontend_tests.yml +++ b/.github/workflows/frontend_tests.yml @@ -23,12 +23,12 @@ jobs: - uses: pnpm/action-setup@v4.0.0 with: - version: 9.11.0 + version: 9.15.4 - name: Install node uses: actions/setup-node@v4.0.2 with: - node-version: "18.17.1" + node-version: "22.12.0" cache: "pnpm" cache-dependency-path: "./frontend/pnpm-lock.yaml" diff --git a/.github/workflows/infra_discord_hook.yml b/.github/workflows/infra_discord_hook.yml new file mode 100644 index 000000000..929a19cd4 --- /dev/null +++ b/.github/workflows/infra_discord_hook.yml @@ -0,0 +1,27 @@ +name: 'Discord hook' + +on: + workflow_call: + inputs: + message: + description: 'Message text' + required: true + type: string + secrets: + DISCORD_WEBHOOK_URL: + required: true + +permissions: + contents: read + +jobs: + + hook: + runs-on: ubuntu-latest + steps: + - name: Notify Discord on Failure + uses: Ilshidur/action-discord@0.3.2 + with: + args: ${{ inputs.message }} + env: + DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_URL }} diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 7701b91e6..bf684a69b 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -28,7 +28,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v4 with: - java-version: '17' + java-version: '21' distribution: 'zulu' cache: 'maven' diff --git a/.github/workflows/md-links.yml b/.github/workflows/md-links.yml index b885e014d..45a8e920a 100644 --- a/.github/workflows/md-links.yml +++ b/.github/workflows/md-links.yml @@ -13,7 +13,7 @@ permissions: contents: read jobs: - build-and-test: + lint-md: runs-on: ubuntu-latest steps: diff --git a/.github/workflows/pr_linter.yml b/.github/workflows/pr_linter.yml index f4562345f..703dccd4f 100644 --- a/.github/workflows/pr_linter.yml +++ b/.github/workflows/pr_linter.yml @@ -5,13 +5,12 @@ on: permissions: checks: write jobs: - task-check: + check-tasks: runs-on: ubuntu-latest steps: - uses: kentaro-m/task-completed-checker-action@v0.1.2 with: repo-token: "${{ secrets.GITHUB_TOKEN }}" - uses: dekinderfiets/pr-description-enforcer@0.0.1 - if: false # TODO remove when public with: repo-token: "${{ secrets.GITHUB_TOKEN }}" diff --git a/.github/workflows/release-serde-api.yml b/.github/workflows/release-serde-api.yml index 36d8a8412..b1e210e45 100644 --- a/.github/workflows/release-serde-api.yml +++ b/.github/workflows/release-serde-api.yml @@ -22,7 +22,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v4 with: - java-version: "17" + java-version: "21" distribution: "zulu" cache: "maven" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 3a3c9de23..6e2075b75 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -28,7 +28,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v4 with: - java-version: '17' + java-version: '21' distribution: 'zulu' cache: 'maven' diff --git a/.github/workflows/separate_env_public_create.yml b/.github/workflows/separate_env_public_create.yml index 97eb8ee02..e77a15438 100644 --- 
a/.github/workflows/separate_env_public_create.yml +++ b/.github/workflows/separate_env_public_create.yml @@ -29,7 +29,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v4 with: - java-version: '17' + java-version: '21' distribution: 'zulu' cache: 'maven' - name: Build diff --git a/.github/workflows/workflow_linter.yml b/.github/workflows/workflow_linter.yml index 728aaa251..12dc3656c 100644 --- a/.github/workflows/workflow_linter.yml +++ b/.github/workflows/workflow_linter.yml @@ -9,7 +9,7 @@ permissions: contents: read jobs: - build-and-test: + lint-workflows: runs-on: ubuntu-latest steps: diff --git a/.gitignore b/.gitignore index efd6a9749..51efbef39 100644 --- a/.gitignore +++ b/.gitignore @@ -42,3 +42,4 @@ build/ *.tgz /docker/*.override.yaml +/e2e-tests/allure-results/ diff --git a/.java-version b/.java-version new file mode 100644 index 000000000..aabe6ec39 --- /dev/null +++ b/.java-version @@ -0,0 +1 @@ +21 diff --git a/.mvn/jvm.config b/.mvn/jvm.config new file mode 100644 index 000000000..2bf66750a --- /dev/null +++ b/.mvn/jvm.config @@ -0,0 +1 @@ +-Djava.net.useSystemProxies=true \ No newline at end of file diff --git a/LICENSE b/LICENSE index bef1db1d9..bedb0fad0 100644 --- a/LICENSE +++ b/LICENSE @@ -187,7 +187,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2020 CloudHut + Copyright 2025 Kafbat Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -199,4 +199,4 @@ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file + limitations under the License. diff --git a/README.md b/README.md index 2b21a0b99..fd01edf39 100644 --- a/README.md +++ b/README.md @@ -15,10 +15,10 @@ Versatile, fast and lightweight web UI for managing Apache Kafka® clusters.

Documentation • - Quick Start • + Quick StartCommunity
- AWS Marketplace • + AWS MarketplaceProductHunt

@@ -28,7 +28,7 @@ Versatile, fast and lightweight web UI for managing Apache Kafka® clusters. #### Kafbat UI is a free, open-source web UI to monitor and manage Apache Kafka clusters. -Kafbat UI is a simple tool that makes your data flows observable, helps find and troubleshoot issues faster and deliver optimal performance. Its lightweight dashboard makes it easy to track key metrics of your Kafka clusters - Brokers, Topics, Partitions, Production, and Consumption. +[Kafbat UI](https://kafbat.io/) is a simple tool that makes your data flows observable, helps find and troubleshoot issues faster and deliver optimal performance. Its lightweight dashboard makes it easy to track key metrics of your Kafka clusters - Brokers, Topics, Partitions, Production, and Consumption. Kafbat UI, developed by Kafbat*, proudly carries forward the legacy of the UI for Apache Kafka project. @@ -50,7 +50,7 @@ We extend our gratitude to Provectus for their past support in groundbreaking wo * **View Consumer Groups** — view per-partition parked offsets, combined and per-partition lag * **Browse Messages** — browse messages with JSON, plain text, and Avro encoding * **Dynamic Topic Configuration** — create and configure new topics with dynamic configuration -* **Configurable Authentification** — [secure](https://ui.docs.kafbat.io/configuration/authentication) your installation with optional Github/Gitlab/Google OAuth 2.0 +* **Configurable Authentication** — [secure](https://ui.docs.kafbat.io/configuration/authentication) your installation with optional Github/Gitlab/Google OAuth 2.0 * **Custom serialization/deserialization plugins** - [use](https://ui.docs.kafbat.io/configuration/serialization-serde) a ready-to-go serde for your data like AWS Glue or Smile, or code your own! * **Role based access control** - [manage permissions](https://ui.docs.kafbat.io/configuration/rbac-role-based-access-control) to access the UI with granular precision * **Data masking** - [obfuscate](https://ui.docs.kafbat.io/configuration/data-masking) sensitive data in topic messages
diff --git a/api/Dockerfile b/api/Dockerfile index 9e1173250..adaa64009 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,4 +1,7 @@ -FROM azul/zulu-openjdk-alpine:17.0.11-jre-headless +# The tag is ignored when a sha is included but the reasons to add it are: +# 1. Self Documentation: It is difficult to find out what the expected tag is given a sha alone +# 2.
Helps dependabot during discovery of upgrades +FROM azul/zulu-openjdk-alpine:21.0.5-jre-headless@sha256:842b23baf96980437281b7419af970238b4c1c3109e50beac5299cdc278291d7 RUN apk add --no-cache \ # snappy codec
diff --git a/api/pom.xml b/api/pom.xml index 8452167ce..0c44d462e 100644 --- a/api/pom.xml +++ b/api/pom.xml @@ -29,6 +29,10 @@ <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-starter-security</artifactId> </dependency> + <dependency> + <groupId>org.springframework.boot</groupId> + <artifactId>spring-boot-starter-validation</artifactId> + </dependency> <dependency> <groupId>org.springframework.boot</groupId> <artifactId>spring-boot-actuator</artifactId> @@ -50,7 +54,10 @@ <groupId>org.apache.kafka</groupId> <artifactId>kafka-clients</artifactId> - <version>${kafka-clients.version}</version> + + <version>${confluent.version}-ccs</version> <groupId>org.apache.commons</groupId> @@ -97,7 +104,7 @@ <groupId>com.azure</groupId> <artifactId>azure-identity</artifactId> - <version>1.13.3</version> + <version>1.14.2</version> <groupId>io.netty</groupId> @@ -209,6 +216,19 @@ <version>${okhttp3.mockwebserver.version}</version> <scope>test</scope> + <dependency> + <groupId>org.apache.kafka</groupId> + <artifactId>kafka-clients</artifactId> + <version>${confluent.version}-ccs</version> + <classifier>test</classifier> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.bouncycastle</groupId> + <artifactId>bcpkix-jdk18on</artifactId> + <version>1.80</version> + <scope>test</scope> + </dependency> <dependency> <groupId>org.springframework.boot</groupId> @@ -326,7 +346,7 @@ <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-surefire-plugin</artifactId> - <argLine>@{argLine} --illegal-access=permit</argLine> + <argLine>@{argLine}</argLine> @@ -489,6 +509,7 @@ build + false
diff --git a/api/src/main/java/io/kafbat/ui/client/RetryingKafkaConnectClient.java b/api/src/main/java/io/kafbat/ui/client/RetryingKafkaConnectClient.java index bb4ea92fe..cdf5bce14 100644 --- a/api/src/main/java/io/kafbat/ui/client/RetryingKafkaConnectClient.java +++ b/api/src/main/java/io/kafbat/ui/client/RetryingKafkaConnectClient.java @@ -1,5 +1,6 @@ package io.kafbat.ui.client; +import com.fasterxml.jackson.annotation.JsonProperty; import io.kafbat.ui.config.ClustersProperties; import io.kafbat.ui.connect.ApiClient; import io.kafbat.ui.connect.api.KafkaConnectClientApi; @@ -11,14 +12,17 @@ import io.kafbat.ui.connect.model.ConnectorTopics; import io.kafbat.ui.connect.model.NewConnector; import io.kafbat.ui.connect.model.TaskStatus; -import io.kafbat.ui.exception.KafkaConnectConflictReponseException; +import io.kafbat.ui.exception.KafkaConnectConflictResponseException; import io.kafbat.ui.exception.ValidationException; import io.kafbat.ui.util.WebClientConfigurator; +import jakarta.validation.constraints.NotNull; import java.time.Duration; import java.util.List; import java.util.Map; +import java.util.Objects; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.StringUtils; import org.springframework.http.ResponseEntity; import org.springframework.util.unit.DataSize; import org.springframework.web.client.RestClientException; @@ -44,30 +48,69 @@ private static Retry conflictCodeRetry() { .fixedDelay(MAX_RETRIES, RETRIES_DELAY) .filter(e -> e instanceof WebClientResponseException.Conflict) .onRetryExhaustedThrow((spec, signal) -> - new KafkaConnectConflictReponseException( + new KafkaConnectConflictResponseException( (WebClientResponseException.Conflict) signal.failure())); } - private static <T> Mono<T> withRetryOnConflict(Mono<T> publisher) { - return publisher.retryWhen(conflictCodeRetry()); + private static @NotNull Retry retryOnRebalance() { + return Retry.fixedDelay(MAX_RETRIES, RETRIES_DELAY).filter(e -> { + + if (e instanceof WebClientResponseException.InternalServerError exception) { + final var errorMessage = getMessage(exception); + return StringUtils.equals(errorMessage, + // From https://github.com/apache/kafka/blob/dfc07e0e0c6e737a56a5402644265f634402b864/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/distributed/DistributedHerder.java#L2340 + "Request cannot be completed because a rebalance is expected"); + } + return false; + }); + } + + private static <T> Mono<T>
withRetryOnConflictOrRebalance(Mono<T> publisher) { + return publisher + .retryWhen(retryOnRebalance()) + .retryWhen(conflictCodeRetry()); + } + + private static <T> Flux<T> withRetryOnConflictOrRebalance(Flux<T> publisher) { + return publisher + .retryWhen(retryOnRebalance()) + .retryWhen(conflictCodeRetry()); } - private static <T> Flux<T> withRetryOnConflict(Flux<T> publisher) { - return publisher.retryWhen(conflictCodeRetry()); + private static <T> Mono<T> withRetryOnRebalance(Mono<T> publisher) { + return publisher.retryWhen(retryOnRebalance()); } + private static <T> Mono<T> withBadRequestErrorHandling(Mono<T> publisher) { return publisher - .onErrorResume(WebClientResponseException.BadRequest.class, e -> - Mono.error(new ValidationException("Invalid configuration"))) - .onErrorResume(WebClientResponseException.InternalServerError.class, e -> - Mono.error(new ValidationException("Invalid configuration"))); + .onErrorResume(WebClientResponseException.BadRequest.class, + RetryingKafkaConnectClient::parseConnectErrorMessage) + .onErrorResume(WebClientResponseException.InternalServerError.class, + RetryingKafkaConnectClient::parseConnectErrorMessage); + } + + // Adapted from https://github.com/apache/kafka/blob/a0a501952b6d61f6f273bdb8f842346b51e9dfce/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/entities/ErrorMessage.java + // Adding the connect runtime dependency for this single class seems excessive + private record ErrorMessage(@NotNull @JsonProperty("message") String message) { + } + + private static <T> @NotNull Mono<T> parseConnectErrorMessage(WebClientResponseException parseException) { + return Mono.error(new ValidationException(getMessage(parseException))); + } + + private static String getMessage(WebClientResponseException parseException) { + final var errorMessage = parseException.getResponseBodyAs(ErrorMessage.class); + return Objects.requireNonNull(errorMessage, + // see https://github.com/apache/kafka/blob/a0a501952b6d61f6f273bdb8f842346b51e9dfce/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/errors/ConnectExceptionMapper.java + "This should not happen according to the ConnectExceptionMapper") + .message(); } @Override public Mono<Connector> createConnector(NewConnector newConnector) throws RestClientException { return withBadRequestErrorHandling( - super.createConnector(newConnector) + withRetryOnRebalance(super.createConnector(newConnector)) ); } @@ -75,178 +118,200 @@ public Mono<Connector> createConnector(NewConnector newConnector) throws RestCli public Mono<Connector> setConnectorConfig(String connectorName, Map<String, Object> requestBody) throws RestClientException { return withBadRequestErrorHandling( - super.setConnectorConfig(connectorName, requestBody) + withRetryOnRebalance(super.setConnectorConfig(connectorName, requestBody)) ); } @Override public Mono<ResponseEntity<Connector>> createConnectorWithHttpInfo(NewConnector newConnector) throws WebClientResponseException { - return withRetryOnConflict(super.createConnectorWithHttpInfo(newConnector)); + return withRetryOnConflictOrRebalance(super.createConnectorWithHttpInfo(newConnector)); } @Override public Mono<Void> deleteConnector(String connectorName) throws WebClientResponseException { - return withRetryOnConflict(super.deleteConnector(connectorName)); + return withRetryOnConflictOrRebalance(super.deleteConnector(connectorName)); } @Override public Mono<ResponseEntity<Void>> deleteConnectorWithHttpInfo(String connectorName) throws WebClientResponseException { - return withRetryOnConflict(super.deleteConnectorWithHttpInfo(connectorName)); + return withRetryOnConflictOrRebalance(super.deleteConnectorWithHttpInfo(connectorName)); } @Override public Mono<Connector> getConnector(String connectorName) throws WebClientResponseException { - return withRetryOnConflict(super.getConnector(connectorName)); + return withRetryOnConflictOrRebalance(super.getConnector(connectorName)); } @Override public Mono<ResponseEntity<Connector>> getConnectorWithHttpInfo(String connectorName) throws WebClientResponseException { - return withRetryOnConflict(super.getConnectorWithHttpInfo(connectorName)); + return withRetryOnConflictOrRebalance(super.getConnectorWithHttpInfo(connectorName)); } @Override public Mono<Map<String, Object>> getConnectorConfig(String connectorName) throws WebClientResponseException { - return withRetryOnConflict(super.getConnectorConfig(connectorName)); + return withRetryOnConflictOrRebalance(super.getConnectorConfig(connectorName)); } @Override public Mono<ResponseEntity<Map<String, Object>>> getConnectorConfigWithHttpInfo(String connectorName) throws WebClientResponseException { - return withRetryOnConflict(super.getConnectorConfigWithHttpInfo(connectorName)); + return withRetryOnConflictOrRebalance(super.getConnectorConfigWithHttpInfo(connectorName)); } @Override public Flux<ConnectorPlugin> getConnectorPlugins() throws WebClientResponseException { - return withRetryOnConflict(super.getConnectorPlugins()); + return withRetryOnConflictOrRebalance(super.getConnectorPlugins()); } @Override public Mono<ResponseEntity<List<ConnectorPlugin>>> getConnectorPluginsWithHttpInfo() throws WebClientResponseException { - return withRetryOnConflict(super.getConnectorPluginsWithHttpInfo()); + return withRetryOnConflictOrRebalance(super.getConnectorPluginsWithHttpInfo()); } @Override public Mono<ConnectorStatus> getConnectorStatus(String connectorName) throws WebClientResponseException { - return withRetryOnConflict(super.getConnectorStatus(connectorName)); + return withRetryOnConflictOrRebalance(super.getConnectorStatus(connectorName)); } @Override public Mono<ResponseEntity<ConnectorStatus>> getConnectorStatusWithHttpInfo(String connectorName) throws WebClientResponseException { - return withRetryOnConflict(super.getConnectorStatusWithHttpInfo(connectorName)); + return withRetryOnConflictOrRebalance(super.getConnectorStatusWithHttpInfo(connectorName)); } @Override public Mono<TaskStatus> getConnectorTaskStatus(String connectorName, Integer taskId) throws WebClientResponseException { - return withRetryOnConflict(super.getConnectorTaskStatus(connectorName, taskId)); + return withRetryOnConflictOrRebalance(super.getConnectorTaskStatus(connectorName, taskId)); } @Override public Mono<ResponseEntity<TaskStatus>> getConnectorTaskStatusWithHttpInfo(String connectorName, Integer taskId) throws WebClientResponseException { - return withRetryOnConflict(super.getConnectorTaskStatusWithHttpInfo(connectorName, taskId)); + return withRetryOnConflictOrRebalance(super.getConnectorTaskStatusWithHttpInfo(connectorName, taskId)); } @Override public Flux<ConnectorTask> getConnectorTasks(String connectorName) throws WebClientResponseException { - return withRetryOnConflict(super.getConnectorTasks(connectorName)); + return withRetryOnConflictOrRebalance(super.getConnectorTasks(connectorName)); } @Override public Mono<ResponseEntity<List<ConnectorTask>>> getConnectorTasksWithHttpInfo(String connectorName) throws WebClientResponseException { - return withRetryOnConflict(super.getConnectorTasksWithHttpInfo(connectorName)); + return withRetryOnConflictOrRebalance(super.getConnectorTasksWithHttpInfo(connectorName)); } @Override public Mono<Map<String, ConnectorTopics>> getConnectorTopics(String connectorName) throws WebClientResponseException { - return withRetryOnConflict(super.getConnectorTopics(connectorName)); + return withRetryOnConflictOrRebalance(super.getConnectorTopics(connectorName)); } @Override public Mono<ResponseEntity<Map<String, ConnectorTopics>>> getConnectorTopicsWithHttpInfo(String connectorName) throws WebClientResponseException { - return withRetryOnConflict(super.getConnectorTopicsWithHttpInfo(connectorName)); + return withRetryOnConflictOrRebalance(super.getConnectorTopicsWithHttpInfo(connectorName)); } @Override - public Flux<String> getConnectors(String search) throws WebClientResponseException { - return withRetryOnConflict(super.getConnectors(search)); + public Mono<List<String>> getConnectors(String search) throws WebClientResponseException { + return withRetryOnConflictOrRebalance(super.getConnectors(search)); } @Override public Mono<ResponseEntity<List<String>>> getConnectorsWithHttpInfo(String search) throws WebClientResponseException { - return withRetryOnConflict(super.getConnectorsWithHttpInfo(search)); + return withRetryOnConflictOrRebalance(super.getConnectorsWithHttpInfo(search)); } @Override public Mono<Void> pauseConnector(String connectorName) throws WebClientResponseException { - return withRetryOnConflict(super.pauseConnector(connectorName)); + return withRetryOnConflictOrRebalance(super.pauseConnector(connectorName)); } @Override public Mono<ResponseEntity<Void>> pauseConnectorWithHttpInfo(String connectorName) throws WebClientResponseException { - return withRetryOnConflict(super.pauseConnectorWithHttpInfo(connectorName)); + return withRetryOnConflictOrRebalance(super.pauseConnectorWithHttpInfo(connectorName)); + } + + @Override + public Mono<Void> stopConnector(String connectorName) throws WebClientResponseException { + return withRetryOnConflictOrRebalance(super.stopConnector(connectorName)); + } + + @Override + public Mono<ResponseEntity<Void>> stopConnectorWithHttpInfo(String connectorName) throws WebClientResponseException { + return withRetryOnConflictOrRebalance(super.stopConnectorWithHttpInfo(connectorName)); } @Override public Mono<Void> restartConnector(String connectorName, Boolean includeTasks, Boolean onlyFailed) throws WebClientResponseException { - return withRetryOnConflict(super.restartConnector(connectorName, includeTasks, onlyFailed)); + return withRetryOnConflictOrRebalance(super.restartConnector(connectorName, includeTasks, onlyFailed)); } @Override public Mono<ResponseEntity<Void>> restartConnectorWithHttpInfo(String connectorName, Boolean includeTasks, Boolean onlyFailed) throws WebClientResponseException { - return withRetryOnConflict(super.restartConnectorWithHttpInfo(connectorName, includeTasks, onlyFailed)); + return withRetryOnConflictOrRebalance(super.restartConnectorWithHttpInfo(connectorName, includeTasks, onlyFailed)); } @Override public Mono<Void> restartConnectorTask(String connectorName, Integer taskId) throws WebClientResponseException { - return withRetryOnConflict(super.restartConnectorTask(connectorName, taskId)); + return withRetryOnConflictOrRebalance(super.restartConnectorTask(connectorName, taskId)); } @Override public Mono<ResponseEntity<Void>> restartConnectorTaskWithHttpInfo(String connectorName, Integer taskId) throws WebClientResponseException { - return withRetryOnConflict(super.restartConnectorTaskWithHttpInfo(connectorName, taskId)); + return withRetryOnConflictOrRebalance(super.restartConnectorTaskWithHttpInfo(connectorName, taskId)); + } + + @Override + public Mono<Void> resetConnectorOffsets(String connectorName) + throws WebClientResponseException { + return withRetryOnConflictOrRebalance(super.resetConnectorOffsets(connectorName)); + } + + @Override + public Mono<ResponseEntity<Void>> resetConnectorOffsetsWithHttpInfo(String connectorName) + throws WebClientResponseException { + return
withRetryOnConflictOrRebalance(super.resetConnectorOffsetsWithHttpInfo(connectorName)); } @Override public Mono<Void> resumeConnector(String connectorName) throws WebClientResponseException { - return super.resumeConnector(connectorName); + return withRetryOnRebalance(super.resumeConnector(connectorName)); } @Override public Mono<ResponseEntity<Void>> resumeConnectorWithHttpInfo(String connectorName) throws WebClientResponseException { - return withRetryOnConflict(super.resumeConnectorWithHttpInfo(connectorName)); + return withRetryOnConflictOrRebalance(super.resumeConnectorWithHttpInfo(connectorName)); } @Override public Mono<ResponseEntity<Connector>> setConnectorConfigWithHttpInfo(String connectorName, Map<String, Object> requestBody) throws WebClientResponseException { - return withRetryOnConflict(super.setConnectorConfigWithHttpInfo(connectorName, requestBody)); + return withRetryOnConflictOrRebalance(super.setConnectorConfigWithHttpInfo(connectorName, requestBody)); } @Override public Mono<ConnectorPluginConfigValidationResponse> validateConnectorPluginConfig(String pluginName, Map<String, Object> requestBody) throws WebClientResponseException { - return withRetryOnConflict(super.validateConnectorPluginConfig(pluginName, requestBody)); + return withRetryOnConflictOrRebalance(super.validateConnectorPluginConfig(pluginName, requestBody)); } @Override public Mono<ResponseEntity<ConnectorPluginConfigValidationResponse>> validateConnectorPluginConfigWithHttpInfo( String pluginName, Map<String, Object> requestBody) throws WebClientResponseException { - return withRetryOnConflict(super.validateConnectorPluginConfigWithHttpInfo(pluginName, requestBody)); + return withRetryOnConflictOrRebalance(super.validateConnectorPluginConfigWithHttpInfo(pluginName, requestBody)); } private static class RetryingApiClient extends ApiClient {
diff --git a/api/src/main/java/io/kafbat/ui/config/ClustersProperties.java b/api/src/main/java/io/kafbat/ui/config/ClustersProperties.java index e91a5bc9a..e73029c4f 100644 --- a/api/src/main/java/io/kafbat/ui/config/ClustersProperties.java +++ b/api/src/main/java/io/kafbat/ui/config/ClustersProperties.java @@ -2,6 +2,9 @@ import io.kafbat.ui.model.MetricsConfig; import jakarta.annotation.PostConstruct; +import jakarta.validation.Valid; +import jakarta.validation.constraints.NotBlank; +import jakarta.validation.constraints.NotNull; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; @@ -17,13 +20,15 @@ import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.context.annotation.Configuration; import org.springframework.util.StringUtils; +import org.springframework.validation.annotation.Validated; @Configuration @ConfigurationProperties("kafka") @Data +@Validated public class ClustersProperties { - List<Cluster> clusters = new ArrayList<>(); + List<@Valid Cluster> clusters = new ArrayList<>(); String internalTopicPrefix; @@ -33,24 +38,35 @@ public class ClustersProperties { @Data public static class Cluster { + @NotBlank(message = "field name for cluster cannot be blank") String name; + @NotBlank(message = "field bootstrapServers for cluster cannot be blank") String bootstrapServers; + + TruststoreConfig ssl; + String schemaRegistry; SchemaRegistryAuth schemaRegistryAuth; KeystoreConfig schemaRegistrySsl; + String ksqldbServer; KsqldbServerAuth ksqldbServerAuth; KeystoreConfig ksqldbServerSsl; - List<ConnectCluster> kafkaConnect; + + List<@Valid ConnectCluster> kafkaConnect; + + List<@Valid SerdeConfig> serde; + String defaultKeySerde; + String defaultValueSerde; + MetricsConfigData metrics; Map<String, Object> properties; boolean readOnly = false; - List<SerdeConfig> serde; - String defaultKeySerde; - String defaultValueSerde; - List<Masking> masking; + Long pollingThrottleRate; - TruststoreConfig ssl; + + List<@Valid Masking> masking; + AuditProperties audit; } @@ -79,7 +95,9 @@ public static class MetricsConfigData { @Builder(toBuilder = true) @ToString(exclude = {"password", "keystorePassword"}) public static class ConnectCluster { + @NotBlank String name; + @NotBlank String address; String username; String password; @@ -99,10 +117,21 @@ public static class SchemaRegistryAuth { public static class TruststoreConfig { String truststoreLocation; String truststorePassword; + boolean verifySsl = true; + } + + @Data + @NoArgsConstructor + @AllArgsConstructor + @ToString(exclude = {"keystorePassword"}) + public static class KeystoreConfig { + String keystoreLocation; + String keystorePassword; } @Data public static class SerdeConfig { + @NotBlank String name; String className; String filePath; @@ -118,17 +147,9 @@ public static class KsqldbServerAuth { String password; } - @Data - @NoArgsConstructor - @AllArgsConstructor - @ToString(exclude = {"keystorePassword"}) - public static class KeystoreConfig { - String keystoreLocation; - String keystorePassword; - } - @Data public static class Masking { + @NotNull Type type; List<String> fields; String fieldsNamePattern; @@ -150,7 +171,7 @@ public static class AuditProperties { Integer auditTopicsPartitions; Boolean topicAuditEnabled; Boolean consoleAuditEnabled; - LogLevel level; + LogLevel level = LogLevel.ALTER_ONLY; Map<String, String> auditTopicProperties; public enum LogLevel { @@ -182,6 +203,7 @@ private void flattenClusterProperties() { } } + @SuppressWarnings("unchecked") private Map<String, Object> flattenClusterProperties(@Nullable String prefix, @Nullable Map<String, Object> propertiesMap) { Map<String, Object> flattened = new HashMap<>();
diff --git a/api/src/main/java/io/kafbat/ui/config/CorsGlobalConfiguration.java b/api/src/main/java/io/kafbat/ui/config/CorsGlobalConfiguration.java index 4713dfd37..d39fda91d 100644 --- a/api/src/main/java/io/kafbat/ui/config/CorsGlobalConfiguration.java +++ b/api/src/main/java/io/kafbat/ui/config/CorsGlobalConfiguration.java @@ -22,10 +22,7 @@ public WebFilter corsFilter() { final ServerHttpResponse response = ctx.getResponse(); final HttpHeaders headers = response.getHeaders(); - headers.add("Access-Control-Allow-Origin", "*"); - headers.add("Access-Control-Allow-Methods", "GET, PUT, POST, DELETE, OPTIONS"); - headers.add("Access-Control-Max-Age", "3600"); - headers.add("Access-Control-Allow-Headers", "Content-Type"); + fillCorsHeader(headers, request); if (request.getMethod() == HttpMethod.OPTIONS) { response.setStatusCode(HttpStatus.OK); @@ -36,4 +33,11 @@ public WebFilter corsFilter() { }; } + public static void fillCorsHeader(HttpHeaders responseHeaders, ServerHttpRequest request) { + responseHeaders.add("Access-Control-Allow-Origin", request.getHeaders().getOrigin()); + responseHeaders.add("Access-Control-Allow-Credentials", "true"); + responseHeaders.add("Access-Control-Allow-Methods", "GET, PUT, POST, DELETE, OPTIONS"); + responseHeaders.add("Access-Control-Max-Age", "3600"); + responseHeaders.add("Access-Control-Allow-Headers", "Content-Type"); + } }
diff --git a/api/src/main/java/io/kafbat/ui/config/GeneralSecurityConfig.java b/api/src/main/java/io/kafbat/ui/config/GeneralSecurityConfig.java new file mode 100644 index 000000000..75ef456e5 --- /dev/null +++ b/api/src/main/java/io/kafbat/ui/config/GeneralSecurityConfig.java @@ -0,0 +1,17 @@ +package io.kafbat.ui.config; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import
org.springframework.security.web.server.firewall.StrictServerWebExchangeFirewall; + +@Configuration +public class GeneralSecurityConfig { + + @Bean + public StrictServerWebExchangeFirewall strictServerWebExchangeFirewall() { + StrictServerWebExchangeFirewall firewall = new StrictServerWebExchangeFirewall(); + firewall.setAllowUrlEncodedSlash(true); + return firewall; + } + +}
diff --git a/api/src/main/java/io/kafbat/ui/config/ReadOnlyModeFilter.java b/api/src/main/java/io/kafbat/ui/config/ReadOnlyModeFilter.java index ac7c6747f..acfe1929c 100644 --- a/api/src/main/java/io/kafbat/ui/config/ReadOnlyModeFilter.java +++ b/api/src/main/java/io/kafbat/ui/config/ReadOnlyModeFilter.java @@ -33,7 +33,8 @@ public class ReadOnlyModeFilter implements WebFilter { @NotNull @Override public Mono<Void> filter(ServerWebExchange exchange, @NotNull WebFilterChain chain) { - var isSafeMethod = exchange.getRequest().getMethod() == HttpMethod.GET; + var isSafeMethod = + exchange.getRequest().getMethod() == HttpMethod.GET || exchange.getRequest().getMethod() == HttpMethod.OPTIONS; if (isSafeMethod) { return chain.filter(exchange); }
diff --git a/api/src/main/java/io/kafbat/ui/config/auth/AbstractAuthSecurityConfig.java b/api/src/main/java/io/kafbat/ui/config/auth/AbstractAuthSecurityConfig.java index f23a0dd2a..265bac03f 100644 --- a/api/src/main/java/io/kafbat/ui/config/auth/AbstractAuthSecurityConfig.java +++ b/api/src/main/java/io/kafbat/ui/config/auth/AbstractAuthSecurityConfig.java @@ -1,24 +1,53 @@ package io.kafbat.ui.config.auth; +import io.kafbat.ui.util.EmptyRedirectStrategy; +import java.net.URI; +import org.springframework.security.web.server.authentication.RedirectServerAuthenticationSuccessHandler; +import org.springframework.security.web.server.authentication.logout.RedirectServerLogoutSuccessHandler; + abstract class AbstractAuthSecurityConfig { protected AbstractAuthSecurityConfig() { } + protected static final String LOGIN_URL = "/login"; + protected static final String LOGOUT_URL = "/auth?logout"; + protected static final String[] AUTH_WHITELIST = { - "/css/**", - "/js/**", - "/media/**", + /* STATIC */ + "/index.html", + "/assets/**", + "/manifest.json", + "/favicon.svg", + "/favicon/**", + + "/static/**", "/resources/**", + + /* ACTUATOR */ "/actuator/health/**", "/actuator/info", "/actuator/prometheus", - "/auth", + + /* AUTH */ "/login", "/logout", "/oauth2/**", - "/static/**" + "/api/config/authentication", + "/api/authorization" }; + protected RedirectServerAuthenticationSuccessHandler emptyRedirectSuccessHandler() { + final var authHandler = new RedirectServerAuthenticationSuccessHandler(); + authHandler.setRedirectStrategy(new EmptyRedirectStrategy()); + return authHandler; + } + + protected RedirectServerLogoutSuccessHandler redirectLogoutSuccessHandler() { + final var logoutSuccessHandler = new RedirectServerLogoutSuccessHandler(); + logoutSuccessHandler.setLogoutSuccessUrl(URI.create(LOGOUT_URL)); + return logoutSuccessHandler; + } + }
diff --git a/api/src/main/java/io/kafbat/ui/config/auth/BasicAuthSecurityConfig.java b/api/src/main/java/io/kafbat/ui/config/auth/BasicAuthSecurityConfig.java index 7a25fb3a7..788c33bdd 100644 --- a/api/src/main/java/io/kafbat/ui/config/auth/BasicAuthSecurityConfig.java +++ b/api/src/main/java/io/kafbat/ui/config/auth/BasicAuthSecurityConfig.java @@ -1,17 +1,15 @@ package io.kafbat.ui.config.auth; -import io.kafbat.ui.util.EmptyRedirectStrategy; -import java.net.URI; +import io.kafbat.ui.util.StaticFileWebFilter; import lombok.extern.slf4j.Slf4j; import
org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.http.HttpMethod; import org.springframework.security.config.annotation.web.reactive.EnableWebFluxSecurity; +import org.springframework.security.config.web.server.SecurityWebFiltersOrder; import org.springframework.security.config.web.server.ServerHttpSecurity; import org.springframework.security.web.server.SecurityWebFilterChain; -import org.springframework.security.web.server.authentication.RedirectServerAuthenticationSuccessHandler; -import org.springframework.security.web.server.authentication.logout.RedirectServerLogoutSuccessHandler; import org.springframework.security.web.server.util.matcher.ServerWebExchangeMatchers; @Configuration @@ -20,32 +18,28 @@ @Slf4j public class BasicAuthSecurityConfig extends AbstractAuthSecurityConfig { - public static final String LOGIN_URL = "/auth"; - public static final String LOGOUT_URL = "/auth?logout"; - @Bean public SecurityWebFilterChain configure(ServerHttpSecurity http) { log.info("Configuring LOGIN_FORM authentication."); - final var authHandler = new RedirectServerAuthenticationSuccessHandler(); - authHandler.setRedirectStrategy(new EmptyRedirectStrategy()); - - final var logoutSuccessHandler = new RedirectServerLogoutSuccessHandler(); - logoutSuccessHandler.setLogoutSuccessUrl(URI.create(LOGOUT_URL)); - - - return http.authorizeExchange(spec -> spec + var builder = http.authorizeExchange(spec -> spec .pathMatchers(AUTH_WHITELIST) .permitAll() .anyExchange() .authenticated() ) - .formLogin(spec -> spec.loginPage(LOGIN_URL).authenticationSuccessHandler(authHandler)) + .formLogin(form -> form + .loginPage(LOGIN_URL) + .authenticationSuccessHandler(emptyRedirectSuccessHandler()) + ) .logout(spec -> spec - .logoutSuccessHandler(logoutSuccessHandler) + .logoutSuccessHandler(redirectLogoutSuccessHandler()) .requiresLogout(ServerWebExchangeMatchers.pathMatchers(HttpMethod.GET, "/logout"))) - .csrf(ServerHttpSecurity.CsrfSpec::disable) - .build(); + .csrf(ServerHttpSecurity.CsrfSpec::disable); + + builder.addFilterAt(new StaticFileWebFilter(), SecurityWebFiltersOrder.LOGIN_PAGE_GENERATING); + + return builder.build(); } } diff --git a/api/src/main/java/io/kafbat/ui/config/auth/LdapSecurityConfig.java b/api/src/main/java/io/kafbat/ui/config/auth/LdapSecurityConfig.java index 1b5a8ca87..4267a4b0e 100644 --- a/api/src/main/java/io/kafbat/ui/config/auth/LdapSecurityConfig.java +++ b/api/src/main/java/io/kafbat/ui/config/auth/LdapSecurityConfig.java @@ -1,41 +1,47 @@ package io.kafbat.ui.config.auth; -import static io.kafbat.ui.config.auth.AbstractAuthSecurityConfig.AUTH_WHITELIST; - import io.kafbat.ui.service.rbac.AccessControlService; +import io.kafbat.ui.service.rbac.extractor.RbacActiveDirectoryAuthoritiesExtractor; import io.kafbat.ui.service.rbac.extractor.RbacLdapAuthoritiesExtractor; +import io.kafbat.ui.util.CustomSslSocketFactory; +import io.kafbat.ui.util.StaticFileWebFilter; import java.util.Collection; import java.util.List; +import java.util.Map; import java.util.Optional; +import java.util.stream.Stream; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.context.properties.EnableConfigurationProperties; import 
org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; +import org.springframework.http.HttpMethod; import org.springframework.ldap.core.DirContextOperations; import org.springframework.ldap.core.support.BaseLdapPathContextSource; import org.springframework.ldap.core.support.LdapContextSource; -import org.springframework.security.authentication.AuthenticationManager; import org.springframework.security.authentication.ProviderManager; import org.springframework.security.authentication.ReactiveAuthenticationManager; import org.springframework.security.authentication.ReactiveAuthenticationManagerAdapter; -import org.springframework.security.config.Customizer; import org.springframework.security.config.annotation.web.reactive.EnableWebFluxSecurity; +import org.springframework.security.config.web.server.SecurityWebFiltersOrder; import org.springframework.security.config.web.server.ServerHttpSecurity; import org.springframework.security.core.GrantedAuthority; import org.springframework.security.core.userdetails.UserDetails; import org.springframework.security.ldap.authentication.AbstractLdapAuthenticationProvider; import org.springframework.security.ldap.authentication.BindAuthenticator; import org.springframework.security.ldap.authentication.LdapAuthenticationProvider; +import org.springframework.security.ldap.authentication.NullLdapAuthoritiesPopulator; import org.springframework.security.ldap.authentication.ad.ActiveDirectoryLdapAuthenticationProvider; +import org.springframework.security.ldap.authentication.ad.DefaultActiveDirectoryAuthoritiesPopulator; import org.springframework.security.ldap.search.FilterBasedLdapUserSearch; import org.springframework.security.ldap.search.LdapUserSearch; -import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator; import org.springframework.security.ldap.userdetails.LdapAuthoritiesPopulator; import org.springframework.security.ldap.userdetails.LdapUserDetailsMapper; import org.springframework.security.web.server.SecurityWebFilterChain; +import org.springframework.security.web.server.util.matcher.ServerWebExchangeMatchers; @Configuration @EnableWebFluxSecurity @@ -43,19 +49,48 @@ @EnableConfigurationProperties(LdapProperties.class) @RequiredArgsConstructor @Slf4j -public class LdapSecurityConfig { +public class LdapSecurityConfig extends AbstractAuthSecurityConfig { + private static final Map<String, Object> BASE_ENV_PROPS = Map.of( + "java.naming.ldap.factory.socket", CustomSslSocketFactory.class.getName() + ); private final LdapProperties props; @Bean - public ReactiveAuthenticationManager authenticationManager(LdapContextSource ldapContextSource, - LdapAuthoritiesPopulator authoritiesExtractor, - AccessControlService acs) { + public ReactiveAuthenticationManager authenticationManager(AbstractLdapAuthenticationProvider authProvider) { + return new ReactiveAuthenticationManagerAdapter(new ProviderManager(List.of(authProvider))); + } + + @Bean + public AbstractLdapAuthenticationProvider authenticationProvider(LdapAuthoritiesPopulator authoritiesExtractor, + @Autowired(required = false) BindAuthenticator ba, + AccessControlService acs) { var rbacEnabled = acs.isRbacEnabled(); + + AbstractLdapAuthenticationProvider authProvider; + + if (props.isActiveDirectory()) { + authProvider = activeDirectoryProvider(authoritiesExtractor); + } else { + authProvider = new LdapAuthenticationProvider(ba, authoritiesExtractor); + } + + if (rbacEnabled) { +
authProvider.setUserDetailsContextMapper(new RbacUserDetailsMapper()); + } + + return authProvider; + } + + @Bean + @ConditionalOnProperty(value = "oauth2.ldap.activeDirectory", havingValue = "false", matchIfMissing = true) + public BindAuthenticator ldapBindAuthentication(LdapContextSource ldapContextSource) { BindAuthenticator ba = new BindAuthenticator(ldapContextSource); + if (props.getBase() != null) { ba.setUserDnPatterns(new String[] {props.getBase()}); } + if (props.getUserFilterSearchFilter() != null) { LdapUserSearch userSearch = new FilterBasedLdapUserSearch(props.getUserFilterSearchBase(), props.getUserFilterSearchFilter(), @@ -63,24 +98,7 @@ public ReactiveAuthenticationManager authenticationManager(LdapContextSource lda ba.setUserSearch(userSearch); } - AbstractLdapAuthenticationProvider authenticationProvider; - if (!props.isActiveDirectory()) { - authenticationProvider = rbacEnabled - ? new LdapAuthenticationProvider(ba, authoritiesExtractor) - : new LdapAuthenticationProvider(ba); - } else { - authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider(props.getActiveDirectoryDomain(), - props.getUrls()); // TODO Issue #3741 - authenticationProvider.setUseAuthenticationRequestCredentials(true); - } - - if (rbacEnabled) { - authenticationProvider.setUserDetailsContextMapper(new UserDetailsMapper()); - } - - AuthenticationManager am = new ProviderManager(List.of(authenticationProvider)); - - return new ReactiveAuthenticationManagerAdapter(am); + return ba; } @Bean @@ -94,24 +112,27 @@ public LdapContextSource ldapContextSource() { } @Bean - public DefaultLdapAuthoritiesPopulator ldapAuthoritiesExtractor(ApplicationContext context, - BaseLdapPathContextSource contextSource, - AccessControlService acs) { - var rbacEnabled = acs != null && acs.isRbacEnabled(); + public LdapAuthoritiesPopulator authoritiesExtractor(ApplicationContext ctx, + BaseLdapPathContextSource ldapCtx, + AccessControlService acs) { + if (!props.isActiveDirectory()) { + if (!acs.isRbacEnabled()) { + return new NullLdapAuthoritiesPopulator(); + } - DefaultLdapAuthoritiesPopulator extractor; + var extractor = new RbacLdapAuthoritiesExtractor(ctx, ldapCtx, props.getGroupFilterSearchBase()); - if (rbacEnabled) { - extractor = new RbacLdapAuthoritiesExtractor(context, contextSource, props.getGroupFilterSearchBase()); + Optional.ofNullable(props.getGroupFilterSearchFilter()).ifPresent(extractor::setGroupSearchFilter); + extractor.setRolePrefix(""); + extractor.setConvertToUpperCase(false); + extractor.setSearchSubtree(true); + + return extractor; } else { - extractor = new DefaultLdapAuthoritiesPopulator(contextSource, props.getGroupFilterSearchBase()); + return acs.isRbacEnabled() + ? 
new RbacActiveDirectoryAuthoritiesExtractor(ctx) + : new DefaultActiveDirectoryAuthoritiesPopulator(); } - - Optional.ofNullable(props.getGroupFilterSearchFilter()).ifPresent(extractor::setGroupSearchFilter); - extractor.setRolePrefix(""); - extractor.setConvertToUpperCase(false); - extractor.setSearchSubtree(true); - return extractor; } @Bean @@ -121,19 +142,43 @@ public SecurityWebFilterChain configureLdap(ServerHttpSecurity http) { log.info("Active Directory support for LDAP has been enabled."); } - return http.authorizeExchange(spec -> spec + var builder = http.authorizeExchange(spec -> spec .pathMatchers(AUTH_WHITELIST) .permitAll() .anyExchange() .authenticated() ) - .formLogin(Customizer.withDefaults()) - .logout(Customizer.withDefaults()) - .csrf(ServerHttpSecurity.CsrfSpec::disable) - .build(); + .formLogin(form -> form + .loginPage(LOGIN_URL) + .authenticationSuccessHandler(emptyRedirectSuccessHandler()) + ) + .logout(spec -> spec + .logoutSuccessHandler(redirectLogoutSuccessHandler()) + .requiresLogout(ServerWebExchangeMatchers.pathMatchers(HttpMethod.GET, "/logout"))) + .csrf(ServerHttpSecurity.CsrfSpec::disable); + + builder.addFilterAt(new StaticFileWebFilter(), SecurityWebFiltersOrder.LOGIN_PAGE_GENERATING); + + return builder.build(); + } + + private ActiveDirectoryLdapAuthenticationProvider activeDirectoryProvider(LdapAuthoritiesPopulator populator) { + ActiveDirectoryLdapAuthenticationProvider provider = new ActiveDirectoryLdapAuthenticationProvider( + props.getActiveDirectoryDomain(), + props.getUrls() + ); + + provider.setUseAuthenticationRequestCredentials(true); + provider.setAuthoritiesPopulator(populator); + + if (Stream.of(props.getUrls().split(",")).anyMatch(url -> url.startsWith("ldaps://"))) { + provider.setContextEnvironmentProperties(BASE_ENV_PROPS); + } + + return provider; } - private static class UserDetailsMapper extends LdapUserDetailsMapper { + private static class RbacUserDetailsMapper extends LdapUserDetailsMapper { @Override public UserDetails mapUserFromContext(DirContextOperations ctx, String username, Collection<? extends GrantedAuthority> authorities) {
diff --git a/api/src/main/java/io/kafbat/ui/config/auth/OAuthSecurityConfig.java b/api/src/main/java/io/kafbat/ui/config/auth/OAuthSecurityConfig.java index 5c1e88bd7..4794b83ca 100644 --- a/api/src/main/java/io/kafbat/ui/config/auth/OAuthSecurityConfig.java +++ b/api/src/main/java/io/kafbat/ui/config/auth/OAuthSecurityConfig.java @@ -3,12 +3,13 @@ import io.kafbat.ui.config.auth.logout.OAuthLogoutSuccessHandler; import io.kafbat.ui.service.rbac.AccessControlService; import io.kafbat.ui.service.rbac.extractor.ProviderAuthorityExtractor; +import io.kafbat.ui.util.StaticFileWebFilter; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Optional; import lombok.RequiredArgsConstructor; -import lombok.extern.log4j.Log4j2; +import lombok.extern.slf4j.Slf4j; import org.jetbrains.annotations.Nullable; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.autoconfigure.security.oauth2.client.OAuth2ClientProperties; @@ -19,6 +20,7 @@ import org.springframework.security.config.Customizer; import org.springframework.security.config.annotation.method.configuration.EnableReactiveMethodSecurity; import org.springframework.security.config.annotation.web.reactive.EnableWebFluxSecurity; +import org.springframework.security.config.web.server.SecurityWebFiltersOrder; import org.springframework.security.config.web.server.ServerHttpSecurity; import
org.springframework.security.oauth2.client.oidc.userinfo.OidcReactiveOAuth2UserService; import org.springframework.security.oauth2.client.oidc.userinfo.OidcUserRequest; @@ -41,7 +43,7 @@ @EnableWebFluxSecurity @EnableReactiveMethodSecurity @RequiredArgsConstructor -@Log4j2 +@Slf4j public class OAuthSecurityConfig extends AbstractAuthSecurityConfig { private final OAuthProperties properties; @@ -50,7 +52,7 @@ public class OAuthSecurityConfig extends AbstractAuthSecurityConfig { public SecurityWebFilterChain configure(ServerHttpSecurity http, OAuthLogoutSuccessHandler logoutHandler) { log.info("Configuring OAUTH2 authentication."); - return http.authorizeExchange(spec -> spec + var builder = http.authorizeExchange(spec -> spec .pathMatchers(AUTH_WHITELIST) .permitAll() .anyExchange() @@ -58,8 +60,12 @@ public SecurityWebFilterChain configure(ServerHttpSecurity http, OAuthLogoutSucc ) .oauth2Login(Customizer.withDefaults()) .logout(spec -> spec.logoutSuccessHandler(logoutHandler)) - .csrf(ServerHttpSecurity.CsrfSpec::disable) - .build(); + .csrf(ServerHttpSecurity.CsrfSpec::disable); + + + builder.addFilterAt(new StaticFileWebFilter(), SecurityWebFiltersOrder.LOGIN_PAGE_GENERATING); + + return builder.build(); } @Bean diff --git a/api/src/main/java/io/kafbat/ui/config/auth/condition/ActiveDirectoryCondition.java b/api/src/main/java/io/kafbat/ui/config/auth/condition/ActiveDirectoryCondition.java deleted file mode 100644 index 944eff0d3..000000000 --- a/api/src/main/java/io/kafbat/ui/config/auth/condition/ActiveDirectoryCondition.java +++ /dev/null @@ -1,21 +0,0 @@ -package io.kafbat.ui.config.auth.condition; - -import org.springframework.boot.autoconfigure.condition.AllNestedConditions; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; - -public class ActiveDirectoryCondition extends AllNestedConditions { - - public ActiveDirectoryCondition() { - super(ConfigurationPhase.PARSE_CONFIGURATION); - } - - @ConditionalOnProperty(value = "auth.type", havingValue = "LDAP") - public static class OnAuthType { - - } - - @ConditionalOnProperty(value = "${oauth2.ldap.activeDirectory}:false", havingValue = "true", matchIfMissing = false) - public static class OnActiveDirectory { - - } -} diff --git a/api/src/main/java/io/kafbat/ui/config/auth/logout/CognitoLogoutSuccessHandler.java b/api/src/main/java/io/kafbat/ui/config/auth/logout/CognitoLogoutSuccessHandler.java index d98ea22af..e58f51ab3 100644 --- a/api/src/main/java/io/kafbat/ui/config/auth/logout/CognitoLogoutSuccessHandler.java +++ b/api/src/main/java/io/kafbat/ui/config/auth/logout/CognitoLogoutSuccessHandler.java @@ -40,7 +40,7 @@ public Mono handle(WebFilterExchange exchange, Authentication authenticati requestUri.getPath(), requestUri.getQuery()); final UriComponents baseUrl = UriComponentsBuilder - .fromHttpUrl(fullUrl) + .fromUriString(fullUrl) .replacePath("/") .replaceQuery(null) .fragment(null) diff --git a/api/src/main/java/io/kafbat/ui/controller/ApplicationConfigController.java b/api/src/main/java/io/kafbat/ui/controller/ApplicationConfigController.java index 5d5d4ed98..e8d763545 100644 --- a/api/src/main/java/io/kafbat/ui/controller/ApplicationConfigController.java +++ b/api/src/main/java/io/kafbat/ui/controller/ApplicationConfigController.java @@ -6,6 +6,7 @@ import io.kafbat.ui.api.ApplicationConfigApi; import io.kafbat.ui.config.ClustersProperties; import io.kafbat.ui.model.ActionDTO; +import io.kafbat.ui.model.AppAuthenticationSettingsDTO; import io.kafbat.ui.model.ApplicationConfigDTO; import 
io.kafbat.ui.model.ApplicationConfigPropertiesDTO; import io.kafbat.ui.model.ApplicationConfigValidationDTO; @@ -66,6 +67,13 @@ public Mono<ResponseEntity<ApplicationInfoDTO>> getApplicationInfo(ServerWebExchange exchange) { return Mono.just(applicationInfoService.getApplicationInfo()).map(ResponseEntity::ok); } + @Override + public Mono<ResponseEntity<AppAuthenticationSettingsDTO>> getAuthenticationSettings( + ServerWebExchange exchange) { + return Mono.just(applicationInfoService.getAuthenticationProperties()) + .map(ResponseEntity::ok); + } + @Override public Mono<ResponseEntity<ApplicationConfigDTO>> getCurrentConfig(ServerWebExchange exchange) { var context = AccessContext.builder() @@ -109,7 +117,7 @@ public Mono<ResponseEntity<UploadedFileInfoDTO>> uploadConfigRelatedFile(Flux<Part> fileFlux, dynamicConfigOperations.uploadConfigRelatedFile((FilePart) file) - .map(path -> new UploadedFileInfoDTO().location(path.toString())) + .map(path -> new UploadedFileInfoDTO(path.toString())) .map(ResponseEntity::ok)) .doOnEach(sig -> audit(context, sig)); }
diff --git a/api/src/main/java/io/kafbat/ui/controller/AuthController.java b/api/src/main/java/io/kafbat/ui/controller/AuthController.java deleted file mode 100644 index e4532dda3..000000000 --- a/api/src/main/java/io/kafbat/ui/controller/AuthController.java +++ /dev/null @@ -1,99 +0,0 @@ -package io.kafbat.ui.controller; - -import java.nio.charset.Charset; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.springframework.security.web.server.csrf.CsrfToken; -import org.springframework.util.MultiValueMap; -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.RestController; -import org.springframework.web.server.ServerWebExchange; -import reactor.core.publisher.Mono; - -@RestController -@RequiredArgsConstructor -@Slf4j -public class AuthController { - - @GetMapping(value = "/auth", produces = {"text/html"}) - public Mono<byte[]> getAuth(ServerWebExchange exchange) { - Mono<CsrfToken> token = exchange.getAttributeOrDefault(CsrfToken.class.getName(), Mono.empty()); - return token - .map(AuthController::csrfToken) - .defaultIfEmpty("") - .map(csrfTokenHtmlInput -> createPage(exchange, csrfTokenHtmlInput)); - } - - private byte[] createPage(ServerWebExchange exchange, String csrfTokenHtmlInput) { - MultiValueMap<String, String> queryParams = exchange.getRequest() .getQueryParams(); - String contextPath = exchange.getRequest().getPath().contextPath().value(); - String page = - "<!DOCTYPE html>\n" + "<html lang=\"en\">\n" + "  <head>\n" - + "    <meta charset=\"utf-8\">\n" - + "    <meta http-equiv=\"X-UA-Compatible\" content=\"IE=edge\">\n" - + "    <meta name=\"viewport\" content=\"width=device-width, initial-scale=1, shrink-to-fit=no\">\n" - + "    <meta name=\"description\" content=\"\">\n" - + "    <title>Please sign in</title>\n" - + "    <link href=\"https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0-beta/css/bootstrap.min.css\" rel=\"stylesheet\" crossorigin=\"anonymous\">\n" - + "  </head>\n" - + "  <body>\n" - + "    <div class=\"container\">\n" - + formLogin(queryParams, contextPath, csrfTokenHtmlInput) - + "    </div>\n" - + "  </body>\n" - + "</html>"; - - return page.getBytes(Charset.defaultCharset()); - } - - private String formLogin( - MultiValueMap<String, String> queryParams, - String contextPath, String csrfTokenHtmlInput) { - - boolean isError = queryParams.containsKey("error"); - boolean isLogoutSuccess = queryParams.containsKey("logout"); - return - "      <form class=\"form-signin\" method=\"post\" action=\"" + contextPath + "/auth\">\n" - + createError(isError) - + createLogoutSuccess(isLogoutSuccess) - + "        <p>\n" - + "          <input type=\"text\" id=\"username\" name=\"username\" class=\"form-control\" placeholder=\"Username\" required autofocus>\n" - + "        </p>\n" + "        <p>\n" - + "          <input type=\"password\" id=\"password\" name=\"password\" class=\"form-control\" placeholder=\"Password\" required>\n" - + "        </p>\n" + csrfTokenHtmlInput - + "        <input class=\"btn btn-lg btn-primary btn-block\" type=\"submit\" value=\"Sign in\"/>\n" - + "      </form>\n"; - } - - private static String csrfToken(CsrfToken token) { - return "          <input type=\"hidden\" name=\"" + token.getParameterName() + "\" value=\"" + token.getToken() + "\"/>\n"; - } - - private static String createError(boolean isError) { - return isError - ? "<div class=\"alert alert-danger\" role=\"alert\">Invalid credentials</div>" - : ""; - } - - private static String createLogoutSuccess(boolean isLogoutSuccess) { - return isLogoutSuccess - ? "<div class=\"alert alert-success\" role=\"alert\">You have been signed out</div>" - : ""; - } -}
diff --git a/api/src/main/java/io/kafbat/ui/controller/AuthenticationController.java b/api/src/main/java/io/kafbat/ui/controller/AuthenticationController.java new file mode 100644 index 000000000..c94c344c9 --- /dev/null +++ b/api/src/main/java/io/kafbat/ui/controller/AuthenticationController.java @@ -0,0 +1,22 @@ +package io.kafbat.ui.controller; + +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.springframework.core.io.ClassPathResource; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RestController; +import reactor.core.publisher.Mono; + +@RestController +@RequiredArgsConstructor +@Slf4j +public class AuthenticationController { + + private static final String INDEX_HTML = "/static/index.html"; + + @GetMapping(value = "/login", produces = {"text/html"}) + public Mono<ClassPathResource> getLoginPage() { + return Mono.just(new ClassPathResource(INDEX_HTML)); + } + +}
diff --git a/api/src/main/java/io/kafbat/ui/controller/AccessController.java b/api/src/main/java/io/kafbat/ui/controller/AuthorizationController.java similarity index 97% rename from api/src/main/java/io/kafbat/ui/controller/AccessController.java rename to api/src/main/java/io/kafbat/ui/controller/AuthorizationController.java index e5b1ea438..1ac0aeb85 100644 --- a/api/src/main/java/io/kafbat/ui/controller/AccessController.java +++ b/api/src/main/java/io/kafbat/ui/controller/AuthorizationController.java @@ -26,7 +26,7 @@ @RestController @RequiredArgsConstructor @Slf4j -public class AccessController implements AuthorizationApi { +public class AuthorizationController implements AuthorizationApi { private final AccessControlService accessControlService;
diff --git a/api/src/main/java/io/kafbat/ui/controller/KafkaConnectController.java b/api/src/main/java/io/kafbat/ui/controller/KafkaConnectController.java index 08eb304c0..328a7353e 100644 --- a/api/src/main/java/io/kafbat/ui/controller/KafkaConnectController.java +++ b/api/src/main/java/io/kafbat/ui/controller/KafkaConnectController.java @@ -3,6 +3,8 @@ import static io.kafbat.ui.model.ConnectorActionDTO.RESTART; import static io.kafbat.ui.model.ConnectorActionDTO.RESTART_ALL_TASKS; import static io.kafbat.ui.model.ConnectorActionDTO.RESTART_FAILED_TASKS; +import static io.kafbat.ui.model.rbac.permission.ConnectAction.RESET_OFFSETS; +import static io.kafbat.ui.model.rbac.permission.ConnectAction.VIEW; import io.kafbat.ui.api.KafkaConnectApi; import io.kafbat.ui.model.ConnectDTO; @@ -285,4 +287,23 @@ private Comparator<FullConnectorInfoDTO> getConnectorsComparator(ConnectorColumnsToSortDTO orderBy) { default -> defaultComparator; }; } + + @Override + public Mono<ResponseEntity<Void>> resetConnectorOffsets(String clusterName, String connectName, + String connectorName, + ServerWebExchange exchange) { + + var context = AccessContext.builder() + .cluster(clusterName) + .connectActions(connectName, VIEW, RESET_OFFSETS) + .operationName("resetConnectorOffsets") + .operationParams(Map.of(CONNECTOR_NAME, connectorName)) + .build(); + + return validateAccess(context).then( + kafkaConnectService + .resetConnectorOffsets(getCluster(clusterName), connectName, connectorName) + .map(ResponseEntity::ok)) + .doOnEach(sig -> audit(context, sig)); + } }
diff --git a/api/src/main/java/io/kafbat/ui/controller/MessagesController.java b/api/src/main/java/io/kafbat/ui/controller/MessagesController.java index 62189b04a..b88e2d566 100644 --- a/api/src/main/java/io/kafbat/ui/controller/MessagesController.java +++ b/api/src/main/java/io/kafbat/ui/controller/MessagesController.java @@ -118,10 +118,11 @@ public Mono<ResponseEntity<Flux<TopicMessageEventDTO>>> getTopicMessagesV2(String clusterName, if (cursor != null) { messagesFlux = messagesService.loadMessages(getCluster(clusterName), topicName, cursor); } else { + var pollingMode = mode == null ? PollingModeDTO.LATEST : mode; messagesFlux = messagesService.loadMessages( getCluster(clusterName), topicName, - ConsumerPosition.create(checkNotNull(mode), checkNotNull(topicName), partitions, timestamp, offset), + ConsumerPosition.create(pollingMode, checkNotNull(topicName), partitions, timestamp, offset), stringFilter, smartFilterId, limit,
diff --git a/api/src/main/java/io/kafbat/ui/emitter/MessageFilters.java b/api/src/main/java/io/kafbat/ui/emitter/MessageFilters.java index 402b5a43e..b008e388e 100644 --- a/api/src/main/java/io/kafbat/ui/emitter/MessageFilters.java +++ b/api/src/main/java/io/kafbat/ui/emitter/MessageFilters.java @@ -52,7 +52,23 @@ public static Predicate<TopicMessageDTO> noop() { public static Predicate<TopicMessageDTO> containsStringFilter(String string) { return msg -> StringUtils.contains(msg.getKey(), string) - || StringUtils.contains(msg.getContent(), string); + || StringUtils.contains(msg.getContent(), string) || headersContains(msg, string); + } + + private static boolean headersContains(TopicMessageDTO msg, String searchString) { + final var headers = msg.getHeaders(); + + if (headers == null) { + return false; + } + + for (final var entry : headers.entrySet()) { + if (StringUtils.contains(entry.getKey(), searchString) || StringUtils.contains(entry.getValue(), searchString)) { + return true; + } + } + + return false; } public static Predicate<TopicMessageDTO> celScriptFilter(String script) {
diff --git a/api/src/main/java/io/kafbat/ui/emitter/OffsetsInfo.java b/api/src/main/java/io/kafbat/ui/emitter/OffsetsInfo.java index a361834d0..7f34e1708 100644 --- a/api/src/main/java/io/kafbat/ui/emitter/OffsetsInfo.java +++ b/api/src/main/java/io/kafbat/ui/emitter/OffsetsInfo.java @@ -53,7 +53,7 @@ private Map<TopicPartition, Long> firstOffsetsForPolling(Consumer<?, ?> consumer, Collection<TopicPartition> partitions) { try { // we try to use offsetsForTimes() to find earliest offsets, since for - // some topics (like compacted) beginningOffsets() ruturning 0 offsets + // some topics (like compacted) beginningOffsets() returns 0 offsets // even when effectively first offset can be very high var offsets = consumer.offsetsForTimes( partitions.stream().collect(Collectors.toMap(p -> p, p -> 0L))
diff --git a/api/src/main/java/io/kafbat/ui/emitter/ResultSizeLimiter.java b/api/src/main/java/io/kafbat/ui/emitter/ResultSizeLimiter.java deleted file mode 100644 index 3e0ec2a43..000000000 --- a/api/src/main/java/io/kafbat/ui/emitter/ResultSizeLimiter.java +++ /dev/null @@ -1,23 +0,0 @@ -package io.kafbat.ui.emitter; - -import io.kafbat.ui.model.TopicMessageEventDTO; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Predicate; - -public class ResultSizeLimiter implements Predicate<TopicMessageEventDTO> { - private final AtomicInteger processed = new AtomicInteger(); - private final int limit; - - public ResultSizeLimiter(int limit) { - this.limit = limit; - } - - @Override - public boolean test(TopicMessageEventDTO event) { - if (event.getType().equals(TopicMessageEventDTO.TypeEnum.MESSAGE)) { - final int i = processed.incrementAndGet(); - return i <= limit; - } - return true; - } -}
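The quick-search filter in MessageFilters now also scans header keys and values, so a search can find a message by, say, a trace id that only appears in a header. A tiny illustration of the new behaviour; the fluent setters on TopicMessageDTO are assumed here, since the real class is generated from the OpenAPI contract:

```java
import java.util.Map;
import java.util.function.Predicate;

class HeaderSearchSketch {
  // Illustrative only: a message whose sole match is a header value now passes the filter.
  static boolean headerOnlyMatch() {
    TopicMessageDTO msg = new TopicMessageDTO()
        .key("order-42")
        .content("{}")
        .headers(Map.of("trace-id", "abc-123")); // assumed fluent setters on the generated DTO

    Predicate<TopicMessageDTO> filter = MessageFilters.containsStringFilter("abc-123");
    return filter.test(msg); // true, via the new headersContains(..) branch
  }
}
```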
diff --git a/api/src/main/java/io/kafbat/ui/exception/CelException.java b/api/src/main/java/io/kafbat/ui/exception/CelException.java index e904368f6..d71d0da20 100644 --- a/api/src/main/java/io/kafbat/ui/exception/CelException.java +++ b/api/src/main/java/io/kafbat/ui/exception/CelException.java @@ -1,7 +1,10 @@ package io.kafbat.ui.exception; +import lombok.Getter; + +@Getter public class CelException extends CustomBaseException { - private String celOriginalExpression; + private final String celOriginalExpression; public CelException(String celOriginalExpression, String errorMessage) { super("CEL error. Original expression: %s. Error message: %s".formatted(celOriginalExpression, errorMessage));
diff --git a/api/src/main/java/io/kafbat/ui/exception/ConnectNotFoundException.java b/api/src/main/java/io/kafbat/ui/exception/ConnectNotFoundException.java deleted file mode 100644 index 5978c2e93..000000000 --- a/api/src/main/java/io/kafbat/ui/exception/ConnectNotFoundException.java +++ /dev/null @@ -1,13 +0,0 @@ -package io.kafbat.ui.exception; - -public class ConnectNotFoundException extends CustomBaseException { - - public ConnectNotFoundException() { - super("Connect not found"); - } - - @Override - public ErrorCode getErrorCode() { - return ErrorCode.CONNECT_NOT_FOUND; - } -}
diff --git a/api/src/main/java/io/kafbat/ui/exception/ConnectorOffsetsResetException.java b/api/src/main/java/io/kafbat/ui/exception/ConnectorOffsetsResetException.java new file mode 100644 index 000000000..f76feddc3 --- /dev/null +++ b/api/src/main/java/io/kafbat/ui/exception/ConnectorOffsetsResetException.java @@ -0,0 +1,13 @@ +package io.kafbat.ui.exception; + +public class ConnectorOffsetsResetException extends CustomBaseException { + + public ConnectorOffsetsResetException(String message) { + super(message); + } + + @Override + public ErrorCode getErrorCode() { + return ErrorCode.CONNECTOR_OFFSETS_RESET_ERROR; + } +}
diff --git a/api/src/main/java/io/kafbat/ui/exception/DuplicateEntityException.java b/api/src/main/java/io/kafbat/ui/exception/DuplicateEntityException.java deleted file mode 100644 index 23ba0c5af..000000000 --- a/api/src/main/java/io/kafbat/ui/exception/DuplicateEntityException.java +++ /dev/null @@ -1,13 +0,0 @@ -package io.kafbat.ui.exception; - -public class DuplicateEntityException extends CustomBaseException { - - public DuplicateEntityException(String message) { - super(message); - } - - @Override - public ErrorCode getErrorCode() { - return ErrorCode.DUPLICATED_ENTITY; - } -}
diff --git a/api/src/main/java/io/kafbat/ui/exception/ErrorCode.java b/api/src/main/java/io/kafbat/ui/exception/ErrorCode.java index a1b499aff..32cf5c5c8 100644 --- a/api/src/main/java/io/kafbat/ui/exception/ErrorCode.java +++ b/api/src/main/java/io/kafbat/ui/exception/ErrorCode.java @@ -4,11 +4,8 @@ import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; - public enum ErrorCode { - FORBIDDEN(403, HttpStatus.FORBIDDEN), - UNEXPECTED(5000, HttpStatus.INTERNAL_SERVER_ERROR), KSQL_API_ERROR(5001, HttpStatus.INTERNAL_SERVER_ERROR), BINDING_FAIL(4001, HttpStatus.BAD_REQUEST), @@ -16,13 +13,10 @@ public enum ErrorCode { VALIDATION_FAIL(4002, HttpStatus.BAD_REQUEST), READ_ONLY_MODE_ENABLE(4003, HttpStatus.METHOD_NOT_ALLOWED), CONNECT_CONFLICT_RESPONSE(4004, HttpStatus.CONFLICT), - DUPLICATED_ENTITY(4005, HttpStatus.CONFLICT), UNPROCESSABLE_ENTITY(4006, HttpStatus.UNPROCESSABLE_ENTITY), CLUSTER_NOT_FOUND(4007, HttpStatus.NOT_FOUND), TOPIC_NOT_FOUND(4008, HttpStatus.NOT_FOUND), SCHEMA_NOT_FOUND(4009, HttpStatus.NOT_FOUND), - CONNECT_NOT_FOUND(4010, HttpStatus.NOT_FOUND), - KSQLDB_NOT_FOUND(4011, HttpStatus.NOT_FOUND), DIR_NOT_FOUND(4012, HttpStatus.BAD_REQUEST),
TOPIC_OR_PARTITION_NOT_FOUND(4013, HttpStatus.BAD_REQUEST), INVALID_REQUEST(4014, HttpStatus.BAD_REQUEST), @@ -32,6 +26,7 @@ public enum ErrorCode { TOPIC_ANALYSIS_ERROR(4018, HttpStatus.BAD_REQUEST), FILE_UPLOAD_EXCEPTION(4019, HttpStatus.INTERNAL_SERVER_ERROR), CEL_ERROR(4020, HttpStatus.BAD_REQUEST), + CONNECTOR_OFFSETS_RESET_ERROR(4021, HttpStatus.BAD_REQUEST), ; static { diff --git a/api/src/main/java/io/kafbat/ui/exception/GlobalErrorWebExceptionHandler.java b/api/src/main/java/io/kafbat/ui/exception/GlobalErrorWebExceptionHandler.java index b4c978ac2..482ced492 100644 --- a/api/src/main/java/io/kafbat/ui/exception/GlobalErrorWebExceptionHandler.java +++ b/api/src/main/java/io/kafbat/ui/exception/GlobalErrorWebExceptionHandler.java @@ -2,12 +2,14 @@ import com.google.common.base.Throwables; import com.google.common.collect.Sets; +import io.kafbat.ui.config.CorsGlobalConfiguration; import io.kafbat.ui.model.ErrorResponseDTO; import java.math.BigDecimal; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.Consumer; import java.util.stream.Collectors; import java.util.stream.Stream; import org.springframework.boot.autoconfigure.web.WebProperties; @@ -16,6 +18,7 @@ import org.springframework.context.ApplicationContext; import org.springframework.core.Ordered; import org.springframework.core.annotation.Order; +import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; import org.springframework.http.codec.ServerCodecConfigurer; @@ -51,18 +54,18 @@ private Mono renderErrorResponse(ServerRequest request) { Throwable throwable = getError(request); // validation and params binding errors - if (throwable instanceof WebExchangeBindException) { - return render((WebExchangeBindException) throwable, request); + if (throwable instanceof WebExchangeBindException webExchangeBindException) { + return render(webExchangeBindException, request); } // requests mapping & access errors - if (throwable instanceof ResponseStatusException) { - return render((ResponseStatusException) throwable, request); + if (throwable instanceof ResponseStatusException responseStatusException) { + return render(responseStatusException, request); } // custom exceptions - if (throwable instanceof CustomBaseException) { - return render((CustomBaseException) throwable, request); + if (throwable instanceof CustomBaseException customBaseException) { + return render(customBaseException, request); } return renderDefault(throwable, request); @@ -78,6 +81,7 @@ private Mono renderDefault(Throwable throwable, ServerRequest re return ServerResponse .status(ErrorCode.UNEXPECTED.httpStatus()) .contentType(MediaType.APPLICATION_JSON) + .headers(headers(request)) .bodyValue(response); } @@ -92,13 +96,13 @@ private Mono render(CustomBaseException baseException, ServerReq return ServerResponse .status(errorCode.httpStatus()) .contentType(MediaType.APPLICATION_JSON) + .headers(headers(request)) .bodyValue(response); } private Mono render(WebExchangeBindException exception, ServerRequest request) { Map> fieldErrorsMap = exception.getFieldErrors().stream() - .collect(Collectors - .toMap(FieldError::getField, f -> Set.of(extractFieldErrorMsg(f)), Sets::union)); + .collect(Collectors.toMap(FieldError::getField, f -> Set.of(extractFieldErrorMsg(f)), Sets::union)); var fieldsErrors = fieldErrorsMap.entrySet().stream() .map(e -> { @@ -122,6 +126,7 @@ private Mono render(WebExchangeBindException exception, ServerRe 
return ServerResponse .status(HttpStatus.BAD_REQUEST) .contentType(MediaType.APPLICATION_JSON) + .headers(headers(request)) .bodyValue(response); } @@ -136,6 +141,7 @@ private Mono render(ResponseStatusException exception, ServerReq return ServerResponse .status(exception.getStatusCode()) .contentType(MediaType.APPLICATION_JSON) + .headers(headers(request)) .bodyValue(response); } @@ -143,6 +149,10 @@ private String requestId(ServerRequest request) { return request.exchange().getRequest().getId(); } + private Consumer headers(ServerRequest request) { + return (HttpHeaders headers) -> CorsGlobalConfiguration.fillCorsHeader(headers, request.exchange().getRequest()); + } + private BigDecimal currentTimestamp() { return BigDecimal.valueOf(System.currentTimeMillis()); } diff --git a/api/src/main/java/io/kafbat/ui/exception/KafkaConnectConflictReponseException.java b/api/src/main/java/io/kafbat/ui/exception/KafkaConnectConflictResponseException.java similarity index 67% rename from api/src/main/java/io/kafbat/ui/exception/KafkaConnectConflictReponseException.java rename to api/src/main/java/io/kafbat/ui/exception/KafkaConnectConflictResponseException.java index 48376d1ac..ad356b7e5 100644 --- a/api/src/main/java/io/kafbat/ui/exception/KafkaConnectConflictReponseException.java +++ b/api/src/main/java/io/kafbat/ui/exception/KafkaConnectConflictResponseException.java @@ -1,11 +1,10 @@ package io.kafbat.ui.exception; - import org.springframework.web.reactive.function.client.WebClientResponseException; -public class KafkaConnectConflictReponseException extends CustomBaseException { +public class KafkaConnectConflictResponseException extends CustomBaseException { - public KafkaConnectConflictReponseException(WebClientResponseException.Conflict e) { + public KafkaConnectConflictResponseException(WebClientResponseException.Conflict e) { super("Kafka Connect responded with 409 (Conflict) code. 
Response body: " + e.getResponseBodyAsString()); } diff --git a/api/src/main/java/io/kafbat/ui/exception/KsqlDbNotFoundException.java b/api/src/main/java/io/kafbat/ui/exception/KsqlDbNotFoundException.java deleted file mode 100644 index 255ccec80..000000000 --- a/api/src/main/java/io/kafbat/ui/exception/KsqlDbNotFoundException.java +++ /dev/null @@ -1,13 +0,0 @@ -package io.kafbat.ui.exception; - -public class KsqlDbNotFoundException extends CustomBaseException { - - public KsqlDbNotFoundException() { - super("KSQL DB not found"); - } - - @Override - public ErrorCode getErrorCode() { - return ErrorCode.KSQLDB_NOT_FOUND; - } -} diff --git a/api/src/main/java/io/kafbat/ui/exception/SchemaFailedToDeleteException.java b/api/src/main/java/io/kafbat/ui/exception/SchemaFailedToDeleteException.java deleted file mode 100644 index 05ba55c70..000000000 --- a/api/src/main/java/io/kafbat/ui/exception/SchemaFailedToDeleteException.java +++ /dev/null @@ -1,13 +0,0 @@ -package io.kafbat.ui.exception; - -public class SchemaFailedToDeleteException extends CustomBaseException { - - public SchemaFailedToDeleteException(String schemaName) { - super(String.format("Unable to delete schema with name %s", schemaName)); - } - - @Override - public ErrorCode getErrorCode() { - return ErrorCode.SCHEMA_NOT_DELETED; - } -} diff --git a/api/src/main/java/io/kafbat/ui/exception/UnprocessableEntityException.java b/api/src/main/java/io/kafbat/ui/exception/UnprocessableEntityException.java deleted file mode 100644 index fcd9e41fd..000000000 --- a/api/src/main/java/io/kafbat/ui/exception/UnprocessableEntityException.java +++ /dev/null @@ -1,14 +0,0 @@ -package io.kafbat.ui.exception; - - -public class UnprocessableEntityException extends CustomBaseException { - - public UnprocessableEntityException(String message) { - super(message); - } - - @Override - public ErrorCode getErrorCode() { - return ErrorCode.UNPROCESSABLE_ENTITY; - } -} diff --git a/api/src/main/java/io/kafbat/ui/mapper/ConsumerGroupMapper.java b/api/src/main/java/io/kafbat/ui/mapper/ConsumerGroupMapper.java index 72b3d65b4..800eab757 100644 --- a/api/src/main/java/io/kafbat/ui/mapper/ConsumerGroupMapper.java +++ b/api/src/main/java/io/kafbat/ui/mapper/ConsumerGroupMapper.java @@ -97,23 +97,15 @@ private static BrokerDTO mapCoordinator(Node node) { return new BrokerDTO().host(node.host()).id(node.id()).port(node.port()); } - private static ConsumerGroupStateDTO mapConsumerGroupState( - org.apache.kafka.common.ConsumerGroupState state) { - switch (state) { - case DEAD: - return ConsumerGroupStateDTO.DEAD; - case EMPTY: - return ConsumerGroupStateDTO.EMPTY; - case STABLE: - return ConsumerGroupStateDTO.STABLE; - case PREPARING_REBALANCE: - return ConsumerGroupStateDTO.PREPARING_REBALANCE; - case COMPLETING_REBALANCE: - return ConsumerGroupStateDTO.COMPLETING_REBALANCE; - default: - return ConsumerGroupStateDTO.UNKNOWN; - } + private static ConsumerGroupStateDTO mapConsumerGroupState(org.apache.kafka.common.ConsumerGroupState state) { + return switch (state) { + case DEAD -> ConsumerGroupStateDTO.DEAD; + case EMPTY -> ConsumerGroupStateDTO.EMPTY; + case STABLE -> ConsumerGroupStateDTO.STABLE; + case PREPARING_REBALANCE -> ConsumerGroupStateDTO.PREPARING_REBALANCE; + case COMPLETING_REBALANCE -> ConsumerGroupStateDTO.COMPLETING_REBALANCE; + default -> ConsumerGroupStateDTO.UNKNOWN; + }; } - } diff --git a/api/src/main/java/io/kafbat/ui/mapper/DescribeLogDirsMapper.java b/api/src/main/java/io/kafbat/ui/mapper/DescribeLogDirsMapper.java index 8a4e44e5c..bccd3a66b 
100644 --- a/api/src/main/java/io/kafbat/ui/mapper/DescribeLogDirsMapper.java +++ b/api/src/main/java/io/kafbat/ui/mapper/DescribeLogDirsMapper.java @@ -42,7 +42,7 @@ private BrokersLogdirsDTO toBrokerLogDirs(Integer broker, String dirName, private BrokerTopicLogdirsDTO toTopicLogDirs(Integer broker, String name, List> partitions) { + DescribeLogDirsResponse.ReplicaInfo>> partitions) { BrokerTopicLogdirsDTO topic = new BrokerTopicLogdirsDTO(); topic.setName(name); topic.setPartitions( @@ -54,8 +54,7 @@ private BrokerTopicLogdirsDTO toTopicLogDirs(Integer broker, String name, } private BrokerTopicPartitionLogdirDTO topicPartitionLogDir(Integer broker, Integer partition, - DescribeLogDirsResponse.ReplicaInfo - replicaInfo) { + DescribeLogDirsResponse.ReplicaInfo replicaInfo) { BrokerTopicPartitionLogdirDTO logDir = new BrokerTopicPartitionLogdirDTO(); logDir.setBroker(broker); logDir.setPartition(partition); diff --git a/api/src/main/java/io/kafbat/ui/model/BrokerMetrics.java b/api/src/main/java/io/kafbat/ui/model/BrokerMetrics.java deleted file mode 100644 index 31d4ad7e7..000000000 --- a/api/src/main/java/io/kafbat/ui/model/BrokerMetrics.java +++ /dev/null @@ -1,12 +0,0 @@ -package io.kafbat.ui.model; - -import io.kafbat.ui.model.MetricDTO; -import java.util.List; -import lombok.Builder; -import lombok.Data; - -@Data -@Builder(toBuilder = true) -public class BrokerMetrics { - private final List metrics; -} diff --git a/api/src/main/java/io/kafbat/ui/model/CleanupPolicy.java b/api/src/main/java/io/kafbat/ui/model/CleanupPolicy.java index 81186059f..815a8549d 100644 --- a/api/src/main/java/io/kafbat/ui/model/CleanupPolicy.java +++ b/api/src/main/java/io/kafbat/ui/model/CleanupPolicy.java @@ -20,18 +20,10 @@ public enum CleanupPolicy { this.policies = policies; } - public String getPolicy() { - return policies.get(0); - } - public static CleanupPolicy fromString(String string) { return Arrays.stream(CleanupPolicy.values()) - .filter(v -> - v.policies.stream().anyMatch( - s -> s.equals(string.replace(" ", "") - ) - ) - ).findFirst() + .filter(v -> v.policies.stream().anyMatch(s -> s.equals(string.replace(" ", "")))) + .findFirst() .orElse(UNKNOWN); } } diff --git a/api/src/main/java/io/kafbat/ui/model/InternalBrokerConfig.java b/api/src/main/java/io/kafbat/ui/model/InternalBrokerConfig.java index 5f87b0487..eb37270ed 100644 --- a/api/src/main/java/io/kafbat/ui/model/InternalBrokerConfig.java +++ b/api/src/main/java/io/kafbat/ui/model/InternalBrokerConfig.java @@ -1,6 +1,5 @@ package io.kafbat.ui.model; - import java.util.List; import lombok.Builder; import lombok.Data; @@ -17,13 +16,13 @@ public class InternalBrokerConfig { private final List synonyms; public static InternalBrokerConfig from(ConfigEntry configEntry, boolean readOnlyCluster) { - InternalBrokerConfig.InternalBrokerConfigBuilder builder = InternalBrokerConfig.builder() + return InternalBrokerConfig.builder() .name(configEntry.name()) .value(configEntry.value()) .source(configEntry.source()) .isReadOnly(readOnlyCluster || configEntry.isReadOnly()) .isSensitive(configEntry.isSensitive()) - .synonyms(configEntry.synonyms()); - return builder.build(); + .synonyms(configEntry.synonyms()) + .build(); } } diff --git a/api/src/main/java/io/kafbat/ui/model/InternalClusterMetrics.java b/api/src/main/java/io/kafbat/ui/model/InternalClusterMetrics.java deleted file mode 100644 index df59cf007..000000000 --- a/api/src/main/java/io/kafbat/ui/model/InternalClusterMetrics.java +++ /dev/null @@ -1,57 +0,0 @@ -package io.kafbat.ui.model; - -import 
io.kafbat.ui.model.MetricDTO; -import io.kafbat.ui.model.ServerStatusDTO; -import java.math.BigDecimal; -import java.util.List; -import java.util.Map; -import javax.annotation.Nullable; -import lombok.Builder; -import lombok.Data; - - -@Data -@Builder(toBuilder = true) -public class InternalClusterMetrics { - - public static InternalClusterMetrics empty() { - return InternalClusterMetrics.builder() - .brokers(List.of()) - .topics(Map.of()) - .status(ServerStatusDTO.OFFLINE) - .internalBrokerMetrics(Map.of()) - .metrics(List.of()) - .version("unknown") - .build(); - } - - private final String version; - - private final ServerStatusDTO status; - private final Throwable lastKafkaException; - - private final int brokerCount; - private final int activeControllers; - private final List brokers; - - private final int topicCount; - private final Map topics; - - // partitions stats - private final int underReplicatedPartitionCount; - private final int onlinePartitionCount; - private final int offlinePartitionCount; - private final int inSyncReplicasCount; - private final int outOfSyncReplicasCount; - - // log dir stats - @Nullable // will be null if log dir collection disabled - private final Map internalBrokerDiskUsage; - - // metrics from metrics collector - private final BigDecimal bytesInPerSec; - private final BigDecimal bytesOutPerSec; - private final Map internalBrokerMetrics; - private final List metrics; - -} diff --git a/api/src/main/java/io/kafbat/ui/model/InternalClusterState.java b/api/src/main/java/io/kafbat/ui/model/InternalClusterState.java index 5e1874d10..5f3c1f308 100644 --- a/api/src/main/java/io/kafbat/ui/model/InternalClusterState.java +++ b/api/src/main/java/io/kafbat/ui/model/InternalClusterState.java @@ -1,9 +1,6 @@ package io.kafbat.ui.model; import com.google.common.base.Throwables; -import io.kafbat.ui.model.BrokerDiskUsageDTO; -import io.kafbat.ui.model.MetricsCollectionErrorDTO; -import io.kafbat.ui.model.ServerStatusDTO; import java.math.BigDecimal; import java.util.List; import java.util.Optional; diff --git a/api/src/main/java/io/kafbat/ui/model/InternalPartition.java b/api/src/main/java/io/kafbat/ui/model/InternalPartition.java index 61646207e..0b7c46ae3 100644 --- a/api/src/main/java/io/kafbat/ui/model/InternalPartition.java +++ b/api/src/main/java/io/kafbat/ui/model/InternalPartition.java @@ -20,5 +20,4 @@ public class InternalPartition { private final Long segmentSize; private final Integer segmentCount; - } diff --git a/api/src/main/java/io/kafbat/ui/model/InternalPartitionsOffsets.java b/api/src/main/java/io/kafbat/ui/model/InternalPartitionsOffsets.java index 3084be5c3..b4febb56d 100644 --- a/api/src/main/java/io/kafbat/ui/model/InternalPartitionsOffsets.java +++ b/api/src/main/java/io/kafbat/ui/model/InternalPartitionsOffsets.java @@ -7,7 +7,6 @@ import lombok.Value; import org.apache.kafka.common.TopicPartition; - public class InternalPartitionsOffsets { @Value diff --git a/api/src/main/java/io/kafbat/ui/model/InternalSegmentSizeDto.java b/api/src/main/java/io/kafbat/ui/model/InternalSegmentSizeDto.java deleted file mode 100644 index 8ce8014ae..000000000 --- a/api/src/main/java/io/kafbat/ui/model/InternalSegmentSizeDto.java +++ /dev/null @@ -1,13 +0,0 @@ -package io.kafbat.ui.model; - -import java.util.Map; -import lombok.Builder; -import lombok.Data; - -@Data -@Builder(toBuilder = true) -public class InternalSegmentSizeDto { - - private final Map internalTopicWithSegmentSize; - private final InternalClusterMetrics clusterMetricsWithSegmentSize; -} diff 
--git a/api/src/main/java/io/kafbat/ui/model/InternalTopic.java b/api/src/main/java/io/kafbat/ui/model/InternalTopic.java index 3a6134fa0..6aa3a0a1a 100644 --- a/api/src/main/java/io/kafbat/ui/model/InternalTopic.java +++ b/api/src/main/java/io/kafbat/ui/model/InternalTopic.java @@ -1,6 +1,5 @@ package io.kafbat.ui.model; -import io.kafbat.ui.config.ClustersProperties; import java.math.BigDecimal; import java.util.List; import java.util.Map; @@ -16,8 +15,6 @@ @Builder(toBuilder = true) public class InternalTopic { - ClustersProperties clustersProperties; - // from TopicDescription private final String name; private final boolean internal; diff --git a/api/src/main/java/io/kafbat/ui/model/InternalTopicConfig.java b/api/src/main/java/io/kafbat/ui/model/InternalTopicConfig.java index b68348418..84620e2e4 100644 --- a/api/src/main/java/io/kafbat/ui/model/InternalTopicConfig.java +++ b/api/src/main/java/io/kafbat/ui/model/InternalTopicConfig.java @@ -5,7 +5,6 @@ import lombok.Data; import org.apache.kafka.clients.admin.ConfigEntry; - @Data @Builder public class InternalTopicConfig { diff --git a/api/src/main/java/io/kafbat/ui/model/Metrics.java b/api/src/main/java/io/kafbat/ui/model/Metrics.java index e9b7c9f0e..bb6d2ff0c 100644 --- a/api/src/main/java/io/kafbat/ui/model/Metrics.java +++ b/api/src/main/java/io/kafbat/ui/model/Metrics.java @@ -11,7 +11,6 @@ import lombok.Builder; import lombok.Value; - @Builder @Value public class Metrics { diff --git a/api/src/main/java/io/kafbat/ui/model/Statistics.java b/api/src/main/java/io/kafbat/ui/model/Statistics.java index 35de5e251..43e6a26e0 100644 --- a/api/src/main/java/io/kafbat/ui/model/Statistics.java +++ b/api/src/main/java/io/kafbat/ui/model/Statistics.java @@ -1,6 +1,5 @@ package io.kafbat.ui.model; -import io.kafbat.ui.model.ServerStatusDTO; import io.kafbat.ui.service.ReactiveAdminClient; import java.util.List; import java.util.Map; diff --git a/api/src/main/java/io/kafbat/ui/model/rbac/AccessContext.java b/api/src/main/java/io/kafbat/ui/model/rbac/AccessContext.java index 9ccc10ccf..dbf5c456b 100644 --- a/api/src/main/java/io/kafbat/ui/model/rbac/AccessContext.java +++ b/api/src/main/java/io/kafbat/ui/model/rbac/AccessContext.java @@ -69,8 +69,10 @@ public boolean isAccessible(List userPermissions) throws AccessDenie if (name == null && permission.getCompiledValuePattern() == null) { return true; } - Preconditions.checkState(permission.getCompiledValuePattern() != null && name != null); - return permission.getCompiledValuePattern().matcher(name).matches(); + if (permission.getCompiledValuePattern() != null && name != null) { + return permission.getCompiledValuePattern().matcher(name).matches(); + } + return false; }) .flatMap(p -> p.getParsedActions().stream()) .collect(Collectors.toSet()); diff --git a/api/src/main/java/io/kafbat/ui/model/rbac/Role.java b/api/src/main/java/io/kafbat/ui/model/rbac/Role.java index a41e39d19..e01392cc6 100644 --- a/api/src/main/java/io/kafbat/ui/model/rbac/Role.java +++ b/api/src/main/java/io/kafbat/ui/model/rbac/Role.java @@ -1,5 +1,7 @@ package io.kafbat.ui.model.rbac; +import static com.google.common.base.Preconditions.checkArgument; + import java.util.List; import lombok.Data; @@ -12,8 +14,11 @@ public class Role { List permissions; public void validate() { + checkArgument(!clusters.isEmpty(), "Role clusters cannot be empty"); + checkArgument(!subjects.isEmpty(), "Role subjects cannot be empty"); permissions.forEach(Permission::transform); permissions.forEach(Permission::validate); + 
subjects.forEach(Subject::validate); } }
diff --git a/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java b/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java index a71cebea3..9781d69ca 100644 --- a/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java +++ b/api/src/main/java/io/kafbat/ui/model/rbac/Subject.java @@ -1,6 +1,10 @@ package io.kafbat.ui.model.rbac; +import static com.google.common.base.Preconditions.checkArgument; +import static com.google.common.base.Preconditions.checkNotNull; + import io.kafbat.ui.model.rbac.provider.Provider; +import java.util.Objects; import lombok.Getter; @Getter @@ -9,6 +13,7 @@ public class Subject { Provider provider; String type; String value; + boolean isRegex; public void setProvider(String provider) { this.provider = Provider.fromString(provider.toUpperCase()); @@ -21,4 +26,23 @@ public void setType(String type) { public void setValue(String value) { this.value = value; } + + public void setIsRegex(boolean isRegex) { + this.isRegex = isRegex; + } + + public void validate() { + checkNotNull(type, "Subject type cannot be null"); + checkNotNull(value, "Subject value cannot be null"); + + checkArgument(!type.isEmpty(), "Subject type cannot be empty"); + checkArgument(!value.isEmpty(), "Subject value cannot be empty"); + } + + public boolean matches(final String attribute) { + if (isRegex) { + return Objects.nonNull(attribute) && attribute.matches(this.value); + } + return this.value.equalsIgnoreCase(attribute); + } }
diff --git a/api/src/main/java/io/kafbat/ui/model/rbac/permission/ConnectAction.java b/api/src/main/java/io/kafbat/ui/model/rbac/permission/ConnectAction.java index 7634e89c0..a357245bc 100644 --- a/api/src/main/java/io/kafbat/ui/model/rbac/permission/ConnectAction.java +++ b/api/src/main/java/io/kafbat/ui/model/rbac/permission/ConnectAction.java @@ -10,7 +10,8 @@ public enum ConnectAction implements PermissibleAction { EDIT(VIEW), CREATE(VIEW), RESTART(VIEW), - DELETE(VIEW) + DELETE(VIEW), + RESET_OFFSETS(VIEW) ; @@ -20,7 +21,7 @@ public enum ConnectAction implements PermissibleAction { this.dependantActions = dependantActions; } - public static final Set<ConnectAction> ALTER_ACTIONS = Set.of(CREATE, EDIT, DELETE, RESTART); + public static final Set<ConnectAction> ALTER_ACTIONS = Set.of(CREATE, EDIT, DELETE, RESTART, RESET_OFFSETS); @Nullable public static ConnectAction fromString(String name) {
diff --git a/api/src/main/java/io/kafbat/ui/model/rbac/provider/Provider.java b/api/src/main/java/io/kafbat/ui/model/rbac/provider/Provider.java index 94b1cd039..3fbae2423 100644 --- a/api/src/main/java/io/kafbat/ui/model/rbac/provider/Provider.java +++ b/api/src/main/java/io/kafbat/ui/model/rbac/provider/Provider.java @@ -21,11 +21,11 @@ public static Provider fromString(String name) { } public static class Name { - public static String GOOGLE = "google"; - public static String GITHUB = "github"; - public static String COGNITO = "cognito"; + public static final String GOOGLE = "google"; + public static final String GITHUB = "github"; + public static final String COGNITO = "cognito"; - public static String OAUTH = "oauth"; + public static final String OAUTH = "oauth"; } }
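With isRegex, a single RBAC subject can now stand for a whole family of principals, while non-regex subjects keep the previous case-insensitive equality; matches(null) is false in the regex branch thanks to the Objects.nonNull guard. A minimal usage sketch, with made-up values:

```java
class SubjectRegexSketch {
  // Illustrative only: the new matching semantics of io.kafbat.ui.model.rbac.Subject.
  static void demo() {
    Subject subject = new Subject();
    subject.setType("group");
    subject.setValue("kafka-admins-.*"); // interpreted as a java.util.regex pattern
    subject.setIsRegex(true);

    boolean emea = subject.matches("kafka-admins-emea"); // true: regex match
    boolean none = subject.matches(null);                // false: null-safe
  }
}
```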
diff --git a/api/src/main/java/io/kafbat/ui/model/schemaregistry/ErrorResponse.java b/api/src/main/java/io/kafbat/ui/model/schemaregistry/ErrorResponse.java deleted file mode 100644 index 00bc760b9..000000000 --- a/api/src/main/java/io/kafbat/ui/model/schemaregistry/ErrorResponse.java +++ /dev/null @@ -1,14 +0,0 @@ -package io.kafbat.ui.model.schemaregistry; - -import com.fasterxml.jackson.annotation.JsonProperty; -import lombok.Data; - -@Data -public class ErrorResponse { - - @JsonProperty("error_code") - private int errorCode; - - private String message; - -}
diff --git a/api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalCompatibilityCheck.java b/api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalCompatibilityCheck.java deleted file mode 100644 index bd59b6f6e..000000000 --- a/api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalCompatibilityCheck.java +++ /dev/null @@ -1,10 +0,0 @@ -package io.kafbat.ui.model.schemaregistry; - -import com.fasterxml.jackson.annotation.JsonProperty; -import lombok.Data; - -@Data -public class InternalCompatibilityCheck { - @JsonProperty("is_compatible") - private boolean isCompatible; -}
diff --git a/api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalCompatibilityLevel.java b/api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalCompatibilityLevel.java deleted file mode 100644 index af4acf8f7..000000000 --- a/api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalCompatibilityLevel.java +++ /dev/null @@ -1,8 +0,0 @@ -package io.kafbat.ui.model.schemaregistry; - -import lombok.Data; - -@Data -public class InternalCompatibilityLevel { - private String compatibilityLevel; -}
diff --git a/api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalNewSchema.java b/api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalNewSchema.java deleted file mode 100644 index 9c09eccff..000000000 --- a/api/src/main/java/io/kafbat/ui/model/schemaregistry/InternalNewSchema.java +++ /dev/null @@ -1,17 +0,0 @@ -package io.kafbat.ui.model.schemaregistry; - -import com.fasterxml.jackson.annotation.JsonInclude; -import io.kafbat.ui.model.SchemaTypeDTO; -import lombok.Data; - -@Data -public class InternalNewSchema { - private String schema; - @JsonInclude(JsonInclude.Include.NON_NULL) - private SchemaTypeDTO schemaType; - - public InternalNewSchema(String schema, SchemaTypeDTO schemaType) { - this.schema = schema; - this.schemaType = schemaType; - } -}
diff --git a/api/src/main/java/io/kafbat/ui/model/schemaregistry/SubjectIdResponse.java b/api/src/main/java/io/kafbat/ui/model/schemaregistry/SubjectIdResponse.java deleted file mode 100644 index 951477676..000000000 --- a/api/src/main/java/io/kafbat/ui/model/schemaregistry/SubjectIdResponse.java +++ /dev/null @@ -1,8 +0,0 @@ -package io.kafbat.ui.model.schemaregistry; - -import lombok.Data; - -@Data -public class SubjectIdResponse { - private Integer id; -}
diff --git a/api/src/main/java/io/kafbat/ui/serdes/ProducerRecordCreator.java b/api/src/main/java/io/kafbat/ui/serdes/ProducerRecordCreator.java index 359c871d6..e7abbad90 100644 --- a/api/src/main/java/io/kafbat/ui/serdes/ProducerRecordCreator.java +++ b/api/src/main/java/io/kafbat/ui/serdes/ProducerRecordCreator.java @@ -31,7 +31,7 @@ public ProducerRecord<byte[], byte[]> create(String topic, private Iterable<Header>
createHeaders(Map clientHeaders) { RecordHeaders headers = new RecordHeaders(); - clientHeaders.forEach((k, v) -> headers.add(new RecordHeader(k, v.getBytes()))); + clientHeaders.forEach((k, v) -> headers.add(new RecordHeader(k, v == null ? null : v.getBytes()))); return headers; } diff --git a/api/src/main/java/io/kafbat/ui/serdes/SerdeInstance.java b/api/src/main/java/io/kafbat/ui/serdes/SerdeInstance.java index 7c1826257..b0fdc9834 100644 --- a/api/src/main/java/io/kafbat/ui/serdes/SerdeInstance.java +++ b/api/src/main/java/io/kafbat/ui/serdes/SerdeInstance.java @@ -97,7 +97,7 @@ public void close() { try { serde.close(); } catch (Exception e) { - log.error("Error closing serde " + name, e); + log.error("Error closing serde {}", name, e); } return null; }); diff --git a/api/src/main/java/io/kafbat/ui/serdes/builtin/Base64Serde.java b/api/src/main/java/io/kafbat/ui/serdes/builtin/Base64Serde.java index 02e56ff22..515354695 100644 --- a/api/src/main/java/io/kafbat/ui/serdes/builtin/Base64Serde.java +++ b/api/src/main/java/io/kafbat/ui/serdes/builtin/Base64Serde.java @@ -40,7 +40,7 @@ public Serde.Serializer serializer(String topic, Serde.Target type) { return inputString -> { inputString = inputString.trim(); // it is actually a hack to provide ability to sent empty array as a key/value - if (inputString.length() == 0) { + if (inputString.isEmpty()) { return new byte[] {}; } return decoder.decode(inputString); diff --git a/api/src/main/java/io/kafbat/ui/serdes/builtin/HexSerde.java b/api/src/main/java/io/kafbat/ui/serdes/builtin/HexSerde.java index a3c958a06..ab7f66ebb 100644 --- a/api/src/main/java/io/kafbat/ui/serdes/builtin/HexSerde.java +++ b/api/src/main/java/io/kafbat/ui/serdes/builtin/HexSerde.java @@ -62,7 +62,7 @@ public Serializer serializer(String topic, Target type) { return input -> { input = input.trim(); // it is a hack to provide ability to sent empty array as a key/value - if (input.length() == 0) { + if (input.isEmpty()) { return new byte[] {}; } return HexFormat.of().parseHex(prepareInputForParse(input)); diff --git a/api/src/main/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerde.java b/api/src/main/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerde.java index 51c921603..618c711b1 100644 --- a/api/src/main/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerde.java +++ b/api/src/main/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerde.java @@ -15,6 +15,7 @@ import com.google.protobuf.StructProto; import com.google.protobuf.TimestampProto; import com.google.protobuf.TypeProto; +import com.google.protobuf.TypeRegistry; import com.google.protobuf.WrappersProto; import com.google.protobuf.util.JsonFormat; import com.google.type.ColorProto; @@ -63,6 +64,7 @@ import javax.annotation.Nullable; import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.SystemUtils; import org.jetbrains.annotations.NotNull; @Slf4j @@ -147,12 +149,18 @@ public boolean canSerialize(String topic, Serde.Target type) { @Override public Serde.Serializer serializer(String topic, Serde.Target type) { var descriptor = descriptorFor(topic, type).orElseThrow(); + TypeRegistry typeRegistry = TypeRegistry.newBuilder() + .add(descriptorPaths.keySet()) + .build(); + return new Serde.Serializer() { @SneakyThrows @Override public byte[] serialize(String input) { DynamicMessage.Builder builder = DynamicMessage.newBuilder(descriptor); - JsonFormat.parser().merge(input, builder); + JsonFormat.parser() + .usingTypeRegistry(typeRegistry) + .merge(input, builder); return 
builder.build().toByteArray(); } }; @@ -372,12 +380,12 @@ private Map knownProtoFiles() { } private ProtoFile loadKnownProtoFile(String path, Descriptors.FileDescriptor fileDescriptor) { - String protoFileString = null; + String protoFileString; // know type file contains either message or enum if (!fileDescriptor.getMessageTypes().isEmpty()) { - protoFileString = new ProtobufSchema(fileDescriptor.getMessageTypes().get(0)).canonicalString(); + protoFileString = new ProtobufSchema(fileDescriptor.getMessageTypes().getFirst()).canonicalString(); } else if (!fileDescriptor.getEnumTypes().isEmpty()) { - protoFileString = new ProtobufSchema(fileDescriptor.getEnumTypes().get(0)).canonicalString(); + protoFileString = new ProtobufSchema(fileDescriptor.getEnumTypes().getFirst()).canonicalString(); } else { throw new IllegalStateException(); } @@ -409,7 +417,7 @@ private Map loadFilesWithLocations() { files.filter(p -> !Files.isDirectory(p) && p.toString().endsWith(".proto")) .forEach(path -> { // relative path will be used as "import" statement - String relativePath = baseLocation.relativize(path).toString(); + String relativePath = removeBackSlashes(baseLocation.relativize(path).toString()); var protoFileElement = ProtoParser.Companion.parse( Location.get(baseLocation.toString(), relativePath), readFileAsString(path) @@ -419,6 +427,27 @@ private Map loadFilesWithLocations() { } return filesByLocations; } + + /** + * Replaces backslashes in the given file path with forward slashes if the operating system is Windows. + * + *

<p>This method is designed to standardize file paths by converting Windows-style backslashes (`\`) + * to Linux/Unix-style forward slashes (`/`) when the application is running on a Windows OS. + * On other operating systems, the input path is returned unchanged.</p> + * + * <p>This is needed because imports in Protobuf use forward slashes (`/`) + * which causes a conflict with Windows paths. For example, `language/language.proto` + * would be converted to `language\language.proto` in Windows, causing a resolution exception.</p>
+ * + * @param path the file path to standardize; must not be {@code null}. + * @return the standardized file path with forward slashes if running on Windows, or the original path otherwise. + */ + private @NotNull String removeBackSlashes(@NotNull final String path) { + if (SystemUtils.IS_OS_WINDOWS) { + return path.replace("\\", "/"); + } + return path; + } } } diff --git a/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java b/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java index bc175b980..fd9735b87 100644 --- a/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java +++ b/api/src/main/java/io/kafbat/ui/service/AdminClientServiceImpl.java @@ -2,12 +2,13 @@ import io.kafbat.ui.config.ClustersProperties; import io.kafbat.ui.model.KafkaCluster; -import io.kafbat.ui.util.SslPropertiesUtil; +import io.kafbat.ui.util.KafkaClientSslPropertiesUtil; import java.io.Closeable; import java.time.Instant; import java.util.Map; import java.util.Optional; import java.util.Properties; +import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicLong; import lombok.extern.slf4j.Slf4j; @@ -15,6 +16,7 @@ import org.apache.kafka.clients.admin.AdminClientConfig; import org.springframework.stereotype.Service; import reactor.core.publisher.Mono; +import reactor.core.scheduler.Schedulers; @Service @Slf4j @@ -42,7 +44,7 @@ public Mono get(KafkaCluster cluster) { private Mono createAdminClient(KafkaCluster cluster) { return Mono.fromSupplier(() -> { Properties properties = new Properties(); - SslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), properties); + KafkaClientSslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), properties); properties.putAll(cluster.getProperties()); properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.getBootstrapServers()); properties.putIfAbsent(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, clientTimeout); @@ -51,9 +53,10 @@ private Mono createAdminClient(KafkaCluster cluster) { "kafbat-ui-admin-" + Instant.now().getEpochSecond() + "-" + CLIENT_ID_SEQ.incrementAndGet() ); return AdminClient.create(properties); - }).flatMap(ac -> ReactiveAdminClient.create(ac).doOnError(th -> ac.close())) + }).subscribeOn(Schedulers.boundedElastic()) + .flatMap(ac -> ReactiveAdminClient.create(ac).doOnError(th -> ac.close())) .onErrorMap(th -> new IllegalStateException( - "Error while creating AdminClient for Cluster " + cluster.getName(), th)); + "Error while creating AdminClient for the cluster " + cluster.getName(), th)); } @Override diff --git a/api/src/main/java/io/kafbat/ui/service/ApplicationInfoService.java b/api/src/main/java/io/kafbat/ui/service/ApplicationInfoService.java index 7d380036c..7ee28b62d 100644 --- a/api/src/main/java/io/kafbat/ui/service/ApplicationInfoService.java +++ b/api/src/main/java/io/kafbat/ui/service/ApplicationInfoService.java @@ -1,16 +1,23 @@ package io.kafbat.ui.service; +import static io.kafbat.ui.api.model.AuthType.DISABLED; +import static io.kafbat.ui.api.model.AuthType.OAUTH2; import static io.kafbat.ui.model.ApplicationInfoDTO.EnabledFeaturesEnum; import static io.kafbat.ui.util.GithubReleaseInfo.GITHUB_RELEASE_INFO_TIMEOUT; import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.Streams; +import io.kafbat.ui.model.AppAuthenticationSettingsDTO; import io.kafbat.ui.model.ApplicationInfoBuildDTO; import io.kafbat.ui.model.ApplicationInfoDTO; 
import io.kafbat.ui.model.ApplicationInfoLatestReleaseDTO; +import io.kafbat.ui.model.AuthTypeDTO; +import io.kafbat.ui.model.OAuthProviderDTO; import io.kafbat.ui.util.DynamicConfigOperations; import io.kafbat.ui.util.GithubReleaseInfo; import java.time.format.DateTimeFormatter; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Optional; import java.util.Properties; @@ -18,20 +25,27 @@ import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.info.BuildProperties; import org.springframework.boot.info.GitProperties; +import org.springframework.context.ApplicationContext; +import org.springframework.core.ResolvableType; import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.security.oauth2.client.registration.ClientRegistration; +import org.springframework.security.oauth2.core.AuthorizationGrantType; import org.springframework.stereotype.Service; @Service public class ApplicationInfoService { private final GithubReleaseInfo githubReleaseInfo; + private final ApplicationContext applicationContext; private final DynamicConfigOperations dynamicConfigOperations; private final BuildProperties buildProperties; private final GitProperties gitProperties; public ApplicationInfoService(DynamicConfigOperations dynamicConfigOperations, + ApplicationContext applicationContext, @Autowired(required = false) BuildProperties buildProperties, @Autowired(required = false) GitProperties gitProperties, @Value("${" + GITHUB_RELEASE_INFO_TIMEOUT + ":10}") int githubApiMaxWaitTime) { + this.applicationContext = applicationContext; this.dynamicConfigOperations = dynamicConfigOperations; this.buildProperties = Optional.ofNullable(buildProperties).orElse(new BuildProperties(new Properties())); this.gitProperties = Optional.ofNullable(gitProperties).orElse(new GitProperties(new Properties())); @@ -70,6 +84,38 @@ private List getEnabledFeatures() { return enabledFeatures; } + public AppAuthenticationSettingsDTO getAuthenticationProperties() { + return new AppAuthenticationSettingsDTO() + .authType(AuthTypeDTO.fromValue(getAuthType())) + .oAuthProviders(getOAuthProviders()); + } + + private String getAuthType() { + return Optional.ofNullable(applicationContext.getEnvironment().getProperty("auth.type")) + .orElse(DISABLED.getValue()); + } + + @SuppressWarnings("unchecked") + private List getOAuthProviders() { + if (!getAuthType().equalsIgnoreCase(OAUTH2.getValue())) { + return Collections.emptyList(); + } + var type = ResolvableType.forClassWithGenerics(Iterable.class, ClientRegistration.class); + String[] names = this.applicationContext.getBeanNamesForType(type); + var bean = (Iterable) (names.length == 1 ? 
this.applicationContext.getBean(names[0]) : null); + + if (bean == null) { + return Collections.emptyList(); + } + + return Streams.stream(bean.iterator()) + .filter(r -> AuthorizationGrantType.AUTHORIZATION_CODE.equals(r.getAuthorizationGrantType())) + .map(r -> new OAuthProviderDTO() + .clientName(r.getClientName()) + .authorizationUri("/oauth2/authorization/" + r.getRegistrationId())) + .toList(); + } + // updating on startup and every hour @Scheduled(fixedRateString = "${github-release-info-update-rate:3600000}") public void updateGithubReleaseInfo() { diff --git a/api/src/main/java/io/kafbat/ui/service/ConsumerGroupService.java b/api/src/main/java/io/kafbat/ui/service/ConsumerGroupService.java index 27593cd6f..282bdc5b6 100644 --- a/api/src/main/java/io/kafbat/ui/service/ConsumerGroupService.java +++ b/api/src/main/java/io/kafbat/ui/service/ConsumerGroupService.java @@ -10,7 +10,7 @@ import io.kafbat.ui.model.SortOrderDTO; import io.kafbat.ui.service.rbac.AccessControlService; import io.kafbat.ui.util.ApplicationMetrics; -import io.kafbat.ui.util.SslPropertiesUtil; +import io.kafbat.ui.util.KafkaClientSslPropertiesUtil; import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; @@ -149,6 +149,8 @@ private Mono> loadSortedDescriptions(ReactiveAdmi case EMPTY -> 3; case DEAD -> 4; case UNKNOWN -> 5; + case ASSIGNING -> 6; + case RECONCILING -> 7; }; var comparator = Comparator.comparingInt(statesPriorities); yield loadDescriptionsByListings(ac, groups, comparator, pageNum, perPage, sortOrderDto); @@ -262,7 +264,7 @@ public EnhancedConsumer createConsumer(KafkaCluster cluster) { public EnhancedConsumer createConsumer(KafkaCluster cluster, Map properties) { Properties props = new Properties(); - SslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), props); + KafkaClientSslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), props); props.putAll(cluster.getProperties()); props.put(ConsumerConfig.CLIENT_ID_CONFIG, "kafbat-ui-consumer-" + System.currentTimeMillis()); props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.getBootstrapServers()); diff --git a/api/src/main/java/io/kafbat/ui/service/KafkaConnectService.java b/api/src/main/java/io/kafbat/ui/service/KafkaConnectService.java index c5a32b29e..92bfc260b 100644 --- a/api/src/main/java/io/kafbat/ui/service/KafkaConnectService.java +++ b/api/src/main/java/io/kafbat/ui/service/KafkaConnectService.java @@ -1,12 +1,11 @@ package io.kafbat.ui.service; -import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.ObjectMapper; import io.kafbat.ui.connect.api.KafkaConnectClientApi; import io.kafbat.ui.connect.model.ConnectorStatus; import io.kafbat.ui.connect.model.ConnectorStatusConnector; import io.kafbat.ui.connect.model.ConnectorTopics; import io.kafbat.ui.connect.model.TaskStatus; +import io.kafbat.ui.exception.ConnectorOffsetsResetException; import io.kafbat.ui.exception.NotFoundException; import io.kafbat.ui.exception.ValidationException; import io.kafbat.ui.mapper.ClusterMapper; @@ -31,7 +30,6 @@ import java.util.stream.Stream; import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; -import lombok.SneakyThrows; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.springframework.stereotype.Service; @@ -45,7 +43,6 @@ public class KafkaConnectService { private final ClusterMapper clusterMapper; private final KafkaConnectMapper kafkaConnectMapper; - private final 
ObjectMapper objectMapper; private final KafkaConfigSanitizer kafkaConfigSanitizer; public Flux getConnects(KafkaCluster cluster) { @@ -106,10 +103,7 @@ public Mono getConnectorTopics(KafkaCluster cluster, String con public Flux getConnectorNames(KafkaCluster cluster, String connectName) { return api(cluster, connectName) - .flux(client -> client.getConnectors(null)) - // for some reason `getConnectors` method returns the response as a single string - .collectList().map(e -> e.get(0)) - .map(this::parseConnectorsNamesStringToList) + .mono(client -> client.getConnectors(null)) .flatMapMany(Flux::fromIterable); } @@ -118,12 +112,6 @@ public Flux getConnectorNamesWithErrorsSuppress(KafkaCluster cluster, St return getConnectorNames(cluster, connectName).onErrorComplete(); } - @SneakyThrows - private List parseConnectorsNamesStringToList(String json) { - return objectMapper.readValue(json, new TypeReference<>() { - }); - } - public Mono createConnector(KafkaCluster cluster, String connectName, Mono connector) { return api(cluster, connectName) @@ -218,22 +206,14 @@ public Mono deleteConnector( public Mono updateConnectorState(KafkaCluster cluster, String connectName, String connectorName, ConnectorActionDTO action) { return api(cluster, connectName) - .mono(client -> { - switch (action) { - case RESTART: - return client.restartConnector(connectorName, false, false); - case RESTART_ALL_TASKS: - return restartTasks(cluster, connectName, connectorName, task -> true); - case RESTART_FAILED_TASKS: - return restartTasks(cluster, connectName, connectorName, - t -> t.getStatus().getState() == ConnectorTaskStatusDTO.FAILED); - case PAUSE: - return client.pauseConnector(connectorName); - case RESUME: - return client.resumeConnector(connectorName); - default: - throw new IllegalStateException("Unexpected value: " + action); - } + .mono(client -> switch (action) { + case RESTART -> client.restartConnector(connectorName, false, false); + case RESTART_ALL_TASKS -> restartTasks(cluster, connectName, connectorName, task -> true); + case RESTART_FAILED_TASKS -> restartTasks(cluster, connectName, connectorName, + t -> t.getStatus().getState() == ConnectorTaskStatusDTO.FAILED); + case PAUSE -> client.pauseConnector(connectorName); + case STOP -> client.stopConnector(connectorName); + case RESUME -> client.resumeConnector(connectorName); }); } @@ -292,4 +272,20 @@ private ReactiveFailover api(KafkaCluster cluster, String } return client; } + + public Mono resetConnectorOffsets(KafkaCluster cluster, String connectName, + String connectorName) { + return api(cluster, connectName) + .mono(client -> client.resetConnectorOffsets(connectorName)) + .onErrorResume(WebClientResponseException.NotFound.class, + e -> { + throw new NotFoundException("Connector %s not found in %s".formatted(connectorName, connectName)); + }) + .onErrorResume(WebClientResponseException.BadRequest.class, + e -> { + throw new ConnectorOffsetsResetException( + "Failed to reset offsets of connector %s of %s. Make sure it is STOPPED first." 
+ .formatted(connectorName, connectName)); + }); + } } diff --git a/api/src/main/java/io/kafbat/ui/service/MessagesService.java b/api/src/main/java/io/kafbat/ui/service/MessagesService.java index 2f6192e11..c94472d56 100644 --- a/api/src/main/java/io/kafbat/ui/service/MessagesService.java +++ b/api/src/main/java/io/kafbat/ui/service/MessagesService.java @@ -23,7 +23,7 @@ import io.kafbat.ui.model.TopicMessageEventDTO; import io.kafbat.ui.serdes.ConsumerRecordDeserializer; import io.kafbat.ui.serdes.ProducerRecordCreator; -import io.kafbat.ui.util.SslPropertiesUtil; +import io.kafbat.ui.util.KafkaClientSslPropertiesUtil; import java.time.Instant; import java.time.OffsetDateTime; import java.time.ZoneOffset; @@ -199,7 +199,7 @@ private Mono sendMessageImpl(KafkaCluster cluster, public static KafkaProducer createProducer(KafkaCluster cluster, Map additionalProps) { Properties properties = new Properties(); - SslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), properties); + KafkaClientSslPropertiesUtil.addKafkaSslProperties(cluster.getOriginalProperties().getSsl(), properties); properties.putAll(cluster.getProperties()); properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, cluster.getBootstrapServers()); properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class); diff --git a/api/src/main/java/io/kafbat/ui/service/ReactiveAdminClient.java b/api/src/main/java/io/kafbat/ui/service/ReactiveAdminClient.java index 651f6d531..6aea290c3 100644 --- a/api/src/main/java/io/kafbat/ui/service/ReactiveAdminClient.java +++ b/api/src/main/java/io/kafbat/ui/service/ReactiveAdminClient.java @@ -389,12 +389,6 @@ static Mono> toMonoWithExceptionFilter(Map> v ); } - public Mono>> describeLogDirs() { - return describeCluster() - .map(d -> d.getNodes().stream().map(Node::id).collect(toList())) - .flatMap(this::describeLogDirs); - } - public Mono>> describeLogDirs( Collection brokerIds) { return toMono(client.describeLogDirs(brokerIds).all()) diff --git a/api/src/main/java/io/kafbat/ui/service/SchemaRegistryService.java b/api/src/main/java/io/kafbat/ui/service/SchemaRegistryService.java index 1bac22235..c725a787e 100644 --- a/api/src/main/java/io/kafbat/ui/service/SchemaRegistryService.java +++ b/api/src/main/java/io/kafbat/ui/service/SchemaRegistryService.java @@ -63,7 +63,7 @@ public Mono> getAllSubjectNames(KafkaCluster cluster) { @SneakyThrows private List parseSubjectListString(String subjectNamesStr) { //workaround for https://github.com/spring-projects/spring-framework/issues/24734 - return new JsonMapper().readValue(subjectNamesStr, new TypeReference>() { + return new JsonMapper().readValue(subjectNamesStr, new TypeReference<>() { }); } diff --git a/api/src/main/java/io/kafbat/ui/service/TopicsService.java b/api/src/main/java/io/kafbat/ui/service/TopicsService.java index 015a86838..95ad7bc5a 100644 --- a/api/src/main/java/io/kafbat/ui/service/TopicsService.java +++ b/api/src/main/java/io/kafbat/ui/service/TopicsService.java @@ -97,7 +97,7 @@ private Mono loadTopic(KafkaCluster c, String topicName) { /** * After creation topic can be invisible via API for some time. - * To workaround this, we retyring topic loading until it becomes visible. + * To workaround this, we're retrying topic loading until it becomes visible. 
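 * <p>A minimal sketch of the retry idea (values are illustrative, not the ones used here),
 * assuming Reactor's retry support and this project's TopicNotFoundException:
 * <pre>
 *   loadTopic(c, topicName)
 *       .retryWhen(Retry.fixedDelay(5, Duration.ofMillis(100))
 *           .filter(TopicNotFoundException.class::isInstance));
 * </pre>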
*/ private Mono loadTopicAfterCreation(KafkaCluster c, String topicName) { return loadTopic(c, topicName) @@ -137,8 +137,7 @@ private List createList(List orderedNames, .collect(toList()); } - private Mono getPartitionOffsets(Map - descriptionsMap, + private Mono getPartitionOffsets(Map descriptionsMap, ReactiveAdminClient ac) { var descriptions = descriptionsMap.values(); return ac.listOffsets(descriptions, OffsetSpec.earliest()) @@ -225,8 +224,7 @@ private Mono updateTopic(KafkaCluster cluster, .then(loadTopic(cluster, topicName))); } - public Mono updateTopic(KafkaCluster cl, String topicName, - Mono topicUpdate) { + public Mono updateTopic(KafkaCluster cl, String topicName, Mono topicUpdate) { return topicUpdate .flatMap(t -> updateTopic(cl, topicName, t)); } @@ -298,7 +296,7 @@ private Map> getPartitionsRea var brokers = brokersUsage.entrySet().stream() .sorted(Map.Entry.comparingByValue()) .map(Map.Entry::getKey) - .collect(toList()); + .toList(); // Iterate brokers and try to add them in assignment // while partition replicas count != requested replication factor @@ -326,7 +324,7 @@ private Map> getPartitionsRea var brokersUsageList = brokersUsage.entrySet().stream() .sorted(Map.Entry.comparingByValue(Comparator.reverseOrder())) .map(Map.Entry::getKey) - .collect(toList()); + .toList(); // Iterate brokers and try to remove them from assignment // while partition replicas count != requested replication factor diff --git a/api/src/main/java/io/kafbat/ui/service/acl/AclsService.java b/api/src/main/java/io/kafbat/ui/service/acl/AclsService.java index b3877a336..30078d435 100644 --- a/api/src/main/java/io/kafbat/ui/service/acl/AclsService.java +++ b/api/src/main/java/io/kafbat/ui/service/acl/AclsService.java @@ -112,13 +112,13 @@ private void logAclSyncPlan(KafkaCluster cluster, Set toBeAdded, Set if (!toBeAdded.isEmpty()) { log.info("ACLs to be added ({}): ", toBeAdded.size()); for (AclBinding aclBinding : toBeAdded) { - log.info(" " + AclCsv.createAclString(aclBinding)); + log.info(" {}", AclCsv.createAclString(aclBinding)); } } if (!toBeDeleted.isEmpty()) { log.info("ACLs to be deleted ({}): ", toBeDeleted.size()); for (AclBinding aclBinding : toBeDeleted) { - log.info(" " + AclCsv.createAclString(aclBinding)); + log.info(" {}", AclCsv.createAclString(aclBinding)); } } } diff --git a/api/src/main/java/io/kafbat/ui/service/audit/AuditRecord.java b/api/src/main/java/io/kafbat/ui/service/audit/AuditRecord.java index d7ef659bf..2bb69c20f 100644 --- a/api/src/main/java/io/kafbat/ui/service/audit/AuditRecord.java +++ b/api/src/main/java/io/kafbat/ui/service/audit/AuditRecord.java @@ -53,14 +53,12 @@ static OperationResult successful() { } static OperationResult error(Throwable th) { - OperationError err = OperationError.UNRECOGNIZED_ERROR; - if (th instanceof AccessDeniedException) { - err = OperationError.ACCESS_DENIED; - } else if (th instanceof ValidationException) { - err = OperationError.VALIDATION_ERROR; - } else if (th instanceof CustomBaseException) { - err = OperationError.EXECUTION_ERROR; - } + OperationError err = switch (th) { + case AccessDeniedException ignored -> OperationError.ACCESS_DENIED; + case ValidationException ignored -> OperationError.VALIDATION_ERROR; + case CustomBaseException ignored -> OperationError.EXECUTION_ERROR; + case null, default -> OperationError.UNRECOGNIZED_ERROR; + }; return new OperationResult(false, err); } diff --git a/api/src/main/java/io/kafbat/ui/service/integration/odd/ConnectorInfo.java 
b/api/src/main/java/io/kafbat/ui/service/integration/odd/ConnectorInfo.java index c1703f0b1..186c4a8a1 100644 --- a/api/src/main/java/io/kafbat/ui/service/integration/odd/ConnectorInfo.java +++ b/api/src/main/java/io/kafbat/ui/service/integration/odd/ConnectorInfo.java @@ -5,7 +5,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.function.Function; +import java.util.function.UnaryOperator; import java.util.stream.Stream; import javax.annotation.Nullable; import org.apache.commons.collections.CollectionUtils; @@ -22,7 +22,7 @@ static ConnectorInfo extract(String className, ConnectorTypeDTO type, Map config, List topicsFromApi, // can be empty for old Connect API versions - Function topicOddrnBuilder) { + UnaryOperator topicOddrnBuilder) { return switch (className) { case "org.apache.kafka.connect.file.FileStreamSinkConnector", "org.apache.kafka.connect.file.FileStreamSourceConnector", @@ -43,7 +43,7 @@ static ConnectorInfo extract(String className, private static ConnectorInfo extractFileIoConnector(ConnectorTypeDTO type, List topics, Map config, - Function topicOddrnBuilder) { + UnaryOperator topicOddrnBuilder) { return new ConnectorInfo( extractInputs(type, topics, config, topicOddrnBuilder), extractOutputs(type, topics, config, topicOddrnBuilder) @@ -53,7 +53,7 @@ private static ConnectorInfo extractFileIoConnector(ConnectorTypeDTO type, private static ConnectorInfo extractJdbcSink(ConnectorTypeDTO type, List topics, Map config, - Function topicOddrnBuilder) { + UnaryOperator topicOddrnBuilder) { String tableNameFormat = (String) config.getOrDefault("table.name.format", "${topic}"); List targetTables = extractTopicNamesBestEffort(topics, config) .map(topic -> tableNameFormat.replace("${kafka}", topic)) @@ -106,7 +106,7 @@ private static ConnectorInfo extractDebeziumMysql(Map config) { private static ConnectorInfo extractS3Sink(ConnectorTypeDTO type, List topics, Map config, - Function topicOrrdnBuilder) { + UnaryOperator topicOrrdnBuilder) { String bucketName = (String) config.get("s3.bucket.name"); String topicsDir = (String) config.getOrDefault("topics.dir", "topics"); String directoryDelim = (String) config.getOrDefault("directory.delim", "/"); @@ -122,7 +122,7 @@ private static ConnectorInfo extractS3Sink(ConnectorTypeDTO type, private static List extractInputs(ConnectorTypeDTO type, List topicsFromApi, Map config, - Function topicOrrdnBuilder) { + UnaryOperator topicOrrdnBuilder) { return type == ConnectorTypeDTO.SINK ? extractTopicsOddrns(config, topicsFromApi, topicOrrdnBuilder) : List.of(); @@ -131,7 +131,7 @@ private static List extractInputs(ConnectorTypeDTO type, private static List extractOutputs(ConnectorTypeDTO type, List topicsFromApi, Map config, - Function topicOrrdnBuilder) { + UnaryOperator topicOrrdnBuilder) { return type == ConnectorTypeDTO.SOURCE ? 
extractTopicsOddrns(config, topicsFromApi, topicOrrdnBuilder) : List.of(); @@ -158,7 +158,7 @@ private static Stream extractTopicNamesBestEffort( private static List extractTopicsOddrns(Map config, List topicsFromApi, - Function topicOrrdnBuilder) { + UnaryOperator topicOrrdnBuilder) { return extractTopicNamesBestEffort(topicsFromApi, config) .map(topicOrrdnBuilder) .toList(); diff --git a/api/src/main/java/io/kafbat/ui/service/integration/odd/schema/JsonSchemaExtractor.java b/api/src/main/java/io/kafbat/ui/service/integration/odd/schema/JsonSchemaExtractor.java index 328fcda26..aadf93d7c 100644 --- a/api/src/main/java/io/kafbat/ui/service/integration/odd/schema/JsonSchemaExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/integration/odd/schema/JsonSchemaExtractor.java @@ -286,25 +286,17 @@ private static DataSetField createDataSetField(Schema schema, } private static DataSetFieldType.TypeEnum mapType(Schema type) { - if (type instanceof NumberSchema) { - return DataSetFieldType.TypeEnum.NUMBER; - } - if (type instanceof StringSchema) { - return DataSetFieldType.TypeEnum.STRING; - } - if (type instanceof BooleanSchema || type instanceof TrueSchema || type instanceof FalseSchema) { - return DataSetFieldType.TypeEnum.BOOLEAN; - } - if (type instanceof ObjectSchema) { - return DataSetFieldType.TypeEnum.STRUCT; - } - if (type instanceof ReferenceSchema s) { - return mapType(s.getReferredSchema()); - } - if (type instanceof CombinedSchema) { - return DataSetFieldType.TypeEnum.UNION; - } - return DataSetFieldType.TypeEnum.UNKNOWN; + return switch (type) { + case NumberSchema ignored -> DataSetFieldType.TypeEnum.NUMBER; + case StringSchema ignored -> DataSetFieldType.TypeEnum.STRING; + case BooleanSchema ignored -> DataSetFieldType.TypeEnum.BOOLEAN; + case TrueSchema ignored -> DataSetFieldType.TypeEnum.BOOLEAN; + case FalseSchema ignored -> DataSetFieldType.TypeEnum.BOOLEAN; + case ObjectSchema ignored -> DataSetFieldType.TypeEnum.STRUCT; + case ReferenceSchema referenceSchema -> mapType(referenceSchema.getReferredSchema()); + case CombinedSchema ignored -> DataSetFieldType.TypeEnum.UNION; + default -> DataSetFieldType.TypeEnum.UNKNOWN; + }; } } diff --git a/api/src/main/java/io/kafbat/ui/service/ksql/KsqlApiClient.java b/api/src/main/java/io/kafbat/ui/service/ksql/KsqlApiClient.java index 3a0b46c81..13daee2bc 100644 --- a/api/src/main/java/io/kafbat/ui/service/ksql/KsqlApiClient.java +++ b/api/src/main/java/io/kafbat/ui/service/ksql/KsqlApiClient.java @@ -130,8 +130,8 @@ private Flux executeSelect(String ksql, Map s * Some version of ksqldb (?..0.24) can cut off json streaming without respect proper array ending like
* [{"header":{"queryId":"...","schema":"..."}}, ] * which will cause json parsing error and will be propagated to UI. - * This is a know issue(https://github.com/confluentinc/ksql/issues/8746), but we don't know when it will be fixed. - * To workaround this we need to check DecodingException err msg. + * This is a known issue(...), but we don't know when it will be fixed. + * To work around this we need to check DecodingException err msg. */ private boolean isUnexpectedJsonArrayEndCharException(Throwable th) { return th instanceof DecodingException @@ -176,7 +176,7 @@ public Flux execute(String ksql, Map streamPr if (statements.size() > 1) { return errorTableFlux("Only single statement supported now"); } - if (statements.size() == 0) { + if (statements.isEmpty()) { return errorTableFlux("No valid ksql statement found"); } if (isUnsupportedStatementType(statements.get(0))) { diff --git a/api/src/main/java/io/kafbat/ui/service/ksql/KsqlGrammar.java b/api/src/main/java/io/kafbat/ui/service/ksql/KsqlGrammar.java index 3243841eb..1068ac193 100644 --- a/api/src/main/java/io/kafbat/ui/service/ksql/KsqlGrammar.java +++ b/api/src/main/java/io/kafbat/ui/service/ksql/KsqlGrammar.java @@ -74,13 +74,10 @@ public static CaseInsensitiveStream from(CharStream stream) { @Override public int LA(final int i) { final int result = stream.LA(i); - switch (result) { - case 0: - case IntStream.EOF: - return result; - default: - return Character.toUpperCase(result); - } + return switch (result) { + case 0, IntStream.EOF -> result; + default -> Character.toUpperCase(result); + }; } }; } diff --git a/api/src/main/java/io/kafbat/ui/service/ksql/response/ResponseParser.java b/api/src/main/java/io/kafbat/ui/service/ksql/response/ResponseParser.java index f353ea578..a5d54f369 100644 --- a/api/src/main/java/io/kafbat/ui/service/ksql/response/ResponseParser.java +++ b/api/src/main/java/io/kafbat/ui/service/ksql/response/ResponseParser.java @@ -99,64 +99,45 @@ public static List parseStatementResponse(JsonN .orElse("unknown"); // messages structure can be inferred from https://github.com/confluentinc/ksql/blob/master/ksqldb-rest-model/src/main/java/io/confluent/ksql/rest/entity/KsqlEntity.java - switch (type) { - case "currentStatus": - return parseObject( - "Status", - List.of("status", "message"), - jsonNode.get("commandStatus") - ); - case "properties": - return parseProperties(jsonNode); - case "queries": - return parseArray("Queries", "queries", jsonNode); - case "sourceDescription": - return parseObjectDynamically("Source Description", jsonNode.get("sourceDescription")); - case "queryDescription": - return parseObjectDynamically("Queries Description", jsonNode.get("queryDescription")); - case "topicDescription": - return parseObject( - "Topic Description", - List.of("name", "kafkaTopic", "format", "schemaString"), - jsonNode - ); - case "streams": - return parseArray("Streams", "streams", jsonNode); - case "tables": - return parseArray("Tables", "tables", jsonNode); - case "kafka_topics": - return parseArray("Topics", "topics", jsonNode); - case "kafka_topics_extended": - return parseArray("Topics extended", "topics", jsonNode); - case "executionPlan": - return parseObject("Execution plan", List.of("executionPlanText"), jsonNode); - case "source_descriptions": - return parseArray("Source descriptions", "sourceDescriptions", jsonNode); - case "query_descriptions": - return parseArray("Queries", "queryDescriptions", jsonNode); - case "describe_function": - return parseObject("Function description", - List.of("name", 
"author", "version", "description", "functions", "path", "type"), - jsonNode - ); - case "function_names": - return parseArray("Function Names", "functions", jsonNode); - case "connector_info": - return parseObjectDynamically("Connector Info", jsonNode.get("info")); - case "drop_connector": - return parseObject("Dropped connector", List.of("connectorName"), jsonNode); - case "connector_list": - return parseArray("Connectors", "connectors", jsonNode); - case "connector_plugins_list": - return parseArray("Connector Plugins", "connectorPlugins", jsonNode); - case "connector_description": - return parseObject("Connector Description", - List.of("connectorClass", "status", "sources", "topics"), - jsonNode - ); - default: - return parseUnknownResponse(jsonNode); - } + return switch (type) { + case "currentStatus" -> parseObject( + "Status", + List.of("status", "message"), + jsonNode.get("commandStatus") + ); + case "properties" -> parseProperties(jsonNode); + case "queries" -> parseArray("Queries", "queries", jsonNode); + case "sourceDescription" -> parseObjectDynamically("Source Description", jsonNode.get("sourceDescription")); + case "queryDescription" -> parseObjectDynamically("Queries Description", jsonNode.get("queryDescription")); + case "topicDescription" -> parseObject( + "Topic Description", + List.of("name", "kafkaTopic", "format", "schemaString"), + jsonNode + ); + case "streams" -> parseArray("Streams", "streams", jsonNode); + case "tables" -> parseArray("Tables", "tables", jsonNode); + case "kafka_topics" -> parseArray("Topics", "topics", jsonNode); + case "kafka_topics_extended" -> parseArray("Topics extended", "topics", jsonNode); + case "executionPlan" -> parseObject("Execution plan", List.of("executionPlanText"), jsonNode); + case "source_descriptions" -> parseArray("Source descriptions", "sourceDescriptions", jsonNode); + case "query_descriptions" -> parseArray("Queries", "queryDescriptions", jsonNode); + case "describe_function" -> parseObject( + "Function description", + List.of("name", "author", "version", "description", "functions", "path", "type"), + jsonNode + ); + case "function_names" -> parseArray("Function Names", "functions", jsonNode); + case "connector_info" -> parseObjectDynamically("Connector Info", jsonNode.get("info")); + case "drop_connector" -> parseObject("Dropped connector", List.of("connectorName"), jsonNode); + case "connector_list" -> parseArray("Connectors", "connectors", jsonNode); + case "connector_plugins_list" -> parseArray("Connector Plugins", "connectorPlugins", jsonNode); + case "connector_description" -> parseObject( + "Connector Description", + List.of("connectorClass", "status", "sources", "topics"), + jsonNode + ); + default -> parseUnknownResponse(jsonNode); + }; } private static List parseObjectDynamically( diff --git a/api/src/main/java/io/kafbat/ui/service/metrics/JmxMetricsFormatter.java b/api/src/main/java/io/kafbat/ui/service/metrics/JmxMetricsFormatter.java index e4eeed278..37323c7dd 100644 --- a/api/src/main/java/io/kafbat/ui/service/metrics/JmxMetricsFormatter.java +++ b/api/src/main/java/io/kafbat/ui/service/metrics/JmxMetricsFormatter.java @@ -51,10 +51,10 @@ private static Optional convertNumericValue(Object value) { return Optional.empty(); } try { - if (value instanceof Long) { - return Optional.of(new BigDecimal((Long) value)); - } else if (value instanceof Integer) { - return Optional.of(new BigDecimal((Integer) value)); + if (value instanceof Long longValue) { + return Optional.of(new BigDecimal(longValue)); + } else if 
(value instanceof Integer integerValue) { + return Optional.of(new BigDecimal(integerValue)); } return Optional.of(new BigDecimal(value.toString())); } catch (NumberFormatException nfe) { diff --git a/api/src/main/java/io/kafbat/ui/service/metrics/MetricsCollector.java b/api/src/main/java/io/kafbat/ui/service/metrics/MetricsCollector.java index b124feb8d..e9a08e8cb 100644 --- a/api/src/main/java/io/kafbat/ui/service/metrics/MetricsCollector.java +++ b/api/src/main/java/io/kafbat/ui/service/metrics/MetricsCollector.java @@ -28,7 +28,7 @@ public Mono getBrokerMetrics(KafkaCluster cluster, Collection nod return Flux.fromIterable(nodes) .flatMap(n -> getMetrics(cluster, n).map(lst -> Tuples.of(n, lst))) .collectMap(Tuple2::getT1, Tuple2::getT2) - .map(nodeMetrics -> collectMetrics(cluster, nodeMetrics)) + .map(this::collectMetrics) .defaultIfEmpty(Metrics.empty()); } @@ -45,20 +45,19 @@ private Mono> getMetrics(KafkaCluster kafkaCluster, Node node) { return metricFlux.collectList(); } - public Metrics collectMetrics(KafkaCluster cluster, Map> perBrokerMetrics) { + public Metrics collectMetrics(Map> perBrokerMetrics) { Metrics.MetricsBuilder builder = Metrics.builder() .perBrokerMetrics( perBrokerMetrics.entrySet() .stream() .collect(Collectors.toMap(e -> e.getKey().id(), Map.Entry::getValue))); - populateWellknowMetrics(cluster, perBrokerMetrics) - .apply(builder); + populateWellknowMetrics(perBrokerMetrics).apply(builder); return builder.build(); } - private WellKnownMetrics populateWellknowMetrics(KafkaCluster cluster, Map> perBrokerMetrics) { + private WellKnownMetrics populateWellknowMetrics(Map> perBrokerMetrics) { WellKnownMetrics wellKnownMetrics = new WellKnownMetrics(); perBrokerMetrics.forEach((node, metrics) -> metrics.forEach(metric -> diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/AccessControlService.java b/api/src/main/java/io/kafbat/ui/service/rbac/AccessControlService.java index 7fe3782b3..896bbf875 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/AccessControlService.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/AccessControlService.java @@ -113,11 +113,11 @@ private boolean isAccessible(AuthenticatedUser user, AccessContext context) { return context.isAccessible(getUserPermissions(user, context.cluster())); } - private List getUserPermissions(AuthenticatedUser user, String clusterName) { + private List getUserPermissions(AuthenticatedUser user, @Nullable String clusterName) { return properties.getRoles() .stream() .filter(filterRole(user)) - .filter(role -> role.getClusters().stream().anyMatch(clusterName::equalsIgnoreCase)) + .filter(role -> clusterName == null || role.getClusters().stream().anyMatch(clusterName::equalsIgnoreCase)) .flatMap(role -> role.getPermissions().stream()) .toList(); } diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/CognitoAuthorityExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/CognitoAuthorityExtractor.java index a246b8910..cc0e419bf 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/CognitoAuthorityExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/CognitoAuthorityExtractor.java @@ -50,7 +50,7 @@ private Set extractUsernameRoles(AccessControlService acs, DefaultOAuth2 .stream() .filter(s -> s.getProvider().equals(Provider.OAUTH_COGNITO)) .filter(s -> s.getType().equals("user")) - .anyMatch(s -> s.getValue().equalsIgnoreCase(principal.getName()))) + .anyMatch(s -> s.matches(principal.getName()))) .map(Role::getName) 
.collect(Collectors.toSet()); @@ -76,7 +76,7 @@ private Set extractGroupRoles(AccessControlService acs, DefaultOAuth2Use .filter(s -> s.getType().equals("group")) .anyMatch(subject -> groups .stream() - .anyMatch(cognitoGroup -> cognitoGroup.equalsIgnoreCase(subject.getValue())) + .anyMatch(subject::matches) )) .map(Role::getName) .collect(Collectors.toSet()); diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GithubAuthorityExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GithubAuthorityExtractor.java index b50e76a16..79f4907fc 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GithubAuthorityExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GithubAuthorityExtractor.java @@ -90,7 +90,7 @@ private Set extractUsernameRoles(DefaultOAuth2User principal, AccessCont .stream() .filter(s -> s.getProvider().equals(Provider.OAUTH_GITHUB)) .filter(s -> s.getType().equals("user")) - .anyMatch(s -> s.getValue().equals(username))) + .anyMatch(s -> s.matches(username))) .map(Role::getName) .collect(Collectors.toSet()); @@ -131,7 +131,7 @@ private Mono> getOrganizationRoles(DefaultOAuth2User principal, Map< .filter(s -> s.getType().equals(ORGANIZATION)) .anyMatch(subject -> orgsMap.stream() .map(org -> org.get(ORGANIZATION_NAME).toString()) - .anyMatch(orgName -> orgName.equalsIgnoreCase(subject.getValue())) + .anyMatch(subject::matches) )) .map(Role::getName) .collect(Collectors.toSet())); @@ -189,7 +189,7 @@ private Mono> getTeamRoles(WebClient webClient, Map .filter(s -> s.getProvider().equals(Provider.OAUTH_GITHUB)) .filter(s -> s.getType().equals("team")) .anyMatch(subject -> teams.stream() - .anyMatch(teamName -> teamName.equalsIgnoreCase(subject.getValue())) + .anyMatch(subject::matches) )) .map(Role::getName) .collect(Collectors.toSet())); diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GoogleAuthorityExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GoogleAuthorityExtractor.java index 8ea6d2108..a90ab50ef 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GoogleAuthorityExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/GoogleAuthorityExtractor.java @@ -50,7 +50,10 @@ private Set extractUsernameRoles(AccessControlService acs, DefaultOAuth2 .stream() .filter(s -> s.getProvider().equals(Provider.OAUTH_GOOGLE)) .filter(s -> s.getType().equals("user")) - .anyMatch(s -> s.getValue().equalsIgnoreCase(principal.getAttribute(EMAIL_ATTRIBUTE_NAME)))) + .anyMatch(s -> { + String email = principal.getAttribute(EMAIL_ATTRIBUTE_NAME); + return s.matches(email); + })) .map(Role::getName) .collect(Collectors.toSet()); } @@ -68,7 +71,7 @@ private Set extractDomainRoles(AccessControlService acs, DefaultOAuth2Us .stream() .filter(s -> s.getProvider().equals(Provider.OAUTH_GOOGLE)) .filter(s -> s.getType().equals("domain")) - .anyMatch(s -> s.getValue().equals(domain))) + .anyMatch(s -> s.matches(domain))) .map(Role::getName) .collect(Collectors.toSet()); } diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java index 6d14ab870..61748610e 100644 --- a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/OauthAuthorityExtractor.java @@ -58,9 +58,8 @@ private Set extractUsernameRoles(AccessControlService acs, DefaultOAuth2 .stream() .filter(s -> 
s.getProvider().equals(Provider.OAUTH)) .filter(s -> s.getType().equals("user")) - .peek(s -> log.trace("[{}] matches [{}]? [{}]", s.getValue(), principalName, - s.getValue().equalsIgnoreCase(principalName))) - .anyMatch(s -> s.getValue().equalsIgnoreCase(principalName))) + .peek(s -> log.trace("[{}] matches [{}]? [{}]", s.getValue(), principalName, s.matches(principalName))) + .anyMatch(s -> s.matches(principalName))) .map(Role::getName) .collect(Collectors.toSet()); @@ -94,11 +93,7 @@ private Set extractRoles(AccessControlService acs, DefaultOAuth2User pri .stream() .filter(s -> s.getProvider().equals(Provider.OAUTH)) .filter(s -> s.getType().equals("role")) - .anyMatch(subject -> { - var roleName = subject.getValue(); - return principalRoles.contains(roleName); - }) - ) + .anyMatch(subject -> principalRoles.stream().anyMatch(subject::matches))) .map(Role::getName) .collect(Collectors.toSet()); diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacActiveDirectoryAuthoritiesExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacActiveDirectoryAuthoritiesExtractor.java new file mode 100644 index 000000000..76feff063 --- /dev/null +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacActiveDirectoryAuthoritiesExtractor.java @@ -0,0 +1,50 @@ +package io.kafbat.ui.service.rbac.extractor; + +import io.kafbat.ui.model.rbac.Role; +import io.kafbat.ui.model.rbac.provider.Provider; +import io.kafbat.ui.service.rbac.AccessControlService; +import java.util.Collection; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; +import org.springframework.context.ApplicationContext; +import org.springframework.ldap.core.DirContextOperations; +import org.springframework.security.core.GrantedAuthority; +import org.springframework.security.core.authority.SimpleGrantedAuthority; +import org.springframework.security.ldap.authentication.ad.DefaultActiveDirectoryAuthoritiesPopulator; +import org.springframework.security.ldap.userdetails.LdapAuthoritiesPopulator; + +@Slf4j +public class RbacActiveDirectoryAuthoritiesExtractor implements LdapAuthoritiesPopulator { + + private final DefaultActiveDirectoryAuthoritiesPopulator populator = new DefaultActiveDirectoryAuthoritiesPopulator(); + private final AccessControlService acs; + + public RbacActiveDirectoryAuthoritiesExtractor(ApplicationContext context) { + this.acs = context.getBean(AccessControlService.class); + } + + @Override + public Collection getGrantedAuthorities(DirContextOperations userData, String username) { + var adGroups = populator.getGrantedAuthorities(userData, username) + .stream() + .map(GrantedAuthority::getAuthority) + .peek(group -> log.trace("Found AD group [{}] for user [{}]", group, username)) + .collect(Collectors.toSet()); + + return acs.getRoles() + .stream() + .filter(r -> r.getSubjects() + .stream() + .filter(subject -> subject.getProvider().equals(Provider.LDAP_AD)) + .anyMatch(subject -> switch (subject.getType()) { + case "user" -> subject.matches(username); + case "group" -> adGroups.stream().anyMatch(subject::matches); + default -> false; + }) + ) + .map(Role::getName) + .peek(role -> log.trace("Mapped role [{}] for user [{}]", role, username)) + .map(SimpleGrantedAuthority::new) + .collect(Collectors.toSet()); + } +} diff --git a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java index 3282ab1e2..78ec4ba19 100644 --- 
a/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java +++ b/api/src/main/java/io/kafbat/ui/service/rbac/extractor/RbacLdapAuthoritiesExtractor.java @@ -19,7 +19,8 @@ public class RbacLdapAuthoritiesExtractor extends NestedLdapAuthoritiesPopulator private final AccessControlService acs; public RbacLdapAuthoritiesExtractor(ApplicationContext context, - BaseLdapPathContextSource contextSource, String groupFilterSearchBase) { + BaseLdapPathContextSource contextSource, + String groupFilterSearchBase) { super(contextSource, groupFilterSearchBase); this.acs = context.getBean(AccessControlService.class); } @@ -37,8 +38,11 @@ protected Set getAdditionalRoles(DirContextOperations user, St .filter(r -> r.getSubjects() .stream() .filter(subject -> subject.getProvider().equals(Provider.LDAP)) - .filter(subject -> subject.getType().equals("group")) - .anyMatch(subject -> ldapGroups.contains(subject.getValue())) + .anyMatch(subject -> switch (subject.getType()) { + case "user" -> subject.matches(username); + case "group" -> ldapGroups.stream().anyMatch(subject::matches); + default -> false; + }) ) .map(Role::getName) .peek(role -> log.trace("Mapped role [{}] for user [{}]", role, username)) diff --git a/api/src/main/java/io/kafbat/ui/util/CustomSslSocketFactory.java b/api/src/main/java/io/kafbat/ui/util/CustomSslSocketFactory.java new file mode 100644 index 000000000..b5fee8cae --- /dev/null +++ b/api/src/main/java/io/kafbat/ui/util/CustomSslSocketFactory.java @@ -0,0 +1,69 @@ +package io.kafbat.ui.util; + +import io.netty.handler.ssl.util.InsecureTrustManagerFactory; +import java.io.IOException; +import java.net.InetAddress; +import java.net.Socket; +import java.security.SecureRandom; +import javax.net.SocketFactory; +import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLSocketFactory; + +public class CustomSslSocketFactory extends SSLSocketFactory { + private final SSLSocketFactory socketFactory; + + public CustomSslSocketFactory() { + try { + SSLContext sslContext = SSLContext.getInstance("TLS"); + sslContext.init(null, InsecureTrustManagerFactory.INSTANCE.getTrustManagers(), new SecureRandom()); + + socketFactory = sslContext.getSocketFactory(); + } catch (Exception e) { + throw new RuntimeException(e); + } + } + + public static SocketFactory getDefault() { + return new CustomSslSocketFactory(); + } + + @Override + public String[] getDefaultCipherSuites() { + return socketFactory.getDefaultCipherSuites(); + } + + @Override + public String[] getSupportedCipherSuites() { + return socketFactory.getSupportedCipherSuites(); + } + + @Override + public Socket createSocket(Socket socket, String string, int i, boolean bln) throws IOException { + return socketFactory.createSocket(socket, string, i, bln); + } + + @Override + public Socket createSocket(String string, int i) throws IOException { + return socketFactory.createSocket(string, i); + } + + @Override + public Socket createSocket(String string, int i, InetAddress ia, int i1) throws IOException { + return socketFactory.createSocket(string, i, ia, i1); + } + + @Override + public Socket createSocket(InetAddress ia, int i) throws IOException { + return socketFactory.createSocket(ia, i); + } + + @Override + public Socket createSocket(InetAddress ia, int i, InetAddress ia1, int i1) throws IOException { + return socketFactory.createSocket(ia, i, ia1, i1); + } + + @Override + public Socket createSocket() throws IOException { + return socketFactory.createSocket(); + } +} diff --git 
a/api/src/main/java/io/kafbat/ui/util/DynamicConfigOperations.java b/api/src/main/java/io/kafbat/ui/util/DynamicConfigOperations.java index 9ac25c517..6c3a99b04 100644 --- a/api/src/main/java/io/kafbat/ui/util/DynamicConfigOperations.java +++ b/api/src/main/java/io/kafbat/ui/util/DynamicConfigOperations.java @@ -38,6 +38,7 @@ import org.yaml.snakeyaml.nodes.Tag; import org.yaml.snakeyaml.representer.Representer; import reactor.core.publisher.Mono; +import reactor.core.scheduler.Schedulers; @Slf4j @RequiredArgsConstructor @@ -125,26 +126,30 @@ public Mono uploadConfigRelatedFile(FilePart file) { String targetDirStr = ctx.getEnvironment() .getProperty(CONFIG_RELATED_UPLOADS_DIR_PROPERTY, CONFIG_RELATED_UPLOADS_DIR_DEFAULT); - Path targetDir = Path.of(targetDirStr); - if (!Files.exists(targetDir)) { - try { - Files.createDirectories(targetDir); - } catch (IOException e) { - return Mono.error( - new FileUploadException("Error creating directory for uploads %s".formatted(targetDir), e)); + Mono directoryCreationMono = Mono.fromCallable(() -> { + Path targetDir = Path.of(targetDirStr); + if (!Files.exists(targetDir)) { + try { + Files.createDirectories(targetDir); + } catch (IOException e) { + throw new FileUploadException("Error creating directory for uploads %s".formatted(targetDir), e); + } + } + return targetDir; + }).subscribeOn(Schedulers.boundedElastic()); + + return directoryCreationMono.flatMap(dir -> { + Path targetFilePath = dir.resolve(file.filename() + "-" + Instant.now().getEpochSecond()); + log.info("Uploading config-related file {}", targetFilePath); + if (Files.exists(targetFilePath)) { + log.info("File {} already exists, it will be overwritten", targetFilePath); } - } - - Path targetFilePath = targetDir.resolve(file.filename() + "-" + Instant.now().getEpochSecond()); - log.info("Uploading config-related file {}", targetFilePath); - if (Files.exists(targetFilePath)) { - log.info("File {} already exists, it will be overwritten", targetFilePath); - } - return file.transferTo(targetFilePath) - .thenReturn(targetFilePath) - .doOnError(th -> log.error("Error uploading file {}", targetFilePath, th)) - .onErrorMap(th -> new FileUploadException(targetFilePath, th)); + return file.transferTo(targetFilePath) + .thenReturn(targetFilePath) + .doOnError(th -> log.error("Error uploading file {}", targetFilePath, th)) + .onErrorMap(th -> new FileUploadException(targetFilePath, th)); + }); } private void checkIfDynamicConfigEnabled() { @@ -163,8 +168,8 @@ private void writeYamlToFile(String yaml, Path path) { if (!Files.exists(path.getParent())) { Files.createDirectories(path.getParent()); } - if (Files.exists(path) && !Files.isWritable(path)) { - throw new ValidationException("File already exists and is not writable"); + if (Files.exists(path) && (!Files.isReadable(path) || !Files.isWritable(path))) { + throw new ValidationException("File already exists and is not readable or writable"); } try { Files.writeString( diff --git a/api/src/main/java/io/kafbat/ui/util/EmptyRedirectStrategy.java b/api/src/main/java/io/kafbat/ui/util/EmptyRedirectStrategy.java index c6f80a113..9ebbba7f8 100644 --- a/api/src/main/java/io/kafbat/ui/util/EmptyRedirectStrategy.java +++ b/api/src/main/java/io/kafbat/ui/util/EmptyRedirectStrategy.java @@ -10,27 +10,18 @@ public class EmptyRedirectStrategy implements ServerRedirectStrategy { - private HttpStatus httpStatus = HttpStatus.FOUND; - - private boolean contextRelative = true; - public Mono sendRedirect(ServerWebExchange exchange, URI location) { 
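// Behavior is now fixed: always respond 302 FOUND and resolve the Location against the
// request's context path; the configurable httpStatus/contextRelative fields are removed below.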
Assert.notNull(exchange, "exchange cannot be null"); Assert.notNull(location, "location cannot be null"); return Mono.fromRunnable(() -> { ServerHttpResponse response = exchange.getResponse(); - response.setStatusCode(this.httpStatus); + response.setStatusCode(HttpStatus.FOUND); response.getHeaders().setLocation(createLocation(exchange, location)); }); } private URI createLocation(ServerWebExchange exchange, URI location) { - if (!this.contextRelative) { - return location; - } - - String url = location.getPath().isEmpty() ? "/" - : location.toASCIIString(); + String url = location.getPath().isEmpty() ? "/" : location.toASCIIString(); if (url.startsWith("/")) { String context = exchange.getRequest().getPath().contextPath().value(); @@ -38,13 +29,4 @@ private URI createLocation(ServerWebExchange exchange, URI location) { } return location; } - - public void setHttpStatus(HttpStatus httpStatus) { - Assert.notNull(httpStatus, "httpStatus cannot be null"); - this.httpStatus = httpStatus; - } - - public void setContextRelative(boolean contextRelative) { - this.contextRelative = contextRelative; - } } diff --git a/api/src/main/java/io/kafbat/ui/util/KafkaClientSslPropertiesUtil.java b/api/src/main/java/io/kafbat/ui/util/KafkaClientSslPropertiesUtil.java new file mode 100644 index 000000000..324e2e4d0 --- /dev/null +++ b/api/src/main/java/io/kafbat/ui/util/KafkaClientSslPropertiesUtil.java @@ -0,0 +1,35 @@ +package io.kafbat.ui.util; + +import io.kafbat.ui.config.ClustersProperties; +import java.util.Properties; +import javax.annotation.Nullable; +import org.apache.kafka.common.config.SslConfigs; + +public final class KafkaClientSslPropertiesUtil { + + private KafkaClientSslPropertiesUtil() { + } + + public static void addKafkaSslProperties(@Nullable ClustersProperties.TruststoreConfig truststoreConfig, + Properties sink) { + if (truststoreConfig == null) { + return; + } + + if (!truststoreConfig.isVerifySsl()) { + sink.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, ""); + } + + if (truststoreConfig.getTruststoreLocation() == null) { + return; + } + + sink.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, truststoreConfig.getTruststoreLocation()); + + if (truststoreConfig.getTruststorePassword() != null) { + sink.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, truststoreConfig.getTruststorePassword()); + } + + } + +} diff --git a/api/src/main/java/io/kafbat/ui/util/KafkaServicesValidation.java b/api/src/main/java/io/kafbat/ui/util/KafkaServicesValidation.java index 629d0f339..019a33543 100644 --- a/api/src/main/java/io/kafbat/ui/util/KafkaServicesValidation.java +++ b/api/src/main/java/io/kafbat/ui/util/KafkaServicesValidation.java @@ -62,10 +62,9 @@ public static Optional validateTruststore(TruststoreConfig truststoreCon public static Mono validateClusterConnection(String bootstrapServers, Properties clusterProps, - @Nullable - TruststoreConfig ssl) { + @Nullable TruststoreConfig ssl) { Properties properties = new Properties(); - SslPropertiesUtil.addKafkaSslProperties(ssl, properties); + KafkaClientSslPropertiesUtil.addKafkaSslProperties(ssl, properties); properties.putAll(clusterProps); properties.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); // editing properties to make validation faster @@ -73,7 +72,7 @@ public static Mono validateClusterConnection(S properties.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, 5_000); properties.put(AdminClientConfig.DEFAULT_API_TIMEOUT_MS_CONFIG, 5_000); properties.put(AdminClientConfig.CLIENT_ID_CONFIG, 
"kui-admin-client-validation-" + System.currentTimeMillis()); - AdminClient adminClient = null; + AdminClient adminClient; try { adminClient = AdminClient.create(properties); } catch (Exception e) { diff --git a/api/src/main/java/io/kafbat/ui/util/ReactiveFailover.java b/api/src/main/java/io/kafbat/ui/util/ReactiveFailover.java index b46384d2e..872e9ddf9 100644 --- a/api/src/main/java/io/kafbat/ui/util/ReactiveFailover.java +++ b/api/src/main/java/io/kafbat/ui/util/ReactiveFailover.java @@ -59,8 +59,8 @@ public static ReactiveFailover create(List args, } private ReactiveFailover(List> publishers, - Predicate failoverExceptionsPredicate, - String noAvailablePublishersMsg) { + Predicate failoverExceptionsPredicate, + String noAvailablePublishersMsg) { Preconditions.checkArgument(!publishers.isEmpty()); this.publishers = publishers; this.failoverExceptionsPredicate = failoverExceptionsPredicate; diff --git a/api/src/main/java/io/kafbat/ui/util/SslPropertiesUtil.java b/api/src/main/java/io/kafbat/ui/util/SslPropertiesUtil.java deleted file mode 100644 index fda959a2b..000000000 --- a/api/src/main/java/io/kafbat/ui/util/SslPropertiesUtil.java +++ /dev/null @@ -1,23 +0,0 @@ -package io.kafbat.ui.util; - -import io.kafbat.ui.config.ClustersProperties; -import java.util.Properties; -import javax.annotation.Nullable; -import org.apache.kafka.common.config.SslConfigs; - -public final class SslPropertiesUtil { - - private SslPropertiesUtil() { - } - - public static void addKafkaSslProperties(@Nullable ClustersProperties.TruststoreConfig truststoreConfig, - Properties sink) { - if (truststoreConfig != null && truststoreConfig.getTruststoreLocation() != null) { - sink.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, truststoreConfig.getTruststoreLocation()); - if (truststoreConfig.getTruststorePassword() != null) { - sink.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, truststoreConfig.getTruststorePassword()); - } - } - } - -} diff --git a/api/src/main/java/io/kafbat/ui/util/StaticFileWebFilter.java b/api/src/main/java/io/kafbat/ui/util/StaticFileWebFilter.java new file mode 100644 index 000000000..f76ffad6b --- /dev/null +++ b/api/src/main/java/io/kafbat/ui/util/StaticFileWebFilter.java @@ -0,0 +1,69 @@ +package io.kafbat.ui.util; + +import java.io.IOException; +import lombok.extern.slf4j.Slf4j; +import org.jetbrains.annotations.NotNull; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.buffer.DataBufferFactory; +import org.springframework.http.HttpMethod; +import org.springframework.http.HttpStatus; +import org.springframework.http.MediaType; +import org.springframework.http.server.reactive.ServerHttpResponse; +import org.springframework.security.web.server.util.matcher.ServerWebExchangeMatcher; +import org.springframework.security.web.server.util.matcher.ServerWebExchangeMatchers; +import org.springframework.web.server.ServerWebExchange; +import org.springframework.web.server.WebFilter; +import org.springframework.web.server.WebFilterChain; +import reactor.core.publisher.Mono; + +@Slf4j +public class StaticFileWebFilter implements WebFilter { + + private static final String INDEX_HTML = "/static/index.html"; + + private final ServerWebExchangeMatcher matcher; + private final String contents; + + public StaticFileWebFilter() { + this("/login", new ClassPathResource(INDEX_HTML)); + } + + public StaticFileWebFilter(String path, ClassPathResource resource) { + this.matcher = ServerWebExchangeMatchers.pathMatchers(HttpMethod.GET, path); + + if (!resource.exists()) { 
+ log.warn("Resource [{}] does not exist. Frontend might not be available.", resource.getPath()); + contents = "Missing index.html. Make sure the app has been built with a correct (prod) profile."; + return; + } + + try { + this.contents = ResourceUtil.readAsString(resource); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + @Override + public @NotNull Mono filter(@NotNull ServerWebExchange exchange, WebFilterChain chain) { + return this.matcher.matches(exchange) + .filter(ServerWebExchangeMatcher.MatchResult::isMatch) + .switchIfEmpty(chain.filter(exchange).then(Mono.empty())) + .flatMap((matchResult) -> this.render(exchange)); + } + + private Mono render(ServerWebExchange exchange) { + String contextPath = exchange.getRequest().getPath().contextPath().value(); + + String contentBody = contents + .replace("\"assets/", "\"" + contextPath + "/assets/") + .replace("PUBLIC-PATH-VARIABLE", contextPath); + + ServerHttpResponse result = exchange.getResponse(); + result.setStatusCode(HttpStatus.OK); + result.getHeaders().setContentType(MediaType.TEXT_HTML); + DataBufferFactory bufferFactory = exchange.getResponse().bufferFactory(); + return result.writeWith(Mono.just(bufferFactory.wrap(contentBody.getBytes()))); + } + +} diff --git a/api/src/main/java/io/kafbat/ui/util/WebClientConfigurator.java b/api/src/main/java/io/kafbat/ui/util/WebClientConfigurator.java index 5d364f6dc..1c289f54f 100644 --- a/api/src/main/java/io/kafbat/ui/util/WebClientConfigurator.java +++ b/api/src/main/java/io/kafbat/ui/util/WebClientConfigurator.java @@ -7,6 +7,7 @@ import io.kafbat.ui.exception.ValidationException; import io.netty.handler.ssl.SslContext; import io.netty.handler.ssl.SslContextBuilder; +import io.netty.handler.ssl.util.InsecureTrustManagerFactory; import java.io.FileInputStream; import java.security.KeyStore; import java.util.function.Consumer; @@ -45,6 +46,10 @@ private static ObjectMapper defaultOM() { public WebClientConfigurator configureSsl(@Nullable ClustersProperties.TruststoreConfig truststoreConfig, @Nullable ClustersProperties.KeystoreConfig keystoreConfig) { + if (truststoreConfig != null && !truststoreConfig.isVerifySsl()) { + return configureNoSsl(); + } + return configureSsl( keystoreConfig != null ? keystoreConfig.getKeystoreLocation() : null, keystoreConfig != null ? 
keystoreConfig.getKeystorePassword() : null, @@ -97,6 +102,17 @@ private WebClientConfigurator configureSsl( return this; } + @SneakyThrows + public WebClientConfigurator configureNoSsl() { + var contextBuilder = SslContextBuilder.forClient(); + contextBuilder.trustManager(InsecureTrustManagerFactory.INSTANCE); + + SslContext context = contextBuilder.build(); + + httpClient = httpClient.secure(t -> t.sslContext(context)); + return this; + } + public WebClientConfigurator configureBasicAuth(@Nullable String username, @Nullable String password) { if (username != null && password != null) { builder.defaultHeaders(httpHeaders -> httpHeaders.setBasicAuth(username, password)); diff --git a/api/src/main/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverter.java b/api/src/main/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverter.java index 55c75c950..4756c36b0 100644 --- a/api/src/main/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverter.java +++ b/api/src/main/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverter.java @@ -80,14 +80,12 @@ private FieldSchema createUnionSchema(Schema schema, Map de final Map fields = schema.getTypes().stream() .filter(t -> !t.getType().equals(Schema.Type.NULL)) .map(f -> { - String oneOfFieldName; - if (f.getType().equals(Schema.Type.RECORD)) { - // for records using full record name - oneOfFieldName = f.getFullName(); - } else { + String oneOfFieldName = switch (f.getType()) { + case RECORD -> f.getFullName(); + case ENUM -> f.getName(); // for primitive types - using type name - oneOfFieldName = f.getType().getName().toLowerCase(); - } + default -> f.getType().getName().toLowerCase(); + }; return Tuples.of(oneOfFieldName, convertSchema(f, definitions, false)); }).collect(Collectors.toMap( Tuple2::getT1, @@ -155,12 +153,11 @@ private JsonType convertType(Schema schema) { case INT, LONG -> new SimpleJsonType(JsonType.Type.INTEGER); case MAP, RECORD -> new SimpleJsonType(JsonType.Type.OBJECT); case ENUM -> new EnumJsonType(schema.getEnumSymbols()); - case BYTES, STRING -> new SimpleJsonType(JsonType.Type.STRING); case NULL -> new SimpleJsonType(JsonType.Type.NULL); case ARRAY -> new SimpleJsonType(JsonType.Type.ARRAY); case FIXED, FLOAT, DOUBLE -> new SimpleJsonType(JsonType.Type.NUMBER); case BOOLEAN -> new SimpleJsonType(JsonType.Type.BOOLEAN); - default -> new SimpleJsonType(JsonType.Type.STRING); + default -> new SimpleJsonType(JsonType.Type.STRING); // BYTES, STRING and the remaining possibilities }; } } diff --git a/api/src/main/java/io/kafbat/ui/util/jsonschema/JsonAvroConversion.java b/api/src/main/java/io/kafbat/ui/util/jsonschema/JsonAvroConversion.java index 52b6913f6..de23d40cd 100644 --- a/api/src/main/java/io/kafbat/ui/util/jsonschema/JsonAvroConversion.java +++ b/api/src/main/java/io/kafbat/ui/util/jsonschema/JsonAvroConversion.java @@ -49,7 +49,7 @@ public class JsonAvroConversion { // converts json into Object that is expected input for KafkaAvroSerializer // (with AVRO_USE_LOGICAL_TYPE_CONVERTERS flat enabled!) 
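// Usage sketch (hypothetical schema, not part of this change):
//   Schema schema = new Schema.Parser().parse(
//       "{\"type\":\"record\",\"name\":\"R\",\"fields\":[{\"name\":\"id\",\"type\":\"long\"}]}");
//   Object record = convertJsonToAvro("{\"id\": 1}", schema); // yields a GenericData.Record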
public static Object convertJsonToAvro(String jsonString, Schema avroSchema) { - JsonNode rootNode = null; + JsonNode rootNode; try { rootNode = MAPPER.readTree(jsonString); } catch (JsonProcessingException e) { @@ -221,9 +221,7 @@ public static JsonNode convertAvroToJson(Object obj, Schema avroSchema) { list.forEach(e -> node.add(convertAvroToJson(e, avroSchema.getElementType()))); yield node; } - case ENUM -> { - yield new TextNode(obj.toString()); - } + case ENUM -> new TextNode(obj.toString()); case UNION -> { ObjectNode node = MAPPER.createObjectNode(); int unionIdx = AvroData.getGenericData().resolveUnion(avroSchema, obj); @@ -343,9 +341,7 @@ enum LogicalTypeConversion { assertJsonType(node, JsonNodeType.STRING); return java.util.UUID.fromString(node.asText()); }, - (obj, schema) -> { - return new TextNode(obj.toString()); - }, + (obj, schema) -> new TextNode(obj.toString()), new SimpleFieldSchema( new SimpleJsonType( JsonType.Type.STRING, @@ -363,9 +359,7 @@ enum LogicalTypeConversion { "node '%s' can't be converted to decimal logical type" .formatted(node)); }, - (obj, schema) -> { - return new DecimalNode((BigDecimal) obj); - }, + (obj, schema) -> new DecimalNode((BigDecimal) obj), new SimpleFieldSchema(new SimpleJsonType(JsonType.Type.NUMBER)) ), @@ -381,9 +375,7 @@ enum LogicalTypeConversion { .formatted(node)); } }, - (obj, schema) -> { - return new TextNode(obj.toString()); - }, + (obj, schema) -> new TextNode(obj.toString()), new SimpleFieldSchema( new SimpleJsonType( JsonType.Type.STRING, @@ -402,9 +394,7 @@ enum LogicalTypeConversion { .formatted(node)); } }, - (obj, schema) -> { - return new TextNode(obj.toString()); - }, + (obj, schema) -> new TextNode(obj.toString()), new SimpleFieldSchema( new SimpleJsonType( JsonType.Type.STRING, @@ -423,9 +413,7 @@ enum LogicalTypeConversion { .formatted(node)); } }, - (obj, schema) -> { - return new TextNode(obj.toString()); - }, + (obj, schema) -> new TextNode(obj.toString()), new SimpleFieldSchema( new SimpleJsonType( JsonType.Type.STRING, @@ -444,9 +432,7 @@ enum LogicalTypeConversion { .formatted(node)); } }, - (obj, schema) -> { - return new TextNode(obj.toString()); - }, + (obj, schema) -> new TextNode(obj.toString()), new SimpleFieldSchema( new SimpleJsonType( JsonType.Type.STRING, @@ -469,9 +455,7 @@ enum LogicalTypeConversion { .formatted(node)); } }, - (obj, schema) -> { - return new TextNode(obj.toString()); - }, + (obj, schema) -> new TextNode(obj.toString()), new SimpleFieldSchema( new SimpleJsonType( JsonType.Type.STRING, @@ -487,9 +471,7 @@ enum LogicalTypeConversion { Instant instant = (Instant) TIMESTAMP_MILLIS.jsonToAvroConversion.apply(node, schema); return LocalDateTime.ofInstant(instant, ZoneOffset.UTC); }, - (obj, schema) -> { - return new TextNode(obj.toString()); - }, + (obj, schema) -> new TextNode(obj.toString()), new SimpleFieldSchema( new SimpleJsonType( JsonType.Type.STRING, @@ -504,9 +486,7 @@ enum LogicalTypeConversion { Instant instant = (Instant) TIMESTAMP_MICROS.jsonToAvroConversion.apply(node, schema); return LocalDateTime.ofInstant(instant, ZoneOffset.UTC); }, - (obj, schema) -> { - return new TextNode(obj.toString()); - }, + (obj, schema) -> new TextNode(obj.toString()), new SimpleFieldSchema( new SimpleJsonType( JsonType.Type.STRING, diff --git a/api/src/main/java/io/kafbat/ui/util/jsonschema/JsonSchema.java b/api/src/main/java/io/kafbat/ui/util/jsonschema/JsonSchema.java index 090010dac..491acebc7 100644 --- a/api/src/main/java/io/kafbat/ui/util/jsonschema/JsonSchema.java +++ 
b/api/src/main/java/io/kafbat/ui/util/jsonschema/JsonSchema.java @@ -9,7 +9,6 @@ import java.util.stream.Collectors; import lombok.Builder; import lombok.Data; -import lombok.SneakyThrows; import reactor.util.function.Tuple2; import reactor.util.function.Tuples; @@ -59,12 +58,4 @@ public String toJson() { } return objectNode.toString(); } - - @SneakyThrows - public static JsonSchema stringSchema() { - return JsonSchema.builder() - .id(new URI("http://unknown.unknown")) - .type(new SimpleJsonType(JsonType.Type.STRING)) - .build(); - } } diff --git a/api/src/test/java/io/kafbat/ui/AbstractActiveDirectoryIntegrationTest.java b/api/src/test/java/io/kafbat/ui/AbstractActiveDirectoryIntegrationTest.java new file mode 100644 index 000000000..098da29f9 --- /dev/null +++ b/api/src/test/java/io/kafbat/ui/AbstractActiveDirectoryIntegrationTest.java @@ -0,0 +1,83 @@ +package io.kafbat.ui; + +import static io.kafbat.ui.AbstractIntegrationTest.LOCAL; +import static io.kafbat.ui.container.ActiveDirectoryContainer.EMPTY_PERMISSIONS_USER; +import static io.kafbat.ui.container.ActiveDirectoryContainer.FIRST_USER_WITH_GROUP; +import static io.kafbat.ui.container.ActiveDirectoryContainer.PASSWORD; +import static io.kafbat.ui.container.ActiveDirectoryContainer.SECOND_USER_WITH_GROUP; +import static io.kafbat.ui.container.ActiveDirectoryContainer.USER_WITHOUT_GROUP; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import io.kafbat.ui.model.AuthenticationInfoDTO; +import io.kafbat.ui.model.ResourceTypeDTO; +import io.kafbat.ui.model.UserPermissionDTO; +import java.util.List; +import java.util.Objects; +import org.springframework.boot.test.autoconfigure.web.reactive.AutoConfigureWebTestClient; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.http.MediaType; +import org.springframework.test.context.ActiveProfiles; +import org.springframework.test.web.reactive.server.WebTestClient; +import org.springframework.web.reactive.function.BodyInserters; + +@SpringBootTest +@ActiveProfiles("rbac-ad") +@AutoConfigureWebTestClient(timeout = "60000") +public abstract class AbstractActiveDirectoryIntegrationTest { + private static final String SESSION = "SESSION"; + + protected static void checkUserPermissions(WebTestClient client) { + AuthenticationInfoDTO info = authenticationInfo(client, FIRST_USER_WITH_GROUP); + + assertNotNull(info); + assertTrue(info.getRbacEnabled()); + + List permissions = info.getUserInfo().getPermissions(); + + assertFalse(permissions.isEmpty()); + assertTrue(permissions.stream().anyMatch(permission -> + permission.getClusters().contains(LOCAL) && permission.getResource() == ResourceTypeDTO.TOPIC)); + assertEquals(permissions, authenticationInfo(client, SECOND_USER_WITH_GROUP).getUserInfo().getPermissions()); + assertEquals(permissions, authenticationInfo(client, USER_WITHOUT_GROUP).getUserInfo().getPermissions()); + } + + protected static void checkEmptyPermissions(WebTestClient client) { + assertTrue(Objects.requireNonNull(authenticationInfo(client, EMPTY_PERMISSIONS_USER)) + .getUserInfo() + .getPermissions() + .isEmpty() + ); + } + + protected static String session(WebTestClient client, String name) { + return Objects.requireNonNull( + client + .post() + .uri("/login") + .contentType(MediaType.APPLICATION_FORM_URLENCODED) + 
diff --git a/api/src/test/java/io/kafbat/ui/AbstractIntegrationTest.java b/api/src/test/java/io/kafbat/ui/AbstractIntegrationTest.java
index 554387a1a..9e9e903cc 100644
--- a/api/src/test/java/io/kafbat/ui/AbstractIntegrationTest.java
+++ b/api/src/test/java/io/kafbat/ui/AbstractIntegrationTest.java
@@ -5,6 +5,7 @@
 import io.kafbat.ui.container.KafkaConnectContainer;
 import io.kafbat.ui.container.KsqlDbContainer;
 import io.kafbat.ui.container.SchemaRegistryContainer;
+import java.io.FileNotFoundException;
 import java.nio.file.Path;
 import java.util.List;
 import java.util.Properties;
@@ -22,6 +23,7 @@
 import org.springframework.test.context.ActiveProfiles;
 import org.springframework.test.context.ContextConfiguration;
 import org.springframework.test.util.TestSocketUtils;
+import org.springframework.util.ResourceUtils;
 import org.testcontainers.containers.KafkaContainer;
 import org.testcontainers.containers.Network;
 import org.testcontainers.utility.DockerImageName;
@@ -35,10 +37,7 @@ public abstract class AbstractIntegrationTest {
   public static final String LOCAL = "local";
   public static final String SECOND_LOCAL = "secondLocal";
 
-  private static final boolean IS_ARM =
-      System.getProperty("os.arch").contains("arm") || System.getProperty("os.arch").contains("aarch64");
-
-  private static final String CONFLUENT_PLATFORM_VERSION = IS_ARM ? "7.2.1.arm64" : "7.2.1";
+  private static final String CONFLUENT_PLATFORM_VERSION = "7.8.0";
 
   public static final KafkaContainer kafka = new KafkaContainer(
       DockerImageName.parse("confluentinc/cp-kafka").withTag(CONFLUENT_PLATFORM_VERSION))
@@ -75,6 +74,18 @@ public static class Initializer
     public void initialize(@NotNull ConfigurableApplicationContext context) {
       System.setProperty("kafka.clusters.0.name", LOCAL);
       System.setProperty("kafka.clusters.0.bootstrapServers", kafka.getBootstrapServers());
+
+      // Add ProtobufFileSerde configuration
+      System.setProperty("kafka.clusters.0.serde.0.name", "ProtobufFile");
+      System.setProperty("kafka.clusters.0.serde.0.topicValuesPattern", "masking-test-.*");
+      try {
+        System.setProperty("kafka.clusters.0.serde.0.properties.protobufFilesDir",
+            ResourceUtils.getFile("classpath:protobuf-serde").getAbsolutePath());
+      } catch (FileNotFoundException e) {
+        throw new RuntimeException(e);
+      }
+      System.setProperty("kafka.clusters.0.serde.0.properties.protobufMessageName", "test.MessageWithAny");
+
       // List unavailable hosts to verify failover
       System.setProperty("kafka.clusters.0.schemaRegistry",
           String.format("http://localhost:%1$s,http://localhost:%1$s,%2$s",
diff --git a/api/src/test/java/io/kafbat/ui/ActiveDirectoryLdapTest.java b/api/src/test/java/io/kafbat/ui/ActiveDirectoryLdapTest.java
new file mode 100644
index 000000000..4d32513f7
--- /dev/null
+++ b/api/src/test/java/io/kafbat/ui/ActiveDirectoryLdapTest.java
@@ -0,0 +1,51 @@
+package io.kafbat.ui;
+
+import static io.kafbat.ui.container.ActiveDirectoryContainer.DOMAIN;
+
+import io.kafbat.ui.container.ActiveDirectoryContainer;
+import org.jetbrains.annotations.NotNull;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.ApplicationContextInitializer;
+import org.springframework.context.ConfigurableApplicationContext;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.web.reactive.server.WebTestClient;
+
+@ContextConfiguration(initializers = {ActiveDirectoryLdapTest.Initializer.class})
+public class ActiveDirectoryLdapTest extends AbstractActiveDirectoryIntegrationTest {
+  private static final ActiveDirectoryContainer ACTIVE_DIRECTORY = new ActiveDirectoryContainer(false);
+
+  @Autowired
+  private WebTestClient webTestClient;
+
+  @BeforeAll
+  public static void setup() {
+    ACTIVE_DIRECTORY.start();
+  }
+
+  @AfterAll
+  public static void shutdown() {
+    ACTIVE_DIRECTORY.stop();
+  }
+
+  @Test
+  public void testUserPermissions() {
+    checkUserPermissions(webTestClient);
+  }
+
+  @Test
+  public void testEmptyPermissions() {
+    checkEmptyPermissions(webTestClient);
+  }
+
+  public static class Initializer implements ApplicationContextInitializer<ConfigurableApplicationContext> {
+    @Override
+    public void initialize(@NotNull ConfigurableApplicationContext context) {
+      System.setProperty("spring.ldap.urls", ACTIVE_DIRECTORY.getLdapUrl());
+      System.setProperty("oauth2.ldap.activeDirectory", "true");
+      System.setProperty("oauth2.ldap.activeDirectory.domain", DOMAIN);
+    }
+  }
+}
diff --git a/api/src/test/java/io/kafbat/ui/ActiveDirectoryLdapsTest.java b/api/src/test/java/io/kafbat/ui/ActiveDirectoryLdapsTest.java
new file mode 100644
index 000000000..86d46961d
--- /dev/null
+++ b/api/src/test/java/io/kafbat/ui/ActiveDirectoryLdapsTest.java
@@ -0,0 +1,110 @@
+package io.kafbat.ui;
+
+import static io.kafbat.ui.container.ActiveDirectoryContainer.CONTAINER_CERT_PATH;
+import static io.kafbat.ui.container.ActiveDirectoryContainer.CONTAINER_KEY_PATH;
+import static io.kafbat.ui.container.ActiveDirectoryContainer.DOMAIN;
+import static io.kafbat.ui.container.ActiveDirectoryContainer.PASSWORD;
+import static java.nio.file.Files.writeString;
+import static org.testcontainers.utility.MountableFile.forHostPath;
+
+import io.kafbat.ui.container.ActiveDirectoryContainer;
+import java.io.File;
+import java.io.StringWriter;
+import java.net.InetAddress;
+import java.security.KeyPair;
+import java.security.PrivateKey;
+import java.security.cert.X509Certificate;
+import java.util.Map;
+import org.apache.kafka.common.config.types.Password;
+import org.apache.kafka.test.TestSslUtils;
+import org.jetbrains.annotations.NotNull;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.ApplicationContextInitializer;
+import org.springframework.context.ConfigurableApplicationContext;
+import org.springframework.test.context.ContextConfiguration;
+import org.springframework.test.web.reactive.server.WebTestClient;
+import org.testcontainers.shaded.org.bouncycastle.openssl.jcajce.JcaMiscPEMGenerator;
+import org.testcontainers.shaded.org.bouncycastle.openssl.jcajce.JcaPKCS8Generator;
+import org.testcontainers.shaded.org.bouncycastle.util.io.pem.PemWriter;
+
+@ContextConfiguration(initializers = {ActiveDirectoryLdapsTest.Initializer.class})
+public class ActiveDirectoryLdapsTest extends AbstractActiveDirectoryIntegrationTest {
+  private static final ActiveDirectoryContainer ACTIVE_DIRECTORY = new ActiveDirectoryContainer(true);
+
+  private static File certPem = null;
+  private static File privateKeyPem = null;
+
+  @Autowired
+  private WebTestClient webTestClient;
+
+  @BeforeAll
+  public static void setup() throws Exception {
+    generateCerts();
+
+    ACTIVE_DIRECTORY.withCopyFileToContainer(forHostPath(certPem.getAbsolutePath()), CONTAINER_CERT_PATH);
+    ACTIVE_DIRECTORY.withCopyFileToContainer(forHostPath(privateKeyPem.getAbsolutePath()), CONTAINER_KEY_PATH);
+
+    ACTIVE_DIRECTORY.start();
+  }
+
+  @AfterAll
+  public static void shutdown() {
+    ACTIVE_DIRECTORY.stop();
+  }
+
+  @Test
+  public void testUserPermissions() {
+    checkUserPermissions(webTestClient);
+  }
+
+  @Test
+  public void testEmptyPermissions() {
+    checkEmptyPermissions(webTestClient);
+  }
+
+  private static void generateCerts() throws Exception {
+    File truststore = File.createTempFile("truststore", ".jks");
+
+    truststore.deleteOnExit();
+
+    String host = "localhost";
+    KeyPair clientKeyPair = TestSslUtils.generateKeyPair("RSA");
+
+    X509Certificate clientCert = new TestSslUtils.CertificateBuilder(365, "SHA256withRSA")
+        .sanDnsNames(host)
+        .sanIpAddress(InetAddress.getByName(host))
+        .generate("O=Samba Administration, OU=Samba, CN=" + host, clientKeyPair);
+
+    TestSslUtils.createTrustStore(truststore.getPath(), new Password(PASSWORD), Map.of("client", clientCert));
+
+    certPem = File.createTempFile("cert", ".pem");
+    writeString(certPem.toPath(), certOrKeyToString(clientCert));
+
+    privateKeyPem = File.createTempFile("key", ".pem");
+    writeString(privateKeyPem.toPath(), certOrKeyToString(clientKeyPair.getPrivate()));
+  }
+
+  private static String certOrKeyToString(Object certOrKey) throws Exception {
+    StringWriter sw = new StringWriter();
+    try (PemWriter pw = new PemWriter(sw)) {
+      if (certOrKey instanceof X509Certificate) {
+        pw.writeObject(new JcaMiscPEMGenerator(certOrKey));
+      } else {
+        pw.writeObject(new JcaPKCS8Generator((PrivateKey) certOrKey, null));
+      }
+    }
+    return sw.toString();
+  }
+
+  public static class Initializer implements ApplicationContextInitializer<ConfigurableApplicationContext> {
+    @Override
+    public void initialize(@NotNull ConfigurableApplicationContext context) {
+      System.setProperty("spring.ldap.urls", ACTIVE_DIRECTORY.getLdapUrl());
+      System.setProperty("oauth2.ldap.activeDirectory", "true");
+      System.setProperty("oauth2.ldap.activeDirectory.domain", DOMAIN);
+    }
+  }
+}
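Both AD tests wire these settings through system properties; against a real deployment the same keys would live in application config (flat property form, with a hypothetical domain-controller host in place of the Testcontainers-mapped URL):

spring.ldap.urls: ldaps://dc.corp.kafbat.io:636
oauth2.ldap.activeDirectory: true
oauth2.ldap.activeDirectory.domain: corp.kafbat.io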
diff --git a/api/src/test/java/io/kafbat/ui/KafkaConnectServiceTests.java b/api/src/test/java/io/kafbat/ui/KafkaConnectServiceTests.java
index 56ae1430e..82d7a3041 100644
--- a/api/src/test/java/io/kafbat/ui/KafkaConnectServiceTests.java
+++ b/api/src/test/java/io/kafbat/ui/KafkaConnectServiceTests.java
@@ -4,6 +4,7 @@
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.junit.jupiter.api.Assertions.assertEquals;
 
+import io.kafbat.ui.api.model.ErrorResponse;
 import io.kafbat.ui.model.ConnectorDTO;
 import io.kafbat.ui.model.ConnectorPluginConfigDTO;
 import io.kafbat.ui.model.ConnectorPluginConfigValidationResponseDTO;
@@ -26,7 +27,7 @@
 import org.springframework.test.web.reactive.server.WebTestClient;
 
 @Slf4j
-public class KafkaConnectServiceTests extends AbstractIntegrationTest {
+class KafkaConnectServiceTests extends AbstractIntegrationTest {
   private final String connectName = "kafka-connect";
   private final String connectorName = UUID.randomUUID().toString();
   private final Map<String, Object> config = Map.of(
@@ -43,7 +44,8 @@ public class KafkaConnectServiceTests extends AbstractIntegrationTest {
 
   @BeforeEach
-  public void setUp() {
+  void setUp() {
+
     webTestClient.post()
         .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors", LOCAL, connectName)
         .bodyValue(new NewConnectorDTO()
@@ -53,15 +55,14 @@ public void setUp() {
             "tasks.max", "1",
             "topics", "output-topic",
             "file", "/tmp/test",
-            "test.password", "test-credentials"
-        ))
-    )
+            "test.password", "test-credentials")))
         .exchange()
         .expectStatus().isOk();
+
   }
 
   @AfterEach
-  public void tearDown() {
+  void tearDown() {
     webTestClient.delete()
        .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}",
            LOCAL, connectName, connectorName)
@@ -70,7 +71,7 @@ public void tearDown() {
   }
 
   @Test
-  public void shouldListAllConnectors() {
+  void shouldListAllConnectors() {
     webTestClient.get()
         .uri("/api/clusters/{clusterName}/connectors", LOCAL)
         .exchange()
@@ -81,7 +82,7 @@ public void shouldListAllConnectors() {
   }
 
   @Test
-  public void shouldFilterByNameConnectors() {
+  void shouldFilterByNameConnectors() {
     webTestClient.get()
         .uri(
             "/api/clusters/{clusterName}/connectors?search={search}",
@@ -95,7 +96,7 @@ public void shouldFilterByNameConnectors() {
   }
 
   @Test
-  public void shouldFilterByStatusConnectors() {
+  void shouldFilterByStatusConnectors() {
     webTestClient.get()
         .uri(
             "/api/clusters/{clusterName}/connectors?search={search}",
@@ -109,7 +110,7 @@ public void shouldFilterByStatusConnectors() {
   }
 
   @Test
-  public void shouldFilterByTypeConnectors() {
+  void shouldFilterByTypeConnectors() {
     webTestClient.get()
         .uri(
             "/api/clusters/{clusterName}/connectors?search={search}",
@@ -123,7 +124,7 @@ public void shouldFilterByTypeConnectors() {
   }
 
   @Test
-  public void shouldNotFilterConnectors() {
+  void shouldNotFilterConnectors() {
     webTestClient.get()
         .uri(
             "/api/clusters/{clusterName}/connectors?search={search}",
@@ -137,7 +138,7 @@ public void shouldNotFilterConnectors() {
   }
 
   @Test
-  public void shouldListConnectors() {
+  void shouldListConnectors() {
     webTestClient.get()
         .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors", LOCAL, connectName)
         .exchange()
@@ -147,7 +148,7 @@ public void shouldListConnectors() {
   }
 
   @Test
-  public void shouldReturnNotFoundForNonExistingCluster() {
+  void shouldReturnNotFoundForNonExistingCluster() {
     webTestClient.get()
         .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors",
             "nonExistingCluster", connectName)
@@ -156,7 +157,7 @@ public void shouldReturnNotFoundForNonExistingCluster() {
   }
 
   @Test
-  public void shouldReturnNotFoundForNonExistingConnectName() {
+  void shouldReturnNotFoundForNonExistingConnectName() {
     webTestClient.get()
         .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors",
             LOCAL, "nonExistingConnect")
@@ -165,8 +166,8 @@ public void shouldReturnNotFoundForNonExistingConnectName() {
   }
 
   @Test
-  public void shouldRetrieveConnector() {
-    ConnectorDTO expected = (ConnectorDTO) new ConnectorDTO()
+  void shouldRetrieveConnector() {
+    ConnectorDTO expected = new ConnectorDTO()
         .connect(connectName)
         .status(new ConnectorStatusDTO()
             .state(ConnectorStateDTO.RUNNING)
@@ -187,7 +188,7 @@ public void shouldRetrieveConnector() {
   }
 
   @Test
-  public void shouldUpdateConfig() {
+  void shouldUpdateConfig() {
     webTestClient.put()
         .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}/config",
             LOCAL, connectName, connectorName)
@@ -218,7 +219,7 @@ public void shouldUpdateConfig() {
   }
 
   @Test
-  public void shouldReturn400WhenConnectReturns400ForInvalidConfigCreate() {
+  void shouldReturn400WhenConnectReturns400ForInvalidConfigCreate() {
     var connectorName = UUID.randomUUID().toString();
     webTestClient.post()
         .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors", LOCAL, connectName)
@@ -244,7 +245,7 @@ public void shouldReturn400WhenConnectReturns400ForInvalidConfigCreate() {
   }
 
   @Test
-  public void shouldReturn400WhenConnectReturns500ForInvalidConfigCreate() {
+  void shouldReturn400WhenConnectReturns500ForInvalidConfigCreate() {
     var connectorName = UUID.randomUUID().toString();
     webTestClient.post()
         .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors", LOCAL, connectName)
@@ -268,19 +269,28 @@
 
   @Test
-  public void shouldReturn400WhenConnectReturns400ForInvalidConfigUpdate() {
+  @SuppressWarnings("checkstyle:LineLength")
+  void shouldReturn400WhenConnectReturns400ForInvalidConfigUpdate() {
     webTestClient.put()
         .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}/config",
             LOCAL, connectName, connectorName)
         .bodyValue(Map.of(
-            "connector.class", "org.apache.kafka.connect.file.FileStreamSinkConnector",
-            "tasks.max", "invalid number",
-            "topics", "another-topic",
-            "file", "/tmp/test"
+                "connector.class", "org.apache.kafka.connect.file.FileStreamSinkConnector",
+                "tasks.max", "invalid number",
+                "topics", "another-topic",
+                "file", "/tmp/test"
             )
         )
         .exchange()
-        .expectStatus().isBadRequest();
+        .expectStatus().isBadRequest()
+        .expectBody(ErrorResponse.class)
+        .value(response -> assertThat(response.getMessage()).isEqualTo(
+            """
+                Connector configuration is invalid and contains the following 2 error(s):
+                Invalid value invalid number for configuration tasks.max: Not a number of type INT
+                Invalid value null for configuration tasks.max: Value must be non-null
+                You can also find the above list of errors at the endpoint `/connector-plugins/{connectorType}/config/validate`"""
+        ));
 
     webTestClient.get()
         .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}/config",
@@ -300,7 +310,7 @@ public void shouldReturn400WhenConnectReturns400ForInvalidConfigUpdate() {
   }
 
   @Test
-  public void shouldReturn400WhenConnectReturns500ForInvalidConfigUpdate() {
+  void shouldReturn400WhenConnectReturns500ForInvalidConfigUpdate() {
     webTestClient.put()
         .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}/config",
             LOCAL, connectName, connectorName)
@@ -329,7 +339,7 @@ public void shouldReturn400WhenConnectReturns500ForInvalidConfigUpdate() {
   }
 
   @Test
-  public void shouldRetrieveConnectorPlugins() {
+  void shouldRetrieveConnectorPlugins() {
     webTestClient.get()
         .uri("/api/clusters/{clusterName}/connects/{connectName}/plugins", LOCAL, connectName)
         .exchange()
@@ -339,7 +349,7 @@ public void shouldRetrieveConnectorPlugins() {
   }
 
   @Test
-  public void shouldSuccessfullyValidateConnectorPluginConfiguration() {
+  void shouldSuccessfullyValidateConnectorPluginConfiguration() {
     var pluginName = "FileStreamSinkConnector";
     var path =
         "/api/clusters/{clusterName}/connects/{connectName}/plugins/{pluginName}/config/validate";
@@ -360,7 +370,7 @@ public void shouldSuccessfullyValidateConnectorPluginConfiguration() {
   }
 
   @Test
-  public void shouldValidateAndReturnErrorsOfConnectorPluginConfiguration() {
+  void shouldValidateAndReturnErrorsOfConnectorPluginConfiguration() {
     var pluginName = "FileStreamSinkConnector";
     var path =
         "/api/clusters/{clusterName}/connects/{connectName}/plugins/{pluginName}/config/validate";
@@ -383,16 +393,16 @@ public void shouldValidateAndReturnErrorsOfConnectorPluginConfiguration() {
               .map(ConnectorPluginConfigDTO::getValue)
               .map(ConnectorPluginConfigValueDTO::getErrors)
               .filter(not(List::isEmpty))
-              .findFirst().get();
+              .findFirst().orElseThrow();
           assertEquals(
               "Invalid value 0 for configuration tasks.max: Value must be at least 1",
-              error.get(0)
+              error.getFirst()
           );
         });
   }
 
   @Test
-  public void shouldReturn400WhenTryingToCreateConnectorWithExistingName() {
+  void shouldReturn400WhenTryingToCreateConnectorWithExistingName() {
     webTestClient.post()
         .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors", LOCAL, connectName)
         .bodyValue(new NewConnectorDTO()
@@ -408,4 +418,56 @@ public void shouldReturn400WhenTryingToCreateConnectorWithExistingName() {
         .expectStatus()
         .isBadRequest();
   }
+
+  @Test
+  void shouldResetConnectorWhenInStoppedState() {
+
+    webTestClient.get()
+        .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}",
+            LOCAL, connectName, connectorName)
+        .exchange()
+        .expectStatus().isOk()
+        .expectBody(ConnectorDTO.class)
+        .value(connector -> assertThat(connector.getStatus().getState()).isEqualTo(ConnectorStateDTO.RUNNING));
+
+    webTestClient.post()
+        .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}/action/STOP",
+            LOCAL, connectName, connectorName)
+        .exchange()
+        .expectStatus().isOk();
+
+    webTestClient.get()
+        .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}",
+            LOCAL, connectName, connectorName)
+        .exchange()
+        .expectStatus().isOk()
+        .expectBody(ConnectorDTO.class)
+        .value(connector -> assertThat(connector.getStatus().getState()).isEqualTo(ConnectorStateDTO.STOPPED));
+
+    webTestClient.delete()
+        .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}/offsets",
+            LOCAL, connectName, connectorName)
+        .exchange()
+        .expectStatus().isOk();
+
+  }
+
+  @Test
+  void shouldReturn400WhenResettingConnectorInRunningState() {
+
+    webTestClient.get()
+        .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}",
+            LOCAL, connectName, connectorName)
+        .exchange()
+        .expectStatus().isOk()
+        .expectBody(ConnectorDTO.class)
+        .value(connector -> assertThat(connector.getStatus().getState()).isEqualTo(ConnectorStateDTO.RUNNING));
+
+    webTestClient.delete()
+        .uri("/api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}/offsets", LOCAL,
+            connectName, connectorName)
+        .exchange()
+        .expectStatus().isBadRequest();
+
+  }
 }
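The two offset-reset tests added above lean on connector offset management from KIP-875 (available in Kafka Connect 3.6+): offsets may only be deleted once the connector has been transitioned to STOPPED. A sketch of the underlying Connect REST calls the UI endpoints proxy to, shown for orientation:

POST   .../connectors/{name}/action/STOP   ->  PUT /connectors/{name}/stop        (tasks torn down, offsets retained)
DELETE .../connectors/{name}/offsets       ->  DELETE /connectors/{name}/offsets  (rejected unless STOPPED, surfaced as 400 above)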
diff --git a/api/src/test/java/io/kafbat/ui/KafkaConsumerGroupTests.java b/api/src/test/java/io/kafbat/ui/KafkaConsumerGroupTests.java
index b1bf4baa7..c23ea5fb0 100644
--- a/api/src/test/java/io/kafbat/ui/KafkaConsumerGroupTests.java
+++ b/api/src/test/java/io/kafbat/ui/KafkaConsumerGroupTests.java
@@ -27,7 +27,7 @@
 import reactor.core.publisher.Mono;
 
 @Slf4j
-public class KafkaConsumerGroupTests extends AbstractIntegrationTest {
+class KafkaConsumerGroupTests extends AbstractIntegrationTest {
   @Autowired
   WebTestClient webTestClient;
 
diff --git a/api/src/test/java/io/kafbat/ui/KafkaConsumerTests.java b/api/src/test/java/io/kafbat/ui/KafkaConsumerTests.java
index 6eeada1b6..97ab381e6 100644
--- a/api/src/test/java/io/kafbat/ui/KafkaConsumerTests.java
+++ b/api/src/test/java/io/kafbat/ui/KafkaConsumerTests.java
@@ -13,6 +13,7 @@
 import io.kafbat.ui.producer.KafkaTestProducer;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.UUID;
 import java.util.stream.Stream;
 import lombok.extern.slf4j.Slf4j;
@@ -24,14 +25,14 @@
 import reactor.core.publisher.Mono;
 
 @Slf4j
-public class KafkaConsumerTests extends AbstractIntegrationTest {
+class KafkaConsumerTests extends AbstractIntegrationTest {
 
   @Autowired
   private WebTestClient webTestClient;
 
   @Test
-  public void shouldDeleteRecords() {
+  void shouldDeleteRecords() {
     var topicName = UUID.randomUUID().toString();
     webTestClient.post()
         .uri("/api/clusters/{clusterName}/topics", LOCAL)
@@ -55,15 +56,17 @@ public void shouldDeleteRecords() {
       throw new RuntimeException(e);
     }
 
-    long count = webTestClient.get()
-        .uri("/api/clusters/{clusterName}/topics/{topicName}/messages/v2?mode=EARLIEST", LOCAL, topicName)
-        .accept(TEXT_EVENT_STREAM)
-        .exchange()
-        .expectStatus()
-        .isOk()
-        .expectBodyList(TopicMessageEventDTO.class)
-        .returnResult()
-        .getResponseBody()
+    long count = Objects.requireNonNull(
+        webTestClient.get()
+            .uri("/api/clusters/{clusterName}/topics/{topicName}/messages/v2?mode=EARLIEST", LOCAL, topicName)
+            .accept(TEXT_EVENT_STREAM)
+            .exchange()
+            .expectStatus()
+            .isOk()
+            .expectBodyList(TopicMessageEventDTO.class)
+            .returnResult()
+            .getResponseBody()
+    )
         .stream()
         .filter(e -> e.getType().equals(TopicMessageEventDTO.TypeEnum.MESSAGE))
         .count();
@@ -76,14 +79,16 @@ public void shouldDeleteRecords() {
         .expectStatus()
         .isOk();
 
-    count = webTestClient.get()
-        .uri("/api/clusters/{clusterName}/topics/{topicName}/messages/v2?mode=EARLIEST", LOCAL, topicName)
-        .exchange()
-        .expectStatus()
-        .isOk()
-        .expectBodyList(TopicMessageEventDTO.class)
-        .returnResult()
-        .getResponseBody()
+    count = Objects.requireNonNull(
+        webTestClient.get()
+            .uri("/api/clusters/{clusterName}/topics/{topicName}/messages/v2?mode=EARLIEST", LOCAL, topicName)
+            .exchange()
+            .expectStatus()
+            .isOk()
+            .expectBodyList(TopicMessageEventDTO.class)
+            .returnResult()
+            .getResponseBody()
+    )
         .stream()
         .filter(e -> e.getType().equals(TopicMessageEventDTO.TypeEnum.MESSAGE))
        .count();
@@ -92,7 +97,7 @@ public void shouldDeleteRecords() {
   }
 
   @Test
-  public void shouldIncreasePartitionsUpTo10() {
+  void shouldIncreasePartitionsUpTo10() {
     var topicName = UUID.randomUUID().toString();
     webTestClient.post()
         .uri("/api/clusters/{clusterName}/topics", LOCAL)
@@ -120,7 +125,7 @@ public void shouldIncreasePartitionsUpTo10() {
         .returnResult()
         .getResponseBody();
 
-    assert response != null;
+    Assertions.assertNotNull(response);
     Assertions.assertEquals(10, response.getTotalPartitionsCount());
 
     TopicDetailsDTO topicDetails = webTestClient.get()
@@ -134,12 +139,12 @@ public void shouldIncreasePartitionsUpTo10() {
         .returnResult()
         .getResponseBody();
 
-    assert topicDetails != null;
+    Assertions.assertNotNull(topicDetails);
     Assertions.assertEquals(10, topicDetails.getPartitionCount());
   }
 
   @Test
-  public void shouldReturn404ForNonExistingTopic() {
+  void shouldReturn404ForNonExistingTopic() {
     var topicName = UUID.randomUUID().toString();
 
     webTestClient.delete()
@@ -156,9 +161,7 @@ public void shouldReturn404ForNonExistingTopic() {
   }
 
   @Test
-  public void shouldReturnConfigsForBroker() {
-    var topicName = UUID.randomUUID().toString();
-
+  void shouldReturnConfigsForBroker() {
     List<BrokerConfigDTO> configs = webTestClient.get()
         .uri("/api/clusters/{clusterName}/brokers/{id}/configs",
             LOCAL,
@@ -171,16 +174,17 @@ public void shouldReturnConfigsForBroker() {
         .getResponseBody();
 
     Assertions.assertNotNull(configs);
-    assert !configs.isEmpty();
-    Assertions.assertNotNull(configs.get(0).getName());
-    Assertions.assertNotNull(configs.get(0).getIsReadOnly());
-    Assertions.assertNotNull(configs.get(0).getIsSensitive());
-    Assertions.assertNotNull(configs.get(0).getSource());
-    Assertions.assertNotNull(configs.get(0).getSynonyms());
+    Assertions.assertFalse(configs.isEmpty());
+    BrokerConfigDTO brokerConfigDto = configs.getFirst();
+    Assertions.assertNotNull(brokerConfigDto.getName());
+    Assertions.assertNotNull(brokerConfigDto.getIsReadOnly());
+    Assertions.assertNotNull(brokerConfigDto.getIsSensitive());
+    Assertions.assertNotNull(brokerConfigDto.getSource());
+    Assertions.assertNotNull(brokerConfigDto.getSynonyms());
   }
 
   @Test
-  public void shouldReturn404ForNonExistingBroker() {
+  void shouldReturn404ForNonExistingBroker() {
     webTestClient.get()
         .uri("/api/clusters/{clusterName}/brokers/{id}/configs",
             LOCAL,
@@ -191,7 +195,7 @@ public void shouldReturn404ForNonExistingBroker() {
   }
 
   @Test
-  public void shouldRetrieveTopicConfig() {
+  void shouldRetrieveTopicConfig() {
     var topicName = UUID.randomUUID().toString();
 
     webTestClient.post()
@@ -216,11 +220,12 @@ public void shouldRetrieveTopicConfig() {
         .getResponseBody();
 
     Assertions.assertNotNull(configs);
-    assert !configs.isEmpty();
-    Assertions.assertNotNull(configs.get(0).getName());
-    Assertions.assertNotNull(configs.get(0).getIsReadOnly());
-    Assertions.assertNotNull(configs.get(0).getIsSensitive());
-    Assertions.assertNotNull(configs.get(0).getSource());
-    Assertions.assertNotNull(configs.get(0).getSynonyms());
+    Assertions.assertFalse(configs.isEmpty());
+    TopicConfigDTO topicConfigDto = configs.getFirst();
+    Assertions.assertNotNull(topicConfigDto.getName());
+    Assertions.assertNotNull(topicConfigDto.getIsReadOnly());
+    Assertions.assertNotNull(topicConfigDto.getIsSensitive());
+    Assertions.assertNotNull(topicConfigDto.getSource());
+    Assertions.assertNotNull(topicConfigDto.getSynonyms());
   }
 }
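A side note on the recurring assertion rewrites in this and the following test files: List.getFirst() comes from the SequencedCollection interface added in Java 21, so swapping it in for get(0) (and findFirst().orElseThrow() for findFirst().get()) assumes the test sources now compile against JDK 21. A minimal illustration:

import java.util.List;

class GetFirstDemo {
  public static void main(String[] args) {
    List<String> configs = List.of("a", "b");
    // Same element either way; getFirst() simply requires Java 21+.
    System.out.println(configs.getFirst().equals(configs.get(0))); // true
  }
}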
diff --git a/api/src/test/java/io/kafbat/ui/KafkaTopicCreateTests.java b/api/src/test/java/io/kafbat/ui/KafkaTopicCreateTests.java
index fcc5a96fb..f4c5ca0b8 100644
--- a/api/src/test/java/io/kafbat/ui/KafkaTopicCreateTests.java
+++ b/api/src/test/java/io/kafbat/ui/KafkaTopicCreateTests.java
@@ -7,13 +7,13 @@
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.test.web.reactive.server.WebTestClient;
 
-public class KafkaTopicCreateTests extends AbstractIntegrationTest {
+class KafkaTopicCreateTests extends AbstractIntegrationTest {
   @Autowired
   private WebTestClient webTestClient;
   private TopicCreationDTO topicCreation;
 
   @BeforeEach
-  public void setUpBefore() {
+  void setUpBefore() {
     this.topicCreation = new TopicCreationDTO()
         .replicationFactor(1)
         .partitions(3)
diff --git a/api/src/test/java/io/kafbat/ui/ReadOnlyModeTests.java b/api/src/test/java/io/kafbat/ui/ReadOnlyModeTests.java
index e6525b14a..df59c6c35 100644
--- a/api/src/test/java/io/kafbat/ui/ReadOnlyModeTests.java
+++ b/api/src/test/java/io/kafbat/ui/ReadOnlyModeTests.java
@@ -9,13 +9,13 @@
 import org.springframework.http.HttpStatus;
 import org.springframework.test.web.reactive.server.WebTestClient;
 
-public class ReadOnlyModeTests extends AbstractIntegrationTest {
+class ReadOnlyModeTests extends AbstractIntegrationTest {
 
   @Autowired
   private WebTestClient webTestClient;
 
   @Test
-  public void shouldCreateTopicForNonReadonlyCluster() {
+  void shouldCreateTopicForNonReadonlyCluster() {
     var topicName = UUID.randomUUID().toString();
     webTestClient.post()
         .uri("/api/clusters/{clusterName}/topics", LOCAL)
@@ -31,7 +31,7 @@ public void shouldCreateTopicForNonReadonlyCluster() {
   }
 
   @Test
-  public void shouldNotCreateTopicForReadonlyCluster() {
+  void shouldNotCreateTopicForReadonlyCluster() {
     var topicName = UUID.randomUUID().toString();
     webTestClient.post()
         .uri("/api/clusters/{clusterName}/topics", SECOND_LOCAL)
@@ -47,7 +47,7 @@ public void shouldNotCreateTopicForReadonlyCluster() {
   }
 
   @Test
-  public void shouldUpdateTopicForNonReadonlyCluster() {
+  void shouldUpdateTopicForNonReadonlyCluster() {
     var topicName = UUID.randomUUID().toString();
     webTestClient.post()
         .uri("/api/clusters/{clusterName}/topics", LOCAL)
@@ -73,7 +73,7 @@ public void shouldUpdateTopicForNonReadonlyCluster() {
   }
 
   @Test
-  public void shouldNotUpdateTopicForReadonlyCluster() {
+  void shouldNotUpdateTopicForReadonlyCluster() {
     var topicName = UUID.randomUUID().toString();
     webTestClient.patch()
         .uri("/api/clusters/{clusterName}/topics/{topicName}", SECOND_LOCAL, topicName)
diff --git a/api/src/test/java/io/kafbat/ui/SchemaRegistryServiceTests.java b/api/src/test/java/io/kafbat/ui/SchemaRegistryServiceTests.java
index 2653b7bac..a63a61b60 100644
--- a/api/src/test/java/io/kafbat/ui/SchemaRegistryServiceTests.java
+++ b/api/src/test/java/io/kafbat/ui/SchemaRegistryServiceTests.java
@@ -32,12 +32,12 @@ class SchemaRegistryServiceTests extends AbstractIntegrationTest {
   String subject;
 
   @BeforeEach
-  public void setUpBefore() {
+  void setUpBefore() {
     this.subject = UUID.randomUUID().toString();
   }
 
   @Test
-  public void should404WhenGetAllSchemasForUnknownCluster() {
+  void should404WhenGetAllSchemasForUnknownCluster() {
     webTestClient
         .get()
         .uri("/api/clusters/unknown-cluster/schemas")
@@ -46,7 +46,7 @@ public void should404WhenGetAllSchemasForUnknownCluster() {
   }
 
   @Test
-  public void shouldReturn404WhenGetLatestSchemaByNonExistingSubject() {
+  void shouldReturn404WhenGetLatestSchemaByNonExistingSubject() {
     String unknownSchema = "unknown-schema";
 
     webTestClient
         .get()
@@ -244,7 +244,7 @@ void shouldCreateNewProtobufSchemaWithRefs() {
   }
 
   @Test
-  public void shouldReturnBackwardAsGlobalCompatibilityLevelByDefault() {
+  void shouldReturnBackwardAsGlobalCompatibilityLevelByDefault() {
     webTestClient
         .get()
         .uri("/api/clusters/{clusterName}/schemas/compatibility", LOCAL)
@@ -260,7 +260,7 @@ public void shouldReturnBackwardAsGlobalCompatibilityLevelByDefault() {
   }
 
   @Test
-  public void shouldReturnNotEmptyResponseWhenGetAllSchemas() {
+  void shouldReturnNotEmptyResponseWhenGetAllSchemas() {
     createNewSubjectAndAssert(subject);
 
     webTestClient
@@ -287,7 +287,7 @@ public void shouldReturnNotEmptyResponseWhenGetAllSchemas() {
   }
 
   @Test
-  public void shouldOkWhenCreateNewSchemaThenGetAndUpdateItsCompatibilityLevel() {
+  void shouldOkWhenCreateNewSchemaThenGetAndUpdateItsCompatibilityLevel() {
     createNewSubjectAndAssert(subject);
 
     //Get the created schema and check its items
@@ -366,7 +366,7 @@ private void assertSchemaWhenGetLatest(
     List<SchemaSubjectDTO> responseBody = listEntityExchangeResult.getResponseBody();
     Assertions.assertNotNull(responseBody);
     Assertions.assertEquals(1, responseBody.size());
-    SchemaSubjectDTO actualSchema = responseBody.get(0);
+    SchemaSubjectDTO actualSchema = responseBody.getFirst();
     Assertions.assertNotNull(actualSchema);
     Assertions.assertEquals(subject, actualSchema.getSubject());
     Assertions.assertEquals("\"string\"", actualSchema.getSchema());
diff --git a/api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java b/api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java
new file mode 100644
index 000000000..11eec0ea4
--- /dev/null
+++ b/api/src/test/java/io/kafbat/ui/config/RegexBasedProviderAuthorityExtractorTest.java
@@ -0,0 +1,196 @@
+package io.kafbat.ui.config;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.mockito.Mockito.when;
+import static org.springframework.security.oauth2.client.registration.ClientRegistration.withRegistrationId;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
+import com.fasterxml.jackson.dataformat.yaml.YAMLMapper;
+import io.kafbat.ui.config.auth.OAuthProperties;
+import io.kafbat.ui.model.rbac.Role;
+import io.kafbat.ui.service.rbac.AccessControlService;
+import io.kafbat.ui.service.rbac.extractor.CognitoAuthorityExtractor;
+import io.kafbat.ui.service.rbac.extractor.GithubAuthorityExtractor;
+import io.kafbat.ui.service.rbac.extractor.GoogleAuthorityExtractor;
+import io.kafbat.ui.service.rbac.extractor.OauthAuthorityExtractor;
+import io.kafbat.ui.service.rbac.extractor.ProviderAuthorityExtractor;
+import io.kafbat.ui.util.AccessControlServiceMock;
+import java.io.IOException;
+import java.io.InputStream;
+import java.time.Instant;
+import java.time.temporal.ChronoUnit;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import lombok.SneakyThrows;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.springframework.security.core.authority.AuthorityUtils;
+import org.springframework.security.oauth2.client.userinfo.OAuth2UserRequest;
+import org.springframework.security.oauth2.core.AuthorizationGrantType;
+import org.springframework.security.oauth2.core.OAuth2AccessToken;
+import org.springframework.security.oauth2.core.user.DefaultOAuth2User;
+import org.springframework.security.oauth2.core.user.OAuth2User;
+
+public class RegexBasedProviderAuthorityExtractorTest {
+
+
+  private final AccessControlService accessControlService = new AccessControlServiceMock().getMock();
+  ProviderAuthorityExtractor extractor;
+
+  @BeforeEach
+  void setUp() throws IOException {
+
+    YAMLMapper mapper = new YAMLMapper();
+
+    InputStream rolesFile = this.getClass()
+        .getClassLoader()
+        .getResourceAsStream("roles_definition.yaml");
+
+    Role[] roles = mapper.readValue(rolesFile, Role[].class);
+
+    when(accessControlService.getRoles()).thenReturn(List.of(roles));
+
+  }
+
+  @SneakyThrows
+  @Test
+  void extractOauth2Authorities() {
+
+    extractor = new OauthAuthorityExtractor();
+
+    OAuth2User oauth2User = new DefaultOAuth2User(
+        AuthorityUtils.createAuthorityList("SCOPE_message:read"),
+        Map.of("role_definition", Set.of("ROLE-ADMIN", "ANOTHER-ROLE"), "user_name", "john@kafka.com"),
+        "user_name");
+
+    HashMap<String, Object> additionalParams = new HashMap<>();
+    OAuthProperties.OAuth2Provider provider = new OAuthProperties.OAuth2Provider();
+    provider.setCustomParams(Map.of("roles-field", "role_definition"));
+    additionalParams.put("provider", provider);
+
+    Set<String> roles = extractor.extract(accessControlService, oauth2User, additionalParams).block();
+
+    assertNotNull(roles);
+    assertEquals(Set.of("viewer", "admin"), roles);
+    assertFalse(roles.contains("no one's role"));
+
+  }
+
+  @SneakyThrows
+  @Test()
+  void extractOauth2Authorities_blankEmail() {
+
+    extractor = new OauthAuthorityExtractor();
+
+    OAuth2User oauth2User = new DefaultOAuth2User(
+        AuthorityUtils.createAuthorityList("SCOPE_message:read"),
+        Map.of("role_definition", Set.of("ROLE-ADMIN", "ANOTHER-ROLE"), "user_name", ""),
+        "user_name");
+
+    HashMap<String, Object> additionalParams = new HashMap<>();
+    OAuthProperties.OAuth2Provider provider = new OAuthProperties.OAuth2Provider();
+    provider.setCustomParams(Map.of("roles-field", "role_definition"));
+    additionalParams.put("provider", provider);
+
+    Set<String> roles = extractor.extract(accessControlService, oauth2User, additionalParams).block();
+
+    assertNotNull(roles);
+    assertFalse(roles.contains("viewer"));
+    assertTrue(roles.contains("admin"));
+
+  }
+
+  @SneakyThrows
+  @Test
+  void extractCognitoAuthorities() {
+
+    extractor = new CognitoAuthorityExtractor();
+
+    OAuth2User oauth2User = new DefaultOAuth2User(
+        AuthorityUtils.createAuthorityList("SCOPE_message:read"),
+        Map.of("cognito:groups", List.of("ROLE-ADMIN", "ANOTHER-ROLE"), "user_name", "john@kafka.com"),
+        "user_name");
+
+    HashMap<String, Object> additionalParams = new HashMap<>();
+
+    OAuthProperties.OAuth2Provider provider = new OAuthProperties.OAuth2Provider();
+    provider.setCustomParams(Map.of("roles-field", "role_definition"));
+    additionalParams.put("provider", provider);
+
+    Set<String> roles = extractor.extract(accessControlService, oauth2User, additionalParams).block();
+
+    assertNotNull(roles);
+    assertEquals(Set.of("viewer", "admin"), roles);
+    assertFalse(roles.contains("no one's role"));
+
+  }
+
+  @SneakyThrows
+  @Test
+  void extractGithubAuthorities() {
+
+    extractor = new GithubAuthorityExtractor();
+
+    OAuth2User oauth2User = new DefaultOAuth2User(
+        AuthorityUtils.createAuthorityList("SCOPE_message:read"),
+        Map.of("login", "john@kafka.com"),
+        "login");
+
+    HashMap<String, Object> additionalParams = new HashMap<>();
+
+    OAuthProperties.OAuth2Provider provider = new OAuthProperties.OAuth2Provider();
+    additionalParams.put("provider", provider);
+
+    additionalParams.put("request", new OAuth2UserRequest(
+        withRegistrationId("registration-1")
+            .clientId("client-1")
+            .clientSecret("secret")
+            .redirectUri("https://client.com")
+            .authorizationGrantType(AuthorizationGrantType.AUTHORIZATION_CODE)
+            .authorizationUri("https://provider.com/oauth2/authorization")
+            .tokenUri("https://provider.com/oauth2/token")
+            .clientName("Client 1")
+            .build(),
+        new OAuth2AccessToken(OAuth2AccessToken.TokenType.BEARER, "XXXX", Instant.now(),
+            Instant.now().plus(10, ChronoUnit.HOURS))));
+
+    Set<String> roles = extractor.extract(accessControlService, oauth2User, additionalParams).block();
+
+    assertNotNull(roles);
+    assertEquals(Set.of("viewer"), roles);
+    assertFalse(roles.contains("no one's role"));
+
+  }
+
+  @SneakyThrows
+  @Test
+  void extractGoogleAuthorities() {
+
+    extractor = new GoogleAuthorityExtractor();
+
+    OAuth2User oauth2User = new DefaultOAuth2User(
+        AuthorityUtils.createAuthorityList("SCOPE_message:read"),
+        Map.of("hd", "memelord.lol", "email", "john@kafka.com"),
+        "email");
+
+    HashMap<String, Object> additionalParams = new HashMap<>();
+
+    OAuthProperties.OAuth2Provider provider = new OAuthProperties.OAuth2Provider();
+    provider.setCustomParams(Map.of("roles-field", "role_definition"));
+    additionalParams.put("provider", provider);
+
+    Set<String> roles = extractor.extract(accessControlService, oauth2User, additionalParams).block();
+
+    assertNotNull(roles);
+    assertEquals(Set.of("viewer", "admin"), roles);
+    assertFalse(roles.contains("no one's role"));
+
+  }
+
+}
diff --git a/api/src/test/java/io/kafbat/ui/config/auth/azure/AzureEntraLoginCallbackHandlerTest.java b/api/src/test/java/io/kafbat/ui/config/auth/azure/AzureEntraLoginCallbackHandlerTest.java
index d2e39ce75..701f7cf3d 100644
--- a/api/src/test/java/io/kafbat/ui/config/auth/azure/AzureEntraLoginCallbackHandlerTest.java
+++ b/api/src/test/java/io/kafbat/ui/config/auth/azure/AzureEntraLoginCallbackHandlerTest.java
@@ -17,7 +17,6 @@
 import com.azure.core.credential.AccessToken;
 import com.azure.core.credential.TokenCredential;
 import com.azure.core.credential.TokenRequestContext;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import javax.security.auth.callback.Callback;
@@ -33,7 +32,7 @@
 import reactor.core.publisher.Mono;
 
 @ExtendWith(MockitoExtension.class)
-public class AzureEntraLoginCallbackHandlerTest {
+class AzureEntraLoginCallbackHandlerTest {
 
   // These are not real tokens. It was generated using fake values with an invalid signature,
   // so it is safe to store here.
@@ -59,9 +58,6 @@ class AzureEntraLoginCallbackHandlerTest {
   @Mock
   private OAuthBearerTokenCallback oauthBearerTokenCallBack;
 
-  @Mock
-  private OAuthBearerToken oauthBearerToken;
-
   @Mock
   private TokenCredential tokenCredential;
 
@@ -71,18 +67,14 @@ class AzureEntraLoginCallbackHandlerTest {
   private AzureEntraLoginCallbackHandler azureEntraLoginCallbackHandler;
 
   @BeforeEach
-  public void beforeEach() {
+  void beforeEach() {
     azureEntraLoginCallbackHandler = new AzureEntraLoginCallbackHandler();
     azureEntraLoginCallbackHandler.setTokenCredential(tokenCredential);
   }
 
   @Test
-  public void shouldProvideTokenToCallbackWithSuccessfulTokenRequest()
-      throws UnsupportedCallbackException {
-    final Map<String, Object> configs = new HashMap<>();
-    configs.put(
-        "bootstrap.servers",
-        List.of("test-eh.servicebus.windows.net:9093"));
+  void shouldProvideTokenToCallbackWithSuccessfulTokenRequest() throws UnsupportedCallbackException {
+    Map<String, Object> configs = Map.of("bootstrap.servers", List.of("test-eh.servicebus.windows.net:9093"));
 
     when(tokenCredential.getToken(any(TokenRequestContext.class))).thenReturn(Mono.just(accessToken));
     when(accessToken.getToken()).thenReturn(VALID_SAMPLE_TOKEN);
@@ -113,11 +105,8 @@ public void shouldProvideTokenToCallbackWithSuccessfulTokenRequest()
   }
 
   @Test
-  public void shouldProvideErrorToCallbackWithTokenError() throws UnsupportedCallbackException {
-    final Map<String, Object> configs = new HashMap<>();
-    configs.put(
-        "bootstrap.servers",
-        List.of("test-eh.servicebus.windows.net:9093"));
+  void shouldProvideErrorToCallbackWithTokenError() throws UnsupportedCallbackException {
+    Map<String, Object> configs = Map.of("bootstrap.servers", List.of("test-eh.servicebus.windows.net:9093"));
 
     when(tokenCredential.getToken(any(TokenRequestContext.class)))
         .thenThrow(new RuntimeException("failed to acquire token"));
@@ -135,35 +124,32 @@ public void shouldProvideErrorToCallbackWithTokenError() throws UnsupportedCallb
   }
 
   @Test
-  public void shouldThrowExceptionWithNullBootstrapServers() {
-    final Map<String, Object> configs = new HashMap<>();
-
+  void shouldThrowExceptionWithNullBootstrapServers() {
     assertThrows(IllegalArgumentException.class, () -> azureEntraLoginCallbackHandler.configure(
-        configs, null, null));
+        Map.of(), null, null));
   }
 
   @Test
-  public void shouldThrowExceptionWithMultipleBootstrapServers() {
-    final Map<String, Object> configs = new HashMap<>();
-    configs.put("bootstrap.servers", List.of("server1", "server2"));
+  void shouldThrowExceptionWithMultipleBootstrapServers() {
+    Map<String, Object> configs = Map.of("bootstrap.servers", List.of("server1", "server2"));
 
     assertThrows(IllegalArgumentException.class, () -> azureEntraLoginCallbackHandler.configure(
         configs, null, null));
   }
 
   @Test
-  public void shouldThrowExceptionWithUnsupportedCallback() {
+  void shouldThrowExceptionWithUnsupportedCallback() {
     assertThrows(UnsupportedCallbackException.class, () -> azureEntraLoginCallbackHandler.handle(
         new Callback[] {mock(Callback.class)}));
   }
 
   @Test
-  public void shouldDoNothingOnClose() {
+  void shouldDoNothingOnClose() {
     azureEntraLoginCallbackHandler.close();
   }
 
   @Test
-  public void shouldSupportDefaultConstructor() {
+  void shouldSupportDefaultConstructor() {
     new AzureEntraLoginCallbackHandler();
   }
 }
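For context, this callback handler is what a cluster's Kafka client properties point at when authenticating to Azure Event Hubs over OAUTHBEARER. A sketch of the relevant keys — the names follow the standard Kafka SASL/OAUTHBEARER wiring, and the exact combination should be treated as an assumption rather than a verified kafbat example:

security.protocol: SASL_SSL
sasl.mechanism: OAUTHBEARER
sasl.jaas.config: org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required;
sasl.login.callback.handler.class: io.kafbat.ui.config.auth.azure.AzureEntraLoginCallbackHandler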
diff --git a/api/src/test/java/io/kafbat/ui/config/auth/azure/AzureEntraOAuthBearerTokenTest.java b/api/src/test/java/io/kafbat/ui/config/auth/azure/AzureEntraOAuthBearerTokenTest.java
index 84ed3b1cd..74dcfd077 100644
--- a/api/src/test/java/io/kafbat/ui/config/auth/azure/AzureEntraOAuthBearerTokenTest.java
+++ b/api/src/test/java/io/kafbat/ui/config/auth/azure/AzureEntraOAuthBearerTokenTest.java
@@ -12,7 +12,7 @@
 import org.apache.kafka.common.errors.SaslAuthenticationException;
 import org.junit.jupiter.api.Test;
 
-public class AzureEntraOAuthBearerTokenTest {
+class AzureEntraOAuthBearerTokenTest {
 
   // These are not real tokens. It was generated using fake values with an invalid signature,
   // so it is safe to store here.
@@ -39,8 +39,7 @@ class AzureEntraOAuthBearerTokenTest {
   void constructorShouldParseToken() {
     final AccessToken accessToken = new AccessToken(VALID_SAMPLE_TOKEN, OffsetDateTime.MIN);
 
-    final AzureEntraOAuthBearerToken azureOAuthBearerToken =
-        new AzureEntraOAuthBearerToken(accessToken);
+    final AzureEntraOAuthBearerToken azureOAuthBearerToken = new AzureEntraOAuthBearerToken(accessToken);
 
     assertThat(azureOAuthBearerToken, is(notNullValue()));
     assertThat(azureOAuthBearerToken.value(), is(VALID_SAMPLE_TOKEN));
diff --git a/api/src/test/java/io/kafbat/ui/container/ActiveDirectoryContainer.java b/api/src/test/java/io/kafbat/ui/container/ActiveDirectoryContainer.java
new file mode 100644
index 000000000..af1f42a68
--- /dev/null
+++ b/api/src/test/java/io/kafbat/ui/container/ActiveDirectoryContainer.java
@@ -0,0 +1,115 @@
+package io.kafbat.ui.container;
+
+import com.github.dockerjava.api.command.InspectContainerResponse;
+import java.io.IOException;
+import lombok.extern.slf4j.Slf4j;
+import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.utility.DockerImageName;
+
+@Slf4j
+public class ActiveDirectoryContainer extends GenericContainer<ActiveDirectoryContainer> {
+  public static final String DOMAIN = "corp.kafbat.io";
+  public static final String PASSWORD = "StrongPassword123";
+  public static final String FIRST_USER_WITH_GROUP = "JohnDoe";
+  public static final String SECOND_USER_WITH_GROUP = "JohnWick";
+  public static final String USER_WITHOUT_GROUP = "JackSmith";
+  public static final String EMPTY_PERMISSIONS_USER = "JohnJames";
+  public static final String CONTAINER_CERT_PATH = "/var/containers/cert.pem";
+  public static final String CONTAINER_KEY_PATH = "/var/containers/key.pem";
+
+  private static final String DOMAIN_DC = "dc=corp,dc=kafbat,dc=io";
+  private static final String GROUP = "group";
+  private static final String FIRST_GROUP = "firstGroup";
+  private static final String SECOND_GROUP = "secondGroup";
+  private static final String DOMAIN_EMAIL = "kafbat.io";
+  private static final String SAMBA_TOOL = "samba-tool";
+  private static final int LDAP_PORT = 389;
+  private static final int LDAPS_PORT = 636;
+  private static final DockerImageName IMAGE_NAME = DockerImageName.parse("nowsci/samba-domain:latest");
+
+  private final boolean sslEnabled;
+  private final int port;
+
+  public ActiveDirectoryContainer(boolean sslEnabled) {
+    super(IMAGE_NAME);
+
+    this.sslEnabled = sslEnabled;
+    port = sslEnabled ? LDAPS_PORT : LDAP_PORT;
+
+    withExposedPorts(port);
+
+    withEnv("DOMAIN", DOMAIN);
+    withEnv("DOMAIN_DC", DOMAIN_DC);
+    withEnv("DOMAIN_EMAIL", DOMAIN_EMAIL);
+    withEnv("DOMAINPASS", PASSWORD);
+    withEnv("NOCOMPLEXITY", "true");
+    withEnv("INSECURELDAP", String.valueOf(!sslEnabled));
+
+    withPrivilegedMode(true);
+  }
+
+  @Override
+  public void start() {
+    super.start();
+
+    if (sslEnabled) {
+      setCustomCertAndRestartServer();
+    }
+  }
+
+  protected void containerIsStarted(InspectContainerResponse containerInfo) {
+    createUser(EMPTY_PERMISSIONS_USER);
+    createUser(USER_WITHOUT_GROUP);
+    createUser(FIRST_USER_WITH_GROUP);
+    createUser(SECOND_USER_WITH_GROUP);
+
+    exec(SAMBA_TOOL, GROUP, "add", FIRST_GROUP);
+    exec(SAMBA_TOOL, GROUP, "add", SECOND_GROUP);
+    exec(SAMBA_TOOL, GROUP, "addmembers", FIRST_GROUP, FIRST_USER_WITH_GROUP);
+    exec(SAMBA_TOOL, GROUP, "addmembers", SECOND_GROUP, SECOND_USER_WITH_GROUP);
+  }
+
+  public String getLdapUrl() {
+    return String.format("%s://%s:%s", sslEnabled ? "ldaps" : "ldap", getHost(), getMappedPort(port));
+  }
+
+  private void createUser(String name) {
+    exec(SAMBA_TOOL, "user", "create", name, PASSWORD, "--mail-address", name + '@' + DOMAIN_EMAIL);
+    exec(SAMBA_TOOL, "user", "setexpiry", name, "--noexpiry");
+  }
+
+  private void exec(String... cmd) {
+    ExecResult result;
+    try {
+      result = execInContainer(cmd);
+    } catch (IOException | InterruptedException e) {
+      throw new RuntimeException(e);
+    }
+
+    if (result.getStdout() != null && !result.getStdout().isEmpty()) {
+      log.info("Output: {}", result.getStdout());
+    }
+
+    if (result.getExitCode() != 0) {
+      throw new IllegalStateException(result.toString());
+    }
+  }
+
+  private void setCustomCertAndRestartServer() {
+    exec(
+        "sed",
+        "-i",
+        "/\\[global\\]/a \\\t"
+            + "tls cafile = \\\n"
+            + "tls keyfile = " + CONTAINER_KEY_PATH + "\\\n"
+            + "tls certfile = " + CONTAINER_CERT_PATH + "\\\n",
+        "/etc/samba/external/smb.conf"
+    );
+
+    exec("chown", "-R", "root:root", "/var/containers/");
+    exec("chmod", "600", CONTAINER_KEY_PATH);
+
+    exec("./domain.sh", "reload-config");
+    exec("supervisorctl", "restart", "samba");
+  }
+}
diff --git a/api/src/test/java/io/kafbat/ui/container/KafkaConnectContainer.java b/api/src/test/java/io/kafbat/ui/container/KafkaConnectContainer.java
index 0dbf361e8..42d2e2816 100644
--- a/api/src/test/java/io/kafbat/ui/container/KafkaConnectContainer.java
+++ b/api/src/test/java/io/kafbat/ui/container/KafkaConnectContainer.java
@@ -47,6 +47,6 @@ public KafkaConnectContainer withKafka(Network network, String bootstrapServers)
   }
 
   public String getTarget() {
-    return "http://" + getContainerIpAddress() + ":" + getMappedPort(CONNECT_PORT);
+    return "http://" + getHost() + ":" + getMappedPort(CONNECT_PORT);
   }
 }
diff --git a/api/src/test/java/io/kafbat/ui/container/KsqlDbContainer.java b/api/src/test/java/io/kafbat/ui/container/KsqlDbContainer.java
index 40de05d9e..09e8aad32 100644
--- a/api/src/test/java/io/kafbat/ui/container/KsqlDbContainer.java
+++ b/api/src/test/java/io/kafbat/ui/container/KsqlDbContainer.java
@@ -34,6 +34,6 @@ private KsqlDbContainer withKafka(Network network, String bootstrapServers) {
   }
 
   public String url() {
-    return "http://" + getContainerIpAddress() + ":" + getMappedPort(PORT);
+    return "http://" + getHost() + ":" + getMappedPort(PORT);
   }
 }
diff --git a/api/src/test/java/io/kafbat/ui/container/SchemaRegistryContainer.java b/api/src/test/java/io/kafbat/ui/container/SchemaRegistryContainer.java
index e6dba5298..052e5307a 100644
--- a/api/src/test/java/io/kafbat/ui/container/SchemaRegistryContainer.java
+++ b/api/src/test/java/io/kafbat/ui/container/SchemaRegistryContainer.java
@@ -28,7 +28,7 @@ public SchemaRegistryContainer withKafka(Network network, String bootstrapServer
   }
 
   public String getUrl() {
-    return "http://" + getContainerIpAddress() + ":" + getMappedPort(SCHEMA_PORT);
+    return "http://" + getHost() + ":" + getMappedPort(SCHEMA_PORT);
   }
 
   public SchemaRegistryClient schemaRegistryClient() {
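All three container helpers above make the same swap: getContainerIpAddress() has long been deprecated in Testcontainers in favor of getHost(), which resolves the Docker host address correctly even with a remote daemon. A minimal sketch of the pattern inside any GenericContainer subclass:

public String baseUrl(int port) {
  // getHost() replaces the deprecated getContainerIpAddress(); same value on a local daemon.
  return "http://" + getHost() + ":" + getMappedPort(port);
}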
diff --git a/api/src/test/java/io/kafbat/ui/controller/ApplicationConfigControllerTest.java b/api/src/test/java/io/kafbat/ui/controller/ApplicationConfigControllerTest.java
index c95a34d26..9659b4ea6 100644
--- a/api/src/test/java/io/kafbat/ui/controller/ApplicationConfigControllerTest.java
+++ b/api/src/test/java/io/kafbat/ui/controller/ApplicationConfigControllerTest.java
@@ -20,7 +20,7 @@ class ApplicationConfigControllerTest extends AbstractIntegrationTest {
   private WebTestClient webTestClient;
 
   @Test
-  public void testUpload() throws IOException {
+  void testUpload() throws IOException {
    var fileToUpload = new ClassPathResource("/fileForUploadTest.txt", this.getClass());
 
    UploadedFileInfoDTO result = webTestClient
diff --git a/api/src/test/java/io/kafbat/ui/emitter/CursorTest.java b/api/src/test/java/io/kafbat/ui/emitter/CursorTest.java
index 3dd5dadd1..5a262b132 100644
--- a/api/src/test/java/io/kafbat/ui/emitter/CursorTest.java
+++ b/api/src/test/java/io/kafbat/ui/emitter/CursorTest.java
@@ -99,6 +99,7 @@ private Cursor assertCursor(PollingModeDTO expectedMode,
     var cursorPosition = registeredCursor.consumerPosition();
     assertThat(cursorPosition).isNotNull();
+    assertThat(cursorPosition.offsets()).isNotNull();
     assertThat(cursorPosition.topic()).isEqualTo(TOPIC);
     assertThat(cursorPosition.partitions()).isEqualTo(List.of());
     assertThat(cursorPosition.pollingMode()).isEqualTo(expectedMode);
@@ -111,6 +112,7 @@ private void waitMgsgEmitted(AbstractEmitter emitter, int expectedMsgsCnt) {
     List<TopicMessageEventDTO> events = Flux.create(emitter)
         .collectList()
         .block();
+    assertThat(events).isNotNull();
     assertThat(events.stream().filter(m -> m.getType() == TopicMessageEventDTO.TypeEnum.MESSAGE).count())
         .isEqualTo(expectedMsgsCnt);
   }
diff --git a/api/src/test/java/io/kafbat/ui/emitter/MessageFiltersTest.java b/api/src/test/java/io/kafbat/ui/emitter/MessageFiltersTest.java
index 617cfb0c1..7aafea5ab 100644
--- a/api/src/test/java/io/kafbat/ui/emitter/MessageFiltersTest.java
+++ b/api/src/test/java/io/kafbat/ui/emitter/MessageFiltersTest.java
@@ -28,7 +28,7 @@ class StringContainsFilter {
     Predicate<TopicMessageDTO> filter = containsStringFilter("abC");
 
     @Test
-    void returnsTrueWhenStringContainedInKeyOrContentOrInBoth() {
+    void returnsTrueWhenStringContainedInKeyOrContentOrHeadersOrInAllThree() {
       assertTrue(
           filter.test(msg().key("contains abCd").content("some str"))
       );
@@ -40,6 +40,14 @@ void returnsTrueWhenStringContainedInKeyOrContentOrInBoth() {
       assertTrue(
           filter.test(msg().key("contains abCd").content("contains abCd"))
       );
+
+      assertTrue(
+          filter.test(msg().key("dfg").content("does-not-contain").headers(Map.of("abC", "value")))
+      );
+
+      assertTrue(
+          filter.test(msg().key("dfg").content("does-not-contain").headers(Map.of("x1", "some abC")))
+      );
     }
 
     @Test
@@ -55,6 +63,11 @@ void returnsFalseOtherwise() {
       assertFalse(
           filter.test(msg().key("aBc").content("AbC"))
       );
+
+      assertFalse(
+          filter.test(msg().key("aBc").content("AbC").headers(Map.of("abc", "value")))
+      );
+
     }
   }
 
@@ -186,7 +199,7 @@ void filterSpeedIsAtLeast5kPerSec() {
     long took = System.currentTimeMillis() - before;
     assertThat(took).isLessThan(1000);
-    assertThat(matched).isGreaterThan(0);
+    assertThat(matched).isPositive();
   }
 }
diff --git a/api/src/test/java/io/kafbat/ui/emitter/TailingEmitterTest.java b/api/src/test/java/io/kafbat/ui/emitter/TailingEmitterTest.java
index 4194868d3..592bee935 100644
--- a/api/src/test/java/io/kafbat/ui/emitter/TailingEmitterTest.java
+++ b/api/src/test/java/io/kafbat/ui/emitter/TailingEmitterTest.java
@@ -106,7 +106,7 @@ private Flux<TopicMessageEventDTO> createTailingFlux(
       String query) {
     var cluster = applicationContext.getBean(ClustersStorage.class)
         .getClusterByName(LOCAL)
-        .get();
+        .orElseThrow();
 
     return applicationContext.getBean(MessagesService.class)
         .loadMessages(cluster, topicName,
diff --git a/api/src/test/java/io/kafbat/ui/serdes/SerdesInitializerTest.java b/api/src/test/java/io/kafbat/ui/serdes/SerdesInitializerTest.java
index 4f0caf7b4..0d6fc36a3 100644
--- a/api/src/test/java/io/kafbat/ui/serdes/SerdesInitializerTest.java
+++ b/api/src/test/java/io/kafbat/ui/serdes/SerdesInitializerTest.java
@@ -5,7 +5,6 @@
 import static org.mockito.Mockito.any;
 import static org.mockito.Mockito.anyString;
 import static org.mockito.Mockito.eq;
-import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.when;
 
@@ -145,6 +144,8 @@ private void verifyAutoConfigured(SerdeInstance serde) {
   }
 
   private void verifyPatternsMatch(ClustersProperties.SerdeConfig config, SerdeInstance serde) {
+    assertThat(serde.topicKeyPattern).isNotNull();
+    assertThat(serde.topicValuePattern).isNotNull();
     assertThat(serde.topicKeyPattern.pattern()).isEqualTo(config.getTopicKeysPattern());
     assertThat(serde.topicValuePattern.pattern()).isEqualTo(config.getTopicValuesPattern());
   }
diff --git a/api/src/test/java/io/kafbat/ui/serdes/builtin/ConsumerOffsetsSerdeTest.java b/api/src/test/java/io/kafbat/ui/serdes/builtin/ConsumerOffsetsSerdeTest.java
index 727cb5525..c737b3ef7 100644
--- a/api/src/test/java/io/kafbat/ui/serdes/builtin/ConsumerOffsetsSerdeTest.java
+++ b/api/src/test/java/io/kafbat/ui/serdes/builtin/ConsumerOffsetsSerdeTest.java
@@ -168,6 +168,7 @@ private boolean isGroupMetadataMessage(DeserializeResult key, DeserializeResult
   }
 
   @SneakyThrows
+  @SuppressWarnings("unchecked")
   private Map<String, Object> toMapFromJsom(DeserializeResult result) {
     return new JsonMapper().readValue(result.getResult(), Map.class);
   }
diff --git a/api/src/test/java/io/kafbat/ui/serdes/builtin/HexSerdeTest.java b/api/src/test/java/io/kafbat/ui/serdes/builtin/HexSerdeTest.java
index a13690c1e..54cb63321 100644
--- a/api/src/test/java/io/kafbat/ui/serdes/builtin/HexSerdeTest.java
+++ b/api/src/test/java/io/kafbat/ui/serdes/builtin/HexSerdeTest.java
@@ -11,7 +11,7 @@
 import org.junit.jupiter.params.provider.CsvSource;
 import org.junit.jupiter.params.provider.EnumSource;
 
-public class HexSerdeTest {
+class HexSerdeTest {
 
   private static final byte[] TEST_BYTES = "hello world".getBytes();
   private static final String TEST_BYTES_HEX_ENCODED = "68 65 6C 6C 6F 20 77 6F 72 6C 64";
diff --git a/api/src/test/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerdeTest.java b/api/src/test/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerdeTest.java
index 61d1407c4..becccafa6 100644
--- a/api/src/test/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerdeTest.java
+++ b/api/src/test/java/io/kafbat/ui/serdes/builtin/ProtobufFileSerdeTest.java
@@ -80,14 +80,15 @@ void setUp() throws Exception {
   void loadsAllProtoFiledFromTargetDirectory() throws Exception {
     var protoDir = ResourceUtils.getFile("classpath:protobuf-serde/").getPath();
     List<ProtoFile> files = new ProtobufFileSerde.ProtoSchemaLoader(protoDir).load();
-    assertThat(files).hasSize(4);
+    assertThat(files).hasSize(5);
     assertThat(files)
         .map(f -> f.getLocation().getPath())
         .containsExactlyInAnyOrder(
             "language/language.proto",
             "sensor.proto",
             "address-book.proto",
-            "lang-description.proto"
+            "lang-description.proto",
+            "messagewithany.proto"
         );
   }
 
@@ -226,7 +227,7 @@ void createConfigureFillsDescriptorMappingsWhenProtoFilesListProvided() throws E
   }
 
   @Test
-  void createConfigureFillsDescriptorMappingsWhenProtoFileDirProvided() throws Exception {
+  void createConfigureFillsDescriptorMappingsWhenProtoFileDirProvided() {
     PropertyResolver resolver = mock(PropertyResolver.class);
     when(resolver.getProperty("protobufFilesDir", String.class))
         .thenReturn(Optional.of(protoFilesDir()));
diff --git a/api/src/test/java/io/kafbat/ui/serdes/builtin/ProtobufRawSerdeTest.java b/api/src/test/java/io/kafbat/ui/serdes/builtin/ProtobufRawSerdeTest.java
index ba88b2d37..8dd65123c 100644
--- a/api/src/test/java/io/kafbat/ui/serdes/builtin/ProtobufRawSerdeTest.java
+++ b/api/src/test/java/io/kafbat/ui/serdes/builtin/ProtobufRawSerdeTest.java
@@ -52,7 +52,7 @@ void deserializeSimpleMessage() {
   void deserializeEmptyMessage() {
     var deserialized = serde.deserializer(DUMMY_TOPIC, Serde.Target.VALUE)
         .deserialize(null, new byte[0]);
-    assertThat(deserialized.getResult()).isEqualTo("");
+    assertThat(deserialized.getResult()).isEmpty();
   }
 
   @Test
diff --git a/api/src/test/java/io/kafbat/ui/service/ApplicationInfoServiceTest.java b/api/src/test/java/io/kafbat/ui/service/ApplicationInfoServiceTest.java
index 64e6ed743..a4cae637b 100644
--- a/api/src/test/java/io/kafbat/ui/service/ApplicationInfoServiceTest.java
+++ b/api/src/test/java/io/kafbat/ui/service/ApplicationInfoServiceTest.java
@@ -6,12 +6,12 @@
 import org.junit.jupiter.api.Test;
 import org.springframework.beans.factory.annotation.Autowired;
 
-public class ApplicationInfoServiceTest extends AbstractIntegrationTest {
+class ApplicationInfoServiceTest extends AbstractIntegrationTest {
   @Autowired
   private ApplicationInfoService service;
 
   @Test
-  public void testCustomGithubReleaseInfoTimeout() {
+  void testCustomGithubReleaseInfoTimeout() {
     assertEquals(100, service.githubReleaseInfo().getGithubApiMaxWaitTime());
   }
 }
diff --git a/api/src/test/java/io/kafbat/ui/service/BrokerServiceTest.java b/api/src/test/java/io/kafbat/ui/service/BrokerServiceTest.java
index 3035b156a..274c4f1ee 100644
--- a/api/src/test/java/io/kafbat/ui/service/BrokerServiceTest.java
+++ b/api/src/test/java/io/kafbat/ui/service/BrokerServiceTest.java
@@ -15,7 +15,7 @@ class BrokerServiceTest extends AbstractIntegrationTest {
 
   @Test
   void getBrokersReturnsFilledBrokerDto() {
-    var localCluster = clustersStorage.getClusterByName(LOCAL).get();
+    var localCluster = clustersStorage.getClusterByName(LOCAL).orElseThrow();
     StepVerifier.create(brokerService.getBrokers(localCluster))
         .expectNextMatches(b -> b.getId().equals(1)
             && b.getHost().equals(kafka.getHost())
AbstractIntegrationTest { +class ConfigTest extends AbstractIntegrationTest { @Autowired private WebTestClient webTestClient; @@ -35,7 +35,7 @@ void waitUntilStatsInitialized() { } @Test - public void testAlterConfig() { + void testAlterConfig() { String name = "background.threads"; Optional bc = getConfig(name); @@ -65,7 +65,7 @@ public void testAlterConfig() { } @Test - public void testAlterReadonlyConfig() { + void testAlterReadonlyConfig() { String name = "log.dirs"; webTestClient.put() @@ -89,6 +89,8 @@ private Optional getConfig(String name) { .returnResult() .getResponseBody(); + assertThat(configs).isNotNull(); + return configs.stream() .filter(c -> c.getName().equals(name)) .findAny(); diff --git a/api/src/test/java/io/kafbat/ui/service/LogDirsTest.java b/api/src/test/java/io/kafbat/ui/service/LogDirsTest.java index 587bdfb46..718f2f49c 100644 --- a/api/src/test/java/io/kafbat/ui/service/LogDirsTest.java +++ b/api/src/test/java/io/kafbat/ui/service/LogDirsTest.java @@ -15,13 +15,13 @@ import org.springframework.core.ParameterizedTypeReference; import org.springframework.test.web.reactive.server.WebTestClient; -public class LogDirsTest extends AbstractIntegrationTest { +class LogDirsTest extends AbstractIntegrationTest { @Autowired private WebTestClient webTestClient; @Test - public void testAllBrokers() { + void testAllBrokers() { List dirs = webTestClient.get() .uri("/api/clusters/{clusterName}/brokers/logdirs", LOCAL) .exchange() @@ -31,22 +31,22 @@ public void testAllBrokers() { .getResponseBody(); assertThat(dirs).hasSize(1); - BrokersLogdirsDTO dir = dirs.get(0); + BrokersLogdirsDTO dir = dirs.getFirst(); assertThat(dir.getName()).isEqualTo("/var/lib/kafka/data"); assertThat(dir.getTopics().stream().anyMatch(t -> t.getName().equals("__consumer_offsets"))) .isTrue(); BrokerTopicLogdirsDTO topic = dir.getTopics().stream() .filter(t -> t.getName().equals("__consumer_offsets")) - .findAny().get(); + .findAny().orElseThrow(); assertThat(topic.getPartitions()).hasSize(1); - assertThat(topic.getPartitions().get(0).getBroker()).isEqualTo(1); - assertThat(topic.getPartitions().get(0).getSize()).isPositive(); + assertThat(topic.getPartitions().getFirst().getBroker()).isEqualTo(1); + assertThat(topic.getPartitions().getFirst().getSize()).isPositive(); } @Test - public void testOneBrokers() { + void testOneBrokers() { List dirs = webTestClient.get() .uri("/api/clusters/{clusterName}/brokers/logdirs?broker=1", LOCAL) .exchange() @@ -56,22 +56,22 @@ public void testOneBrokers() { .getResponseBody(); assertThat(dirs).hasSize(1); - BrokersLogdirsDTO dir = dirs.get(0); + BrokersLogdirsDTO dir = dirs.getFirst(); assertThat(dir.getName()).isEqualTo("/var/lib/kafka/data"); assertThat(dir.getTopics().stream().anyMatch(t -> t.getName().equals("__consumer_offsets"))) .isTrue(); BrokerTopicLogdirsDTO topic = dir.getTopics().stream() .filter(t -> t.getName().equals("__consumer_offsets")) - .findAny().get(); + .findAny().orElseThrow(); assertThat(topic.getPartitions()).hasSize(1); - assertThat(topic.getPartitions().get(0).getBroker()).isEqualTo(1); - assertThat(topic.getPartitions().get(0).getSize()).isPositive(); + assertThat(topic.getPartitions().getFirst().getBroker()).isEqualTo(1); + assertThat(topic.getPartitions().getFirst().getSize()).isPositive(); } @Test - public void testWrongBrokers() { + void testWrongBrokers() { List dirs = webTestClient.get() .uri("/api/clusters/{clusterName}/brokers/logdirs?broker=2", LOCAL) .exchange() @@ -84,7 +84,7 @@ public void testWrongBrokers() { } @Test - public 
void testChangeDirToWrongDir() { + void testChangeDirToWrongDir() { ErrorResponseDTO dirs = webTestClient.patch() .uri("/api/clusters/{clusterName}/brokers/{id}/logdirs", LOCAL, 1) .bodyValue(Map.of( @@ -99,6 +99,7 @@ public void testChangeDirToWrongDir() { .returnResult() .getResponseBody(); + assertThat(dirs).isNotNull(); assertThat(dirs.getMessage()) .isEqualTo(new LogDirNotFoundApiException().getMessage()); @@ -116,6 +117,7 @@ public void testChangeDirToWrongDir() { .returnResult() .getResponseBody(); + assertThat(dirs).isNotNull(); assertThat(dirs.getMessage()) .isEqualTo(new TopicOrPartitionNotFoundException().getMessage()); } diff --git a/api/src/test/java/io/kafbat/ui/service/MessagesServiceTest.java b/api/src/test/java/io/kafbat/ui/service/MessagesServiceTest.java index 8939b50c3..1fecae247 100644 --- a/api/src/test/java/io/kafbat/ui/service/MessagesServiceTest.java +++ b/api/src/test/java/io/kafbat/ui/service/MessagesServiceTest.java @@ -10,10 +10,10 @@ import io.kafbat.ui.model.KafkaCluster; import io.kafbat.ui.model.PollingModeDTO; import io.kafbat.ui.model.SmartFilterTestExecutionDTO; -import io.kafbat.ui.model.SmartFilterTestExecutionResultDTO; import io.kafbat.ui.model.TopicMessageDTO; import io.kafbat.ui.model.TopicMessageEventDTO; import io.kafbat.ui.producer.KafkaTestProducer; +import io.kafbat.ui.serdes.builtin.ProtobufFileSerde; import io.kafbat.ui.serdes.builtin.StringSerde; import java.util.HashSet; import java.util.List; @@ -48,7 +48,7 @@ void init() { cluster = applicationContext .getBean(ClustersStorage.class) .getClusterByName(LOCAL) - .get(); + .orElseThrow(); } @AfterEach @@ -215,4 +215,33 @@ void execSmartFilterTestReturnsErrorOnFilterCompilationError() { assertThat(result.getError()).containsIgnoringCase("Compilation error"); } + @Test + void sendMessageWithProtobufAnyType() { + String jsonContent = """ + { + "name": "testName", + "payload": { + "@type": "type.googleapis.com/test.PayloadMessage", + "id": "123" + } + } + """; + + CreateTopicMessageDTO testMessage = new CreateTopicMessageDTO() + .key(null) + .partition(0) + .keySerde(StringSerde.name()) + .content(jsonContent) + .valueSerde(ProtobufFileSerde.name()); + + String testTopic = MASKED_TOPICS_PREFIX + UUID.randomUUID(); + createTopicWithCleanup(new NewTopic(testTopic, 5, (short) 1)); + + StepVerifier.create(messagesService.sendMessage(cluster, testTopic, testMessage)) + .expectNextMatches(metadata -> metadata.topic().equals(testTopic) + && metadata.partition() == 0 + && metadata.offset() >= 0) + .verifyComplete(); + } + } diff --git a/api/src/test/java/io/kafbat/ui/service/OffsetsResetServiceTest.java b/api/src/test/java/io/kafbat/ui/service/OffsetsResetServiceTest.java index 91593fafa..8c873980e 100644 --- a/api/src/test/java/io/kafbat/ui/service/OffsetsResetServiceTest.java +++ b/api/src/test/java/io/kafbat/ui/service/OffsetsResetServiceTest.java @@ -33,7 +33,7 @@ import reactor.core.publisher.Mono; import reactor.test.StepVerifier; -public class OffsetsResetServiceTest extends AbstractIntegrationTest { +class OffsetsResetServiceTest extends AbstractIntegrationTest { private static final int PARTITIONS = 5; @@ -45,7 +45,7 @@ public class OffsetsResetServiceTest extends AbstractIntegrationTest { @BeforeEach void init() { - cluster = applicationContext.getBean(ClustersStorage.class).getClusterByName(LOCAL).get(); + cluster = applicationContext.getBean(ClustersStorage.class).getClusterByName(LOCAL).orElseThrow(); offsetsResetService = new 
OffsetsResetService(applicationContext.getBean(AdminClientService.class)); createTopic(new NewTopic(topic, PARTITIONS, (short) 1)); createConsumerGroup(); diff --git a/api/src/test/java/io/kafbat/ui/service/ReactiveAdminClientTest.java b/api/src/test/java/io/kafbat/ui/service/ReactiveAdminClientTest.java index db1acfe3b..15cc1c903 100644 --- a/api/src/test/java/io/kafbat/ui/service/ReactiveAdminClientTest.java +++ b/api/src/test/java/io/kafbat/ui/service/ReactiveAdminClientTest.java @@ -56,7 +56,8 @@ class ReactiveAdminClientTest extends AbstractIntegrationTest { void init() { AdminClientService adminClientService = applicationContext.getBean(AdminClientService.class); ClustersStorage clustersStorage = applicationContext.getBean(ClustersStorage.class); - reactiveAdminClient = requireNonNull(adminClientService.get(clustersStorage.getClusterByName(LOCAL).get()).block()); + reactiveAdminClient = + requireNonNull(adminClientService.get(clustersStorage.getClusterByName(LOCAL).orElseThrow()).block()); adminClient = reactiveAdminClient.getClient(); } @@ -104,7 +105,6 @@ void testUpdateTopicConfigs() throws Exception { assertThat(config.get("file.delete.delay.ms").value()).isEqualTo("12345"); } - @SneakyThrows void createTopics(NewTopic... topics) { adminClient.createTopics(List.of(topics)).all().get(); @@ -207,25 +207,20 @@ void testListOffsetsUnsafe() { ); StepVerifier.create(reactiveAdminClient.listOffsetsUnsafe(requestedPartitions, OffsetSpec.earliest())) - .assertNext(offsets -> { - Assertions.assertThat(offsets) - .hasSize(2) - .containsEntry(new TopicPartition(topic, 0), 0L) - .containsEntry(new TopicPartition(topic, 1), 0L); - }) + .assertNext(offsets -> Assertions.assertThat(offsets) + .hasSize(2) + .containsEntry(new TopicPartition(topic, 0), 0L) + .containsEntry(new TopicPartition(topic, 1), 0L)) .verifyComplete(); StepVerifier.create(reactiveAdminClient.listOffsetsUnsafe(requestedPartitions, OffsetSpec.latest())) - .assertNext(offsets -> { - Assertions.assertThat(offsets) - .hasSize(2) - .containsEntry(new TopicPartition(topic, 0), 0L) - .containsEntry(new TopicPartition(topic, 1), 2L); - }) + .assertNext(offsets -> Assertions.assertThat(offsets) + .hasSize(2) + .containsEntry(new TopicPartition(topic, 0), 0L) + .containsEntry(new TopicPartition(topic, 1), 2L)) .verifyComplete(); } - @Test void testListConsumerGroupOffsets() throws Exception { String topic = UUID.randomUUID().toString(); @@ -241,7 +236,7 @@ void testListConsumerGroupOffsets() throws Exception { p.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); p.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName()); p.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false"); - return new KafkaConsumer(p); + return new KafkaConsumer<>(p); }; String fullyPolledConsumer = UUID.randomUUID().toString(); diff --git a/api/src/test/java/io/kafbat/ui/service/SchemaRegistryPaginationTest.java b/api/src/test/java/io/kafbat/ui/service/SchemaRegistryPaginationTest.java index abee297f9..43cb29382 100644 --- a/api/src/test/java/io/kafbat/ui/service/SchemaRegistryPaginationTest.java +++ b/api/src/test/java/io/kafbat/ui/service/SchemaRegistryPaginationTest.java @@ -22,7 +22,7 @@ import org.mockito.Mockito; import reactor.core.publisher.Mono; -public class SchemaRegistryPaginationTest { +class SchemaRegistryPaginationTest { private static final String LOCAL_KAFKA_CLUSTER_NAME = "local"; @@ -59,6 +59,8 @@ void shouldListFirst25andThen10Schemas() { ); var 
schemasFirst25 = controller.getSchemas(LOCAL_KAFKA_CLUSTER_NAME, null, null, null, null).block(); + assertThat(schemasFirst25).isNotNull(); + assertThat(schemasFirst25.getBody()).isNotNull(); assertThat(schemasFirst25.getBody().getPageCount()).isEqualTo(4); assertThat(schemasFirst25.getBody().getSchemas()).hasSize(25); assertThat(schemasFirst25.getBody().getSchemas()) @@ -67,6 +69,8 @@ void shouldListFirst25andThen10Schemas() { var schemasFirst10 = controller.getSchemas(LOCAL_KAFKA_CLUSTER_NAME, null, 10, null, null).block(); + assertThat(schemasFirst10).isNotNull(); + assertThat(schemasFirst10.getBody()).isNotNull(); assertThat(schemasFirst10.getBody().getPageCount()).isEqualTo(10); assertThat(schemasFirst10.getBody().getSchemas()).hasSize(10); assertThat(schemasFirst10.getBody().getSchemas()) @@ -83,6 +87,8 @@ void shouldListSchemasContaining_1() { ); var schemasSearch7 = controller.getSchemas(LOCAL_KAFKA_CLUSTER_NAME, null, null, "1", null).block(); + assertThat(schemasSearch7).isNotNull(); + assertThat(schemasSearch7.getBody()).isNotNull(); assertThat(schemasSearch7.getBody().getPageCount()).isEqualTo(1); assertThat(schemasSearch7.getBody().getSchemas()).hasSize(20); } @@ -98,6 +104,8 @@ void shouldCorrectlyHandleNonPositivePageNumberAndPageSize() { var schemas = controller.getSchemas(LOCAL_KAFKA_CLUSTER_NAME, 0, -1, null, null).block(); + assertThat(schemas).isNotNull(); + assertThat(schemas.getBody()).isNotNull(); assertThat(schemas.getBody().getPageCount()).isEqualTo(4); assertThat(schemas.getBody().getSchemas()).hasSize(25); assertThat(schemas.getBody().getSchemas()).isSortedAccordingTo(Comparator.comparing(SchemaSubjectDTO::getSubject)); @@ -115,11 +123,14 @@ void shouldCalculateCorrectPageCountForNonDivisiblePageSize() { var schemas = controller.getSchemas(LOCAL_KAFKA_CLUSTER_NAME, 4, 33, null, null).block(); + assertThat(schemas).isNotNull(); + assertThat(schemas.getBody()).isNotNull(); assertThat(schemas.getBody().getPageCount()).isEqualTo(4); assertThat(schemas.getBody().getSchemas()).hasSize(1); - assertThat(schemas.getBody().getSchemas().get(0).getSubject()).isEqualTo("subject99"); + assertThat(schemas.getBody().getSchemas().getFirst().getSubject()).isEqualTo("subject99"); } + @SuppressWarnings("unchecked") private KafkaCluster buildKafkaCluster(String clusterName) { return KafkaCluster.builder() .name(clusterName) diff --git a/api/src/test/java/io/kafbat/ui/service/SendAndReadTests.java b/api/src/test/java/io/kafbat/ui/service/SendAndReadTests.java index cdf304509..9e0164540 100644 --- a/api/src/test/java/io/kafbat/ui/service/SendAndReadTests.java +++ b/api/src/test/java/io/kafbat/ui/service/SendAndReadTests.java @@ -19,6 +19,7 @@ import io.kafbat.ui.serdes.builtin.StringSerde; import io.kafbat.ui.serdes.builtin.sr.SchemaRegistrySerde; import java.time.Duration; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.Objects; @@ -26,13 +27,12 @@ import java.util.function.Consumer; import lombok.SneakyThrows; import org.apache.kafka.clients.admin.NewTopic; -import org.apache.kafka.common.TopicPartition; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import reactor.test.StepVerifier; -public class SendAndReadTests extends AbstractIntegrationTest { +class SendAndReadTests extends AbstractIntegrationTest { private static final AvroSchema AVRO_SCHEMA_1 = new AvroSchema( "{" @@ -81,14 +81,16 @@ public class SendAndReadTests extends 
AbstractIntegrationTest { private static final String AVRO_SCHEMA_2_JSON_RECORD = "{ \"f1\": 111, \"f2\": \"testStr\" }"; private static final ProtobufSchema PROTOBUF_SCHEMA = new ProtobufSchema( - "syntax = \"proto3\";\n" - + "package io.kafbat;\n" - + "\n" - + "message TestProtoRecord {\n" - + " string f1 = 1;\n" - + " int32 f2 = 2;\n" - + "}\n" - + "\n" + """ + syntax = "proto3"; + package io.kafbat; + + message TestProtoRecord { + string f1 = 1; + int32 f2 = 2; + } + + """ ); private static final String PROTOBUF_SCHEMA_JSON_RECORD @@ -131,7 +133,7 @@ public class SendAndReadTests extends AbstractIntegrationTest { @BeforeEach void init() { - targetCluster = clustersStorage.getClusterByName(LOCAL).get(); + targetCluster = clustersStorage.getClusterByName(LOCAL).orElseThrow(); } @Test @@ -343,7 +345,7 @@ void valueWithJsonSchemaThrowsExceptionIfArgIsNotValidJsonObject() { new CreateTopicMessageDTO() .key(null) .keySerde(StringSerde.name()) - // 'f2' field has has type object instead of string + // 'f2' field has type object instead of string .content("{ \"f1\": 12, \"f2\": {}, \"schema\": \"some txt\" }") .valueSerde(SchemaRegistrySerde.name()) ) @@ -425,6 +427,23 @@ void topicMessageMetadataJson() { }); } + @Test + void headerValueNullPresentTest() { + new SendAndReadSpec() + .withKeySchema(JSON_SCHEMA) + .withValueSchema(JSON_SCHEMA) + .withMsgToSend( + new CreateTopicMessageDTO() + .key(JSON_SCHEMA_RECORD) + .keySerde(SchemaRegistrySerde.name()) + .content(JSON_SCHEMA_RECORD) + .valueSerde(SchemaRegistrySerde.name()) + .headers(Collections.singletonMap("header123", null)) + ) + .doAssert(polled -> assertThat(polled.getHeaders().get("header123")).isNull()); + } + + @Test void noKeyAndNoContentPresentTest() { new SendAndReadSpec() @@ -511,7 +530,7 @@ public void doAssert(Consumer msgAssert) { .blockLast(Duration.ofSeconds(5000)); assertThat(polled).isNotNull(); - assertThat(polled.getPartition()).isEqualTo(0); + assertThat(polled.getPartition()).isZero(); assertThat(polled.getOffset()).isNotNull(); msgAssert.accept(polled); } finally { diff --git a/api/src/test/java/io/kafbat/ui/service/TopicsServicePaginationTest.java b/api/src/test/java/io/kafbat/ui/service/TopicsServicePaginationTest.java index 9091ebb3d..08b211b40 100644 --- a/api/src/test/java/io/kafbat/ui/service/TopicsServicePaginationTest.java +++ b/api/src/test/java/io/kafbat/ui/service/TopicsServicePaginationTest.java @@ -66,7 +66,7 @@ private void init(Map topicsInCache) { } @Test - public void shouldListFirst25Topics() { + void shouldListFirst25Topics() { init( IntStream.rangeClosed(1, 100).boxed() .map(Objects::toString) @@ -93,7 +93,7 @@ private KafkaCluster buildKafkaCluster(String clusterName) { } @Test - public void shouldListFirst25TopicsSortedByNameDescendingOrder() { + void shouldListFirst25TopicsSortedByNameDescendingOrder() { var internalTopics = IntStream.rangeClosed(1, 100).boxed() .map(Objects::toString) .map(name -> new TopicDescription(name, false, List.of())) @@ -119,7 +119,7 @@ public void shouldListFirst25TopicsSortedByNameDescendingOrder() { } @Test - public void shouldCalculateCorrectPageCountForNonDivisiblePageSize() { + void shouldCalculateCorrectPageCountForNonDivisiblePageSize() { init( IntStream.rangeClosed(1, 100).boxed() .map(Objects::toString) @@ -134,11 +134,11 @@ public void shouldCalculateCorrectPageCountForNonDivisiblePageSize() { assertThat(topics.getBody().getPageCount()).isEqualTo(4); assertThat(topics.getBody().getTopics()).hasSize(1); - 
assertThat(topics.getBody().getTopics().get(0).getName()).isEqualTo("99"); + assertThat(topics.getBody().getTopics().getFirst().getName()).isEqualTo("99"); } @Test - public void shouldCorrectlyHandleNonPositivePageNumberAndPageSize() { + void shouldCorrectlyHandleNonPositivePageNumberAndPageSize() { init( IntStream.rangeClosed(1, 100).boxed() .map(Objects::toString) @@ -157,7 +157,7 @@ } @Test - public void shouldListBotInternalAndNonInternalTopics() { + void shouldListBothInternalAndNonInternalTopics() { init( IntStream.rangeClosed(1, 100).boxed() .map(Objects::toString) @@ -177,7 +177,7 @@ } @Test - public void shouldListOnlyNonInternalTopics() { + void shouldListOnlyNonInternalTopics() { init( IntStream.rangeClosed(1, 100).boxed() @@ -198,7 +198,7 @@ } @Test - public void shouldListOnlyTopicsContainingOne() { + void shouldListOnlyTopicsContainingOne() { init( IntStream.rangeClosed(1, 100).boxed() @@ -219,7 +219,7 @@ } @Test - public void shouldListTopicsOrderedByPartitionsCount() { + void shouldListTopicsOrderedByPartitionsCount() { Map internalTopics = IntStream.rangeClosed(1, 100).boxed() .map(i -> new TopicDescription(UUID.randomUUID().toString(), false, IntStream.range(0, i) diff --git a/api/src/test/java/io/kafbat/ui/service/acl/AclsServiceTest.java b/api/src/test/java/io/kafbat/ui/service/acl/AclsServiceTest.java index cfa46d1eb..109683014 100644 --- a/api/src/test/java/io/kafbat/ui/service/acl/AclsServiceTest.java +++ b/api/src/test/java/io/kafbat/ui/service/acl/AclsServiceTest.java @@ -58,11 +58,11 @@ void testSyncAclWithAclCsv() { when(adminClientMock.listAcls(ResourcePatternFilter.ANY)) .thenReturn(Mono.just(List.of(existingBinding1, existingBinding2))); - ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class); + ArgumentCaptor<Collection<AclBinding>> createdCaptor = captor(); when(adminClientMock.createAcls(createdCaptor.capture())) .thenReturn(Mono.empty()); - ArgumentCaptor<Collection<AclBinding>> deletedCaptor = ArgumentCaptor.forClass(Collection.class); + ArgumentCaptor<Collection<AclBinding>> deletedCaptor = captor(); when(adminClientMock.deleteAcls(deletedCaptor.capture())) .thenReturn(Mono.empty()); @@ -87,7 +87,7 @@ @Test void createsConsumerDependantAcls() { - ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class); + ArgumentCaptor<Collection<AclBinding>> createdCaptor = captor(); when(adminClientMock.createAcls(createdCaptor.capture())) .thenReturn(Mono.empty()); @@ -135,7 +135,7 @@ @Test void createsConsumerDependantAclsWhenTopicsAndGroupsSpecifiedByPrefix() { - ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class); + ArgumentCaptor<Collection<AclBinding>> createdCaptor = captor(); when(adminClientMock.createAcls(createdCaptor.capture())) .thenReturn(Mono.empty()); @@ -171,7 +171,7 @@ @Test void createsProducerDependantAcls() { - ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class); + ArgumentCaptor<Collection<AclBinding>> createdCaptor = captor(); when(adminClientMock.createAcls(createdCaptor.capture())) .thenReturn(Mono.empty()); @@ -216,7 +216,7 @@ @Test void createsProducerDependantAclsWhenTopicsAndTxIdSpecifiedByPrefix() { - ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class); + ArgumentCaptor<Collection<AclBinding>>
createdCaptor = captor(); when(adminClientMock.createAcls(createdCaptor.capture())) .thenReturn(Mono.empty()); @@ -258,7 +258,7 @@ void createsProducerDependantAclsWhenTopicsAndTxIdSpecifiedByPrefix() { @Test void createsStreamAppDependantAcls() { - ArgumentCaptor<Collection<AclBinding>> createdCaptor = ArgumentCaptor.forClass(Collection.class); + ArgumentCaptor<Collection<AclBinding>> createdCaptor = captor(); when(adminClientMock.createAcls(createdCaptor.capture())) .thenReturn(Mono.empty()); @@ -296,4 +296,9 @@ void createsStreamAppDependantAcls() { new ResourcePattern(ResourceType.TOPIC, "appId1", PatternType.PREFIXED), new AccessControlEntry(principal, host, AclOperation.ALL, AclPermissionType.ALLOW))); } + + @SuppressWarnings("unchecked") + private ArgumentCaptor<Collection<AclBinding>> captor() { + return ArgumentCaptor.forClass(Collection.class); + } } diff --git a/api/src/test/java/io/kafbat/ui/service/analyze/TopicAnalysisServiceTest.java b/api/src/test/java/io/kafbat/ui/service/analyze/TopicAnalysisServiceTest.java index 5826f8f76..1c32b4215 100644 --- a/api/src/test/java/io/kafbat/ui/service/analyze/TopicAnalysisServiceTest.java +++ b/api/src/test/java/io/kafbat/ui/service/analyze/TopicAnalysisServiceTest.java @@ -29,21 +29,19 @@ void savesResultWhenAnalysisIsCompleted() { createTopic(new NewTopic(topic, 2, (short) 1)); fillTopic(topic, 1_000); - var cluster = clustersStorage.getClusterByName(LOCAL).get(); + var cluster = clustersStorage.getClusterByName(LOCAL).orElseThrow(); topicAnalysisService.analyze(cluster, topic).block(); Awaitility.await() .atMost(Duration.ofSeconds(20)) - .untilAsserted(() -> { - assertThat(topicAnalysisService.getTopicAnalysis(cluster, topic)) - .hasValueSatisfying(state -> { - assertThat(state.getProgress()).isNull(); - assertThat(state.getResult()).isNotNull(); - var completedAnalyze = state.getResult(); - assertThat(completedAnalyze.getTotalStats().getTotalMsgs()).isEqualTo(1_000); - assertThat(completedAnalyze.getPartitionStats().size()).isEqualTo(2); - }); - }); + .untilAsserted(() -> assertThat(topicAnalysisService.getTopicAnalysis(cluster, topic)) + .hasValueSatisfying(state -> { + assertThat(state.getProgress()).isNull(); + assertThat(state.getResult()).isNotNull(); + var completedAnalyze = state.getResult(); + assertThat(completedAnalyze.getTotalStats().getTotalMsgs()).isEqualTo(1_000); + assertThat(completedAnalyze.getPartitionStats().size()).isEqualTo(2); + })); } private void fillTopic(String topic, int cnt) { diff --git a/api/src/test/java/io/kafbat/ui/service/audit/AuditIntegrationTest.java b/api/src/test/java/io/kafbat/ui/service/audit/AuditIntegrationTest.java index 8f3221289..ebff4bcd6 100644 --- a/api/src/test/java/io/kafbat/ui/service/audit/AuditIntegrationTest.java +++ b/api/src/test/java/io/kafbat/ui/service/audit/AuditIntegrationTest.java @@ -22,7 +22,7 @@ import org.springframework.test.web.reactive.server.WebTestClient; import org.testcontainers.shaded.org.awaitility.Awaitility; -public class AuditIntegrationTest extends AbstractIntegrationTest { +class AuditIntegrationTest extends AbstractIntegrationTest { @Autowired private WebTestClient webTestClient; diff --git a/api/src/test/java/io/kafbat/ui/service/integration/odd/ConnectorsExporterTest.java b/api/src/test/java/io/kafbat/ui/service/integration/odd/ConnectorsExporterTest.java index 6d5e0ae23..fb80338e7 100644 --- a/api/src/test/java/io/kafbat/ui/service/integration/odd/ConnectorsExporterTest.java +++ b/api/src/test/java/io/kafbat/ui/service/integration/odd/ConnectorsExporterTest.java @@ -88,6 +88,8 @@ void
exportsConnectorsAsDataTransformers() { .filteredOn(DataEntity::getOddrn, "//kafkaconnect/host/kconnect:8083/connectors/testSink") .singleElement() .satisfies(sink -> { + assertThat(sink.getMetadata()).isNotNull(); + assertThat(sink.getDataTransformer()).isNotNull(); assertThat(sink.getMetadata().get(0).getMetadata()) .containsOnlyKeys("type", "connector.class", "file", "topic"); assertThat(sink.getDataTransformer().getInputs()).contains( @@ -98,6 +100,8 @@ void exportsConnectorsAsDataTransformers() { .filteredOn(DataEntity::getOddrn, "//kafkaconnect/host/kconnect:8083/connectors/testSource") .singleElement() .satisfies(source -> { + assertThat(source.getMetadata()).isNotNull(); + assertThat(source.getDataTransformer()).isNotNull(); assertThat(source.getMetadata().get(0).getMetadata()) .containsOnlyKeys("type", "connector.class", "file", "topic"); assertThat(source.getDataTransformer().getOutputs()).contains( diff --git a/api/src/test/java/io/kafbat/ui/service/integration/odd/SchemaReferencesResolverTest.java b/api/src/test/java/io/kafbat/ui/service/integration/odd/SchemaReferencesResolverTest.java index 50505a2ca..54e750c48 100644 --- a/api/src/test/java/io/kafbat/ui/service/integration/odd/SchemaReferencesResolverTest.java +++ b/api/src/test/java/io/kafbat/ui/service/integration/odd/SchemaReferencesResolverTest.java @@ -1,6 +1,5 @@ package io.kafbat.ui.service.integration.odd; -import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; diff --git a/api/src/test/java/io/kafbat/ui/service/integration/odd/TopicsExporterTest.java b/api/src/test/java/io/kafbat/ui/service/integration/odd/TopicsExporterTest.java index b20b7a834..6bf6887a7 100644 --- a/api/src/test/java/io/kafbat/ui/service/integration/odd/TopicsExporterTest.java +++ b/api/src/test/java/io/kafbat/ui/service/integration/odd/TopicsExporterTest.java @@ -78,7 +78,7 @@ void doesNotExportTopicsWhichDontFitFiltrationRule() { assertThat(entityList.getItems()) .hasSize(1) - .allSatisfy(e -> e.getOddrn().contains("visible")); + .allSatisfy(e -> assertThat(e.getOddrn()).contains("visible")); }) .verifyComplete(); } diff --git a/api/src/test/java/io/kafbat/ui/service/masking/DataMaskingTest.java b/api/src/test/java/io/kafbat/ui/service/masking/DataMaskingTest.java index 8cc2e5c9d..cd0a6c02e 100644 --- a/api/src/test/java/io/kafbat/ui/service/masking/DataMaskingTest.java +++ b/api/src/test/java/io/kafbat/ui/service/masking/DataMaskingTest.java @@ -1,6 +1,5 @@ package io.kafbat.ui.service.masking; -import static org.mockito.Mockito.eq; import static org.mockito.Mockito.reset; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; @@ -57,14 +56,14 @@ void appliesMasksToJsonContainerArgsBasedOnTopicPatterns(String jsonObjOrArr) { var parsedJson = (ContainerNode) new JsonMapper().readTree(jsonObjOrArr); masking.getMaskingFunction(TOPIC, Serde.Target.KEY).apply(jsonObjOrArr); - verify(policy1).applyToJsonContainer(eq(parsedJson)); + verify(policy1).applyToJsonContainer(parsedJson); verifyNoInteractions(policy2, policy3); reset(policy1, policy2, policy3); masking.getMaskingFunction(TOPIC, Serde.Target.VALUE).apply(jsonObjOrArr); - verify(policy2).applyToJsonContainer(eq(parsedJson)); - verify(policy3).applyToJsonContainer(eq(policy2.applyToJsonContainer(parsedJson))); + verify(policy2).applyToJsonContainer(parsedJson); + verify(policy3).applyToJsonContainer(policy2.applyToJsonContainer(parsedJson)); verifyNoInteractions(policy1); } @@ -76,13 
+75,13 @@ void appliesMasksToJsonContainerArgsBasedOnTopicPatterns(String jsonObjOrArr) { }) void appliesFirstFoundMaskToStringArgsBasedOnTopicPatterns(String nonJsonObjOrArrString) { masking.getMaskingFunction(TOPIC, Serde.Target.KEY).apply(nonJsonObjOrArrString); - verify(policy1).applyToString(eq(nonJsonObjOrArrString)); + verify(policy1).applyToString(nonJsonObjOrArrString); verifyNoInteractions(policy2, policy3); reset(policy1, policy2, policy3); masking.getMaskingFunction(TOPIC, Serde.Target.VALUE).apply(nonJsonObjOrArrString); - verify(policy2).applyToString(eq(nonJsonObjOrArrString)); + verify(policy2).applyToString(nonJsonObjOrArrString); verifyNoInteractions(policy1, policy3); } diff --git a/api/src/test/java/io/kafbat/ui/service/quota/ClientQuotaServiceTest.java b/api/src/test/java/io/kafbat/ui/service/quota/ClientQuotaServiceTest.java index fe6fe988e..fddf040e9 100644 --- a/api/src/test/java/io/kafbat/ui/service/quota/ClientQuotaServiceTest.java +++ b/api/src/test/java/io/kafbat/ui/service/quota/ClientQuotaServiceTest.java @@ -6,6 +6,7 @@ import io.kafbat.ui.model.KafkaCluster; import io.kafbat.ui.service.ClustersStorage; import java.util.Map; +import java.util.Objects; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.CsvSource; @@ -21,7 +22,7 @@ class ClientQuotaServiceTest extends AbstractIntegrationTest { @BeforeEach void init() { - cluster = applicationContext.getBean(ClustersStorage.class).getClusterByName(LOCAL).get(); + cluster = applicationContext.getBean(ClustersStorage.class).getClusterByName(LOCAL).orElseThrow(); } @ParameterizedTest @@ -72,7 +73,7 @@ void createUpdateDelete(String user, String clientId, String ip) { } private boolean quotaRecordExists(ClientQuotaRecord rec) { - return quotaService.getAll(cluster).collectList().block().contains(rec); + return Objects.requireNonNull(quotaService.getAll(cluster).collectList().block()).contains(rec); } } diff --git a/api/src/test/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverterTest.java b/api/src/test/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverterTest.java index 299283aed..35c230f56 100644 --- a/api/src/test/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverterTest.java +++ b/api/src/test/java/io/kafbat/ui/util/jsonschema/AvroJsonSchemaConverterTest.java @@ -244,6 +244,48 @@ void testRecordReferences() { convertAndCompare(expectedJsonSchema, avroSchema); } + @Test + void testNullableUnionEnum() { + String avroSchema = + " {" + + " \"type\": \"record\"," + + " \"name\": \"Message\"," + + " \"namespace\": \"com.provectus.kafka\"," + + " \"fields\": [" + + " {" + + " \"name\": \"enum_nullable_union\"," + + " \"type\": [\"null\", {" + + " \"type\": \"enum\"," + + " \"name\": \"Suit\"," + + " \"symbols\": [\"SPADES\",\"HEARTS\",\"DIAMONDS\",\"CLUBS\"]" + + " }]" + + " }" + + " ]" + + " }"; + + String expectedJsonSchema = + "{\"$id\":\"http://example.com/Message\"," + + "\"$schema\":\"https://json-schema.org/draft/2020-12/schema\"," + + "\"type\":\"object\"," + + "\"properties\":{" + + "\"enum_nullable_union\":{" + + "\"oneOf\":[" + + "{\"type\":\"null\"}," + + "{\"type\":\"object\"," + + "\"properties\":{" + + "\"Suit\":{" + + "\"type\":\"string\"," + + "\"enum\":[\"SPADES\",\"HEARTS\",\"DIAMONDS\",\"CLUBS\"]" + + "}}}" + + "]" + + "}}," + + "\"definitions\":{" + + "\"com.provectus.kafka.Message\":{\"$ref\":\"#\"}" + + "}}"; + + convertAndCompare(expectedJsonSchema, avroSchema); + } + @SneakyThrows private void 
convertAndCompare(String expectedJsonSchema, String sourceAvroSchema) { var parseAvroSchema = new Schema.Parser().parse(sourceAvroSchema); diff --git a/api/src/test/java/io/kafbat/ui/util/jsonschema/JsonAvroConversionTest.java b/api/src/test/java/io/kafbat/ui/util/jsonschema/JsonAvroConversionTest.java index 5d3daf08e..01e31875e 100644 --- a/api/src/test/java/io/kafbat/ui/util/jsonschema/JsonAvroConversionTest.java +++ b/api/src/test/java/io/kafbat/ui/util/jsonschema/JsonAvroConversionTest.java @@ -304,6 +304,7 @@ var record = (GenericData.Record) convertJsonToAvro(jsonPayload, schema); assertThat(record.get("self_ref_map")) .isNotNull(); + @SuppressWarnings("unchecked") Map selfRefMapField = (Map) record.get("self_ref_map"); assertThat(selfRefMapField) .hasSize(1) @@ -699,6 +700,43 @@ void unionFieldWithInnerTypesNamesClash() { } + @Test + void unionNullableEnumField() { + var schema = createSchema( + """ + { + "type": "record", + "namespace": "com.test", + "name": "TestAvroRecord", + "fields": [ + { + "name": "enum_nullable_union", + "type" : [ "null", { + "type" : "enum", + "name" : "Suit", + "symbols" : ["SPADES", "HEARTS", "DIAMONDS", "CLUBS"] + } ] + } + ] + }""" + ); + + GenericData.Record inputRecord = new GenericData.Record(schema); + inputRecord.put("enum_nullable_union", + new GenericData.EnumSymbol( + schema.getField("enum_nullable_union").schema().getTypes().get(1), "SPADES")); + String expectedJsonWithEnum = """ + { + "enum_nullable_union": { "Suit": "SPADES"}\s + } + \s"""; + assertJsonsEqual(expectedJsonWithEnum, convertAvroToJson(inputRecord, schema)); + + GenericData.Record inputNullRecord = new GenericData.Record(schema); + inputNullRecord.put("enum_nullable_union", null); + assertJsonsEqual("{}", convertAvroToJson(inputNullRecord, schema)); + } + private Schema createSchema(String schema) { return new AvroSchema(schema).rawSchema(); } diff --git a/api/src/test/resources/application-rbac-ad.yml b/api/src/test/resources/application-rbac-ad.yml new file mode 100644 index 000000000..3b97d185f --- /dev/null +++ b/api/src/test/resources/application-rbac-ad.yml @@ -0,0 +1,23 @@ +auth: + type: LDAP +rbac: + roles: + - name: "roleName" + clusters: + - local + subjects: + - provider: ldap_ad + type: group + value: firstGroup + - provider: ldap_ad + type: group + value: secondGroup + - provider: ldap_ad + type: user + value: JackSmith + permissions: + - resource: applicationconfig + actions: all + - resource: topic + value: ".*" + actions: all diff --git a/api/src/test/resources/protobuf-serde/messagewithany.proto b/api/src/test/resources/protobuf-serde/messagewithany.proto new file mode 100644 index 000000000..5a4b0dd64 --- /dev/null +++ b/api/src/test/resources/protobuf-serde/messagewithany.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; +package test; + +import "google/protobuf/any.proto"; + +message MessageWithAny { + string name = 1; + google.protobuf.Any payload = 2; +} + +message PayloadMessage { + string id = 1; +} diff --git a/api/src/test/resources/roles_definition.yaml b/api/src/test/resources/roles_definition.yaml new file mode 100644 index 000000000..f9af4f507 --- /dev/null +++ b/api/src/test/resources/roles_definition.yaml @@ -0,0 +1,67 @@ +- name: 'admin' + subjects: + - provider: 'OAUTH' + value: 'ROLE-[A-Z]+' + type: 'role' + isRegex: 'true' + - provider: 'OAUTH_COGNITO' + value: 'ROLE-ADMIN' + type: 'group' + - provider: 'OAUTH_GOOGLE' + type: 'domain' + value: 'memelord.lol' + clusters: + - local + - remote + permissions: + - resource: APPLICATIONCONFIG + actions: [ 
all ] +- name: 'viewer' + subjects: + - provider: 'LDAP' + value: 'CS-XXX' + type: 'kafka-viewer' + - provider: 'OAUTH' + value: '.*@kafka.com' + type: 'user' + isRegex: 'true' + - provider: 'OAUTH_COGNITO' + value: '.*@kafka.com' + type: 'user' + isRegex: 'true' + - provider: 'OAUTH_GITHUB' + value: '.*@kafka.com' + type: 'user' + isRegex: 'true' + - provider: 'OAUTH_GOOGLE' + value: 'john@kafka.com' + type: 'user' + clusters: + - remote + permissions: + - resource: APPLICATIONCONFIG + actions: [ all ] +- name: 'editor' + subjects: + - provider: 'OAUTH' + value: 'ROLE_EDITOR' + type: 'role' + clusters: + - local + permissions: + - resource: APPLICATIONCONFIG + actions: [ all ] +- name: "no one's role" + subjects: + - provider: 'OAUTH' + value: '.*XXX' + type: 'role' + - provider: 'OAUTH_GITHUB' + value: '.*XXX' + type: 'user' + - provider: 'OAUTH_COGNITO' + value: '.*XXX' + type: 'user' + - provider: 'OAUTH_GOOGLE' + value: '.*XXX' + type: 'domain' diff --git a/contract/pom.xml b/contract/pom.xml index 8d7e76cea..55d86d40b 100644 --- a/contract/pom.xml +++ b/contract/pom.xml @@ -201,6 +201,7 @@ gen:sources + false diff --git a/contract/src/main/resources/swagger/kafbat-ui-api.yaml b/contract/src/main/resources/swagger/kafbat-ui-api.yaml index 5eede6cef..24530bcb4 100644 --- a/contract/src/main/resources/swagger/kafbat-ui-api.yaml +++ b/contract/src/main/resources/swagger/kafbat-ui-api.yaml @@ -31,7 +31,6 @@ paths: items: $ref: '#/components/schemas/Cluster' - /api/clusters/{clusterName}/cache: post: tags: @@ -54,7 +53,6 @@ paths: 404: description: Not found - /api/clusters/{clusterName}/brokers: get: tags: @@ -432,7 +430,6 @@ paths: 404: description: Not found - /api/clusters/{clusterName}/topics/{topicName}: get: tags: @@ -1568,7 +1565,7 @@ paths: post: tags: - Kafka Connect - summary: update connector state (restart, pause or resume) + summary: update connector state (restart, pause, stop or resume) operationId: updateConnectorState parameters: - name: clusterName @@ -1725,6 +1722,31 @@ paths: 200: description: OK + /api/clusters/{clusterName}/connects/{connectName}/connectors/{connectorName}/offsets: + delete: + tags: + - Kafka Connect + summary: reset the offsets for the specified connector + operationId: resetConnectorOffsets + parameters: + - name: clusterName + in: path + required: true + schema: + type: string + - name: connectName + in: path + required: true + schema: + type: string + - name: connectorName + in: path + required: true + schema: + type: string + responses: + 200: + description: OK /api/clusters/{clusterName}/ksql/v2: post: @@ -2150,7 +2172,7 @@ paths: get: tags: - Authorization - summary: Get user authentication related info + summary: Get user authorization related info operationId: getUserAuthInfo responses: 200: @@ -2220,7 +2242,6 @@ paths: schema: $ref: '#/components/schemas/ApplicationConfigValidation' - /api/config/relatedfiles: post: tags: @@ -2244,6 +2265,43 @@ paths: schema: $ref: '#/components/schemas/UploadedFileInfo' + /api/config/authentication: + get: + tags: + - ApplicationConfig + summary: Get authentication methods enabled for the app and other related settings + operationId: getAuthenticationSettings + responses: + 200: + description: OK + content: + application/json: + schema: + $ref: '#/components/schemas/AppAuthenticationSettings' + + /login: + post: + tags: + - Unmapped + summary: Authenticate + operationId: authenticate + requestBody: + required: true + content: + application/x-www-form-urlencoded: + schema: + type: object + properties: + 
username: + type: string + password: + type: string + responses: + '200': + description: OK + '401': + description: Unauthorized + components: schemas: TopicSerdeSuggestion: @@ -2354,6 +2412,32 @@ components: htmlUrl: type: string + AppAuthenticationSettings: + type: object + properties: + authType: + $ref: '#/components/schemas/AuthType' + oAuthProviders: + type: array + items: + $ref: '#/components/schemas/OAuthProvider' + + OAuthProvider: + type: object + properties: + clientName: + type: string + authorizationUri: + type: string + + AuthType: + type: string + enum: + - DISABLED + - OAUTH2 + - LOGIN_FORM + - LDAP + Cluster: type: object properties: @@ -3508,6 +3592,7 @@ components: - RESTART_FAILED_TASKS - PAUSE - RESUME + - STOP TaskAction: type: string @@ -3779,6 +3864,7 @@ components: - DYNAMIC_BROKER_LOGGER_CONFIG - DYNAMIC_BROKER_CONFIG - DYNAMIC_DEFAULT_BROKER_CONFIG + - DYNAMIC_CLIENT_METRICS_CONFIG - STATIC_BROKER_CONFIG - DEFAULT_CONFIG - UNKNOWN @@ -3893,7 +3979,16 @@ components: KafkaAcl: type: object - required: [resourceType, resourceName, namePatternType, principal, host, operation, permission] + required: + [ + resourceType, + resourceName, + namePatternType, + principal, + host, + operation, + permission, + ] properties: resourceType: $ref: '#/components/schemas/KafkaAclResourceType' @@ -4182,6 +4277,10 @@ components: type: string truststorePassword: type: string + verifySsl: + type: boolean + description: Skip SSL verification for the host. + default: true schemaRegistry: type: string schemaRegistryAuth: diff --git a/contract/src/main/resources/swagger/kafka-connect-api.yaml b/contract/src/main/resources/swagger/kafka-connect-api.yaml index e014d5529..5fa8dc230 100644 --- a/contract/src/main/resources/swagger/kafka-connect-api.yaml +++ b/contract/src/main/resources/swagger/kafka-connect-api.yaml @@ -144,6 +144,42 @@ paths: 500: description: Internal server error + /connectors/{connector}/offsets: + delete: + tags: + - KafkaConnectClient + summary: Reset the offsets for the specified connector + operationId: resetConnectorOffsets + parameters: + - in: path + name: connector + required: true + schema: + type: string + responses: + 200: + description: OK + 400: + description: Bad request + content: + application/json: + schema: + $ref: '#/components/schemas/ConnectorOffsetsError' + + get: + tags: + - KafkaConnectClient + summary: Get the offsets for the specified connector + operationId: getConnectorOffsets + parameters: + - in: path + name: connector + required: true + schema: + type: string + responses: + 200: + description: OK /connectors/{connectorName}/status: get: @@ -230,6 +266,22 @@ paths: 202: description: Accepted + /connectors/{connectorName}/stop: + put: + tags: + - KafkaConnectClient + summary: stop the connector + operationId: stopConnector + parameters: + - name: connectorName + in: path + required: true + schema: + type: string + responses: + 204: + description: No Content + /connectors/{connectorName}/tasks: get: tags: @@ -432,6 +484,14 @@ components: trace: type: string + ConnectorOffsetsError: + type: object + properties: + error_code: + type: number + message: + type: string + ConnectorStatus: type: object properties: diff --git a/contract/src/main/resources/swagger/kafka-sr-api.yaml b/contract/src/main/resources/swagger/kafka-sr-api.yaml index 0320e891e..2b082d689 100644 --- a/contract/src/main/resources/swagger/kafka-sr-api.yaml +++ b/contract/src/main/resources/swagger/kafka-sr-api.yaml @@ -165,7 +165,7 @@ paths: schema: $ref: 
'#/components/schemas/SubjectId' - /config/: + /config: get: tags: - KafkaSrClient diff --git a/documentation/compose/DOCKER_COMPOSE.md b/documentation/compose/DOCKER_COMPOSE.md index 57a5cf4d0..186c15c45 100644 --- a/documentation/compose/DOCKER_COMPOSE.md +++ b/documentation/compose/DOCKER_COMPOSE.md @@ -1,7 +1,7 @@ # Descriptions of docker-compose configurations (*.yaml) 1. [kafka-ui.yaml](./kafbat-ui.yaml) - Default configuration with 2 kafka clusters with two nodes of Schema Registry, one kafka-connect and a few dummy topics. -2. [kafka-ui-arm64.yaml](../../.dev/dev_arm64.yaml) - Default configuration for ARM64(Mac M1) architecture with 1 kafka cluster without zookeeper with one node of Schema Registry, one kafka-connect and a few dummy topics. +2. [dev.yaml](../../.dev/dev.yaml) - Default configuration with 1 kafka cluster (without zookeeper), one node of Schema Registry, one kafka-connect and a few dummy topics. 3. [kafka-ui-ssl.yml](./kafka-ssl.yml) - Connect to Kafka via TLS/SSL 4. [kafka-cluster-sr-auth.yaml](./cluster-sr-auth.yaml) - Schema registry with authentication. 5. [kafka-ui-auth-context.yaml](./auth-context.yaml) - Basic (username/password) authentication with custom path (URL) (issue 861). diff --git a/documentation/compose/auth-context.yaml b/documentation/compose/auth-context.yaml index f2587255c..d8d465e51 100644 --- a/documentation/compose/auth-context.yaml +++ b/documentation/compose/auth-context.yaml @@ -19,7 +19,7 @@ services: SPRING_SECURITY_USER_PASSWORD: pass kafka: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka container_name: kafka ports: @@ -42,6 +42,4 @@ services: KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT' KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' - volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' diff --git a/documentation/compose/cluster-sr-auth.yaml b/documentation/compose/cluster-sr-auth.yaml index 5845d1e66..78e80b4f1 100644 --- a/documentation/compose/cluster-sr-auth.yaml +++ b/documentation/compose/cluster-sr-auth.yaml @@ -3,7 +3,7 @@ version: '2' services: kafka1: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka1 container_name: kafka1 ports: @@ -26,12 +26,10 @@ services: KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT' KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' - volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - command: "bash -c 'if [ !
-f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' schemaregistry1: - image: confluentinc/cp-schema-registry:7.2.1 + image: confluentinc/cp-schema-registry:7.8.0 ports: - 18085:8085 depends_on: @@ -55,7 +53,7 @@ services: SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas kafka-init-topics: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 volumes: - ./data/message.json:/data/message.json depends_on: diff --git a/documentation/compose/connectors/start.sh b/documentation/compose/connectors/start.sh index 7adc5e4e1..092267591 100755 --- a/documentation/compose/connectors/start.sh +++ b/documentation/compose/connectors/start.sh @@ -1,4 +1,4 @@ -#! /bin/bash +#! /bin/sh while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' kafka-connect0:8083)" != "200" ]] do sleep 5 done diff --git a/documentation/compose/e2e-tests.yaml b/documentation/compose/e2e-tests.yaml index b8e746867..cdece30ce 100644 --- a/documentation/compose/e2e-tests.yaml +++ b/documentation/compose/e2e-tests.yaml @@ -29,7 +29,7 @@ services: KAFKA_CLUSTERS_0_KSQLDBSERVER: http://ksqldb:8088 kafka0: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka0 container_name: kafka0 healthcheck: @@ -58,12 +58,10 @@ services: KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT' KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' - volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' schemaregistry0: - image: confluentinc/cp-schema-registry:7.2.1 + image: confluentinc/cp-schema-registry:7.8.0 ports: - 8085:8085 depends_on: @@ -88,7 +86,7 @@ services: build: context: ./kafka-connect args: - image: confluentinc/cp-kafka-connect:6.0.1 + image: confluentinc/cp-kafka-connect:7.8.0 ports: - 8083:8083 depends_on: @@ -122,7 +120,7 @@ services: # AWS_SECRET_ACCESS_KEY: "" kafka-init-topics: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 volumes: - ./data/message.json:/data/message.json depends_on: @@ -151,7 +149,7 @@ services: POSTGRES_PASSWORD: '12345' create-connectors: - image: ellerbrock/alpine-bash-curl-ssl + image: curlimages/curl depends_on: postgres-db: condition: service_healthy @@ -159,10 +157,10 @@ services: condition: service_healthy volumes: - ./connectors:/connectors - command: bash -c '/connectors/start.sh' + command: sh -c '/connectors/start.sh' ksqldb: - image: confluentinc/ksqldb-server:0.18.0 + image: confluentinc/cp-ksqldb-server:7.8.0 healthcheck: test: [ "CMD", "timeout", "1", "curl", "--silent", "--fail", "http://localhost:8088/info" ] interval: 30s diff --git a/documentation/compose/kafbat-ui.yaml b/documentation/compose/kafbat-ui.yaml index 8848d6e5f..afbe0d26c 100644 --- a/documentation/compose/kafbat-ui.yaml +++ b/documentation/compose/kafbat-ui.yaml @@ -27,7 +27,7 @@ services: DYNAMIC_CONFIG_ENABLED: 'true' kafka0: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka0 container_name: kafka0 ports: @@ -50,12 +50,10 @@ services: KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT' KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' 
KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' - volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' kafka1: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka1 container_name: kafka1 ports: @@ -78,12 +76,10 @@ services: KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT' KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' - volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" + CLUSTER_ID: 'zlFiTJelTOuhnklFwLWixw' schemaregistry0: - image: confluentinc/cp-schema-registry:7.2.1 + image: confluentinc/cp-schema-registry:7.8.0 ports: - 8085:8085 depends_on: @@ -99,7 +95,7 @@ services: SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas schemaregistry1: - image: confluentinc/cp-schema-registry:7.2.1 + image: confluentinc/cp-schema-registry:7.8.0 ports: - 18085:8085 depends_on: @@ -115,7 +111,7 @@ services: SCHEMA_REGISTRY_KAFKASTORE_TOPIC: _schemas kafka-connect0: - image: confluentinc/cp-kafka-connect:7.2.1 + image: confluentinc/cp-kafka-connect:7.8.0 ports: - 8083:8083 depends_on: @@ -140,7 +136,7 @@ services: CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components" kafka-init-topics: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 volumes: - ./data/message.json:/data/message.json depends_on: diff --git a/documentation/compose/kafka-ssl-components.yaml b/documentation/compose/kafka-ssl-components.yaml index e1be6999d..e616a52e1 100644 --- a/documentation/compose/kafka-ssl-components.yaml +++ b/documentation/compose/kafka-ssl-components.yaml @@ -39,7 +39,7 @@ services: - ./ssl/kafka.keystore.jks:/kafka.keystore.jks kafka0: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka0 container_name: kafka0 ports: @@ -72,15 +72,14 @@ services: #KAFKA_SSL_CLIENT_AUTH: 'required' KAFKA_SSL_CLIENT_AUTH: 'requested' KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: '' # COMMON NAME VERIFICATION IS DISABLED SERVER-SIDE + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - ./ssl/creds:/etc/kafka/secrets/creds - ./ssl/kafka.truststore.jks:/etc/kafka/secrets/kafka.truststore.jks - ./ssl/kafka.keystore.jks:/etc/kafka/secrets/kafka.keystore.jks - command: "bash -c 'if [ ! 
-f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" schemaregistry0: - image: confluentinc/cp-schema-registry:7.2.1 + image: confluentinc/cp-schema-registry:7.8.0 depends_on: - kafka0 environment: @@ -111,7 +110,7 @@ services: - ./ssl/kafka.keystore.jks:/kafka.keystore.jks kafka-connect0: - image: confluentinc/cp-kafka-connect:7.2.1 + image: confluentinc/cp-kafka-connect:7.8.0 ports: - 8083:8083 depends_on: diff --git a/documentation/compose/kafka-ssl.yml b/documentation/compose/kafka-ssl.yml index 2adb0c12b..5680a5cc6 100644 --- a/documentation/compose/kafka-ssl.yml +++ b/documentation/compose/kafka-ssl.yml @@ -22,7 +22,7 @@ services: - ./ssl/kafka.keystore.jks:/kafka.keystore.jks kafka: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka container_name: kafka ports: @@ -55,9 +55,8 @@ services: #KAFKA_SSL_CLIENT_AUTH: 'required' KAFKA_SSL_CLIENT_AUTH: 'requested' KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: '' # COMMON NAME VERIFICATION IS DISABLED SERVER-SIDE + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - ./ssl/creds:/etc/kafka/secrets/creds - ./ssl/kafka.truststore.jks:/etc/kafka/secrets/kafka.truststore.jks - ./ssl/kafka.keystore.jks:/etc/kafka/secrets/kafka.keystore.jks - command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" diff --git a/documentation/compose/kafka-zookeeper.yaml b/documentation/compose/kafka-zookeeper.yaml index 7342a9763..20015f19f 100644 --- a/documentation/compose/kafka-zookeeper.yaml +++ b/documentation/compose/kafka-zookeeper.yaml @@ -3,7 +3,7 @@ version: '2' services: zookeeper: - image: confluentinc/cp-zookeeper:7.2.1 + image: confluentinc/cp-zookeeper:7.8.0 hostname: zookeeper container_name: zookeeper ports: @@ -13,7 +13,7 @@ services: ZOOKEEPER_TICK_TIME: 2000 kafka: - image: confluentinc/cp-server:7.2.1 + image: confluentinc/cp-server:7.8.0 hostname: kafka container_name: kafka depends_on: @@ -36,7 +36,7 @@ services: KAFKA_JMX_HOSTNAME: kafka kafka-init-topics: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 volumes: - ./data/message.json:/data/message.json depends_on: diff --git a/documentation/compose/ui-acl-with-zk.yaml b/documentation/compose/ui-acl-with-zk.yaml index 97aad1791..9572e0c8c 100644 --- a/documentation/compose/ui-acl-with-zk.yaml +++ b/documentation/compose/ui-acl-with-zk.yaml @@ -27,7 +27,7 @@ services: - 2181:2181 kafka: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka container_name: kafka ports: @@ -55,5 +55,4 @@ services: KAFKA_SECURITY_PROTOCOL: 'SASL_PLAINTEXT' KAFKA_SUPER_USERS: 'User:admin' volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - ./jaas:/etc/kafka/jaas diff --git a/documentation/compose/ui-connectors-auth.yaml b/documentation/compose/ui-connectors-auth.yaml index a4c51f10f..723fa1534 100644 --- a/documentation/compose/ui-connectors-auth.yaml +++ b/documentation/compose/ui-connectors-auth.yaml @@ -21,7 +21,7 @@ services: KAFKA_CLUSTERS_0_KAFKACONNECT_0_PASSWORD: admin-secret kafka0: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka0 container_name: kafka0 ports: @@ -45,12 +45,10 @@ services: 
KAFKA_INTER_BROKER_LISTENER_NAME: "PLAINTEXT" KAFKA_CONTROLLER_LISTENER_NAMES: "CONTROLLER" KAFKA_LOG_DIRS: "/tmp/kraft-combined-logs" - volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - command: 'bash -c ''if [ ! -f /tmp/update_run.sh ]; then echo "ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi''' + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' schemaregistry0: - image: confluentinc/cp-schema-registry:7.2.1 + image: confluentinc/cp-schema-registry:7.8.0 ports: - 8085:8085 depends_on: @@ -69,7 +67,7 @@ services: build: context: ./kafka-connect args: - image: confluentinc/cp-kafka-connect:7.2.1 + image: confluentinc/cp-kafka-connect:7.8.0 ports: - 8083:8083 depends_on: @@ -102,7 +100,7 @@ services: # AWS_SECRET_ACCESS_KEY: "" kafka-init-topics: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 volumes: - ./data/message.json:/data/message.json depends_on: diff --git a/documentation/compose/ui-jmx-secured.yml b/documentation/compose/ui-jmx-secured.yml index edb2439b1..531ec4368 100644 --- a/documentation/compose/ui-jmx-secured.yml +++ b/documentation/compose/ui-jmx-secured.yml @@ -27,7 +27,7 @@ services: - ./jmx/clientkeystore:/jmx/clientkeystore kafka0: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka0 container_name: kafka0 ports: @@ -49,6 +49,7 @@ services: KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT' KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' + CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk' # CHMOD 700 FOR JMXREMOTE.* FILES KAFKA_JMX_OPTS: >- -Dcom.sun.management.jmxremote @@ -69,5 +70,3 @@ services: - ./jmx/servertruststore:/jmx/servertruststore - ./jmx/jmxremote.password:/jmx/jmxremote.password - ./jmx/jmxremote.access:/jmx/jmxremote.access - - ./scripts/update_run.sh:/tmp/update_run.sh - command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'" diff --git a/documentation/compose/ui-ldap.yaml b/documentation/compose/ui-ldap.yaml index b7855dfdb..94a8200a2 100644 --- a/documentation/compose/ui-ldap.yaml +++ b/documentation/compose/ui-ldap.yaml @@ -34,7 +34,7 @@ services: - 10389:10389 kafka0: - image: confluentinc/cp-kafka:7.2.1 + image: confluentinc/cp-kafka:7.8.0 hostname: kafka0 container_name: kafka0 ports: @@ -58,12 +58,10 @@ services: KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT' KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' - volumes: - - ./scripts/update_run.sh:/tmp/update_run.sh - command: "bash -c 'if [ ! 
diff --git a/documentation/compose/ui-sasl.yaml b/documentation/compose/ui-sasl.yaml
index 8512ff9a1..89708a54c 100644
--- a/documentation/compose/ui-sasl.yaml
+++ b/documentation/compose/ui-sasl.yaml
@@ -18,7 +18,7 @@ services:
       DYNAMIC_CONFIG_ENABLED: true # not necessary for sasl auth, added for tests

   kafka:
-    image: confluentinc/cp-kafka:7.2.1
+    image: confluentinc/cp-kafka:7.8.0
     hostname: kafka
     container_name: kafka
     ports:
@@ -46,7 +46,6 @@ services:
       KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
       KAFKA_SECURITY_PROTOCOL: 'SASL_PLAINTEXT'
       KAFKA_SUPER_USERS: 'User:admin,User:enzo'
+      CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk'
     volumes:
-      - ./scripts/update_run.sh:/tmp/update_run.sh
       - ./jaas:/etc/kafka/jaas
-    command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
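ui-sasl.yaml mounts ./jaas into the broker, but the JAAS file itself sits outside this diff. For orientation only, a minimal PlainLoginModule setup for the two declared super users could look like the sketch below; the file name and the passwords are assumptions, and the broker would typically be pointed at the file with something like KAFKA_OPTS: -Djava.security.auth.login.config=/etc/kafka/jaas/kafka_server.conf.

KafkaServer {
  org.apache.kafka.common.security.plain.PlainLoginModule required
  username="admin"
  password="admin-secret"
  user_admin="admin-secret"
  user_enzo="enzo-secret";
};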
diff --git a/documentation/compose/ui-serdes.yaml b/documentation/compose/ui-serdes.yaml
index b4060059a..6ac5701de 100644
--- a/documentation/compose/ui-serdes.yaml
+++ b/documentation/compose/ui-serdes.yaml
@@ -68,7 +68,7 @@ services:
       - ./proto:/protofiles

   kafka0:
-    image: confluentinc/cp-kafka:7.2.1
+    image: confluentinc/cp-kafka:7.8.0
     hostname: kafka0
     container_name: kafka0
     ports:
@@ -92,12 +92,10 @@ services:
       KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
       KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
       KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
-    volumes:
-      - ./scripts/update_run.sh:/tmp/update_run.sh
-    command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /etc/confluent/docker/run ; fi'"
+      CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk'

   schemaregistry0:
-    image: confluentinc/cp-schema-registry:7.2.1
+    image: confluentinc/cp-schema-registry:7.8.0
     ports:
       - 8085:8085
     depends_on:
diff --git a/documentation/compose/ui-with-jmx-exporter.yaml b/documentation/compose/ui-with-jmx-exporter.yaml
index 3283e76e3..df781ca75 100644
--- a/documentation/compose/ui-with-jmx-exporter.yaml
+++ b/documentation/compose/ui-with-jmx-exporter.yaml
@@ -3,7 +3,7 @@ version: '2'
 services:

   kafka0:
-    image: confluentinc/cp-kafka:7.2.1
+    image: confluentinc/cp-kafka:7.8.0
     hostname: kafka0
     container_name: kafka0
     ports:
@@ -25,10 +25,10 @@ services:
       KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
       KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
       KAFKA_OPTS: -javaagent:/usr/share/jmx_exporter/jmx_prometheus_javaagent.jar=11001:/usr/share/jmx_exporter/kafka-broker.yml
+      CLUSTER_ID: 'MkU3OEVBNTcwNTJENDM2Qk'
     volumes:
       - ./jmx-exporter:/usr/share/jmx_exporter/
-      - ./scripts/update_run.sh:/tmp/update_run.sh
-    command: "bash -c 'if [ ! -f /tmp/update_run.sh ]; then echo \"ERROR: Did you forget the update_run.sh file that came with this docker-compose.yml file?\" && exit 1 ; else /tmp/update_run.sh && /usr/share/jmx_exporter/kafka-prepare-and-run ; fi'"
+    command: "bash -c /usr/share/jmx_exporter/kafka-prepare-and-run"

   kafbat-ui:
     container_name: kafbat-ui
diff --git a/e2e-tests/pom.xml b/e2e-tests/pom.xml
index c46bb78eb..33db9404d 100644
--- a/e2e-tests/pom.xml
+++ b/e2e-tests/pom.xml
@@ -12,12 +12,11 @@
     e2e-tests
-    3.3.1
     ${project.version}
-    17
-    3.2.3
-    3.2.5
-    1.9.9.1
+    21
+    3.5.1
+    3.5.1
+    1.9.21
     2.27.0
@@ -25,7 +24,7 @@
       org.apache.kafka
       kafka_2.13
-      ${kafka.version}
+      ${confluent.version}-ccs
     io.kafbat.ui
@@ -73,6 +72,10 @@
       webdrivermanager
       5.8.0
+
+      io.netty
+      netty-resolver-dns-native-macos
+
diff --git a/e2e-tests/src/main/java/io/kafbat/ui/screens/panels/NaviSideBar.java b/e2e-tests/src/main/java/io/kafbat/ui/screens/panels/NaviSideBar.java
index 6972f379d..b5dc2be8d 100644
--- a/e2e-tests/src/main/java/io/kafbat/ui/screens/panels/NaviSideBar.java
+++ b/e2e-tests/src/main/java/io/kafbat/ui/screens/panels/NaviSideBar.java
@@ -13,6 +13,7 @@
 import java.util.List;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
+import org.openqa.selenium.By;

 public class NaviSideBar extends BasePage {
@@ -45,7 +46,7 @@ public String getPagePath(MenuItem menuItem) {
   @Step
   public NaviSideBar openSideMenu(String clusterName, MenuItem menuItem) {
     WebUtil.clickByActions(expandCluster(clusterName).parent()
-        .$x(String.format(sideMenuOptionElementLocator, menuItem.getNaviTitle())));
+        .find(By.linkText(menuItem.getNaviTitle())));
     return this;
   }
diff --git a/e2e-tests/src/main/java/io/kafbat/ui/screens/topics/TopicSettingsTab.java b/e2e-tests/src/main/java/io/kafbat/ui/screens/topics/TopicSettingsTab.java
index ef32fe352..75e3fe8e7 100644
--- a/e2e-tests/src/main/java/io/kafbat/ui/screens/topics/TopicSettingsTab.java
+++ b/e2e-tests/src/main/java/io/kafbat/ui/screens/topics/TopicSettingsTab.java
@@ -9,10 +9,13 @@
 import io.qameta.allure.Step;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.NoSuchElementException;

 public class TopicSettingsTab extends BasePage {

   protected SelenideElement defaultValueColumnHeaderLocator = $x("//div[text() = 'Default Value']");
+  protected SelenideElement nextButton = $x("//button[contains(text(), 'Next')]");
+  protected SelenideElement previousButton = $x("//button[contains(text(), 'Previous')]");

   @Step
   public TopicSettingsTab waitUntilScreenReady() {
@@ -36,7 +39,25 @@ private TopicSettingsTab.SettingsGridItem getItemByKey(String key) {

   @Step
   public String getValueByKey(String key) {
-    return getItemByKey(key).getValue();
+    while (true) {
+      try {
+        String value = getItemByKey(key).getValue();
+        resetPageNavigation();
+        return value;
+      } catch (NoSuchElementException e) {
+        if (nextButton.isEnabled()) {
+          nextButton.click();
+        } else {
+          throw e;
+        }
+      }
+    }
+  }
+
+  private void resetPageNavigation() {
+    while (previousButton.isEnabled()) {
+      previousButton.click();
+    }
   }

   public static class SettingsGridItem extends BasePage {
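The reworked getValueByKey above pages through the Settings grid: on a miss it clicks Next until the key appears or paging runs out, and on a hit it rewinds with Previous so later lookups start from the first page again. From a caller's perspective the change is invisible; a usage sketch, with a hypothetical key and expected value:

    // Settings keys on any page of the grid can now be asserted directly.
    String policy = topicSettingsTab
        .waitUntilScreenReady()
        .getValueByKey("cleanup.policy");
    Assert.assertEquals(policy, "delete", "getValueByKey()");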
diff --git a/e2e-tests/src/test/java/io/kafbat/ui/smokesuite/brokers/BrokersTest.java b/e2e-tests/src/test/java/io/kafbat/ui/smokesuite/brokers/BrokersTest.java
index 64fb73cc4..ae5867bfc 100644
--- a/e2e-tests/src/test/java/io/kafbat/ui/smokesuite/brokers/BrokersTest.java
+++ b/e2e-tests/src/test/java/io/kafbat/ui/smokesuite/brokers/BrokersTest.java
@@ -139,7 +139,7 @@ public void brokersSourceInfoCheck() {
     Assert.assertEquals(sourceInfoTooltip, Common.BROKER_SOURCE_INFO_TOOLTIP, "getSourceInfoTooltipText()");
   }

-  @Test
+  @Test(enabled = false) // flaky, TODO issues/322
   public void brokersConfigEditCheck() {
     navigateToBrokersAndOpenDetails(DEFAULT_BROKER_ID);
     brokersDetails
diff --git a/etc/checkstyle/checkstyle.xml b/etc/checkstyle/checkstyle.xml
index 745f1bc36..5c493f90c 100644
--- a/etc/checkstyle/checkstyle.xml
+++ b/etc/checkstyle/checkstyle.xml
@@ -46,7 +46,12 @@
+
+
+
+
+
diff --git a/frontend/.nvmrc b/frontend/.nvmrc
index 860cc5000..dc0bb0f43 100644
--- a/frontend/.nvmrc
+++ b/frontend/.nvmrc
@@ -1 +1 @@
-v18.17.1
+v22.12.0
diff --git a/frontend/package.json b/frontend/package.json
index c4fef99db..6c55031f5 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -16,7 +16,7 @@
     "ajv": "8.8.2",
     "ajv-formats": "2.1.1",
     "json-schema-faker": "0.5.6",
-    "jsonpath-plus": "8.1.0",
+    "jsonpath-plus": "10.2.0",
     "lossless-json": "2.0.11",
     "pretty-ms": "7.0.1",
     "react": "18.2.0",
@@ -96,15 +96,15 @@
     "ts-node": "10.9.2",
     "ts-prune": "0.10.3",
     "typescript": "5.3.3",
-    "vite": "5.2.10",
+    "vite": "5.2.14",
     "vite-plugin-checker": "0.6.4",
     "vite-plugin-ejs": "1.7.0",
     "vite-tsconfig-paths": "4.3.2",
     "whatwg-fetch": "3.6.20"
   },
   "engines": {
-    "node": "v18.17.1",
-    "pnpm": "v9.11.0"
+    "node": "^22",
+    "pnpm": "^9"
   },
   "pnpm": {
     "overrides": {
@@ -116,7 +116,9 @@
       "follow-redirects@<1.15.4": ">=1.15.4",
       "json5@>=2.0.0 <2.2.2": ">=2.2.2",
       "semver@>=7.0.0 <7.5.2": ">=7.5.2",
-      "axios@>=0.8.1 <0.28.0": ">=0.28.0"
+      "axios@>=0.8.1 <0.28.0": ">=0.28.0",
+      "axios@>=1.3.2 <=1.7.3": ">=1.7.4",
+      "braces": "3.0.3"
     }
   }
 }
diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml
index a7ba33ed0..d23c01a88 100644
--- a/frontend/pnpm-lock.yaml
+++ b/frontend/pnpm-lock.yaml
@@ -14,6 +14,8 @@ overrides:
   json5@>=2.0.0 <2.2.2: '>=2.2.2'
   semver@>=7.0.0 <7.5.2: '>=7.5.2'
   axios@>=0.8.1 <0.28.0: '>=0.28.0'
+  axios@>=1.3.2 <=1.7.3: '>=1.7.4'
+  braces: 3.0.3

 importers:
@@ -56,8 +58,8 @@ importers:
       specifier: 0.5.6
       version: 0.5.6
     jsonpath-plus:
-      specifier: 8.1.0
-      version: 8.1.0
+      specifier: 10.2.0
+      version: 10.2.0
     lossless-json:
       specifier: 2.0.11
       version: 2.0.11
@@ -169,7 +171,7 @@ importers:
       version: 6.21.0(eslint@8.57.0)(typescript@5.3.3)
     '@vitejs/plugin-react-swc':
       specifier: 3.6.0
-      version: 3.6.0(vite@5.2.10(@types/node@20.11.17)(sass@1.66.1))
+      version: 3.6.0(vite@5.2.14(@types/node@20.11.17)(sass@1.66.1))
     dotenv:
       specifier: 16.4.5
       version: 16.4.5
@@ -178,10 +180,10 @@ importers:
       version: 8.57.0
     eslint-config-airbnb:
       specifier: 19.0.4
-      version: 19.0.4(eslint-plugin-import@2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0))(eslint-plugin-jsx-a11y@6.8.0(eslint@8.57.0))(eslint-plugin-react-hooks@4.6.0(eslint@8.57.0))(eslint-plugin-react@7.34.1(eslint@8.57.0))(eslint@8.57.0)
+      version: 19.0.4(eslint-plugin-import@2.29.1)(eslint-plugin-jsx-a11y@6.8.0(eslint@8.57.0))(eslint-plugin-react-hooks@4.6.0(eslint@8.57.0))(eslint-plugin-react@7.34.1(eslint@8.57.0))(eslint@8.57.0)
     eslint-config-airbnb-typescript:
       specifier: 18.0.0
-      version: 18.0.0(@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint@8.57.0)(typescript@5.3.3))(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-plugin-import@2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0))(eslint@8.57.0)
+      version:
18.0.0(@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint@8.57.0)(typescript@5.3.3))(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-plugin-import@2.29.1)(eslint@8.57.0) eslint-config-prettier: specifier: 9.1.0 version: 9.1.0(eslint@8.57.0) @@ -243,17 +245,17 @@ importers: specifier: 5.3.3 version: 5.3.3 vite: - specifier: 5.2.10 - version: 5.2.10(@types/node@20.11.17)(sass@1.66.1) + specifier: 5.2.14 + version: 5.2.14(@types/node@20.11.17)(sass@1.66.1) vite-plugin-checker: specifier: 0.6.4 - version: 0.6.4(eslint@8.57.0)(optionator@0.9.3)(typescript@5.3.3)(vite@5.2.10(@types/node@20.11.17)(sass@1.66.1)) + version: 0.6.4(eslint@8.57.0)(optionator@0.9.3)(typescript@5.3.3)(vite@5.2.14(@types/node@20.11.17)(sass@1.66.1)) vite-plugin-ejs: specifier: 1.7.0 - version: 1.7.0(vite@5.2.10(@types/node@20.11.17)(sass@1.66.1)) + version: 1.7.0(vite@5.2.14(@types/node@20.11.17)(sass@1.66.1)) vite-tsconfig-paths: specifier: 4.3.2 - version: 4.3.2(typescript@5.3.3)(vite@5.2.10(@types/node@20.11.17)(sass@1.66.1)) + version: 4.3.2(typescript@5.3.3)(vite@5.2.14(@types/node@20.11.17)(sass@1.66.1)) whatwg-fetch: specifier: 3.6.20 version: 3.6.20 @@ -666,6 +668,7 @@ packages: '@humanwhocodes/config-array@0.11.14': resolution: {integrity: sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==} engines: {node: '>=10.10.0'} + deprecated: Use @eslint/config-array instead '@humanwhocodes/module-importer@1.0.1': resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} @@ -673,6 +676,7 @@ packages: '@humanwhocodes/object-schema@2.0.3': resolution: {integrity: sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==} + deprecated: Use @eslint/object-schema instead '@isaacs/cliui@8.0.2': resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} @@ -789,6 +793,18 @@ packages: '@jridgewell/trace-mapping@0.3.9': resolution: {integrity: sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==} + '@jsep-plugin/assignment@1.3.0': + resolution: {integrity: sha512-VVgV+CXrhbMI3aSusQyclHkenWSAm95WaiKrMxRFam3JSUiIaQjoMIw2sEs/OX4XifnqeQUN4DYbJjlA8EfktQ==} + engines: {node: '>= 10.16.0'} + peerDependencies: + jsep: ^0.4.0||^1.0.0 + + '@jsep-plugin/regex@1.0.4': + resolution: {integrity: sha512-q7qL4Mgjs1vByCaTnDFcBnV9HS7GVPJX5vyVoCgZHNSC9rjwIlmbXG5sUuorR5ndfHAIlJ8pVStxvjXHbNvtUg==} + engines: {node: '>= 10.16.0'} + peerDependencies: + jsep: ^0.4.0||^1.0.0 + '@lukeed/csprng@1.1.0': resolution: {integrity: sha512-Z7C/xXCiGWsg0KuKsHTKJxbWhpI3Vs5GwLfOean7MGyVFGqdRgBbAjOCh6u4bbjPc/8MJ2pZmK/0DLdCbivLDA==} engines: {node: '>=8'} @@ -800,7 +816,7 @@ packages: resolution: {integrity: sha512-Z6GuOUdNQjP7FX+OuV2Ybyamse+/e0BFdTWBX5JxpBDKA+YkdLynDgG6HTF04zy6e9zPa19UX0WA2VDoehwhXQ==} peerDependencies: '@nestjs/common': ^7.0.0 || ^8.0.0 || ^9.0.0 || ^10.0.0 - axios: ^1.3.1 + axios: '>=1.7.4' rxjs: ^6.0.0 || ^7.0.0 '@nestjs/common@10.3.0': @@ -867,83 +883,98 @@ packages: resolution: {integrity: sha512-Quz1KOffeEf/zwkCBM3kBtH4ZoZ+pT3xIXBG4PPW/XFtDP7EGhtTiC2+gpL9GnR7+Qdet5Oa6cYSvwKYg6kN9Q==} engines: {node: '>=14.0.0'} - '@rollup/rollup-android-arm-eabi@4.16.1': - resolution: {integrity: sha512-92/y0TqNLRYOTXpm6Z7mnpvKAG9P7qmK7yJeRJSdzElNCUnsgbpAsGqerUboYRIQKzgfq4pWu9xVkgpWLfmNsw==} + 
'@rollup/rollup-android-arm-eabi@4.29.1': + resolution: {integrity: sha512-ssKhA8RNltTZLpG6/QNkCSge+7mBQGUqJRisZ2MDQcEGaK93QESEgWK2iOpIDZ7k9zPVkG5AS3ksvD5ZWxmItw==} cpu: [arm] os: [android] - '@rollup/rollup-android-arm64@4.16.1': - resolution: {integrity: sha512-ttWB6ZCfRLuDIUiE0yiu5gcqOsYjA5F7kEV1ggHMj20FwLZ8A1FMeahZJFl/pnOmcnD2QL0z4AcDuo27utGU8A==} + '@rollup/rollup-android-arm64@4.29.1': + resolution: {integrity: sha512-CaRfrV0cd+NIIcVVN/jx+hVLN+VRqnuzLRmfmlzpOzB87ajixsN/+9L5xNmkaUUvEbI5BmIKS+XTwXsHEb65Ew==} cpu: [arm64] os: [android] - '@rollup/rollup-darwin-arm64@4.16.1': - resolution: {integrity: sha512-QLDvPLetbqjHojTGFw9+nuSP3YY/iz2k1cep6crYlr97sS+ZJ0W43b8Z0zC00+lnFZj6JSNxiA4DjboNQMuh1A==} + '@rollup/rollup-darwin-arm64@4.29.1': + resolution: {integrity: sha512-2ORr7T31Y0Mnk6qNuwtyNmy14MunTAMx06VAPI6/Ju52W10zk1i7i5U3vlDRWjhOI5quBcrvhkCHyF76bI7kEw==} cpu: [arm64] os: [darwin] - '@rollup/rollup-darwin-x64@4.16.1': - resolution: {integrity: sha512-TAUK/D8khRrRIa1KwRzo8JNKk3tcqaeXWdtsiLgA8zmACWwlWLjPCJ4DULGHQrMkeBjp1Cd3Yuwx04lZgFx5Vg==} + '@rollup/rollup-darwin-x64@4.29.1': + resolution: {integrity: sha512-j/Ej1oanzPjmN0tirRd5K2/nncAhS9W6ICzgxV+9Y5ZsP0hiGhHJXZ2JQ53iSSjj8m6cRY6oB1GMzNn2EUt6Ng==} cpu: [x64] os: [darwin] - '@rollup/rollup-linux-arm-gnueabihf@4.16.1': - resolution: {integrity: sha512-KO+WGZjrh6zyFTD1alIFkfdtxf8B4BC+hqd3kBZHscPLvE5FR/6QKsyuCT0JlERxxYBSUKNUQ/UHyX5uwO1x2A==} + '@rollup/rollup-freebsd-arm64@4.29.1': + resolution: {integrity: sha512-91C//G6Dm/cv724tpt7nTyP+JdN12iqeXGFM1SqnljCmi5yTXriH7B1r8AD9dAZByHpKAumqP1Qy2vVNIdLZqw==} + cpu: [arm64] + os: [freebsd] + + '@rollup/rollup-freebsd-x64@4.29.1': + resolution: {integrity: sha512-hEioiEQ9Dec2nIRoeHUP6hr1PSkXzQaCUyqBDQ9I9ik4gCXQZjJMIVzoNLBRGet+hIUb3CISMh9KXuCcWVW/8w==} + cpu: [x64] + os: [freebsd] + + '@rollup/rollup-linux-arm-gnueabihf@4.29.1': + resolution: {integrity: sha512-Py5vFd5HWYN9zxBv3WMrLAXY3yYJ6Q/aVERoeUFwiDGiMOWsMs7FokXihSOaT/PMWUty/Pj60XDQndK3eAfE6A==} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm-musleabihf@4.16.1': - resolution: {integrity: sha512-NqxbllzIB1WoAo4ThUXVtd21iiM5IHMTTXmXySKBLVcZvkU0HIZmatlP7hLzb5yQubcmdIeWmncd2NdsjocEiw==} + '@rollup/rollup-linux-arm-musleabihf@4.29.1': + resolution: {integrity: sha512-RiWpGgbayf7LUcuSNIbahr0ys2YnEERD4gYdISA06wa0i8RALrnzflh9Wxii7zQJEB2/Eh74dX4y/sHKLWp5uQ==} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm64-gnu@4.16.1': - resolution: {integrity: sha512-snma5NvV8y7IECQ5rq0sr0f3UUu+92NVmG/913JXJMcXo84h9ak9TA5UI9Cl2XRM9j3m37QwDBtEYnJzRkSmxA==} + '@rollup/rollup-linux-arm64-gnu@4.29.1': + resolution: {integrity: sha512-Z80O+taYxTQITWMjm/YqNoe9d10OX6kDh8X5/rFCMuPqsKsSyDilvfg+vd3iXIqtfmp+cnfL1UrYirkaF8SBZA==} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-arm64-musl@4.16.1': - resolution: {integrity: sha512-KOvqGprlD84ueivhCi2flvcUwDRD20mAsE3vxQNVEI2Di9tnPGAfEu6UcrSPZbM+jG2w1oSr43hrPo0RNg6GGg==} + '@rollup/rollup-linux-arm64-musl@4.29.1': + resolution: {integrity: sha512-fOHRtF9gahwJk3QVp01a/GqS4hBEZCV1oKglVVq13kcK3NeVlS4BwIFzOHDbmKzt3i0OuHG4zfRP0YoG5OF/rA==} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-powerpc64le-gnu@4.16.1': - resolution: {integrity: sha512-/gsNwtiGLqYwN4vP+EIdUC6Q6LTlpupWqokqIndvZcjn9ig/5P01WyaYCU2wvfL/2Z82jp5kX8c1mDBOvCP3zg==} + '@rollup/rollup-linux-loongarch64-gnu@4.29.1': + resolution: {integrity: sha512-5a7q3tnlbcg0OodyxcAdrrCxFi0DgXJSoOuidFUzHZ2GixZXQs6Tc3CHmlvqKAmOs5eRde+JJxeIf9DonkmYkw==} + cpu: [loong64] + os: [linux] + + '@rollup/rollup-linux-powerpc64le-gnu@4.29.1': + resolution: {integrity: 
sha512-9b4Mg5Yfz6mRnlSPIdROcfw1BU22FQxmfjlp/CShWwO3LilKQuMISMTtAu/bxmmrE6A902W2cZJuzx8+gJ8e9w==} cpu: [ppc64] os: [linux] - '@rollup/rollup-linux-riscv64-gnu@4.16.1': - resolution: {integrity: sha512-uU8zuGkQfGqfD9w6VRJZI4IuG4JIfNxxJgEmLMAmPVHREKGsxFVfgHy5c6CexQF2vOfgjB33OsET3Vdn2lln9A==} + '@rollup/rollup-linux-riscv64-gnu@4.29.1': + resolution: {integrity: sha512-G5pn0NChlbRM8OJWpJFMX4/i8OEU538uiSv0P6roZcbpe/WfhEO+AT8SHVKfp8qhDQzaz7Q+1/ixMy7hBRidnQ==} cpu: [riscv64] os: [linux] - '@rollup/rollup-linux-s390x-gnu@4.16.1': - resolution: {integrity: sha512-lsjLtDgtcGFEuBP6yrXwkRN5/wKlvUZtfbKZZu0yaoNpiBL4epgnO21osAALIspVRnl4qZgyLFd8xjCYYWgwfw==} + '@rollup/rollup-linux-s390x-gnu@4.29.1': + resolution: {integrity: sha512-WM9lIkNdkhVwiArmLxFXpWndFGuOka4oJOZh8EP3Vb8q5lzdSCBuhjavJsw68Q9AKDGeOOIHYzYm4ZFvmWez5g==} cpu: [s390x] os: [linux] - '@rollup/rollup-linux-x64-gnu@4.16.1': - resolution: {integrity: sha512-N2ZizKhUryqqrMfdCnjhJhZRgv61C6gK+hwVtCIKC8ts8J+go+vqENnGexwg21nHIOvLN5mBM8a7DI2vlyIOPg==} + '@rollup/rollup-linux-x64-gnu@4.29.1': + resolution: {integrity: sha512-87xYCwb0cPGZFoGiErT1eDcssByaLX4fc0z2nRM6eMtV9njAfEE6OW3UniAoDhX4Iq5xQVpE6qO9aJbCFumKYQ==} cpu: [x64] os: [linux] - '@rollup/rollup-linux-x64-musl@4.16.1': - resolution: {integrity: sha512-5ICeMxqg66FrOA2AbnBQ2TJVxfvZsKLxmof0ibvPLaYtbsJqnTUtJOofgWb46Gjd4uZcA4rdsp4JCxegzQPqCg==} + '@rollup/rollup-linux-x64-musl@4.29.1': + resolution: {integrity: sha512-xufkSNppNOdVRCEC4WKvlR1FBDyqCSCpQeMMgv9ZyXqqtKBfkw1yfGMTUTs9Qsl6WQbJnsGboWCp7pJGkeMhKA==} cpu: [x64] os: [linux] - '@rollup/rollup-win32-arm64-msvc@4.16.1': - resolution: {integrity: sha512-1vIP6Ce02L+qWD7uZYRiFiuAJo3m9kARatWmFSnss0gZnVj2Id7OPUU9gm49JPGasgcR3xMqiH3fqBJ8t00yVg==} + '@rollup/rollup-win32-arm64-msvc@4.29.1': + resolution: {integrity: sha512-F2OiJ42m77lSkizZQLuC+jiZ2cgueWQL5YC9tjo3AgaEw+KJmVxHGSyQfDUoYR9cci0lAywv2Clmckzulcq6ig==} cpu: [arm64] os: [win32] - '@rollup/rollup-win32-ia32-msvc@4.16.1': - resolution: {integrity: sha512-Y3M92DcVsT6LoP+wrKpoUWPaazaP1fzbNkp0a0ZSj5Y//+pQVfVe/tQdsYQQy7dwXR30ZfALUIc9PCh9Izir6w==} + '@rollup/rollup-win32-ia32-msvc@4.29.1': + resolution: {integrity: sha512-rYRe5S0FcjlOBZQHgbTKNrqxCBUmgDJem/VQTCcTnA2KCabYSWQDrytOzX7avb79cAAweNmMUb/Zw18RNd4mng==} cpu: [ia32] os: [win32] - '@rollup/rollup-win32-x64-msvc@4.16.1': - resolution: {integrity: sha512-x0fvpHMuF7fK5r8oZxSi8VYXkrVmRgubXpO/wcf15Lk3xZ4Jvvh5oG+u7Su1776A7XzVKZhD2eRc4t7H50gL3w==} + '@rollup/rollup-win32-x64-msvc@4.29.1': + resolution: {integrity: sha512-+10CMg9vt1MoHj6x1pxyjPSMjHTIlqs8/tBztXvPAx24SKs9jwVnKqHJumlH/IzhaPUaj3T6T6wfZr8okdXaIg==} cpu: [x64] os: [win32] @@ -1156,8 +1187,8 @@ packages: '@types/babel__traverse@7.17.1': resolution: {integrity: sha512-kVzjari1s2YVi77D3w1yuvohV2idweYXMCDzqBiVNN63TcDWrIlTVOYpqVrvbbyOE/IyzBoTKF0fdnLPEORFxA==} - '@types/estree@1.0.5': - resolution: {integrity: sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==} + '@types/estree@1.0.6': + resolution: {integrity: sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==} '@types/eventsource@1.1.15': resolution: {integrity: sha512-XQmGcbnxUNa06HR3VBVkc9+A2Vpi9ZyLJcdS5dwaQQ/4ZMWFO+5c90FnMUpbtMZwB/FChoYHwuVg8TvkECacTA==} @@ -1481,8 +1512,8 @@ packages: resolution: {integrity: sha512-M0JtH+hlOL5pLQwHOLNYZaXuhqmvS8oExsqB1SBYgA4Dk7u/xx+YdGHXaK5pyUfed5mYXdlYiphWq3G8cRi5JQ==} engines: {node: '>=4'} - axios@1.6.8: - resolution: {integrity: 
sha512-v/ZHtJDU39mDpyBoFVkETcd/uNdxrWRrg3bKpOKzXFA6Bvqopts6ALSMU3y6ijYxbw2B+wPrIv46egTzJXCLGQ==} + axios@1.7.9: + resolution: {integrity: sha512-LhLcE7Hbiryz8oMDdDptSrWowmB4Bl6RCt6sIJKpRB4XtVf0iEgewX3au/pJqm+Py1kCASkb/FFKjxQaLtxJvw==} axobject-query@3.2.1: resolution: {integrity: sha512-jsyHu61e6N4Vbz/v18DHwWYKK0bSWLqn47eeDSKPB7m8tqMHF9YJ+mhIk2lVteyZrY8tnSj/jHOv4YiTCuCJgg==} @@ -1531,8 +1562,8 @@ packages: brace-expansion@2.0.1: resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} - braces@3.0.2: - resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} + braces@3.0.3: + resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} engines: {node: '>=8'} browser-process-hrtime@1.0.0: @@ -1722,8 +1753,8 @@ packages: create-require@1.1.1: resolution: {integrity: sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==} - cross-spawn@7.0.3: - resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} + cross-spawn@7.0.6: + resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} engines: {node: '>= 8'} css-color-keywords@1.0.0: @@ -2108,6 +2139,7 @@ packages: eslint@8.57.0: resolution: {integrity: sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + deprecated: This version is no longer supported. Please see https://eslint.org/version-support for other options. hasBin: true espree@9.6.1: @@ -2196,8 +2228,8 @@ packages: filelist@1.0.4: resolution: {integrity: sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==} - fill-range@7.0.1: - resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} + fill-range@7.1.1: + resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} engines: {node: '>=8'} find-up@4.1.0: @@ -2320,6 +2352,7 @@ packages: glob@7.2.3: resolution: {integrity: sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==} + deprecated: Glob versions prior to v9 are no longer supported globals@11.12.0: resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} @@ -2470,6 +2503,7 @@ packages: inflight@1.0.6: resolution: {integrity: sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==} + deprecated: This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful. 
inherits@2.0.4: resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} @@ -2869,6 +2903,10 @@ packages: canvas: optional: true + jsep@1.4.0: + resolution: {integrity: sha512-B7qPcEVE3NVkmSJbaYxvv4cHkVW7DQsZz13pUMrfS8z8Q/BuShN+gcTXrUlPiGqM2/t/EEaI030bpxMqY8gMlw==} + engines: {node: '>= 10.16.0'} + jsesc@2.5.2: resolution: {integrity: sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==} engines: {node: '>=4'} @@ -2909,15 +2947,15 @@ packages: jsonfile@6.1.0: resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} + jsonpath-plus@10.2.0: + resolution: {integrity: sha512-T9V+8iNYKFL2n2rF+w02LBOT2JjDnTjioaNFrxRy0Bv1y/hNsqR/EBK7Ojy2ythRHwmz2cRIls+9JitQGZC/sw==} + engines: {node: '>=18.0.0'} + hasBin: true + jsonpath-plus@7.2.0: resolution: {integrity: sha512-zBfiUPM5nD0YZSBT/o/fbCUlCcepMIdP0CJZxM1+KgA4f2T206f6VAg9e7mX35+KlMaIc5qXW34f3BnwJ3w+RA==} engines: {node: '>=12.0.0'} - jsonpath-plus@8.1.0: - resolution: {integrity: sha512-qVTiuKztFGw0dGhYi3WNqvddx3/SHtyDT0xJaeyz4uP0d1tkpG+0y5uYQ4OcIo1TLAz3PE/qDOW9F0uDt3+CTw==} - engines: {node: '>=14.0.0'} - hasBin: true - jsx-ast-utils@3.3.5: resolution: {integrity: sha512-ZZow9HBI5O6EPgSJLUb8n2NKgmVWTwCvHGwFuJlMjvLFqlGG6pjirPhtdsseaLZjSibD8eegzmYpUZwoIlj2cQ==} engines: {node: '>=4.0'} @@ -3011,8 +3049,8 @@ packages: resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} engines: {node: '>= 8'} - micromatch@4.0.5: - resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==} + micromatch@4.0.8: + resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} engines: {node: '>=8.6'} mime-db@1.52.0: @@ -3063,8 +3101,8 @@ packages: mute-stream@0.0.8: resolution: {integrity: sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==} - nanoid@3.3.7: - resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==} + nanoid@3.3.8: + resolution: {integrity: sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true @@ -3490,6 +3528,7 @@ packages: rimraf@3.0.2: resolution: {integrity: sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==} + deprecated: Rimraf versions prior to v4 are no longer supported hasBin: true rimraf@5.0.5: @@ -3497,8 +3536,8 @@ packages: engines: {node: '>=14'} hasBin: true - rollup@4.16.1: - resolution: {integrity: sha512-5CaD3MPDlPKfhqzRvWXK96G6ELJfPZNb3LHiZxTHgDdC6jvwfGz2E8nY+9g1ONk4ttHsK1WaFP19Js4PSr1E3g==} + rollup@4.29.1: + resolution: {integrity: sha512-RaJ45M/kmJUzSWDs1Nnd5DdV4eerC98idtUOVr6FfKcgxqvjwHmxc5upLF9qZU9EpsVzzhleFahrT3shLuJzIw==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true @@ -4008,8 +4047,8 @@ packages: vite: optional: true - vite@5.2.10: - resolution: {integrity: sha512-PAzgUZbP7msvQvqdSD+ErD5qGnSFiGOoWmV5yAKUEI0kdhjbH6nMWVyZQC/hSc4aXwc0oJ9aEdIiF9Oje0JFCw==} + vite@5.2.14: + resolution: {integrity: sha512-TFQLuwWLPms+NBNlh0D9LZQ+HXW471COABxw/9TEUBrjuHMo9BrYBPrN/SYAwIuVL+rLerycxiLT41t4f5MZpA==} engines: {node: ^18.0.0 || >=20.0.0} hasBin: true peerDependencies: @@ -4151,12 +4190,12 @@ packages: resolution: 
{integrity: sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==} engines: {node: ^12.13.0 || ^14.15.0 || >=16.0.0} - ws@8.8.0: - resolution: {integrity: sha512-JDAgSYQ1ksuwqfChJusw1LSJ8BizJ2e/vVu5Lxjq3YvNJNlROv1ui4i+c/kUUrPheBvQl4c5UbERhTwKa6QBJQ==} + ws@8.18.0: + resolution: {integrity: sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw==} engines: {node: '>=10.0.0'} peerDependencies: bufferutil: ^4.0.1 - utf-8-validate: ^5.0.2 + utf-8-validate: '>=5.0.2' peerDependenciesMeta: bufferutil: optional: true @@ -4676,7 +4715,7 @@ snapshots: jest-util: 29.7.0 jest-validate: 29.7.0 jest-watcher: 29.7.0 - micromatch: 4.0.5 + micromatch: 4.0.8 pretty-format: 29.7.0 slash: 3.0.0 strip-ansi: 6.0.1 @@ -4798,7 +4837,7 @@ snapshots: jest-haste-map: 29.7.0 jest-regex-util: 29.6.3 jest-util: 29.7.0 - micromatch: 4.0.5 + micromatch: 4.0.8 pirates: 4.0.5 slash: 3.0.0 write-file-atomic: 4.0.2 @@ -4841,14 +4880,22 @@ snapshots: '@jridgewell/resolve-uri': 3.1.1 '@jridgewell/sourcemap-codec': 1.4.15 + '@jsep-plugin/assignment@1.3.0(jsep@1.4.0)': + dependencies: + jsep: 1.4.0 + + '@jsep-plugin/regex@1.0.4(jsep@1.4.0)': + dependencies: + jsep: 1.4.0 + '@lukeed/csprng@1.1.0': {} '@microsoft/fetch-event-source@2.0.1': {} - '@nestjs/axios@3.0.2(@nestjs/common@10.3.0(reflect-metadata@0.1.13)(rxjs@7.8.1))(axios@1.6.8)(rxjs@7.8.1)': + '@nestjs/axios@3.0.2(@nestjs/common@10.3.0(reflect-metadata@0.1.13)(rxjs@7.8.1))(axios@1.7.9)(rxjs@7.8.1)': dependencies: '@nestjs/common': 10.3.0(reflect-metadata@0.1.13)(rxjs@7.8.1) - axios: 1.6.8 + axios: 1.7.9 rxjs: 7.8.1 '@nestjs/common@10.3.0(reflect-metadata@0.1.13)(rxjs@7.8.1)': @@ -4895,11 +4942,11 @@ snapshots: '@openapitools/openapi-generator-cli@2.13.4': dependencies: - '@nestjs/axios': 3.0.2(@nestjs/common@10.3.0(reflect-metadata@0.1.13)(rxjs@7.8.1))(axios@1.6.8)(rxjs@7.8.1) + '@nestjs/axios': 3.0.2(@nestjs/common@10.3.0(reflect-metadata@0.1.13)(rxjs@7.8.1))(axios@1.7.9)(rxjs@7.8.1) '@nestjs/common': 10.3.0(reflect-metadata@0.1.13)(rxjs@7.8.1) '@nestjs/core': 10.3.0(@nestjs/common@10.3.0(reflect-metadata@0.1.13)(rxjs@7.8.1))(reflect-metadata@0.1.13)(rxjs@7.8.1) '@nuxtjs/opencollective': 0.3.2 - axios: 1.6.8 + axios: 1.7.9 chalk: 4.1.2 commander: 8.3.0 compare-versions: 4.1.4 @@ -4930,52 +4977,61 @@ snapshots: '@remix-run/router@1.16.0': {} - '@rollup/rollup-android-arm-eabi@4.16.1': + '@rollup/rollup-android-arm-eabi@4.29.1': + optional: true + + '@rollup/rollup-android-arm64@4.29.1': optional: true - '@rollup/rollup-android-arm64@4.16.1': + '@rollup/rollup-darwin-arm64@4.29.1': optional: true - '@rollup/rollup-darwin-arm64@4.16.1': + '@rollup/rollup-darwin-x64@4.29.1': optional: true - '@rollup/rollup-darwin-x64@4.16.1': + '@rollup/rollup-freebsd-arm64@4.29.1': optional: true - '@rollup/rollup-linux-arm-gnueabihf@4.16.1': + '@rollup/rollup-freebsd-x64@4.29.1': optional: true - '@rollup/rollup-linux-arm-musleabihf@4.16.1': + '@rollup/rollup-linux-arm-gnueabihf@4.29.1': optional: true - '@rollup/rollup-linux-arm64-gnu@4.16.1': + '@rollup/rollup-linux-arm-musleabihf@4.29.1': optional: true - '@rollup/rollup-linux-arm64-musl@4.16.1': + '@rollup/rollup-linux-arm64-gnu@4.29.1': optional: true - '@rollup/rollup-linux-powerpc64le-gnu@4.16.1': + '@rollup/rollup-linux-arm64-musl@4.29.1': optional: true - '@rollup/rollup-linux-riscv64-gnu@4.16.1': + '@rollup/rollup-linux-loongarch64-gnu@4.29.1': optional: true - '@rollup/rollup-linux-s390x-gnu@4.16.1': + 
'@rollup/rollup-linux-powerpc64le-gnu@4.29.1': optional: true - '@rollup/rollup-linux-x64-gnu@4.16.1': + '@rollup/rollup-linux-riscv64-gnu@4.29.1': optional: true - '@rollup/rollup-linux-x64-musl@4.16.1': + '@rollup/rollup-linux-s390x-gnu@4.29.1': optional: true - '@rollup/rollup-win32-arm64-msvc@4.16.1': + '@rollup/rollup-linux-x64-gnu@4.29.1': optional: true - '@rollup/rollup-win32-ia32-msvc@4.16.1': + '@rollup/rollup-linux-x64-musl@4.29.1': optional: true - '@rollup/rollup-win32-x64-msvc@4.16.1': + '@rollup/rollup-win32-arm64-msvc@4.29.1': + optional: true + + '@rollup/rollup-win32-ia32-msvc@4.29.1': + optional: true + + '@rollup/rollup-win32-x64-msvc@4.29.1': optional: true '@sinclair/typebox@0.27.8': {} @@ -5174,7 +5230,7 @@ snapshots: dependencies: '@babel/types': 7.23.9 - '@types/estree@1.0.5': {} + '@types/estree@1.0.6': {} '@types/eventsource@1.1.15': {} @@ -5365,10 +5421,10 @@ snapshots: '@ungap/structured-clone@1.2.0': {} - '@vitejs/plugin-react-swc@3.6.0(vite@5.2.10(@types/node@20.11.17)(sass@1.66.1))': + '@vitejs/plugin-react-swc@3.6.0(vite@5.2.14(@types/node@20.11.17)(sass@1.66.1))': dependencies: '@swc/core': 1.3.107 - vite: 5.2.10(@types/node@20.11.17)(sass@1.66.1) + vite: 5.2.14(@types/node@20.11.17)(sass@1.66.1) transitivePeerDependencies: - '@swc/helpers' @@ -5568,7 +5624,7 @@ snapshots: axe-core@4.7.0: {} - axios@1.6.8: + axios@1.7.9: dependencies: follow-redirects: 1.15.6 form-data: 4.0.0 @@ -5653,9 +5709,9 @@ snapshots: dependencies: balanced-match: 1.0.2 - braces@3.0.2: + braces@3.0.3: dependencies: - fill-range: 7.0.1 + fill-range: 7.1.1 browser-process-hrtime@1.0.0: {} @@ -5730,7 +5786,7 @@ snapshots: chokidar@3.5.2: dependencies: anymatch: 3.1.2 - braces: 3.0.2 + braces: 3.0.3 glob-parent: 5.1.2 is-binary-path: 2.1.0 is-glob: 4.0.3 @@ -5847,7 +5903,7 @@ snapshots: create-require@1.1.1: {} - cross-spawn@7.0.3: + cross-spawn@7.0.6: dependencies: path-key: 3.1.1 shebang-command: 2.0.0 @@ -6223,7 +6279,7 @@ snapshots: optionalDependencies: source-map: 0.6.1 - eslint-config-airbnb-base@15.0.0(eslint-plugin-import@2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0))(eslint@8.57.0): + eslint-config-airbnb-base@15.0.0(eslint-plugin-import@2.29.1)(eslint@8.57.0): dependencies: confusing-browser-globals: 1.0.11 eslint: 8.57.0 @@ -6232,19 +6288,19 @@ snapshots: object.entries: 1.1.7 semver: 6.3.1 - eslint-config-airbnb-typescript@18.0.0(@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint@8.57.0)(typescript@5.3.3))(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-plugin-import@2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0))(eslint@8.57.0): + eslint-config-airbnb-typescript@18.0.0(@typescript-eslint/eslint-plugin@6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint@8.57.0)(typescript@5.3.3))(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-plugin-import@2.29.1)(eslint@8.57.0): dependencies: '@typescript-eslint/eslint-plugin': 6.21.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint@8.57.0)(typescript@5.3.3) '@typescript-eslint/parser': 6.21.0(eslint@8.57.0)(typescript@5.3.3) eslint: 8.57.0 - eslint-config-airbnb-base: 
15.0.0(eslint-plugin-import@2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0))(eslint@8.57.0) + eslint-config-airbnb-base: 15.0.0(eslint-plugin-import@2.29.1)(eslint@8.57.0) transitivePeerDependencies: - eslint-plugin-import - eslint-config-airbnb@19.0.4(eslint-plugin-import@2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0))(eslint-plugin-jsx-a11y@6.8.0(eslint@8.57.0))(eslint-plugin-react-hooks@4.6.0(eslint@8.57.0))(eslint-plugin-react@7.34.1(eslint@8.57.0))(eslint@8.57.0): + eslint-config-airbnb@19.0.4(eslint-plugin-import@2.29.1)(eslint-plugin-jsx-a11y@6.8.0(eslint@8.57.0))(eslint-plugin-react-hooks@4.6.0(eslint@8.57.0))(eslint-plugin-react@7.34.1(eslint@8.57.0))(eslint@8.57.0): dependencies: eslint: 8.57.0 - eslint-config-airbnb-base: 15.0.0(eslint-plugin-import@2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0))(eslint@8.57.0) + eslint-config-airbnb-base: 15.0.0(eslint-plugin-import@2.29.1)(eslint@8.57.0) eslint-plugin-import: 2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0) eslint-plugin-jsx-a11y: 6.8.0(eslint@8.57.0) eslint-plugin-react: 7.34.1(eslint@8.57.0) @@ -6269,7 +6325,7 @@ snapshots: debug: 4.3.4 enhanced-resolve: 5.15.0 eslint: 8.57.0 - eslint-module-utils: 2.8.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1)(eslint@8.57.0))(eslint@8.57.0) + eslint-module-utils: 2.8.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0) eslint-plugin-import: 2.29.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0) fast-glob: 3.3.2 get-tsconfig: 4.7.2 @@ -6281,7 +6337,7 @@ snapshots: - eslint-import-resolver-webpack - supports-color - eslint-module-utils@2.8.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1)(eslint@8.57.0))(eslint@8.57.0): + eslint-module-utils@2.8.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0): dependencies: debug: 3.2.7 optionalDependencies: @@ -6302,7 +6358,7 @@ snapshots: doctrine: 2.1.0 eslint: 8.57.0 eslint-import-resolver-node: 0.3.9 - eslint-module-utils: 2.8.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1)(eslint@8.57.0))(eslint@8.57.0) + eslint-module-utils: 2.8.0(@typescript-eslint/parser@6.21.0(eslint@8.57.0)(typescript@5.3.3))(eslint-import-resolver-node@0.3.9)(eslint-import-resolver-typescript@3.6.1)(eslint@8.57.0) hasown: 2.0.0 is-core-module: 2.13.1 is-glob: 4.0.3 @@ -6401,7 +6457,7 @@ snapshots: 
'@ungap/structured-clone': 1.2.0 ajv: 6.12.6 chalk: 4.1.2 - cross-spawn: 7.0.3 + cross-spawn: 7.0.6 debug: 4.3.4 doctrine: 3.0.0 escape-string-regexp: 4.0.0 @@ -6454,7 +6510,7 @@ snapshots: execa@5.1.1: dependencies: - cross-spawn: 7.0.3 + cross-spawn: 7.0.6 get-stream: 6.0.1 human-signals: 2.1.0 is-stream: 2.0.1 @@ -6490,7 +6546,7 @@ snapshots: '@nodelib/fs.walk': 1.2.8 glob-parent: 5.1.2 merge2: 1.4.1 - micromatch: 4.0.5 + micromatch: 4.0.8 fast-json-stable-stringify@2.1.0: {} @@ -6535,7 +6591,7 @@ snapshots: dependencies: minimatch: 5.1.2 - fill-range@7.0.1: + fill-range@7.1.1: dependencies: to-regex-range: 5.0.1 @@ -6564,7 +6620,7 @@ snapshots: foreground-child@3.1.1: dependencies: - cross-spawn: 7.0.3 + cross-spawn: 7.0.6 signal-exit: 4.1.0 form-data@4.0.0: @@ -7117,7 +7173,7 @@ snapshots: jest-runner: 29.7.0 jest-util: 29.7.0 jest-validate: 29.7.0 - micromatch: 4.0.5 + micromatch: 4.0.8 parse-json: 5.2.0 pretty-format: 29.7.0 slash: 3.0.0 @@ -7185,7 +7241,7 @@ snapshots: jest-regex-util: 29.6.3 jest-util: 29.7.0 jest-worker: 29.7.0 - micromatch: 4.0.5 + micromatch: 4.0.8 walker: 1.0.8 optionalDependencies: fsevents: 2.3.3 @@ -7209,7 +7265,7 @@ snapshots: '@types/stack-utils': 2.0.1 chalk: 4.1.2 graceful-fs: 4.2.10 - micromatch: 4.0.5 + micromatch: 4.0.8 pretty-format: 29.6.3 slash: 3.0.0 stack-utils: 2.0.5 @@ -7221,7 +7277,7 @@ snapshots: '@types/stack-utils': 2.0.1 chalk: 4.1.2 graceful-fs: 4.2.10 - micromatch: 4.0.5 + micromatch: 4.0.8 pretty-format: 29.7.0 slash: 3.0.0 stack-utils: 2.0.5 @@ -7461,13 +7517,15 @@ snapshots: whatwg-encoding: 2.0.0 whatwg-mimetype: 3.0.0 whatwg-url: 11.0.0 - ws: 8.8.0 + ws: 8.18.0 xml-name-validator: 4.0.0 transitivePeerDependencies: - bufferutil - supports-color - utf-8-validate + jsep@1.4.0: {} + jsesc@2.5.2: {} json-parse-even-better-errors@2.3.1: {} @@ -7503,9 +7561,13 @@ snapshots: optionalDependencies: graceful-fs: 4.2.10 - jsonpath-plus@7.2.0: {} + jsonpath-plus@10.2.0: + dependencies: + '@jsep-plugin/assignment': 1.3.0(jsep@1.4.0) + '@jsep-plugin/regex': 1.0.4(jsep@1.4.0) + jsep: 1.4.0 - jsonpath-plus@8.1.0: {} + jsonpath-plus@7.2.0: {} jsx-ast-utils@3.3.5: dependencies: @@ -7587,9 +7649,9 @@ snapshots: merge2@1.4.1: {} - micromatch@4.0.5: + micromatch@4.0.8: dependencies: - braces: 3.0.2 + braces: 3.0.3 picomatch: 2.3.1 mime-db@1.52.0: {} @@ -7626,7 +7688,7 @@ snapshots: mute-stream@0.0.8: {} - nanoid@3.3.7: {} + nanoid@3.3.8: {} natural-compare@1.4.0: {} @@ -7820,13 +7882,13 @@ snapshots: postcss@8.4.31: dependencies: - nanoid: 3.3.7 + nanoid: 3.3.8 picocolors: 1.0.0 source-map-js: 1.0.2 postcss@8.4.38: dependencies: - nanoid: 3.3.7 + nanoid: 3.3.8 picocolors: 1.0.0 source-map-js: 1.2.0 @@ -8054,26 +8116,29 @@ snapshots: dependencies: glob: 10.3.12 - rollup@4.16.1: + rollup@4.29.1: dependencies: - '@types/estree': 1.0.5 + '@types/estree': 1.0.6 optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.16.1 - '@rollup/rollup-android-arm64': 4.16.1 - '@rollup/rollup-darwin-arm64': 4.16.1 - '@rollup/rollup-darwin-x64': 4.16.1 - '@rollup/rollup-linux-arm-gnueabihf': 4.16.1 - '@rollup/rollup-linux-arm-musleabihf': 4.16.1 - '@rollup/rollup-linux-arm64-gnu': 4.16.1 - '@rollup/rollup-linux-arm64-musl': 4.16.1 - '@rollup/rollup-linux-powerpc64le-gnu': 4.16.1 - '@rollup/rollup-linux-riscv64-gnu': 4.16.1 - '@rollup/rollup-linux-s390x-gnu': 4.16.1 - '@rollup/rollup-linux-x64-gnu': 4.16.1 - '@rollup/rollup-linux-x64-musl': 4.16.1 - '@rollup/rollup-win32-arm64-msvc': 4.16.1 - '@rollup/rollup-win32-ia32-msvc': 4.16.1 - '@rollup/rollup-win32-x64-msvc': 
4.16.1 + '@rollup/rollup-android-arm-eabi': 4.29.1 + '@rollup/rollup-android-arm64': 4.29.1 + '@rollup/rollup-darwin-arm64': 4.29.1 + '@rollup/rollup-darwin-x64': 4.29.1 + '@rollup/rollup-freebsd-arm64': 4.29.1 + '@rollup/rollup-freebsd-x64': 4.29.1 + '@rollup/rollup-linux-arm-gnueabihf': 4.29.1 + '@rollup/rollup-linux-arm-musleabihf': 4.29.1 + '@rollup/rollup-linux-arm64-gnu': 4.29.1 + '@rollup/rollup-linux-arm64-musl': 4.29.1 + '@rollup/rollup-linux-loongarch64-gnu': 4.29.1 + '@rollup/rollup-linux-powerpc64le-gnu': 4.29.1 + '@rollup/rollup-linux-riscv64-gnu': 4.29.1 + '@rollup/rollup-linux-s390x-gnu': 4.29.1 + '@rollup/rollup-linux-x64-gnu': 4.29.1 + '@rollup/rollup-linux-x64-musl': 4.29.1 + '@rollup/rollup-win32-arm64-msvc': 4.29.1 + '@rollup/rollup-win32-ia32-msvc': 4.29.1 + '@rollup/rollup-win32-x64-msvc': 4.29.1 fsevents: 2.3.3 run-async@2.4.1: {} @@ -8582,7 +8647,7 @@ snapshots: '@types/istanbul-lib-coverage': 2.0.3 convert-source-map: 1.7.0 - vite-plugin-checker@0.6.4(eslint@8.57.0)(optionator@0.9.3)(typescript@5.3.3)(vite@5.2.10(@types/node@20.11.17)(sass@1.66.1)): + vite-plugin-checker@0.6.4(eslint@8.57.0)(optionator@0.9.3)(typescript@5.3.3)(vite@5.2.14(@types/node@20.11.17)(sass@1.66.1)): dependencies: '@babel/code-frame': 7.23.5 ansi-escapes: 4.3.2 @@ -8595,7 +8660,7 @@ snapshots: semver: 7.5.4 strip-ansi: 6.0.1 tiny-invariant: 1.3.3 - vite: 5.2.10(@types/node@20.11.17)(sass@1.66.1) + vite: 5.2.14(@types/node@20.11.17)(sass@1.66.1) vscode-languageclient: 7.0.0 vscode-languageserver: 7.0.0 vscode-languageserver-textdocument: 1.0.11 @@ -8605,27 +8670,27 @@ snapshots: optionator: 0.9.3 typescript: 5.3.3 - vite-plugin-ejs@1.7.0(vite@5.2.10(@types/node@20.11.17)(sass@1.66.1)): + vite-plugin-ejs@1.7.0(vite@5.2.14(@types/node@20.11.17)(sass@1.66.1)): dependencies: ejs: 3.1.10 - vite: 5.2.10(@types/node@20.11.17)(sass@1.66.1) + vite: 5.2.14(@types/node@20.11.17)(sass@1.66.1) - vite-tsconfig-paths@4.3.2(typescript@5.3.3)(vite@5.2.10(@types/node@20.11.17)(sass@1.66.1)): + vite-tsconfig-paths@4.3.2(typescript@5.3.3)(vite@5.2.14(@types/node@20.11.17)(sass@1.66.1)): dependencies: debug: 4.3.4 globrex: 0.1.2 tsconfck: 3.0.3(typescript@5.3.3) optionalDependencies: - vite: 5.2.10(@types/node@20.11.17)(sass@1.66.1) + vite: 5.2.14(@types/node@20.11.17)(sass@1.66.1) transitivePeerDependencies: - supports-color - typescript - vite@5.2.10(@types/node@20.11.17)(sass@1.66.1): + vite@5.2.14(@types/node@20.11.17)(sass@1.66.1): dependencies: esbuild: 0.20.2 postcss: 8.4.38 - rollup: 4.16.1 + rollup: 4.29.1 optionalDependencies: '@types/node': 20.11.17 fsevents: 2.3.3 @@ -8777,7 +8842,7 @@ snapshots: imurmurhash: 0.1.4 signal-exit: 3.0.7 - ws@8.8.0: {} + ws@8.18.0: {} xml-name-validator@4.0.0: {} diff --git a/frontend/public/serviceImage.png b/frontend/public/serviceImage.png new file mode 100644 index 000000000..8006b13f5 Binary files /dev/null and b/frontend/public/serviceImage.png differ diff --git a/frontend/src/components/App.tsx b/frontend/src/components/App.tsx index 16dd1305d..d15cb95a0 100644 --- a/frontend/src/components/App.tsx +++ b/frontend/src/components/App.tsx @@ -1,5 +1,5 @@ import React, { Suspense, useContext } from 'react'; -import { Routes, Route, Navigate } from 'react-router-dom'; +import { Routes, Route, Navigate, useMatch } from 'react-router-dom'; import { accessErrorPage, clusterPath, @@ -24,6 +24,7 @@ import { GlobalSettingsProvider } from './contexts/GlobalSettingsContext'; import { UserInfoRolesAccessProvider } from './contexts/UserInfoRolesAccessContext'; import 
PageContainer from './PageContainer/PageContainer'; +const AuthPage = React.lazy(() => import('components/AuthPage/AuthPage')); const Dashboard = React.lazy(() => import('components/Dashboard/Dashboard')); const ClusterPage = React.lazy( () => import('components/ClusterPage/ClusterPage') @@ -49,54 +50,59 @@ const queryClient = new QueryClient({ }); const App: React.FC = () => { const { isDarkMode } = useContext(ThemeModeContext); + const isAuthRoute = useMatch('/login'); return ( - - - }> - - - - - - - {['/', '/ui', '/ui/clusters'].map((path) => ( + + {isAuthRoute ? ( + + ) : ( + + }> + + + + + + + {['/', '/ui', '/ui/clusters'].map((path) => ( + } + /> + ))} } + path={getNonExactPath(clusterNewConfigPath)} + element={} /> - ))} - } - /> - } - /> - - } - /> - } /> - } - /> - - - - - - - - - - + } + /> + + } + /> + } /> + } + /> + + + + + + + + + + )} + ); diff --git a/frontend/src/components/AuthPage/AuthPage.styled.tsx b/frontend/src/components/AuthPage/AuthPage.styled.tsx new file mode 100644 index 000000000..16f86f714 --- /dev/null +++ b/frontend/src/components/AuthPage/AuthPage.styled.tsx @@ -0,0 +1,14 @@ +import styled, { css } from 'styled-components'; + +export const AuthPageStyled = styled.div( + ({ theme }) => css` + display: flex; + flex-direction: column; + align-items: center; + justify-content: space-between; + min-height: 100vh; + background-color: ${theme.auth_page.backgroundColor}; + font-family: ${theme.auth_page.fontFamily}; + overflow-x: hidden; + ` +); diff --git a/frontend/src/components/AuthPage/AuthPage.tsx b/frontend/src/components/AuthPage/AuthPage.tsx new file mode 100644 index 000000000..ceae3069a --- /dev/null +++ b/frontend/src/components/AuthPage/AuthPage.tsx @@ -0,0 +1,21 @@ +import React from 'react'; +import { useAuthSettings } from 'lib/hooks/api/appConfig'; + +import Header from './Header/Header'; +import SignIn from './SignIn/SignIn'; +import * as S from './AuthPage.styled'; + +function AuthPage() { + const { data } = useAuthSettings(); + + return ( + +

+ {data && ( + + )} + + ); +} + +export default AuthPage; diff --git a/frontend/src/components/AuthPage/Header/Header.styled.tsx b/frontend/src/components/AuthPage/Header/Header.styled.tsx new file mode 100644 index 000000000..4ba86f2bc --- /dev/null +++ b/frontend/src/components/AuthPage/Header/Header.styled.tsx @@ -0,0 +1,33 @@ +import styled, { css } from 'styled-components'; + +export const HeaderStyled = styled.div` + display: grid; + grid-template-columns: repeat(47, 41.11px); + grid-template-rows: repeat(4, 41.11px); + justify-content: center; + margin-bottom: 13.5px; +`; + +export const HeaderCell = styled.div<{ $sections?: number }>( + ({ theme, $sections }) => css` + border: 1.23px solid ${theme.auth_page.header.cellBorderColor}; + border-radius: 75.98px; + ${$sections && `grid-column: span ${$sections};`} + ` +); + +export const StyledSVG = styled.svg` + grid-column: span 3; +`; + +export const StyledRect = styled.rect( + ({ theme }) => css` + fill: ${theme.auth_page.header.LogoBgColor}; + ` +); + +export const StyledPath = styled.path( + ({ theme }) => css` + fill: ${theme.auth_page.header.LogoTextColor}; + ` +); diff --git a/frontend/src/components/AuthPage/Header/Header.tsx b/frontend/src/components/AuthPage/Header/Header.tsx new file mode 100644 index 000000000..16980af29 --- /dev/null +++ b/frontend/src/components/AuthPage/Header/Header.tsx @@ -0,0 +1,81 @@ +import React from 'react'; + +import * as S from './Header.styled'; +import HeaderLogo from './HeaderLogo'; + +function Header() { + return ( + + + {Array(2).fill()} + + {Array(2).fill()} + + {Array(2).fill()} + {Array(4).fill()} + {Array(2).fill()} + + {Array(2).fill()} + + {Array(3).fill()} + + {Array(2).fill()} + {Array(2).fill()} + {Array(2).fill()} + + + {Array(3).fill()} + {Array(8).fill()} + + {Array(2).fill()} + + {Array(3).fill()} + + {Array(6).fill()} + {Array(3).fill()} + + + {Array(2).fill()} + + + + + + + + {Array(2).fill()} + + + + {Array(3).fill()} + + + {Array(3).fill()} + + {Array(3).fill()} + {Array(3).fill()} + + + + + + {Array(2).fill()} + + {Array(2).fill()} + {Array(5).fill()} + {Array(2).fill()} + + + + {Array(5).fill()} + {Array(2).fill()} + + {Array(2).fill()} + + + + + ); +} + +export default Header; diff --git a/frontend/src/components/AuthPage/Header/HeaderLogo.tsx b/frontend/src/components/AuthPage/Header/HeaderLogo.tsx new file mode 100644 index 000000000..e5d9ca12d --- /dev/null +++ b/frontend/src/components/AuthPage/Header/HeaderLogo.tsx @@ -0,0 +1,29 @@ +import React from 'react'; + +import * as S from './Header.styled'; + +const HeaderLogo = () => ( + + + + + + + + + + +); + +export default HeaderLogo; diff --git a/frontend/src/components/AuthPage/SignIn/BasicSignIn/BasicSignIn.styled.tsx b/frontend/src/components/AuthPage/SignIn/BasicSignIn/BasicSignIn.styled.tsx new file mode 100644 index 000000000..da1388b0a --- /dev/null +++ b/frontend/src/components/AuthPage/SignIn/BasicSignIn/BasicSignIn.styled.tsx @@ -0,0 +1,56 @@ +import styled from 'styled-components'; + +export const Fieldset = styled.fieldset` + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + gap: 16px; + border: none; + width: 100%; +`; + +export const Form = styled.form` + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + gap: 40px; + width: 100%; + + ${Fieldset} div { + width: 100%; + } +`; + +export const Field = styled.div` + ${({ theme }) => theme.auth_page.signIn.label}; + display: flex; + flex-direction: column; + justify-content: 
flex-start; + align-items: flex-start; + gap: 4px; +`; + +export const Label = styled.label` + font-size: 12px; + font-weight: 500; + line-height: 16px; +`; + +export const ErrorMessage = styled.div` + display: flex; + column-gap: 2px; + align-items: center; + justify-content: center; + font-weight: 400; + font-size: 14px; + line-height: 20px; +`; + +export const ErrorMessageText = styled.span` + ${({ theme }) => theme.auth_page.signIn.errorMessage}; + font-weight: 400; + font-size: 14px; + line-height: 20px; +`; diff --git a/frontend/src/components/AuthPage/SignIn/BasicSignIn/BasicSignIn.tsx b/frontend/src/components/AuthPage/SignIn/BasicSignIn/BasicSignIn.tsx new file mode 100644 index 000000000..044f4781b --- /dev/null +++ b/frontend/src/components/AuthPage/SignIn/BasicSignIn/BasicSignIn.tsx @@ -0,0 +1,101 @@ +import React from 'react'; +import { Button } from 'components/common/Button/Button'; +import Input from 'components/common/Input/Input'; +import { Controller, FormProvider, useForm } from 'react-hook-form'; +import { useAuthenticate } from 'lib/hooks/api/appConfig'; +import AlertIcon from 'components/common/Icons/AlertIcon'; +import { useNavigate } from 'react-router-dom'; +import { useQueryClient } from '@tanstack/react-query'; + +import * as S from './BasicSignIn.styled'; + +interface FormValues { + username: string; + password: string; +} + +function BasicSignIn() { + const methods = useForm({ + defaultValues: { username: '', password: '' }, + }); + const navigate = useNavigate(); + const { mutateAsync, isLoading } = useAuthenticate(); + const client = useQueryClient(); + + const onSubmit = async (data: FormValues) => { + await mutateAsync(data, { + onSuccess: async (response) => { + if (response.raw.url.includes('error')) { + methods.setError('root', { message: 'error' }); + } else { + await client.invalidateQueries({ queryKey: ['app', 'info'] }); + navigate('/'); + } + }, + }); + }; + + return ( + + + + {methods.formState.errors.root && ( + + + + Username or password entered incorrectly + + + )} + ( + + Username + + + )} + /> + ( + + Password + + + )} + /> + + + + + ); +} + +export default BasicSignIn; diff --git a/frontend/src/components/AuthPage/SignIn/OAuthSignIn/AuthCard/AuthCard.styled.tsx b/frontend/src/components/AuthPage/SignIn/OAuthSignIn/AuthCard/AuthCard.styled.tsx new file mode 100644 index 000000000..d1eae050f --- /dev/null +++ b/frontend/src/components/AuthPage/SignIn/OAuthSignIn/AuthCard/AuthCard.styled.tsx @@ -0,0 +1,66 @@ +import styled, { css } from 'styled-components'; +import GitHubIcon from 'components/common/Icons/GitHubIcon'; +import { Button } from 'components/common/Button/Button'; + +export const AuthCardStyled = styled.div( + ({ theme }) => css` + display: flex; + flex-direction: column; + gap: 16px; + padding: 16px; + width: 400px; + border: 1px solid black; + border: 1px solid ${theme.auth_page.signIn.authCard.borderColor}; + border-radius: ${theme.auth_page.signIn.authCard.borderRadius}; + background-color: ${theme.auth_page.signIn.authCard.backgroundColor}; + ` +); + +export const ServiceData = styled.div( + ({ theme }) => css` + display: flex; + gap: 8px; + align-items: center; + + svg, + img { + margin: 8px; + width: 48px; + height: 48px; + } + + ${GitHubIcon} { + fill: ${theme.auth_page.icons.githubColor}; + } + ` +); + +export const ServiceDataTextContainer = styled.div` + display: flex; + flex-direction: column; +`; + +export const ServiceNameStyled = styled.span( + ({ theme }) => css` + color: 
${theme.auth_page.signIn.authCard.serviceNamecolor}; + font-size: 16px; + font-weight: 500; + line-height: 24px; + ` +); + +export const ServiceTextStyled = styled.span( + ({ theme }) => css` + color: ${theme.auth_page.signIn.authCard.serviceTextColor}; + font-size: 12px; + font-weight: 500; + line-height: 16px; + ` +); + +export const ServiceButton = styled(Button)` + width: 100%; + border-radius: 8px; + font-size: 14px; + text-decoration: none; +`; diff --git a/frontend/src/components/AuthPage/SignIn/OAuthSignIn/AuthCard/AuthCard.tsx b/frontend/src/components/AuthPage/SignIn/OAuthSignIn/AuthCard/AuthCard.tsx new file mode 100644 index 000000000..b9a09812b --- /dev/null +++ b/frontend/src/components/AuthPage/SignIn/OAuthSignIn/AuthCard/AuthCard.tsx @@ -0,0 +1,41 @@ +import React, { ElementType, useState } from 'react'; +import ServiceImage from 'components/common/Icons/ServiceImage'; + +import * as S from './AuthCard.styled'; + +interface Props { + serviceName: string; + authPath: string | undefined; + Icon?: ElementType; +} + +function AuthCard({ serviceName, authPath, Icon = ServiceImage }: Props) { + const [isLoading, setIsLoading] = useState(false); + + return ( + + + + + {serviceName} + + Use an account issued by the organization + + + + { + setIsLoading(true); + window.location.replace(`${window.basePath}${authPath}`); + }} + inProgress={isLoading} + > + {!isLoading && `Log in with ${serviceName}`} + + + ); +} + +export default AuthCard; diff --git a/frontend/src/components/AuthPage/SignIn/OAuthSignIn/OAuthSignIn.styled.tsx b/frontend/src/components/AuthPage/SignIn/OAuthSignIn/OAuthSignIn.styled.tsx new file mode 100644 index 000000000..bf238e9b2 --- /dev/null +++ b/frontend/src/components/AuthPage/SignIn/OAuthSignIn/OAuthSignIn.styled.tsx @@ -0,0 +1,25 @@ +import styled from 'styled-components'; + +export const OAuthSignInStyled = styled.div` + display: flex; + flex-direction: column; + gap: 8px; +`; + +export const ErrorMessage = styled.div` + display: flex; + column-gap: 2px; + align-items: center; + justify-content: center; + font-weight: 400; + font-size: 14px; + line-height: 20px; + margin-bottom: 8px; +`; + +export const ErrorMessageText = styled.span` + ${({ theme }) => theme.auth_page.signIn.errorMessage}; + font-weight: 400; + font-size: 14px; + line-height: 20px; +`; diff --git a/frontend/src/components/AuthPage/SignIn/OAuthSignIn/OAuthSignIn.tsx b/frontend/src/components/AuthPage/SignIn/OAuthSignIn/OAuthSignIn.tsx new file mode 100644 index 000000000..fca5b4925 --- /dev/null +++ b/frontend/src/components/AuthPage/SignIn/OAuthSignIn/OAuthSignIn.tsx @@ -0,0 +1,55 @@ +import React, { ElementType } from 'react'; +import GitHubIcon from 'components/common/Icons/GitHubIcon'; +import GoogleIcon from 'components/common/Icons/GoogleIcon'; +import CognitoIcon from 'components/common/Icons/CognitoIcon'; +import OktaIcon from 'components/common/Icons/OktaIcon'; +import KeycloakIcon from 'components/common/Icons/KeycloakIcon'; +import ServiceImage from 'components/common/Icons/ServiceImage'; +import { OAuthProvider } from 'generated-sources'; +import { useLocation } from 'react-router-dom'; +import AlertIcon from 'components/common/Icons/AlertIcon'; + +import * as S from './OAuthSignIn.styled'; +import AuthCard from './AuthCard/AuthCard'; + +interface Props { + oAuthProviders: OAuthProvider[] | undefined; +} + +const ServiceIconMap: Record = { + github: GitHubIcon, + google: GoogleIcon, + cognito: CognitoIcon, + keycloak: KeycloakIcon, + okta: OktaIcon, + unknownService: 
+};
+
+function OAuthSignIn({ oAuthProviders }: Props) {
+  const { search } = useLocation();
+
+  return (
+
+      {search.includes('error') && (
+
+          Invalid credentials
+
+      )}
+      {oAuthProviders?.map((provider) => (
+
+      ))}
+
+  );
+}
+
+export default OAuthSignIn;
diff --git a/frontend/src/components/AuthPage/SignIn/SignIn.styled.tsx b/frontend/src/components/AuthPage/SignIn/SignIn.styled.tsx
new file mode 100644
index 000000000..0f24b45fd
--- /dev/null
+++ b/frontend/src/components/AuthPage/SignIn/SignIn.styled.tsx
@@ -0,0 +1,19 @@
+import styled, { css } from 'styled-components';
+
+export const SignInStyled = styled.div`
+  display: flex;
+  flex-direction: column;
+  align-items: center;
+  justify-content: center;
+  width: 320px;
+  gap: 56px;
+  flex-grow: 1;
+`;
+
+export const SignInTitle = styled.span(
+  ({ theme }) => css`
+    color: ${theme.auth_page.signIn.titleColor};
+    font-size: 24px;
+    font-weight: 600;
+  `
+);
diff --git a/frontend/src/components/AuthPage/SignIn/SignIn.tsx b/frontend/src/components/AuthPage/SignIn/SignIn.tsx
new file mode 100644
index 000000000..987ee5ebf
--- /dev/null
+++ b/frontend/src/components/AuthPage/SignIn/SignIn.tsx
@@ -0,0 +1,27 @@
+import React from 'react';
+import { AuthType, OAuthProvider } from 'generated-sources';
+
+import BasicSignIn from './BasicSignIn/BasicSignIn';
+import * as S from './SignIn.styled';
+import OAuthSignIn from './OAuthSignIn/OAuthSignIn';
+
+interface Props {
+  authType?: AuthType;
+  oAuthProviders?: OAuthProvider[];
+}
+
+function SignInForm({ authType, oAuthProviders }: Props) {
+  return (
+
+      Sign in
+      {(authType === AuthType.LDAP || authType === AuthType.LOGIN_FORM) && (
+
+      )}
+      {authType === AuthType.OAUTH2 && (
+
+      )}
+
+  );
+}
+
+export default SignInForm;
diff --git a/frontend/src/components/Brokers/Broker/Configs/lib/constants.ts b/frontend/src/components/Brokers/Broker/Configs/lib/constants.ts
index 5a829323a..f5701a893 100644
--- a/frontend/src/components/Brokers/Broker/Configs/lib/constants.ts
+++ b/frontend/src/components/Brokers/Broker/Configs/lib/constants.ts
@@ -5,6 +5,8 @@ export const CONFIG_SOURCE_NAME_MAP: Record<ConfigSource, string> = {
   [ConfigSource.DYNAMIC_BROKER_LOGGER_CONFIG]: 'Dynamic broker logger config',
   [ConfigSource.DYNAMIC_BROKER_CONFIG]: 'Dynamic broker config',
   [ConfigSource.DYNAMIC_DEFAULT_BROKER_CONFIG]: 'Dynamic default broker config',
+  [ConfigSource.DYNAMIC_CLIENT_METRICS_CONFIG]:
+    'Dynamic client metrics subscription config',
   [ConfigSource.STATIC_BROKER_CONFIG]: 'Static broker config',
   [ConfigSource.DEFAULT_CONFIG]: 'Default config',
   [ConfigSource.UNKNOWN]: 'Unknown',
@@ -15,6 +17,7 @@ export const CONFIG_SOURCE_PRIORITY = {
   [ConfigSource.DYNAMIC_BROKER_LOGGER_CONFIG]: 1,
   [ConfigSource.DYNAMIC_BROKER_CONFIG]: 1,
   [ConfigSource.DYNAMIC_DEFAULT_BROKER_CONFIG]: 1,
+  [ConfigSource.DYNAMIC_CLIENT_METRICS_CONFIG]: 1,
   [ConfigSource.STATIC_BROKER_CONFIG]: 2,
   [ConfigSource.DEFAULT_CONFIG]: 3,
   [ConfigSource.UNKNOWN]: 4,
diff --git a/frontend/src/components/Connect/Details/Actions/Actions.tsx b/frontend/src/components/Connect/Details/Actions/Actions.tsx
index 61eeabda4..6909b4617 100644
--- a/frontend/src/components/Connect/Details/Actions/Actions.tsx
+++ b/frontend/src/components/Connect/Details/Actions/Actions.tsx
@@ -11,6 +11,7 @@ import useAppParams from 'lib/hooks/useAppParams';
 import {
   useConnector,
   useDeleteConnector,
+  useResetConnectorOffsets,
   useUpdateConnectorState,
 } from 'lib/hooks/api/kafkaConnect';
 import {
@@ -37,7 +38,7 @@ const Actions: React.FC = () => {
   const deleteConnectorHandler = () =>
     confirm(
       <>
-        Are you sure you want to remove {routerProps.connectorName}{' '}
+        Are you sure you want to remove the {routerProps.connectorName}{' '}
         connector?
       </>,
       async () => {
@@ -59,11 +60,25 @@ const Actions: React.FC = () => {
     stateMutation.mutateAsync(ConnectorAction.RESTART_FAILED_TASKS);
   const pauseConnectorHandler = () =>
     stateMutation.mutateAsync(ConnectorAction.PAUSE);
+  const stopConnectorHandler = () =>
+    stateMutation.mutateAsync(ConnectorAction.STOP);
   const resumeConnectorHandler = () =>
     stateMutation.mutateAsync(ConnectorAction.RESUME);
+
+  const resetConnectorOffsetsMutation = useResetConnectorOffsets(routerProps);
+  const resetConnectorOffsetsHandler = () =>
+    confirm(
+      <>
+        Are you sure you want to reset the {routerProps.connectorName}{' '}
+        connector offsets?
+      </>,
+      () => resetConnectorOffsetsMutation.mutateAsync()
+    );
+
   return (
         Restart
@@ -74,7 +89,6 @@ const Actions: React.FC = () => {
       {connector?.status.state === ConnectorState.RUNNING && (
          {
          Pause
       )}
-      {connector?.status.state === ConnectorState.PAUSED && (
+      {connector?.status.state === ConnectorState.RUNNING && (
+
+          Stop
+
+      )}
+      {(connector?.status.state === ConnectorState.PAUSED ||
+        connector?.status.state === ConnectorState.STOPPED) && (
          {
       )}
          {
          {
          {
+
+          Reset Offsets
+
 ({
@@ -34,12 +35,14 @@ jest.mock('lib/hooks/api/kafkaConnect', () => ({
   useConnector: jest.fn(),
   useDeleteConnector: jest.fn(),
   useUpdateConnectorState: jest.fn(),
+  useResetConnectorOffsets: jest.fn(),
 }));
 
 const expectActionButtonsExists = () => {
   expect(screen.getByText('Restart Connector')).toBeInTheDocument();
   expect(screen.getByText('Restart All Tasks')).toBeInTheDocument();
   expect(screen.getByText('Restart Failed Tasks')).toBeInTheDocument();
+  expect(screen.getByText('Reset Offsets')).toBeInTheDocument();
   expect(screen.getByText('Delete')).toBeInTheDocument();
 };
 const afterClickDropDownButton = async () => {
@@ -55,6 +58,7 @@ describe('Actions', () => {
     mockHistoryPush.mockClear();
     deleteConnector.mockClear();
     cancelMock.mockClear();
+    resetConnectorOffsets.mockClear();
   });
 
   describe('view', () => {
@@ -82,6 +86,30 @@ describe('Actions', () => {
       expect(screen.getAllByRole('menuitem').length).toEqual(4);
       expect(screen.getByText('Resume')).toBeInTheDocument();
       expect(screen.queryByText('Pause')).not.toBeInTheDocument();
+      expect(screen.queryByText('Stop')).not.toBeInTheDocument();
+      await afterClickDropDownButton();
+      expect(screen.getByText('Reset Offsets')).toBeInTheDocument();
+      expect(
+        screen.getByRole('menuitem', { name: 'Reset Offsets' })
+      ).toHaveAttribute('aria-disabled');
+      expectActionButtonsExists();
+    });
+
+    it('renders buttons when stopped', async () => {
+      (useConnector as jest.Mock).mockImplementation(() => ({
+        data: setConnectorStatus(connector, ConnectorState.STOPPED),
+      }));
+      renderComponent();
+      await afterClickRestartButton();
+      expect(screen.getAllByRole('menuitem').length).toEqual(4);
+      expect(screen.getByText('Resume')).toBeInTheDocument();
+      expect(screen.queryByText('Pause')).not.toBeInTheDocument();
+      expect(screen.queryByText('Stop')).not.toBeInTheDocument();
+      await afterClickDropDownButton();
+      expect(screen.getByText('Reset Offsets')).toBeInTheDocument();
+      expect(
+        screen.getByRole('menuitem', { name: 'Reset Offsets' })
+      ).not.toHaveAttribute('aria-disabled');
       expectActionButtonsExists();
     });
 
@@ -94,6 +122,12 @@ describe('Actions', () => {
       expect(screen.getAllByRole('menuitem').length).toEqual(3);
       expect(screen.queryByText('Resume')).not.toBeInTheDocument();
       expect(screen.queryByText('Pause')).not.toBeInTheDocument();
+      expect(screen.queryByText('Stop')).not.toBeInTheDocument();
+      await afterClickDropDownButton();
+      expect(screen.getByText('Reset Offsets')).toBeInTheDocument();
+      expect(
+        screen.getByRole('menuitem', { name: 'Reset Offsets' })
+      ).toHaveAttribute('aria-disabled');
       expectActionButtonsExists();
     });
 
@@ -106,6 +140,12 @@ describe('Actions', () => {
       expect(screen.getAllByRole('menuitem').length).toEqual(3);
       expect(screen.queryByText('Resume')).not.toBeInTheDocument();
       expect(screen.queryByText('Pause')).not.toBeInTheDocument();
+      expect(screen.queryByText('Stop')).not.toBeInTheDocument();
+      await afterClickDropDownButton();
+      expect(screen.getByText('Reset Offsets')).toBeInTheDocument();
+      expect(
+        screen.getByRole('menuitem', { name: 'Reset Offsets' })
+      ).toHaveAttribute('aria-disabled');
       expectActionButtonsExists();
     });
 
@@ -115,9 +155,15 @@ describe('Actions', () => {
       }));
       renderComponent();
       await afterClickRestartButton();
-      expect(screen.getAllByRole('menuitem').length).toEqual(4);
+      expect(screen.getAllByRole('menuitem').length).toEqual(5);
       expect(screen.queryByText('Resume')).not.toBeInTheDocument();
       expect(screen.getByText('Pause')).toBeInTheDocument();
+      expect(screen.getByText('Stop')).toBeInTheDocument();
+      await afterClickDropDownButton();
+      expect(screen.getByText('Reset Offsets')).toBeInTheDocument();
+      expect(
+        screen.getByRole('menuitem', { name: 'Reset Offsets' })
+      ).toHaveAttribute('aria-disabled');
       expectActionButtonsExists();
     });
 
@@ -137,6 +183,20 @@ describe('Actions', () => {
       expect(screen.getByRole('dialog')).toBeInTheDocument();
     });
 
+    it('opens confirmation modal when reset offsets button clicked on a STOPPED connector', async () => {
+      (useConnector as jest.Mock).mockImplementation(() => ({
+        data: setConnectorStatus(connector, ConnectorState.STOPPED),
+      }));
+      renderComponent();
+      await afterClickDropDownButton();
+      await waitFor(async () =>
+        userEvent.click(
+          screen.getByRole('menuitem', { name: 'Reset Offsets' })
+        )
+      );
+      expect(screen.getByRole('dialog')).toBeInTheDocument();
+    });
+
     it('calls restartConnector when restart button clicked', async () => {
       const restartConnector = jest.fn();
       (useUpdateConnectorState as jest.Mock).mockImplementation(() => ({
@@ -191,7 +251,18 @@ describe('Actions', () => {
       expect(pauseConnector).toHaveBeenCalledWith(ConnectorAction.PAUSE);
     });
 
-    it('calls resumeConnector when resume button clicked', async () => {
+    it('calls stopConnector when stop button clicked', async () => {
+      const stopConnector = jest.fn();
+      (useUpdateConnectorState as jest.Mock).mockImplementation(() => ({
+        mutateAsync: stopConnector,
+      }));
+      renderComponent();
+      await afterClickRestartButton();
+      await userEvent.click(screen.getByRole('menuitem', { name: 'Stop' }));
+      expect(stopConnector).toHaveBeenCalledWith(ConnectorAction.STOP);
+    });
+
+    it('calls resumeConnector when resume button clicked from PAUSED state', async () => {
       const resumeConnector = jest.fn();
       (useConnector as jest.Mock).mockImplementation(() => ({
         data: setConnectorStatus(connector, ConnectorState.PAUSED),
@@ -204,6 +275,20 @@ describe('Actions', () => {
       await userEvent.click(screen.getByRole('menuitem', { name: 'Resume' }));
       expect(resumeConnector).toHaveBeenCalledWith(ConnectorAction.RESUME);
     });
+
+    it('calls resumeConnector when resume button clicked from STOPPED state', async () => {
+      const resumeConnector = jest.fn();
+      (useConnector as jest.Mock).mockImplementation(() => ({
+        data: setConnectorStatus(connector, ConnectorState.STOPPED),
+      }));
+      (useUpdateConnectorState as jest.Mock).mockImplementation(() => ({
+        mutateAsync: resumeConnector,
+      }));
+      renderComponent();
+      await afterClickRestartButton();
+      await userEvent.click(screen.getByRole('menuitem', { name: 'Resume' }));
+      expect(resumeConnector).toHaveBeenCalledWith(ConnectorAction.RESUME);
+    });
   });
 });
});
diff --git a/frontend/src/components/Connect/List/ActionsCell.tsx b/frontend/src/components/Connect/List/ActionsCell.tsx
index 9b219f20e..4ad50c00e 100644
--- a/frontend/src/components/Connect/List/ActionsCell.tsx
+++ b/frontend/src/components/Connect/List/ActionsCell.tsx
@@ -9,9 +9,10 @@ import {
 import { CellContext } from '@tanstack/react-table';
 import { ClusterNameRoute } from 'lib/paths';
 import useAppParams from 'lib/hooks/useAppParams';
-import { Dropdown, DropdownItem } from 'components/common/Dropdown';
+import { Dropdown } from 'components/common/Dropdown';
 import {
   useDeleteConnector,
+  useResetConnectorOffsets,
   useUpdateConnectorState,
 } from 'lib/hooks/api/kafkaConnect';
 import { useConfirm } from 'lib/hooks/useConfirm';
@@ -36,10 +37,15 @@ const ActionsCell: React.FC<CellContext<FullConnectorInfo, unknown>> = ({
     connectName: connect,
     connectorName: name,
   });
+  const resetConnectorOffsetsMutation = useResetConnectorOffsets({
+    clusterName,
+    connectName: connect,
+    connectorName: name,
+  });
   const handleDelete = () => {
     confirm(
       <>
-        Are you sure want to remove {name} connector?
+        Are you sure you want to remove the {name} connector?
       </>,
       async () => {
         await deleteMutation.mutateAsync();
@@ -58,9 +64,25 @@
   const restartFailedTasksHandler = () =>
     stateMutation.mutateAsync(ConnectorAction.RESTART_FAILED_TASKS);
 
+  const pauseConnectorHandler = () =>
+    stateMutation.mutateAsync(ConnectorAction.PAUSE);
+
+  const stopConnectorHandler = () =>
+    stateMutation.mutateAsync(ConnectorAction.STOP);
+
+  const resetOffsetsHandler = () => {
+    confirm(
+      <>
+        Are you sure you want to reset the {name} connector offsets?
+      </>,
+      () => resetConnectorOffsetsMutation.mutateAsync()
+    );
+  };
+
   return (
-      {status.state === ConnectorState.PAUSED && (
+      {(status.state === ConnectorState.PAUSED ||
+        status.state === ConnectorState.STOPPED) && (
        > = ({
          Resume
       )}
+      {status.state === ConnectorState.RUNNING && (
+
+          Pause
+
+      )}
+      {status.state === ConnectorState.RUNNING && (
+
+          Stop
+
+      )}
        > = ({
        >
          Restart Failed Tasks
-
-        Remove Connector
-
+
+        Reset Offsets
+
+
+        Delete
+
   );
 };
diff --git a/frontend/src/components/Connect/List/__tests__/List.spec.tsx b/frontend/src/components/Connect/List/__tests__/List.spec.tsx
index 82b4aab21..194d97246 100644
--- a/frontend/src/components/Connect/List/__tests__/List.spec.tsx
+++ b/frontend/src/components/Connect/List/__tests__/List.spec.tsx
@@ -12,11 +12,13 @@ import { clusterConnectConnectorPath, clusterConnectorsPath } from 'lib/paths';
 import {
   useConnectors,
   useDeleteConnector,
+  useResetConnectorOffsets,
   useUpdateConnectorState,
 } from 'lib/hooks/api/kafkaConnect';
 
 const mockedUsedNavigate = jest.fn();
 const mockDelete = jest.fn();
+const mockResetOffsets = jest.fn();
 
 jest.mock('react-router-dom', () => ({
   ...jest.requireActual('react-router-dom'),
@@ -27,6 +29,7 @@ jest.mock('lib/hooks/api/kafkaConnect', () => ({
   useConnectors: jest.fn(),
   useDeleteConnector: jest.fn(),
   useUpdateConnectorState: jest.fn(),
+  useResetConnectorOffsets: jest.fn(),
 }));
 
 const clusterName = 'local';
@@ -56,7 +59,7 @@ describe('Connectors List', () => {
   it('renders', async () => {
     renderComponent();
     expect(screen.getByRole('table')).toBeInTheDocument();
-    expect(screen.getAllByRole('row').length).toEqual(3);
+    expect(screen.getAllByRole('row').length).toEqual(4);
   });
 
   it('opens broker when row clicked', async () => {
@@ -94,7 +97,7 @@
     });
   });
 
-  describe('when remove connector modal is open', () => {
+  describe('when delete modal is open', () => {
     beforeEach(() => {
       (useConnectors as jest.Mock).mockImplementation(() => ({
         data: connectors,
@@ -104,10 +107,10 @@ describe('Connectors List', () => {
       }));
     });
 
-    it('calls removeConnector on confirm', async () => {
+    it('calls deleteConnector on confirm', async () => {
       renderComponent();
-      const removeButton = screen.getAllByText('Remove Connector')[0];
-      await waitFor(() => userEvent.click(removeButton));
+      const deleteButton = screen.getAllByText('Delete')[0];
+      await waitFor(() => userEvent.click(deleteButton));
 
       const submitButton = screen.getAllByRole('button', {
         name: 'Confirm',
       })[0];
@@ -118,8 +121,43 @@ describe('Connectors List', () => {
 
     it('closes the modal when cancel button is clicked', async () => {
       renderComponent();
-      const removeButton = screen.getAllByText('Remove Connector')[0];
-      await waitFor(() => userEvent.click(removeButton));
+      const deleteButton = screen.getAllByText('Delete')[0];
+      await waitFor(() => userEvent.click(deleteButton));
 
       const cancelButton = screen.getAllByRole('button', {
         name: 'Cancel',
       })[0];
       await waitFor(() => userEvent.click(cancelButton));
       expect(cancelButton).not.toBeInTheDocument();
     });
   });
+
+  describe('when reset connector offsets modal is open', () => {
+    beforeEach(() => {
+      (useConnectors as jest.Mock).mockImplementation(() => ({
+        data: connectors,
+      }));
+      (useResetConnectorOffsets as jest.Mock).mockImplementation(() => ({
+        mutateAsync: mockResetOffsets,
+      }));
+    });
+
+    it('calls resetConnectorOffsets on confirm', async () => {
+      renderComponent();
+      const resetButton = screen.getAllByText('Reset Offsets')[2];
+      await waitFor(() => userEvent.click(resetButton));
+
+      const submitButton = screen.getAllByRole('button', {
+        name: 'Confirm',
+      })[0];
+      await userEvent.click(submitButton);
+      expect(mockResetOffsets).toHaveBeenCalledWith();
+    });
+
+    it('closes the modal when cancel button is clicked', async () => {
+      renderComponent();
+      const resetButton = screen.getAllByText('Reset Offsets')[2];
+      await waitFor(() => userEvent.click(resetButton));
 
       const cancelButton = screen.getAllByRole('button', {
         name: 'Cancel',
diff --git a/frontend/src/components/ConsumerGroups/List.tsx b/frontend/src/components/ConsumerGroups/List.tsx
index 54d3ebf4b..683f8b27b 100644
--- a/frontend/src/components/ConsumerGroups/List.tsx
+++ b/frontend/src/components/ConsumerGroups/List.tsx
@@ -62,7 +62,7 @@ const List = () => {
       header: 'Consumer Lag',
       accessorKey: 'consumerLag',
       cell: (args) => {
-        return args.getValue() || 'N/A';
+        return args.getValue() ?? 'N/A';
       },
     },
     {
diff --git a/frontend/src/components/ConsumerGroups/__test__/List.spec.tsx b/frontend/src/components/ConsumerGroups/__test__/List.spec.tsx
new file mode 100644
index 000000000..ad79c05f2
--- /dev/null
+++ b/frontend/src/components/ConsumerGroups/__test__/List.spec.tsx
@@ -0,0 +1,55 @@
+import React from 'react';
+import { screen } from '@testing-library/react';
+import { render } from 'lib/testHelpers';
+import { useConsumerGroups } from 'lib/hooks/api/consumers';
+import List from 'components/ConsumerGroups/List';
+
+// Mock hooks
+jest.mock('lib/hooks/api/consumers', () => ({
+  useConsumerGroups: jest.fn(),
+}));
+
+jest.mock('react-router-dom', () => ({
+  ...jest.requireActual('react-router-dom'),
+  useSearchParams: () => [new URLSearchParams(), jest.fn()],
+  useNavigate: () => jest.fn(),
+}));
+
+const mockUseConsumerGroups = useConsumerGroups as jest.Mock;
+
+describe('ConsumerGroups List', () => {
+  beforeEach(() => {
+    mockUseConsumerGroups.mockImplementation(() => ({
+      data: {
+        consumerGroups: [
+          {
+            groupId: 'group1',
+            consumerLag: 0,
+            members: 1,
+            topics: 1,
+            coordinator: { id: 1 },
+            state: 'STABLE',
+          },
+          {
+            groupId: 'group2',
+            consumerLag: null,
+            members: 1,
+            topics: 1,
+            coordinator: { id: 2 },
+            state: 'STABLE',
+          },
+        ],
+        pageCount: 1,
+      },
+      isSuccess: true,
+      isFetching: false,
+    }));
+  });
+
+  it('renders consumer lag values correctly', () => {
+    render(<List />);
+    const tableRows = screen.getAllByRole('row');
+    expect(tableRows[1]).toHaveTextContent('0');
+    expect(tableRows[2]).toHaveTextContent('N/A');
+  });
+});
diff --git a/frontend/src/components/NavBar/UserInfo/UserInfo.tsx b/frontend/src/components/NavBar/UserInfo/UserInfo.tsx
index dae43364c..b52cc7631 100644
--- a/frontend/src/components/NavBar/UserInfo/UserInfo.tsx
+++ b/frontend/src/components/NavBar/UserInfo/UserInfo.tsx
@@ -19,7 +19,7 @@ const UserInfo = () => {
       }
     >
-
+
         Log out
diff --git a/frontend/src/components/Topics/Topic/Messages/Filters/AddEditFilterContainer.tsx b/frontend/src/components/Topics/Topic/Messages/Filters/AddEditFilterContainer.tsx
index 4054ca51c..c05602990 100644
--- a/frontend/src/components/Topics/Topic/Messages/Filters/AddEditFilterContainer.tsx
+++ b/frontend/src/components/Topics/Topic/Messages/Filters/AddEditFilterContainer.tsx
@@ -196,7 +196,7 @@ const AddEditFilterContainer: React.FC = ({
-        {!isEdit && }
+