From 2b02fea459ca0cadd7abac7857b270cd57f28eb9 Mon Sep 17 00:00:00 2001 From: Confluent Jenkins Bot Date: Wed, 25 Jan 2023 19:11:49 +0000 Subject: [PATCH 01/44] Bump Confluent to 7.5.0-0, Kafka to 7.5.0-0 --- ce-kafka/pom.xml | 2 +- kafka-connect-base/pom.xml | 2 +- kafka-connect/pom.xml | 2 +- kafka/pom.xml | 2 +- pom.xml | 6 +++--- server-connect-base/pom.xml | 2 +- server-connect/pom.xml | 2 +- server/pom.xml | 2 +- zookeeper/pom.xml | 2 +- 9 files changed, 11 insertions(+), 11 deletions(-) diff --git a/ce-kafka/pom.xml b/ce-kafka/pom.xml index dcb1466c91..152cb8ca57 100644 --- a/ce-kafka/pom.xml +++ b/ce-kafka/pom.xml @@ -23,7 +23,7 @@ io.confluent.kafka-images kafka-images-parent - 7.4.0-0 + 7.5.0-0 io.confluent.kafka-images diff --git a/kafka-connect-base/pom.xml b/kafka-connect-base/pom.xml index ed73c14bd5..445419f201 100644 --- a/kafka-connect-base/pom.xml +++ b/kafka-connect-base/pom.xml @@ -23,7 +23,7 @@ io.confluent.kafka-images kafka-images-parent - 7.4.0-0 + 7.5.0-0 io.confluent.kafka-images diff --git a/kafka-connect/pom.xml b/kafka-connect/pom.xml index 87f15d9e55..3471807096 100644 --- a/kafka-connect/pom.xml +++ b/kafka-connect/pom.xml @@ -23,7 +23,7 @@ io.confluent.kafka-images kafka-images-parent - 7.4.0-0 + 7.5.0-0 io.confluent.kafka-images diff --git a/kafka/pom.xml b/kafka/pom.xml index c566081815..90cf76009d 100644 --- a/kafka/pom.xml +++ b/kafka/pom.xml @@ -23,7 +23,7 @@ io.confluent.kafka-images kafka-images-parent - 7.4.0-0 + 7.5.0-0 io.confluent.kafka-images diff --git a/pom.xml b/pom.xml index 97a82211e2..ee0982d374 100644 --- a/pom.xml +++ b/pom.xml @@ -22,7 +22,7 @@ io.confluent common-docker - [7.4.0-0, 7.4.1-0) + [7.5.0-0, 7.5.1-0) io.confluent.kafka-images @@ -30,7 +30,7 @@ pom Kafka and CE Kafka Docker Images Build files for Confluent's Kafka Docker images - 7.4.0-0 + 7.5.0-0 zookeeper @@ -45,6 +45,6 @@ kafka - 7.4.0-0 + 7.5.0-0 diff --git a/server-connect-base/pom.xml b/server-connect-base/pom.xml index 8c754dc285..a376621061 100644 --- a/server-connect-base/pom.xml +++ b/server-connect-base/pom.xml @@ -23,7 +23,7 @@ io.confluent.kafka-images kafka-images-parent - 7.4.0-0 + 7.5.0-0 io.confluent.kafka-images diff --git a/server-connect/pom.xml b/server-connect/pom.xml index 725126be4c..96792a33b0 100644 --- a/server-connect/pom.xml +++ b/server-connect/pom.xml @@ -23,7 +23,7 @@ io.confluent.kafka-images kafka-images-parent - 7.4.0-0 + 7.5.0-0 io.confluent.kafka-images diff --git a/server/pom.xml b/server/pom.xml index 7300c634de..8ee175739f 100644 --- a/server/pom.xml +++ b/server/pom.xml @@ -23,7 +23,7 @@ io.confluent.kafka-images kafka-images-parent - 7.4.0-0 + 7.5.0-0 io.confluent.kafka-images diff --git a/zookeeper/pom.xml b/zookeeper/pom.xml index 0647da0a84..2c3108bee8 100644 --- a/zookeeper/pom.xml +++ b/zookeeper/pom.xml @@ -23,7 +23,7 @@ io.confluent.kafka-images kafka-images-parent - 7.4.0-0 + 7.5.0-0 io.confluent.kafka-images From 7c2e3af692d26af0dd4852f7df257156b5c06087 Mon Sep 17 00:00:00 2001 From: utkarsh5474 Date: Sat, 11 Mar 2023 02:07:24 +0530 Subject: [PATCH 02/44] add new image cp-kafka-kraft --- Jenkinsfile | 4 +- kafka-plus-rest/Dockerfile.ubi8 | 85 +++++++++ kafka-plus-rest/README.md | 31 +++ .../etc/confluent/docker/admin.properties | 0 .../include/etc/confluent/docker/configure | 41 ++++ .../include/etc/confluent/docker/ensure | 27 +++ .../confluent/docker/kafka-rest.properties | 0 .../etc/confluent/docker/kafka.properties | 0 .../include/etc/confluent/docker/launch | 34 ++++ .../docker/log4j-kafka-rest.properties | 5 + 
.../etc/confluent/docker/log4j.properties | 14 ++ .../include/etc/confluent/docker/run | 38 ++++ .../confluent/docker/tools-log4j.properties | 6 + .../include/etc/confluent/docker/utility | 178 ++++++++++++++++++ kafka-plus-rest/pom.xml | 60 ++++++ pom.xml | 1 + 16 files changed, 522 insertions(+), 2 deletions(-) create mode 100644 kafka-plus-rest/Dockerfile.ubi8 create mode 100644 kafka-plus-rest/README.md create mode 100644 kafka-plus-rest/include/etc/confluent/docker/admin.properties create mode 100755 kafka-plus-rest/include/etc/confluent/docker/configure create mode 100755 kafka-plus-rest/include/etc/confluent/docker/ensure create mode 100644 kafka-plus-rest/include/etc/confluent/docker/kafka-rest.properties create mode 100644 kafka-plus-rest/include/etc/confluent/docker/kafka.properties create mode 100755 kafka-plus-rest/include/etc/confluent/docker/launch create mode 100644 kafka-plus-rest/include/etc/confluent/docker/log4j-kafka-rest.properties create mode 100644 kafka-plus-rest/include/etc/confluent/docker/log4j.properties create mode 100755 kafka-plus-rest/include/etc/confluent/docker/run create mode 100644 kafka-plus-rest/include/etc/confluent/docker/tools-log4j.properties create mode 100644 kafka-plus-rest/include/etc/confluent/docker/utility create mode 100644 kafka-plus-rest/pom.xml diff --git a/Jenkinsfile b/Jenkinsfile index 79c0cbf112..4b8374e399 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -4,14 +4,14 @@ dockerfile { dockerPush = true dockerRepos = ['confluentinc/cp-server-connect', 'confluentinc/cp-server-connect-base', 'confluentinc/cp-kafka-connect', 'confluentinc/cp-kafka-connect-base', - 'confluentinc/cp-enterprise-kafka', 'confluentinc/cp-kafka', + 'confluentinc/cp-enterprise-kafka', 'confluentinc/cp-kafka', 'confluentinc/cp-kafka-kraft', 'confluentinc/cp-server', 'confluentinc/cp-zookeeper'] mvnPhase = 'package' mvnSkipDeploy = true nodeLabel = 'docker-debian-jdk8-compose' slackChannel = 'kafka-warn' upstreamProjects = [] - dockerPullDeps = ['confluentinc/cp-base-new'] + dockerPullDeps = ['confluentinc/cp-base-new', 'confluentinc/cp-base-lite'] usePackages = true cron = '' // Disable the cron because this job requires parameters cpImages = true diff --git a/kafka-plus-rest/Dockerfile.ubi8 b/kafka-plus-rest/Dockerfile.ubi8 new file mode 100644 index 0000000000..1a424b8f0d --- /dev/null +++ b/kafka-plus-rest/Dockerfile.ubi8 @@ -0,0 +1,85 @@ +# +# Copyright 2019 Confluent Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+ARG DOCKER_UPSTREAM_REGISTRY
+ARG DOCKER_UPSTREAM_TAG=ubi8-latest
+
+FROM ${DOCKER_UPSTREAM_REGISTRY}confluentinc/cp-base-lite:${DOCKER_UPSTREAM_TAG}
+
+ARG PROJECT_VERSION
+ARG ARTIFACT_ID
+ARG GIT_COMMIT
+
+LABEL maintainer="partner-support@confluent.io"
+LABEL vendor="Confluent"
+LABEL version=$GIT_COMMIT
+LABEL release=$PROJECT_VERSION
+LABEL name=$ARTIFACT_ID
+LABEL summary="Kafka with REST Proxy"
+LABEL io.confluent.docker=true
+LABEL io.confluent.docker.git.id=$GIT_COMMIT
+ARG BUILD_NUMBER=-1
+LABEL io.confluent.docker.build.number=$BUILD_NUMBER
+LABEL io.confluent.docker.git.repo="confluentinc/kafka-images"
+
+ARG CONFLUENT_VERSION
+ARG CONFLUENT_PACKAGES_REPO
+ARG CONFLUENT_PLATFORM_LABEL
+
+# allow arg override of required env params
+ARG KAFKA_ZOOKEEPER_CONNECT
+ENV KAFKA_ZOOKEEPER_CONNECT=${KAFKA_ZOOKEEPER_CONNECT}
+ARG KAFKA_ADVERTISED_LISTENERS
+ENV KAFKA_ADVERTISED_LISTENERS=${KAFKA_ADVERTISED_LISTENERS}
+
+# exposed ports
+EXPOSE 9092 8082
+
+USER root
+
+RUN echo "===> Installing kafka and kafka rest..." \
+    && echo "===> Adding confluent repository...${CONFLUENT_PACKAGES_REPO}" \
+    && rpm --import ${CONFLUENT_PACKAGES_REPO}/archive.key \
+    && printf "[Confluent.dist] \n\
+name=Confluent repository (dist) \n\
+baseurl=${CONFLUENT_PACKAGES_REPO}/\$releasever \n\
+gpgcheck=1 \n\
+gpgkey=${CONFLUENT_PACKAGES_REPO}/archive.key \n\
+enabled=1 \n\
+\n\
+[Confluent] \n\
+name=Confluent repository \n\
+baseurl=${CONFLUENT_PACKAGES_REPO}/ \n\
+gpgcheck=1 \n\
+gpgkey=${CONFLUENT_PACKAGES_REPO}/archive.key \n\
+enabled=1 " > /etc/yum.repos.d/confluent.repo \
+    && yum install -y confluent-kafka-${CONFLUENT_VERSION} \
+    && yum install -y confluent-kafka-rest-${CONFLUENT_VERSION} \
+    && echo "===> clean up ..." \
+    && yum clean all \
+    && rm -rf /tmp/* /etc/yum.repos.d/confluent.repo \
+    && echo "===> Setting up dirs" \
+    && mkdir -p /var/lib/kafka/data /etc/kafka/secrets \
+    && chown appuser:root -R /etc/kafka-rest /etc/kafka /var/log/kafka /var/log/confluent /var/lib/kafka /var/lib/zookeeper /etc/kafka/secrets /var/lib/kafka /etc/kafka \
+    && chmod -R ug+w /etc/kafka-rest /etc/kafka /var/log/kafka /var/log/confluent /var/lib/kafka /var/lib/zookeeper /var/lib/kafka /etc/kafka/secrets /etc/kafka
+
+
+VOLUME ["/var/lib/kafka/data", "/etc/kafka/secrets"]
+
+COPY --chown=appuser:appuser include/etc/confluent/docker /etc/confluent/docker
+
+USER appuser
+
+CMD ["/etc/confluent/docker/run"]
diff --git a/kafka-plus-rest/README.md b/kafka-plus-rest/README.md
new file mode 100644
index 0000000000..888c477321
--- /dev/null
+++ b/kafka-plus-rest/README.md
@@ -0,0 +1,31 @@
+# Confluent Community Docker Image for Apache Kafka and Kafka REST Proxy
+
+Docker image for deploying and running the Community Version of Kafka along with the Kafka REST Proxy.
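+
+A minimal single-node quick start is sketched below. The image tag and all of
+the listener and cluster values here are illustrative assumptions, not the
+only supported configuration; see the configuration reference linked under
+"Using the image" for the full set of options.
+
+```bash
+docker run -d --name kafka-kraft \
+  -p 9092:9092 -p 8082:8082 \
+  -e KAFKA_NODE_ID=1 \
+  -e KAFKA_PROCESS_ROLES=broker,controller \
+  -e KAFKA_CONTROLLER_QUORUM_VOTERS=1@localhost:29093 \
+  -e KAFKA_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT \
+  -e KAFKA_LISTENERS=PLAINTEXT://0.0.0.0:9092,CONTROLLER://0.0.0.0:29093 \
+  -e KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://localhost:9092 \
+  -e KAFKA_CONTROLLER_LISTENER_NAMES=CONTROLLER \
+  -e KAFKA_INTER_BROKER_LISTENER_NAME=PLAINTEXT \
+  -e KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1 \
+  -e KAFKA_REST_BOOTSTRAP_SERVERS=localhost:9092 \
+  -e KAFKA_REST_LISTENERS=http://0.0.0.0:8082 \
+  -e CLUSTER_ID=4L6g3nShT-eMCtK--X86sw \
+  confluentinc/cp-kafka-kraft:latest
+```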
+
+## Using the image
+
+* [Notes on using the image](https://docs.confluent.io/platform/current/installation/docker/installation.html)
+* [Configuration Reference](https://docs.confluent.io/platform/current/installation/docker/config-reference.html#confluent-ak-configuration)
+
+## Resources
+
+* [Docker Quick Start for Apache Kafka using Confluent Platform](https://docs.confluent.io/platform/current/quickstart/ce-docker-quickstart.html#ce-docker-quickstart)
+
+* [Learn Kafka](https://developer.confluent.io/learn-kafka)
+
+* [Confluent Developer](https://developer.confluent.io): blogs, tutorials, videos, and podcasts for learning all about Apache Kafka and Confluent Platform
+
+* [confluentinc/cp-demo](https://github.com/confluentinc/cp-demo): GitHub demo that you can run locally. The demo uses this Docker image to showcase Confluent Server in a secured, end-to-end event streaming platform. It has an accompanying playbook that shows users how to use Confluent Control Center to manage and monitor Kafka Connect, Schema Registry, REST Proxy, KSQL, and Kafka Streams.
+
+* [confluentinc/examples](https://github.com/confluentinc/examples): additional curated examples in GitHub that you can run locally.
+
+## Contribute
+
+Start by reading our guidelines on contributing to this project, found in the repository linked below.
+
+* [Source Code](https://github.com/confluentinc/kafka-images)
+* [Issue Tracker](https://github.com/confluentinc/kafka-images/issues)
+
+## License
+
+This Docker image is licensed under the Apache 2 license. For more information on the licenses for each of the individual Confluent Platform components packaged in this image, please refer to the respective [Confluent Platform documentation](https://docs.confluent.io/platform/current/installation/docker/image-reference.html).
\ No newline at end of file
diff --git a/kafka-plus-rest/include/etc/confluent/docker/admin.properties b/kafka-plus-rest/include/etc/confluent/docker/admin.properties
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/kafka-plus-rest/include/etc/confluent/docker/configure b/kafka-plus-rest/include/etc/confluent/docker/configure
new file mode 100755
index 0000000000..2182dc489a
--- /dev/null
+++ b/kafka-plus-rest/include/etc/confluent/docker/configure
@@ -0,0 +1,41 @@
+#!/usr/bin/env bash
+#
+# Copyright 2016 Confluent Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+. /etc/confluent/docker/bash-config
+. 
/etc/confluent/docker/utility + +path /etc/kafka/ writable + +if [[ -z "${KAFKA_LOG_DIRS-}" ]] +then + export KAFKA_LOG_DIRS + KAFKA_LOG_DIRS="/var/lib/kafka/data" +fi + + +# for broker +exclude_props=("KAFKA_VERSION" "KAFKA_HEAP_OPTS" "KAFKA_LOG4J_OPTS" "KAFKA_JMX_OPTS" "KAFKA_JVM_PERFORMANCE_OPTS" "KAFKA_GC_LOG_OPTS" + "KAFKA_LOG4J_ROOT_LOGLEVEL" "KAFKA_LOG4J_LOGGERS" "KAFKA_TOOLS_LOG4J_LOGLEVEL" "KAFKA_ZOOKEEPER_CLIENT_CNXN_SOCKET" "KAFKA_REST_") +parse_props "/etc/confluent/docker/kafka.properties" "/etc/kafka/kafka.properties" KAFKA_ "${exclude_props[@]}" +build_log4j_properties_kafka "/etc/confluent/docker/log4j.properties" "/etc/kafka/log4j.properties" +build_tools_log4j_properties_kafka "/etc/confluent/docker/tools-log4j.properties" "/etc/kafka/tools-log4j.properties" + + +# for rest proxy +exclude_props=("KAFKA_REST_LOG4J_ROOT_LOGLEVEL" "KAFKA_REST_LOG4J_LOGGERS") +parse_props "/etc/confluent/docker/kafka-rest.properties" "/etc/kafka-rest/kafka-rest.properties" KAFKA_REST_ "${exclude_props[@]}" +build_log4j_properties_kafka_rest "/etc/confluent/docker/log4j-kafka-rest.properties" "/etc/kafka-rest/log4j.properties" +parse_props "/etc/confluent/docker/admin.properties" "/etc/kafka-rest/admin.properties" KAFKA_REST_CLIENT_ "${exclude_props[@]}" diff --git a/kafka-plus-rest/include/etc/confluent/docker/ensure b/kafka-plus-rest/include/etc/confluent/docker/ensure new file mode 100755 index 0000000000..c80d5c3806 --- /dev/null +++ b/kafka-plus-rest/include/etc/confluent/docker/ensure @@ -0,0 +1,27 @@ +#!/usr/bin/env bash +# +# Copyright 2020 Confluent Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +. /etc/confluent/docker/bash-config +. /etc/confluent/docker/utility + +export KAFKA_DATA_DIRS=${KAFKA_DATA_DIRS:-"/var/lib/kafka/data"} +echo "===> Check if $KAFKA_DATA_DIRS is writable ..." +path "$KAFKA_DATA_DIRS" writable + +# KRaft required step: Format the storage directory with a new cluster ID +if [[ "${KAFKA_PROCESS_ROLES-}" == *controller* ]]; then + kafka-storage format --ignore-formatted --cluster-id=$(kafka-storage random-uuid) -c /etc/kafka/kafka.properties +fi diff --git a/kafka-plus-rest/include/etc/confluent/docker/kafka-rest.properties b/kafka-plus-rest/include/etc/confluent/docker/kafka-rest.properties new file mode 100644 index 0000000000..e69de29bb2 diff --git a/kafka-plus-rest/include/etc/confluent/docker/kafka.properties b/kafka-plus-rest/include/etc/confluent/docker/kafka.properties new file mode 100644 index 0000000000..e69de29bb2 diff --git a/kafka-plus-rest/include/etc/confluent/docker/launch b/kafka-plus-rest/include/etc/confluent/docker/launch new file mode 100755 index 0000000000..660eed0c0d --- /dev/null +++ b/kafka-plus-rest/include/etc/confluent/docker/launch @@ -0,0 +1,34 @@ +#!/usr/bin/env bash +# +# Copyright 2016 Confluent Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Start kafka broker
+echo "===> Launching kafka ... "
+kafka-server-start /etc/kafka/kafka.properties & # start the broker in the background
+P1=$! # capture its PID
+
+# Wait for the broker to come up: this exits once topics can be listed, and the script continues even if the broker is unreachable
+$KAFKA_HOME/bin/kafka-topics --list --bootstrap-server localhost:9092
+
+# Start kafka rest
+echo "===> Launching kafka-rest ... "
+kafka-rest-start /etc/kafka-rest/kafka-rest.properties & # start the REST proxy in the background
+P2=$! # capture its PID
+
+# Wait for either of these two processes to exit
+wait -n $P1 $P2
+# Exit with the status of the process that exited first
+exit $?
diff --git a/kafka-plus-rest/include/etc/confluent/docker/log4j-kafka-rest.properties b/kafka-plus-rest/include/etc/confluent/docker/log4j-kafka-rest.properties
new file mode 100644
index 0000000000..709a8b0eaf
--- /dev/null
+++ b/kafka-plus-rest/include/etc/confluent/docker/log4j-kafka-rest.properties
@@ -0,0 +1,5 @@
+log4j.rootLogger=INFO, stdout
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
diff --git a/kafka-plus-rest/include/etc/confluent/docker/log4j.properties b/kafka-plus-rest/include/etc/confluent/docker/log4j.properties
new file mode 100644
index 0000000000..ddf080f980
--- /dev/null
+++ b/kafka-plus-rest/include/etc/confluent/docker/log4j.properties
@@ -0,0 +1,14 @@
+log4j.rootLogger=INFO, stdout
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
+
+log4j.logger.kafka=INFO
+log4j.logger.kafka.network.RequestChannel=WARN
+log4j.logger.kafka.producer.async.DefaultEventHandler=DEBUG
+log4j.logger.kafka.request.logger=WARN
+log4j.logger.kafka.controller=TRACE
+log4j.logger.kafka.log.LogCleaner=INFO
+log4j.logger.state.change.logger=TRACE
+log4j.logger.kafka.authorizer.logger=WARN
diff --git a/kafka-plus-rest/include/etc/confluent/docker/run b/kafka-plus-rest/include/etc/confluent/docker/run
new file mode 100755
index 0000000000..b65fb2ad1e
--- /dev/null
+++ b/kafka-plus-rest/include/etc/confluent/docker/run
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+#
+# Copyright 2016 Confluent Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+. /etc/confluent/docker/bash-config
+
+# Set environment values if they exist as arguments
+if [ $# -ne 0 ]; then
+  echo "===> Overriding env params with args ..."
+  for var in "$@"
+  do
+    export "$var"
+  done
+fi
+
+echo "===> User"
+id
+
+echo "===> Configuring ..." 
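+# configure (below) renders /etc/kafka/kafka.properties from KAFKA_* env vars and
+# /etc/kafka-rest/kafka-rest.properties from KAFKA_REST_* env vars; the exact
+# name mapping lives in the parse_props helper in the utility script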
+/etc/confluent/docker/configure + +echo "===> Running preflight checks ... " +/etc/confluent/docker/ensure + +echo "===> Launching ... " +exec /etc/confluent/docker/launch diff --git a/kafka-plus-rest/include/etc/confluent/docker/tools-log4j.properties b/kafka-plus-rest/include/etc/confluent/docker/tools-log4j.properties new file mode 100644 index 0000000000..27d9fbee48 --- /dev/null +++ b/kafka-plus-rest/include/etc/confluent/docker/tools-log4j.properties @@ -0,0 +1,6 @@ +log4j.rootLogger=WARN, stderr + +log4j.appender.stderr=org.apache.log4j.ConsoleAppender +log4j.appender.stderr.layout=org.apache.log4j.PatternLayout +log4j.appender.stderr.layout.ConversionPattern=[%d] %p %m (%c)%n +log4j.appender.stderr.Target=System.err diff --git a/kafka-plus-rest/include/etc/confluent/docker/utility b/kafka-plus-rest/include/etc/confluent/docker/utility new file mode 100644 index 0000000000..3cc5535050 --- /dev/null +++ b/kafka-plus-rest/include/etc/confluent/docker/utility @@ -0,0 +1,178 @@ +#!/usr/bin/env bash +# +# Copyright 2016 Confluent Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# checks path for permission +# $1 - path to check for +# $2 - permission to check for +path () { + if [[ "$2" == 'writable' ]] + then + if [ -w $1 ]; then return 0; else return 1; fi + elif [[ "$2" = 'readable' ]] + then + if [ -r $1 ]; then return 0; else return 1; fi + elif [[ "$2" = 'executable' ]] + then + if [ -x $1 ]; then return 0; else return 1; fi + elif [[ "$2" = 'existence' ]] + then + if [ -d $1 ]; then return 0; else return 1; fi + fi +} + + +# checks whether the environment variable is set +# $1 - env variable to check +ensure() { + if ! 
[[ -z "${1}" ]]; then return 0; else return 1; fi
+}
+
+
+# builds tools log4j props file for broker
+# $1 - source base file prepopulated with required log4j configs
+# $2 - final location of tools log4j props file
+build_tools_log4j_properties_kafka() {
+    src_base_file=$1
+    dest_file=$2
+    cp $src_base_file $dest_file
+    if [[ -n "${KAFKA_TOOLS_LOG4J_LOGLEVEL-}" ]];
+    then
+        sed -i "s|log4j.rootLogger=WARN, stderr|log4j.rootLogger=$KAFKA_TOOLS_LOG4J_LOGLEVEL, stderr|" $dest_file
+    fi
+}
+
+
+# builds main log4j props file for broker
+# $1 - source base file prepopulated with required log4j configs
+# $2 - final location of broker log4j props file
+build_log4j_properties_kafka() {
+    local src_base_file=$1
+    local dest_file=$2
+    cp $src_base_file $dest_file
+    if [[ -n "${KAFKA_LOG4J_ROOT_LOGLEVEL-}" ]];
+    then
+        sed -i "s|^log4j.rootLogger=.*|log4j.rootLogger=$KAFKA_LOG4J_ROOT_LOGLEVEL, stdout|" $dest_file
+    fi
+
+    if [[ -n "${KAFKA_LOG4J_LOGGERS-}" ]];
+    then
+        IFS=','
+        set -f
+        for line in $KAFKA_LOG4J_LOGGERS; do
+            separator="="
+            key=${line%$separator*}
+            val=${line#*$separator}
+            key=$(echo $key | sed -e 's|^[[:space:]]*||' -e 's|[[:space:]]*$||')
+            val=$(echo $val | sed -e 's|^[[:space:]]*||' -e 's|[[:space:]]*$||')
+
+            if grep -q ^log4j.logger.$key $dest_file
+            then
+                sed -i "s|^log4j.logger.$key.*|log4j.logger.$key=$val|" $dest_file
+            else
+                echo log4j.logger.$key=$val >> $dest_file
+            fi
+        done
+        set +f
+        unset IFS
+    fi
+}
+
+
+# builds log4j props file for rest proxy
+# $1 - source base file prepopulated with required log4j configs
+# $2 - final location of rest proxy log4j props file
+build_log4j_properties_kafka_rest() {
+    local src_base_file=$1
+    local dest_file=$2
+    cp $src_base_file $dest_file
+    if [[ -n "${KAFKA_REST_LOG4J_ROOT_LOGLEVEL-}" ]];
+    then
+        sed -i "s|^log4j.rootLogger=.*|log4j.rootLogger=$KAFKA_REST_LOG4J_ROOT_LOGLEVEL, stdout|" $dest_file
+    fi
+
+    if [[ -n "${KAFKA_REST_LOG4J_LOGGERS-}" ]];
+    then
+        IFS=','
+        set -f
+        for line in $KAFKA_REST_LOG4J_LOGGERS; do
+            separator="="
+            key=${line%$separator*}
+            val=${line#*$separator}
+            key=$(echo $key | sed -e 's|^[[:space:]]*||' -e 's|[[:space:]]*$||')
+            val=$(echo $val | sed -e 's|^[[:space:]]*||' -e 's|[[:space:]]*$||')
+
+            if grep -q ^log4j.logger.$key $dest_file
+            then
+                sed -i "s|^log4j.logger.$key.*|log4j.logger.$key=$val, stdout|" $dest_file
+            else
+                echo log4j.logger.$key=$val, stdout >> $dest_file
+            fi
+        done
+        set +f
+        unset IFS
+    fi
+}
+
+
+# build service property file for any component
+# $1 - source base file prepopulated with required props
+# $2 - final location of the props file
+# $3 - prefix to use for parsing the env variables
+# $4 - list of all env variables to be ignored while building props file
+parse_props() {
+
+    local src_base_file=$1
+    local dest_file=$2
+    local prefix=$3
+    shift 3
+    local exclude_props=("$@")
+    cp $src_base_file $dest_file
+
+    # loop over all env variables
+    env -0 | while IFS='=' read -r -d '' n v;
+    do
+        # ignore the ones not having the specified prefix
+        if ! [[ $n == $prefix* ]]; then continue; fi
+        # ignore if the value is empty
+        if [[ $v == "" ]]; then continue; fi
+
+        # ignore the variables present in the exclude_props array
+        var='include'
+        for str in ${exclude_props[@]};
+        do
+            if [[ $n == $str* ]];
+            then
+                var='exclude'
+                break
+            fi
+        done
+        if [[ $var == 'exclude' ]]; then continue; fi
+
+        n=${n//$prefix/} # remove the given prefix
+        n=$(echo $n | tr '[:upper:]' '[:lower:]') # convert to lower-case
+        n=${n//__/-} # replace __ with -
+        n=${n//_/.} # replace _ with . 
+ + # if property already present, override. if not present, append to file + if grep -q ^$n $dest_file + then + sed -i "s|^$n.*|$n=$v|" $dest_file + else + echo $n=$v >> $dest_file + fi + done +} diff --git a/kafka-plus-rest/pom.xml b/kafka-plus-rest/pom.xml new file mode 100644 index 0000000000..a81f6d8fce --- /dev/null +++ b/kafka-plus-rest/pom.xml @@ -0,0 +1,60 @@ + + + + + 4.0.0 + + + io.confluent.kafka-images + kafka-images-parent + 7.4.0-0 + + + io.confluent.kafka-images + cp-kafka-kraft + Kafka Docker Image with Rest Proxy + + + false + true + + + + + junit + junit + test + + + + + + + org.apache.maven.plugins + maven-jar-plugin + 2.6 + + + none + + + + + + diff --git a/pom.xml b/pom.xml index ee0982d374..0c5541cac5 100644 --- a/pom.xml +++ b/pom.xml @@ -36,6 +36,7 @@ zookeeper server kafka + kafka-plus-rest ce-kafka kafka-connect-base kafka-connect From 07de5ae37e4940ae30a48b385e05933b4066fee7 Mon Sep 17 00:00:00 2001 From: utkarsh5474 Date: Wed, 15 Mar 2023 23:40:18 +0530 Subject: [PATCH 03/44] enhance config checks --- .../include/etc/confluent/docker/configure | 117 +++++++++++++++++- .../include/etc/confluent/docker/utility | 11 +- 2 files changed, 125 insertions(+), 3 deletions(-) diff --git a/kafka-plus-rest/include/etc/confluent/docker/configure b/kafka-plus-rest/include/etc/confluent/docker/configure index 2182dc489a..628b96bf9e 100755 --- a/kafka-plus-rest/include/etc/confluent/docker/configure +++ b/kafka-plus-rest/include/etc/confluent/docker/configure @@ -17,6 +17,8 @@ . /etc/confluent/docker/bash-config . /etc/confluent/docker/utility + +# --- for broker path /etc/kafka/ writable if [[ -z "${KAFKA_LOG_DIRS-}" ]] @@ -25,8 +27,119 @@ then KAFKA_LOG_DIRS="/var/lib/kafka/data" fi +ensure KAFKA_ADVERTISED_LISTENERS $KAFKA_ADVERTISED_LISTENERS + +# By default, LISTENERS is derived from ADVERTISED_LISTENERS by replacing +# hosts with 0.0.0.0. This is good default as it ensures that the broker +# process listens on all ports. +if [[ -z "${KAFKA_LISTENERS-}" ]] +then + export KAFKA_LISTENERS + KAFKA_LISTENERS=$(echo "$KAFKA_ADVERTISED_LISTENERS" | sed -e 's|://[[:alpha:]]*:|://0.0.0.0:|g') +fi + +# advertised.host, advertised.port, host and port are deprecated. Exit if these properties are set. +if [[ -n "${KAFKA_ADVERTISED_PORT-}" ]] +then + echo "advertised.port is deprecated. Please use KAFKA_ADVERTISED_LISTENERS instead." + exit 1 +fi + +if [[ -n "${KAFKA_ADVERTISED_HOST-}" ]] +then + echo "advertised.host is deprecated. Please use KAFKA_ADVERTISED_LISTENERS instead." + exit 1 +fi + +if [[ -n "${KAFKA_HOST-}" ]] +then + echo "host is deprecated. Please use KAFKA_ADVERTISED_LISTENERS instead." + exit 1 +fi + +if [[ -n "${KAFKA_PORT-}" ]] +then + echo "port is deprecated. Please use KAFKA_ADVERTISED_LISTENERS instead." + exit 1 +fi + +# Set if ADVERTISED_LISTENERS has SSL:// or SASL_SSL:// endpoints. +if [[ $KAFKA_ADVERTISED_LISTENERS == *"SSL://"* ]] +then + echo "SSL is enabled." 
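+    # Illustrative (assumed) example of the env vars validated below:
+    #   KAFKA_SSL_KEYSTORE_FILENAME=kafka.broker.keystore.jks
+    #   KAFKA_SSL_KEYSTORE_CREDENTIALS=broker_keystore_creds
+    # filenames are resolved under /etc/kafka/secrets, and the *_CREDENTIALS
+    # files hold the corresponding store/key passwords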
+
+    ensure KAFKA_SSL_KEYSTORE_FILENAME $KAFKA_SSL_KEYSTORE_FILENAME
+    export KAFKA_SSL_KEYSTORE_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEYSTORE_FILENAME"
+    path "$KAFKA_SSL_KEYSTORE_LOCATION" readable
+
+    ensure KAFKA_SSL_KEY_CREDENTIALS $KAFKA_SSL_KEY_CREDENTIALS
+    KAFKA_SSL_KEY_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEY_CREDENTIALS"
+    path "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION" readable
+    export KAFKA_SSL_KEY_PASSWORD
+    KAFKA_SSL_KEY_PASSWORD=$(cat "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION")
+
+    ensure KAFKA_SSL_KEYSTORE_CREDENTIALS $KAFKA_SSL_KEYSTORE_CREDENTIALS
+    KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEYSTORE_CREDENTIALS"
+    path "$KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION" readable
+    export KAFKA_SSL_KEYSTORE_PASSWORD
+    KAFKA_SSL_KEYSTORE_PASSWORD=$(cat "$KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION")
+
+    if [[ -n "${KAFKA_SSL_CLIENT_AUTH-}" ]] && ( [[ $KAFKA_SSL_CLIENT_AUTH == *"required"* ]] || [[ $KAFKA_SSL_CLIENT_AUTH == *"requested"* ]] )
+    then
+        ensure KAFKA_SSL_TRUSTSTORE_FILENAME $KAFKA_SSL_TRUSTSTORE_FILENAME
+        export KAFKA_SSL_TRUSTSTORE_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_TRUSTSTORE_FILENAME"
+        path "$KAFKA_SSL_TRUSTSTORE_LOCATION" readable
+
+        ensure KAFKA_SSL_TRUSTSTORE_CREDENTIALS $KAFKA_SSL_TRUSTSTORE_CREDENTIALS
+        KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_TRUSTSTORE_CREDENTIALS"
+        path "$KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION" readable
+        export KAFKA_SSL_TRUSTSTORE_PASSWORD
+        KAFKA_SSL_TRUSTSTORE_PASSWORD=$(cat "$KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION")
+    fi
+
+fi
+
+# Set if KAFKA_ADVERTISED_LISTENERS has SASL_PLAINTEXT:// or SASL_SSL:// endpoints.
+if [[ $KAFKA_ADVERTISED_LISTENERS =~ .*SASL_.*://.* ]]
+then
+    echo "SASL" is enabled.
+
+    ensure KAFKA_OPTS $KAFKA_OPTS
+
+    if [[ ! $KAFKA_OPTS == *"java.security.auth.login.config"* ]]
+    then
+        echo "KAFKA_OPTS should contain 'java.security.auth.login.config' property."
+    fi
+fi
+
+if [[ -n "${KAFKA_JMX_OPTS-}" ]]
+then
+    if [[ ! $KAFKA_JMX_OPTS == *"com.sun.management.jmxremote.rmi.port"* ]]
+    then
+        echo "KAFKA_JMX_OPTS should contain 'com.sun.management.jmxremote.rmi.port' property. It is required for accessing the JMX metrics externally."
+    fi
+fi
+
+
+# --- for rest proxy
+ensure KAFKA_REST_BOOTSTRAP_SERVERS $KAFKA_REST_BOOTSTRAP_SERVERS
+
+if [[ -n "${KAFKA_REST_PORT-}" ]]
+then
+    echo "PORT is deprecated. Please use KAFKA_REST_LISTENERS instead."
+    exit 1
+fi
+
+if [[ -n "${KAFKAREST_JMX_OPTS-}" ]]
+then
+    if [[ ! $KAFKAREST_JMX_OPTS == *"com.sun.management.jmxremote.rmi.port"* ]]
+    then
+        echo "KAFKAREST_JMX_OPTS should contain 'com.sun.management.jmxremote.rmi.port' property. It is required for accessing the JMX metrics externally." 
+ fi +fi + -# for broker +# --- for broker exclude_props=("KAFKA_VERSION" "KAFKA_HEAP_OPTS" "KAFKA_LOG4J_OPTS" "KAFKA_JMX_OPTS" "KAFKA_JVM_PERFORMANCE_OPTS" "KAFKA_GC_LOG_OPTS" "KAFKA_LOG4J_ROOT_LOGLEVEL" "KAFKA_LOG4J_LOGGERS" "KAFKA_TOOLS_LOG4J_LOGLEVEL" "KAFKA_ZOOKEEPER_CLIENT_CNXN_SOCKET" "KAFKA_REST_") parse_props "/etc/confluent/docker/kafka.properties" "/etc/kafka/kafka.properties" KAFKA_ "${exclude_props[@]}" @@ -34,7 +147,7 @@ build_log4j_properties_kafka "/etc/confluent/docker/log4j.properties" "/etc/kafk build_tools_log4j_properties_kafka "/etc/confluent/docker/tools-log4j.properties" "/etc/kafka/tools-log4j.properties" -# for rest proxy +# --- for rest proxy exclude_props=("KAFKA_REST_LOG4J_ROOT_LOGLEVEL" "KAFKA_REST_LOG4J_LOGGERS") parse_props "/etc/confluent/docker/kafka-rest.properties" "/etc/kafka-rest/kafka-rest.properties" KAFKA_REST_ "${exclude_props[@]}" build_log4j_properties_kafka_rest "/etc/confluent/docker/log4j-kafka-rest.properties" "/etc/kafka-rest/log4j.properties" diff --git a/kafka-plus-rest/include/etc/confluent/docker/utility b/kafka-plus-rest/include/etc/confluent/docker/utility index 3cc5535050..993cb2662d 100644 --- a/kafka-plus-rest/include/etc/confluent/docker/utility +++ b/kafka-plus-rest/include/etc/confluent/docker/utility @@ -37,8 +37,17 @@ path () { # checks whether the environment variable is set # $1 - env variable to check +# $2 - value of the env variables ensure() { - if ! [[ -z "${1}" ]]; then return 0; else return 1; fi + set -e + if [ $# -ge 2 ] && [ -n "$2" ] + then + set +e + return 0 + else + echo $1 is required + return 1 + fi } From a8e36206a0a34e5dbf77647316118f56109fbd5d Mon Sep 17 00:00:00 2001 From: utkarsh5474 Date: Wed, 15 Mar 2023 23:47:59 +0530 Subject: [PATCH 04/44] rename image to cp-kafka-lite --- Jenkinsfile | 2 +- kafka-plus-rest/pom.xml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 4b8374e399..6ad2e3a5fb 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -4,7 +4,7 @@ dockerfile { dockerPush = true dockerRepos = ['confluentinc/cp-server-connect', 'confluentinc/cp-server-connect-base', 'confluentinc/cp-kafka-connect', 'confluentinc/cp-kafka-connect-base', - 'confluentinc/cp-enterprise-kafka', 'confluentinc/cp-kafka', 'confluentinc/cp-kafka-kraft', + 'confluentinc/cp-enterprise-kafka', 'confluentinc/cp-kafka', 'confluentinc/cp-kafka-lite', 'confluentinc/cp-server', 'confluentinc/cp-zookeeper'] mvnPhase = 'package' mvnSkipDeploy = true diff --git a/kafka-plus-rest/pom.xml b/kafka-plus-rest/pom.xml index a81f6d8fce..34347bba70 100644 --- a/kafka-plus-rest/pom.xml +++ b/kafka-plus-rest/pom.xml @@ -27,7 +27,7 @@ io.confluent.kafka-images - cp-kafka-kraft + cp-kafka-lite Kafka Docker Image with Rest Proxy From a0c6ca1a5c6b4e050f5429c72dc7289545c9ecc8 Mon Sep 17 00:00:00 2001 From: utkarsh5474 Date: Thu, 30 Mar 2023 17:05:53 +0530 Subject: [PATCH 05/44] sync with latest kraft mode changes in kafka image --- Jenkinsfile | 2 +- kafka-plus-rest/Dockerfile.ubi8 | 2 + .../include/etc/confluent/docker/configure | 42 +++++++++++++++---- .../include/etc/confluent/docker/ensure | 12 ++++-- kafka-plus-rest/pom.xml | 2 +- 5 files changed, 46 insertions(+), 14 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 6ad2e3a5fb..4b8374e399 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -4,7 +4,7 @@ dockerfile { dockerPush = true dockerRepos = ['confluentinc/cp-server-connect', 'confluentinc/cp-server-connect-base', 'confluentinc/cp-kafka-connect', 'confluentinc/cp-kafka-connect-base', - 
'confluentinc/cp-enterprise-kafka', 'confluentinc/cp-kafka', 'confluentinc/cp-kafka-lite', + 'confluentinc/cp-enterprise-kafka', 'confluentinc/cp-kafka', 'confluentinc/cp-kafka-kraft', 'confluentinc/cp-server', 'confluentinc/cp-zookeeper'] mvnPhase = 'package' mvnSkipDeploy = true diff --git a/kafka-plus-rest/Dockerfile.ubi8 b/kafka-plus-rest/Dockerfile.ubi8 index 1a424b8f0d..e38a047d7a 100644 --- a/kafka-plus-rest/Dockerfile.ubi8 +++ b/kafka-plus-rest/Dockerfile.ubi8 @@ -43,6 +43,8 @@ ARG KAFKA_ZOOKEEPER_CONNECT ENV KAFKA_ZOOKEEPER_CONNECT=${KAFKA_ZOOKEEPER_CONNECT} ARG KAFKA_ADVERTISED_LISTENERS ENV KAFKA_ADVERTISED_LISTENERS=${KAFKA_ADVERTISED_LISTENERS} +ARG CLUSTER_ID +ENV CLUSTER_ID=${CLUSTER_ID} # exposed ports EXPOSE 9092 8082 diff --git a/kafka-plus-rest/include/etc/confluent/docker/configure b/kafka-plus-rest/include/etc/confluent/docker/configure index 628b96bf9e..4f24cba3ab 100755 --- a/kafka-plus-rest/include/etc/confluent/docker/configure +++ b/kafka-plus-rest/include/etc/confluent/docker/configure @@ -19,25 +19,49 @@ # --- for broker -path /etc/kafka/ writable -if [[ -z "${KAFKA_LOG_DIRS-}" ]] +# If KAFKA_PROCESS_ROLES is defined it means we are running in KRaft mode + +# unset KAFKA_ADVERTISED_LISTENERS from ENV in KRaft mode when running as controller only +if [[ -n "${KAFKA_PROCESS_ROLES-}" ]] then - export KAFKA_LOG_DIRS - KAFKA_LOG_DIRS="/var/lib/kafka/data" + echo "Running in KRaft mode..." + ensure CLUSTER_ID $CLUSTER_ID + if [[ $KAFKA_PROCESS_ROLES == "controller" ]] + then + if [[ -n "${KAFKA_ADVERTISED_LISTENERS-}" ]] + then + echo "KAFKA_ADVERTISED_LISTENERS is not supported on a KRaft controller." + exit 1 + else + unset KAFKA_ADVERTISED_LISTENERS + fi + else + ensure KAFKA_ADVERTISED_LISTENERS $KAFKA_ADVERTISED_LISTENERS + fi +else + echo "Running in Zookeeper mode..." + ensure KAFKA_ZOOKEEPER_CONNECT $KAFKA_ZOOKEEPER_CONNECT + ensure KAFKA_ADVERTISED_LISTENERS $KAFKA_ADVERTISED_LISTENERS fi -ensure KAFKA_ADVERTISED_LISTENERS $KAFKA_ADVERTISED_LISTENERS - # By default, LISTENERS is derived from ADVERTISED_LISTENERS by replacing # hosts with 0.0.0.0. This is good default as it ensures that the broker # process listens on all ports. -if [[ -z "${KAFKA_LISTENERS-}" ]] +if [[ -z "${KAFKA_LISTENERS-}" ]] && ( [[ -z "${KAFKA_PROCESS_ROLES-}" ]] || [[ $KAFKA_PROCESS_ROLES != "controller" ]] ) then export KAFKA_LISTENERS KAFKA_LISTENERS=$(echo "$KAFKA_ADVERTISED_LISTENERS" | sed -e 's|://[[:alpha:]]*:|://0.0.0.0:|g') fi +path /etc/kafka/ writable + +if [[ -z "${KAFKA_LOG_DIRS-}" ]] +then + export KAFKA_LOG_DIRS + KAFKA_LOG_DIRS="/var/lib/kafka/data" +fi + # advertised.host, advertised.port, host and port are deprecated. Exit if these properties are set. if [[ -n "${KAFKA_ADVERTISED_PORT-}" ]] then @@ -64,7 +88,7 @@ then fi # Set if ADVERTISED_LISTENERS has SSL:// or SASL_SSL:// endpoints. -if [[ $KAFKA_ADVERTISED_LISTENERS == *"SSL://"* ]] +if [[ -n "${KAFKA_ADVERTISED_LISTENERS-}" ]] && [[ $KAFKA_ADVERTISED_LISTENERS == *"SSL://"* ]] then echo "SSL is enabled." @@ -100,7 +124,7 @@ then fi # Set if KAFKA_ADVERTISED_LISTENERS has SASL_PLAINTEXT:// or SASL_SSL:// endpoints. -if [[ $KAFKA_ADVERTISED_LISTENERS =~ .*SASL_.*://.* ]] +if [[ -n "${KAFKA_ADVERTISED_LISTENERS-}" ]] && [[ $KAFKA_ADVERTISED_LISTENERS =~ .*SASL_.*://.* ]] then echo "SASL" is enabled. 
diff --git a/kafka-plus-rest/include/etc/confluent/docker/ensure b/kafka-plus-rest/include/etc/confluent/docker/ensure
index c80d5c3806..b8d503aa00 100755
--- a/kafka-plus-rest/include/etc/confluent/docker/ensure
+++ b/kafka-plus-rest/include/etc/confluent/docker/ensure
@@ -21,7 +21,13 @@ export KAFKA_DATA_DIRS=${KAFKA_DATA_DIRS:-"/var/lib/kafka/data"}
 echo "===> Check if $KAFKA_DATA_DIRS is writable ..."
 path "$KAFKA_DATA_DIRS" writable
 
-# KRaft required step: Format the storage directory with a new cluster ID
-if [[ "${KAFKA_PROCESS_ROLES-}" == *controller* ]]; then
-    kafka-storage format --ignore-formatted --cluster-id=$(kafka-storage random-uuid) -c /etc/kafka/kafka.properties
+# KRaft required step: Format the storage directory with provided cluster ID unless it already exists.
+if [[ -n "${KAFKA_PROCESS_ROLES-}" ]]
+then
+    echo "===> Using provided cluster id $CLUSTER_ID ..."
+
+    # A bit of a hack to not error out if the storage is already formatted. Need storage-tool to support this
+    result=$(kafka-storage format --cluster-id=$CLUSTER_ID -c /etc/kafka/kafka.properties 2>&1) || \
+        echo $result | grep -i "already formatted" || \
+        { echo $result && (exit 1); }
 fi
diff --git a/kafka-plus-rest/pom.xml b/kafka-plus-rest/pom.xml
index 34347bba70..a81f6d8fce 100644
--- a/kafka-plus-rest/pom.xml
+++ b/kafka-plus-rest/pom.xml
@@ -27,7 +27,7 @@
 
     io.confluent.kafka-images
-    cp-kafka-lite
+    cp-kafka-kraft
     Kafka Docker Image with Rest Proxy
 
 

From 9b0c727dda2afaa07d924b194ed3a64fd418f104 Mon Sep 17 00:00:00 2001
From: Ujjwal
Date: Wed, 3 May 2023 03:17:31 +0530
Subject: [PATCH 06/44] adding testcontainer tests (initial commit)

---
 kafka-plus-rest/pom.xml                       |  82 +++++++++-
 kafka-plus-rest/src/test/java/env.yml         |  17 ++
 .../java/org/dockerImageTests/kafkaIT.java    | 133 ++++++++++++++++
 .../org/dockerImageTests/utils/Admin.java     | 129 +++++++++++++++
 .../org/dockerImageTests/utils/Consumer.java  | 147 ++++++++++++++++++
 .../utils/CustomKafkaContainer.java           |  93 +++++++++++
 .../org/dockerImageTests/utils/Producer.java  |  62 ++++++++
 7 files changed, 658 insertions(+), 5 deletions(-)
 create mode 100644 kafka-plus-rest/src/test/java/env.yml
 create mode 100644 kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java
 create mode 100644 kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java
 create mode 100644 kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java
 create mode 100644 kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java
 create mode 100644 kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Producer.java

diff --git a/kafka-plus-rest/pom.xml b/kafka-plus-rest/pom.xml
index a81f6d8fce..050661e7b9 100644
--- a/kafka-plus-rest/pom.xml
+++ b/kafka-plus-rest/pom.xml
@@ -19,27 +19,88 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     4.0.0
-
     io.confluent.kafka-images
     kafka-images-parent
     7.4.0-0
+
     io.confluent.kafka-images
     cp-kafka-kraft
     Kafka Docker Image with Rest Proxy
+
+    11
+    11
+    UTF-8
+
     false
-    true
+    false
+    2.0.0-alpha5
-
-    junit
-    junit
+    com.squareup.okhttp3
+    okhttp
+    4.9.1
+
+    org.apache.httpcomponents
+    httpclient
+    4.5.13
+
+    org.json
+    json
+    20210307
+
+    org.apache.kafka
+    kafka-clients
+    2.8.1
+
+    org.junit.jupiter
+    junit-jupiter
+    5.8.1
+    test
+
+    org.testcontainers
+    testcontainers
+    1.18.0
     test
+
+    log4j
+    log4j
+
+
+    org.testcontainers
+    junit-jupiter
+    1.18.0
+    test
+
+    org.yaml
+    snakeyaml
+    1.21
+
+    org.slf4j
+    slf4j-api
+    ${slf4j.version} 
+ + + org.slf4j + slf4j-log4j12 + ${slf4j.version} @@ -55,6 +116,17 @@ + + org.apache.maven.plugins + maven-failsafe-plugin + 3.0.0-M7 + + + ${docker.registry} + ${docker.tag} + + + diff --git a/kafka-plus-rest/src/test/java/env.yml b/kafka-plus-rest/src/test/java/env.yml new file mode 100644 index 0000000000..8b2ed24bf3 --- /dev/null +++ b/kafka-plus-rest/src/test/java/env.yml @@ -0,0 +1,17 @@ +KAFKA_NODE_ID: 1 +KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT' +KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092' +KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 +KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 +KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 +KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 +KAFKA_PROCESS_ROLES: 'broker,controller' +KAFKA_CONTROLLER_QUORUM_VOTERS: '1@broker:29093' +KAFKA_LISTENERS: 'PLAINTEXT://broker:29092,CONTROLLER://broker:29093,PLAINTEXT_HOST://0.0.0.0:9092' +KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT' +KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' +KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' +KAFKA_REST_HOST_NAME: rest-proxy +KAFKA_REST_BOOTSTRAP_SERVERS: 'broker:29092' +KAFKA_REST_LISTENERS: "http://0.0.0.0:8082" +CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw' \ No newline at end of file diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java new file mode 100644 index 0000000000..aed98175dd --- /dev/null +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java @@ -0,0 +1,133 @@ +package org.dockerImageTests; + +//import org.junit.Test; +import io.confluent.common.utils.IntegrationTest; +import org.dockerImageTests.utils.Admin; +import org.dockerImageTests.utils.Consumer; +import org.dockerImageTests.utils.CustomKafkaContainer; +import org.dockerImageTests.utils.Producer; +import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import java.util.Map; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.junit.jupiter.*; +import org.testcontainers.junit.jupiter.Testcontainers; + +import java.util.List; +import java.util.Properties; +import java.util.concurrent.TimeUnit; + + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + + +@Category(IntegrationTest.class) +@Tag("IntegrationTest") +@Testcontainers +public class kafkaIT { + private static final int KAFKA_PORT = 9093; + private static final int KAFKA_REST_PORT = 8082; + + private static final String TOPIC_1 = "test-topic1"; + private static final String TOPIC_2 = "test-topic2"; + @Container + public static GenericContainer container1=new CustomKafkaContainer();; + @BeforeAll + public static void setup(){ + try { + container1.start(); + } + catch(Exception e) { + System.out.println(container1.getLogs()); + System.out.println(container1.isRunning()); + } + } + @AfterAll + public static void teardown(){ + System.out.println(container1.isRunning()); + System.out.println(container1.isRunning()); + container1.stop(); + System.out.println(container1.isRunning()); + System.out.println("tearing down"); + } + @Test + public void kafkaApiTest() { + Map env = System.getenv(); + Properties props = new Properties(); + props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + props.put("value.serializer", 
"org.apache.kafka.common.serialization.StringSerializer"); + String baseUrl = String.format("http://localhost:%s",container1.getMappedPort(KAFKA_REST_PORT)); + System.out.println(baseUrl); + System.out.println(KAFKA_PORT); + System.out.println(container1.getMappedPort(KAFKA_PORT)); + String bootstrapUrl = String.format("localhost:%s",container1.getMappedPort(KAFKA_PORT)); + String bootstrapUrl1 = String.format("localhost:%s","9092"); + System.out.println(bootstrapUrl); + System.out.println(container1.getHost()); + Admin admin = new Admin(bootstrapUrl,baseUrl); + Consumer consumer = new Consumer(bootstrapUrl,"test-1","abc",baseUrl); + props.put("bootstrap.servers", bootstrapUrl); + props.put("acks", "all"); + Producer producer = new Producer(props,baseUrl); + try { + admin.createTopic(TOPIC_1,3, (short) 1); + System.out.println(admin.listTopicsUsingKafkaApi()); + TimeUnit.MILLISECONDS.sleep(100); + System.out.println(admin.listTopicsUsingRestApi()); + List topics = admin.listTopicsUsingRestApi(); + assertTrue(topics.stream().anyMatch((String.format("\"%s\"",TOPIC_1))::equals)); + producer.send(TOPIC_1,10); + assertTrue(consumer.consume(10,TOPIC_1)); + } + catch (Exception e){ + System.out.println(e); + System.out.println(container1.getLogs()); + fail(); + } + + producer.close(); + System.out.println(container1.isRunning()); + + } + @Test + public void kafkaRestApiTest() { + Properties props = new Properties(); + props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + String baseUrl = String.format("http://localhost:%s",container1.getMappedPort(KAFKA_REST_PORT)); + System.out.println(baseUrl); + System.out.println(KAFKA_PORT); + System.out.println(container1.getMappedPort(KAFKA_PORT)); + String bootstrapUrl = String.format("localhost:%s",container1.getMappedPort(KAFKA_PORT)); + String bootstrapUrl1 = String.format("localhost:%s","9092"); + System.out.println(bootstrapUrl); + System.out.println(container1.getHost()); + Admin admin = new Admin(bootstrapUrl,baseUrl); + Consumer consumer = new Consumer(bootstrapUrl,"test-1","abc",baseUrl); + props.put("bootstrap.servers", bootstrapUrl); + Producer producer = new Producer(props,baseUrl); + try { + admin.createTopic(TOPIC_2,3, (short) 1); + TimeUnit.MILLISECONDS.sleep(100); + System.out.println(admin.listTopicsUsingRestApi()); + List topics = admin.listTopicsUsingRestApi(); + assertTrue(topics.stream().anyMatch((String.format("\"%s\"",TOPIC_2))::equals)); + producer.sendRest(TOPIC_2,10); + TimeUnit.MILLISECONDS.sleep(100); + consumer.subscribeTopicRest(TOPIC_2); + assertTrue(consumer.consumeRest(10)); + } + catch (Exception e){ + System.out.println(e); + fail(); + } + + producer.close(); + System.out.println(container1.isRunning()); + + } +} diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java new file mode 100644 index 0000000000..b5e2e9d630 --- /dev/null +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java @@ -0,0 +1,129 @@ +package org.dockerImageTests.utils; + +import java.io.IOException; +import java.io.InputStream; +import java.util.*; + +import org.apache.http.HttpEntity; +import org.apache.http.client.HttpClient; +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.HttpResponse; +import org.apache.http.client.methods.HttpPost; 
+import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.util.EntityUtils; +import org.apache.kafka.clients.admin.*; +import org.apache.kafka.common.config.TopicConfig; + +public class Admin { + private final String bootstrapServers; + private final String restEndpoint; + AdminClient adminClient; + public Admin(String bootstrapServers, String restEndpoint) { + this.bootstrapServers = bootstrapServers; + this.restEndpoint = restEndpoint; + Properties props = new Properties(); + props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); + adminClient = AdminClient.create(props); + + } + + public List listTopicsUsingKafkaApi() throws Exception { + ListTopicsResult topics = adminClient.listTopics(); + return new ArrayList<>(topics.names().get()); + } + + public void createTopic(String topicName, int numPartitions, short replicationFactor) throws Exception { + Map configs = new HashMap<>(); + configs.put(TopicConfig.RETENTION_MS_CONFIG, "86400000"); + + NewTopic newTopic = new NewTopic(topicName, numPartitions, replicationFactor).configs(configs); + + CreateTopicsResult result = adminClient.createTopics(Collections.singleton(newTopic)); + + result.values().get(topicName).get(); + } + + public void createTopicRest(String topicName, int numPartitions, short replicationFactor) throws Exception { + String url = restEndpoint + "/topics/"; + + String requestBody = String.format( + "{\"topic_name\" : \"%s\",\"partitions_count\": %d,\"replication-factor\": %d}", + topicName, + numPartitions, + replicationFactor + ); + + + HttpClient httpClient = HttpClientBuilder.create().build(); + HttpPost postRequest = new HttpPost(url); + postRequest.addHeader("Content-Type", "application/vnd.kafka.binary.v2+json"); + StringEntity requestEntity = new StringEntity(requestBody); + + postRequest.setEntity(requestEntity); + HttpResponse response = httpClient.execute(postRequest); + HttpEntity responseBody = response.getEntity(); + String responseString = ""; + if (responseBody != null) { + InputStream instream = responseBody.getContent(); + responseString = EntityUtils.toString(responseBody); + } + if (response.getStatusLine().getStatusCode() != 200) { + throw new RuntimeException("Failed to create topic: " + response.toString()); + } + } + + + + public List listTopicsUsingRestApi() throws IOException { + String endpoint = restEndpoint + "/topics"; + HttpClient httpClient = HttpClientBuilder.create().build(); + HttpGet getRequest = new HttpGet(endpoint); + HttpResponse response = httpClient.execute(getRequest); + HttpEntity responseBody = response.getEntity(); + String responseString = ""; + if (responseBody != null) { + InputStream instream = responseBody.getContent(); + responseString = EntityUtils.toString(responseBody); + } + + return Arrays.asList(responseString.replaceAll("\\[|\\]", "").split(",")); + } + + public void deleteTopicUsingKafkaApi(String topicName) throws Exception { + Properties props = new Properties(); + props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); + AdminClient adminClient = AdminClient.create(props); + DeleteTopicsResult result = adminClient.deleteTopics(Arrays.asList(topicName)); + result.all().get(); + } + + public void deleteTopicUsingRestApi(String topicName) throws Exception { + HttpClient httpClient = HttpClientBuilder.create().build(); + HttpDelete deleteRequest = new HttpDelete(restEndpoint + "/" + topicName); + HttpResponse response = httpClient.execute(deleteRequest); + 
response.getEntity().getContent().close(); + } + + public boolean topicExistsUsingKafkaApi(String topicName) throws Exception { + Properties props = new Properties(); + props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); + AdminClient adminClient = AdminClient.create(props); + ListTopicsResult topics = adminClient.listTopics(); + for (TopicListing topic : topics.listings().get()) { + if (topic.name().equals(topicName)) { + return true; + } + } + return false; + } + + public boolean topicExistsUsingRestApi(String topicName) throws IOException { + HttpClient httpClient = HttpClientBuilder.create().build(); + HttpGet getRequest = new HttpGet(restEndpoint); + HttpResponse response = httpClient.execute(getRequest); + String responseBody = response.getEntity().getContent().toString(); + return Arrays.asList(responseBody.split(",")).contains(topicName); + } +} diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java new file mode 100644 index 0000000000..43b1b20a42 --- /dev/null +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java @@ -0,0 +1,147 @@ +package org.dockerImageTests.utils; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.util.Collections; +import java.util.Properties; + +import org.apache.http.HttpEntity; +import org.apache.http.HttpStatus; +import org.apache.http.client.HttpClient; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.HttpResponse; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.util.EntityUtils; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.clients.consumer.KafkaConsumer; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.json.JSONArray; + +public class Consumer { + private final String bootstrapServers; + private final String groupId; + private final String topic; + private final String restEndpoint; + + private static final String CONSUMER_GROUP_ID = "dockerTests"; + private static final String CONSUMER_INSTANCE_ID = "instance1"; + + public Consumer(String bootstrapServers, String groupId, String topic, String restEndpoint) { + this.bootstrapServers = bootstrapServers; + this.groupId = groupId; + this.topic = topic; + this.restEndpoint = restEndpoint; + } + + public boolean consume(int numMessages,String topicName) { + Properties props = new Properties(); + props.put("bootstrap.servers", bootstrapServers); + props.put("group.id", groupId); + props.put("auto.offset.reset", "earliest"); + props.put("enable.auto.commit", "true"); + props.put("auto.commit.interval.ms", "1000"); + props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); + props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); + KafkaConsumer consumer = new KafkaConsumer<>(props); + consumer.subscribe(Collections.singletonList(topicName)); + ConsumerRecords records = consumer.poll(Duration.ofMillis(100)); + int retry=0; + while(records.count()==0){ + System.out.println("Polling again"); + records = consumer.poll(Duration.ofMillis(100)); + } + System.out.println("subcscribed to 
topic" + consumer.subscription()); + for (ConsumerRecord record : records) { + handleMessage(record.value()); + numMessages--; + } + System.out.println("records === " + records.count()); + consumer.commitSync(); + if (numMessages>0){ + return false; + } + return true; + + } + private void createConsumerInstance() throws IOException { + String url = restEndpoint + "/consumers/" + CONSUMER_GROUP_ID; + String requestBody = "{\"name\":\"" + CONSUMER_INSTANCE_ID + "\",\"format\":\"binary\",\"auto.offset.reset\":\"earliest\"}"; + + HttpClient httpClient = HttpClientBuilder.create().build(); + HttpPost request = new HttpPost(url); + request.addHeader("Content-Type", "application/vnd.kafka.v2+json"); + request.setEntity(new StringEntity(requestBody)); + + HttpResponse response = httpClient.execute(request); + int statusCode = response.getStatusLine().getStatusCode(); + if (statusCode != HttpStatus.SC_OK) { + throw new RuntimeException("Failed to create Kafka consumer instance. Response code: " + statusCode); + } + System.out.println("successfully created the consumer instance"); + } + + public boolean subscribeTopicRest(String topicName){ + try { + createConsumerInstance(); + CloseableHttpClient httpClient = HttpClients.createDefault(); + String subscriptionUrl = restEndpoint + "/consumers/" + CONSUMER_GROUP_ID + "/instances/" + CONSUMER_INSTANCE_ID + "/subscription"; + HttpPost subscriptionRequest = new HttpPost(subscriptionUrl); + String subscriptionJson = "{ \"topics\": [ \"" + topicName + "\" ] }"; + StringEntity subscriptionEntity = new StringEntity(subscriptionJson, ContentType.APPLICATION_JSON); + subscriptionRequest.setEntity(subscriptionEntity); + subscriptionRequest.addHeader("Content-Type", "application/vnd.kafka.json.v2+json"); + HttpResponse subscriptionResponse = httpClient.execute(subscriptionRequest); + if (subscriptionResponse.getStatusLine().getStatusCode() == 204) { + System.out.println("Subscribed to Kafka topic: " + topicName); + return true; + } else { + System.out.println("Failed to subscribe to Kafka topic. 
Response code: " + subscriptionResponse.getStatusLine().getStatusCode()); + return false;} + } catch (IOException e) { + System.out.println("Error subscribing to Kafka topic: " + e.getMessage()); + return false; + } + } + public boolean consumeRest(int numMessages) throws Exception { + + // Continuously fetch messages from the Kafka topic + try { + CloseableHttpClient httpClient = HttpClients.createDefault(); + String fetchUrl = restEndpoint + "/consumers/" + CONSUMER_GROUP_ID + "/instances/" + CONSUMER_INSTANCE_ID + "/records?timeout=10000"; + HttpGet fetchRequest = new HttpGet(fetchUrl); + HttpResponse fetchResponse = httpClient.execute(fetchRequest); + HttpEntity fetchEntity = fetchResponse.getEntity(); + while(fetchEntity==null){ + fetchResponse = httpClient.execute(fetchRequest); + fetchEntity = fetchResponse.getEntity(); + } + if (fetchEntity != null) { + String messageJson = EntityUtils.toString(fetchEntity, StandardCharsets.UTF_8); + System.out.println(messageJson); + JSONArray jsonArray = new JSONArray(messageJson); + + for (int i = 0; i < jsonArray.length(); i++) { + numMessages--; + } + if (numMessages>0){ + System.out.println("numMessages = " + numMessages); + return false; + } + } + } catch (IOException e) { + System.out.println("Error fetching messages from Kafka topic: " + e.getMessage()); + return false; + } + return true; + } + + private void handleMessage(String message) { + System.out.println("Received message: " + message); + } +} diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java new file mode 100644 index 0000000000..e4167ec45f --- /dev/null +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java @@ -0,0 +1,93 @@ +package org.dockerImageTests.utils; + +import com.github.dockerjava.api.command.InspectContainerResponse; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.Network; +import org.testcontainers.images.PullPolicy; +import org.testcontainers.images.builder.Transferable; +import org.testcontainers.utility.DockerImageName; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +public class CustomKafkaContainer extends GenericContainer { + private static final int KAFKA_PORT = 9093; + private static final int KAFKA_REST_PORT = 8082; + + private static final String STARTER_SCRIPT = "/testcontainers_start.sh"; + + private static final String DOCKER_REGISTRY = System.getenv("DOCKER_REGISTRY"); + + private static final String DOCKER_TAG = System.getenv("DOCKER_TAG"); + public CustomKafkaContainer() { + super(DockerImageName.parse(String.format("%s:%s",DOCKER_REGISTRY,DOCKER_TAG))); + System.out.println("Using image " + String.format("%s:%s",DOCKER_REGISTRY,DOCKER_TAG)); + Map env = new HashMap(); + env.put("KAFKA_NODE_ID","1"); + env.put( "KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", "CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,HOST:PLAINTEXT"); + env.put("KAFKA_ADVERTISED_LISTENERS","PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092"); + env.put("KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR", "1"); + env.put("KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS", "0"); + env.put("KAFKA_TRANSACTION_STATE_LOG_MIN_ISR", "1"); + env.put("KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR", "1"); + env.put("KAFKA_PROCESS_ROLES", "broker,controller"); + env.put("KAFKA_CONTROLLER_QUORUM_VOTERS", "1@broker:29093"); + env.put("KAFKA_LISTENERS", 
"PLAINTEXT://broker:29092,CONTROLLER://broker:29093,PLAINTEXT_HOST://0.0.0.0:9092,HOST://0.0.0.0:9093"); + env.put("KAFKA_INTER_BROKER_LISTENER_NAME", "PLAINTEXT"); + env.put("KAFKA_CONTROLLER_LISTENER_NAMES", "CONTROLLER"); + env.put("KAFKA_LOG_DIRS", "/tmp/kraft-combined-logs"); + env.put("KAFKA_REST_HOST_NAME", "rest-proxy"); + env.put("KAFKA_REST_BOOTSTRAP_SERVERS", "broker:29092"); + env.put("KAFKA_REST_LISTENERS", "http://0.0.0.0:8082"); + env.put("CLUSTER_ID", "4L6g3nShT-eMCtK--X86sw"); + env.put("KAFKA_AUTO_CREATE_TOPICS_ENABLE", "false"); + // set up container ports and environment variables + Network network = Network.newNetwork(); + withNetwork(network) ; + withNetworkAliases("broker"); + withImagePullPolicy(PullPolicy.defaultPolicy()); + withEnv(env); + withExposedPorts(KAFKA_REST_PORT,KAFKA_PORT); + withCreateContainerCmdModifier(cmd -> { + cmd.withEntrypoint("sh"); + }); + withCommand("-c", "while [ ! -f " + STARTER_SCRIPT + " ]; do sleep 0.1; done; " + STARTER_SCRIPT); + + } + + @Override + protected void containerIsStarting(InspectContainerResponse containerInfo) { + // Customize the container behavior before it starts + Integer mappedPort = getMappedPort(KAFKA_PORT); + + // use the mapped port to configure the application + String url = "HOST://localhost:" + mappedPort; + withEnv("KAFKA_ADVERTISED_LISTENERS",String.format("PLAINTEXT://broker:29092,PLAINTEXT_HOST://%s",url)); + + String command = "#!/bin/bash\n"; + // exporting KAFKA_ADVERTISED_LISTENERS with the container hostname + command += + String.format( + "export KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092,%s\n", + url + ); + + command += "sed -i '/KAFKA_ZOOKEEPER_CONNECT/d' /etc/confluent/docker/configure\n"; + String clusterId = ""; + try { + clusterId = execInContainer("kafka-storage", "random-uuid").getStdout().trim(); + } catch (IOException | InterruptedException e) { + logger().error("Failed to execute `kafka-storage random-uuid`. 
Exception message: {}", e.getMessage()); + } + command += + "echo 'kafka-storage format --ignore-formatted -t \"" + + clusterId + + "\" -c /etc/kafka/kafka.properties' >> /etc/confluent/docker/configure\n"; + command += "echo '' > /etc/confluent/docker/ensure \n"; + // Run the original command + command += "/etc/confluent/docker/run \n"; + copyFileToContainer(Transferable.of(command, 0777), STARTER_SCRIPT); + } +} + diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Producer.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Producer.java new file mode 100644 index 0000000000..8a55ae7cb4 --- /dev/null +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Producer.java @@ -0,0 +1,62 @@ +package org.dockerImageTests.utils; + +import okhttp3.*; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; + +import java.io.IOException; +import java.util.Properties; +import java.util.concurrent.ExecutionException; + +public class Producer { + + private KafkaProducer producer; + private OkHttpClient client; + private String baseUrl; + + public Producer(Properties props, String baseUrl) { + this.producer = new KafkaProducer(props); + this.client = new OkHttpClient(); + this.baseUrl = baseUrl; + } + + public void send(String topic, int value) throws IOException, ExecutionException, InterruptedException { + for(int start=0;start Date: Thu, 4 May 2023 14:15:50 +0000 Subject: [PATCH 07/44] Make use of go utilities(ub) --- .../docker/admin-propertiesSpec.json | 5 +++ .../include/etc/confluent/docker/configure | 33 ++++++++----------- .../include/etc/confluent/docker/ensure | 3 +- .../docker/kafka-log4j.properties.template | 11 +++++++ .../docker/kafka-propertiesSpec.json | 25 ++++++++++++++ .../kafka-rest-log4j.properties.template | 10 ++++++ .../docker/kafka-rest-propertiesSpec.json | 9 +++++ .../kafka-tools-log4j.properties.template | 6 ++++ .../include/etc/confluent/docker/launch | 22 +++++++++++-- 9 files changed, 101 insertions(+), 23 deletions(-) create mode 100644 kafka-plus-rest/include/etc/confluent/docker/admin-propertiesSpec.json create mode 100644 kafka-plus-rest/include/etc/confluent/docker/kafka-log4j.properties.template create mode 100644 kafka-plus-rest/include/etc/confluent/docker/kafka-propertiesSpec.json create mode 100644 kafka-plus-rest/include/etc/confluent/docker/kafka-rest-log4j.properties.template create mode 100644 kafka-plus-rest/include/etc/confluent/docker/kafka-rest-propertiesSpec.json create mode 100644 kafka-plus-rest/include/etc/confluent/docker/kafka-tools-log4j.properties.template diff --git a/kafka-plus-rest/include/etc/confluent/docker/admin-propertiesSpec.json b/kafka-plus-rest/include/etc/confluent/docker/admin-propertiesSpec.json new file mode 100644 index 0000000000..384c4843bb --- /dev/null +++ b/kafka-plus-rest/include/etc/confluent/docker/admin-propertiesSpec.json @@ -0,0 +1,5 @@ +{ + "prefixes": { + "KAFKA_REST_CLIENT": false + } +} \ No newline at end of file diff --git a/kafka-plus-rest/include/etc/confluent/docker/configure b/kafka-plus-rest/include/etc/confluent/docker/configure index 4f24cba3ab..7e207282a4 100755 --- a/kafka-plus-rest/include/etc/confluent/docker/configure +++ b/kafka-plus-rest/include/etc/confluent/docker/configure @@ -15,8 +15,6 @@ # limitations under the License. . /etc/confluent/docker/bash-config -. 
/etc/confluent/docker/utility - # --- for broker @@ -26,7 +24,7 @@ if [[ -n "${KAFKA_PROCESS_ROLES-}" ]] then echo "Running in KRaft mode..." - ensure CLUSTER_ID $CLUSTER_ID + ub ensure CLUSTER_ID if [[ $KAFKA_PROCESS_ROLES == "controller" ]] then if [[ -n "${KAFKA_ADVERTISED_LISTENERS-}" ]] @@ -37,12 +35,12 @@ then unset KAFKA_ADVERTISED_LISTENERS fi else - ensure KAFKA_ADVERTISED_LISTENERS $KAFKA_ADVERTISED_LISTENERS + ub ensure KAFKA_ADVERTISED_LISTENERS fi else echo "Running in Zookeeper mode..." - ensure KAFKA_ZOOKEEPER_CONNECT $KAFKA_ZOOKEEPER_CONNECT - ensure KAFKA_ADVERTISED_LISTENERS $KAFKA_ADVERTISED_LISTENERS + ub ensure KAFKA_ZOOKEEPER_CONNECT + ub ensure KAFKA_ADVERTISED_LISTENERS fi # By default, LISTENERS is derived from ADVERTISED_LISTENERS by replacing @@ -51,10 +49,10 @@ fi if [[ -z "${KAFKA_LISTENERS-}" ]] && ( [[ -z "${KAFKA_PROCESS_ROLES-}" ]] || [[ $KAFKA_PROCESS_ROLES != "controller" ]] ) then export KAFKA_LISTENERS - KAFKA_LISTENERS=$(echo "$KAFKA_ADVERTISED_LISTENERS" | sed -e 's|://[[:alpha:]]*:|://0.0.0.0:|g') + KAFKA_LISTENERS=$(echo "$KAFKA_ADVERTISED_LISTENERS" | sed -e 's|://[^:]*:|://0.0.0.0:|g') fi -path /etc/kafka/ writable +ub path /etc/kafka/ writable if [[ -z "${KAFKA_LOG_DIRS-}" ]] then @@ -146,7 +144,8 @@ fi # --- for rest proxy -ensure KAFKA_REST_BOOTSTRAP_SERVERS $KAFKA_REST_BOOTSTRAP_SERVERS +ub ensure KAFKA_REST_BOOTSTRAP_SERVERS +ub path /etc/kafka-rest/ writable if [[ -n "${KAFKA_REST_PORT-}" ]] then @@ -164,15 +163,11 @@ fi # --- for broker -exclude_props=("KAFKA_VERSION" "KAFKA_HEAP_OPTS" "KAFKA_LOG4J_OPTS" "KAFKA_JMX_OPTS" "KAFKA_JVM_PERFORMANCE_OPTS" "KAFKA_GC_LOG_OPTS" - "KAFKA_LOG4J_ROOT_LOGLEVEL" "KAFKA_LOG4J_LOGGERS" "KAFKA_TOOLS_LOG4J_LOGLEVEL" "KAFKA_ZOOKEEPER_CLIENT_CNXN_SOCKET" "KAFKA_REST_") -parse_props "/etc/confluent/docker/kafka.properties" "/etc/kafka/kafka.properties" KAFKA_ "${exclude_props[@]}" -build_log4j_properties_kafka "/etc/confluent/docker/log4j.properties" "/etc/kafka/log4j.properties" -build_tools_log4j_properties_kafka "/etc/confluent/docker/tools-log4j.properties" "/etc/kafka/tools-log4j.properties" - +ub render-properties /etc/confluent/docker/kafka-propertiesSpec.json > /etc/kafka/kafka.properties +ub render-template /etc/confluent/docker/kafka-log4j.properties.template > /etc/kafka/log4j.properties +ub render-template /etc/confluent/docker/kafka-tools-log4j.properties.template > /etc/kafka/tools-log4j.properties # --- for rest proxy -exclude_props=("KAFKA_REST_LOG4J_ROOT_LOGLEVEL" "KAFKA_REST_LOG4J_LOGGERS") -parse_props "/etc/confluent/docker/kafka-rest.properties" "/etc/kafka-rest/kafka-rest.properties" KAFKA_REST_ "${exclude_props[@]}" -build_log4j_properties_kafka_rest "/etc/confluent/docker/log4j-kafka-rest.properties" "/etc/kafka-rest/log4j.properties" -parse_props "/etc/confluent/docker/admin.properties" "/etc/kafka-rest/admin.properties" KAFKA_REST_CLIENT_ "${exclude_props[@]}" +ub render-properties /etc/confluent/docker/kafka-rest-propertiesSpec.json > /etc/kafka-rest/kafka-rest.properties +ub render-properties /etc/confluent/docker/admin-propertiesSpec.json > /etc/kafka-rest/admin.properties +ub render-template /etc/confluent/docker/kafka-rest-log4j.properties.template > /etc/kafka-rest/log4j.properties diff --git a/kafka-plus-rest/include/etc/confluent/docker/ensure b/kafka-plus-rest/include/etc/confluent/docker/ensure index b8d503aa00..3ca92f56be 100755 --- a/kafka-plus-rest/include/etc/confluent/docker/ensure +++ b/kafka-plus-rest/include/etc/confluent/docker/ensure @@ -15,11 +15,10 @@ # 
limitations under the License.
 
 . /etc/confluent/docker/bash-config
-. /etc/confluent/docker/utility
 
 export KAFKA_DATA_DIRS=${KAFKA_DATA_DIRS:-"/var/lib/kafka/data"}
 
 echo "===> Check if $KAFKA_DATA_DIRS is writable ..."
-path "$KAFKA_DATA_DIRS" writable
+ub path "$KAFKA_DATA_DIRS" writable
 
 # KRaft required step: Format the storage directory with provided cluster ID unless it already exists.
 if [[ -n "${KAFKA_PROCESS_ROLES-}" ]]
diff --git a/kafka-plus-rest/include/etc/confluent/docker/kafka-log4j.properties.template b/kafka-plus-rest/include/etc/confluent/docker/kafka-log4j.properties.template
new file mode 100644
index 0000000000..3a7b4744e3
--- /dev/null
+++ b/kafka-plus-rest/include/etc/confluent/docker/kafka-log4j.properties.template
@@ -0,0 +1,11 @@
+log4j.rootLogger={{ getEnv "KAFKA_LOG4J_ROOT_LOGLEVEL" "INFO" }}, stdout
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
+
+{{ $loggerDefaults := "kafka=INFO,kafka.network.RequestChannel$=WARN,kafka.producer.async.DefaultEventHandler=DEBUG,kafka.request.logger=WARN,kafka.controller=TRACE,kafka.log.LogCleaner=INFO,state.change.logger=TRACE,kafka.authorizer.logger=WARN"}}
+{{ $loggers := getEnv "KAFKA_LOG4J_LOGGERS" "" -}}
+{{ range $k, $v := splitToMapDefaults "," $loggerDefaults $loggers}}
+log4j.logger.{{ $k }}={{ $v -}}
+{{ end }}
diff --git a/kafka-plus-rest/include/etc/confluent/docker/kafka-propertiesSpec.json b/kafka-plus-rest/include/etc/confluent/docker/kafka-propertiesSpec.json
new file mode 100644
index 0000000000..219be3faef
--- /dev/null
+++ b/kafka-plus-rest/include/etc/confluent/docker/kafka-propertiesSpec.json
@@ -0,0 +1,25 @@
+{
+  "prefixes": {
+    "KAFKA": false,
+    "CONFLUENT": true
+  },
+  "renamed": {
+    "KAFKA_ZOOKEEPER_CLIENT_CNXN_SOCKET": "zookeeper.clientCnxnSocket"
+  },
+  "excludes": [
+    "KAFKA_VERSION",
+    "KAFKA_HEAP_OPTS",
+    "KAFKA_LOG4J_OPTS",
+    "KAFKA_OPTS",
+    "KAFKA_JMX_OPTS",
+    "KAFKA_JVM_PERFORMANCE_OPTS",
+    "KAFKA_GC_LOG_OPTS",
+    "KAFKA_LOG4J_ROOT_LOGLEVEL",
+    "KAFKA_LOG4J_LOGGERS",
+    "KAFKA_TOOLS_LOG4J_LOGLEVEL",
+    "KAFKA_ZOOKEEPER_CLIENT_CNXN_SOCKET"
+  ],
+  "defaults": {
+  },
+  "excludeWithPrefix": "KAFKA_REST_"
+}
\ No newline at end of file
diff --git a/kafka-plus-rest/include/etc/confluent/docker/kafka-rest-log4j.properties.template b/kafka-plus-rest/include/etc/confluent/docker/kafka-rest-log4j.properties.template
new file mode 100644
index 0000000000..c2bc5bffb2
--- /dev/null
+++ b/kafka-plus-rest/include/etc/confluent/docker/kafka-rest-log4j.properties.template
@@ -0,0 +1,10 @@
+log4j.rootLogger={{ getEnv "KAFKA_REST_LOG4J_ROOT_LOGLEVEL" "INFO" }}, stdout
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
+
+{{ $loggers := getEnv "KAFKA_REST_LOG4J_LOGGERS" "" -}}
+{{ range $k, $v := splitToMapDefaults "," "" $loggers}}
+log4j.logger.{{ $k }}={{ $v }}, stdout
+{{ end }}
diff --git a/kafka-plus-rest/include/etc/confluent/docker/kafka-rest-propertiesSpec.json b/kafka-plus-rest/include/etc/confluent/docker/kafka-rest-propertiesSpec.json
new file mode 100644
index 0000000000..d7ae50e2d4
--- /dev/null
+++ b/kafka-plus-rest/include/etc/confluent/docker/kafka-rest-propertiesSpec.json
@@ -0,0 +1,9 @@
+{
+  "prefixes": {
+    "KAFKA_REST": false
+  },
+  "excludes": [
+    "KAFKA_REST_LOG4J_ROOT_LOGLEVEL",
+    "KAFKA_REST_LOG4J_LOGGERS"
+  ]
+}
\ No newline at end of file
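
The spec files above are the input to `ub render-properties`: any environment variable matching a listed prefix, minus the excludes, becomes a config entry. The variable-name mangling is unchanged from the bash `parse_props` helper that patch 08 removes: strip the prefix, lowercase, replace `__` with `-`, then `_` with `.`. A minimal, hypothetical Java sketch of that rule, for reference while reviewing (illustrative only, not the Go implementation of `ub`):

    // Hypothetical sketch of the env-var -> property-name mapping driven by
    // the specs above; mirrors the parse_props rule. Order matters: "__" must
    // be mapped to "-" before the remaining "_" become ".".
    public class PropNameSketch {
        static String toPropertyName(String envVar, String prefix) {
            String n = envVar.substring(prefix.length()); // drop e.g. "KAFKA_"
            n = n.toLowerCase();
            n = n.replace("__", "-");
            n = n.replace("_", ".");
            return n;
        }

        public static void main(String[] args) {
            // prints "advertised.listeners"
            System.out.println(toPropertyName("KAFKA_ADVERTISED_LISTENERS", "KAFKA_"));
            // prints "listeners"
            System.out.println(toPropertyName("KAFKA_REST_LISTENERS", "KAFKA_REST_"));
        }
    }
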
diff --git a/kafka-plus-rest/include/etc/confluent/docker/kafka-tools-log4j.properties.template b/kafka-plus-rest/include/etc/confluent/docker/kafka-tools-log4j.properties.template
new file mode 100644
index 0000000000..c2df5bcf06
--- /dev/null
+++ b/kafka-plus-rest/include/etc/confluent/docker/kafka-tools-log4j.properties.template
@@ -0,0 +1,6 @@
+log4j.rootLogger={{ getEnv "KAFKA_TOOLS_LOG4J_LOGLEVEL" "WARN" }}, stderr
+
+log4j.appender.stderr=org.apache.log4j.ConsoleAppender
+log4j.appender.stderr.layout=org.apache.log4j.PatternLayout
+log4j.appender.stderr.layout.ConversionPattern=[%d] %p %m (%c)%n
+log4j.appender.stderr.Target=System.err
diff --git a/kafka-plus-rest/include/etc/confluent/docker/launch b/kafka-plus-rest/include/etc/confluent/docker/launch
index 660eed0c0d..755533da51 100755
--- a/kafka-plus-rest/include/etc/confluent/docker/launch
+++ b/kafka-plus-rest/include/etc/confluent/docker/launch
@@ -20,8 +20,24 @@ echo "===> Launching kafka ... "
 kafka-server-start /etc/kafka/kafka.properties &  # your first application
 P1=$! # capture PID of the process
 
-# exits after listing topics, continues if broker is unreachable
-$KAFKA_HOME/bin/kafka-topics --list --bootstrap-server localhost:9092
+echo "===> Check if kafka brokers are up using ub..."
+
+if [[ -n "${KAFKA_REST_CLIENT_SECURITY_PROTOCOL-}" ]] && [[ $KAFKA_REST_CLIENT_SECURITY_PROTOCOL != "PLAINTEXT" ]]
+then
+  ub kafka-ready \
+  "${KAFKA_REST_CUB_KAFKA_MIN_BROKERS:-1}" \
+  "${KAFKA_REST_CUB_KAFKA_TIMEOUT:-40}" \
+  -b "${KAFKA_REST_BOOTSTRAP_SERVERS}" \
+  -c /etc/kafka-rest/admin.properties
+else
+  if [[ -n "${KAFKA_REST_BOOTSTRAP_SERVERS-}" ]]
+  then
+    ub kafka-ready \
+    "${KAFKA_REST_CUB_KAFKA_MIN_BROKERS:-1}" \
+    "${KAFKA_REST_CUB_KAFKA_TIMEOUT:-40}" \
+    -b "${KAFKA_REST_BOOTSTRAP_SERVERS}"
+  fi
+fi
 
 # Start kafka rest
 echo "===> Launching kafka-rest ... "
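
The kafka-log4j template introduced by this patch merges a fixed set of logger defaults with whatever is supplied in KAFKA_LOG4J_LOGGERS via `splitToMapDefaults`, replacing the shell loop the old utility used. A rough, hypothetical Java equivalent of the assumed semantics (comma-separated `key=value` pairs, later entries winning on duplicate keys), shown here only to make the template's behavior concrete:

    // Hypothetical sketch of the defaults-plus-overrides logger merge done by
    // splitToMapDefaults in the template above (assumed semantics, not ub code).
    import java.util.LinkedHashMap;
    import java.util.Map;

    public class LoggerMergeSketch {
        static Map<String, String> merge(String defaults, String overrides) {
            Map<String, String> merged = new LinkedHashMap<>();
            for (String entry : (defaults + "," + overrides).split(",")) {
                String[] kv = entry.split("=", 2);
                if (kv.length < 2 || kv[0].trim().isEmpty()) {
                    continue; // skip blanks and malformed entries
                }
                merged.put(kv[0].trim(), kv[1].trim()); // later entries overwrite
            }
            return merged;
        }

        public static void main(String[] args) {
            // kafka.controller is overridden from TRACE to DEBUG:
            // {kafka=INFO, kafka.controller=DEBUG}
            System.out.println(merge("kafka=INFO,kafka.controller=TRACE",
                                     "kafka.controller=DEBUG"));
        }
    }
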
" From 3cced0eb70e6610aa333738ccf2c4c54f014ae67 Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Thu, 4 May 2023 15:09:37 +0000 Subject: [PATCH 08/44] Remove unused files --- .../etc/confluent/docker/admin.properties | 0 .../confluent/docker/kafka-rest.properties | 0 .../etc/confluent/docker/kafka.properties | 0 .../docker/log4j-kafka-rest.properties | 5 - .../etc/confluent/docker/log4j.properties | 14 -- .../confluent/docker/tools-log4j.properties | 6 - .../include/etc/confluent/docker/utility | 187 ------------------ 7 files changed, 212 deletions(-) delete mode 100644 kafka-plus-rest/include/etc/confluent/docker/admin.properties delete mode 100644 kafka-plus-rest/include/etc/confluent/docker/kafka-rest.properties delete mode 100644 kafka-plus-rest/include/etc/confluent/docker/kafka.properties delete mode 100644 kafka-plus-rest/include/etc/confluent/docker/log4j-kafka-rest.properties delete mode 100644 kafka-plus-rest/include/etc/confluent/docker/log4j.properties delete mode 100644 kafka-plus-rest/include/etc/confluent/docker/tools-log4j.properties delete mode 100644 kafka-plus-rest/include/etc/confluent/docker/utility diff --git a/kafka-plus-rest/include/etc/confluent/docker/admin.properties b/kafka-plus-rest/include/etc/confluent/docker/admin.properties deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/kafka-plus-rest/include/etc/confluent/docker/kafka-rest.properties b/kafka-plus-rest/include/etc/confluent/docker/kafka-rest.properties deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/kafka-plus-rest/include/etc/confluent/docker/kafka.properties b/kafka-plus-rest/include/etc/confluent/docker/kafka.properties deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/kafka-plus-rest/include/etc/confluent/docker/log4j-kafka-rest.properties b/kafka-plus-rest/include/etc/confluent/docker/log4j-kafka-rest.properties deleted file mode 100644 index 709a8b0eaf..0000000000 --- a/kafka-plus-rest/include/etc/confluent/docker/log4j-kafka-rest.properties +++ /dev/null @@ -1,5 +0,0 @@ -log4j.rootLogger=INFO, stdout - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n diff --git a/kafka-plus-rest/include/etc/confluent/docker/log4j.properties b/kafka-plus-rest/include/etc/confluent/docker/log4j.properties deleted file mode 100644 index ddf080f980..0000000000 --- a/kafka-plus-rest/include/etc/confluent/docker/log4j.properties +++ /dev/null @@ -1,14 +0,0 @@ -log4j.rootLogger=INFO, stdout - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.logger.kafka=INFO -log4j.logger.kafka.network.RequestChannel=WARN -log4j.logger.kafka.producer.async.DefaultEventHandler=DEBUG -log4j.logger.kafka.request.logger=WARN -log4j.logger.kafka.controller=TRACE -log4j.logger.kafka.log.LogCleaner=INFO -log4j.logger.state.change.logger=TRACE -log4j.logger.kafka.authorizer.logger=WARN diff --git a/kafka-plus-rest/include/etc/confluent/docker/tools-log4j.properties b/kafka-plus-rest/include/etc/confluent/docker/tools-log4j.properties deleted file mode 100644 index 27d9fbee48..0000000000 --- a/kafka-plus-rest/include/etc/confluent/docker/tools-log4j.properties +++ /dev/null @@ -1,6 +0,0 @@ -log4j.rootLogger=WARN, stderr - -log4j.appender.stderr=org.apache.log4j.ConsoleAppender 
-log4j.appender.stderr.layout=org.apache.log4j.PatternLayout -log4j.appender.stderr.layout.ConversionPattern=[%d] %p %m (%c)%n -log4j.appender.stderr.Target=System.err diff --git a/kafka-plus-rest/include/etc/confluent/docker/utility b/kafka-plus-rest/include/etc/confluent/docker/utility deleted file mode 100644 index 993cb2662d..0000000000 --- a/kafka-plus-rest/include/etc/confluent/docker/utility +++ /dev/null @@ -1,187 +0,0 @@ -#!/usr/bin/env bash -# -# Copyright 2016 Confluent Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -# checks path for permission -# $1 - path to check for -# $2 - permission to check for -path () { - if [[ "$2" == 'writable' ]] - then - if [ -w $1 ]; then return 0; else return 1; fi - elif [[ "$2" = 'readable' ]] - then - if [ -r $1 ]; then return 0; else return 1; fi - elif [[ "$2" = 'executable' ]] - then - if [ -x $1 ]; then return 0; else return 1; fi - elif [[ "$2" = 'existence' ]] - then - if [ -d $1 ]; then return 0; else return 1; fi - fi -} - - -# checks whether the environment variable is set -# $1 - env variable to check -# $2 - value of the env variables -ensure() { - set -e - if [ $# -ge 2 ] && [ -n "$2" ] - then - set +e - return 0 - else - echo $1 is required - return 1 - fi -} - - -# builds tool log4j props file for broker -# $1 - source base file prepopulated with required log4j configs -# $2 - final location of tools log4j props file -build_tools_log4j_properties_kafka() { - src_base_file=$1 - dest_file=$2 - cp $src_base_file $dest_file - if [[ -n "${KAFKA_TOOLS_LOG4J_LOGLEVEL-}" ]]; - then - sed -i "s|log4j.rootLogger=WARN, stderr|log4j.rootLogger=$KAFKA_TOOLS_LOG4J_LOGLEVEL, stderr|" $dest_file - fi -} - - -# builds tool log4j props file for broker -# $1 - source base file prepopulated with required log4j configs -# $2 - final location of tools log4j props file -build_log4j_properties_kafka() { - local src_base_file=$1 - local dest_file=$2 - cp $src_base_file $dest_file - if [[ -n "${KAFKA_LOG4J_ROOT_LOGLEVEL-}" ]]; - then - sed -i "s|^log4j.rootLogger=.*|log4j.rootLogger=$KAFKA_LOG4J_ROOT_LOGLEVEL, stdout|" $dest_file - fi - - if [[ -n "${KAFKA_LOG4J_LOGGERS-}" ]]; - then - IFS=',' - set -f - for line in $KAFKA_LOG4J_LOGGERS; do - separator="=" - key=${line%$separator*} - val=${line#*$separator} - key=$(echo $key | sed -e 's|^[[:space:]]*||' -e 's|[[:space:]]*$||') - val=$(echo $val | sed -e 's|^[[:space:]]*||' -e 's|[[:space:]]*$||') - - if grep -q ^log4j.logger.$key $dest_file - then - sed -i "s|^log4j.logger.$key.*|log4j.logger.$key=$val|" $dest_file - else - echo log4j.logger.$key=$val >> $dest_file - fi - done - set +f - unset IFS - fi -} - - -# builds tool log4j props file for broker -# $1 - source base file prepopulated with required log4j configs -# $2 - final location of tools log4j props file -build_log4j_properties_kafka_rest() { - local src_base_file=$1 - local dest_file=$2 - cp $src_base_file $dest_file - if [[ -n "${KAFKA_REST_LOG4J_ROOT_LOGLEVEL-}" ]]; - then - sed -i 
"s|^log4j.rootLogger=.*|log4j.rootLogger=$KAFKA_REST_LOG4J_ROOT_LOGLEVEL, stdout|" $dest_file - fi - - if [[ -n "${KAFKA_REST_LOG4J_LOGGERS-}" ]]; - then - IFS=',' - set -f - for line in $KAFKA_REST_LOG4J_LOGGERS; do - separator="=" - key=${line%$separator*} - val=${line#*$separator} - key=$(echo $key | sed -e 's|^[[:space:]]*||' -e 's|[[:space:]]*$||') - val=$(echo $val | sed -e 's|^[[:space:]]*||' -e 's|[[:space:]]*$||') - - if grep -q ^log4j.logger.$key $dest_file - then - sed -i "s|^log4j.logger.$key.*|log4j.logger.$key=$val, stdout|" $dest_file - else - echo log4j.logger.$key=$val, stdout >> $dest_file - fi - done - set +f - unset IFS - fi -} - - -# build service property file for any component -# $1 - source base file prepopulated with required props -# $2 - final location of the props file -# $3 - prefix to use for parsing the env variables -# $4 - list of all env variables to be ignored while building props file -parse_props() { - - local src_base_file=$1 - local dest_file=$2 - local prefix=$3 - shift 3 - local exclude_props=("$@") - cp $src_base_file $dest_file - - # loop over all env variables - env -0 | while IFS='=' read -r -d '' n v; - do - # ignore the ones not having the specified prefix - if ! [[ $n == $prefix* ]]; then continue; fi - # ignore if the value is empty - if [[ $v == "" ]]; then continue; fi - - # ignore the variables present in the exclude_props array - var='include' - for str in ${exclude_props[@]}; - do - if [[ $n == $str* ]]; - then - var='exclude' - break - fi - done - if [[ $var == 'exclude' ]]; then continue; fi - - n=${n//$prefix/} # remove prefix KAFKA_ - n=$(echo $n | tr '[:upper:]' '[:lower:]') # convert to lower-case - n=${n//__/-} # replace __ with - - n=${n//_/.} # replace _ with . - - # if property already present, override. 
if not present, append to file - if grep -q ^$n $dest_file - then - sed -i "s|^$n.*|$n=$v|" $dest_file - else - echo $n=$v >> $dest_file - fi - done -} From 2dbb708600738d72b0d28fc80b72ba38251d28c3 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Thu, 11 May 2023 02:02:41 +0530 Subject: [PATCH 09/44] trial run with junit fix --- Jenkinsfile | 2 +- server/pom.xml | 10 +++++++++- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/Jenkinsfile b/Jenkinsfile index 4b8374e399..f17759c557 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -6,7 +6,7 @@ dockerfile { 'confluentinc/cp-kafka-connect', 'confluentinc/cp-kafka-connect-base', 'confluentinc/cp-enterprise-kafka', 'confluentinc/cp-kafka', 'confluentinc/cp-kafka-kraft', 'confluentinc/cp-server', 'confluentinc/cp-zookeeper'] - mvnPhase = 'package' + mvnPhase = 'verify' mvnSkipDeploy = true nodeLabel = 'docker-debian-jdk8-compose' slackChannel = 'kafka-warn' diff --git a/server/pom.xml b/server/pom.xml index 8ee175739f..8351f2774f 100644 --- a/server/pom.xml +++ b/server/pom.xml @@ -33,5 +33,13 @@ false false - + + + + + junit + junit + test + + From ae2cca8981f29d17e13fa7afe9f4be0ad8d9503a Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Thu, 11 May 2023 18:48:57 +0530 Subject: [PATCH 10/44] adding changes for rebase in kafka plus rest --- kafka-plus-rest/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kafka-plus-rest/pom.xml b/kafka-plus-rest/pom.xml index 050661e7b9..e9ba507f12 100644 --- a/kafka-plus-rest/pom.xml +++ b/kafka-plus-rest/pom.xml @@ -22,7 +22,7 @@ io.confluent.kafka-images kafka-images-parent - 7.4.0-0 + 7.5.0-0 From c44470a94c01f5af64741c0aaba13da2d3df32f9 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Fri, 12 May 2023 02:27:42 +0530 Subject: [PATCH 11/44] skipping tests in server pom --- server/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/pom.xml b/server/pom.xml index 8351f2774f..0aa2fb0008 100644 --- a/server/pom.xml +++ b/server/pom.xml @@ -32,7 +32,7 @@ false - false + true From 99641dead7a5d2baf672f695e213323e560d4c93 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Fri, 12 May 2023 02:41:28 +0530 Subject: [PATCH 12/44] skipping python tox tests --- kafka-plus-rest/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kafka-plus-rest/pom.xml b/kafka-plus-rest/pom.xml index e9ba507f12..f133e7ac3b 100644 --- a/kafka-plus-rest/pom.xml +++ b/kafka-plus-rest/pom.xml @@ -38,7 +38,7 @@ false - false + true 2.0.0-alpha5 From dbb7f684d81955608ab35259a7e8c0b327b8170c Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Fri, 12 May 2023 03:02:36 +0530 Subject: [PATCH 13/44] image name fix --- .../java/org/dockerImageTests/utils/CustomKafkaContainer.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java index e4167ec45f..2ca8cc57bf 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java @@ -19,9 +19,11 @@ public class CustomKafkaContainer extends GenericContainer private static final String DOCKER_REGISTRY = System.getenv("DOCKER_REGISTRY"); + private static final String IMAGE_NAME = "confluentinc/cp-kafka-kraft"; + private static final String DOCKER_TAG = System.getenv("DOCKER_TAG"); public CustomKafkaContainer() { - 
super(DockerImageName.parse(String.format("%s:%s",DOCKER_REGISTRY,DOCKER_TAG))); + super(DockerImageName.parse(String.format("%s%s:%s",DOCKER_REGISTRY,IMAGE_NAME,DOCKER_TAG))); System.out.println("Using image " + String.format("%s:%s",DOCKER_REGISTRY,DOCKER_TAG)); Map env = new HashMap(); env.put("KAFKA_NODE_ID","1"); From 94533ed617d9f9e26dbd47fd4af907db08e94c34 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Thu, 18 May 2023 01:47:20 +0530 Subject: [PATCH 14/44] trying with latest test changes --- kafka-connect-base/pom.xml | 10 +- .../java/org/dockerImageTests/kafkaIT.java | 139 +++++++----------- .../java/org/dockerImageTests/sslKafkaIT.java | 110 ++++++++++++++ .../org/dockerImageTests/utils/Admin.java | 38 ++++- .../org/dockerImageTests/utils/Consumer.java | 131 ++++++++++++++--- .../utils/CustomKafkaContainer.java | 61 ++------ .../org/dockerImageTests/utils/Producer.java | 44 +++++- .../client-creds/kafka.client.keystore.pkcs12 | Bin 0 -> 3976 bytes .../kafka.client.truststore.pkcs12 | Bin 0 -> 1398 bytes .../src/test/{java => resources}/env.yml | 19 +-- .../kafka-1-creds/kafka-1_keystore_creds | 1 + .../kafka-1-creds/kafka-1_sslkey_creds | 1 + .../kafka-1-creds/kafka-1_truststore_creds | 1 + .../kafka.kafka-1.keystore.pkcs12 | Bin 0 -> 4026 bytes .../kafka.kafka-1.truststore.pkcs12 | Bin 0 -> 1398 bytes .../kafka.restproxy.keystore.pkcs12 | Bin 0 -> 4046 bytes .../kafka.restproxy.truststore.pkcs12 | Bin 0 -> 1398 bytes .../src/test/resources/sslconfigs.yml | 46 ++++++ server-connect-base/pom.xml | 10 +- server-connect/pom.xml | 10 +- 20 files changed, 456 insertions(+), 165 deletions(-) create mode 100644 kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java create mode 100644 kafka-plus-rest/src/test/resources/client-creds/kafka.client.keystore.pkcs12 create mode 100644 kafka-plus-rest/src/test/resources/client-creds/kafka.client.truststore.pkcs12 rename kafka-plus-rest/src/test/{java => resources}/env.yml (54%) create mode 100644 kafka-plus-rest/src/test/resources/kafka-1-creds/kafka-1_keystore_creds create mode 100644 kafka-plus-rest/src/test/resources/kafka-1-creds/kafka-1_sslkey_creds create mode 100644 kafka-plus-rest/src/test/resources/kafka-1-creds/kafka-1_truststore_creds create mode 100644 kafka-plus-rest/src/test/resources/kafka-1-creds/kafka.kafka-1.keystore.pkcs12 create mode 100644 kafka-plus-rest/src/test/resources/kafka-1-creds/kafka.kafka-1.truststore.pkcs12 create mode 100644 kafka-plus-rest/src/test/resources/restproxy-creds/kafka.restproxy.keystore.pkcs12 create mode 100644 kafka-plus-rest/src/test/resources/restproxy-creds/kafka.restproxy.truststore.pkcs12 create mode 100644 kafka-plus-rest/src/test/resources/sslconfigs.yml diff --git a/kafka-connect-base/pom.xml b/kafka-connect-base/pom.xml index 445419f201..926f459c2c 100644 --- a/kafka-connect-base/pom.xml +++ b/kafka-connect-base/pom.xml @@ -32,6 +32,14 @@ false - false + true + + + + junit + junit + test + + diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java index aed98175dd..c04a68722a 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java @@ -6,15 +6,18 @@ import org.dockerImageTests.utils.Consumer; import org.dockerImageTests.utils.CustomKafkaContainer; import org.dockerImageTests.utils.Producer; +import org.jetbrains.annotations.NotNull; import org.junit.experimental.categories.Category; -import 
org.junit.jupiter.api.Tag; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.AfterAll; -import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.*; + +import java.io.InputStream; import java.util.Map; + +import org.testcontainers.containers.BindMode; import org.testcontainers.containers.GenericContainer; import org.testcontainers.junit.jupiter.*; import org.testcontainers.junit.jupiter.Testcontainers; +import org.yaml.snakeyaml.Yaml; import java.util.List; import java.util.Properties; @@ -28,106 +31,76 @@ @Category(IntegrationTest.class) @Tag("IntegrationTest") @Testcontainers +@TestInstance(TestInstance.Lifecycle.PER_CLASS) public class kafkaIT { - private static final int KAFKA_PORT = 9093; + private static final int KAFKA_PORT = 19092; private static final int KAFKA_REST_PORT = 8082; + private static final String IMAGE_NAME = "confluentinc/confluent-local"; + private static final String DOCKER_REGISTRY = System.getenv("DOCKER_REGISTRY"); + private static final String DOCKER_TAG = System.getenv("DOCKER_TAG"); private static final String TOPIC_1 = "test-topic1"; private static final String TOPIC_2 = "test-topic2"; - @Container - public static GenericContainer container1=new CustomKafkaContainer();; + Admin admin; + Consumer consumer; + Producer producer; + + + public GenericContainer container1;; @BeforeAll - public static void setup(){ + public void setup(){ + Yaml yaml = new Yaml(); + InputStream inputStream = getClass().getResourceAsStream("/sslconfigs.yml"); + Map env = yaml.load(inputStream); + env.put("KAFKA_REST_LISTENERS","http://0.0.0.0:8082"); + env.put("KAFKA_REST_BOOTSTRAP_SERVERS","BROKER://kafka-1:9092"); + env.remove("KAFKA_REST_CLIENT_SECURITY_PROTOCOL"); + String imageName = String.format("%s%s:%s",DOCKER_REGISTRY,IMAGE_NAME,DOCKER_TAG); + //String imageName = String.format("placeholder/confluentinc/kafka-local:7.4.0-80-ubi8"); + container1=new CustomKafkaContainer(imageName,env) + .withClasspathResourceMapping("/kafka-1-creds","/etc/kafka/secrets", BindMode.READ_WRITE) + .withClasspathResourceMapping("/restproxy-creds","/etc/restproxy/secrets",BindMode.READ_WRITE);; try { container1.start(); } catch(Exception e) { System.out.println(container1.getLogs()); - System.out.println(container1.isRunning()); } + String baseUrl = String.format("http://%s:%s",container1.getHost(),container1.getMappedPort(KAFKA_REST_PORT)); + String bootstrapUrl = String.format("%s:%s",container1.getHost(),container1.getMappedPort(KAFKA_PORT)); + Properties props = new Properties(); + admin = new Admin(bootstrapUrl,baseUrl,props,false); + consumer = new Consumer(bootstrapUrl,"test-1",baseUrl,props,false); + producer = new Producer(baseUrl,bootstrapUrl,props,false); } @AfterAll - public static void teardown(){ - System.out.println(container1.isRunning()); - System.out.println(container1.isRunning()); + public void teardown(){ + System.out.println("tearing down"); container1.stop(); System.out.println(container1.isRunning()); - System.out.println("tearing down"); + producer.close(); } @Test - public void kafkaApiTest() { - Map env = System.getenv(); - Properties props = new Properties(); - props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); - props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); - String baseUrl = String.format("http://localhost:%s",container1.getMappedPort(KAFKA_REST_PORT)); - System.out.println(baseUrl); - System.out.println(KAFKA_PORT); - 
System.out.println(container1.getMappedPort(KAFKA_PORT)); - String bootstrapUrl = String.format("localhost:%s",container1.getMappedPort(KAFKA_PORT)); - String bootstrapUrl1 = String.format("localhost:%s","9092"); - System.out.println(bootstrapUrl); - System.out.println(container1.getHost()); - Admin admin = new Admin(bootstrapUrl,baseUrl); - Consumer consumer = new Consumer(bootstrapUrl,"test-1","abc",baseUrl); - props.put("bootstrap.servers", bootstrapUrl); - props.put("acks", "all"); - Producer producer = new Producer(props,baseUrl); - try { - admin.createTopic(TOPIC_1,3, (short) 1); - System.out.println(admin.listTopicsUsingKafkaApi()); - TimeUnit.MILLISECONDS.sleep(100); - System.out.println(admin.listTopicsUsingRestApi()); - List topics = admin.listTopicsUsingRestApi(); - assertTrue(topics.stream().anyMatch((String.format("\"%s\"",TOPIC_1))::equals)); - producer.send(TOPIC_1,10); - assertTrue(consumer.consume(10,TOPIC_1)); - } - catch (Exception e){ - System.out.println(e); - System.out.println(container1.getLogs()); - fail(); - } - - producer.close(); - System.out.println(container1.isRunning()); + public void kafkaApiTest() throws Exception { + admin.createTopic(TOPIC_1,3, (short) 1); + // TimeUnit.MILLISECONDS.sleep(100); + List topics = admin.listTopicsUsingRestApi(); + assertTrue(topics.stream().anyMatch((String.format("\"%s\"",TOPIC_1))::equals)); + producer.send(TOPIC_1,10); + assertTrue(consumer.consume(10,TOPIC_1)); } @Test - public void kafkaRestApiTest() { - Properties props = new Properties(); - props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); - props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); - String baseUrl = String.format("http://localhost:%s",container1.getMappedPort(KAFKA_REST_PORT)); - System.out.println(baseUrl); - System.out.println(KAFKA_PORT); - System.out.println(container1.getMappedPort(KAFKA_PORT)); - String bootstrapUrl = String.format("localhost:%s",container1.getMappedPort(KAFKA_PORT)); - String bootstrapUrl1 = String.format("localhost:%s","9092"); - System.out.println(bootstrapUrl); - System.out.println(container1.getHost()); - Admin admin = new Admin(bootstrapUrl,baseUrl); - Consumer consumer = new Consumer(bootstrapUrl,"test-1","abc",baseUrl); - props.put("bootstrap.servers", bootstrapUrl); - Producer producer = new Producer(props,baseUrl); - try { - admin.createTopic(TOPIC_2,3, (short) 1); - TimeUnit.MILLISECONDS.sleep(100); - System.out.println(admin.listTopicsUsingRestApi()); - List topics = admin.listTopicsUsingRestApi(); - assertTrue(topics.stream().anyMatch((String.format("\"%s\"",TOPIC_2))::equals)); - producer.sendRest(TOPIC_2,10); - TimeUnit.MILLISECONDS.sleep(100); - consumer.subscribeTopicRest(TOPIC_2); - assertTrue(consumer.consumeRest(10)); - } - catch (Exception e){ - System.out.println(e); - fail(); - } - - producer.close(); - System.out.println(container1.isRunning()); + public void kafkaRestApiTest() throws Exception { + admin.createTopic(TOPIC_2,3, (short) 1); + // TimeUnit.MILLISECONDS.sleep(100); + List topics = admin.listTopicsUsingRestApi(); + assertTrue(topics.stream().anyMatch((String.format("\"%s\"",TOPIC_2))::equals)); + consumer.subscribeTopicRest(TOPIC_2); + // TimeUnit.MILLISECONDS.sleep(1000); + producer.sendRest(TOPIC_2,10); + assertTrue(consumer.consumeWithRetry(10)); } + } diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java new file mode 100644 
index 0000000000..c030357934 --- /dev/null +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java @@ -0,0 +1,110 @@ +package org.dockerImageTests; + +//import org.junit.Test; +import io.confluent.common.utils.IntegrationTest; +import org.apache.kafka.clients.admin.AdminClientConfig; +import org.apache.kafka.common.config.SslConfigs; +import org.dockerImageTests.utils.Admin; +import org.dockerImageTests.utils.Consumer; +import org.dockerImageTests.utils.CustomKafkaContainer; +import org.dockerImageTests.utils.Producer; +import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.*; + +import java.io.InputStream; +import java.util.Map; + +import org.testcontainers.containers.BindMode; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.junit.jupiter.*; +import org.testcontainers.junit.jupiter.Testcontainers; +import org.yaml.snakeyaml.Yaml; + +import java.util.List; +import java.util.Properties; +import java.util.concurrent.TimeUnit; +import org.dockerImageTests.kafkaIT; + + +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + + +@Category(IntegrationTest.class) +@Tag("IntegrationTest") +@Testcontainers +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +public class sslKafkaIT { + private static final int KAFKA_PORT = 19093; + private static final int KAFKA_REST_PORT = 8082; + private static final String IMAGE_NAME = "confluentinc/confluent-local"; + private static final String DOCKER_REGISTRY = System.getenv("DOCKER_REGISTRY"); + + private static final String DOCKER_TAG = System.getenv("DOCKER_TAG"); + private static final String TOPIC_1 = "test-topic1"; + private static final String TOPIC_2 = "test-topic2"; + Admin admin; + Consumer consumer; + Producer producer; + + + public GenericContainer container1;; + @BeforeAll + public void setup(){ + Yaml yaml = new Yaml(); + InputStream inputStream = getClass().getResourceAsStream("/sslconfigs.yml"); + Map env = yaml.load(inputStream); + String imageName = String.format("%s%s:%s",DOCKER_REGISTRY,IMAGE_NAME,DOCKER_TAG); + // String imageName = String.format("placeholder/confluentinc/kafka-local:7.4.0-80-ubi8"); + container1=new CustomKafkaContainer(imageName,env) + .withClasspathResourceMapping("/kafka-1-creds","/etc/kafka/secrets", BindMode.READ_WRITE) + .withClasspathResourceMapping("/restproxy-creds","/etc/restproxy/secrets",BindMode.READ_WRITE); + try { + container1.start(); + } + catch(Exception e) { + System.out.println(container1.getLogs()); + } + String baseUrl = String.format("https://%s:%s",container1.getHost(),container1.getMappedPort(KAFKA_REST_PORT)); + String bootstrapUrl = String.format("%s:%s",container1.getHost(),container1.getMappedPort(KAFKA_PORT)); + Properties props = new Properties(); + props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SSL"); + props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,getClass().getResource("/client-creds/kafka.client.truststore.pkcs12").getPath()); + props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "confluent"); + props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, getClass().getResource("/client-creds/kafka.client.keystore.pkcs12").getPath()); + props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "confluent"); + admin = new Admin(bootstrapUrl,baseUrl,props,true); + consumer = new Consumer(bootstrapUrl,"test-1",baseUrl,props,true); + producer = new Producer(baseUrl,bootstrapUrl,props,true); + } + @AfterAll + public void teardown(){ + System.out.println("tearing down"); + container1.stop(); + 
producer.close(); + } + + @Test + public void kafkaApiSslTest() throws Exception { + admin.createTopic(TOPIC_1,3, (short) 1); + TimeUnit.MILLISECONDS.sleep(100); + List topics = admin.listTopicsUsingRestApi(); + assertTrue(topics.stream().anyMatch((String.format("\"%s\"",TOPIC_1))::equals)); + producer.send(TOPIC_1,10); + assertTrue(consumer.consume(10,TOPIC_1)); + + } + @Test + public void kafkaRestApiSslTest() throws Exception { + admin.createTopic(TOPIC_2,3, (short) 1); + TimeUnit.MILLISECONDS.sleep(100); + List topics = admin.listTopicsUsingRestApi(); + assertTrue(topics.stream().anyMatch((String.format("\"%s\"",TOPIC_2))::equals)); + consumer.subscribeTopicRest(TOPIC_2); + TimeUnit.MILLISECONDS.sleep(1000); + producer.sendRest(TOPIC_2,10); + TimeUnit.MILLISECONDS.sleep(10000); + assertTrue(consumer.consumeWithRetry(10)); + + } +} diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java index b5e2e9d630..38c4e3bfe3 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java @@ -2,6 +2,7 @@ import java.io.IOException; import java.io.InputStream; +import java.security.KeyStore; import java.util.*; import org.apache.http.HttpEntity; @@ -10,22 +11,30 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpPost; +import org.apache.http.conn.ssl.SSLConnectionSocketFactory; +import org.apache.http.conn.ssl.TrustSelfSignedStrategy; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.HttpClientBuilder; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.ssl.SSLContextBuilder; import org.apache.http.util.EntityUtils; import org.apache.kafka.clients.admin.*; import org.apache.kafka.common.config.TopicConfig; +import javax.net.ssl.SSLContext; + public class Admin { private final String bootstrapServers; private final String restEndpoint; AdminClient adminClient; - public Admin(String bootstrapServers, String restEndpoint) { + + private final Boolean isSsl; + public Admin(String bootstrapServers, String restEndpoint,Properties props,Boolean isSsl) { this.bootstrapServers = bootstrapServers; this.restEndpoint = restEndpoint; - Properties props = new Properties(); props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); adminClient = AdminClient.create(props); + this.isSsl = isSsl; } @@ -76,9 +85,29 @@ public void createTopicRest(String topicName, int numPartitions, short replicati - public List listTopicsUsingRestApi() throws IOException { + public List listTopicsUsingRestApi() throws Exception { String endpoint = restEndpoint + "/topics"; - HttpClient httpClient = HttpClientBuilder.create().build(); + HttpClient httpClient = HttpClientBuilder.create().build();; + if (isSsl == true) { + String truststoreFile = "/client-creds/kafka.client.truststore.pkcs12"; + InputStream truststoreStream = getClass().getResourceAsStream(truststoreFile); + KeyStore truststore = KeyStore.getInstance(KeyStore.getDefaultType()); + truststore.load(truststoreStream, "confluent".toCharArray()); + + // Build SSL context + SSLContext sslContext = SSLContextBuilder.create() + .loadTrustMaterial(truststore, new TrustSelfSignedStrategy()) + .build(); + + // Create SSL connection socket factory + SSLConnectionSocketFactory sslSocketFactory = new SSLConnectionSocketFactory(sslContext); + + // Create HTTP 
client + httpClient = HttpClients.custom() + .setSSLSocketFactory(sslSocketFactory) + .build(); + } + HttpGet getRequest = new HttpGet(endpoint); HttpResponse response = httpClient.execute(getRequest); HttpEntity responseBody = response.getEntity(); @@ -91,6 +120,7 @@ public List listTopicsUsingRestApi() throws IOException { return Arrays.asList(responseString.replaceAll("\\[|\\]", "").split(",")); } + public void deleteTopicUsingKafkaApi(String topicName) throws Exception { Properties props = new Properties(); props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java index 43b1b20a42..2a17b28450 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java @@ -1,10 +1,14 @@ package org.dockerImageTests.utils; import java.io.IOException; +import java.io.InputStream; import java.nio.charset.StandardCharsets; +import java.security.KeyStore; import java.time.Duration; +import java.util.Base64; import java.util.Collections; import java.util.Properties; +import java.util.concurrent.TimeUnit; import org.apache.http.HttpEntity; import org.apache.http.HttpStatus; @@ -12,35 +16,43 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpPost; +import org.apache.http.conn.ssl.SSLConnectionSocketFactory; +import org.apache.http.conn.ssl.TrustSelfSignedStrategy; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.impl.client.HttpClients; +import org.apache.http.ssl.SSLContextBuilder; import org.apache.http.util.EntityUtils; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.jetbrains.annotations.NotNull; import org.json.JSONArray; +import javax.net.ssl.SSLContext; + public class Consumer { private final String bootstrapServers; private final String groupId; - private final String topic; private final String restEndpoint; + private final Boolean isSsl; + Properties props; + private static final String CONSUMER_GROUP_ID = "dockerTests"; private static final String CONSUMER_INSTANCE_ID = "instance1"; - public Consumer(String bootstrapServers, String groupId, String topic, String restEndpoint) { + public Consumer(String bootstrapServers, String groupId, String restEndpoint,Properties props,Boolean isSsl) { this.bootstrapServers = bootstrapServers; this.groupId = groupId; - this.topic = topic; this.restEndpoint = restEndpoint; + this.props = props; + this.isSsl = isSsl; } - public boolean consume(int numMessages,String topicName) { - Properties props = new Properties(); + public boolean consume(int numMessages,String topicName) throws Exception { props.put("bootstrap.servers", bootstrapServers); props.put("group.id", groupId); props.put("auto.offset.reset", "earliest"); @@ -51,10 +63,11 @@ public boolean consume(int numMessages,String topicName) { KafkaConsumer consumer = new KafkaConsumer<>(props); consumer.subscribe(Collections.singletonList(topicName)); ConsumerRecords records = consumer.poll(Duration.ofMillis(100)); - int retry=0; - while(records.count()==0){ - 
System.out.println("Polling again"); + int tries = 0; + while (records.isEmpty() && tries < 3){ + TimeUnit.SECONDS.sleep(1); records = consumer.poll(Duration.ofMillis(100)); + tries += 1; } System.out.println("subcscribed to topic" + consumer.subscription()); for (ConsumerRecord record : records) { @@ -69,12 +82,34 @@ public boolean consume(int numMessages,String topicName) { return true; } - private void createConsumerInstance() throws IOException { + private void createConsumerInstance() throws Exception { String url = restEndpoint + "/consumers/" + CONSUMER_GROUP_ID; String requestBody = "{\"name\":\"" + CONSUMER_INSTANCE_ID + "\",\"format\":\"binary\",\"auto.offset.reset\":\"earliest\"}"; + // Kafka REST API URL + HttpPost request = new HttpPost(url); + // Create HTTP POST request HttpClient httpClient = HttpClientBuilder.create().build(); - HttpPost request = new HttpPost(url); + if (isSsl == true) { + String truststoreFile = "/client-creds/kafka.client.truststore.pkcs12"; + InputStream truststoreStream = getClass().getResourceAsStream(truststoreFile); + KeyStore truststore = KeyStore.getInstance(KeyStore.getDefaultType()); + truststore.load(truststoreStream, "confluent".toCharArray()); + + // Build SSL context + SSLContext sslContext = SSLContextBuilder.create() + .loadTrustMaterial(truststore, new TrustSelfSignedStrategy()) + .build(); + + // Create SSL connection socket factory + SSLConnectionSocketFactory sslSocketFactory = new SSLConnectionSocketFactory(sslContext); + + // Create HTTP client + httpClient = HttpClients.custom() + .setSSLSocketFactory(sslSocketFactory) + .build(); + + } request.addHeader("Content-Type", "application/vnd.kafka.v2+json"); request.setEntity(new StringEntity(requestBody)); @@ -85,17 +120,39 @@ private void createConsumerInstance() throws IOException { } System.out.println("successfully created the consumer instance"); } - - public boolean subscribeTopicRest(String topicName){ + public boolean subscribeTopicRest(String topicName) throws Exception{ try { createConsumerInstance(); - CloseableHttpClient httpClient = HttpClients.createDefault(); + HttpClient httpClient = HttpClientBuilder.create().build(); + if (isSsl==true) { + String truststoreFile = "/client-creds/kafka.client.truststore.pkcs12"; + InputStream truststoreStream = getClass().getResourceAsStream(truststoreFile); + KeyStore truststore = KeyStore.getInstance(KeyStore.getDefaultType()); + truststore.load(truststoreStream, "confluent".toCharArray()); + + // Build SSL context + SSLContext sslContext = SSLContextBuilder.create() + .loadTrustMaterial(truststore, new TrustSelfSignedStrategy()) + .build(); + + // Create SSL connection socket factory + SSLConnectionSocketFactory sslSocketFactory = new SSLConnectionSocketFactory(sslContext); + + // Create HTTP client + httpClient = HttpClients.custom() + .setSSLSocketFactory(sslSocketFactory) + .build(); + + } + + // CloseableHttpClient httpClient = HttpClients.createDefault(); String subscriptionUrl = restEndpoint + "/consumers/" + CONSUMER_GROUP_ID + "/instances/" + CONSUMER_INSTANCE_ID + "/subscription"; HttpPost subscriptionRequest = new HttpPost(subscriptionUrl); String subscriptionJson = "{ \"topics\": [ \"" + topicName + "\" ] }"; StringEntity subscriptionEntity = new StringEntity(subscriptionJson, ContentType.APPLICATION_JSON); subscriptionRequest.setEntity(subscriptionEntity); subscriptionRequest.addHeader("Content-Type", "application/vnd.kafka.json.v2+json"); + HttpResponse subscriptionResponse = httpClient.execute(subscriptionRequest); if 
(subscriptionResponse.getStatusLine().getStatusCode() == 204) { System.out.println("Subscribed to Kafka topic: " + topicName); @@ -112,7 +169,28 @@ public boolean consumeRest(int numMessages) throws Exception { // Continuously fetch messages from the Kafka topic try { - CloseableHttpClient httpClient = HttpClients.createDefault(); + HttpClient httpClient = HttpClientBuilder.create().build(); + if (isSsl==true) { + String truststoreFile = "/client-creds/kafka.client.truststore.pkcs12"; + InputStream truststoreStream = getClass().getResourceAsStream(truststoreFile); + KeyStore truststore = KeyStore.getInstance(KeyStore.getDefaultType()); + truststore.load(truststoreStream, "confluent".toCharArray()); + + // Build SSL context + SSLContext sslContext = SSLContextBuilder.create() + .loadTrustMaterial(truststore, new TrustSelfSignedStrategy()) + .build(); + + // Create SSL connection socket factory + SSLConnectionSocketFactory sslSocketFactory = new SSLConnectionSocketFactory(sslContext); + + // Create HTTP client + httpClient = HttpClients.custom() + .setSSLSocketFactory(sslSocketFactory) + .build(); + + } + // CloseableHttpClient httpClient = HttpClients.createDefault(); String fetchUrl = restEndpoint + "/consumers/" + CONSUMER_GROUP_ID + "/instances/" + CONSUMER_INSTANCE_ID + "/records?timeout=10000"; HttpGet fetchRequest = new HttpGet(fetchUrl); HttpResponse fetchResponse = httpClient.execute(fetchRequest); @@ -122,9 +200,7 @@ public boolean consumeRest(int numMessages) throws Exception { fetchEntity = fetchResponse.getEntity(); } if (fetchEntity != null) { - String messageJson = EntityUtils.toString(fetchEntity, StandardCharsets.UTF_8); - System.out.println(messageJson); - JSONArray jsonArray = new JSONArray(messageJson); + JSONArray jsonArray = parseJson(fetchEntity); for (int i = 0; i < jsonArray.length(); i++) { numMessages--; @@ -141,6 +217,27 @@ public boolean consumeRest(int numMessages) throws Exception { return true; } + @NotNull + public Boolean consumeWithRetry(int numMessages) throws Exception { + Boolean isConsumeSuccessful = false; + int tries = 0; + isConsumeSuccessful = consumeRest(numMessages); + while (isConsumeSuccessful == false && tries < 3){ + TimeUnit.SECONDS.sleep(1); + isConsumeSuccessful = consumeRest(numMessages); + tries += 1; + } + return isConsumeSuccessful; + } + + @NotNull + private static JSONArray parseJson(HttpEntity fetchEntity) throws IOException { + String messageJson = EntityUtils.toString(fetchEntity, StandardCharsets.UTF_8); + System.out.println(messageJson); + JSONArray jsonArray = new JSONArray(messageJson); + return jsonArray; + } + private void handleMessage(String message) { System.out.println("Received message: " + message); } diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java index 2ca8cc57bf..520f8c19ef 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java @@ -1,56 +1,36 @@ package org.dockerImageTests.utils; import com.github.dockerjava.api.command.InspectContainerResponse; +import org.testcontainers.containers.BindMode; import org.testcontainers.containers.GenericContainer; import org.testcontainers.containers.Network; import org.testcontainers.images.PullPolicy; import org.testcontainers.images.builder.Transferable; import org.testcontainers.utility.DockerImageName; +import 
org.yaml.snakeyaml.Yaml; import java.io.IOException; +import java.io.InputStream; import java.util.HashMap; import java.util.Map; public class CustomKafkaContainer extends GenericContainer { private static final int KAFKA_PORT = 9093; + private static final int KAFKA_PLAIN_PORT = 19092; + private static final int KAFKA_SSL_PORT = 19093; private static final int KAFKA_REST_PORT = 8082; private static final String STARTER_SCRIPT = "/testcontainers_start.sh"; - private static final String DOCKER_REGISTRY = System.getenv("DOCKER_REGISTRY"); - - private static final String IMAGE_NAME = "confluentinc/cp-kafka-kraft"; - - private static final String DOCKER_TAG = System.getenv("DOCKER_TAG"); - public CustomKafkaContainer() { - super(DockerImageName.parse(String.format("%s%s:%s",DOCKER_REGISTRY,IMAGE_NAME,DOCKER_TAG))); - System.out.println("Using image " + String.format("%s:%s",DOCKER_REGISTRY,DOCKER_TAG)); - Map env = new HashMap(); - env.put("KAFKA_NODE_ID","1"); - env.put( "KAFKA_LISTENER_SECURITY_PROTOCOL_MAP", "CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,HOST:PLAINTEXT"); - env.put("KAFKA_ADVERTISED_LISTENERS","PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092"); - env.put("KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR", "1"); - env.put("KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS", "0"); - env.put("KAFKA_TRANSACTION_STATE_LOG_MIN_ISR", "1"); - env.put("KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR", "1"); - env.put("KAFKA_PROCESS_ROLES", "broker,controller"); - env.put("KAFKA_CONTROLLER_QUORUM_VOTERS", "1@broker:29093"); - env.put("KAFKA_LISTENERS", "PLAINTEXT://broker:29092,CONTROLLER://broker:29093,PLAINTEXT_HOST://0.0.0.0:9092,HOST://0.0.0.0:9093"); - env.put("KAFKA_INTER_BROKER_LISTENER_NAME", "PLAINTEXT"); - env.put("KAFKA_CONTROLLER_LISTENER_NAMES", "CONTROLLER"); - env.put("KAFKA_LOG_DIRS", "/tmp/kraft-combined-logs"); - env.put("KAFKA_REST_HOST_NAME", "rest-proxy"); - env.put("KAFKA_REST_BOOTSTRAP_SERVERS", "broker:29092"); - env.put("KAFKA_REST_LISTENERS", "http://0.0.0.0:8082"); - env.put("CLUSTER_ID", "4L6g3nShT-eMCtK--X86sw"); - env.put("KAFKA_AUTO_CREATE_TOPICS_ENABLE", "false"); + public CustomKafkaContainer(String image, Map env) { + super(DockerImageName.parse(image)); // set up container ports and environment variables Network network = Network.newNetwork(); withNetwork(network) ; - withNetworkAliases("broker"); + withNetworkAliases("kafka-1"); withImagePullPolicy(PullPolicy.defaultPolicy()); withEnv(env); - withExposedPorts(KAFKA_REST_PORT,KAFKA_PORT); + withExposedPorts(KAFKA_REST_PORT,KAFKA_PORT,KAFKA_PLAIN_PORT,KAFKA_SSL_PORT); withCreateContainerCmdModifier(cmd -> { cmd.withEntrypoint("sh"); }); @@ -61,33 +41,20 @@ public CustomKafkaContainer() { @Override protected void containerIsStarting(InspectContainerResponse containerInfo) { // Customize the container behavior before it starts - Integer mappedPort = getMappedPort(KAFKA_PORT); + Integer mappedOpenPort = getMappedPort(KAFKA_PLAIN_PORT); + Integer mappedSslPort = getMappedPort(KAFKA_SSL_PORT); // use the mapped port to configure the application - String url = "HOST://localhost:" + mappedPort; - withEnv("KAFKA_ADVERTISED_LISTENERS",String.format("PLAINTEXT://broker:29092,PLAINTEXT_HOST://%s",url)); - + String url = String.format("PLAINTEXT://%s:%s,SSL://%s:%s",getHost(),mappedOpenPort,getHost(),mappedSslPort); + System.out.println(url); String command = "#!/bin/bash\n"; // exporting KAFKA_ADVERTISED_LISTENERS with the container hostname command += String.format( - "export 
KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092,%s\n", + "export KAFKA_ADVERTISED_LISTENERS=%s,SSL-INT://kafka-1:9093,BROKER://kafka-1:9092\n", url ); - command += "sed -i '/KAFKA_ZOOKEEPER_CONNECT/d' /etc/confluent/docker/configure\n"; - String clusterId = ""; - try { - clusterId = execInContainer("kafka-storage", "random-uuid").getStdout().trim(); - } catch (IOException | InterruptedException e) { - logger().error("Failed to execute `kafka-storage random-uuid`. Exception message: {}", e.getMessage()); - } - command += - "echo 'kafka-storage format --ignore-formatted -t \"" + - clusterId + - "\" -c /etc/kafka/kafka.properties' >> /etc/confluent/docker/configure\n"; - command += "echo '' > /etc/confluent/docker/ensure \n"; - // Run the original command command += "/etc/confluent/docker/run \n"; copyFileToContainer(Transferable.of(command, 0777), STARTER_SCRIPT); } diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Producer.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Producer.java index 8a55ae7cb4..6460e65b12 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Producer.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Producer.java @@ -1,10 +1,21 @@ package org.dockerImageTests.utils; import okhttp3.*; +import org.apache.http.client.HttpClient; +import org.apache.http.conn.ssl.SSLConnectionSocketFactory; +import org.apache.http.conn.ssl.TrustSelfSignedStrategy; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.ssl.SSLContextBuilder; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.protocol.types.Field; +import javax.net.ssl.*; import java.io.IOException; +import java.io.InputStream; +import java.security.KeyStore; +import java.security.SecureRandom; +import java.util.Base64; import java.util.Properties; import java.util.concurrent.ExecutionException; @@ -14,10 +25,17 @@ public class Producer { private OkHttpClient client; private String baseUrl; - public Producer(Properties props, String baseUrl) { + private Boolean isSsl; + + public Producer(String baseUrl, String bootstrapUrl,Properties props,Boolean isSsl) { + props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); + props.put("bootstrap.servers", bootstrapUrl); + props.put("acks", "all"); this.producer = new KafkaProducer(props); this.client = new OkHttpClient(); this.baseUrl = baseUrl; + this.isSsl = isSsl; } public void send(String topic, int value) throws IOException, ExecutionException, InterruptedException { @@ -27,13 +45,35 @@ public void send(String topic, int value) throws IOException, ExecutionException } } - public void sendRest(String topic, int value) throws IOException { + public void sendRest(String topic, int value) throws Exception { // Configure request body // Configure request URL String endpoint = String.format("/topics/%s", topic); String url = baseUrl + endpoint; + OkHttpClient client = new OkHttpClient.Builder().build(); + if (isSsl == true) { + String truststoreFile = "/client-creds/kafka.client.truststore.pkcs12"; + InputStream truststoreStream = getClass().getResourceAsStream(truststoreFile); + KeyStore truststore = KeyStore.getInstance(KeyStore.getDefaultType()); + truststore.load(truststoreStream, "confluent".toCharArray()); + + // Build SSL context + 
TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); + trustManagerFactory.init(truststore); + TrustManager[] trustManagers = trustManagerFactory.getTrustManagers(); + + SSLContext sslContext = SSLContext.getInstance("TLS"); + sslContext.init(null, trustManagers, new SecureRandom()); + + SSLSocketFactory sslSocketFactory = sslContext.getSocketFactory(); + // Create HTTP client + client = new OkHttpClient.Builder() + .sslSocketFactory(sslSocketFactory, (X509TrustManager) trustManagers[0]) + .build(); + + } // Configure request headers MediaType mediaType = MediaType.parse("application/vnd.kafka.json.v2+json"); Headers headers = new Headers.Builder() diff --git a/kafka-plus-rest/src/test/resources/client-creds/kafka.client.keystore.pkcs12 b/kafka-plus-rest/src/test/resources/client-creds/kafka.client.keystore.pkcs12 new file mode 100644 index 0000000000000000000000000000000000000000..9834e5cb673e47794f3541ef4b03ffd1ae39c67e GIT binary patch literal 3976 zcma)8-kmNam2 z5DUExd9!YUbfokcwM7o3Fuzz0w+#edWdM@F;4~DJ|Gg0i1q0wb6i~+mT{1Uv5SbW= zfw-oFQVjQm|J3qSu(Gn617Sduo8RhNH*gHD3-M216o`-*Qz|lVI*07sksUq!Sg`5R zAFEvNG@y+r;F8x=R^1z{b+x$jY&fP(-PgO%+r3Ba<#w099No;y=CC}P;HpG5{4gJZ z)T9RCipfyXtg5)xwKA%FTV6u%NTsLAl`tP~^BsIbsv@l)zb-EP)P=ZPR(Ec%TyIb%R0Ky^uW_gzCG&%9<~<)dwOI0tA+YFpf(hD z`gBat^3YFzLnPfkAME6NLQvdjQhH(e`UGj?_Q5+gib}|__*C^{DbzacXw$kp;3?vq4`HlB`Fl#wU!29aWC@mKLJ6h|8~(gDhGkHStR}QO6K&Y@d#yaI_VG z>2a@7#w1omi6JYtthfI4-MVSZVD31%eWlwB9Fr=R5ouLiwiN~@XUtyYuk1>oyq!oR zhy955`F5DKlK$Frou}1-Khn8WXXNu9S&@s(lwHW0(%~j%zW|S~bY{Ce613J^+;L)( z_axqb2N&1vgJ{TIIscl22Y z9PViVv9U49_l91`<*MZg+g~l;d5|CL%EvZUagB4#>sFLZuOLGPp~+Q=owJyAtE$VQ zgcPw8p=R?H?oX( zw^)f_xz$Q3Y)bSUSS0=Fbk7Tmvl$J$l9tXI=c9VQF$@jt$F9FIUppNBx*z4#AWHra zeF;ln&ZanO9y|*-knnF={0ww?K;o^tlC8|v$cX#tOuXsGm`42v5oE1~d89$*y3-9G zwwlYuy%uwtN1`>3CMc7rg47ij;3<+x*FI$MCiOFR6=Y@&d#q_&TuHtlv6*yL*Gu zfn#~~*xF!Kv2n@Bp!(#&WgqA?nD6)zr=96w6Pxi|9Yh}1@!JblBxtNNZo7s%+N zt5A?5Dx2dnGg&<6DONTh1bNoE9Tl(qQBh@gy&2CFV^p76v+bF0=b6q_bL)Y8HBn5k ziF>dIAtXu-Hi&(ShkpvT8@QMow3`0?-Xp?rA#$YSEQxXMVEU z0dhUq7MtraZWK|{lc^@-!=GsHu2Fa^g(LYm>a)<3Jmpd-Hb3!hKXyjtrG)n5IcR4c zB{P9iiu}*OStK6vQfssi?wb^L1Jx=Mc%O~*?EZ~>qIUH~_MFCYNU|F6g@%>rUF^Fq79q!pB8Wt46x$t%dq z-hgAEPX9DPC`vIWXo)gknn!hH?@>s<139N+CzIkvlwMj%PE=Rr9Wmr9oC4f9BL$kg`%i zpQ)QELvwzW4{h?%uOD+&^=bCIXZ-946G&b8rY`4^l47pQ5pTef^uWR*&iS#Ml9}iCLMsk$y(QhD5b5*!x z-mSa#e_y<{l^U$AB-NurUT}#oKHmgcw|7z(30g=H#x$%GGdxjt&ih<$pR+Y?saVR3 zzBXuSC9G~|KyJte2jd>=Gn{V??UjDAIoQ$pu<%=vKCP%qsz1*y`jlVdh*w%}Z=Ez(^t z_XHHmB}G{j9X}cbnC0bofrxv0vxcf=lqrU)T!7l3HYr!F2mt`fN0U zyN)21lE{c})F>5`BDn!yMG4?2}h7i!px>128gJ3^T7{OM_ik<|7YYk9-ws`c^; z%o_B?=xP+U5Vkj-QKkoGeC(0^zB;`FtI0^gWFr5m%djODB{bDEIx0r>iB``;imc`8 zCC|pDC|Fvc@0$L}MAG1~CDnz4t73!8yAGbir8Ehj{BUy4JG~hYuKgwwhVuHI!HQ>! zmb?6mQf_p98n&VB$}w7i$;c5ObgX<^V{*+s`*U3DoA~xy7PwFYE~haGp{!u`v*{cw zdVI}}gp{|Y{0^Tcpz0R{BSbltl=;ZNbE|ikwkQqA;27`l-TUQuqyy0|^nE^F1w_B? 
zucJzrvB}l~cBI`02`{>Qi<=Y0)*6J`;`9V| zYyKf*=?^v_?WejB!tBpepyjx9RS>=9H6xYMJ1F;wuXt36Uenc7whi}6!u@^*9jklc z24KqD(Tvhpo<(NDNUXwCGA4vmlkUh-jr4p!mZC82ytEto6R|UOzV;k6y=OC`PktOG z?WL)Yfz)^ZgqaUPW1kzg-Ch zMFo$zhOx9UFh8->o`rvOb7}Kl5%ryehMgvTbm~Y-k4$bpayU8YvI8|05nt#GAtA9r z&JLqq$GlM+B(JCg^R&34O7myA`RlrbW-|n`s>$K^2>aBKBJVS1TG|pj-43@Nxfc4`}e*f*E~rjL$tH z*J287%!w^_j4N9V_V16J%f6aQ{0)2OATb*BZR%`QO7pEeZJPm&CbA5w=SFOD7wNtHcAX0Jpujw$iV|Tu5hfB#^y<2qmH1g~e~~m6-*2UCyVDs@L#EaRumw zmuEUl+LD!JIdy7>1U0@uoApmFg?v5K3%A+kW4N&tvjJ^^;XXYiX=HBgK#F zqiM9IZp&AXkk|)dyCQ{5i$c%XGS#cO9n}oCq!Zufaw#Ru; z3M<|+1q2DsFJO~SxIn()zo(fSNNQLWIgRW3scj~GU;X=aj+nSPb>Dt$bE4OZ3R>Eq zVxL=GU#(vBv4-*Xr*KUzi9jON#i&Nq&6I2HGW8J%OA-;)qefGd#Cn!k5Ia-?ed3U=ah_~&kma<;};y71Em%hD~*#C z-=PJ^=K&R0_iagsX39jgOpkxnnVo`A4fD+(>2EsCST+ExY?dUS{@TM!m0*tjwfnR< z&#$s?`pIpWRfCyre~Dlgo|$A1^1Tg~w0!I9n&H$a>-6)>Grxm~m%r8or~$PZ;Tz38 z%7g2r*Y0^$UY4JF`4Hxk&q*x@?Dy6R$o1*xx)nuxf;^-kZ!Nnr7S*QPd=Gk8fMEfU zMf(U<_xUs6g#?aold(`qZ0c1~W9htA5&!PDPZT_QJA1K=HGW@^A!NhluOFc4r<{Le zcxRM*m-}(9R9C@fbrWZ!8QoNkM_wG@L6rx!W44{+jML-cj0kzSAzT_x`_Fd@Bm)CL zF!O@IYT6T<+8dn_M^doQ(}&*LGe-G8kQD_`T&-%fEQlqkDdc9)s=et+LfBL0RtTXp La8=>IPOAR^R@F}< literal 0 HcmV?d00001 diff --git a/kafka-plus-rest/src/test/resources/client-creds/kafka.client.truststore.pkcs12 b/kafka-plus-rest/src/test/resources/client-creds/kafka.client.truststore.pkcs12 new file mode 100644 index 0000000000000000000000000000000000000000..c5a5f7c57a99a4f6b61ebbc3202ba32a60e85dd5 GIT binary patch literal 1398 zcmV-+1&R7Ff(3E{0Ru3C1snzmDuzgg_YDCD0ic2f4FrM(2{3{M1u%jI0R{;whDe6@ z4FLxRpn?SQFoFc`0s#Opf&}LV2`Yw2hW8Bt2LUi<1_>&LNQU+thDZTr0|Wso1Q5SjlcAdf0qh@)>L9`&P49q$1aN8Z4fMChjki+qf^^Qw2j8KB zZ+Eq;_I;gpS`-@oSfik>(>to)jJpNY@hNWy*{-aCsPtpJXO?M-X>odA zZ!DY38Lc$rd>F-+chXC;_w%y?~R2wutHn(|)g@0{uR< z=~mR-Rh12&%IudM#FiK!mo8=T>u$#+K|c%`#60UxIuk`Le_& z29jEcz~7&=A{!wDE?KZCTpR2oSTfA95r2iHEi05z21kAQtwjLemgB;q)b*6MG7Dsx zvmiCP*#p6lYMdA(O)e3xYA4LG4vq46&6-tjps+;c2@Vd}9V1n8k>$pJScQ*-^^pn3 zIZdiTz)+1jP0=B}OF=s{LC4J>t> z9ffMFVHd)Q-#hZH#eFGgS#jrYb(U|J-B3T&EG1=uJ#aw=f*tfFM_%Xcp5@jWxH|#) zAE4W~?r92a)N|2{b1P`;;Koh-Cw&bjBMHim$3%vEMU>J1Ct%5W_~5{b+@x^CDFvx_ z?lNDLoR@A&j#dE!5qyrcUIejsiOe+$nKK`R_1IA^Ws0-SN;3Z4RRC(RCCELWI-*7H zr_m$v3>E7L)lA0jcet0R=D|Ix+DllkBQJzMsBymbt}2`3x`BkXklccheCRo&>%fnKza%%?jS$(wB$$SQZ|T|hBa zMddG&W1-3<-IQ=T6?^gu%J+MWP9S1AY5hHZf*`10r@#m_u-2G$<3-_OpU6k(Ck%5! 
z_WU57IarBenS6hW0b|wX)Y@6W7EL{H^YdL8aeI;`b04U_E8ou~hT5t3UlcCLx#ae#t zZ?;tSy3c-p{CeO0tNq*p z2j1!sEK%EVM=P%wB0vv>Q-<#stP^@((1tnMF!>zN3Coq*RtcCYeDfq;w?u`k!ILS@ z{FNC9O71OfpC00baroQ)If zn>xV!R4P{NK!nMqrI+#}K3m}`^+%2)U2qWc}+r+wuity=e#Ls9^BmYZU8 zf3n(s`P9vW$Ls9%U&`V$9c_+HwPb~d&KKy1Yd(`-!5}FbR8BsN|}x1dkA&BGp9B-6w{Qi?YB45yw=buO85Om#Do|;3qqifC$O_xf~E!a zilh?snVGh^ z8ST<`$^oj2|A7Z$<5$xXxygEt#q%C{ZoPh59uKnUjHPyYL(G#aD|ulP&z(-#_OMW- zwgzri+cTd*lZ04m4%)%`W~nLMCPpQbWcwx-X3ECo8BZlW6gE|UP2xEQPT%1BY_Qc- z6ZBNYS-&g)We!_|q(}o(`AyvJ(60~ThCf6KTvgqH6n-bsVLy0FZL%&tn2zwBecFDj zRxYq;KItI9-GYZLTqZuGEC1%n)ziV@@5@Yq-1s_et?3+B8}`mnKt8J@z<(5tGZ8y` zZXrQ|N{qeWfABb!P!Ug&8)@x*1VSQV#X)R9?|2FxI$yJ$*oeWjvP|dnSTFdBnZ;Rk z!3>>H2_+d?nC7~r){bF7`$AO!KtqH>ILD^ z2>SvJHfoeF@Q$^Iwv^>%3Z*pDdM!^GtPadwp10Rmq)exrlBqJLDUd5f$uk+psU^ya zJ*fE!;jj8BlM27dJmYN(L$=%K>1Kfh9p&nd^{Xuys<%nW+jc)k|3Kq(YdN8{ZBK&Jr|k0HJv!CUQrv~CAJaTdYzDTCr@u$%Eq^{Vo5|272En(G5PQEiK;C& z$6j=}a)63kIU6T(>WN37_9=38kQmA3^u_XgR_4w1g5%wYWMyr9lJ2~%Q5V}bdzfn$ zD)j*V#(OV*N6>#BtGzPJY%w6(mKC2a(sSG2Ju$pV%s z7m3MYN5h3BbLXK^?pZs%a?`=xVpEw>&}YnNeI;`(xia^Q2lVN159COQ)*py`EFAjE z@M=5y@V3!v{;?IXQlzK+9vck73-@= zovl0kR`T|V=H-{Ld&~tZiXBp>&BV4fr;7qfw=XkLFvFtJ!jSgT$-%T*Sf4-M9=r0E zpM>haOz+>ON=zp~#X*7Nfn$&3^1oRWM*_tC4^1yYhfibt%*&ZeLK^%)5-baT_&^%; z5QHF3|9gmtuoOXz_ydLG0&xD6pnn*^e==(Ggps1wH%5A0;luZ+l2ZgfOSSF%H%6W1 z2`f%!DJNObKI3RH(~pTn5aS4+&PDevjjS{yscCsWRE1E6(y-<#RibRh^A4%^^@kqY zlH3q+#vS^`5)`}`ynj_j0nccZ^!?q$z)m_-$DREuJ4$$m8u3A^UH?7L&!Ye`#a27> zgYUG8J*o$l4b!)LsttKrg5)Dda*Xv9L;&rEuU=Ua;?*IB_9oW`z3Y3`^D2%M-Y-~W zfAg7iOZ`U`PcStJovf+f9oaWGrylH1)H3=-31azcnUm zrRly29ED3~irF#6mNaK~|NIoXYAprjI^_spb6*V>zj~b9<{zhfugloXt(%{A|~N>sa67xsaIy%8LA5(u^N-|9)T- z)5pkk(@B>Js2-%xy4U9yy$)q`v*(*Vkkh=k$pr$o^1f{4u|Cpr(ir|!s?_>Bb1_l8 zhOyC&nVIAzVAFLTgpF;8+jL; zb#B9J#|v1^*>V@%ct*=Fr1DRHgd9HhWSf-yk_{ZTFHde1@~FJ2PV}@{t2;$XBua;< zwDsCIiN1?ErIN-vdrcin5!+(lT8bjI-1W#)^I5T%;tc2Yni2_NG?oRx5tDL3u0lPX zmawP?&sI|DuYsZUs1s|9fM+nCAmYVe5xy_C`IE|%+#rvWdb|AzTLN|H00$Ksi#c~I z(T&bqpm}|%Ffy*fnW{a!4J(sAsjdYhOvqXKgn#5ogQ$FGknmf$nv{YNK|4^aU#9*; zs0w@O6Wvqh?+rof;oAFAh)r5gns9*e z;*8Tw<1;dL&mV`+N!^|?vpj$BNu#(83w#DGm#BhFe9-yYMO^p>hB+S^UGTtkJ1$^? 
zM2hPu%*27oZc5p03Vs0?FA|S-zv?FH*{Y1={!0f0>(lo&qEM3PC*Z#Vcg%x&#yg|X zEdb()S&B9H6j6Y?y`M%eOE2`tb0(#C2&QuFEYD3)hnr*dnKoyqyOAi`$C$V^D=2I@ z`}|gI0Ks^#GTct=1V&l*T2j1>!G-l}hN$6Qi33@#^F@v#Y3I7}Fi!cL-zIP_29>s2 zFQxk;x#w$t6%ks^jw`WY5b=21 z@Ysa8%wrzuwwQAH_4l-XSxPL#tjfOTkx75PtGpd4PcOUE7_A!AXh0v!yS22nRAuz!lQyBy(CLq z?qcR{9!j$e)H*`dzt~`1&sAjY`v}ON zrAAgJ2Q6_O9nWP>ax;!noZhO|kl!;6fiLXQ>beO3`f#5tegyVJ{ky(u4>cHyB2@6O zslAJ7m-D=l_Mq|b1DM((gBO3mcBUV}2d}(ufW8^nyrS#DuSYHAjeJnz!rWVziB9vA zASaOeF)Vnp7)Ss>``5tgdZ(4M3d+H4=$(>9~{h|&ZzK&CzHjrZ| zR$s?L18|$1VvusB64Y|Oaty2ASe->pPX)#2){EahbS&BD&ePcaSt=(mX9x`Erk8k= z%}ib&MOX(@<5V25zP8yRf2FI!*s`j72r&NklC&r86g}TNHx{X#VDLy=T>e%NqhloY zJ8On^RA#$Wow7{*M-a#HTV^`8kSS;pSX1J<>gVO6(S^`VXSb6kIc2jt1G+B}3)IOx zP?rav-sH+p(eWemc!^?*W)ab08$UdRz=A1sxsR33igNY+jtpDM?^fwjjepj_%6}^Q(*UyCpWZ95?dr|5cZ0hy&J=9^W zT>`=b4NE=BCDzpEtfTu}*S4iOz{;K;)LALBoq5S-U4k4E2{UkO*0>E$PBMD*k!Q=h z;jMHyj)QR0A}y69F-xxG@{wD2Vk5I{&+%cAD+$9FMr;;F!*fc&uJ`5dK&7bQ3s1cB zrtiNh29$nS^DQ)mT#FyZphK~nmU~A$rIY2b;8EZ#WO&%0-n!uGuv$BGV}J_-Zqy0N z?Mc;|xA!gSZoR#8H4e9AmKV@vY*bA77@n4)ppZl1Yqi?iK$&v6`*o zpgO_(y_rD=)Fa5L*5*)@4q5k^!W#J7Lz^gp*3y#jpdUOnIgqHXD&@B11-fY|*+3in zYM*g^#>%!~rW)ZXVZ z{sY7L)%;kqXyd0EBOUQP0~~g0L+o@yzR3yLDdEHQ6O1MZ0+Ilc|NYP60tj&Mxk?p; zNf&+JU}u|OD8%t@CXR6?k6s_U33Jau+9YMe*6`^{pfOtalc%zJ{B?h`SRcyMJJ?bE H>je8BO^9sF literal 0 HcmV?d00001 diff --git a/kafka-plus-rest/src/test/resources/kafka-1-creds/kafka.kafka-1.truststore.pkcs12 b/kafka-plus-rest/src/test/resources/kafka-1-creds/kafka.kafka-1.truststore.pkcs12 new file mode 100644 index 0000000000000000000000000000000000000000..643f8e18679092214705678542102faab4c9903e GIT binary patch literal 1398 zcmV-+1&R7Ff(3E{0Ru3C1snzmDuzgg_YDCD0ic2f4FrM(2{3{M1u%jI0R{;whDe6@ z4FLxRpn?SQFoFc`0s#Opf&}LV2`Yw2hW8Bt2LUi<1_>&LNQU+thDZTr0|Wso1Q1*r$yT_NKk4+{976GQ1WAB`1aM;50Nznp&oku2A^$l*azC1d zH-#0L*c3t(AJbr>rjN6N0y_oBI6VK_oEeX}Hsu7K3o=b%Kz5OZ!T0HmG5YdP3LOOr zNFw5DU4}wa(~ZgrxL9n509I}oPV2AFQ;kB*>w{b$#&``7zVNCC#Kf1h5NPb9_gqm> z3&ZvrL?X{aB7%W?wvRP>@xi{p9^U1GkoFFcl{Rm9Uk8@vF<88sn*B1AqR4UjN>rY^ zf1mH21753rqwo#Iz-9UeT(TeMv>#QzZ+1I+O1dlH&MLo?opyPoBl>nDhT&_ zOmMPp3J%$efE__&)15f8s1QUHKBeTez?H+cM^;cCyfIncflS_V|D&<=OIc4RC`2gy zbl$lmL`BS+e zU<7J!vC?yOcb(do-kF=@P@nA;Yy|hpb?E`BHKNE-eoQQA!~g@ib$?os4U+FQTwxtO z%wa`bK^I#lO{yE&{>a909)}d|ghfdTt$5}dd`Co_-iQC%CrQ;}2?8Jgn zdV8a$9ke{#r9H{`n^F~WGV}!df(88lp(X{r0R2-V7BIp@e7rhWgbCW^3DIE2A^q`&eQC+=IG(D#n03nwplj6nVt`>APS=P}Oufvj>Pif9`HL zxc1Y?0}`pw)R-)-T|zryS#rdeOXk(X#M!>=H{bu7CpK7c@qY`{j zB&W{8j6I@7Cgf@mvrQEZn)C^`@rwb=3|I;Y`sWwX4;-qsC>S)W-%}^I4iB z)TMx!St6<;Vch7fxW#R{{rcLxyW6In)I(Uv?<4b-=%Z$xr@)ugnGAvaXUAxq5psA-%{)E{%MFwGq z@RwkzHF_=H%EP&0tCwZ=ix|xQe00pC=SFV_#unQ&{e%e<@o1X^(NC9O71OfpC00bZs23me3 zdjt-zmP;Q5&brPtc$!z%;H7;kL(~UAyz*iM6b>|}>1SNZD7zV`vf4El58BIWc>)3_ E5I|6p2mk;8 literal 0 HcmV?d00001 diff --git a/kafka-plus-rest/src/test/resources/restproxy-creds/kafka.restproxy.keystore.pkcs12 b/kafka-plus-rest/src/test/resources/restproxy-creds/kafka.restproxy.keystore.pkcs12 new file mode 100644 index 0000000000000000000000000000000000000000..82ef0597d29eae11194b6174ad1083c44feea467 GIT binary patch literal 4046 zcma)9cQ_jkw-zGy-lIy@j6{e%YPASawQ3ZtSu<*sQZv=qN~qX-7EzRFDYa`=?Y&pk zR;wtt_qq4^efR$LeSe(iob#UZzJI>YIWRO`FEM}whNklYQHWx+F{d;DGC(ex&H;#~ zv-t~K!O)~1{@;kS2#6-l`wM6NjWiI||FWnl0mQjzQq{kp63p=*1}P29947a#mKp{I zGB*srFUP9HjPd0>-234tpgR5TJeZhRh@Ke03!?^+|BoUu1t}5i28iMbMjPNj0tAQx z={HNwW>#jRDcUj9r=kfGJwP<@JBR49cG0R-|1Y?=hWSYAp!NleWJ;jgj0P=k$;Q^i zh9O^!6EwVL`W~T}7yI}wSO9<}L}y^?D_)itPEzIwyPjxJvS*Zd z#I-MjF>ws656bk9HRd~z{Y%~K9l;uLP)=Q_Bs+ftX&ooST={}0>#bHVfK06S+2)Lc 
zr30`@59bkZ`W1WwU?8)Fth%dd=<`%Zl`MP5)VjiBt=D`^%4yR;hrOh8Hvw8gav=Q1 z-90NZ?j0p??ZszhX7#;9UkG*N=YRivis6tLxMjqp4k>qJph<;qF8k z{A;bEd?z#yWR%S1glFOD_+33?SQhWI^<@3v0R7hF^|+IHtrIp4z2op6JP)xm3)R|M z91mqCvr(E%RQE_pEO5bJiiN)#)SR>_DhO}k!G+w)Os7PByy}DRL~ij{P?Ta4Y4FDv zn4o#))~i7q_CU*>xeKK)TF9=9Rp`uWoWbF9=#Z2OPks)6G-Tm4>Vqfp*~?eWS03I1 z{WwTJYi4E&^v_d;4a+lYA+Apu-(L5j)+XCB%7zWIM7rj$UKUnTe1=OQsSYKa4Q?#d zR3{%s28R0Nz+n;+fewHiCN8)9Q`Xf}xyeAa_thyw-YB)3r-h`i#afDnY&;dRda(%c z+1G%@9wRS<_aH>ed~SQLaqXYD)^yjRM-k&5W@L-DJ=6|c5QilTQ0xA9osp~oV3oQ1 z9eIIIq5Vu}8v@8Oo|mv-P5bdwF23=pEVQ7Acf)d}&NM*BQ$}iKs!ev#7Ymr?n=5sP_|o?Mqs$CZCl6_m|CXy~#zGN8C%2alTS? zI8oQ=He@wV;FIENQp;lOFnMmu4i3YNaSUeJI|c5pEA>@|e_#{1$flQ8wcoZ5k_43` z^dqoQ7j&)OW&=K*@hV0MZGT2rF28;6YP(>>y-=i|cas0Gac(B*JS!5vSmhBKE>Ekw zOHs(Z-s`9cm%!I}C9PukN=Mkh(?M}XRaB<3DcXmNMBj*aH?%(2Hm6;HXh_VQCXTtp zWowK4i;k*qj6_p!C7h?c=^$4XXN-+Rid91z`LrAdMIVY-uFZJ|1-(5jhauBXd7jmc z_^pMgeYy?RoCU`}PSlT)#c{)x_z!)RTh}n)c9`7kc;m?-DD>rBCEKJ_@4N0lJZ#E3 z)$&)wys#P@hY|hljFm3*worQ^DjJ?pqbA}YC=b$&;>J!GQ1m zF>IF3;yp~h)sZ^;qtVd%P>tLpalcR5nIfz17V|4t{wP#!?jXH3K`YXQ%dg806IfQ| z>esYC9KRe|;sjs7iK>Y-1OS&Lfzxh$t`D}fE=4#8sAZ&Y@|ek!z`2&noJMtmyXk%+ z$UJn}fAD#8ZVJftN8s<71<)m0r76#54h#4Buq8y;?qAmn7t~I{-NGfYwM=Q4Fs@%* zao!mX1AUsP{XM%iAM;66Ke@CUU*N0gqrHnk+3D_6-aA8wn_=2>$x0@hRZOqhVwNtp zB;0akb6tI8l6y&Nwk_wf42+JvTZB)7>E*TKcQfJZx3S`TZ6=>Z+FHZ79@rs0oMV)w z!zDCfeE--b1+xS*4FkdcE+%E`+~ zOT*wW7@8vQA00AKE}8=S7m6na5dBRN|C#{)omm_7>7BA&cDX;!*S0@nIu7|jdnf-t zG3#&Ts#cE3HTF&`QHyB6ovlzb1^p*Bi#*fEw{$kY%CCWRrrcR_^capZGKv=EpLc7q zB3z=r-L{2ipzQ{mC~47y%5sYj%DV;U&p^#n$0}Ux`)PB9OX@Q+B+7MFu1Df zNwuQ2fCeXA-H)|bi9S4uvjv8o724#<@1P>zo_4&ll8G6gGvY z>yPQXB@R`ubOsVe+ONf`ZlKR6H4z0Z6vCI>o3`Q8ApN;oKa6dgCpsaHsm%t6~cUUwAy?ettIEQ^=+wp4mFxy&i_9(v0BTjq*> zl6B(if$uzhKQ)L^?aQT1dQhOW0z}oel-GoNd%?R2-ztaEFdFI)sH2Xk@Ev?+(k|%L z-8u|8TaEt{7~j|$Jts>n9|icW_0B$6UMjk$lGa+tblHQ;Po z23mKJM- zl3;zQrO?8tU2gn=gasLLS>G+sKI)@3ROqACqDPStgtGU0z6t5R#td_eGGBUqwupQh zoT;&1d87YlABM@zhcizrxIx8vXhmVFhw~v^qPGcA*O}yBeyA!0FQK|qfwOQ{r1$H9 zUs_g;rCW)##>k=;R#l0h1j>CF5XdLo9TL!eqYb{jf&eJIGQ7GLM z`_tl$OM zO?~!hi~RzdCc)i?kN6?E&8N|ET01VkUWv{<#6xNqwEU!pkPcpvZby90XMuh?>x5n` zVOntlDtqSDpI@4vbo&jUVXr0XAxaa4E}xAEem2Wj^%nnnU1<9cR#lIViL_o)CR6+5 z49;3Zey?aPW+&~6@J1j>^7G-_7no=1M>G0&Asncbhx6JxfB2(32DZ%d_nUa^a&Wu$ zs1M=iyL7q4%M4yo7f0G>)7p=d!MoY5JzrjtmC<@kZKdVq=YHnL+ydNc5pA7 zyM3L0I%LIP=x&(1d%5MWIk>Df;kFuHk%Y07-x}V~GEpQ`ENkleD0$1VTAfcnVfh|% zc20?5b;E<#_bTye<%w>m#1;$xyuyk4b?QznVPLKeC$h}ADZVL2eqFqE_pqh9QbAt9 zzHdacmFR?*ZShC_cjklGTh2pw>>01~frDQe6Nl-M3wPSMzPpkThqWr5+JWC;nIrwy zZ@Q~&$iDU|d~MAv#*VDWkvuYxxQhA)KY(*4HR7)zy!8U?f?H0hu;y?sI8*Y#h5!AfR9$Z{Q62WUV#k!^F{P11G{H~x8ErA~-x*NewvW<9$< ze*S<1T<6gTi06}PZIZ7}NNbihVhiejEx-Lv-*+@}1Ur9WlW#@0z^*;Up6X_mjd-sj~3)3Lg5S~)VmVi{($+cNr70`vOK_b_;+XhTc zKD)YdG#8r%?q|=;wL8cgv&jW!z0S&Z72iWwVhj2h-e!jO5Vq4?@!MD?IsRPiU`SYT z1yf@Eu1``+qk-!R6?Gd<{dcyixa>w^#CbW+XUK`mG#a#NTavC-a#p4e=p?p&!0y4Z zK`n?`)@ANTMuw%Fd#0Dp?E4q>&JxN3_$Y)O2)u(eZVh$ZqodNEr=_Ea{WDjfm}Ga_ zz-rR14`cmJ_%;~<{u1f?`_##%XmORucR}Y#Ri0G(EM}B`5lT_iBD?`PS zEi)Re0VZeEQ2j9W$1pLpNeC0UN=s})HWfJH7VTRPCk25>$Tu~wLaXqT==6V zF7vp_^gZ?~a&~?B#od`UT@~fJY(4QWpB^|W^(L;{jFoy+m2`Jbu{*2JriQoT6Yhwm zMXbDqjjJedMQ_BkIPaL^P_u>1hNNa5{u#^~zV|i=qqVn=4#y`;Q(~9zu0&KKp+?{y zTlUJC^#?yKJMnduhUt-svbotk3)f?)CAUQm5e=oM9L5Sq=-44AxSw*@u`sP zbeJAY0!H)Ck4p?7B?9suC6_M$eE9qf6=6OWrk?o<_i~Z{LgW*8B5`NPua&e9$UOPc a$9r&+=FXeFB6TlSykKs+l-z%xWd8;KHj4`Y literal 0 HcmV?d00001 diff --git a/kafka-plus-rest/src/test/resources/restproxy-creds/kafka.restproxy.truststore.pkcs12 b/kafka-plus-rest/src/test/resources/restproxy-creds/kafka.restproxy.truststore.pkcs12 new file mode 
100644 index 0000000000000000000000000000000000000000..cb8bf797a5a56fcf98b7a84b80250581f4c789f0 GIT binary patch literal 1398 zcmV-+1&R7Ff(3E{0Ru3C1snzmDuzgg_YDCD0ic2f4FrM(2{3{M1u%jI0R{;whDe6@ z4FLxRpn?SQFoFc`0s#Opf&}LV2`Yw2hW8Bt2LUi<1_>&LNQU6=|zRY`28KI&qTff+thDZTr0|Wso1Q5vwp@YJclox;2+?z?sRQ$Z5ti9@U|)mfOR z(xQTE*m)LtHn}2)5bu(*N4uo~z1ogil1DP7oAM1Uw?R?^2vmx{!)Ne_O z(7b)Za(Q6sI!*fJcA|dV-VXu(u3R?(-W~I>GMvw4pDq>q16lJ~(or-gN}sj+pwCyji^a|{WS>wu{A7lQ$K za#{F*<>j4VXh(}FS7ki;nWtK2#YQL1#*Yul$<49v9lUA|JlCAW)4!$(Ye0;Zt8+yC ztHGMWwZs&MF4JmkPAWFjx;2ZABFoHNblkJit!R>++j0Hnwo67;Km%Z<#arludAtTH(u^6`BG}Gdj92F26&0v$SUw+ zIaUexmFc~dui~s0)Jc)___DK6UE=TeV;FXl?JI)2?&&QbkiXycVi>{sbD&2*R=Pkt zyX7|>QPwO?VZMF9C)T5Z=U;V22H&w7zX~0I2);4DzaOk<>X`ryxYA1|yfE?9lX#P- zJ5NW&1CfNu_P%s;j$T6=v+LZ0$Qmg)H`*;cG%c{SW278^Cr#A4F*jj;J@c;)%VW)J zn?%owe$JBk~3HD_aIfr?FK6x7w)q_iYed=VEWgrkVvKXpSFeR7`>)Y!YBOp-F|OQS6^9J*|79LL84}pt`PdlRhwk1x z`GJ3wkvR;+t&@<+*k8LcjCiQ)xaA>rE8@*|H{9X0?N{WJtK)tlFq4owq)?1=7dx4H zUe+k)b;hpLX)_%Y6f;$Xzs>-Q-N@4#8W7`L7}e{`Y6v3Ja`niV9&jt}3g~{4QZSrc z-**jhh1H;v+TO0Qzr3X?nfZa?1yD+q)SmCSsoQG0q8C#{EY$pkw`NjDiuhgyWYW>h zG{cv;7cP#jDlR1F>-dd(HOu9X`CBh^qrENM)`(`NC9O71OfpC00bb46`E3| z7B`?-&BHLx(aHXCLjTG)7^$K3$#F`9k8BDA6b|>hNQj8)mhV&ceTN4Xj(w265&{Az E5b1cCI{*Lx literal 0 HcmV?d00001 diff --git a/kafka-plus-rest/src/test/resources/sslconfigs.yml b/kafka-plus-rest/src/test/resources/sslconfigs.yml new file mode 100644 index 0000000000..abcf201da0 --- /dev/null +++ b/kafka-plus-rest/src/test/resources/sslconfigs.yml @@ -0,0 +1,46 @@ +KAFKA_BROKER_ID: "1" +CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw' +KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: "1" +KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: "0" +KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: "1" +KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: "1" +KAFKA_JMX_PORT: "9101" +KAFKA_JMX_HOSTNAME: "localhost" +KAFKA_PROCESS_ROLES: 'broker,controller' +KAFKA_NODE_ID: "1" + +KAFKA_LISTENERS: "PLAINTEXT://0.0.0.0:19092,SSL://0.0.0.0:19093,SSL-INT://0.0.0.0:9093,BROKER://0.0.0.0:9092,CONTROLLER://kafka-1:29093" +KAFKA_ADVERTISED_LISTENERS: "PLAINTEXT://localhost:19092,SSL://localhost:19093,SSL-INT://kafka-1:9093,BROKER://0.0.0.0:9092" +KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: "PLAINTEXT:PLAINTEXT,SSL:SSL,SSL-INT:SSL,BROKER:PLAINTEXT,CONTROLLER:PLAINTEXT" + +KAFKA_INTER_BROKER_LISTENER_NAME: "BROKER" +KAFKA_CONTROLLER_QUORUM_VOTERS: '1@kafka-1:29093' +KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER' +KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs' + +KAFKA_SSL_KEYSTORE_FILENAME: "kafka.kafka-1.keystore.pkcs12" +KAFKA_SSL_KEYSTORE_CREDENTIALS: "kafka-1_keystore_creds" +KAFKA_SSL_KEY_CREDENTIALS: "kafka-1_sslkey_creds" +KAFKA_SSL_TRUSTSTORE_FILENAME: "kafka.kafka-1.truststore.pkcs12" +KAFKA_SSL_TRUSTSTORE_CREDENTIALS: "kafka-1_truststore_creds" +KAFKA_SSL_CLIENT_AUTH: "required" +#### ABOVE IS ENOUGH FOR CLIENT AUTH AND ENCRYPTION +# KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: "HTTPS" + +KAFKA_REST_HOST_NAME: "restproxy" +#KAFKA_REST_BOOTSTRAP_SERVERS: BROKER://kafka-1:9092 +KAFKA_REST_BOOTSTRAP_SERVERS: "SSL-INT://kafka-1:9093" +KAFKA_REST_LISTENERS: "https://0.0.0.0:8082" +KAFKA_REST_CLIENT_SECURITY_PROTOCOL: "SSL" +KAFKA_REST_CLIENT_SSL_TRUSTSTORE_LOCATION: "/etc/restproxy/secrets/kafka.restproxy.truststore.pkcs12" +KAFKA_REST_CLIENT_SSL_TRUSTSTORE_PASSWORD: "confluent" +KAFKA_REST_CLIENT_SSL_KEYSTORE_LOCATION: "/etc/restproxy/secrets/kafka.restproxy.keystore.pkcs12" +KAFKA_REST_CLIENT_SSL_KEYSTORE_PASSWORD: "confluent" +KAFKA_REST_CLIENT_SSL_KEY_PASSWORD: "confluent" + + +KAFKA_REST_SSL_TRUSTSTORE_LOCATION: 
"/etc/restproxy/secrets/kafka.restproxy.truststore.pkcs12" +KAFKA_REST_SSL_TRUSTSTORE_PASSWORD: "confluent" +KAFKA_REST_SSL_KEYSTORE_LOCATION: "/etc/restproxy/secrets/kafka.restproxy.keystore.pkcs12" +KAFKA_REST_SSL_KEYSTORE_PASSWORD: "confluent" +KAFKA_REST_SSL_KEY_PASSWORD: "confluent" \ No newline at end of file diff --git a/server-connect-base/pom.xml b/server-connect-base/pom.xml index a376621061..43152e277d 100644 --- a/server-connect-base/pom.xml +++ b/server-connect-base/pom.xml @@ -32,6 +32,14 @@ false - false + true + + + + junit + junit + test + + diff --git a/server-connect/pom.xml b/server-connect/pom.xml index 96792a33b0..12a05b18b9 100644 --- a/server-connect/pom.xml +++ b/server-connect/pom.xml @@ -32,6 +32,14 @@ false - false + true + + + + junit + junit + test + + From b8d3ca0d8fc2d70d3241056bf66e5dcee18b57c2 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Thu, 18 May 2023 01:49:08 +0530 Subject: [PATCH 15/44] image changes --- kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java | 2 +- .../src/test/java/org/dockerImageTests/sslKafkaIT.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java index c04a68722a..3410fd17db 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java @@ -35,7 +35,7 @@ public class kafkaIT { private static final int KAFKA_PORT = 19092; private static final int KAFKA_REST_PORT = 8082; - private static final String IMAGE_NAME = "confluentinc/confluent-local"; + private static final String IMAGE_NAME = "confluentinc/cp-kafka-kraft"; private static final String DOCKER_REGISTRY = System.getenv("DOCKER_REGISTRY"); private static final String DOCKER_TAG = System.getenv("DOCKER_TAG"); diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java index c030357934..8bcff1921a 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java @@ -37,7 +37,7 @@ public class sslKafkaIT { private static final int KAFKA_PORT = 19093; private static final int KAFKA_REST_PORT = 8082; - private static final String IMAGE_NAME = "confluentinc/confluent-local"; + private static final String IMAGE_NAME = "confluentinc/cp-kafka-kraft"; private static final String DOCKER_REGISTRY = System.getenv("DOCKER_REGISTRY"); private static final String DOCKER_TAG = System.getenv("DOCKER_TAG"); From b9475ef276b720d53f551a877cabefbf1f61ef9d Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Thu, 18 May 2023 02:22:06 +0530 Subject: [PATCH 16/44] appending ub --- .../include/etc/confluent/docker/configure | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/kafka-plus-rest/include/etc/confluent/docker/configure b/kafka-plus-rest/include/etc/confluent/docker/configure index 7e207282a4..e708798d9f 100755 --- a/kafka-plus-rest/include/etc/confluent/docker/configure +++ b/kafka-plus-rest/include/etc/confluent/docker/configure @@ -90,31 +90,31 @@ if [[ -n "${KAFKA_ADVERTISED_LISTENERS-}" ]] && [[ $KAFKA_ADVERTISED_LISTENERS = then echo "SSL is enabled." 
- ensure KAFKA_SSL_KEYSTORE_FILENAME $KAFKA_SSL_KEYSTORE_FILENAME + ub ensure KAFKA_SSL_KEYSTORE_FILENAME $KAFKA_SSL_KEYSTORE_FILENAME export KAFKA_SSL_KEYSTORE_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEYSTORE_FILENAME" - path "$KAFKA_SSL_KEYSTORE_LOCATION" exists + ub path "$KAFKA_SSL_KEYSTORE_LOCATION" exists - ensure KAFKA_SSL_KEY_CREDENTIALS $KAFKA_SSL_KEY_CREDENTIALS + ub ensure KAFKA_SSL_KEY_CREDENTIALS $KAFKA_SSL_KEY_CREDENTIALS KAFKA_SSL_KEY_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEY_CREDENTIALS" - path "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION" exists + ub path "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION" exists export KAFKA_SSL_KEY_PASSWORD KAFKA_SSL_KEY_PASSWORD=$(cat "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION") - ensure KAFKA_SSL_KEYSTORE_CREDENTIALS $KAFKA_SSL_KEYSTORE_CREDENTIALS + ub ensure KAFKA_SSL_KEYSTORE_CREDENTIALS $KAFKA_SSL_KEYSTORE_CREDENTIALS KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEYSTORE_CREDENTIALS" - path "$KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION" exists + ub path "$KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION" exists export KAFKA_SSL_KEYSTORE_PASSWORD KAFKA_SSL_KEYSTORE_PASSWORD=$(cat "$KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION") if [[ -n "${KAFKA_SSL_CLIENT_AUTH-}" ]] && ( [[ $KAFKA_SSL_CLIENT_AUTH == *"required"* ]] || [[ $KAFKA_SSL_CLIENT_AUTH == *"requested"* ]] ) then - ensure KAFKA_SSL_TRUSTSTORE_FILENAME $KAFKA_SSL_TRUSTSTORE_FILENAME + ub ensure KAFKA_SSL_TRUSTSTORE_FILENAME $KAFKA_SSL_TRUSTSTORE_FILENAME export KAFKA_SSL_TRUSTSTORE_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_TRUSTSTORE_FILENAME" - path "$KAFKA_SSL_TRUSTSTORE_LOCATION" exists + ub path "$KAFKA_SSL_TRUSTSTORE_LOCATION" exists - ensure KAFKA_SSL_TRUSTSTORE_CREDENTIALS $KAFKA_SSL_TRUSTSTORE_CREDENTIALS + ub ensure KAFKA_SSL_TRUSTSTORE_CREDENTIALS $KAFKA_SSL_TRUSTSTORE_CREDENTIALS KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_TRUSTSTORE_CREDENTIALS" - path "$KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION" exists + ub path "$KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION" exists export KAFKA_SSL_TRUSTSTORE_PASSWORD KAFKA_SSL_TRUSTSTORE_PASSWORD=$(cat "$KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION") fi @@ -126,7 +126,7 @@ if [[ -n "${KAFKA_ADVERTISED_LISTENERS-}" ]] && [[ $KAFKA_ADVERTISED_LISTENERS = then echo "SASL" is enabled. - ensure KAFKA_OPTS $KAFKA_OPTS + ub ensure KAFKA_OPTS $KAFKA_OPTS if [[ ! 
$KAFKA_OPTS == *"java.security.auth.login.config"* ]] then From 0ed4eb62517b15e3d1043e1bec67e553f8cc0cb4 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Thu, 18 May 2023 02:56:12 +0530 Subject: [PATCH 17/44] test fixes --- kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java | 1 + .../java/org/dockerImageTests/utils/CustomKafkaContainer.java | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java index 3410fd17db..d915980969 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java @@ -76,6 +76,7 @@ public void setup(){ @AfterAll public void teardown(){ System.out.println("tearing down"); + System.out.println(container1.getLogs()); container1.stop(); System.out.println(container1.isRunning()); producer.close(); diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java index 520f8c19ef..001d2126bb 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java @@ -45,7 +45,7 @@ protected void containerIsStarting(InspectContainerResponse containerInfo) { Integer mappedSslPort = getMappedPort(KAFKA_SSL_PORT); // use the mapped port to configure the application - String url = String.format("PLAINTEXT://%s:%s,SSL://%s:%s",getHost(),mappedOpenPort,getHost(),mappedSslPort); + String url = String.format("PLAINTEXT://0.0.0.0:%s,SSL://0.0.0.0:%s",mappedOpenPort,mappedSslPort); System.out.println(url); String command = "#!/bin/bash\n"; // exporting KAFKA_ADVERTISED_LISTENERS with the container hostname From 0fb6551623a3fef70c810078975c5dc71224b2ef Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Thu, 18 May 2023 11:38:08 +0530 Subject: [PATCH 18/44] fixes related to ub and encryption format --- .../include/etc/confluent/docker/configure | 24 +++++++++---------- .../java/org/dockerImageTests/sslKafkaIT.java | 1 + 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/kafka-plus-rest/include/etc/confluent/docker/configure b/kafka-plus-rest/include/etc/confluent/docker/configure index e708798d9f..0293610ee5 100755 --- a/kafka-plus-rest/include/etc/confluent/docker/configure +++ b/kafka-plus-rest/include/etc/confluent/docker/configure @@ -90,31 +90,31 @@ if [[ -n "${KAFKA_ADVERTISED_LISTENERS-}" ]] && [[ $KAFKA_ADVERTISED_LISTENERS = then echo "SSL is enabled." 
- ub ensure KAFKA_SSL_KEYSTORE_FILENAME $KAFKA_SSL_KEYSTORE_FILENAME + ub ensure KAFKA_SSL_KEYSTORE_FILENAME export KAFKA_SSL_KEYSTORE_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEYSTORE_FILENAME" - ub path "$KAFKA_SSL_KEYSTORE_LOCATION" exists + ub path "$KAFKA_SSL_KEYSTORE_LOCATION" existence - ub ensure KAFKA_SSL_KEY_CREDENTIALS $KAFKA_SSL_KEY_CREDENTIALS + ub ensure KAFKA_SSL_KEY_CREDENTIALS KAFKA_SSL_KEY_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEY_CREDENTIALS" - ub path "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION" exists + ub path "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION" existence export KAFKA_SSL_KEY_PASSWORD KAFKA_SSL_KEY_PASSWORD=$(cat "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION") - ub ensure KAFKA_SSL_KEYSTORE_CREDENTIALS $KAFKA_SSL_KEYSTORE_CREDENTIALS + ub ensure KAFKA_SSL_KEYSTORE_CREDENTIALS KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEYSTORE_CREDENTIALS" - ub path "$KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION" exists + ub path "$KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION" existence export KAFKA_SSL_KEYSTORE_PASSWORD KAFKA_SSL_KEYSTORE_PASSWORD=$(cat "$KAFKA_SSL_KEYSTORE_CREDENTIALS_LOCATION") if [[ -n "${KAFKA_SSL_CLIENT_AUTH-}" ]] && ( [[ $KAFKA_SSL_CLIENT_AUTH == *"required"* ]] || [[ $KAFKA_SSL_CLIENT_AUTH == *"requested"* ]] ) then - ub ensure KAFKA_SSL_TRUSTSTORE_FILENAME $KAFKA_SSL_TRUSTSTORE_FILENAME + ub ensure KAFKA_SSL_TRUSTSTORE_FILENAME export KAFKA_SSL_TRUSTSTORE_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_TRUSTSTORE_FILENAME" - ub path "$KAFKA_SSL_TRUSTSTORE_LOCATION" exists + ub path "$KAFKA_SSL_TRUSTSTORE_LOCATION" existence - ub ensure KAFKA_SSL_TRUSTSTORE_CREDENTIALS $KAFKA_SSL_TRUSTSTORE_CREDENTIALS + ub ensure KAFKA_SSL_TRUSTSTORE_CREDENTIALS KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_TRUSTSTORE_CREDENTIALS" - ub path "$KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION" exists + ub path "$KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION" existence export KAFKA_SSL_TRUSTSTORE_PASSWORD KAFKA_SSL_TRUSTSTORE_PASSWORD=$(cat "$KAFKA_SSL_TRUSTSTORE_CREDENTIALS_LOCATION") fi @@ -126,7 +126,7 @@ if [[ -n "${KAFKA_ADVERTISED_LISTENERS-}" ]] && [[ $KAFKA_ADVERTISED_LISTENERS = then echo "SASL" is enabled. - ub ensure KAFKA_OPTS $KAFKA_OPTS + ub ensure KAFKA_OPTS if [[ ! 
$KAFKA_OPTS == *"java.security.auth.login.config"* ]] then @@ -170,4 +170,4 @@ ub render-template /etc/confluent/docker/kafka-tools-log4j.properties.template > # --- for rest proxy ub render-properties /etc/confluent/docker/kafka-rest-propertiesSpec.json > /etc/kafka-rest/kafka-rest.properties ub render-properties /etc/confluent/docker/admin-propertiesSpec.json > /etc/kafka-rest/admin.properties -ub render-template /etc/confluent/docker/kafka-rest-log4j.properties.template > /etc/kafka-rest/log4j.properties +ub render-template /etc/confluent/docker/kafka-rest-log4j.properties.template > /etc/kafka-rest/log4j.properties \ No newline at end of file diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java index 8bcff1921a..aeab67f84d 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java @@ -68,6 +68,7 @@ public void setup(){ String baseUrl = String.format("https://%s:%s",container1.getHost(),container1.getMappedPort(KAFKA_REST_PORT)); String bootstrapUrl = String.format("%s:%s",container1.getHost(),container1.getMappedPort(KAFKA_PORT)); Properties props = new Properties(); + props.put("ssl.keystore.type", "PKCS12"); props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SSL"); props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,getClass().getResource("/client-creds/kafka.client.truststore.pkcs12").getPath()); props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "confluent"); From 96dd57154e004efca7fc44fe8512a50c86a27f83 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Thu, 18 May 2023 18:26:54 +0530 Subject: [PATCH 19/44] fixes related to ub and encryption format --- .../include/etc/confluent/docker/configure | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/kafka-plus-rest/include/etc/confluent/docker/configure b/kafka-plus-rest/include/etc/confluent/docker/configure index 0293610ee5..d25cc8cef1 100755 --- a/kafka-plus-rest/include/etc/confluent/docker/configure +++ b/kafka-plus-rest/include/etc/confluent/docker/configure @@ -92,6 +92,18 @@ then ub ensure KAFKA_SSL_KEYSTORE_FILENAME export KAFKA_SSL_KEYSTORE_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEYSTORE_FILENAME" + directory="/etc/kafka/secrets" + +# Check if the directory exists +if [ -d "$directory" ]; then + # Loop through each file and subdirectory in the directory + for file in "$directory"/*; do + # Echo the file or subdirectory name + echo "$(basename "$file")" + done +else + echo "Directory not found: $directory" +fi ub path "$KAFKA_SSL_KEYSTORE_LOCATION" existence ub ensure KAFKA_SSL_KEY_CREDENTIALS From f1e316c19df9058931b787c5eae95c46201f398d Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Thu, 18 May 2023 19:16:56 +0530 Subject: [PATCH 20/44] fixes related to ub and encryption format --- kafka-plus-rest/include/etc/confluent/docker/configure | 3 +++ 1 file changed, 3 insertions(+) diff --git a/kafka-plus-rest/include/etc/confluent/docker/configure b/kafka-plus-rest/include/etc/confluent/docker/configure index d25cc8cef1..cace4a5ff2 100755 --- a/kafka-plus-rest/include/etc/confluent/docker/configure +++ b/kafka-plus-rest/include/etc/confluent/docker/configure @@ -104,10 +104,13 @@ if [ -d "$directory" ]; then else echo "Directory not found: $directory" fi + ub path "$KAFKA_SSL_KEYSTORE_LOCATION" existence ub ensure KAFKA_SSL_KEY_CREDENTIALS KAFKA_SSL_KEY_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEY_CREDENTIALS" + + sleep 
3000 ub path "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION" existence export KAFKA_SSL_KEY_PASSWORD KAFKA_SSL_KEY_PASSWORD=$(cat "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION") From 62e48f447bb5b7dbcaedf18eebbe66d2de7e1465 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Thu, 18 May 2023 19:59:34 +0530 Subject: [PATCH 21/44] fixes related to ub and encryption format --- kafka-plus-rest/include/etc/confluent/docker/configure | 5 +++-- pom.xml | 8 ++++---- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/kafka-plus-rest/include/etc/confluent/docker/configure b/kafka-plus-rest/include/etc/confluent/docker/configure index cace4a5ff2..f6bde3536a 100755 --- a/kafka-plus-rest/include/etc/confluent/docker/configure +++ b/kafka-plus-rest/include/etc/confluent/docker/configure @@ -42,7 +42,7 @@ else ub ensure KAFKA_ZOOKEEPER_CONNECT ub ensure KAFKA_ADVERTISED_LISTENERS fi - +sleep 3000 # By default, LISTENERS is derived from ADVERTISED_LISTENERS by replacing # hosts with 0.0.0.0. This is good default as it ensures that the broker # process listens on all ports. @@ -105,12 +105,13 @@ else echo "Directory not found: $directory" fi + ub path "$KAFKA_SSL_KEYSTORE_LOCATION" existence ub ensure KAFKA_SSL_KEY_CREDENTIALS KAFKA_SSL_KEY_CREDENTIALS_LOCATION="/etc/kafka/secrets/$KAFKA_SSL_KEY_CREDENTIALS" - sleep 3000 + ub path "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION" existence export KAFKA_SSL_KEY_PASSWORD KAFKA_SSL_KEY_PASSWORD=$(cat "$KAFKA_SSL_KEY_CREDENTIALS_LOCATION") diff --git a/pom.xml b/pom.xml index 0c5541cac5..84e00a2f1c 100644 --- a/pom.xml +++ b/pom.xml @@ -33,15 +33,15 @@ 7.5.0-0 - zookeeper + kafka-plus-rest - ce-kafka + From 48c3ee48bd25e1297662f3c8d2683d95412ee652 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Thu, 18 May 2023 20:24:34 +0530 Subject: [PATCH 22/44] fixes related to ub and encryption format --- .../src/test/java/org/dockerImageTests/sslKafkaIT.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java index aeab67f84d..e5b4caa991 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java @@ -34,7 +34,7 @@ @Tag("IntegrationTest") @Testcontainers @TestInstance(TestInstance.Lifecycle.PER_CLASS) -public class sslKafkaIT { +public class sslKafkaIT throws InterruptedException { private static final int KAFKA_PORT = 19093; private static final int KAFKA_REST_PORT = 8082; private static final String IMAGE_NAME = "confluentinc/cp-kafka-kraft"; @@ -65,6 +65,7 @@ public void setup(){ catch(Exception e) { System.out.println(container1.getLogs()); } + Thread.sleep(3600000); String baseUrl = String.format("https://%s:%s",container1.getHost(),container1.getMappedPort(KAFKA_REST_PORT)); String bootstrapUrl = String.format("%s:%s",container1.getHost(),container1.getMappedPort(KAFKA_PORT)); Properties props = new Properties(); From 7a691832497a0538cc7aeeb9f7d2325d9e4688ea Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Thu, 18 May 2023 20:45:46 +0530 Subject: [PATCH 23/44] fixes related to ub and encryption format --- .../src/test/java/org/dockerImageTests/sslKafkaIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java index e5b4caa991..fb7ead70a1 100644 --- 
a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java @@ -34,7 +34,7 @@ @Tag("IntegrationTest") @Testcontainers @TestInstance(TestInstance.Lifecycle.PER_CLASS) -public class sslKafkaIT throws InterruptedException { +public class sslKafkaIT { private static final int KAFKA_PORT = 19093; private static final int KAFKA_REST_PORT = 8082; private static final String IMAGE_NAME = "confluentinc/cp-kafka-kraft"; @@ -50,7 +50,7 @@ public class sslKafkaIT throws InterruptedException { public GenericContainer container1;; @BeforeAll - public void setup(){ + public void setup() throws InterruptedException { Yaml yaml = new Yaml(); InputStream inputStream = getClass().getResourceAsStream("/sslconfigs.yml"); Map env = yaml.load(inputStream); From b762b5bbac468f330bb3db52d7c23179cfb3c497 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Fri, 19 May 2023 15:28:41 +0530 Subject: [PATCH 24/44] fixes related to ub and encryption format --- .../src/test/java/org/dockerImageTests/sslKafkaIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java index fb7ead70a1..762886c1ab 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java @@ -57,8 +57,8 @@ public void setup() throws InterruptedException { String imageName = String.format("%s%s:%s",DOCKER_REGISTRY,IMAGE_NAME,DOCKER_TAG); // String imageName = String.format("placeholder/confluentinc/kafka-local:7.4.0-80-ubi8"); container1=new CustomKafkaContainer(imageName,env) - .withClasspathResourceMapping("/kafka-1-creds","/etc/kafka/secrets", BindMode.READ_WRITE) - .withClasspathResourceMapping("/restproxy-creds","/etc/restproxy/secrets",BindMode.READ_WRITE); + .withClasspathResourceMapping("/kafka-1-creds","/etc/kafka/secrets", BindMode.READ_ONLY) + .withClasspathResourceMapping("/restproxy-creds","/etc/restproxy/secrets",BindMode.READ_ONLY); try { container1.start(); } From 74c7be1270590e0f1f85dc3bcc8e86ef3e95e8d0 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Fri, 19 May 2023 15:29:11 +0530 Subject: [PATCH 25/44] fixes related to ub and encryption format --- .../src/test/java/org/dockerImageTests/kafkaIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java index d915980969..6193d7c260 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/kafkaIT.java @@ -58,8 +58,8 @@ public void setup(){ String imageName = String.format("%s%s:%s",DOCKER_REGISTRY,IMAGE_NAME,DOCKER_TAG); //String imageName = String.format("placeholder/confluentinc/kafka-local:7.4.0-80-ubi8"); container1=new CustomKafkaContainer(imageName,env) - .withClasspathResourceMapping("/kafka-1-creds","/etc/kafka/secrets", BindMode.READ_WRITE) - .withClasspathResourceMapping("/restproxy-creds","/etc/restproxy/secrets",BindMode.READ_WRITE);; + .withClasspathResourceMapping("/kafka-1-creds","/etc/kafka/secrets", BindMode.READ_ONLY) + .withClasspathResourceMapping("/restproxy-creds","/etc/restproxy/secrets",BindMode.READ_ONLY);; try { container1.start(); } From f08e3ab9293da92a3a0b47ea1152f50793091f7e Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Fri, 19 May 
2023 17:44:22 +0530 Subject: [PATCH 26/44] fixes related to ub and encryption format --- kafka-plus-rest/include/etc/confluent/docker/configure | 1 - .../src/test/java/org/dockerImageTests/sslKafkaIT.java | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/kafka-plus-rest/include/etc/confluent/docker/configure b/kafka-plus-rest/include/etc/confluent/docker/configure index f6bde3536a..c9db2ac108 100755 --- a/kafka-plus-rest/include/etc/confluent/docker/configure +++ b/kafka-plus-rest/include/etc/confluent/docker/configure @@ -42,7 +42,6 @@ else ub ensure KAFKA_ZOOKEEPER_CONNECT ub ensure KAFKA_ADVERTISED_LISTENERS fi -sleep 3000 # By default, LISTENERS is derived from ADVERTISED_LISTENERS by replacing # hosts with 0.0.0.0. This is good default as it ensures that the broker # process listens on all ports. diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java index 762886c1ab..f10e628cd9 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java @@ -50,7 +50,7 @@ public class sslKafkaIT { public GenericContainer container1;; @BeforeAll - public void setup() throws InterruptedException { + public void setup() { Yaml yaml = new Yaml(); InputStream inputStream = getClass().getResourceAsStream("/sslconfigs.yml"); Map env = yaml.load(inputStream); @@ -65,7 +65,6 @@ public void setup() throws InterruptedException { catch(Exception e) { System.out.println(container1.getLogs()); } - Thread.sleep(3600000); String baseUrl = String.format("https://%s:%s",container1.getHost(),container1.getMappedPort(KAFKA_REST_PORT)); String bootstrapUrl = String.format("%s:%s",container1.getHost(),container1.getMappedPort(KAFKA_PORT)); Properties props = new Properties(); @@ -82,6 +81,7 @@ public void setup() throws InterruptedException { @AfterAll public void teardown(){ System.out.println("tearing down"); + System.out.println(container1.getLogs()); container1.stop(); producer.close(); } From e711be9f1d634c981e4c5505531febe0fb62524b Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Fri, 19 May 2023 18:09:54 +0530 Subject: [PATCH 27/44] fixes related to ub and encryption format --- .../java/org/dockerImageTests/utils/CustomKafkaContainer.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java index 001d2126bb..b07466328c 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/CustomKafkaContainer.java @@ -45,7 +45,8 @@ protected void containerIsStarting(InspectContainerResponse containerInfo) { Integer mappedSslPort = getMappedPort(KAFKA_SSL_PORT); // use the mapped port to configure the application - String url = String.format("PLAINTEXT://0.0.0.0:%s,SSL://0.0.0.0:%s",mappedOpenPort,mappedSslPort); + //String url = String.format("PLAINTEXT://0.0.0.0:%s,SSL://0.0.0.0:%s",mappedOpenPort,mappedSslPort); + String url = String.format("PLAINTEXT://%s:%s,SSL://%s:%s",getHost(),mappedOpenPort,getHost(),mappedSslPort); System.out.println(url); String command = "#!/bin/bash\n"; // exporting KAFKA_ADVERTISED_LISTENERS with the container hostname From 70a2bd954ac77648aa73c77a01f7815847bb9b50 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Fri, 19 May 
2023 21:34:38 +0530 Subject: [PATCH 28/44] fixes related to ub and encryption format --- .../src/test/java/org/dockerImageTests/sslKafkaIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java index f10e628cd9..c8239326f7 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java @@ -70,9 +70,9 @@ public void setup() { Properties props = new Properties(); props.put("ssl.keystore.type", "PKCS12"); props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SSL"); - props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,getClass().getResource("/client-creds/kafka.client.truststore.pkcs12").getPath()); + props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,getClass().getResource("/client-creds/kafka.client.truststore.pkcs121").getPath()); props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "confluent"); - props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, getClass().getResource("/client-creds/kafka.client.keystore.pkcs12").getPath()); + props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, getClass().getResource("/client-creds/kafka.client.keystore.pkcs121").getPath()); props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "confluent"); admin = new Admin(bootstrapUrl,baseUrl,props,true); consumer = new Consumer(bootstrapUrl,"test-1",baseUrl,props,true); From d3628abbe5dd437d418bde08fd782bf9331beb1a Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Fri, 19 May 2023 23:03:17 +0530 Subject: [PATCH 29/44] fixes related to ub and encryption format --- .../src/test/java/org/dockerImageTests/sslKafkaIT.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java index c8239326f7..762886c1ab 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java @@ -50,7 +50,7 @@ public class sslKafkaIT { public GenericContainer container1;; @BeforeAll - public void setup() { + public void setup() throws InterruptedException { Yaml yaml = new Yaml(); InputStream inputStream = getClass().getResourceAsStream("/sslconfigs.yml"); Map env = yaml.load(inputStream); @@ -65,14 +65,15 @@ public void setup() { catch(Exception e) { System.out.println(container1.getLogs()); } + Thread.sleep(3600000); String baseUrl = String.format("https://%s:%s",container1.getHost(),container1.getMappedPort(KAFKA_REST_PORT)); String bootstrapUrl = String.format("%s:%s",container1.getHost(),container1.getMappedPort(KAFKA_PORT)); Properties props = new Properties(); props.put("ssl.keystore.type", "PKCS12"); props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SSL"); - props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,getClass().getResource("/client-creds/kafka.client.truststore.pkcs121").getPath()); + props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,getClass().getResource("/client-creds/kafka.client.truststore.pkcs12").getPath()); props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "confluent"); - props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, getClass().getResource("/client-creds/kafka.client.keystore.pkcs121").getPath()); + props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, getClass().getResource("/client-creds/kafka.client.keystore.pkcs12").getPath()); 
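                // getResource(...).getPath() returns a URL-encoded path and throws
                // NullPointerException when the resource is absent (as with the
                // pkcs121 typo corrected above). A sketch of the plain-File
                // alternative that later patches in this series switch to (paths
                // assumed relative to the module root):
                //   File ts = new File("src/test/resources/client-creds/kafka.client.truststore.pkcs12");
                //   props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, ts.getAbsolutePath());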
props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "confluent"); admin = new Admin(bootstrapUrl,baseUrl,props,true); consumer = new Consumer(bootstrapUrl,"test-1",baseUrl,props,true); @@ -81,7 +82,6 @@ public void setup() { @AfterAll public void teardown(){ System.out.println("tearing down"); - System.out.println(container1.getLogs()); container1.stop(); producer.close(); } From eddea1a5f624ee70451c6a6696511b61eeed2949 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Fri, 19 May 2023 23:50:34 +0530 Subject: [PATCH 30/44] fixes related to ub and encryption format --- .../src/test/java/org/dockerImageTests/sslKafkaIT.java | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java index 762886c1ab..a8d87fd63a 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java @@ -50,7 +50,7 @@ public class sslKafkaIT { public GenericContainer container1;; @BeforeAll - public void setup() throws InterruptedException { + public void setup() { Yaml yaml = new Yaml(); InputStream inputStream = getClass().getResourceAsStream("/sslconfigs.yml"); Map env = yaml.load(inputStream); @@ -65,15 +65,17 @@ public void setup() throws InterruptedException { catch(Exception e) { System.out.println(container1.getLogs()); } - Thread.sleep(3600000); + // Thread.sleep(3600000); String baseUrl = String.format("https://%s:%s",container1.getHost(),container1.getMappedPort(KAFKA_REST_PORT)); String bootstrapUrl = String.format("%s:%s",container1.getHost(),container1.getMappedPort(KAFKA_PORT)); Properties props = new Properties(); + System.out.println(getClass().getResource("/client-creds/kafka.client.truststore.pkcs12").getFile()); + System.out.println(getClass().getResource("/client-creds/kafka.client.truststore.pkcs12").getPath()); props.put("ssl.keystore.type", "PKCS12"); props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SSL"); - props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,getClass().getResource("/client-creds/kafka.client.truststore.pkcs12").getPath()); + props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,getClass().getResource("/client-creds/kafka.client.truststore.pkcs12").getFile()); props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "confluent"); - props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, getClass().getResource("/client-creds/kafka.client.keystore.pkcs12").getPath()); + props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, getClass().getResource("/client-creds/kafka.client.keystore.pkcs12").getFile()); props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "confluent"); admin = new Admin(bootstrapUrl,baseUrl,props,true); consumer = new Consumer(bootstrapUrl,"test-1",baseUrl,props,true); From 2803d7c2cb049f0554c560baf76f79c52922d265 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Sat, 20 May 2023 00:06:44 +0530 Subject: [PATCH 31/44] fixes related to ub and encryption format --- .../src/test/java/org/dockerImageTests/sslKafkaIT.java | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java index a8d87fd63a..762886c1ab 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java @@ -50,7 +50,7 @@ public class sslKafkaIT { 
public GenericContainer container1;; @BeforeAll - public void setup() { + public void setup() throws InterruptedException { Yaml yaml = new Yaml(); InputStream inputStream = getClass().getResourceAsStream("/sslconfigs.yml"); Map env = yaml.load(inputStream); @@ -65,17 +65,15 @@ public void setup() { catch(Exception e) { System.out.println(container1.getLogs()); } - // Thread.sleep(3600000); + Thread.sleep(3600000); String baseUrl = String.format("https://%s:%s",container1.getHost(),container1.getMappedPort(KAFKA_REST_PORT)); String bootstrapUrl = String.format("%s:%s",container1.getHost(),container1.getMappedPort(KAFKA_PORT)); Properties props = new Properties(); - System.out.println(getClass().getResource("/client-creds/kafka.client.truststore.pkcs12").getFile()); - System.out.println(getClass().getResource("/client-creds/kafka.client.truststore.pkcs12").getPath()); props.put("ssl.keystore.type", "PKCS12"); props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SSL"); - props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,getClass().getResource("/client-creds/kafka.client.truststore.pkcs12").getFile()); + props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,getClass().getResource("/client-creds/kafka.client.truststore.pkcs12").getPath()); props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "confluent"); - props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, getClass().getResource("/client-creds/kafka.client.keystore.pkcs12").getFile()); + props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, getClass().getResource("/client-creds/kafka.client.keystore.pkcs12").getPath()); props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "confluent"); admin = new Admin(bootstrapUrl,baseUrl,props,true); consumer = new Consumer(bootstrapUrl,"test-1",baseUrl,props,true); From 06ac6648b33de642e992c948b90ba523913853ae Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Sat, 20 May 2023 01:19:45 +0530 Subject: [PATCH 32/44] fixes related to ub and encryption format --- .../java/org/dockerImageTests/sslKafkaIT.java | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java index 762886c1ab..852d43c4cd 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java @@ -50,7 +50,7 @@ public class sslKafkaIT { public GenericContainer container1;; @BeforeAll - public void setup() throws InterruptedException { + public void setup() { Yaml yaml = new Yaml(); InputStream inputStream = getClass().getResourceAsStream("/sslconfigs.yml"); Map env = yaml.load(inputStream); @@ -65,16 +65,26 @@ public void setup() throws InterruptedException { catch(Exception e) { System.out.println(container1.getLogs()); } - Thread.sleep(3600000); + // Thread.sleep(3600000); String baseUrl = String.format("https://%s:%s",container1.getHost(),container1.getMappedPort(KAFKA_REST_PORT)); String bootstrapUrl = String.format("%s:%s",container1.getHost(),container1.getMappedPort(KAFKA_PORT)); Properties props = new Properties(); + String path = "src/test/resources/client-creds/kafka.client.truststore.pkcs12"; + File file = new File(path); + String absolutePath1 = file.getAbsolutePath(); + System.out.println(absolutePath1); + path = "src/test/resources/client-creds/kafka.client.keystore.pkcs12"; + File file1 = new File(path); + String absolutePath2 = file1.getAbsolutePath(); + System.out.println(absolutePath2); 
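The hour-long Thread.sleep that flickers in and out of this test reads like a keep-the-container-alive aid for interactive debugging rather than a readiness fix; for readiness, Testcontainers wait strategies block only as long as needed. A minimal sketch assuming the standard Testcontainers API; the image tag, port, and class name are placeholders.

```java
import java.time.Duration;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.wait.strategy.Wait;

public class WaitStrategySketch {
    public static void main(String[] args) {
        try (GenericContainer<?> kafka =
                     new GenericContainer<>("confluentinc/cp-kafka-kraft:latest") // placeholder tag
                             .withExposedPorts(8082)
                             .waitingFor(Wait.forListeningPort()
                                     .withStartupTimeout(Duration.ofMinutes(2)))) {
            kafka.start(); // blocks until the port answers or the timeout trips
        }
    }
}
```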
props.put("ssl.keystore.type", "PKCS12"); props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SSL"); - props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,getClass().getResource("/client-creds/kafka.client.truststore.pkcs12").getPath()); + // props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,getClass().getResource("/client-creds/kafka.client.truststore.pkcs12").getPath()); props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "confluent"); - props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, getClass().getResource("/client-creds/kafka.client.keystore.pkcs12").getPath()); + // props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, getClass().getResource("/client-creds/kafka.client.keystore.pkcs12").getPath()); props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "confluent"); + props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,absolutePath1); + props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, absolutePath2); admin = new Admin(bootstrapUrl,baseUrl,props,true); consumer = new Consumer(bootstrapUrl,"test-1",baseUrl,props,true); producer = new Producer(baseUrl,bootstrapUrl,props,true); From 07a4f6b0b2c915c04dd78f844a84dce6d119756a Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Sat, 20 May 2023 05:56:06 +0530 Subject: [PATCH 33/44] fixes related to ub and encryption format --- .../src/test/java/org/dockerImageTests/sslKafkaIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java index 852d43c4cd..37d443b1b4 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java @@ -11,6 +11,7 @@ import org.junit.experimental.categories.Category; import org.junit.jupiter.api.*; +import java.io.File; import java.io.InputStream; import java.util.Map; From ee1af7071b415de5fd4dd9039792e005bec60e3c Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Sat, 20 May 2023 20:50:47 +0530 Subject: [PATCH 34/44] fixes related to ub and encryption format --- .../java/org/dockerImageTests/sslKafkaIT.java | 8 +++++--- .../resources/client-creds/client-keystore.jks | Bin 0 -> 3976 bytes .../resources/client-creds/client-truststore.jks | Bin 0 -> 1090 bytes .../src/test/resources/client-creds/client.jks | Bin 0 -> 3341 bytes 4 files changed, 5 insertions(+), 3 deletions(-) create mode 100644 kafka-plus-rest/src/test/resources/client-creds/client-keystore.jks create mode 100644 kafka-plus-rest/src/test/resources/client-creds/client-truststore.jks create mode 100644 kafka-plus-rest/src/test/resources/client-creds/client.jks diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java index 37d443b1b4..a05bb9e74c 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java @@ -78,14 +78,16 @@ public void setup() { File file1 = new File(path); String absolutePath2 = file1.getAbsolutePath(); System.out.println(absolutePath2); - props.put("ssl.keystore.type", "PKCS12"); + // props.put("ssl.keystore.type", "PKCS12"); props.put(AdminClientConfig.SECURITY_PROTOCOL_CONFIG, "SSL"); // props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,getClass().getResource("/client-creds/kafka.client.truststore.pkcs12").getPath()); props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "confluent"); // props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, 
getClass().getResource("/client-creds/kafka.client.keystore.pkcs12").getPath()); props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "confluent"); - props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,absolutePath1); - props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, absolutePath2); + // props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,absolutePath1); + // props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, absolutePath2); + props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,getClass().getResource("/client-creds/client-truststore.jks").getPath()); + props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, getClass().getResource("/client-creds/client.jks").getPath()); admin = new Admin(bootstrapUrl,baseUrl,props,true); consumer = new Consumer(bootstrapUrl,"test-1",baseUrl,props,true); producer = new Producer(baseUrl,bootstrapUrl,props,true); diff --git a/kafka-plus-rest/src/test/resources/client-creds/client-keystore.jks b/kafka-plus-rest/src/test/resources/client-creds/client-keystore.jks new file mode 100644 index 0000000000000000000000000000000000000000..e934a886694862581241ea1fc768ee5ed4758c4b GIT binary patch literal 3976 zcma)n1CSNB`qo)ju51z zBt&Y|K*GPzd7kt8o#)l_ytwC{`}y8`Uw+TM2rO+Bkc1S0r4=EAz+>*l5UEMXN%F9? z93U($>tD=_z=Es(mjr$T!h*B^;#YqojqKY0Oi@vi0Q0b5mA~)~!ttL8Fg3ygA^UGh zg%AcoONIC2sP>i<&y|?Gq>GC_N1gD^1Azi`KoU*_6&b~UZv;ZX0K`o)$fKCMBxq6) z2^>V{sXk)FWt^1lvbD;Vzey?$!h-M>I1Oh2_dx+k0Ny1VPyQvvSL%-UJREm)LfU$# zxC&t>sf0GR%o3AvEu>(Ox{X(UHMeqaAbxGQ*UUKc)C-1sxhL-uwE%q!4t%D8cF#wizWf-Q*M;Z4c7U8z&)c+RKS%gme$N?r*a3~G+ULK>(7E(gGA z3}+4|jghfFC4(clu|U>`E65_?F-_V|-lfc-+pnH)YVHHlKeWQA&JTRPB=76uA2Vj7z}$53di!>kqY zMx%8m5&+SJ`bj@x#G9UzS z>nP{L#t7PV)^P(knJGuL0+5o{QLN$eVTraxKfB5t-)qKx*;d1I^o96EB4Ol~8&ue) z;cH||YwMVA8>+&+1wNT!J6sU*{6dz?_4K~7_rYWq2 zgKtKha{>SB`x4tBMPWJl0*$yGz%YwCKGmfl+i>Y?>!EF6#7-7>yZMoVdOV%&mHC81 zx|K=%MNL+1>tc$?F0<^Cp(_5GXrc%5U20Wr10?~~%l|;{XZdz$Lv()v9XX0G!_b74 z<3U&@$iiqeKX~&T{1Yb&O&uIlCbRF-Ua)4VH0U*LbMuugdj2RLK-$2He(eLV=Oq5% zb>b*QNh^n0pJ=l~<_8qj_|#j#d+(nMJn=KX}Gce`SiZ+lpKTyS%a{3i@vH3Le{ z!};=#JIh{95+2W-k=o?pE#>E0{!lAXp;CBim0>gW>aM5JGAMjC@$Cs~ zz*Z(MAxBhzH}GS|?2m(5{V43ZJf(T#>2$fS(;s=g_E7qgV>Ugfn6Ph*A0ZDwgg!BI zTce~J(uZ=b-q(ms5{C`c)tc=DT&%sgv95jL5r!>ZJ?i4y61SK3v}JvCg!;~rS=^uf z1i$b8ow)&A-e0mwiXR`5ds-A;zu%hSNB?AD<=W&&y&Br$L#d`1c!z_N$$Wb-+1f{e zTXK^hU?4lg)+G-$M*Jf0aR`J#)$KZ$F~G+Me1u5agtwyxbs`S+o^8CW8zHYk#z8%I z9asK{68$@E4gk*iw$uXxT#}1l(gVvHKBPlLzbH&q?yasky-o$c#J+YFZdvv5^j}G< zdwwWF@GXEP_yf^U~Nmo1%lVWtrJznPX@ zl9e?4X6W9CsUTSYu}cV4jE0>GU=MHuU;t=<2fz=(^{>b%1_d#gVBFE1V$!mbQqr;# zGSafr;s`7R^-mKySsoT*{1@p1NdSKn@?VAI|FUU5!2B+}W^__0e6cm+dO7=Tnz~2- zADa%4st6@aj%?8lSh#!r>Qs6&~Ren?+s{L6~F=~t)%tC&SSFzu+jrCnk3(l_M z*|aRX4G^x~WjlDQF}ezNr+}rp7M%Z@wC#^#6PeA*H>VC=7Si&9Wp*L^VvS44RlTw7 ze9n8f#|`@SF!-M7EMYZt^Piyr`1AZ(cp=%`gun%!l=-2QOz_13(eIW5npI&$slcGx zXu&=>jsYKKPN6lSqp#W_%sQs1fWFrgFLTdsMk8rFO;wki<L#Oc0!U9Iy7=)>Hjcqe$)= zPZj@`QYixhQ-7uh5}!pTKLwK+L+nV;nZaB`a{;fzkaz5AS?zs8 z(mzwQNoYAg1IJT~gv|F5$)@pl7x~a!AAbtEJ3sRuu=s>uFRLSUk@5|z?)%#Zk^v^*^G*I^eC?Q(%W6-&*xEeg%Sd6u&O^w2h{ zYwFV`&hX$$_Vg@krHAA!KGoGQXnTC}$qSrtvOq=h11lS1-cBw)6d_n#GC~~` z%PTzBHqyC zKr^e|MiV)n;8Lz}RCs^Izlc`&AFSHDi^(8Fj`vw7`xp4}_L{CD6)C4y{CDOM|lU2D+Wz%INj5N_va7?d*A&~Prc$Uc+zpU4ca7pZ`Ut7h24%Y z95~+o@GLVyH}?`}LO0s*OM{#C} z=R%u+XPsf5x2t_w%we?JtiC$FBlxI~iIKZg-$(`CAGFBQJQL|b<7-r;()t^lrf$m5 zDGmGx^Ks$-BySP7OtmvpXz>NQQCKG3dw8Lw%F4}x9uYbxJ(?c=nCtwjPsY^I-@S?} zl{dzh%QkdME^%TzY0TFaz8j%KRrkVV$Wc(lwE3$ubD-j)Yp#QtZ0Oc~$;iR8w(XGC z4jrDM1y!AEodsrUwPmLgfe=FyjDKDZc#?4Zu_frUk5hh}vD7&|?EoKy>v zGiT%4!JVaOVbOjd5f`V;z+UT$Gb}f^}*4vKb$R9TL-9JX(hU;2F~^Z=p+RxG=St1 
literal 0 HcmV?d00001
diff --git a/kafka-plus-rest/src/test/resources/client-creds/client-truststore.jks b/kafka-plus-rest/src/test/resources/client-creds/client-truststore.jks new file mode 100644 index 0000000000000000000000000000000000000000..bad75f72832c36fa457164dcca6d31aac1c131d3 GIT binary patch literal 1090 [base85 binary payload elided] literal 0 HcmV?d00001
diff --git a/kafka-plus-rest/src/test/resources/client-creds/client.jks b/kafka-plus-rest/src/test/resources/client-creds/client.jks new file mode 100644 GIT binary patch literal 3341 [base85 binary payload elided]
z1!Z7+8*_LF<`M6>)|zp6I9@QO-$8=MnC3h8A$?&={b*D$6&#CWl8C{5xyi;`&z$j1 zY`{hlOAcy&0qGD#i9?(%-DDHE9+xfnOTTSRPcpKnNv}>n;xkxnRZA7j*yeuTwAcFN zMt-K%0i)FWH`k1wJ<62)@KJM~ioo(u2Kpc->gofyHX|1y3}6sAj~ok-qW}T=;R&!p zxuEDQ9^DA5l%eio^jP_jfH6~+rHA5=CJ^9&LI4mT14lSU8h|6WPJoe6K`sF^Zvx5E z(ip2xAQ60={0JAZx&$9T;w7RpUC{4e`no7E_y7uJfWUJ8nMVxD1)$+f^otn=V`KpT z>k0Dba?$&bUM?`mHZ{=(7yZ#%E0q6*y((TOFrm2mb@ys{@eTHDqqScuUvPwJ^IDTU7YX`>=AOI#)0J?(yvf zUT(`iRADx3IDoxrO{m+f31;7Mm3=!^vqlEy>1r)%7gczvGQ$9?Z&EWkqEYs;FQAbM zB_1!nUuZ7$O$zjrU?>Bf-~&3rvwslm(eJC=L>~RN5Y(f-&}6scq9ly@%L)F!309;N ztf)*USosIRiuV8eg#V4=(!J81_tOH`vU9jp&J8n5Y8q*yX0eF47DK=Gh26#c3ufw= z&7k1ZyPblVZG|(&B^oL_PUUS+?sUJ2$^$1Yd!I?YE!2Zj1?x9aJaL1=zcnNqFyU(x zdxr;?)=VChg%bj|Lre=}{QaieDJ*G#wIp!Q2;h zN9j1T5OO}0w9J~f^6OV7+oa}pw-|qU!`rUcoCv~&PWI$E`B(GTn`*X2M?(y8R5XG! zC|PBKy?h5t>E%@#B9mNq3i<@duO1vtLL2m=`C z6F`ptj;9Eed;=hp!_a^CQLbga#s9ph;po5Z-G4I+F#IpQ{KE}UJfjCj4SYze<64)N zzM@H^uo7d5FX2x!ydRAU1<9^RE_SWnJ7w{Ou(F%Xo1BMU(#mwRay+c+nUUwQaT)q? z)TT8MQzp?nW}rQ8QITG!5w}D0%g>J~pFL)}*z&mn#)!)c^4d90*(Yzzq&*KS;q`R2 zW3Q|OE`GXNR5T(JYCIVuDB9Sxk=7;@0N_7ouT>y_Q3Lf9OgwTwyH9Zd{Ihy5`2~pt z03*b#Xk}gbw+^N z#nH-lXEXL;9n)jCmnKQ-qAXcs!(_4^`wAxQJsdqO_l_HNR~c2a$~aq{ijLvloU0(; H#w`5<=df{o literal 0 HcmV?d00001 From 16b2c20db121f5cc3bfaf61b39632bb12a602429 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Sat, 20 May 2023 21:25:28 +0530 Subject: [PATCH 35/44] fixes related to ub and encryption format --- .../src/test/java/org/dockerImageTests/sslKafkaIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java index a05bb9e74c..a21f3b801c 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java @@ -86,6 +86,7 @@ public void setup() { props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "confluent"); // props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,absolutePath1); // props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, absolutePath2); + props.put(SslConfigs.DEFAULT_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM, ""); props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,getClass().getResource("/client-creds/client-truststore.jks").getPath()); props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, getClass().getResource("/client-creds/client.jks").getPath()); admin = new Admin(bootstrapUrl,baseUrl,props,true); From d74469a37ec3be6c1ad43c563a00fe1bc24b94b9 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Sat, 20 May 2023 22:53:50 +0530 Subject: [PATCH 36/44] fixes related to ub and encryption format --- .../src/test/java/org/dockerImageTests/sslKafkaIT.java | 1 + kafka-plus-rest/src/test/resources/sslconfigs.yml | 2 ++ 2 files changed, 3 insertions(+) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java index a21f3b801c..7ef8b09c83 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/sslKafkaIT.java @@ -87,6 +87,7 @@ public void setup() { // props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,absolutePath1); // props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, absolutePath2); props.put(SslConfigs.DEFAULT_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM, ""); + props.put("ssl.endpoint.identification.algorithm", ""); 
props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,getClass().getResource("/client-creds/client-truststore.jks").getPath()); props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, getClass().getResource("/client-creds/client.jks").getPath()); admin = new Admin(bootstrapUrl,baseUrl,props,true); diff --git a/kafka-plus-rest/src/test/resources/sslconfigs.yml b/kafka-plus-rest/src/test/resources/sslconfigs.yml index abcf201da0..5d25473d26 100644 --- a/kafka-plus-rest/src/test/resources/sslconfigs.yml +++ b/kafka-plus-rest/src/test/resources/sslconfigs.yml @@ -9,6 +9,8 @@ KAFKA_JMX_HOSTNAME: "localhost" KAFKA_PROCESS_ROLES: 'broker,controller' KAFKA_NODE_ID: "1" +KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: "" + KAFKA_LISTENERS: "PLAINTEXT://0.0.0.0:19092,SSL://0.0.0.0:19093,SSL-INT://0.0.0.0:9093,BROKER://0.0.0.0:9092,CONTROLLER://kafka-1:29093" KAFKA_ADVERTISED_LISTENERS: "PLAINTEXT://localhost:19092,SSL://localhost:19093,SSL-INT://kafka-1:9093,BROKER://0.0.0.0:9092" KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: "PLAINTEXT:PLAINTEXT,SSL:SSL,SSL-INT:SSL,BROKER:PLAINTEXT,CONTROLLER:PLAINTEXT" From f205aa036a45a74a708d6e36b34bf68dbb7ad291 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Sat, 20 May 2023 23:41:03 +0530 Subject: [PATCH 37/44] fixes related to ub and encryption format --- .../src/test/java/org/dockerImageTests/utils/Consumer.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java index 2a17b28450..d2b320126a 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java @@ -171,7 +171,7 @@ public boolean consumeRest(int numMessages) throws Exception { try { HttpClient httpClient = HttpClientBuilder.create().build(); if (isSsl==true) { - String truststoreFile = "/client-creds/kafka.client.truststore.pkcs12"; + String truststoreFile = "/client-creds/client-truststore.jks"; InputStream truststoreStream = getClass().getResourceAsStream(truststoreFile); KeyStore truststore = KeyStore.getInstance(KeyStore.getDefaultType()); truststore.load(truststoreStream, "confluent".toCharArray()); From 36fceb6a61fc22e5e4238f4d028a76c02a228135 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Sun, 21 May 2023 00:09:56 +0530 Subject: [PATCH 38/44] fixes related to ub and encryption format --- .../src/test/java/org/dockerImageTests/utils/Admin.java | 2 +- .../src/test/java/org/dockerImageTests/utils/Consumer.java | 4 ++-- .../src/test/java/org/dockerImageTests/utils/Producer.java | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java index 38c4e3bfe3..e236f611a7 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java @@ -89,7 +89,7 @@ public List listTopicsUsingRestApi() throws Exception { String endpoint = restEndpoint + "/topics"; HttpClient httpClient = HttpClientBuilder.create().build();; if (isSsl == true) { - String truststoreFile = "/client-creds/kafka.client.truststore.pkcs12"; + String truststoreFile = "/client-creds/client-truststore.jks"; InputStream truststoreStream = getClass().getResourceAsStream(truststoreFile); KeyStore truststore = KeyStore.getInstance(KeyStore.getDefaultType()); 
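The REST-side HTTPS clients in these test utils all follow the same recipe; assembled in one place, it looks roughly like the sketch below, assuming HttpClient 4.5.x as pinned in the module pom (the class name is illustrative). One behavioral note: HttpClientBuilder.setSSLHostnameVerifier only applies to the socket factory the builder would create itself, so once an explicit setSSLSocketFactory is supplied, the verifier has to be passed to the factory constructor, which is evidently where PATCH 41 ends up.

```java
import java.io.InputStream;
import java.security.KeyStore;
import javax.net.ssl.SSLContext;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.conn.ssl.TrustSelfSignedStrategy;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.ssl.SSLContexts;

public class RestClientTlsSketch {
    static CloseableHttpClient build() throws Exception {
        // Load the JKS truststore that ships with the tests.
        KeyStore truststore = KeyStore.getInstance(KeyStore.getDefaultType());
        try (InputStream in = RestClientTlsSketch.class
                .getResourceAsStream("/client-creds/client-truststore.jks")) {
            truststore.load(in, "confluent".toCharArray());
        }
        // Trust the self-signed chain from that truststore.
        SSLContext sslContext = SSLContexts.custom()
                .loadTrustMaterial(truststore, new TrustSelfSignedStrategy())
                .build();
        // The hostname verifier travels in the factory constructor; a verifier
        // set on the builder is ignored once a factory is supplied explicitly.
        SSLConnectionSocketFactory sslSocketFactory =
                new SSLConnectionSocketFactory(sslContext, NoopHostnameVerifier.INSTANCE);
        return HttpClients.custom()
                .setSSLSocketFactory(sslSocketFactory)
                .build();
    }
}
```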
truststore.load(truststoreStream, "confluent".toCharArray()); diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java index d2b320126a..858414313c 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java @@ -91,7 +91,7 @@ private void createConsumerInstance() throws Exception { // Create HTTP POST request HttpClient httpClient = HttpClientBuilder.create().build(); if (isSsl == true) { - String truststoreFile = "/client-creds/kafka.client.truststore.pkcs12"; + String truststoreFile = "/client-creds/client-truststore.jks"; InputStream truststoreStream = getClass().getResourceAsStream(truststoreFile); KeyStore truststore = KeyStore.getInstance(KeyStore.getDefaultType()); truststore.load(truststoreStream, "confluent".toCharArray()); @@ -125,7 +125,7 @@ public boolean subscribeTopicRest(String topicName) throws Exception{ createConsumerInstance(); HttpClient httpClient = HttpClientBuilder.create().build(); if (isSsl==true) { - String truststoreFile = "/client-creds/kafka.client.truststore.pkcs12"; + String truststoreFile = "/client-creds/client-truststore.jks"; InputStream truststoreStream = getClass().getResourceAsStream(truststoreFile); KeyStore truststore = KeyStore.getInstance(KeyStore.getDefaultType()); truststore.load(truststoreStream, "confluent".toCharArray()); diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Producer.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Producer.java index 6460e65b12..1d835c2df3 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Producer.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Producer.java @@ -53,7 +53,7 @@ public void sendRest(String topic, int value) throws Exception { String url = baseUrl + endpoint; OkHttpClient client = new OkHttpClient.Builder().build(); if (isSsl == true) { - String truststoreFile = "/client-creds/kafka.client.truststore.pkcs12"; + String truststoreFile = "/client-creds/client-truststore.jks"; InputStream truststoreStream = getClass().getResourceAsStream(truststoreFile); KeyStore truststore = KeyStore.getInstance(KeyStore.getDefaultType()); truststore.load(truststoreStream, "confluent".toCharArray()); From 65a827933462086c394ed73b85d75bf633f03e85 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Sun, 21 May 2023 01:05:13 +0530 Subject: [PATCH 39/44] fixes related to ub and encryption format --- .../src/test/java/org/dockerImageTests/utils/Admin.java | 1 + .../src/test/java/org/dockerImageTests/utils/Consumer.java | 3 +++ .../src/test/java/org/dockerImageTests/utils/Producer.java | 7 +++++++ 3 files changed, 11 insertions(+) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java index e236f611a7..981917d7a5 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java @@ -105,6 +105,7 @@ public List listTopicsUsingRestApi() throws Exception { // Create HTTP client httpClient = HttpClients.custom() .setSSLSocketFactory(sslSocketFactory) + .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE) .build(); } diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java index 
858414313c..03b63d37bf 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java @@ -107,6 +107,7 @@ private void createConsumerInstance() throws Exception { // Create HTTP client httpClient = HttpClients.custom() .setSSLSocketFactory(sslSocketFactory) + .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE) .build(); } @@ -141,6 +142,7 @@ public boolean subscribeTopicRest(String topicName) throws Exception{ // Create HTTP client httpClient = HttpClients.custom() .setSSLSocketFactory(sslSocketFactory) + .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE) .build(); } @@ -187,6 +189,7 @@ public boolean consumeRest(int numMessages) throws Exception { // Create HTTP client httpClient = HttpClients.custom() .setSSLSocketFactory(sslSocketFactory) + .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE) .build(); } diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Producer.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Producer.java index 1d835c2df3..a5f0101dd6 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Producer.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Producer.java @@ -71,6 +71,13 @@ public void sendRest(String topic, int value) throws Exception { // Create HTTP client client = new OkHttpClient.Builder() .sslSocketFactory(sslSocketFactory, (X509TrustManager) trustManagers[0]) + .hostnameVerifier(new HostnameVerifier() { + @Override + public boolean verify(String hostname, SSLSession session) { + // Allow all hostnames + return true; + } + }) .build(); } From a3e3b267735ec8a14a006df7d007f4f9b72a06a3 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Sun, 21 May 2023 01:20:09 +0530 Subject: [PATCH 40/44] fixes related to ub and encryption format --- .../src/test/java/org/dockerImageTests/utils/Admin.java | 1 + .../src/test/java/org/dockerImageTests/utils/Consumer.java | 1 + 2 files changed, 2 insertions(+) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java index 981917d7a5..1e99f3320c 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java @@ -11,6 +11,7 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpPost; +import org.apache.http.conn.ssl.NoopHostnameVerifier; import org.apache.http.conn.ssl.SSLConnectionSocketFactory; import org.apache.http.conn.ssl.TrustSelfSignedStrategy; import org.apache.http.entity.StringEntity; diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java index 03b63d37bf..708a3c8e44 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java @@ -16,6 +16,7 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpPost; +import org.apache.http.conn.ssl.NoopHostnameVerifier; import org.apache.http.conn.ssl.SSLConnectionSocketFactory; import org.apache.http.conn.ssl.TrustSelfSignedStrategy; import org.apache.http.entity.ContentType; From cfbd3dad15132ebdd5ee5cd1c1c3abddbadfbc5e Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Sun, 21 May 2023 02:35:23 +0530 
Subject: [PATCH 41/44] fixes related to ub and encryption format --- .../src/test/java/org/dockerImageTests/utils/Admin.java | 3 +-- .../test/java/org/dockerImageTests/utils/Consumer.java | 9 +++------ 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java index 1e99f3320c..d94e306f1d 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Admin.java @@ -101,12 +101,11 @@ public List listTopicsUsingRestApi() throws Exception { .build(); // Create SSL connection socket factory - SSLConnectionSocketFactory sslSocketFactory = new SSLConnectionSocketFactory(sslContext); + SSLConnectionSocketFactory sslSocketFactory = new SSLConnectionSocketFactory(sslContext,NoopHostnameVerifier.INSTANCE); // Create HTTP client httpClient = HttpClients.custom() .setSSLSocketFactory(sslSocketFactory) - .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE) .build(); } diff --git a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java index 708a3c8e44..be37fa8f04 100644 --- a/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java +++ b/kafka-plus-rest/src/test/java/org/dockerImageTests/utils/Consumer.java @@ -103,12 +103,11 @@ private void createConsumerInstance() throws Exception { .build(); // Create SSL connection socket factory - SSLConnectionSocketFactory sslSocketFactory = new SSLConnectionSocketFactory(sslContext); + SSLConnectionSocketFactory sslSocketFactory = new SSLConnectionSocketFactory(sslContext,NoopHostnameVerifier.INSTANCE); // Create HTTP client httpClient = HttpClients.custom() .setSSLSocketFactory(sslSocketFactory) - .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE) .build(); } @@ -138,12 +137,11 @@ public boolean subscribeTopicRest(String topicName) throws Exception{ .build(); // Create SSL connection socket factory - SSLConnectionSocketFactory sslSocketFactory = new SSLConnectionSocketFactory(sslContext); + SSLConnectionSocketFactory sslSocketFactory = new SSLConnectionSocketFactory(sslContext,NoopHostnameVerifier.INSTANCE); // Create HTTP client httpClient = HttpClients.custom() .setSSLSocketFactory(sslSocketFactory) - .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE) .build(); } @@ -185,12 +183,11 @@ public boolean consumeRest(int numMessages) throws Exception { .build(); // Create SSL connection socket factory - SSLConnectionSocketFactory sslSocketFactory = new SSLConnectionSocketFactory(sslContext); + SSLConnectionSocketFactory sslSocketFactory = new SSLConnectionSocketFactory(sslContext,NoopHostnameVerifier.INSTANCE); // Create HTTP client httpClient = HttpClients.custom() .setSSLSocketFactory(sslSocketFactory) - .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE) .build(); } From de0b5e08d878e705bd7c8936678fd0587cf8d025 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Mon, 22 May 2023 12:17:53 +0530 Subject: [PATCH 42/44] fixes related to ub and encryption format --- pom.xml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pom.xml b/pom.xml index 84e00a2f1c..0c5541cac5 100644 --- a/pom.xml +++ b/pom.xml @@ -33,15 +33,15 @@ 7.5.0-0 - + kafka kafka-plus-rest - + server-connect From c6907dbfed885758fb27e9aa4f92d5b210a36bd0 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Tue, 23 May 2023 12:15:30 +0530 Subject: [PATCH 43/44] 
fixes related to ub and encryption format --- kafka-plus-rest/pom.xml | 224 ++++++++++++++++++++-------------------- 1 file changed, 110 insertions(+), 114 deletions(-) diff --git a/kafka-plus-rest/pom.xml b/kafka-plus-rest/pom.xml index f133e7ac3b..a322869b1f 100644 --- a/kafka-plus-rest/pom.xml +++ b/kafka-plus-rest/pom.xml @@ -14,119 +14,115 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. ~--> - - - 4.0.0 - - io.confluent.kafka-images - kafka-images-parent - 7.5.0-0 - - - - io.confluent.kafka-images - cp-kafka-kraft - Kafka Docker Image with Rest Proxy - - - - 11 - 11 - UTF-8 - - - false - true - 2.0.0-alpha5 - - - - com.squareup.okhttp3 - okhttp - 4.9.1 - - - org.apache.httpcomponents - httpclient - 4.5.13 - - - org.json - json - 20210307 - - - org.apache.kafka - kafka-clients - 2.8.1 - - - org.junit.jupiter - junit-jupiter - 5.8.1 - test - - - org.testcontainers - testcontainers - 1.18.0 - test - - - log4j - log4j - - - - - org.testcontainers - junit-jupiter - 1.18.0 - test - - - org.yaml - snakeyaml - 1.21 - - - org.slf4j - slf4j-api - ${slf4j.version} - - - - org.slf4j - slf4j-log4j12 - ${slf4j.version} - - - - - - - org.apache.maven.plugins - maven-jar-plugin - 2.6 - - - none - - - - - org.apache.maven.plugins - maven-failsafe-plugin - 3.0.0-M7 - - - ${docker.registry} - ${docker.tag} - - - - - + 4.0.0 + + io.confluent.kafka-images + kafka-images-parent + 7.5.0-0 + + io.confluent.kafka-images + cp-kafka-kraft + + UTF-8 + false + true + 2.0.0-alpha5 + + + + com.squareup.okhttp3 + okhttp + 4.9.1 + test + + + org.apache.httpcomponents + httpclient + 4.5.13 + test + + + org.json + json + 20210307 + test + + + org.apache.kafka + kafka-clients + 2.8.1 + test + + + org.junit.jupiter + junit-jupiter + 5.8.1 + test + + + org.testcontainers + testcontainers + 1.18.0 + test + + + log4j + log4j + + + + + org.testcontainers + junit-jupiter + 1.18.0 + test + + + org.yaml + snakeyaml + 1.21 + test + + + org.slf4j + slf4j-api + ${slf4j.version} + test + + + org.slf4j + slf4j-log4j12 + ${slf4j.version} + test + + + + + + org.apache.maven.plugins + maven-jar-plugin + 2.6 + + + none + + + + + org.apache.maven.plugins + maven-failsafe-plugin + 3.0.0-M7 + + + ${docker.registry} + ${docker.tag} + + + + + From 1c80a2ca4f378312d674c3a4810069c16442e2c7 Mon Sep 17 00:00:00 2001 From: Ujjwal Date: Tue, 23 May 2023 21:21:35 +0530 Subject: [PATCH 44/44] fixes related to ub and encryption format --- kafka-connect-base/pom.xml | 8 -------- pom.xml | 8 ++++++++ server-connect-base/pom.xml | 8 -------- server-connect/pom.xml | 8 -------- server/pom.xml | 8 -------- 5 files changed, 8 insertions(+), 32 deletions(-) diff --git a/kafka-connect-base/pom.xml b/kafka-connect-base/pom.xml index 926f459c2c..df4829c942 100644 --- a/kafka-connect-base/pom.xml +++ b/kafka-connect-base/pom.xml @@ -34,12 +34,4 @@ false true - - - - junit - junit - test - - diff --git a/pom.xml b/pom.xml index 0c5541cac5..16eb507cce 100644 --- a/pom.xml +++ b/pom.xml @@ -48,4 +48,12 @@ kafka 7.5.0-0 + + + + junit + junit + test + + diff --git a/server-connect-base/pom.xml b/server-connect-base/pom.xml index 43152e277d..88e137fa9e 100644 --- a/server-connect-base/pom.xml +++ b/server-connect-base/pom.xml @@ -34,12 +34,4 @@ false true - - - - junit - junit - test - - diff --git a/server-connect/pom.xml b/server-connect/pom.xml index 12a05b18b9..414b41c86b 100644 --- a/server-connect/pom.xml +++ b/server-connect/pom.xml @@ -34,12 +34,4 @@ false true - - - - junit - junit - test - - diff 
--git a/server/pom.xml b/server/pom.xml index 0aa2fb0008..d76a9285a6 100644 --- a/server/pom.xml +++ b/server/pom.xml @@ -34,12 +34,4 @@ false true - - - - junit - junit - test - -
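For completeness, the OkHttp-based Producer lands on the same trust setup in OkHttp terms. A sketch assuming the OkHttp 4.x API pinned in the pom; the class name is illustrative, and the lambda verifier is the test-only equivalent of the anonymous HostnameVerifier added in PATCH 39.

```java
import java.io.InputStream;
import java.security.KeyStore;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;
import okhttp3.OkHttpClient;

public class OkHttpTlsSketch {
    static OkHttpClient build() throws Exception {
        // Same classpath truststore and password as the Apache HttpClient paths.
        KeyStore truststore = KeyStore.getInstance(KeyStore.getDefaultType());
        try (InputStream in = OkHttpTlsSketch.class
                .getResourceAsStream("/client-creds/client-truststore.jks")) {
            truststore.load(in, "confluent".toCharArray());
        }
        TrustManagerFactory tmf =
                TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
        tmf.init(truststore);
        X509TrustManager tm = (X509TrustManager) tmf.getTrustManagers()[0];
        SSLContext sslContext = SSLContext.getInstance("TLS");
        sslContext.init(null, tmf.getTrustManagers(), null);
        return new OkHttpClient.Builder()
                // OkHttp wants the trust manager alongside the socket factory.
                .sslSocketFactory(sslContext.getSocketFactory(), tm)
                .hostnameVerifier((hostname, session) -> true) // test-only: skip hostname checks
                .build();
    }
}
```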