From 75c1ab11db4165bb16caffca056b93f2909c3fd7 Mon Sep 17 00:00:00 2001 From: vruttantmankad Date: Thu, 18 Apr 2024 18:06:30 +0530 Subject: [PATCH 01/26] added support for acl auth --- src/ziggurat/config.clj | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/src/ziggurat/config.clj b/src/ziggurat/config.clj index 9db7afaf..2e17def7 100644 --- a/src/ziggurat/config.clj +++ b/src/ziggurat/config.clj @@ -6,7 +6,8 @@ [mount.core :refer [defstate]] [ziggurat.util.java-util :as util]) (:import (java.util Properties) - [org.apache.kafka.common.config SaslConfigs]) + [org.apache.kafka.common.config SaslConfigs] + [org.apache.kafka.clients CommonClientConfigs]) (:gen-class :methods [^{:static true} [get [String] Object] @@ -199,7 +200,8 @@ (def jaas-template {"PLAIN" "org.apache.kafka.common.security.plain.PlainLoginModule" - "SCRAM-SHA-512" "org.apache.kafka.common.security.scram.ScramLoginModule"}) + "SCRAM-SHA-512" "org.apache.kafka.common.security.scram.ScramLoginModule" + "SCRAM-SHA-256" "org.apache.kafka.common.security.scram.ScramLoginModule"}) (defn create-jaas-properties [user-name password mechanism] @@ -211,10 +213,13 @@ (if (some? jaas-config) (let [username (get jaas-config :username) password (get jaas-config :password) - mechanism (get jaas-config :mechanism)] + mechanism (get jaas-config :mechanism) + protocol (get jaas-config :protocol) + jaas_props (create-jaas-properties username password mechanism)] (doto properties - (.put SaslConfigs/SASL_JAAS_CONFIG - (create-jaas-properties username password mechanism)))) + (.put SaslConfigs/SASL_JAAS_CONFIG jaas_props) + (.put SaslConfigs/SASL_MECHANISM mechanism) + (.put CommonClientConfigs/SECURITY_PROTOCOL_CONFIG protocol))) properties)) (defn build-ssl-properties From 8b91956e443ed1f552b6ca7930a409a6fbffd9b1 Mon Sep 17 00:00:00 2001 From: vruttantmankad Date: Fri, 19 Apr 2024 16:45:36 +0530 Subject: [PATCH 02/26] fix tests --- src/ziggurat/config.clj | 3 ++- test/ziggurat/config_test.clj | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/ziggurat/config.clj b/src/ziggurat/config.clj index 2e17def7..4cfedcc7 100644 --- a/src/ziggurat/config.clj +++ b/src/ziggurat/config.clj @@ -239,7 +239,8 @@ :ssl-keystore-password <> {:jaas {:username <> :password <> - :mechanism <>}}} + :mechanism <> + :protocol <>}}} " (let [ssl-configs-enabled (:enabled ssl-config-map) jaas-config (get ssl-config-map :jaas)] diff --git a/test/ziggurat/config_test.clj b/test/ziggurat/config_test.clj index 1337bdd4..ac13c51a 100644 --- a/test/ziggurat/config_test.clj +++ b/test/ziggurat/config_test.clj @@ -323,7 +323,8 @@ :ssl-keystore-password "some-password" :jaas {:username "myuser" :password "mypassword" - :mechanism "SCRAM-SHA-512"}})] + :mechanism "SCRAM-SHA-512" + :protocol "PLAINTEXT"}})] (let [streams-config-map {:auto-offset-reset :latest} props (build-streams-config-properties streams-config-map) auto-offset-reset (.getProperty props "auto.offset.reset") From 6287bca210064d9fd0f6d1d21936126c01762f39 Mon Sep 17 00:00:00 2001 From: vruttantmankad Date: Tue, 23 Apr 2024 14:40:50 +0530 Subject: [PATCH 03/26] code refactor --- src/ziggurat/config.clj | 50 ++++++++++++++++++++++++++++++----- test/ziggurat/config_test.clj | 21 ++++++++++++--- 2 files changed, 62 insertions(+), 9 deletions(-) diff --git a/src/ziggurat/config.clj b/src/ziggurat/config.clj index 4cfedcc7..2a03f8be 100644 --- a/src/ziggurat/config.clj +++ b/src/ziggurat/config.clj @@ -96,6 +96,9 @@ (defn ssl-config [] (get-in config [:ziggurat 
:ssl])) +(defn sasl-config [] + (get-in config [:ziggurat :sasl])) + (defn rabbitmq-config [] (get (ziggurat-config) :rabbit-mq)) @@ -214,12 +217,17 @@ (let [username (get jaas-config :username) password (get jaas-config :password) mechanism (get jaas-config :mechanism) - protocol (get jaas-config :protocol) jaas_props (create-jaas-properties username password mechanism)] (doto properties - (.put SaslConfigs/SASL_JAAS_CONFIG jaas_props) - (.put SaslConfigs/SASL_MECHANISM mechanism) - (.put CommonClientConfigs/SECURITY_PROTOCOL_CONFIG protocol))) + (.put SaslConfigs/SASL_JAAS_CONFIG jaas_props))) + properties)) + +(defn- add-sasl-properties + [properties mechanism protocol] + (if (and (some? mechanism) (some? protocol)) + (doto properties + (.put SaslConfigs/SASL_MECHANISM mechanism) + (.put CommonClientConfigs/SECURITY_PROTOCOL_CONFIG protocol)) properties)) (defn build-ssl-properties @@ -250,6 +258,35 @@ (reduce-kv set-property-fn pr ssl-config-map)) properties))) +(defn build-sasl-properties + [properties set-property-fn sasl-config-map] + "Builds SASL properties from sasl-config-map which is a map where keys are + Clojure keywords in kebab case. These keys are converted to Kafka properties by set-property-fn. + + SASL properties are only set if [:ziggurat :sasl :enabled] returns true. + + Creates JAAS template if values are provided in the map provided agains this key sequence + [:ziggurat :ssl :jaas]. + + Example of sasl-config-map + {:enabled true + :protocol <> + {:jaas + {:username <> + :password <> + :mechanism}}} + " + (let [sasl-configs-enabled (:enabled sasl-config-map) + jaas-config (get sasl-config-map :jaas) + mechanism (get jaas-config :mechanism) + protocol (get sasl-config-map :protocol)] + (if (true? sasl-configs-enabled) + (as-> properties pr + (add-jaas-properties pr jaas-config) + (add-sasl-properties pr mechanism protocol) + (reduce-kv set-property-fn pr sasl-config-map)) + properties))) + (defn build-properties "Builds Properties object from the provided config-map which is a map where keys are Clojure keywords in kebab case. These keys are converted to Kafka properties by set-property-fn. @@ -270,8 +307,9 @@ " [set-property-fn config-map] (as-> (Properties.) pr - (build-ssl-properties pr set-property-fn (ssl-config)) - (reduce-kv set-property-fn pr config-map))) + (build-ssl-properties pr set-property-fn (ssl-config)) + (build-sasl-properties pr set-property-fn (sasl-config)) + (reduce-kv set-property-fn pr config-map))) (def build-consumer-config-properties (partial build-properties (partial set-property consumer-config-mapping-table))) diff --git a/test/ziggurat/config_test.clj b/test/ziggurat/config_test.clj index ac13c51a..dfbc708a 100644 --- a/test/ziggurat/config_test.clj +++ b/test/ziggurat/config_test.clj @@ -21,6 +21,7 @@ statsd-config ziggurat-config ssl-config + sasl-config create-jaas-properties]] [ziggurat.fixtures :as f]) (:import (java.util ArrayList Properties))) @@ -323,8 +324,7 @@ :ssl-keystore-password "some-password" :jaas {:username "myuser" :password "mypassword" - :mechanism "SCRAM-SHA-512" - :protocol "PLAINTEXT"}})] + :mechanism "SCRAM-SHA-512"}})] (let [streams-config-map {:auto-offset-reset :latest} props (build-streams-config-properties streams-config-map) auto-offset-reset (.getProperty props "auto.offset.reset") @@ -348,7 +348,22 @@ (is (= auto-offset-reset "latest")) (is (= ssl-ks-location "/some/location")) (is (= ssl-ks-password "some-password")) - (is (nil? sasl-jaas-config))))))) + (is (nil? 
sasl-jaas-config))))) + (testing "sasl properties create jaas template from the map provided in [:ziggurat :sasl :jaas]" + (with-redefs [sasl-config (constantly {:enabled true + :protocol "SASL_PLAINTEXT" + :jaas {:username "myuser" + :password "mypassword" + :mechanism "SCRAM-SHA-256"}})] + (let [streams-config-map {:auto-offset-reset :latest} + props (build-streams-config-properties streams-config-map) + auto-offset-reset (.getProperty props "auto.offset.reset") + sasl-jaas-config (.getProperty props "sasl.jaas.config") + sasl-protocol (.getProperty props "security.protocol") + sasl-mechanism (.getProperty props "sasl.mechanism")] + (is (= auto-offset-reset "latest")) + (is (= sasl-protocol "SASL_PLAINTEXT")) + (is (= sasl-jaas-config (create-jaas-properties "myuser" "mypassword" "SCRAM-SHA-256")))))))) (deftest test-set-property (testing "set-property with empty (with spaces) value" From 92a2edbe9e110b4aae9fc5782a58732991730ec6 Mon Sep 17 00:00:00 2001 From: vruttantmankad Date: Mon, 29 Apr 2024 15:37:30 +0530 Subject: [PATCH 04/26] login module as config --- src/ziggurat/config.clj | 23 +++++++++-------------- test/ziggurat/config_test.clj | 10 ++++++---- 2 files changed, 15 insertions(+), 18 deletions(-) diff --git a/src/ziggurat/config.clj b/src/ziggurat/config.clj index 2a03f8be..8ca3484f 100644 --- a/src/ziggurat/config.clj +++ b/src/ziggurat/config.clj @@ -201,23 +201,17 @@ (.setProperty p sk nv)))) p) -(def jaas-template - {"PLAIN" "org.apache.kafka.common.security.plain.PlainLoginModule" - "SCRAM-SHA-512" "org.apache.kafka.common.security.scram.ScramLoginModule" - "SCRAM-SHA-256" "org.apache.kafka.common.security.scram.ScramLoginModule"}) - (defn create-jaas-properties - [user-name password mechanism] - (let [jaas-template (get jaas-template mechanism)] - (format "%s required username=\"%s\" password=\"%s\";" jaas-template user-name password))) + [user-name password login-module] + (format "%s required username=\"%s\" password=\"%s\";" login-module user-name password)) (defn- add-jaas-properties [properties jaas-config] (if (some? jaas-config) (let [username (get jaas-config :username) password (get jaas-config :password) - mechanism (get jaas-config :mechanism) - jaas_props (create-jaas-properties username password mechanism)] + login-module (get jaas-config :login-module) + jaas_props (create-jaas-properties username password login-module)] (doto properties (.put SaslConfigs/SASL_JAAS_CONFIG jaas_props))) properties)) @@ -265,20 +259,21 @@ SASL properties are only set if [:ziggurat :sasl :enabled] returns true. - Creates JAAS template if values are provided in the map provided agains this key sequence - [:ziggurat :ssl :jaas]. + Creates JAAS template if values are provided in the map provided against this key sequence + [:ziggurat :sasl :jaas]. Example of sasl-config-map {:enabled true :protocol <> + :mechanism <> {:jaas {:username <> :password <> - :mechanism}}} + :login-module <>}}} " (let [sasl-configs-enabled (:enabled sasl-config-map) jaas-config (get sasl-config-map :jaas) - mechanism (get jaas-config :mechanism) + mechanism (get sasl-config-map :mechanism) protocol (get sasl-config-map :protocol)] (if (true? 
sasl-configs-enabled) (as-> properties pr diff --git a/test/ziggurat/config_test.clj b/test/ziggurat/config_test.clj index dfbc708a..48b76b93 100644 --- a/test/ziggurat/config_test.clj +++ b/test/ziggurat/config_test.clj @@ -322,9 +322,10 @@ (with-redefs [ssl-config (constantly {:enabled true :ssl-keystore-location "/some/location" :ssl-keystore-password "some-password" + :mechanism "SCRAM-SHA-512" :jaas {:username "myuser" :password "mypassword" - :mechanism "SCRAM-SHA-512"}})] + :login-module "org.apache.kafka.common.security.scram.ScramLoginModule"}})] (let [streams-config-map {:auto-offset-reset :latest} props (build-streams-config-properties streams-config-map) auto-offset-reset (.getProperty props "auto.offset.reset") @@ -334,7 +335,7 @@ (is (= auto-offset-reset "latest")) (is (= ssl-ks-location "/some/location")) (is (= ssl-ks-password "some-password")) - (is (= sasl-jaas-config (create-jaas-properties "myuser" "mypassword" "SCRAM-SHA-512")))))) + (is (= sasl-jaas-config (create-jaas-properties "myuser" "mypassword" "org.apache.kafka.common.security.scram.ScramLoginModule")))))) (testing "ssl properties DO NOT create jaas template if no value is provided for key sequence [:ziggurat :ssl :jaas]" (with-redefs [ssl-config (constantly {:enabled true :ssl-keystore-location "/some/location" @@ -352,9 +353,10 @@ (testing "sasl properties create jaas template from the map provided in [:ziggurat :sasl :jaas]" (with-redefs [sasl-config (constantly {:enabled true :protocol "SASL_PLAINTEXT" + :mechanism "SCRAM-SHA-256" :jaas {:username "myuser" :password "mypassword" - :mechanism "SCRAM-SHA-256"}})] + :login-module "org.apache.kafka.common.security.scram.ScramLoginModule"}})] (let [streams-config-map {:auto-offset-reset :latest} props (build-streams-config-properties streams-config-map) auto-offset-reset (.getProperty props "auto.offset.reset") @@ -363,7 +365,7 @@ sasl-mechanism (.getProperty props "sasl.mechanism")] (is (= auto-offset-reset "latest")) (is (= sasl-protocol "SASL_PLAINTEXT")) - (is (= sasl-jaas-config (create-jaas-properties "myuser" "mypassword" "SCRAM-SHA-256")))))))) + (is (= sasl-jaas-config (create-jaas-properties "myuser" "mypassword" "org.apache.kafka.common.security.scram.ScramLoginModule")))))))) (deftest test-set-property (testing "set-property with empty (with spaces) value" From 199fc2d4f3cdee73ae1c6d4a826ce279c897cb9f Mon Sep 17 00:00:00 2001 From: vruttantmankad Date: Mon, 29 Apr 2024 15:42:53 +0530 Subject: [PATCH 05/26] lint fix --- test/ziggurat/config_test.clj | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/ziggurat/config_test.clj b/test/ziggurat/config_test.clj index 48b76b93..581056df 100644 --- a/test/ziggurat/config_test.clj +++ b/test/ziggurat/config_test.clj @@ -354,9 +354,9 @@ (with-redefs [sasl-config (constantly {:enabled true :protocol "SASL_PLAINTEXT" :mechanism "SCRAM-SHA-256" - :jaas {:username "myuser" - :password "mypassword" - :login-module "org.apache.kafka.common.security.scram.ScramLoginModule"}})] + :jaas {:username "myuser" + :password "mypassword" + :login-module "org.apache.kafka.common.security.scram.ScramLoginModule"}})] (let [streams-config-map {:auto-offset-reset :latest} props (build-streams-config-properties streams-config-map) auto-offset-reset (.getProperty props "auto.offset.reset") From 7943cf60ecf5d59d1154b50f0f9c2ee919b30e66 Mon Sep 17 00:00:00 2001 From: vruttantmankad Date: Mon, 29 Apr 2024 15:58:15 +0530 Subject: [PATCH 06/26] fix lint --- src/ziggurat/config.clj | 12 ++++++------ 1 file 
changed, 6 insertions(+), 6 deletions(-) diff --git a/src/ziggurat/config.clj b/src/ziggurat/config.clj index 8ca3484f..838f1a74 100644 --- a/src/ziggurat/config.clj +++ b/src/ziggurat/config.clj @@ -277,9 +277,9 @@ protocol (get sasl-config-map :protocol)] (if (true? sasl-configs-enabled) (as-> properties pr - (add-jaas-properties pr jaas-config) - (add-sasl-properties pr mechanism protocol) - (reduce-kv set-property-fn pr sasl-config-map)) + (add-jaas-properties pr jaas-config) + (add-sasl-properties pr mechanism protocol) + (reduce-kv set-property-fn pr sasl-config-map)) properties))) (defn build-properties @@ -302,9 +302,9 @@ " [set-property-fn config-map] (as-> (Properties.) pr - (build-ssl-properties pr set-property-fn (ssl-config)) - (build-sasl-properties pr set-property-fn (sasl-config)) - (reduce-kv set-property-fn pr config-map))) + (build-ssl-properties pr set-property-fn (ssl-config)) + (build-sasl-properties pr set-property-fn (sasl-config)) + (reduce-kv set-property-fn pr config-map))) (def build-consumer-config-properties (partial build-properties (partial set-property consumer-config-mapping-table))) From fdd4c0481882fe4db935727aa38337f1538118d8 Mon Sep 17 00:00:00 2001 From: vruttantmankad Date: Mon, 29 Apr 2024 16:03:09 +0530 Subject: [PATCH 07/26] fix ssl config map --- src/ziggurat/config.clj | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/ziggurat/config.clj b/src/ziggurat/config.clj index 838f1a74..663ed020 100644 --- a/src/ziggurat/config.clj +++ b/src/ziggurat/config.clj @@ -239,16 +239,20 @@ {:enabled true :ssl-keystore-location <> :ssl-keystore-password <> + :mechanism <> + :protocol <> {:jaas {:username <> :password <> - :mechanism <> - :protocol <>}}} + :login-module <>}}} " (let [ssl-configs-enabled (:enabled ssl-config-map) - jaas-config (get ssl-config-map :jaas)] + jaas-config (get ssl-config-map :jaas) + mechanism (get ssl-config-map :mechanism) + protocol (get ssl-config-map :protocol)] (if (true? 
ssl-configs-enabled) (as-> properties pr (add-jaas-properties pr jaas-config) + (add-sasl-properties pr mechanism protocol) (reduce-kv set-property-fn pr ssl-config-map)) properties))) From 8e517b34749894c7097bb7dc4f4f6f999a4a17e6 Mon Sep 17 00:00:00 2001 From: Akash Chhabra Date: Tue, 15 Oct 2024 16:07:13 +0700 Subject: [PATCH 08/26] Linter Fixes --- project.clj | 4 +- src/ziggurat/config.clj | 51 ++++++++----- test/ziggurat/config_test.clj | 130 ++++++++++++++++++---------------- 3 files changed, 106 insertions(+), 79 deletions(-) diff --git a/project.clj b/project.clj index 64a42a61..92032332 100644 --- a/project.clj +++ b/project.clj @@ -54,7 +54,9 @@ [ch.qos.logback.contrib/logback-jackson "0.1.5"] [net.logstash.logback/logstash-logback-encoder "6.6"] [clj-commons/iapetos "0.1.9"] - [org.apache.commons/commons-pool2 "2.11.1"]] + [org.apache.commons/commons-pool2 "2.11.1"] + [com.github.jnr/jffi "1.3.12"] + [com.github.jnr/jnr-unixsocket "0.38.21"]] :deploy-repositories [["clojars" {:url "https://clojars.org/repo" :username :env/clojars_username :password :env/clojars_password diff --git a/src/ziggurat/config.clj b/src/ziggurat/config.clj index 663ed020..eca4ce53 100644 --- a/src/ziggurat/config.clj +++ b/src/ziggurat/config.clj @@ -6,7 +6,7 @@ [mount.core :refer [defstate]] [ziggurat.util.java-util :as util]) (:import (java.util Properties) - [org.apache.kafka.common.config SaslConfigs] + [org.apache.kafka.common.config SaslConfigs SslConfigs] [org.apache.kafka.clients CommonClientConfigs]) (:gen-class :methods @@ -14,6 +14,8 @@ ^{:static true} [getIn [java.lang.Iterable] Object]] :name tech.gojek.ziggurat.internal.Config)) +(def DEFAULT-LOGIN-CALLBACK-HANDLER "io.gtflabs.kafka.security.oauthbearer.kubernetes.PodLoginCallbackHandler") + (def config-file "config.edn") (def default-config @@ -201,9 +203,18 @@ (.setProperty p sk nv)))) p) -(defn create-jaas-properties - [user-name password login-module] - (format "%s required username=\"%s\" password=\"%s\";" login-module user-name password)) +(defn create-jaas-properties [user-name password login-module] + (let [username-str (if user-name (format " username=\"%s\"" user-name) "") + password-str (if password (format " password=\"%s\"" password) "") + credentials (str username-str password-str)] + (format "%s required%s;" login-module (if (empty? credentials) "" credentials)))) + +(defn- add-ssl-properties + [properties ssl-config-map] + (doto properties + (.put SslConfigs/SSL_TRUSTSTORE_LOCATION_CONFIG (:ssl-truststore-location ssl-config-map "/etc/kafka/certs/truststore.p12")) + (.put SslConfigs/SSL_TRUSTSTORE_PASSWORD_CONFIG (:ssl-truststore-password ssl-config-map))) + properties) (defn- add-jaas-properties [properties jaas-config] @@ -217,12 +228,11 @@ properties)) (defn- add-sasl-properties - [properties mechanism protocol] - (if (and (some? mechanism) (some? protocol)) - (doto properties - (.put SaslConfigs/SASL_MECHANISM mechanism) - (.put CommonClientConfigs/SECURITY_PROTOCOL_CONFIG protocol)) - properties)) + [properties mechanism protocol login-callback-handler] + (when (some? mechanism) (.put properties SaslConfigs/SASL_MECHANISM mechanism)) + (when (some? protocol) (.put properties CommonClientConfigs/SECURITY_PROTOCOL_CONFIG protocol)) + (when (some? login-callback-handler) (.put properties SaslConfigs/SASL_LOGIN_CALLBACK_HANDLER_CLASS login-callback-handler)) + properties) (defn build-ssl-properties [properties set-property-fn ssl-config-map] @@ -251,8 +261,9 @@ protocol (get ssl-config-map :protocol)] (if (true? 
ssl-configs-enabled) (as-> properties pr + (add-ssl-properties pr ssl-config-map) (add-jaas-properties pr jaas-config) - (add-sasl-properties pr mechanism protocol) + (add-sasl-properties pr mechanism protocol nil) (reduce-kv set-property-fn pr ssl-config-map)) properties))) @@ -275,14 +286,15 @@ :password <> :login-module <>}}} " - (let [sasl-configs-enabled (:enabled sasl-config-map) - jaas-config (get sasl-config-map :jaas) - mechanism (get sasl-config-map :mechanism) - protocol (get sasl-config-map :protocol)] + (let [sasl-configs-enabled (:enabled sasl-config-map) + jaas-config (get sasl-config-map :jaas) + mechanism (get sasl-config-map :mechanism "OAUTHBEARER") + protocol (get sasl-config-map :protocol "SASL_SSL") + login-callback-handler (get sasl-config-map :login-callback-handler DEFAULT-LOGIN-CALLBACK-HANDLER)] (if (true? sasl-configs-enabled) (as-> properties pr (add-jaas-properties pr jaas-config) - (add-sasl-properties pr mechanism protocol) + (add-sasl-properties pr mechanism protocol login-callback-handler) (reduce-kv set-property-fn pr sasl-config-map)) properties))) @@ -324,3 +336,10 @@ (defn get-channel-retry-count [topic-entity channel] (:count (channel-retry-config topic-entity channel))) + +;; 1. Bump up kafka version to 3.7.0 +;; 2. Introduce changes for sasl and ssl properties: +;; 2.1 + + +;;ZIGGURAT_ \ No newline at end of file diff --git a/test/ziggurat/config_test.clj b/test/ziggurat/config_test.clj index 581056df..d6ae535f 100644 --- a/test/ziggurat/config_test.clj +++ b/test/ziggurat/config_test.clj @@ -155,10 +155,10 @@ (mount/stop))))) (deftest test-build-properties - (let [config-mapping-table (merge consumer-config-mapping-table - producer-config-mapping-table - streams-config-mapping-table) - set-all-property (partial set-property config-mapping-table) + (let [config-mapping-table (merge consumer-config-mapping-table + producer-config-mapping-table + streams-config-mapping-table) + set-all-property (partial set-property config-mapping-table) build-all-config-properties (partial build-properties set-all-property)] (testing "all valid kafka configs" (let [config-map {:auto-offset-reset :latest @@ -187,20 +187,20 @@ (is (= auto-commit-interval-ms "NOT FOUND")) (is (= commit-interval-ms "5000")))) (testing "mapping table for backward compatibility" - (let [config-map {:auto-offset-reset-config "latest" - :changelog-topic-replication-factor 2 - :commit-interval-ms 20000 - :consumer-group-id "foo" - :default-api-timeout-ms-config 3000 - :default-key-serde "key-serde" - :default-value-serde "value-serde" - :key-deserializer-class-config "key-deserializer" - :key-serializer-class "key-serializer" - :retries-config 5 - :session-timeout-ms-config 4000 - :stream-threads-count 4 - :value-deserializer-class-config "value-deserializer" - :value-serializer-class "value-serializer"} + (let [config-map {:auto-offset-reset-config "latest" + :changelog-topic-replication-factor 2 + :commit-interval-ms 20000 + :consumer-group-id "foo" + :default-api-timeout-ms-config 3000 + :default-key-serde "key-serde" + :default-value-serde "value-serde" + :key-deserializer-class-config "key-deserializer" + :key-serializer-class "key-serializer" + :retries-config 5 + :session-timeout-ms-config 4000 + :stream-threads-count 4 + :value-deserializer-class-config "value-deserializer" + :value-serializer-class "value-serializer"} props (build-all-config-properties config-map) auto-offset-reset (.getProperty props "auto.offset.reset") auto-commit-interval-ms (.getProperty props 
"auto.commit.interval.ms") @@ -245,11 +245,12 @@ (is (= v not-found)))) config-map)))) (testing "should set ssl properties for streams if enabled is set to true" - (with-redefs [ssl-config (constantly {:enabled true - :ssl-keystore-location "/some/location" - :ssl-keystore-password "some-password"})] - (let [streams-config-map {:auto-offset-reset :latest - :group-id "foo"} + (with-redefs [ssl-config (constantly {:enabled true + :ssl-keystore-location "/some/location" + :ssl-keystore-password "some-password" + :ssl-truststore-password "some-password-2"})] + (let [streams-config-map {:auto-offset-reset :latest + :group-id "foo"} props (build-streams-config-properties streams-config-map) auto-offset-reset (.getProperty props "auto.offset.reset") group-id (.getProperty props "group.id") @@ -260,9 +261,10 @@ (is (= ssl-ks-location "/some/location")) (is (= ssl-ks-password "some-password"))))) (testing "should set ssl properties for consumer API if enabled is set to true" - (with-redefs [ssl-config (constantly {:enabled true - :ssl-keystore-location "/some/location" - :ssl-keystore-password "some-password"})] + (with-redefs [ssl-config (constantly {:enabled true + :ssl-keystore-location "/some/location" + :ssl-keystore-password "some-password" + :ssl-truststore-password "some-password-2"})] (let [streams-config-map {:max-poll-records 500 :enable-auto-commit true} props (build-consumer-config-properties streams-config-map) @@ -270,31 +272,32 @@ enable-auto-comit (.getProperty props "enable.auto.commit") ssl-ks-location (.getProperty props "ssl.keystore.location") ssl-ks-password (.getProperty props "ssl.keystore.password")] - (is (= max-poll-records "500")) + (is (= max-poll-records "500")) (is (= enable-auto-comit "true")) (is (= ssl-ks-location "/some/location")) (is (= ssl-ks-password "some-password"))))) (testing "should set ssl properties for producer API if enabled is set to true" - (with-redefs [ssl-config (constantly {:enabled true - :ssl-keystore-location "/some/location" - :ssl-keystore-password "some-password"})] - (let [streams-config-map {:batch.size 500 - :acks 1} + (with-redefs [ssl-config (constantly {:enabled true + :ssl-keystore-location "/some/location" + :ssl-keystore-password "some-password" + :ssl-truststore-password "some-password-2"})] + (let [streams-config-map {:batch.size 500 + :acks 1} props (build-producer-config-properties streams-config-map) batch-size (.getProperty props "batch.size") acks (.getProperty props "acks") ssl-ks-location (.getProperty props "ssl.keystore.location") ssl-ks-password (.getProperty props "ssl.keystore.password")] - (is (= batch-size "500")) + (is (= batch-size "500")) (is (= acks "1")) (is (= ssl-ks-location "/some/location")) (is (= ssl-ks-password "some-password"))))) (testing "should not set ssl properties for streams if eenabled is set to false" - (with-redefs [ssl-config (constantly {:enabled false + (with-redefs [ssl-config (constantly {:enabled false :ssl-keystore-location "/some/location" :ssl-keystore-password "some-password"})] - (let [streams-config-map {:auto-offset-reset :latest - :group-id "foo"} + (let [streams-config-map {:auto-offset-reset :latest + :group-id "foo"} props (build-streams-config-properties streams-config-map) auto-offset-reset (.getProperty props "auto.offset.reset") group-id (.getProperty props "group.id") @@ -305,10 +308,11 @@ (is (nil? ssl-ks-location)) (is (nil? 
ssl-ks-password))))) (testing "ssl properties from streams config map overrides the ssl properties provided in [:ziggurat :ssl]" - (with-redefs [ssl-config (constantly {:enabled true - :ssl-keystore-location "/some/location" - :ssl-keystore-password "some-password"})] - (let [streams-config-map {:auto-offset-reset :latest + (with-redefs [ssl-config (constantly {:enabled true + :ssl-keystore-location "/some/location" + :ssl-keystore-password "some-password" + :ssl-truststore-password "some-password-2"})] + (let [streams-config-map {:auto-offset-reset :latest :ssl-keystore-location "/some/different/location" :ssl-keystore-password "different-password"} props (build-streams-config-properties streams-config-map) @@ -319,45 +323,47 @@ (is (= ssl-ks-location "/some/different/location")) (is (= ssl-ks-password "different-password"))))) (testing "ssl properties create jaas template from the map provided in [:ziggurat :ssl :jaas]" - (with-redefs [ssl-config (constantly {:enabled true - :ssl-keystore-location "/some/location" - :ssl-keystore-password "some-password" - :mechanism "SCRAM-SHA-512" - :jaas {:username "myuser" - :password "mypassword" - :login-module "org.apache.kafka.common.security.scram.ScramLoginModule"}})] - (let [streams-config-map {:auto-offset-reset :latest} + (with-redefs [ssl-config (constantly {:enabled true + :ssl-keystore-location "/some/location" + :ssl-keystore-password "some-password" + :ssl-truststore-password "some-password-2" + :mechanism "SCRAM-SHA-512" + :jaas {:username "myuser" + :password "mypassword" + :login-module "org.apache.kafka.common.security.scram.ScramLoginModule"}})] + (let [streams-config-map {:auto-offset-reset :latest} props (build-streams-config-properties streams-config-map) auto-offset-reset (.getProperty props "auto.offset.reset") ssl-ks-location (.getProperty props "ssl.keystore.location") ssl-ks-password (.getProperty props "ssl.keystore.password") sasl-jaas-config (.getProperty props "sasl.jaas.config")] (is (= auto-offset-reset "latest")) - (is (= ssl-ks-location "/some/location")) - (is (= ssl-ks-password "some-password")) + (is (= ssl-ks-location "/some/location")) + (is (= ssl-ks-password "some-password")) (is (= sasl-jaas-config (create-jaas-properties "myuser" "mypassword" "org.apache.kafka.common.security.scram.ScramLoginModule")))))) (testing "ssl properties DO NOT create jaas template if no value is provided for key sequence [:ziggurat :ssl :jaas]" - (with-redefs [ssl-config (constantly {:enabled true - :ssl-keystore-location "/some/location" - :ssl-keystore-password "some-password"})] - (let [streams-config-map {:auto-offset-reset :latest} + (with-redefs [ssl-config (constantly {:enabled true + :ssl-keystore-location "/some/location" + :ssl-keystore-password "some-password" + :ssl-truststore-password "some-password-2"})] + (let [streams-config-map {:auto-offset-reset :latest} props (build-streams-config-properties streams-config-map) auto-offset-reset (.getProperty props "auto.offset.reset") ssl-ks-location (.getProperty props "ssl.keystore.location") ssl-ks-password (.getProperty props "ssl.keystore.password") sasl-jaas-config (.getProperty props "sasl.jaas.config")] (is (= auto-offset-reset "latest")) - (is (= ssl-ks-location "/some/location")) - (is (= ssl-ks-password "some-password")) + (is (= ssl-ks-location "/some/location")) + (is (= ssl-ks-password "some-password")) (is (nil? 
sasl-jaas-config))))) (testing "sasl properties create jaas template from the map provided in [:ziggurat :sasl :jaas]" - (with-redefs [sasl-config (constantly {:enabled true - :protocol "SASL_PLAINTEXT" + (with-redefs [sasl-config (constantly {:enabled true + :protocol "SASL_PLAINTEXT" :mechanism "SCRAM-SHA-256" - :jaas {:username "myuser" - :password "mypassword" - :login-module "org.apache.kafka.common.security.scram.ScramLoginModule"}})] - (let [streams-config-map {:auto-offset-reset :latest} + :jaas {:username "myuser" + :password "mypassword" + :login-module "org.apache.kafka.common.security.scram.ScramLoginModule"}})] + (let [streams-config-map {:auto-offset-reset :latest} props (build-streams-config-properties streams-config-map) auto-offset-reset (.getProperty props "auto.offset.reset") sasl-jaas-config (.getProperty props "sasl.jaas.config") From 7e43f7fb6039ef0882d73f6a5df3c71a4aebc23b Mon Sep 17 00:00:00 2001 From: Akash Chhabra Date: Wed, 16 Oct 2024 16:08:57 +0700 Subject: [PATCH 09/26] Removes default values --- project.clj | 10 +++++----- src/ziggurat/config.clj | 36 +++++++++++++++--------------------- 2 files changed, 20 insertions(+), 26 deletions(-) diff --git a/project.clj b/project.clj index 92032332..5e7ddf55 100644 --- a/project.clj +++ b/project.clj @@ -2,7 +2,7 @@ (cemerick.pomegranate.aether/register-wagon-factory! "http" #(org.apache.maven.wagon.providers.http.HttpWagon.)) -(defproject tech.gojek/ziggurat "4.11.1" +(defproject tech.gojek/ziggurat "5.0.0" :description "A stream processing framework to build stateless applications on kafka" :url "https://github.com/gojektech/ziggurat" :license {:name "Apache License, Version 2.0" @@ -23,8 +23,8 @@ [io.jaegertracing/jaeger-core "1.6.0"] [io.jaegertracing/jaeger-client "1.6.0"] [org.apache.httpcomponents/fluent-hc "4.5.13"] - [org.apache.kafka/kafka-clients "2.8.2" :exclusions [org.slf4j/slf4j-log4j12 log4j]] - [org.apache.kafka/kafka-streams "2.8.2" :exclusions [org.slf4j/slf4j-log4j12 log4j]] + [org.apache.kafka/kafka-clients "3.7.0" :exclusions [org.slf4j/slf4j-log4j12 log4j]] + [org.apache.kafka/kafka-streams "3.7.0" :exclusions [org.slf4j/slf4j-log4j12 log4j]] [org.clojure/clojure "1.10.3"] [org.clojure/tools.logging "1.1.0"] [nrepl/nrepl "0.8.3"] @@ -74,8 +74,8 @@ :dependencies [[com.google.protobuf/protobuf-java "3.17.0"] [junit/junit "4.13.2"] [org.hamcrest/hamcrest-core "2.2"] - [org.apache.kafka/kafka-streams "2.8.2" :classifier "test" :exclusions [org.slf4j/slf4j-log4j12 log4j]] - [org.apache.kafka/kafka-clients "2.8.2" :classifier "test"] + [org.apache.kafka/kafka-streams "3.7.0" :classifier "test" :exclusions [org.slf4j/slf4j-log4j12 log4j]] + [org.apache.kafka/kafka-clients "3.7.0" :classifier "test"] [org.clojure/test.check "1.1.0"]] :plugins [[lein-cloverage "1.2.2" :exclusions [org.clojure/clojure]]] :cloverage {:exclude-call ['cambium.core/info diff --git a/src/ziggurat/config.clj b/src/ziggurat/config.clj index eca4ce53..7fed9b2a 100644 --- a/src/ziggurat/config.clj +++ b/src/ziggurat/config.clj @@ -14,8 +14,6 @@ ^{:static true} [getIn [java.lang.Iterable] Object]] :name tech.gojek.ziggurat.internal.Config)) -(def DEFAULT-LOGIN-CALLBACK-HANDLER "io.gtflabs.kafka.security.oauthbearer.kubernetes.PodLoginCallbackHandler") - (def config-file "config.edn") (def default-config @@ -211,9 +209,10 @@ (defn- add-ssl-properties [properties ssl-config-map] - (doto properties - (.put SslConfigs/SSL_TRUSTSTORE_LOCATION_CONFIG (:ssl-truststore-location ssl-config-map "/etc/kafka/certs/truststore.p12")) - (.put 
SslConfigs/SSL_TRUSTSTORE_PASSWORD_CONFIG (:ssl-truststore-password ssl-config-map))) + (if (and (some? (:ssl-truststore-location ssl-config-map)) (some? (:ssl-truststore-password ssl-config-map))) + (doto properties + (.put SslConfigs/SSL_TRUSTSTORE_LOCATION_CONFIG (:ssl-truststore-location ssl-config-map)) + (.put SslConfigs/SSL_TRUSTSTORE_PASSWORD_CONFIG (:ssl-truststore-password ssl-config-map)))) properties) (defn- add-jaas-properties @@ -228,11 +227,13 @@ properties)) (defn- add-sasl-properties - [properties mechanism protocol login-callback-handler] - (when (some? mechanism) (.put properties SaslConfigs/SASL_MECHANISM mechanism)) - (when (some? protocol) (.put properties CommonClientConfigs/SECURITY_PROTOCOL_CONFIG protocol)) - (when (some? login-callback-handler) (.put properties SaslConfigs/SASL_LOGIN_CALLBACK_HANDLER_CLASS login-callback-handler)) - properties) + ([properties mechanism protocol] + (add-sasl-properties properties mechanism protocol nil)) + ([properties mechanism protocol login-callback-handler] + (when (some? mechanism) (.put properties SaslConfigs/SASL_MECHANISM mechanism)) + (when (some? protocol) (.put properties CommonClientConfigs/SECURITY_PROTOCOL_CONFIG protocol)) + (when (some? login-callback-handler) (.put properties SaslConfigs/SASL_LOGIN_CALLBACK_HANDLER_CLASS login-callback-handler)) + properties)) (defn build-ssl-properties [properties set-property-fn ssl-config-map] @@ -263,7 +264,7 @@ (as-> properties pr (add-ssl-properties pr ssl-config-map) (add-jaas-properties pr jaas-config) - (add-sasl-properties pr mechanism protocol nil) + (add-sasl-properties pr mechanism protocol) (reduce-kv set-property-fn pr ssl-config-map)) properties))) @@ -288,9 +289,9 @@ " (let [sasl-configs-enabled (:enabled sasl-config-map) jaas-config (get sasl-config-map :jaas) - mechanism (get sasl-config-map :mechanism "OAUTHBEARER") - protocol (get sasl-config-map :protocol "SASL_SSL") - login-callback-handler (get sasl-config-map :login-callback-handler DEFAULT-LOGIN-CALLBACK-HANDLER)] + mechanism (get sasl-config-map :mechanism) + protocol (get sasl-config-map :protocol) + login-callback-handler (get sasl-config-map :login-callback-handler)] (if (true? sasl-configs-enabled) (as-> properties pr (add-jaas-properties pr jaas-config) @@ -336,10 +337,3 @@ (defn get-channel-retry-count [topic-entity channel] (:count (channel-retry-config topic-entity channel))) - -;; 1. Bump up kafka version to 3.7.0 -;; 2. 
Introduce changes for sasl and ssl properties: -;; 2.1 - - -;;ZIGGURAT_ \ No newline at end of file From a50d6c47cfc29ec97cd23fce30ac4f507fc6b7c7 Mon Sep 17 00:00:00 2001 From: Akash Chhabra Date: Thu, 17 Oct 2024 10:24:33 +0700 Subject: [PATCH 10/26] Removes updated tests --- test/ziggurat/config_test.clj | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/test/ziggurat/config_test.clj b/test/ziggurat/config_test.clj index d6ae535f..6085ddc1 100644 --- a/test/ziggurat/config_test.clj +++ b/test/ziggurat/config_test.clj @@ -247,8 +247,7 @@ (testing "should set ssl properties for streams if enabled is set to true" (with-redefs [ssl-config (constantly {:enabled true :ssl-keystore-location "/some/location" - :ssl-keystore-password "some-password" - :ssl-truststore-password "some-password-2"})] + :ssl-keystore-password "some-password"})] (let [streams-config-map {:auto-offset-reset :latest :group-id "foo"} props (build-streams-config-properties streams-config-map) @@ -263,8 +262,7 @@ (testing "should set ssl properties for consumer API if enabled is set to true" (with-redefs [ssl-config (constantly {:enabled true :ssl-keystore-location "/some/location" - :ssl-keystore-password "some-password" - :ssl-truststore-password "some-password-2"})] + :ssl-keystore-password "some-password"})] (let [streams-config-map {:max-poll-records 500 :enable-auto-commit true} props (build-consumer-config-properties streams-config-map) @@ -279,8 +277,7 @@ (testing "should set ssl properties for producer API if enabled is set to true" (with-redefs [ssl-config (constantly {:enabled true :ssl-keystore-location "/some/location" - :ssl-keystore-password "some-password" - :ssl-truststore-password "some-password-2"})] + :ssl-keystore-password "some-password"})] (let [streams-config-map {:batch.size 500 :acks 1} props (build-producer-config-properties streams-config-map) @@ -310,8 +307,7 @@ (testing "ssl properties from streams config map overrides the ssl properties provided in [:ziggurat :ssl]" (with-redefs [ssl-config (constantly {:enabled true :ssl-keystore-location "/some/location" - :ssl-keystore-password "some-password" - :ssl-truststore-password "some-password-2"})] + :ssl-keystore-password "some-password"})] (let [streams-config-map {:auto-offset-reset :latest :ssl-keystore-location "/some/different/location" :ssl-keystore-password "different-password"} @@ -326,7 +322,6 @@ (with-redefs [ssl-config (constantly {:enabled true :ssl-keystore-location "/some/location" :ssl-keystore-password "some-password" - :ssl-truststore-password "some-password-2" :mechanism "SCRAM-SHA-512" :jaas {:username "myuser" :password "mypassword" @@ -344,8 +339,7 @@ (testing "ssl properties DO NOT create jaas template if no value is provided for key sequence [:ziggurat :ssl :jaas]" (with-redefs [ssl-config (constantly {:enabled true :ssl-keystore-location "/some/location" - :ssl-keystore-password "some-password" - :ssl-truststore-password "some-password-2"})] + :ssl-keystore-password "some-password"})] (let [streams-config-map {:auto-offset-reset :latest} props (build-streams-config-properties streams-config-map) auto-offset-reset (.getProperty props "auto.offset.reset") From 6930085d9faf4361defd4530c64fb86960d8a93c Mon Sep 17 00:00:00 2001 From: Akash Chhabra Date: Thu, 17 Oct 2024 13:47:39 +0700 Subject: [PATCH 11/26] Adds test --- src/ziggurat/config.clj | 19 +++---- test/ziggurat/config_test.clj | 94 +++++++++++++++++++++++------------ 2 files changed, 70 insertions(+), 43 deletions(-) diff --git 
a/src/ziggurat/config.clj b/src/ziggurat/config.clj index 7fed9b2a..1160540d 100644 --- a/src/ziggurat/config.clj +++ b/src/ziggurat/config.clj @@ -172,6 +172,10 @@ :enabled :jaas]) +(defn- not-blank? + [s] + (and (not (nil? s)) (not (str/blank? (str/trim s))))) + (defn- to-list [s] (if (empty? s) @@ -201,20 +205,12 @@ (.setProperty p sk nv)))) p) -(defn create-jaas-properties [user-name password login-module] - (let [username-str (if user-name (format " username=\"%s\"" user-name) "") - password-str (if password (format " password=\"%s\"" password) "") +(defn create-jaas-properties [username password login-module] + (let [username-str (when (not-blank? username) (format " username=\"%s\"" username)) + password-str (when (not-blank? password) (format " password=\"%s\"" password)) credentials (str username-str password-str)] (format "%s required%s;" login-module (if (empty? credentials) "" credentials)))) -(defn- add-ssl-properties - [properties ssl-config-map] - (if (and (some? (:ssl-truststore-location ssl-config-map)) (some? (:ssl-truststore-password ssl-config-map))) - (doto properties - (.put SslConfigs/SSL_TRUSTSTORE_LOCATION_CONFIG (:ssl-truststore-location ssl-config-map)) - (.put SslConfigs/SSL_TRUSTSTORE_PASSWORD_CONFIG (:ssl-truststore-password ssl-config-map)))) - properties) - (defn- add-jaas-properties [properties jaas-config] (if (some? jaas-config) @@ -262,7 +258,6 @@ protocol (get ssl-config-map :protocol)] (if (true? ssl-configs-enabled) (as-> properties pr - (add-ssl-properties pr ssl-config-map) (add-jaas-properties pr jaas-config) (add-sasl-properties pr mechanism protocol) (reduce-kv set-property-fn pr ssl-config-map)) diff --git a/test/ziggurat/config_test.clj b/test/ziggurat/config_test.clj index 6085ddc1..0955c799 100644 --- a/test/ziggurat/config_test.clj +++ b/test/ziggurat/config_test.clj @@ -160,6 +160,7 @@ streams-config-mapping-table) set-all-property (partial set-property config-mapping-table) build-all-config-properties (partial build-properties set-all-property)] + (testing "all valid kafka configs" (let [config-map {:auto-offset-reset :latest :replication-factor 2 @@ -174,11 +175,13 @@ (is (= replication-factor "2")) (is (= enable-idempotence "true")) (is (= group-id "foo")))) + (testing "valid kafka consumer configs converts commit-interval-ms to auto-commit-interval-ms" (let [config-map {:commit-interval-ms 5000} props (build-consumer-config-properties config-map) auto-commit-interval-ms (.getProperty props "auto.commit.interval.ms")] (is (= auto-commit-interval-ms "5000")))) + (testing "valid kafka streams configs does not convert commit-interval-ms to auto-commit-interval-ms" (let [config-map {:commit-interval-ms 5000} props (build-streams-config-properties config-map) @@ -186,6 +189,7 @@ commit-interval-ms (.getProperty props "commit.interval.ms")] (is (= auto-commit-interval-ms "NOT FOUND")) (is (= commit-interval-ms "5000")))) + (testing "mapping table for backward compatibility" (let [config-map {:auto-offset-reset-config "latest" :changelog-topic-replication-factor 2 @@ -226,6 +230,7 @@ (is (= value-deserializer "value-deserializer")) (is (= value-serializer "value-serializer")) (is (= group-id "foo")))) + (testing "non kafka config keys should not be in Properties" (let [config-map {:consumer-type :joins :producer {:foo "bar"} @@ -238,16 +243,17 @@ :poll-timeout-ms-config 10000} props (build-all-config-properties config-map)] (doall - (map (fn [[k _]] - (let [string-key (str/replace (name k) #"-" ".") - not-found "NOT FOUND!" 
- v (.getProperty props string-key not-found)] - (is (= v not-found)))) - config-map)))) + (map (fn [[k _]] + (let [string-key (str/replace (name k) #"-" ".") + not-found "NOT FOUND!" + v (.getProperty props string-key not-found)] + (is (= v not-found)))) + config-map)))) + (testing "should set ssl properties for streams if enabled is set to true" - (with-redefs [ssl-config (constantly {:enabled true - :ssl-keystore-location "/some/location" - :ssl-keystore-password "some-password"})] + (with-redefs [ssl-config (constantly {:enabled true + :ssl-keystore-location "/some/location" + :ssl-keystore-password "some-password"})] (let [streams-config-map {:auto-offset-reset :latest :group-id "foo"} props (build-streams-config-properties streams-config-map) @@ -259,25 +265,33 @@ (is (= group-id "foo")) (is (= ssl-ks-location "/some/location")) (is (= ssl-ks-password "some-password"))))) + (testing "should set ssl properties for consumer API if enabled is set to true" (with-redefs [ssl-config (constantly {:enabled true :ssl-keystore-location "/some/location" - :ssl-keystore-password "some-password"})] + :ssl-keystore-password "some-password" + :ssl-truststore-location "/some/truststore/location" + :ssl-truststore-password "some-truststore-password"})] (let [streams-config-map {:max-poll-records 500 :enable-auto-commit true} props (build-consumer-config-properties streams-config-map) max-poll-records (.getProperty props "max.poll.records") enable-auto-comit (.getProperty props "enable.auto.commit") ssl-ks-location (.getProperty props "ssl.keystore.location") - ssl-ks-password (.getProperty props "ssl.keystore.password")] + ssl-ks-password (.getProperty props "ssl.keystore.password") + ssl-ts-location (.getProperty props "ssl.truststore.location") + ssl-ts-password (.getProperty props "ssl.truststore.password")] (is (= max-poll-records "500")) (is (= enable-auto-comit "true")) (is (= ssl-ks-location "/some/location")) - (is (= ssl-ks-password "some-password"))))) + (is (= ssl-ks-password "some-password")) + (is (= ssl-ts-location "/some/truststore/location")) + (is (= ssl-ts-password "some-truststore-password"))))) + (testing "should set ssl properties for producer API if enabled is set to true" - (with-redefs [ssl-config (constantly {:enabled true - :ssl-keystore-location "/some/location" - :ssl-keystore-password "some-password"})] + (with-redefs [ssl-config (constantly {:enabled true + :ssl-keystore-location "/some/location" + :ssl-keystore-password "some-password"})] (let [streams-config-map {:batch.size 500 :acks 1} props (build-producer-config-properties streams-config-map) @@ -289,6 +303,7 @@ (is (= acks "1")) (is (= ssl-ks-location "/some/location")) (is (= ssl-ks-password "some-password"))))) + (testing "should not set ssl properties for streams if eenabled is set to false" (with-redefs [ssl-config (constantly {:enabled false :ssl-keystore-location "/some/location" @@ -304,10 +319,11 @@ (is (= group-id "foo")) (is (nil? ssl-ks-location)) (is (nil? 
ssl-ks-password))))) + (testing "ssl properties from streams config map overrides the ssl properties provided in [:ziggurat :ssl]" - (with-redefs [ssl-config (constantly {:enabled true - :ssl-keystore-location "/some/location" - :ssl-keystore-password "some-password"})] + (with-redefs [ssl-config (constantly {:enabled true + :ssl-keystore-location "/some/location" + :ssl-keystore-password "some-password"})] (let [streams-config-map {:auto-offset-reset :latest :ssl-keystore-location "/some/different/location" :ssl-keystore-password "different-password"} @@ -318,14 +334,15 @@ (is (= auto-offset-reset "latest")) (is (= ssl-ks-location "/some/different/location")) (is (= ssl-ks-password "different-password"))))) + (testing "ssl properties create jaas template from the map provided in [:ziggurat :ssl :jaas]" - (with-redefs [ssl-config (constantly {:enabled true - :ssl-keystore-location "/some/location" - :ssl-keystore-password "some-password" - :mechanism "SCRAM-SHA-512" - :jaas {:username "myuser" - :password "mypassword" - :login-module "org.apache.kafka.common.security.scram.ScramLoginModule"}})] + (with-redefs [ssl-config (constantly {:enabled true + :ssl-keystore-location "/some/location" + :ssl-keystore-password "some-password" + :mechanism "SCRAM-SHA-512" + :jaas {:username "myuser" + :password "mypassword" + :login-module "org.apache.kafka.common.security.scram.ScramLoginModule"}})] (let [streams-config-map {:auto-offset-reset :latest} props (build-streams-config-properties streams-config-map) auto-offset-reset (.getProperty props "auto.offset.reset") @@ -336,10 +353,11 @@ (is (= ssl-ks-location "/some/location")) (is (= ssl-ks-password "some-password")) (is (= sasl-jaas-config (create-jaas-properties "myuser" "mypassword" "org.apache.kafka.common.security.scram.ScramLoginModule")))))) + (testing "ssl properties DO NOT create jaas template if no value is provided for key sequence [:ziggurat :ssl :jaas]" - (with-redefs [ssl-config (constantly {:enabled true - :ssl-keystore-location "/some/location" - :ssl-keystore-password "some-password"})] + (with-redefs [ssl-config (constantly {:enabled true + :ssl-keystore-location "/some/location" + :ssl-keystore-password "some-password"})] (let [streams-config-map {:auto-offset-reset :latest} props (build-streams-config-properties streams-config-map) auto-offset-reset (.getProperty props "auto.offset.reset") @@ -350,6 +368,7 @@ (is (= ssl-ks-location "/some/location")) (is (= ssl-ks-password "some-password")) (is (nil? 
sasl-jaas-config))))) + (testing "sasl properties create jaas template from the map provided in [:ziggurat :sasl :jaas]" (with-redefs [sasl-config (constantly {:enabled true :protocol "SASL_PLAINTEXT" @@ -361,11 +380,24 @@ props (build-streams-config-properties streams-config-map) auto-offset-reset (.getProperty props "auto.offset.reset") sasl-jaas-config (.getProperty props "sasl.jaas.config") - sasl-protocol (.getProperty props "security.protocol") - sasl-mechanism (.getProperty props "sasl.mechanism")] + sasl-protocol (.getProperty props "security.protocol")] + (is (= auto-offset-reset "latest")) + (is (= sasl-protocol "SASL_PLAINTEXT")) + (is (= sasl-jaas-config (create-jaas-properties "myuser" "mypassword" "org.apache.kafka.common.security.scram.ScramLoginModule")))))) + + (testing "sasl properties create jaas template from the map provided in [:ziggurat :sasl :jaas] without username password" + (with-redefs [sasl-config (constantly {:enabled true + :protocol "SASL_PLAINTEXT" + :mechanism "SCRAM-SHA-256" + :jaas {:login-module "org.apache.kafka.common.security.scram.ScramLoginModule"}})] + (let [streams-config-map {:auto-offset-reset :latest} + props (build-streams-config-properties streams-config-map) + auto-offset-reset (.getProperty props "auto.offset.reset") + sasl-jaas-config (.getProperty props "sasl.jaas.config") + sasl-protocol (.getProperty props "security.protocol")] (is (= auto-offset-reset "latest")) (is (= sasl-protocol "SASL_PLAINTEXT")) - (is (= sasl-jaas-config (create-jaas-properties "myuser" "mypassword" "org.apache.kafka.common.security.scram.ScramLoginModule")))))))) + (is (= sasl-jaas-config (create-jaas-properties "" "" "org.apache.kafka.common.security.scram.ScramLoginModule")))))))) (deftest test-set-property (testing "set-property with empty (with spaces) value" From 5fea14aaf92e6c22a5fdf3adf6aaa3887381d3a6 Mon Sep 17 00:00:00 2001 From: Akash Chhabra Date: Fri, 18 Oct 2024 13:13:38 +0700 Subject: [PATCH 12/26] [CI_SKIP] Work In Progress --- project.clj | 48 +++++++++++++++++++++++++++++------------------- 1 file changed, 29 insertions(+), 19 deletions(-) diff --git a/project.clj b/project.clj index 5e7ddf55..f4346d88 100644 --- a/project.clj +++ b/project.clj @@ -15,16 +15,25 @@ [com.cemerick/url "0.1.1"] [com.datadoghq/java-dogstatsd-client "2.4"] [com.fasterxml.jackson.core/jackson-databind "2.9.9"] - [com.novemberain/langohr "5.2.0" :exclusions [org.clojure/clojure]] + [com.novemberain/langohr "5.2.0" :exclusions [org.clojure/clojure org.slf4j/slf4j-api]] [com.taoensso/nippy "3.1.1"] [io.dropwizard.metrics5/metrics-core "5.0.0" :scope "compile"] [medley "1.3.0" :exclusions [org.clojure/clojure]] [mount "0.1.16"] - [io.jaegertracing/jaeger-core "1.6.0"] - [io.jaegertracing/jaeger-client "1.6.0"] + [io.jaegertracing/jaeger-core "1.6.0" :exclusions [org.slf4j/slf4j-api]] + [io.jaegertracing/jaeger-client "1.6.0" :exclusions [org.jetbrains.kotlin/kotlin-stdlib-common + org.slf4j/slf4j-api]] [org.apache.httpcomponents/fluent-hc "4.5.13"] - [org.apache.kafka/kafka-clients "3.7.0" :exclusions [org.slf4j/slf4j-log4j12 log4j]] - [org.apache.kafka/kafka-streams "3.7.0" :exclusions [org.slf4j/slf4j-log4j12 log4j]] + [org.apache.kafka/kafka-clients "3.7.0" :exclusions [log4j + org.lz4/lz4-java + org.slf4j/slf4j-api + org.slf4j/slf4j-log4j12]] + [org.apache.kafka/kafka-streams "3.7.0" :exclusions [log4j + org.lz4/lz4-java + org.slf4j/slf4j-api + org.slf4j/slf4j-log4j12 + com.fasterxml.jackson.core/jackson-databind + com.fasterxml.jackson.core/jackson-annotations]] 
[org.clojure/clojure "1.10.3"] [org.clojure/tools.logging "1.1.0"] [nrepl/nrepl "0.8.3"] @@ -32,7 +41,7 @@ [prismatic/schema "1.1.12"] [clj-statsd "0.4.0"] [ring/ring "1.9.3"] - [ring/ring-core "1.9.3"] + [ring/ring-core "1.9.3" :exclusions [commons-codec]] [ring/ring-defaults "0.3.2"] [ring/ring-jetty-adapter "1.9.3"] [ring/ring-json "0.5.1"] @@ -41,22 +50,17 @@ [com.newrelic.agent.java/newrelic-api "6.5.0"] [yleisradio/new-reliquary "1.1.0" :exclusions [org.clojure/clojure]] [metosin/ring-swagger "0.26.2" - :exclusions [cheshire - com.fasterxml.jackson.core/jackson-core - com.fasterxml.jackson.dataformat/jackson-dataformat-smile - com.fasterxml.jackson.dataformat/jackson-dataformat-cbor]] + :exclusions [org.mozilla/rhino com.fasterxml.jackson.dataformat/jackson-dataformat-smile com.fasterxml.jackson.dataformat/jackson-dataformat-cbor cheshire com.google.code.findbugs/jsr305 com.fasterxml.jackson.core/jackson-core]] [metosin/ring-swagger-ui "3.46.0"] - [cambium/cambium.core "1.1.0"] + [cambium/cambium.core "1.1.0" :exclusions [org.slf4j/slf4j-api]] [cambium/cambium.codec-cheshire "1.0.0"] - [cambium/cambium.logback.json "0.4.4"] - [ch.qos.logback/logback-classic "1.2.9"] + [cambium/cambium.logback.json "0.4.4" :exclusions [com.fasterxml.jackson.core/jackson-annotations com.fasterxml.jackson.core/jackson-databind]] + [ch.qos.logback/logback-classic "1.2.9" :exclusions [org.slf4j/slf4j-api]] [ch.qos.logback.contrib/logback-json-classic "0.1.5"] [ch.qos.logback.contrib/logback-jackson "0.1.5"] - [net.logstash.logback/logstash-logback-encoder "6.6"] + [net.logstash.logback/logstash-logback-encoder "6.6" :exclusions [com.fasterxml.jackson.core/jackson-databind com.fasterxml.jackson.core/jackson-core]] [clj-commons/iapetos "0.1.9"] - [org.apache.commons/commons-pool2 "2.11.1"] - [com.github.jnr/jffi "1.3.12"] - [com.github.jnr/jnr-unixsocket "0.38.21"]] + [org.apache.commons/commons-pool2 "2.11.1"]] :deploy-repositories [["clojars" {:url "https://clojars.org/repo" :username :env/clojars_username :password :env/clojars_password @@ -74,8 +78,14 @@ :dependencies [[com.google.protobuf/protobuf-java "3.17.0"] [junit/junit "4.13.2"] [org.hamcrest/hamcrest-core "2.2"] - [org.apache.kafka/kafka-streams "3.7.0" :classifier "test" :exclusions [org.slf4j/slf4j-log4j12 log4j]] - [org.apache.kafka/kafka-clients "3.7.0" :classifier "test"] + [org.apache.kafka/kafka-streams "3.7.0" :classifier "test" :exclusions [log4j + org.lz4/lz4-java + org.slf4j/slf4j-api + org.slf4j/slf4j-log4j12 + com.fasterxml.jackson.core/jackson-databind + com.fasterxml.jackson.core/jackson-annotations]] + [org.apache.kafka/kafka-clients "3.7.0" :classifier "test" :exclusions [org.slf4j/slf4j-api]] + [org.apache.kafka/kafka-streams-test-utils "3.7.0" :classifier "test" :exclusions [org.lz4/lz4-java log4j org.slf4j/slf4j-log4j12 org.slf4j/slf4j-api]] [org.clojure/test.check "1.1.0"]] :plugins [[lein-cloverage "1.2.2" :exclusions [org.clojure/clojure]]] :cloverage {:exclude-call ['cambium.core/info From cf9594d487c63f095f9cd8449b6cdab801734f3e Mon Sep 17 00:00:00 2001 From: Uddeshya Singh Date: Thu, 14 Nov 2024 14:38:34 +0530 Subject: [PATCH 13/26] Implement SSL SASL ACLs for ziggurat --- project.clj | 39 +++++--------- resources/config.test.edn | 1 + src/ziggurat/config.clj | 25 +++++---- src/ziggurat/streams.clj | 22 ++++---- test/ziggurat/config_test.clj | 92 +++++++++++++++++++++++++++------ test/ziggurat/producer_test.clj | 7 +-- 6 files changed, 115 insertions(+), 71 deletions(-) diff --git a/project.clj b/project.clj index 
f4346d88..8f757f2c 100644 --- a/project.clj +++ b/project.clj @@ -2,7 +2,7 @@ (cemerick.pomegranate.aether/register-wagon-factory! "http" #(org.apache.maven.wagon.providers.http.HttpWagon.)) -(defproject tech.gojek/ziggurat "5.0.0" +(defproject tech.gojek/ziggurat "4.12.0" :description "A stream processing framework to build stateless applications on kafka" :url "https://github.com/gojektech/ziggurat" :license {:name "Apache License, Version 2.0" @@ -20,20 +20,11 @@ [io.dropwizard.metrics5/metrics-core "5.0.0" :scope "compile"] [medley "1.3.0" :exclusions [org.clojure/clojure]] [mount "0.1.16"] - [io.jaegertracing/jaeger-core "1.6.0" :exclusions [org.slf4j/slf4j-api]] - [io.jaegertracing/jaeger-client "1.6.0" :exclusions [org.jetbrains.kotlin/kotlin-stdlib-common - org.slf4j/slf4j-api]] + [io.jaegertracing/jaeger-core "1.6.0"] + [io.jaegertracing/jaeger-client "1.6.0"] [org.apache.httpcomponents/fluent-hc "4.5.13"] - [org.apache.kafka/kafka-clients "3.7.0" :exclusions [log4j - org.lz4/lz4-java - org.slf4j/slf4j-api - org.slf4j/slf4j-log4j12]] - [org.apache.kafka/kafka-streams "3.7.0" :exclusions [log4j - org.lz4/lz4-java - org.slf4j/slf4j-api - org.slf4j/slf4j-log4j12 - com.fasterxml.jackson.core/jackson-databind - com.fasterxml.jackson.core/jackson-annotations]] + [org.apache.kafka/kafka-clients "2.8.2" :exclusions [org.slf4j/slf4j-log4j12 log4j]] + [org.apache.kafka/kafka-streams "2.8.2" :exclusions [org.slf4j/slf4j-log4j12 log4j]] [org.clojure/clojure "1.10.3"] [org.clojure/tools.logging "1.1.0"] [nrepl/nrepl "0.8.3"] @@ -41,7 +32,7 @@ [prismatic/schema "1.1.12"] [clj-statsd "0.4.0"] [ring/ring "1.9.3"] - [ring/ring-core "1.9.3" :exclusions [commons-codec]] + [ring/ring-core "1.9.3"] [ring/ring-defaults "0.3.2"] [ring/ring-jetty-adapter "1.9.3"] [ring/ring-json "0.5.1"] @@ -52,13 +43,13 @@ [metosin/ring-swagger "0.26.2" :exclusions [org.mozilla/rhino com.fasterxml.jackson.dataformat/jackson-dataformat-smile com.fasterxml.jackson.dataformat/jackson-dataformat-cbor cheshire com.google.code.findbugs/jsr305 com.fasterxml.jackson.core/jackson-core]] [metosin/ring-swagger-ui "3.46.0"] - [cambium/cambium.core "1.1.0" :exclusions [org.slf4j/slf4j-api]] + [cambium/cambium.core "1.1.0"] [cambium/cambium.codec-cheshire "1.0.0"] - [cambium/cambium.logback.json "0.4.4" :exclusions [com.fasterxml.jackson.core/jackson-annotations com.fasterxml.jackson.core/jackson-databind]] - [ch.qos.logback/logback-classic "1.2.9" :exclusions [org.slf4j/slf4j-api]] + [cambium/cambium.logback.json "0.4.4"] + [ch.qos.logback/logback-classic "1.2.9"] [ch.qos.logback.contrib/logback-json-classic "0.1.5"] [ch.qos.logback.contrib/logback-jackson "0.1.5"] - [net.logstash.logback/logstash-logback-encoder "6.6" :exclusions [com.fasterxml.jackson.core/jackson-databind com.fasterxml.jackson.core/jackson-core]] + [net.logstash.logback/logstash-logback-encoder "6.6"] [clj-commons/iapetos "0.1.9"] [org.apache.commons/commons-pool2 "2.11.1"]] :deploy-repositories [["clojars" {:url "https://clojars.org/repo" @@ -78,14 +69,8 @@ :dependencies [[com.google.protobuf/protobuf-java "3.17.0"] [junit/junit "4.13.2"] [org.hamcrest/hamcrest-core "2.2"] - [org.apache.kafka/kafka-streams "3.7.0" :classifier "test" :exclusions [log4j - org.lz4/lz4-java - org.slf4j/slf4j-api - org.slf4j/slf4j-log4j12 - com.fasterxml.jackson.core/jackson-databind - com.fasterxml.jackson.core/jackson-annotations]] - [org.apache.kafka/kafka-clients "3.7.0" :classifier "test" :exclusions [org.slf4j/slf4j-api]] - [org.apache.kafka/kafka-streams-test-utils "3.7.0" 
:classifier "test" :exclusions [org.lz4/lz4-java log4j org.slf4j/slf4j-log4j12 org.slf4j/slf4j-api]] + [org.apache.kafka/kafka-streams "2.8.2" :classifier "test" :exclusions [org.slf4j/slf4j-log4j12 log4j]] + [org.apache.kafka/kafka-clients "2.8.2" :classifier "test"] [org.clojure/test.check "1.1.0"]] :plugins [[lein-cloverage "1.2.2" :exclusions [org.clojure/clojure]]] :cloverage {:exclude-call ['cambium.core/info diff --git a/resources/config.test.edn b/resources/config.test.edn index d9327c45..d39843d9 100644 --- a/resources/config.test.edn +++ b/resources/config.test.edn @@ -96,4 +96,5 @@ :new-relic {:report-errors false} :prometheus {:port 8002 :enabled false} + :ssl {:enabled false} :log-format "text"}} diff --git a/src/ziggurat/config.clj b/src/ziggurat/config.clj index 1160540d..bd21a95d 100644 --- a/src/ziggurat/config.clj +++ b/src/ziggurat/config.clj @@ -6,8 +6,8 @@ [mount.core :refer [defstate]] [ziggurat.util.java-util :as util]) (:import (java.util Properties) - [org.apache.kafka.common.config SaslConfigs SslConfigs] - [org.apache.kafka.clients CommonClientConfigs]) + (org.apache.kafka.clients CommonClientConfigs) + (org.apache.kafka.common.config SaslConfigs)) (:gen-class :methods [^{:static true} [get [String] Object] @@ -223,13 +223,11 @@ properties)) (defn- add-sasl-properties - ([properties mechanism protocol] - (add-sasl-properties properties mechanism protocol nil)) - ([properties mechanism protocol login-callback-handler] - (when (some? mechanism) (.put properties SaslConfigs/SASL_MECHANISM mechanism)) - (when (some? protocol) (.put properties CommonClientConfigs/SECURITY_PROTOCOL_CONFIG protocol)) - (when (some? login-callback-handler) (.put properties SaslConfigs/SASL_LOGIN_CALLBACK_HANDLER_CLASS login-callback-handler)) - properties)) + [properties mechanism protocol login-callback-handler] + (when (some? mechanism) (.putIfAbsent properties SaslConfigs/SASL_MECHANISM mechanism)) + (when (some? protocol) (.putIfAbsent properties CommonClientConfigs/SECURITY_PROTOCOL_CONFIG protocol)) + (when (some? login-callback-handler) (.putIfAbsent properties SaslConfigs/SASL_LOGIN_CALLBACK_HANDLER_CLASS login-callback-handler)) + properties) (defn build-ssl-properties [properties set-property-fn ssl-config-map] @@ -255,11 +253,12 @@ (let [ssl-configs-enabled (:enabled ssl-config-map) jaas-config (get ssl-config-map :jaas) mechanism (get ssl-config-map :mechanism) - protocol (get ssl-config-map :protocol)] - (if (true? ssl-configs-enabled) + protocol (get ssl-config-map :protocol) + login-callback-handler (get ssl-config-map :login-callback-handler)] + (if (or (true? ssl-configs-enabled) (= ssl-configs-enabled "true")) (as-> properties pr (add-jaas-properties pr jaas-config) - (add-sasl-properties pr mechanism protocol) + (add-sasl-properties pr mechanism protocol login-callback-handler) (reduce-kv set-property-fn pr ssl-config-map)) properties))) @@ -287,7 +286,7 @@ mechanism (get sasl-config-map :mechanism) protocol (get sasl-config-map :protocol) login-callback-handler (get sasl-config-map :login-callback-handler)] - (if (true? sasl-configs-enabled) + (if (or (true? 
sasl-configs-enabled) (= sasl-configs-enabled "true")) (as-> properties pr (add-jaas-properties pr jaas-config) (add-sasl-properties pr mechanism protocol login-callback-handler) diff --git a/src/ziggurat/streams.clj b/src/ziggurat/streams.clj index f10c5947..a247b2bc 100644 --- a/src/ziggurat/streams.clj +++ b/src/ziggurat/streams.clj @@ -1,5 +1,6 @@ (ns ziggurat.streams - (:require [clojure.tools.logging :as log] + (:require [cambium.core :as clog] + [clojure.tools.logging :as log] [mount.core :as mount :refer [defstate]] [ziggurat.channel :as chl] [ziggurat.config :refer [build-streams-config-properties get-in-config ziggurat-config]] @@ -8,16 +9,15 @@ [ziggurat.message-payload :refer [->MessagePayload]] [ziggurat.metrics :as metrics] [ziggurat.timestamp-transformer :as timestamp-transformer] - [ziggurat.util.map :as umap] - [cambium.core :as clog]) - (:import [java.time Duration] - [java.util Properties] - [java.util.regex Pattern] - [org.apache.kafka.common.errors TimeoutException] - [org.apache.kafka.streams KafkaStreams KafkaStreams$State StreamsConfig StreamsBuilder Topology] - [org.apache.kafka.streams.errors StreamsUncaughtExceptionHandler StreamsUncaughtExceptionHandler$StreamThreadExceptionResponse] - [org.apache.kafka.streams.kstream JoinWindows ValueMapper TransformerSupplier ValueJoiner ValueTransformerWithKeySupplier] - [ziggurat.timestamp_transformer IngestionTimeExtractor])) + [ziggurat.util.map :as umap]) + (:import (java.time Duration) + (java.util Properties) + (java.util.regex Pattern) + (org.apache.kafka.common.errors TimeoutException) + (org.apache.kafka.streams KafkaStreams KafkaStreams$State StreamsBuilder StreamsConfig Topology) + (org.apache.kafka.streams.errors StreamsUncaughtExceptionHandler StreamsUncaughtExceptionHandler$StreamThreadExceptionResponse) + (org.apache.kafka.streams.kstream JoinWindows TransformerSupplier ValueJoiner ValueMapper ValueTransformerWithKeySupplier) + (ziggurat.timestamp_transformer IngestionTimeExtractor))) (def default-config-for-stream {:buffered-records-per-partition 10000 diff --git a/test/ziggurat/config_test.clj b/test/ziggurat/config_test.clj index 0955c799..9f9bc916 100644 --- a/test/ziggurat/config_test.clj +++ b/test/ziggurat/config_test.clj @@ -5,24 +5,24 @@ [mount.core :as mount] [ziggurat.config :refer [-get -getIn - build-properties build-consumer-config-properties - build-streams-config-properties build-producer-config-properties + build-properties + build-streams-config-properties channel-retry-config config config-file config-from-env consumer-config-mapping-table - producer-config-mapping-table - streams-config-mapping-table - default-config get-in-config + create-jaas-properties + default-config + get-in-config producer-config-mapping-table rabbitmq-config + sasl-config set-property - statsd-config - ziggurat-config ssl-config - sasl-config - create-jaas-properties]] + statsd-config + streams-config-mapping-table + ziggurat-config]] [ziggurat.fixtures :as f]) (:import (java.util ArrayList Properties))) @@ -176,6 +176,34 @@ (is (= enable-idempotence "true")) (is (= group-id "foo")))) + (testing "all valid kafka configs with ssl enabled" + (with-redefs [ssl-config (constantly {:enabled true + :ssl-truststore-location "/some/location" + :ssl-truststore-password "some-password" + :mechanism "OAUTHBEARER" + :protocol "SASL_SSL" + :login-callback-handler "someLoginHandler" + :jaas {:login-module "someLoginModule"}})] + (let [config-map {:auto-offset-reset :latest + :replication-factor 2 + :group-id "foo" + 
:enable-idempotence true} + props (build-all-config-properties config-map) + auto-offset-reset (.getProperty props "auto.offset.reset") + group-id (.getProperty props "group.id") + enable-idempotence (.getProperty props "enable.idempotence") + replication-factor (.getProperty props "replication.factor")] + (is (= auto-offset-reset "latest")) + (is (= replication-factor "2")) + (is (= enable-idempotence "true")) + (is (= (.getProperty props "ssl.truststore.location") "/some/location")) + (is (= (.getProperty props "ssl.truststore.password") "some-password")) + (is (= (.getProperty props "security.protocol") "SASL_SSL")) + (is (= (.getProperty props "sasl.mechanism") "OAUTHBEARER")) + (is (= (.getProperty props "sasl.login.callback.handler.class") "someLoginHandler")) + (is (= (.getProperty props "sasl.jaas.config") "someLoginModule required;")) + (is (= group-id "foo"))))) + (testing "valid kafka consumer configs converts commit-interval-ms to auto-commit-interval-ms" (let [config-map {:commit-interval-ms 5000} props (build-consumer-config-properties config-map) @@ -243,12 +271,12 @@ :poll-timeout-ms-config 10000} props (build-all-config-properties config-map)] (doall - (map (fn [[k _]] - (let [string-key (str/replace (name k) #"-" ".") - not-found "NOT FOUND!" - v (.getProperty props string-key not-found)] - (is (= v not-found)))) - config-map)))) + (map (fn [[k _]] + (let [string-key (str/replace (name k) #"-" ".") + not-found "NOT FOUND!" + v (.getProperty props string-key not-found)] + (is (= v not-found)))) + config-map)))) (testing "should set ssl properties for streams if enabled is set to true" (with-redefs [ssl-config (constantly {:enabled true @@ -304,7 +332,7 @@ (is (= ssl-ks-location "/some/location")) (is (= ssl-ks-password "some-password"))))) - (testing "should not set ssl properties for streams if eenabled is set to false" + (testing "should not set ssl properties for streams if enabled is set to false" (with-redefs [ssl-config (constantly {:enabled false :ssl-keystore-location "/some/location" :ssl-keystore-password "some-password"})] @@ -397,7 +425,37 @@ sasl-protocol (.getProperty props "security.protocol")] (is (= auto-offset-reset "latest")) (is (= sasl-protocol "SASL_PLAINTEXT")) - (is (= sasl-jaas-config (create-jaas-properties "" "" "org.apache.kafka.common.security.scram.ScramLoginModule")))))))) + (is (= sasl-jaas-config (create-jaas-properties "" "" "org.apache.kafka.common.security.scram.ScramLoginModule")))))) + (testing "should set ssl properties with sasl elements if present" + (with-redefs [ssl-config (constantly {:enabled true + :ssl-keystore-location "/some/location" + :ssl-keystore-password "some-password" + :mechanism "SCRAM-SHA-512" + :protocol "SASL_SSL" + :login-callback-handler "abc" + :ssl-truststore-location "/some/truststore/location" + :ssl-truststore-password "some-truststore-password"})] + (let [streams-config-map {:max-poll-records 500 + :enable-auto-commit true} + props (build-consumer-config-properties streams-config-map) + max-poll-records (.getProperty props "max.poll.records") + enable-auto-commit (.getProperty props "enable.auto.commit") + ssl-ks-location (.getProperty props "ssl.keystore.location") + ssl-ks-password (.getProperty props "ssl.keystore.password") + ssl-ts-location (.getProperty props "ssl.truststore.location") + ssl-ts-password (.getProperty props "ssl.truststore.password") + protocol (.getProperty props "security.protocol") + mechanism (.getProperty props "sasl.mechanism") + login-callback (.getProperty props 
"sasl.login.callback.handler.class")] + (is (= max-poll-records "500")) + (is (= enable-auto-commit "true")) + (is (= ssl-ks-location "/some/location")) + (is (= ssl-ks-password "some-password")) + (is (= ssl-ts-location "/some/truststore/location")) + (is (= ssl-ts-password "some-truststore-password")) + (is (= protocol "SASL_SSL")) + (is (= mechanism "SCRAM-SHA-512")) + (is (= login-callback "abc"))))))) (deftest test-set-property (testing "set-property with empty (with spaces) value" diff --git a/test/ziggurat/producer_test.clj b/test/ziggurat/producer_test.clj index 824d14e8..5a7923ba 100644 --- a/test/ziggurat/producer_test.clj +++ b/test/ziggurat/producer_test.clj @@ -5,7 +5,8 @@ [ziggurat.config :refer [ziggurat-config]] [ziggurat.fixtures :as fix :refer [*producer-properties* *consumer-properties*]] [ziggurat.producer :refer [producer-properties-map send kafka-producers -send]]) - (:import [org.apache.kafka.clients.producer KafkaProducer] + (:import (java.util Properties) + [org.apache.kafka.clients.producer KafkaProducer] [org.apache.kafka.streams.integration.utils IntegrationTestUtils])) (use-fixtures :once fix/mount-producer-with-config-and-tracer) @@ -20,13 +21,13 @@ :enabled [true :bool]}}}}}) (deftest send-data-with-topic-and-value-test - (with-redefs [kafka-producers (hash-map :default (KafkaProducer. *producer-properties*))] + (with-redefs [kafka-producers (hash-map :default (KafkaProducer. ^Properties *producer-properties*))] (let [alphanum-gen (gen/such-that #(not (blank? %)) gen/string-alphanumeric) topic (gen/generate alphanum-gen 10) key "message" value "Hello World!!"] (send :default topic key value) - (let [result (IntegrationTestUtils/waitUntilMinKeyValueRecordsReceived *consumer-properties* topic 1 8000)] + (let [result (IntegrationTestUtils/waitUntilMinKeyValueRecordsReceived ^Properties *consumer-properties* topic 1 8000)] (is (= value (.value (first result)))))))) (deftest send-data-with-topic-key-partition-and-value-test From e9d720c5d2772930f08ae968b454854083b97d52 Mon Sep 17 00:00:00 2001 From: Uddeshya Singh Date: Wed, 20 Nov 2024 12:27:34 +0530 Subject: [PATCH 14/26] SASL ACL test attempt - failed --- project.clj | 2 + test/ziggurat/kafka_server_jaas.conf | 3 + test/ziggurat/streams_test.clj | 177 +++++++++++++++++++++++++-- 3 files changed, 173 insertions(+), 9 deletions(-) create mode 100644 test/ziggurat/kafka_server_jaas.conf diff --git a/project.clj b/project.clj index 8f757f2c..ddefe2c7 100644 --- a/project.clj +++ b/project.clj @@ -69,8 +69,10 @@ :dependencies [[com.google.protobuf/protobuf-java "3.17.0"] [junit/junit "4.13.2"] [org.hamcrest/hamcrest-core "2.2"] + [org.apache.kafka/kafka_2.12 "2.8.0"] [org.apache.kafka/kafka-streams "2.8.2" :classifier "test" :exclusions [org.slf4j/slf4j-log4j12 log4j]] [org.apache.kafka/kafka-clients "2.8.2" :classifier "test"] + [org.apache.kafka/kafka-streams-test-utils "2.8.2" :classifier "test"] [org.clojure/test.check "1.1.0"]] :plugins [[lein-cloverage "1.2.2" :exclusions [org.clojure/clojure]]] :cloverage {:exclude-call ['cambium.core/info diff --git a/test/ziggurat/kafka_server_jaas.conf b/test/ziggurat/kafka_server_jaas.conf new file mode 100644 index 00000000..bb22f90b --- /dev/null +++ b/test/ziggurat/kafka_server_jaas.conf @@ -0,0 +1,3 @@ +KafkaServer { + org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required; +}; \ No newline at end of file diff --git a/test/ziggurat/streams_test.clj b/test/ziggurat/streams_test.clj index efd3a5e3..cf72b498 100644 --- 
a/test/ziggurat/streams_test.clj +++ b/test/ziggurat/streams_test.clj @@ -1,5 +1,6 @@ (ns ziggurat.streams-test - (:require [clojure.test :refer [deftest is join-fixtures testing use-fixtures]] + (:require [clojure.java.shell :refer [sh]] + [clojure.test :refer [deftest is join-fixtures testing use-fixtures]] [mount.core :as mount] [protobuf.core :as proto] [ziggurat.config :refer [ziggurat-config]] @@ -7,21 +8,32 @@ [ziggurat.middleware.default :as default-middleware] [ziggurat.middleware.json :as json-middleware] [ziggurat.middleware.stream-joins :as stream-joins-middleware] - [ziggurat.streams :refer [add-stream-thread get-stream-thread-count remove-stream-thread start-streams stop-streams stop-stream start-stream]] + [ziggurat.streams :refer [add-stream-thread get-stream-thread-count remove-stream-thread start-stream start-streams stop-stream stop-streams]] [ziggurat.streams :refer [handle-uncaught-exception start-stream start-streams stop-stream stop-streams]]) - (:import [com.gojek.test.proto Example$Photo] - [java.util Properties] - [org.apache.kafka.clients.producer ProducerConfig] + (:import (com.gojek.test.proto Example$Photo) + (java.net URI) + (java.nio.file Files Paths) + (java.nio.file.attribute FileAttribute) + (java.util Properties) + (kafka.server KafkaConfig KafkaServer) + (org.apache.kafka.clients CommonClientConfigs) + (org.apache.kafka.clients.producer ProducerConfig) + (org.apache.kafka.common.config SaslConfigs) (org.apache.kafka.common.utils MockTime) - [org.apache.kafka.streams KeyValue] - [org.apache.kafka.streams KafkaStreams$State] - [org.apache.kafka.streams.errors StreamsUncaughtExceptionHandler$StreamThreadExceptionResponse] - [org.apache.kafka.streams.integration.utils IntegrationTestUtils])) + (org.apache.kafka.streams KafkaStreams$State) + (org.apache.kafka.streams KeyValue) + (org.apache.kafka.streams.errors StreamsUncaughtExceptionHandler$StreamThreadExceptionResponse) + (org.apache.kafka.streams.integration.utils IntegrationTestUtils) + (scala Some))) (use-fixtures :once (join-fixtures [fix/mount-config-with-tracer fix/silence-logging fix/mount-metrics])) +(def truststore-path "/tmp/truststore/kafka.server.truststore.jks") +(def saas-config-path "test/ziggurat/kafka_server_jaas.conf") +(def truststore-password "testpassword") + (defn- start-mount [] (mount/start-with-states [[#'ziggurat.messaging.consumer/consumers {:start (constantly nil) @@ -35,11 +47,91 @@ (.put ProducerConfig/KEY_SERIALIZER_CLASS_CONFIG "org.apache.kafka.common.serialization.ByteArraySerializer") (.put ProducerConfig/VALUE_SERIALIZER_CLASS_CONFIG "org.apache.kafka.common.serialization.ByteArraySerializer"))) +(defn- props-with-sasl-config [] + (doto (props) + (.put ProducerConfig/BOOTSTRAP_SERVERS_CONFIG "localhost:9093") + (.put CommonClientConfigs/SECURITY_PROTOCOL_CONFIG "SASL_SSL") + (.put SaslConfigs/SASL_MECHANISM "OAUTHBEARER") + (.put SaslConfigs/SASL_LOGIN_CALLBACK_HANDLER_CLASS "org.apache.kafka.common.security.oauthbearer.secured.OAuthBearerValidatorCallbackHandler") + (.put props SaslConfigs/SASL_JAAS_CONFIG + "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required;") + (.put props "ssl.truststore.location" truststore-path) + (.put props "ssl.truststore.password" truststore-password) + (.put props "ssl.endpoint.identification.algorithm" "https"))) + (defn- props-with-string-serializer [] (doto (props) (.put ProducerConfig/KEY_SERIALIZER_CLASS_CONFIG "org.apache.kafka.common.serialization.StringSerializer") (.put 
ProducerConfig/VALUE_SERIALIZER_CLASS_CONFIG "org.apache.kafka.common.serialization.StringSerializer"))) +(defn create-truststore [] + "Generates a new truststore and self-signed certificate for testing." + (let [some-path (Paths/get (URI. (str "file:///" truststore-path))) + dir-path (.getParent some-path)] + ;; Ensure directory exists + (Files/createDirectories dir-path (into-array FileAttribute [])) + ;; Generate truststore with keytool + (sh "keytool" "-genkeypair" + "-alias" "test-cert" + "-keyalg" "RSA" + "-keystore" truststore-path + "-storepass" truststore-password + "-validity" "365" + "-keysize" "2048" + "-dname" "CN=localhost, OU=Test, O=Ziggurat, L=City, S=State, C=US"))) + +(defn cleanup-truststore [] + "Deletes the generated truststore after the test." + (let [path (Paths/get truststore-path nil)] + (Files/deleteIfExists path))) + +(defn create-broker-config [port log-dir] + "Manually create a Kafka broker configuration." + (let [props (Properties.)] + (.put props "broker.id" "0") + (.put props "log.dirs" (.toString log-dir)) + (.put props "listeners" (str "SASL_SSL://localhost:" port)) + (.put props "advertised.listeners" (str "SASL_SSL://localhost:" port)) + (.put props "listener.security.protocol.map" "SASL_SSL:SASL_SSL") + (.put props "inter.broker.listener.name" "SASL_SSL") + (.put props "ssl.truststore.location" truststore-path) + (.put props "ssl.truststore.password" truststore-password) + (.put props "ssl.endpoint.identification.algorithm" "") + (.put props "sasl.enabled.mechanisms" "OAUTHBEARER") + (.put props "sasl.mechanism.inter.broker.protocol" "OAUTHBEARER") + (.put props "zookeeper.connect" "localhost:2181") + (.put props "sasl.jaas.config" "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required;") + props)) + +(defn create-kafka-config [] + "Create Kafka Streams configuration with SASL/SSL." + (let [props (Properties.)] + (.put props "security.protocol" "SASL_SSL") + (.put props "ssl.truststore.location" truststore-path) + (.put props "ssl.truststore.password" truststore-password) + (.put props "sasl.mechanism" "OAUTHBEARER") + (.put props "sasl.jaas.config" "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required;") + props)) + +(defn start-embedded-kafka [port] + "Start an embedded Kafka broker." + (create-truststore) + (System/setProperty "java.security.auth.login.config" saas-config-path) + (let [log-dir (Files/createTempDirectory "kafka-logs" (into-array FileAttribute [])) + config (KafkaConfig. (create-broker-config port log-dir)) + kafka-server (KafkaServer. config (MockTime.) (new Some "a") false)] + (.startup kafka-server) + kafka-server)) + +(defn stop-embedded-kafka [kafka-server] + "Stop the embedded Kafka broker." 
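+  ;; Shut the broker down first, then remove the generated truststore and
+  ;; clear the JAAS login-config system property set by start-embedded-kafka.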
+ (.shutdown kafka-server) + (cleanup-truststore) + (System/clearProperty "java.security.auth.login.config")) + + +(def ^:dynamic *embedded-kafka* nil) + (def message {:id 7 :path "/photos/h2k3j4h9h23"}) @@ -121,6 +213,73 @@ (stop-streams streams) (is (= times @message-received-count)))) +(deftest start-streams-test-when-sasl-configs-are-not-provided + (with-redefs [ziggurat.config/ssl-config (constantly {}) + ziggurat.config/sasl-config (constantly {})] + (let [message-received-count (atom 0) + mapped-fn (get-mapped-fn message-received-count) + times 6 + kvs (repeat times message-key-value) + handler-fn (default-middleware/protobuf->hash mapped-fn proto-class :default) + streams (start-streams {:default {:handler-fn handler-fn}} + (-> (ziggurat-config) + (assoc-in [:stream-router :default :application-id] (rand-application-id)) + (assoc-in [:stream-router :default :changelog-topic-replication-factor] changelog-topic-replication-factor)))] + (Thread/sleep 10000) ;;waiting for streams to start + (IntegrationTestUtils/produceKeyValuesSynchronously (get-in (ziggurat-config) [:stream-router :default :origin-topic]) + kvs + (props) + (MockTime.)) + (Thread/sleep 5000) ;;wating for streams to consume messages + (stop-streams streams) + (is (= times @message-received-count))))) + +(deftest start-streams-test-should-fail-with-invalid-sasl-configs + (with-redefs [ziggurat.config/ssl-config (fn [] {:enabled true + :protocol "SASL_SSL" + :mechanism "OAUTHBEARER" + :ssl-truststore-location "/path/to/truststore.jks" + :ssl-truststore-password "some-password" + :login-callback-handler "com.example.oauthbearer.OAuthBearerLoginCallbackHandler" + :jaas {:login-module "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule"}})] + (let [message-received-count (atom 0) + mapped-fn (get-mapped-fn message-received-count) + handler-fn (default-middleware/protobuf->hash mapped-fn proto-class :default) + streams (try (start-streams {:default {:handler-fn handler-fn}} + (-> (ziggurat-config) + (assoc-in [:stream-router :default :application-id] (rand-application-id)) + (assoc-in [:stream-router :default :changelog-topic-replication-factor] changelog-topic-replication-factor))) + (catch Exception e (.getMessage e)))] + (is (= streams "Invalid value com.example.oauthbearer.OAuthBearerLoginCallbackHandler for configuration sasl.login.callback.handler.class: Class com.example.oauthbearer.OAuthBearerLoginCallbackHandler could not be found."))))) + +(deftest start-streams-test-with-valid-sasl-configs + (binding [*embedded-kafka* (start-embedded-kafka 9093)] + (try + (with-redefs [ziggurat.config/ssl-config (fn [] {:enabled true + :protocol "SASL_SSL" + :mechanism "OAUTHBEARER" + :login-callback-handler "org.apache.kafka.common.security.oauthbearer.secured.OAuthBearerValidatorCallbackHandler" + :jaas {:login-module "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule"}})] + (let [message-received-count (atom 0) + mapped-fn (get-mapped-fn message-received-count) + times 6 + kvs (repeat times message-key-value) + handler-fn (default-middleware/protobuf->hash mapped-fn proto-class :default) + streams (start-streams {:default {:handler-fn handler-fn}} + (-> (ziggurat-config) + (assoc-in [:stream-router :default :bootstrap-servers] "localhost:9093") + (assoc-in [:stream-router :default :application-id] (rand-application-id)) + (assoc-in [:stream-router :default :changelog-topic-replication-factor] changelog-topic-replication-factor)))] + (Thread/sleep 10000) + 
(IntegrationTestUtils/produceKeyValuesSynchronously (get-in (ziggurat-config) [:stream-router :default :origin-topic]) + kvs + (props-with-sasl-config) + (MockTime.)) + (Thread/sleep 5000) ;;waiting for streams to consume messages + (stop-streams streams) + (is (= times @message-received-count)))) + (finally (stop-embedded-kafka *embedded-kafka*))))) + (deftest stop-stream-test (let [message-received-count (atom 0) mapped-fn (get-mapped-fn message-received-count) From 6961914a1c24f8562681c86c3ef02cf6724d55bc Mon Sep 17 00:00:00 2001 From: Uddeshya Singh Date: Wed, 20 Nov 2024 12:39:20 +0530 Subject: [PATCH 15/26] Add integration tests for streams test --- CHANGELOG.md | 3 + test/ziggurat/streams_test.clj | 124 +-------------------------------- 2 files changed, 5 insertions(+), 122 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ddabea89..e031237d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,9 @@ All notable changes to this project will be documented in this file. This change log follows the conventions of [keepachangelog.com](http://keepachangelog.com/). +## 4.12.0 +- Adds support for ACL auth for kafka streams. + ## 4.11.1 - Fix retry-count returning nil if empty. Returns 0 by default now. diff --git a/test/ziggurat/streams_test.clj b/test/ziggurat/streams_test.clj index cf72b498..74dd3a5d 100644 --- a/test/ziggurat/streams_test.clj +++ b/test/ziggurat/streams_test.clj @@ -1,6 +1,5 @@ (ns ziggurat.streams-test - (:require [clojure.java.shell :refer [sh]] - [clojure.test :refer [deftest is join-fixtures testing use-fixtures]] + (:require [clojure.test :refer [deftest is join-fixtures testing use-fixtures]] [mount.core :as mount] [protobuf.core :as proto] [ziggurat.config :refer [ziggurat-config]] @@ -11,29 +10,18 @@ [ziggurat.streams :refer [add-stream-thread get-stream-thread-count remove-stream-thread start-stream start-streams stop-stream stop-streams]] [ziggurat.streams :refer [handle-uncaught-exception start-stream start-streams stop-stream stop-streams]]) (:import (com.gojek.test.proto Example$Photo) - (java.net URI) - (java.nio.file Files Paths) - (java.nio.file.attribute FileAttribute) (java.util Properties) - (kafka.server KafkaConfig KafkaServer) - (org.apache.kafka.clients CommonClientConfigs) (org.apache.kafka.clients.producer ProducerConfig) - (org.apache.kafka.common.config SaslConfigs) (org.apache.kafka.common.utils MockTime) (org.apache.kafka.streams KafkaStreams$State) (org.apache.kafka.streams KeyValue) (org.apache.kafka.streams.errors StreamsUncaughtExceptionHandler$StreamThreadExceptionResponse) - (org.apache.kafka.streams.integration.utils IntegrationTestUtils) - (scala Some))) + (org.apache.kafka.streams.integration.utils IntegrationTestUtils))) (use-fixtures :once (join-fixtures [fix/mount-config-with-tracer fix/silence-logging fix/mount-metrics])) -(def truststore-path "/tmp/truststore/kafka.server.truststore.jks") -(def saas-config-path "test/ziggurat/kafka_server_jaas.conf") -(def truststore-password "testpassword") - (defn- start-mount [] (mount/start-with-states [[#'ziggurat.messaging.consumer/consumers {:start (constantly nil) @@ -47,91 +35,11 @@ (.put ProducerConfig/KEY_SERIALIZER_CLASS_CONFIG "org.apache.kafka.common.serialization.ByteArraySerializer") (.put ProducerConfig/VALUE_SERIALIZER_CLASS_CONFIG "org.apache.kafka.common.serialization.ByteArraySerializer"))) -(defn- props-with-sasl-config [] - (doto (props) - (.put ProducerConfig/BOOTSTRAP_SERVERS_CONFIG "localhost:9093") - (.put CommonClientConfigs/SECURITY_PROTOCOL_CONFIG 
"SASL_SSL") - (.put SaslConfigs/SASL_MECHANISM "OAUTHBEARER") - (.put SaslConfigs/SASL_LOGIN_CALLBACK_HANDLER_CLASS "org.apache.kafka.common.security.oauthbearer.secured.OAuthBearerValidatorCallbackHandler") - (.put props SaslConfigs/SASL_JAAS_CONFIG - "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required;") - (.put props "ssl.truststore.location" truststore-path) - (.put props "ssl.truststore.password" truststore-password) - (.put props "ssl.endpoint.identification.algorithm" "https"))) - (defn- props-with-string-serializer [] (doto (props) (.put ProducerConfig/KEY_SERIALIZER_CLASS_CONFIG "org.apache.kafka.common.serialization.StringSerializer") (.put ProducerConfig/VALUE_SERIALIZER_CLASS_CONFIG "org.apache.kafka.common.serialization.StringSerializer"))) -(defn create-truststore [] - "Generates a new truststore and self-signed certificate for testing." - (let [some-path (Paths/get (URI. (str "file:///" truststore-path))) - dir-path (.getParent some-path)] - ;; Ensure directory exists - (Files/createDirectories dir-path (into-array FileAttribute [])) - ;; Generate truststore with keytool - (sh "keytool" "-genkeypair" - "-alias" "test-cert" - "-keyalg" "RSA" - "-keystore" truststore-path - "-storepass" truststore-password - "-validity" "365" - "-keysize" "2048" - "-dname" "CN=localhost, OU=Test, O=Ziggurat, L=City, S=State, C=US"))) - -(defn cleanup-truststore [] - "Deletes the generated truststore after the test." - (let [path (Paths/get truststore-path nil)] - (Files/deleteIfExists path))) - -(defn create-broker-config [port log-dir] - "Manually create a Kafka broker configuration." - (let [props (Properties.)] - (.put props "broker.id" "0") - (.put props "log.dirs" (.toString log-dir)) - (.put props "listeners" (str "SASL_SSL://localhost:" port)) - (.put props "advertised.listeners" (str "SASL_SSL://localhost:" port)) - (.put props "listener.security.protocol.map" "SASL_SSL:SASL_SSL") - (.put props "inter.broker.listener.name" "SASL_SSL") - (.put props "ssl.truststore.location" truststore-path) - (.put props "ssl.truststore.password" truststore-password) - (.put props "ssl.endpoint.identification.algorithm" "") - (.put props "sasl.enabled.mechanisms" "OAUTHBEARER") - (.put props "sasl.mechanism.inter.broker.protocol" "OAUTHBEARER") - (.put props "zookeeper.connect" "localhost:2181") - (.put props "sasl.jaas.config" "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required;") - props)) - -(defn create-kafka-config [] - "Create Kafka Streams configuration with SASL/SSL." - (let [props (Properties.)] - (.put props "security.protocol" "SASL_SSL") - (.put props "ssl.truststore.location" truststore-path) - (.put props "ssl.truststore.password" truststore-password) - (.put props "sasl.mechanism" "OAUTHBEARER") - (.put props "sasl.jaas.config" "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule required;") - props)) - -(defn start-embedded-kafka [port] - "Start an embedded Kafka broker." - (create-truststore) - (System/setProperty "java.security.auth.login.config" saas-config-path) - (let [log-dir (Files/createTempDirectory "kafka-logs" (into-array FileAttribute [])) - config (KafkaConfig. (create-broker-config port log-dir)) - kafka-server (KafkaServer. config (MockTime.) (new Some "a") false)] - (.startup kafka-server) - kafka-server)) - -(defn stop-embedded-kafka [kafka-server] - "Stop the embedded Kafka broker." 
- (.shutdown kafka-server) - (cleanup-truststore) - (System/clearProperty "java.security.auth.login.config")) - - -(def ^:dynamic *embedded-kafka* nil) - (def message {:id 7 :path "/photos/h2k3j4h9h23"}) @@ -252,34 +160,6 @@ (catch Exception e (.getMessage e)))] (is (= streams "Invalid value com.example.oauthbearer.OAuthBearerLoginCallbackHandler for configuration sasl.login.callback.handler.class: Class com.example.oauthbearer.OAuthBearerLoginCallbackHandler could not be found."))))) -(deftest start-streams-test-with-valid-sasl-configs - (binding [*embedded-kafka* (start-embedded-kafka 9093)] - (try - (with-redefs [ziggurat.config/ssl-config (fn [] {:enabled true - :protocol "SASL_SSL" - :mechanism "OAUTHBEARER" - :login-callback-handler "org.apache.kafka.common.security.oauthbearer.secured.OAuthBearerValidatorCallbackHandler" - :jaas {:login-module "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule"}})] - (let [message-received-count (atom 0) - mapped-fn (get-mapped-fn message-received-count) - times 6 - kvs (repeat times message-key-value) - handler-fn (default-middleware/protobuf->hash mapped-fn proto-class :default) - streams (start-streams {:default {:handler-fn handler-fn}} - (-> (ziggurat-config) - (assoc-in [:stream-router :default :bootstrap-servers] "localhost:9093") - (assoc-in [:stream-router :default :application-id] (rand-application-id)) - (assoc-in [:stream-router :default :changelog-topic-replication-factor] changelog-topic-replication-factor)))] - (Thread/sleep 10000) - (IntegrationTestUtils/produceKeyValuesSynchronously (get-in (ziggurat-config) [:stream-router :default :origin-topic]) - kvs - (props-with-sasl-config) - (MockTime.)) - (Thread/sleep 5000) ;;waiting for streams to consume messages - (stop-streams streams) - (is (= times @message-received-count)))) - (finally (stop-embedded-kafka *embedded-kafka*))))) - (deftest stop-stream-test (let [message-received-count (atom 0) mapped-fn (get-mapped-fn message-received-count) From 82e526538056a5b1764b971a7d6d20748dc7cd2d Mon Sep 17 00:00:00 2001 From: Uddeshya Singh Date: Tue, 26 Nov 2024 12:32:07 +0530 Subject: [PATCH 16/26] ACL integration attempt with SSL TLS --- Makefile | 13 +-- config-user.properties | 1 + docker-compose-cluster.yml | 46 +++++++--- docker-compose.yml | 1 + kafka_server_jaas.conf | 15 ++++ scripts/create-certs.sh | 82 ++++++++++++++++++ secrets/broker_keystore_creds | 1 + secrets/broker_sslkey_creds | 1 + secrets/broker_truststore_creds | 1 + secrets/kafka.broker.keystore.jks | Bin 0 -> 4887 bytes secrets/kafka.broker.truststore.jks | Bin 0 -> 1258 bytes secrets/snakeoil-ca-1.crt | 23 +++++ secrets/snakeoil-ca-1.key | 30 +++++++ secrets/snakeoil-ca-1.srl | 1 + ...OAuthAuthenticateLoginCallbackHandler.java | 68 +++++++++++++++ test/ziggurat/fixtures.clj | 2 +- test/ziggurat/streams_test.clj | 32 +++++++ zookeeper_server_jaas.conf | 4 + 18 files changed, 305 insertions(+), 16 deletions(-) create mode 100644 config-user.properties create mode 100644 kafka_server_jaas.conf create mode 100755 scripts/create-certs.sh create mode 100644 secrets/broker_keystore_creds create mode 100644 secrets/broker_sslkey_creds create mode 100644 secrets/broker_truststore_creds create mode 100644 secrets/kafka.broker.keystore.jks create mode 100644 secrets/kafka.broker.truststore.jks create mode 100644 secrets/snakeoil-ca-1.crt create mode 100644 secrets/snakeoil-ca-1.key create mode 100644 secrets/snakeoil-ca-1.srl create mode 100644 test/com/oauthbearer/OAuthAuthenticateLoginCallbackHandler.java 
create mode 100644 zookeeper_server_jaas.conf diff --git a/Makefile b/Makefile index 3af0c08b..7be444cb 100644 --- a/Makefile +++ b/Makefile @@ -18,18 +18,21 @@ test: setup setup-cluster: rm -rf /tmp/ziggurat_kafka_cluster_data - docker-compose -f docker-compose-cluster.yml -p ziggurat down + rm -rf secrets + docker-compose -f docker-compose-cluster.yml -p ziggurat down --remove-orphans lein deps + chmod +x scripts/create-certs.sh + ./scripts/create-certs.sh docker-compose -f docker-compose-cluster.yml -p ziggurat up -d - sleep 30 -# Sleeping for 30s to allow the cluster to come up - docker exec ziggurat_kafka1_1 kafka-topics --create --topic $(topic) --partitions 3 --replication-factor 3 --if-not-exists --zookeeper ziggurat_zookeeper_1 - docker exec ziggurat_kafka1_1 kafka-topics --create --topic $(another_test_topic) --partitions 3 --replication-factor 3 --if-not-exists --zookeeper ziggurat_zookeeper_1 + sleep 60 + docker exec ziggurat-kafka1-1 kafka-topics --create --topic $(topic) --partitions 3 --replication-factor 3 --if-not-exists --zookeeper ziggurat-zookeeper-1 + docker exec ziggurat-kafka1-1 kafka-topics --create --topic $(another_test_topic) --partitions 3 --replication-factor 3 --if-not-exists --zookeeper ziggurat-zookeeper-1 test-cluster: setup-cluster TESTING_TYPE=cluster lein test docker-compose -f docker-compose-cluster.yml down rm -rf /tmp/ziggurat_kafka_cluster_data + rm -rf secrets coverage: setup lein code-coverage diff --git a/config-user.properties b/config-user.properties new file mode 100644 index 00000000..3c0d4117 --- /dev/null +++ b/config-user.properties @@ -0,0 +1 @@ +sasl.jaas.config=org.apache.kafka.common.security.scram.ScramLoginModule required username=client-for-poc password=password123; security.protocol=SASL_PLAINTEXT sasl.mechanism=SCRAM-SHA-256 diff --git a/docker-compose-cluster.yml b/docker-compose-cluster.yml index cb9374d6..3d50a12b 100644 --- a/docker-compose-cluster.yml +++ b/docker-compose-cluster.yml @@ -8,42 +8,67 @@ services: container_name: 'ziggurat_rabbitmq' zookeeper: - image: zookeeper:3.4.9 + image: confluentinc/cp-zookeeper:5.5.0 + platform: linux/amd64 hostname: zookeeper ports: - "2181:2181" environment: - ZOO_MY_ID: 1 - ZOO_PORT: 2181 - ZOO_SERVERS: server.1=zookeeper:2888:3888 - ZOO_TICK_TIME: 2000 + ZOOKEEPER_CLIENT_PORT: 2181 + KAFKA_OPTS: -Djava.security.auth.login.config=/etc/kafka/zookeeper_server_jaas.conf + -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider + -Dzookeeper.allowSaslFailedClients=false + -Dzookeeper.requireClientAuthScheme=sasl volumes: - /tmp/ziggurat_kafka_cluster_data/zookeeper/data:/data - /tmp/ziggurat_kafka_cluster_data/zookeeper/datalog:/datalog + - ./zookeeper_server_jaas.conf:/etc/kafka/zookeeper_server_jaas.conf kafka1: image: 'confluentinc/cp-kafka:${CONFLUENT_VERSION}' + platform: linux/amd64 cap_add: - NET_ADMIN - SYS_ADMIN hostname: kafka1 ports: - - "9091:9091" + - "9094:9094" + - "9095:9095" environment: - KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka1:19091,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9091 - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT + KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka1:19094,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9094,SASL_SSL://${DOCKER_HOST_IP:-127.0.0.1}:9095 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT,SASL_SSL:SASL_SSL 
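+      # Inter-broker traffic stays on the internal PLAINTEXT listener (19094);
+      # only the new 9095 listener requires SASL_SSL.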
KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL - KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181" KAFKA_BROKER_ID: 1 KAFKA_DEFAULT_REPLICATION_FACTOR: 3 KAFKA_NUM_PARTITIONS: 3 + KAFKA_SASL_ENABLED_MECHANISMS: PLAIN + KAFKA_SASL_JAAS_CONFIG: "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"admin\" password=\"admin-secret\" user_admin=\"admin-secret\" user_client=\"client-secret\";" + KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.auth.SimpleAclAuthorizer + KAFKA_SUPER_USERS: User:admin + KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "false" + KAFKA_SECURITY_PROTOCOL: SASL_SSL + KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/secrets/kafka_server_jaas.conf" + KAFKA_SSL_KEYSTORE_FILENAME: kafka.broker.keystore.jks + KAFKA_SSL_KEYSTORE_CREDENTIALS: broker_keystore_creds + KAFKA_SSL_KEY_CREDENTIALS: broker_sslkey_creds + KAFKA_SSL_TRUSTSTORE_FILENAME: kafka.broker.truststore.jks + KAFKA_SSL_TRUSTSTORE_CREDENTIALS: broker_truststore_creds + KAFKA_SSL_CLIENT_AUTH: "required" volumes: - /tmp/ziggurat_kafka_cluster_data/kafka1/data:/var/lib/kafka/data + - ./kafka_server_jaas.conf:/etc/kafka/secrets/kafka_server_jaas.conf + - ./secrets/kafka.broker.keystore.jks:/etc/kafka/secrets/kafka.broker.keystore.jks + - ./secrets/kafka.broker.truststore.jks:/etc/kafka/secrets/kafka.broker.truststore.jks + - ./secrets/broker_keystore_creds:/etc/kafka/secrets/broker_keystore_creds + - ./secrets/broker_sslkey_creds:/etc/kafka/secrets/broker_sslkey_creds + - ./secrets/broker_truststore_creds:/etc/kafka/secrets/broker_truststore_creds depends_on: - zookeeper kafka2: image: 'confluentinc/cp-kafka:${CONFLUENT_VERSION}' + platform: linux/amd64 cap_add: - NET_ADMIN - SYS_ADMIN @@ -54,7 +79,7 @@ services: KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka2:19092,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9092 KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL - KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181 + KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181" KAFKA_BROKER_ID: 2 KAFKA_DEFAULT_REPLICATION_FACTOR: 3 KAFKA_NUM_PARTITIONS: 3 @@ -65,6 +90,7 @@ services: kafka3: image: 'confluentinc/cp-kafka:${CONFLUENT_VERSION}' + platform: linux/amd64 cap_add: - NET_ADMIN - SYS_ADMIN diff --git a/docker-compose.yml b/docker-compose.yml index 87d24d1d..1a1d0341 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -16,6 +16,7 @@ services: - ALLOW_ANONYMOUS_LOGIN=yes kafka: image: 'bitnami/kafka:${KAFKA_VERSION}' + platform: linux/amd64 ports: - '9092:9092' container_name: 'ziggurat_kafka' diff --git a/kafka_server_jaas.conf b/kafka_server_jaas.conf new file mode 100644 index 00000000..472ca9a3 --- /dev/null +++ b/kafka_server_jaas.conf @@ -0,0 +1,15 @@ +KafkaServer { + org.apache.kafka.common.security.scram.ScramLoginModule required + username="broker" + password="password"; +}; +Client { + org.apache.kafka.common.security.plain.PlainLoginModule required + username="admin" + password="password"; +}; +KafkaClient { + org.apache.kafka.common.security.scram.ScramLoginModule required + username="client" + password="client-secret"; +}; \ No newline at end of file diff --git a/scripts/create-certs.sh b/scripts/create-certs.sh new file mode 100755 index 00000000..f0f03986 --- /dev/null +++ b/scripts/create-certs.sh @@ -0,0 +1,82 @@ +#!/bin/bash +set -e + +# Create directories for certs +mkdir -p secrets +cd secrets + +# Cleanup 
files +rm -f *.crt *.csr *_creds *.jks *.srl *.key *.pem *.der *.p12 + +# Generate CA key +openssl req -new -x509 -keyout snakeoil-ca-1.key -out snakeoil-ca-1.crt -days 365 \ + -subj '/CN=ca1.test.confluent.io/OU=TEST/O=CONFLUENT/L=PaloAlto/ST=Ca/C=US' \ + -passin pass:confluent -passout pass:confluent + +# Create broker keystore +keytool -genkey -noprompt \ + -alias broker \ + -dname "CN=broker,OU=TEST,O=CONFLUENT,L=PaloAlto,S=Ca,C=US" \ + -ext "SAN=dns:broker,dns:localhost" \ + -keystore kafka.broker.keystore.jks \ + -keyalg RSA \ + -storepass confluent \ + -keypass confluent \ + -storetype pkcs12 + +# Create the certificate signing request (CSR) +keytool -keystore kafka.broker.keystore.jks -alias broker \ + -certreq -file broker.csr -storepass confluent -keypass confluent \ + -ext "SAN=dns:broker,dns:localhost" + +# Create extfile for SAN +cat << EOF > extfile +[req] +distinguished_name = req_distinguished_name +x509_extensions = v3_req +prompt = no +[req_distinguished_name] +CN = broker +[v3_req] +subjectAltName = @alt_names +[alt_names] +DNS.1 = broker +DNS.2 = localhost +EOF + +# Sign the host certificate with the certificate authority (CA) +openssl x509 -req -CA snakeoil-ca-1.crt -CAkey snakeoil-ca-1.key \ + -in broker.csr -out broker-ca1-signed.crt -days 9999 -CAcreateserial \ + -passin pass:confluent -extensions v3_req -extfile extfile + +# Sign and import the CA cert into the keystore +keytool -noprompt -keystore kafka.broker.keystore.jks -alias CARoot \ + -import -file snakeoil-ca-1.crt -storepass confluent -keypass confluent + +# Sign and import the host certificate into the keystore +keytool -noprompt -keystore kafka.broker.keystore.jks -alias broker \ + -import -file broker-ca1-signed.crt -storepass confluent -keypass confluent \ + -ext "SAN=dns:broker,dns:localhost" + +# Create truststore and import the CA cert +keytool -noprompt -keystore kafka.broker.truststore.jks -alias CARoot \ + -import -file snakeoil-ca-1.crt -storepass confluent -keypass confluent + +# Save creds +echo "confluent" > broker_sslkey_creds +echo "confluent" > broker_keystore_creds +echo "confluent" > broker_truststore_creds + +# Set appropriate permissions +chmod 644 kafka.broker.keystore.jks kafka.broker.truststore.jks \ + broker_sslkey_creds broker_keystore_creds broker_truststore_creds + +# Clean up intermediate files +rm -f broker.csr broker-ca1-signed.crt extfile + +# Verify the keystore and truststore content +echo "Verifying keystore content:" +keytool -list -keystore kafka.broker.keystore.jks -storepass confluent +echo "Verifying truststore content:" +keytool -list -keystore kafka.broker.truststore.jks -storepass confluent + diff --git a/secrets/broker_keystore_creds b/secrets/broker_keystore_creds new file mode 100644 index 00000000..23212273 --- /dev/null +++ b/secrets/broker_keystore_creds @@ -0,0 +1 @@ +confluent diff --git a/secrets/broker_sslkey_creds b/secrets/broker_sslkey_creds new file mode 100644 index 00000000..23212273 --- /dev/null +++ b/secrets/broker_sslkey_creds @@ -0,0 +1 @@ +confluent diff --git a/secrets/broker_truststore_creds b/secrets/broker_truststore_creds new file mode 100644 index 00000000..23212273 --- /dev/null +++ b/secrets/broker_truststore_creds @@ -0,0 +1 @@ +confluent diff --git a/secrets/kafka.broker.keystore.jks b/secrets/kafka.broker.keystore.jks new file mode 100644 index 0000000000000000000000000000000000000000..2843ca350f6332600e8c9505571948a0b20d937d GIT binary patch literal 4887 zcmY+EWmFW5wuXTrhw>0ZcOx-$cMKslG*W}W&64I|wo1ZRF)35mes 
zpOUq->!y1g=mTp>8^={wuhgLj#~AjlCGRv1OhUY z)^zQTCnqE=<^xZq5GwDZbB5mnIvHc#?S3X*y;-ndw&1)rsaugx)mbBYVJu3_;1TLj zbD7nn`^Q=nVgd>@-AVFK2=mdLH3$!D^5$f0)koN77FnZnB`bUCAP3wwn_rx&pm@lScULFYGVZkQS{y7adQzLJCY-HchP*^g>J54 z`OHgIz*3;1;;J_AWcUYRWYo8c5h&3 zhGm@**-!~hjszh9+Wx)x1;)QA2Utsm2VS!YCTFCr=hMXYI0z3}`1P2gf?94@pcH#t zS6z7JKe>bUDZjfXFuZr4?kKgqhpPR>wdgS(Qen3*E!L+ZIL_8ofo|1rt}!#!pdh`L zrEYUXQ-+qLuMIUgy=uv_Eh&mNs84oay6riEIkvRU+i6OL8$k+kKGLNLmM&~pQxwxF ziM(Qz_GfCMyM?KnSbvPK=8IfS=`4hIS7sY8Un&IC7fU^pyzuQ;s+W;BG~QHzoa{UV zx2U{(k=|J=*W9=i=`ejt980MKx7Oz1s&0Dkrzq0#p^J^jkBFVUk3sMIzH~`ujf=D% z3x|da9Tk5h>MH$3E1$|oO5-Cx-D1MLLh%q)wM$gMiXlY^NUAM(dBJ7jTl@)>n^k}s zp~Qr6V1LFRunM_%f&qrrT`52XUP4`nF@Du{Q*t;vq}W4beIop*2C+U!tE>HG-o2fB z;LoTM>`wNW(1KwQWLFhos?lV(lO0|p>yymuS zfPX}{G5e52l7=<+?3551PBYR&yfL~<&f>BaTvw_M*cmX3Py7L-X85-% ziKs-#m`Si4uzaz+vD~qouzbO6{|Tu@sBkGvTs@swMWn<;CBb4+;!@&b;$S4H`oBvA z_~l4aiGPp?4mQ?5i~m0f_Wv_1{(l?RtxA|VVwtY=rSI0^))qJNLeBhu3>%3gIei}9 ztI7-_l62lkAPy6*RHydW2M!}k0s7be=OSsc8YAbS^oDa7i@$iCVh_yQu5l!v6B;no zR~b1p{pG@nq9J6>`tAXBWizg4+JY;XP-i*T^B|=?p-UUnjW;j zlE(^|1`m$5>ah0-#Yl~eOXOejmzI77{Av{#>`(re+RA9|Qxz=(Y2iYfm?}LI;ki$- zHJ`Obyua*1XwB69w#d~+r8b=3g|gDHM8nR(`xzCDC!<2zN!;>ikYys|Y zJI`DaVAnGP9i*DmyO7+l*O_TuHwi>oz8dyP^)ebkO-oO?NTno7;WYx@eC`}cLi-M| zmpCEXVz&(s`CZ-0T{vf@F6_Tt=1^aXkolY&;*k=0H_s8$O?k;yZ~F6BH^zU^vimn4 zh&M;9J05rl=@`A%>I6^RH}TzQj0TuoXB;mYgtMgZ87eYIuU?XS(CY;>c~XE%GNBfr z15QIlGmMgU1>!3Jp)@ecQ1u4w0-kU`ZgeXn6C$I_(HG?}Llg<>j6_juv``gz00a`X zYIpD1VuESu;84GVQ)VO5Juf{Zfp-af)A5Z@G5l*uL02%X7m5J?o;W$1qV{T)Oa^mh z>VuEyWKr46H+EFMQ!ij&oeJkY;77&QR^v)fG@k7yk;+XCVl!%J{9P2%G*~{xO6|&u(;3_$t!Hf zRL05)ipX@Zpv&HXkOAK;Z}xUikPK-OVG$L*ZOd-`Y;04iZo$l=*D{!1U{HCPt2=g2 zE%#n2!okbSqPUKcaYOk%%xTF4C+(V^Nw;IuH4us|v1q&P(p|Mox+a5KZKyqy`)L02 zZI;;kBB5(B0pKz_NcP*NFD|FKZ=%nLKdKq4t_8MwMbz!3lV7V>cc}HG=RMFwQRJ}s zxp!&av(i2^q_2UINJx=!8ys zvQ!s&H#dI44%#QXM~<%SrWCz59uvg8{bq$M=Z7g{VK4)JDH`Jv;FU!TOS-s0oGJ+zp+qY}-mGY2o6*9N$&?=e0g4vC) zKt1wZ)6BpDC&N7grS#F7D5Z}S{qvni*H6aei1lmEEldDou#BAXy;nN?GD(CsOcz&B z#UZlW=TIKnSONoHH@sbXjSs=al2s?asF~%E~9WkM=aes(&UNB&Ul< zsLZU~z$G`^9rCH~WUZ>(0?{~1H8* zGsG8R#<8|jDP^5`uIXYgFik^$y?_2im20Bo_h7@aJOMDA_lkc^NwF$)4V4v#`!4-N z#lEWBEU^4lu`-NOg0hQPD2=Z3`=JKe(EQBCkWliHwgC>z-?fwl!16 z=&Ie4XlAmHgWxSbDwLS=)%`n~18&7x*=nxN3v^i)!)ewT9B^N^Wzz(VHgBwUQFloX zF}Q!Z$1u-0;~|=2Ey26oa61koOG(7vbMNLnW% zdFj_tN1|{AaFofotlg68^a5pDH^XijmzMkSyPu%*iqKHy9rOKyf+rDtlrp!WN>{Jm zV`|ABj|pZ@O*uuZpwXasaCSm7H-F#Z{J`1@e?}EF;^jiYVe0Q`t;`@UyM@>tnx~&6 za}S^MU3h0Ia*6T26>RI%n1>f*GUTg`507T3tv989sn#o{a_eQc$Tc!(?namR^@Dx@ z2!%AFXe!BRpEtV14-K>Blt7X8G{+LvczM?E_Q{!3;@Pf=4+OR@5P`c!vWs@I&wD(d97MiIuXf2*BS9a3lM-T7iXFFz z>9`DDYUO8a>2!v`NqtB?NIoM3C5>rSiYwgz0N5os4*g#w`t!sI%W938^qNqvcVQt@ zMg@Yyf3^)T$JEgaZviKRk}jEW3>i+6ldtmvHhs0;4g(k!7<*^+NSh5N2UZ5z5;L7U ztlhr4W0!IS49(u}+?PDP)LtZLo?m@XB5(Xt^|@Vhrce**R928daW=y`*Ho z1~(Pa)wDQ*j~aS6gXR&vg8101C!NP6vpUL#-Jj$+rZjEl8K{3v7C0HsMsYACY>8RD z?ddRlWIN=C_vHX_OVyZYJ&%(P8$u(RLkjQhe+vU) zsu>N}ZhfEU&0I>9=^HRcq@Gs=*2`M3tY)KD2sR+|--o0nC9)Knmjs2`uVqB{Ngo0lUj@A=wCWwVzj5xN~^I3vARTU|F%Cd(i`$v5%&2 ze$@~d0wD(ew$Hx1(JR@Tl3;^0f-xz1OzTAgK>QtPxh8F9U}=L`H`X(FeS zEP-L-b!r-hFFAVt97bu z3A;ZL5E_OC5juNmf)3;-Q2<{`q$$Is{(6OrJsJ+qzNn1wY_eo=`> zMIqDW0D@GYYwzvcwsPZ%hVV4b(};KlLTz2%>eAtA_7;|~-Y%V`_m_HB4~|Xp6)gEp zbfW6D5_FU^UEN*pqDta6jpo9_(8xcO0#Y4hU_~$+m8Duzgg_YDCD0ic2ekOYDRj4*-(h%kZ#ga!#JhDe6@ z4FLxRpn?Q-FoFbe0s#Opf&^*?2`Yw2hW8Bt2LUiC1_~;MNQU&%Q znsig|?jKaCeBD!H2PY+L?4;j|Ax^{BC!E=Ue5tRxUVAorYnO9}r51hS0|LwfvZAF; zE}!9a-33`5FVti`X11c{KLC05g>DDa(CHUiZ2x~zG!b6`dY9-mLLlU|0>8yYe!qV; zB)lk}UN;N*D8#m9XWS$2c6#JmytuiA2N^yHOX5@esP`=>#&FWlknrEF$8!i~pIqj~ 
znCf`t6a8}N0S*Th2ZuCRB9IQ;NF7niHSHr3Qd6qPOEsg1Tm{P=r4Cyf*)ncNTpT*! z_T96-^%OSrw4h_Gzr0ne8VC=eqaVfnC@2gpzT~L_)WY>OQ((w>6hs4CX&4lnI<7Nu zrVgueaggPm9tI2eke}tiiqsdf&Xm9J-#|m_INxHMicJxVEvNGaN{P4+pmz0iE(UW+ z+?l5~NXoW!KTjQZ#yN6hv)I%dW{f$gT>rn_+2J~u6*_S= zI(0uYE7njx#IwmdlPhFP4PuZTM`)c)giqb!0E9Y@bK7f5oG+a)yJ1Vedu%tR3It#R z}G2GTa1VLwNlVrHc~Eb7|G=};tQJtM54jCUupX|6twR* z-ozo>&F+%5y^J}ftXd2iUAbGf%3mv2dGBJ5YV)K;F!{^{vB6IBe+Tky$12zPT~i7E zw!(i%l({vo^O)>9t`Q|K4l>6-A@8TciDsU4P*swbztcaK!jjE}de;BDK(A_cigSN9PM4uD>Sh= zTBUI%yWe>S5!IOHEkeRLiC?%f!dQZyD2Xhtu^(#&r~6ujcj|_1b0ImV<~zge3L2x% zcoJkGLJ~l4DyK_Dd<Gf^&aUA=+uNV~{4{*J&3CxUraiv>6W(J0Irf9Fol{~0 z#uk?hn@y5BG=>RcScozq%RS}V*hDk9bdGz)e-tn`SNT`-` zSNu4ZXJlzAyeGroMohv_=^>`?oyJp(F^y)K+FXHXn|ayUTho33lo+l9Tj1pCWi~1< ztu4mODl>x-_K9!UEdA7pA^*XIahV}GASBm1cF=7eHRUIT4Wx5g=?tTHzTnivzCXx0s{etposQFNdN!< literal 0 HcmV?d00001 diff --git a/secrets/snakeoil-ca-1.crt b/secrets/snakeoil-ca-1.crt new file mode 100644 index 00000000..4d7f6514 --- /dev/null +++ b/secrets/snakeoil-ca-1.crt @@ -0,0 +1,23 @@ +-----BEGIN CERTIFICATE----- +MIIDwTCCAqmgAwIBAgIUFMVvwFTLh62igH3ooxloWmTXnAUwDQYJKoZIhvcNAQEL +BQAwcDEeMBwGA1UEAwwVY2ExLnRlc3QuY29uZmx1ZW50LmlvMQ0wCwYDVQQLDARU +RVNUMRIwEAYDVQQKDAlDT05GTFVFTlQxETAPBgNVBAcMCFBhbG9BbHRvMQswCQYD +VQQIDAJDYTELMAkGA1UEBhMCVVMwHhcNMjQxMTI2MDY0OTAxWhcNMjUxMTI2MDY0 +OTAxWjBwMR4wHAYDVQQDDBVjYTEudGVzdC5jb25mbHVlbnQuaW8xDTALBgNVBAsM +BFRFU1QxEjAQBgNVBAoMCUNPTkZMVUVOVDERMA8GA1UEBwwIUGFsb0FsdG8xCzAJ +BgNVBAgMAkNhMQswCQYDVQQGEwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC +AQoCggEBAMI8ZK9qssuzYAGx5vyZ7zJgA82gkDqW/QTf5Jp6GZy6U1yOChz+RJGZ +le3U5CcL1qDmm6rA37uQgMo8skbrovk4PnOdGNsDktJ16q5HFno2C0HQ6lZrhpz1 +oKMVPiY73Bkz7U8M5qvLC5F73VdG+yQCCMQODe0aTjHnIVgiDgkCKuSDgHDc/ke+ +k5FgRF99rZaZd9/Fv3QZakc7pLQhHbsJsmR4+4dW5MpdbWJTMpc7LSzISZ4Ojdbn +K1r6xeId4lbwajw/9y5cCZ4g7LmJLcW286KbKLQHopP51Hf6A0YVQ5ZZe38ZIi3O +5Wzvuw4HYKjrR9OEwc4KZRwL9RWFFhUCAwEAAaNTMFEwHQYDVR0OBBYEFJuW8Iar +qKvy4KN+X3RqYplG3fMeMB8GA1UdIwQYMBaAFJuW8IarqKvy4KN+X3RqYplG3fMe +MA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBALMLBbz2qNq56h/L +pCFvN/uld3pqhRZOsNTcB+VwKm1NAuzHOUMpZAjki8/3Czwwv69tjRXdo1zX/F5/ +1CQooue85Ax+HmP8y2uOAIKnFLI1jQOFimAJWwBUrQYg8q6JnQXW3P8Cengses6v ++2F28MqzZ/Q8oAYwYnRagX3+iCL0dTM9iMTWS5nioiWSk5YRjDsP9T4tu4n4JmkU +z3zziHtL22Jh4O1aICRUXw1KX/TEL1RDNhXZLpHIZ4fwGPUv8OAFjufifXyiihNj +nLX6TkInwpz0HXPyLHtLt6oCn2hcRNZ5K/sKBt4Vvo4bTNtW+IiwnEGDzWNn0bdb +0hHH8nU= +-----END CERTIFICATE----- diff --git a/secrets/snakeoil-ca-1.key b/secrets/snakeoil-ca-1.key new file mode 100644 index 00000000..36be7834 --- /dev/null +++ b/secrets/snakeoil-ca-1.key @@ -0,0 +1,30 @@ +-----BEGIN ENCRYPTED PRIVATE KEY----- +MIIFJDBWBgkqhkiG9w0BBQ0wSTAxBgkqhkiG9w0BBQwwJAQQ4im2+ohZPjY2TpLh +FlqXwgICCAAwDAYIKoZIhvcNAgkFADAUBggqhkiG9w0DBwQI11LdZVnPnNIEggTI +YxwcNTKL9Chn3Gk6TDcfmZUXS33GO+XEfus3Xzcn0T31ZIGbmVNG28rqAVv12Gzn +kKfZ+AFMasrgO1pQ18TJYsDKiQUFCb9hpqjDIEaYFho/9qSN+97GdCjPqNpa/Wpy +AfxSChr+hODrrhCxFHcj/BISYAgHkCXACQcbsSsTa/lUiWhCDcAhc/AenlKbT+Bq +mrKn8SflUqCNMpBy+cBBcxDIi1v260m/s8JBBRsa6mrE8Su+/jAzzh/mxw99obVs +X7S5jt0ro8stkIPi/Si/ny2s2AHud5k/YDEOAcDPv8OXYm9b3OwrtXdaaVTTiQKo +Exd2HkmfnUpE7H9gXEIvGrkdvHvad70jsFO9UFSkH2ciJAcF8HljeqylaIw0MWyG +kyQ1okq+hK1nFoZQpCpMYo206N2oWRQ8pHz/nfFiobuSKnm2WodHn6o6l5ijsMon +328ghO4nmMF1iXvA0KtwR2+9Cud3pUWJMgSTOoIOQQhohxbE7CWA+ntnfWF2WKK4 +7uzZVFUlPNt20tZ2LNsOzQEyianJl6hjqnDv4EWFlzcR13wU/vahC/sbmllSF28A +FwR7O6Nf4dkUmEzlvZG4CsM2oi8ps3zXui61g6oEZSMjMItny3ekBq6VOm41biWW +Sf75JRqYxF21+0xzIv5+5do24mXAKmztLJwihz+2pb1Ddffow/UQxMpS9LasbIle +vmwzz40f2Am+gMgXcHzcIZ303F5Pc3On5cNnGJsRSQjxKyphRNLt7F8Tx0gQYiMK 
+CUvoQh8O0jXwNw3Uzxar4Ev/57bE9AAmybnmUsjLGQBJyrj4U51p5vSwZes9XYEE +E3Ax1lKTzX/25eJIT2d20VcvtStv34uxz1NFtNX/G3x6fvpGmMCgMgL3SCNAnkou +aNIcsAsifAkUm0VqiNVeULq0EeFMMS39OLpdo8e7cPbcgcgP0GCUk4E9Dh5XVn9L +dFMKa2KyDSlQHTQATX/pB5VD+jOFnCRA+jTTanUP9ASAWxErp0zvNvvWymdPxFGC +SzSUgX5iQ/XfzgN3K0LPFfVVJ+t0LM2X/hX5rH1Li4wNDWPqFBaGlseCaHwbN4n9 +L4e5Ax2wB1Ra41ajat9be0qI7tkVBnnz8if1KeVinr8k0tDE+i9emtrHAhfuaeVM +DJ8q2gmmkZpIEID1IGr2ZyNX7X1rJv3wzvSsTG+CY3EM/b8j9Jzpi8ItVFIhwuLU +3u0aU6569n4p9CL24y1rLhiXSvJccBYB9RXsmbA5HsZFYkZjBbwe89RyTJHAF6LK +EaVgBcvlNIhBXN4KGAd0pjN+517MpDMKQ/XQw6aSk0NtPmteDFzPSS4K+lLHwwJJ +V4DTlAvznaiYoeqojj6PZdYf028P2exziV1h8WWQe3UlmA15eZjlPWWF0bE2xoMn +Fpyx65bTDMdqSApKsVKiYgx03KtuB+R0fiGseCKQ7AldvZjHgFQ3VHxVXfnLx7Wk +vKooirTmA+ksTyYLsdHxVAnpDTn2xC8xGdFogxnplCp9DCavvk8mdmqh5D3Rki9Y +YPUVodr8O7KcgcILF5mearQS3q3bfT8ZCdU8msFmSGT3nvGAou18HFm4M5sj543U +g5MLl1hlPUaYsghoDf7Dl4fstljakDP1 +-----END ENCRYPTED PRIVATE KEY----- diff --git a/secrets/snakeoil-ca-1.srl b/secrets/snakeoil-ca-1.srl new file mode 100644 index 00000000..05530c14 --- /dev/null +++ b/secrets/snakeoil-ca-1.srl @@ -0,0 +1 @@ +0FE87DAB3A7CB3334609E1EA80EA4FC0BEA5D303 diff --git a/test/com/oauthbearer/OAuthAuthenticateLoginCallbackHandler.java b/test/com/oauthbearer/OAuthAuthenticateLoginCallbackHandler.java new file mode 100644 index 00000000..b8c8ea2b --- /dev/null +++ b/test/com/oauthbearer/OAuthAuthenticateLoginCallbackHandler.java @@ -0,0 +1,68 @@ +package oauthbearer; + +import org.apache.kafka.common.security.auth.AuthenticateCallbackHandler; +import org.apache.kafka.common.security.oauthbearer.OAuthBearerTokenCallback; +import org.apache.kafka.common.security.oauthbearer.OAuthBearerToken; +import javax.security.auth.callback.Callback; +import javax.security.auth.callback.UnsupportedCallbackException; +import javax.security.auth.login.AppConfigurationEntry; +import java.io.IOException; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; + +public class OAuthAuthenticateLoginCallbackHandler implements AuthenticateCallbackHandler { + private Map moduleOptions; + + @Override + public void configure(Map configs, String mechanism, List jaasConfigEntries) { + moduleOptions = (Map) jaasConfigEntries.get(0).getOptions(); + } + + @Override + public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException { + for (Callback callback : callbacks) { + if (callback instanceof OAuthBearerTokenCallback) { + handleCallback((OAuthBearerTokenCallback) callback); + } else { + throw new UnsupportedCallbackException(callback); + } + } + } + + private void handleCallback(OAuthBearerTokenCallback callback) { + String username = moduleOptions.get("username"); + // Create a simple token - in production this would involve actual OAuth flow + OAuthBearerToken token = new OAuthBearerToken() { + @Override + public String value() { + return "dummy-token"; + } + + @Override + public Long startTimeMs() { + return System.currentTimeMillis(); + } + + @Override + public long lifetimeMs() { + return 3600000L; + } + + @Override + public String principalName() { + return username; + } + + @Override + public Set scope() { + return Collections.emptySet(); + } + }; + callback.token(token); + } + + @Override + public void close() {} +} \ No newline at end of file diff --git a/test/ziggurat/fixtures.clj b/test/ziggurat/fixtures.clj index 36f4bdf6..28b9136a 100644 --- a/test/ziggurat/fixtures.clj +++ b/test/ziggurat/fixtures.clj @@ -29,7 +29,7 @@ (def ^:private bootstrap-servers (if (= 
(System/getenv "TESTING_TYPE") "local") "localhost:9092" - "localhost:9091,localhost:9092,localhost:9093")) + "localhost:9092,localhost:9093,localhost:9094")) (defn- get-default-or-cluster-config [m] diff --git a/test/ziggurat/streams_test.clj b/test/ziggurat/streams_test.clj index 74dd3a5d..38711c03 100644 --- a/test/ziggurat/streams_test.clj +++ b/test/ziggurat/streams_test.clj @@ -487,3 +487,35 @@ (testing "should return REPLACE_THREAD" (let [r (handle-uncaught-exception :replace-thread t)] (is (= r StreamsUncaughtExceptionHandler$StreamThreadExceptionResponse/REPLACE_THREAD)))))) + +(deftest start-streams-test-with-sasl-ssl-config + (testing "streams should start successfully with valid SASL/SSL configs" + (let [message-received-count (atom 0) + mapped-fn (get-mapped-fn message-received-count) + times 6 + kvs (repeat times message-key-value) + handler-fn (default-middleware/protobuf->hash mapped-fn proto-class :default) + orig-config (ziggurat-config) + certs-dir (.getAbsolutePath (clojure.java.io/file "secrets"))] + (with-redefs [ziggurat.config/ssl-config (fn [] {:enabled true + :protocol "SASL_SSL" + :mechanism "PLAIN" + :username "client" + :password "client-secret" + :ssl-truststore-location (str certs-dir "/kafka.broker.truststore.jks") + :ssl-truststore-password "confluent"})] + (let [streams (start-streams {:default {:handler-fn handler-fn}} + (-> orig-config + (assoc-in [:stream-router :default :application-id] (rand-application-id)) + (assoc-in [:stream-router :default :bootstrap-servers] "localhost:9095") + (assoc-in [:stream-router :default :changelog-topic-replication-factor] changelog-topic-replication-factor)))] + (Thread/sleep 10000) + (let [producer-props (doto (props) + (.put ProducerConfig/BOOTSTRAP_SERVERS_CONFIG "localhost:9094"))] + (IntegrationTestUtils/produceKeyValuesSynchronously (get-in (ziggurat-config) [:stream-router :default :origin-topic]) + kvs + producer-props + (MockTime.))) + (Thread/sleep 10000) + (stop-streams streams) + (is (= times @message-received-count))))))) diff --git a/zookeeper_server_jaas.conf b/zookeeper_server_jaas.conf new file mode 100644 index 00000000..9ad7a531 --- /dev/null +++ b/zookeeper_server_jaas.conf @@ -0,0 +1,4 @@ +Server { + org.apache.zookeeper.server.auth.DigestLoginModule required + user_admin="password"; +}; From 9ff0737bf5e2d43dc83c0b08a238873fb22a554a Mon Sep 17 00:00:00 2001 From: Uddeshya Singh Date: Tue, 26 Nov 2024 12:59:00 +0530 Subject: [PATCH 17/26] WIP: sertting SASL_PLAINTEXT --- docker-compose-cluster.yml | 17 +++-------------- 1 file changed, 3 insertions(+), 14 deletions(-) diff --git a/docker-compose-cluster.yml b/docker-compose-cluster.yml index 3d50a12b..ec2fd03b 100644 --- a/docker-compose-cluster.yml +++ b/docker-compose-cluster.yml @@ -35,8 +35,8 @@ services: - "9094:9094" - "9095:9095" environment: - KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka1:19094,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9094,SASL_SSL://${DOCKER_HOST_IP:-127.0.0.1}:9095 - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT,SASL_SSL:SASL_SSL + KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka1:19094,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9094,SASL_PLAINTEXT://${DOCKER_HOST_IP:-127.0.0.1}:9095 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT,SASL_PLAINTEXT:SASL_PLAINTEXT KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL KAFKA_ZOOKEEPER_CONNECT: 
"zookeeper:2181" KAFKA_BROKER_ID: 1 @@ -47,22 +47,11 @@ services: KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.auth.SimpleAclAuthorizer KAFKA_SUPER_USERS: User:admin KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "false" - KAFKA_SECURITY_PROTOCOL: SASL_SSL + KAFKA_SECURITY_PROTOCOL: SASL_PLAINTEXT KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/secrets/kafka_server_jaas.conf" - KAFKA_SSL_KEYSTORE_FILENAME: kafka.broker.keystore.jks - KAFKA_SSL_KEYSTORE_CREDENTIALS: broker_keystore_creds - KAFKA_SSL_KEY_CREDENTIALS: broker_sslkey_creds - KAFKA_SSL_TRUSTSTORE_FILENAME: kafka.broker.truststore.jks - KAFKA_SSL_TRUSTSTORE_CREDENTIALS: broker_truststore_creds - KAFKA_SSL_CLIENT_AUTH: "required" volumes: - /tmp/ziggurat_kafka_cluster_data/kafka1/data:/var/lib/kafka/data - ./kafka_server_jaas.conf:/etc/kafka/secrets/kafka_server_jaas.conf - - ./secrets/kafka.broker.keystore.jks:/etc/kafka/secrets/kafka.broker.keystore.jks - - ./secrets/kafka.broker.truststore.jks:/etc/kafka/secrets/kafka.broker.truststore.jks - - ./secrets/broker_keystore_creds:/etc/kafka/secrets/broker_keystore_creds - - ./secrets/broker_sslkey_creds:/etc/kafka/secrets/broker_sslkey_creds - - ./secrets/broker_truststore_creds:/etc/kafka/secrets/broker_truststore_creds depends_on: - zookeeper From 10f50cfcc31fd74587e5354f335069b70e293339 Mon Sep 17 00:00:00 2001 From: Uddeshya Singh Date: Wed, 27 Nov 2024 18:18:13 +0530 Subject: [PATCH 18/26] Working integration test with acl --- Makefile | 115 +++++++++++++++++----------- config-admin.properties | 5 ++ docker-compose-cluster.yml | 56 ++++++++++---- kafka_server_jaas.conf | 10 ++- scripts/create-certs.sh | 82 -------------------- secrets/broker_keystore_creds | 1 - secrets/broker_sslkey_creds | 1 - secrets/broker_truststore_creds | 1 - secrets/kafka.broker.keystore.jks | Bin 4887 -> 0 bytes secrets/kafka.broker.truststore.jks | Bin 1258 -> 0 bytes secrets/snakeoil-ca-1.crt | 23 ------ secrets/snakeoil-ca-1.key | 30 -------- secrets/snakeoil-ca-1.srl | 1 - src/ziggurat/config.clj | 3 +- src/ziggurat/streams.clj | 6 +- test/ziggurat/streams_test.clj | 23 +++--- zookeeper_server_jaas.conf | 10 ++- 17 files changed, 152 insertions(+), 215 deletions(-) create mode 100644 config-admin.properties delete mode 100755 scripts/create-certs.sh delete mode 100644 secrets/broker_keystore_creds delete mode 100644 secrets/broker_sslkey_creds delete mode 100644 secrets/broker_truststore_creds delete mode 100644 secrets/kafka.broker.keystore.jks delete mode 100644 secrets/kafka.broker.truststore.jks delete mode 100644 secrets/snakeoil-ca-1.crt delete mode 100644 secrets/snakeoil-ca-1.key delete mode 100644 secrets/snakeoil-ca-1.srl diff --git a/Makefile b/Makefile index 7be444cb..d5c65671 100644 --- a/Makefile +++ b/Makefile @@ -1,43 +1,72 @@ -.PHONY: all -all: test - -topic="topic" -another_test_topic="another-test-topic" - -setup: - docker-compose down - lein deps - docker-compose up -d - sleep 10 - docker exec ziggurat_kafka /opt/bitnami/kafka/bin/kafka-topics.sh --create --topic $(topic) --partitions 3 --replication-factor 1 --zookeeper ziggurat_zookeeper - docker exec ziggurat_kafka /opt/bitnami/kafka/bin/kafka-topics.sh --create --topic $(another_test_topic) --partitions 3 --replication-factor 1 --zookeeper ziggurat_zookeeper - -test: setup - TESTING_TYPE=local lein test - docker-compose down - -setup-cluster: - rm -rf /tmp/ziggurat_kafka_cluster_data - rm -rf secrets - docker-compose -f docker-compose-cluster.yml -p ziggurat down --remove-orphans - lein deps - chmod +x 
scripts/create-certs.sh - ./scripts/create-certs.sh - docker-compose -f docker-compose-cluster.yml -p ziggurat up -d - sleep 60 - docker exec ziggurat-kafka1-1 kafka-topics --create --topic $(topic) --partitions 3 --replication-factor 3 --if-not-exists --zookeeper ziggurat-zookeeper-1 - docker exec ziggurat-kafka1-1 kafka-topics --create --topic $(another_test_topic) --partitions 3 --replication-factor 3 --if-not-exists --zookeeper ziggurat-zookeeper-1 - -test-cluster: setup-cluster - TESTING_TYPE=cluster lein test - docker-compose -f docker-compose-cluster.yml down - rm -rf /tmp/ziggurat_kafka_cluster_data - rm -rf secrets - -coverage: setup - lein code-coverage - docker-compose down - -proto: - protoc -I=resources --java_out=test/ resources/proto/example.proto - protoc -I=resources --java_out=test/ resources/proto/person.proto +KAFKA_TOPICS = topic1 topic2 +KAFKA_BROKERS = kafka1:9095 kafka2:9096 kafka3:9097 +ADMIN_CONFIG = /etc/kafka/secrets/config-admin.properties +KAFKA_CONTAINER = ziggurat-kafka1-1 + +.PHONY: setup-cluster create-scram-credentials create-topics setup-acls down up clean restart + +# Main target to setup the entire cluster +setup-cluster: down up wait-for-kafka create-scram-credentials create-topics setup-acls + +# Bring down all containers and clean volumes +down: + @echo "Bringing down all containers..." + docker-compose -f docker-compose-cluster.yml down -v + +# Start all containers +up: + @echo "Starting all containers..." + docker-compose -f docker-compose-cluster.yml up -d + +# Wait for Kafka to be ready +wait-for-kafka: + @echo "Waiting for Kafka to be ready..." + @sleep 30 + +# Restart everything +restart: down up wait-for-kafka + +# Create SCRAM credentials for admin user +create-scram-credentials: + @echo "Creating SCRAM credentials for admin user..." 
+ @docker exec $(KAFKA_CONTAINER) kafka-configs --bootstrap-server kafka1:9095 \ + --alter \ + --add-config 'SCRAM-SHA-256=[password=admin]' \ + --entity-type users \ + --entity-name admin + +# Create all required topics +create-topics: + @for topic in $(KAFKA_TOPICS); do \ + echo "Creating topic: $$topic"; \ + docker exec $(KAFKA_CONTAINER) kafka-topics --bootstrap-server kafka1:9095 \ + --create \ + --if-not-exists \ + --topic $$topic \ + --partitions 3 \ + --replication-factor 3; \ + done + +# Setup ACLs for admin user +setup-acls: + @for topic in $(KAFKA_TOPICS); do \ + echo "Setting up ACLs for topic: $$topic"; \ + docker exec $(KAFKA_CONTAINER) kafka-acls --bootstrap-server kafka1:9095 \ + --add \ + --allow-principal User:admin \ + --operation All \ + --topic $$topic; \ + done + +# Clean up topics (can be used during development) +clean-topics: + @for topic in $(KAFKA_TOPICS); do \ + echo "Deleting topic: $$topic"; \ + docker exec $(KAFKA_CONTAINER) kafka-topics --bootstrap-server kafka1:9095 \ + --delete \ + --topic $$topic; \ + done + +# Show logs +logs: + docker-compose -f docker-compose-cluster.yml logs -f diff --git a/config-admin.properties b/config-admin.properties new file mode 100644 index 00000000..362d68ae --- /dev/null +++ b/config-admin.properties @@ -0,0 +1,5 @@ +security.protocol=SASL_PLAINTEXT +sasl.mechanism=SCRAM-SHA-256 +sasl.jaas.config=org.apache.kafka.common.security.scram.ScramLoginModule required \ + username="admin" \ + password="admin"; diff --git a/docker-compose-cluster.yml b/docker-compose-cluster.yml index ec2fd03b..005f6ddd 100644 --- a/docker-compose-cluster.yml +++ b/docker-compose-cluster.yml @@ -15,13 +15,11 @@ services: - "2181:2181" environment: ZOOKEEPER_CLIENT_PORT: 2181 - KAFKA_OPTS: -Djava.security.auth.login.config=/etc/kafka/zookeeper_server_jaas.conf + KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/zookeeper_server_jaas.conf -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider - -Dzookeeper.allowSaslFailedClients=false - -Dzookeeper.requireClientAuthScheme=sasl + -Dzookeeper.allowSaslFailedClients=true + -Dzookeeper.requireClientAuthScheme=sasl" volumes: - - /tmp/ziggurat_kafka_cluster_data/zookeeper/data:/data - - /tmp/ziggurat_kafka_cluster_data/zookeeper/datalog:/datalog - ./zookeeper_server_jaas.conf:/etc/kafka/zookeeper_server_jaas.conf kafka1: @@ -39,19 +37,25 @@ services: KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT,SASL_PLAINTEXT:SASL_PLAINTEXT KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181" + KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 60000 KAFKA_BROKER_ID: 1 KAFKA_DEFAULT_REPLICATION_FACTOR: 3 KAFKA_NUM_PARTITIONS: 3 - KAFKA_SASL_ENABLED_MECHANISMS: PLAIN - KAFKA_SASL_JAAS_CONFIG: "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"admin\" password=\"admin-secret\" user_admin=\"admin-secret\" user_client=\"client-secret\";" + KAFKA_SASL_ENABLED_MECHANISMS: SCRAM-SHA-256 + KAFKA_SASL_JAAS_CONFIG: "org.apache.kafka.common.security.scram.ScramLoginModule required \ + username=\"client\" \ + password=\"client-secret\";" KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.auth.SimpleAclAuthorizer - KAFKA_SUPER_USERS: User:admin + KAFKA_SUPER_USERS: User:ANONYMOUS;User:admin KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "false" KAFKA_SECURITY_PROTOCOL: SASL_PLAINTEXT - KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/secrets/kafka_server_jaas.conf" + KAFKA_OPTS: 
"-Djava.security.auth.login.config=/etc/kafka/secrets/kafka_server_jaas.conf -Dzookeeper.sasl.client=true -Dzookeeper.sasl.clientconfig=Client" + KAFKA_ZOOKEEPER_SET_ACL: "true" + KAFKA_ZOOKEEPER_SASL_ENABLED: "true" volumes: - /tmp/ziggurat_kafka_cluster_data/kafka1/data:/var/lib/kafka/data - ./kafka_server_jaas.conf:/etc/kafka/secrets/kafka_server_jaas.conf + - ./config-admin.properties:/etc/kafka/secrets/config-admin.properties depends_on: - zookeeper @@ -64,16 +68,29 @@ services: hostname: kafka2 ports: - "9092:9092" + - "9096:9096" environment: - KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka2:19092,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9092 - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT + KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka2:19092,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9092,SASL_PLAINTEXT://${DOCKER_HOST_IP:-127.0.0.1}:9096 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT,SASL_PLAINTEXT:SASL_PLAINTEXT KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181" + KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 60000 KAFKA_BROKER_ID: 2 KAFKA_DEFAULT_REPLICATION_FACTOR: 3 KAFKA_NUM_PARTITIONS: 3 + KAFKA_SASL_ENABLED_MECHANISMS: SCRAM-SHA-256 + KAFKA_SASL_JAAS_CONFIG: "org.apache.kafka.common.security.scram.ScramLoginModule required \ + username=\"client\" \ + password=\"client-secret\";" + KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.auth.SimpleAclAuthorizer + KAFKA_SUPER_USERS: User:ANONYMOUS;User:admin + KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "false" + KAFKA_SECURITY_PROTOCOL: SASL_PLAINTEXT + KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/secrets/kafka_server_jaas.conf" volumes: - /tmp/ziggurat_kafka_cluster_data/kafka2/data:/var/lib/kafka/data + - ./kafka_server_jaas.conf:/etc/kafka/secrets/kafka_server_jaas.conf + - ./config-admin.properties:/etc/kafka/secrets/config-admin.properties depends_on: - zookeeper @@ -86,15 +103,28 @@ services: hostname: kafka3 ports: - "9093:9093" + - "9097:9097" environment: - KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka3:19093,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9093 - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT + KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka3:19093,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9093,SASL_PLAINTEXT://${DOCKER_HOST_IP:-127.0.0.1}:9097 + KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT,SASL_PLAINTEXT:SASL_PLAINTEXT KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181" + KAFKA_ZOOKEEPER_CONNECTION_TIMEOUT_MS: 60000 KAFKA_BROKER_ID: 3 KAFKA_DEFAULT_REPLICATION_FACTOR: 3 KAFKA_NUM_PARTITIONS: 3 + KAFKA_SASL_ENABLED_MECHANISMS: SCRAM-SHA-256 + KAFKA_SASL_JAAS_CONFIG: "org.apache.kafka.common.security.scram.ScramLoginModule required \ + username=\"client\" \ + password=\"client-secret\";" + KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.auth.SimpleAclAuthorizer + KAFKA_SUPER_USERS: User:ANONYMOUS;User:admin + KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "false" + KAFKA_SECURITY_PROTOCOL: SASL_PLAINTEXT + KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/secrets/kafka_server_jaas.conf" volumes: - /tmp/ziggurat_kafka_cluster_data/kafka3/data:/var/lib/kafka/data + - 
./kafka_server_jaas.conf:/etc/kafka/secrets/kafka_server_jaas.conf + - ./config-admin.properties:/etc/kafka/secrets/config-admin.properties depends_on: - zookeeper diff --git a/kafka_server_jaas.conf b/kafka_server_jaas.conf index 472ca9a3..51ea18d6 100644 --- a/kafka_server_jaas.conf +++ b/kafka_server_jaas.conf @@ -1,13 +1,15 @@ KafkaServer { org.apache.kafka.common.security.scram.ScramLoginModule required - username="broker" - password="password"; + username="admin" + password="admin"; }; + Client { - org.apache.kafka.common.security.plain.PlainLoginModule required + org.apache.zookeeper.server.auth.DigestLoginModule required username="admin" - password="password"; + password="admin"; }; + KafkaClient { org.apache.kafka.common.security.scram.ScramLoginModule required username="client" diff --git a/scripts/create-certs.sh b/scripts/create-certs.sh deleted file mode 100755 index f0f03986..00000000 --- a/scripts/create-certs.sh +++ /dev/null @@ -1,82 +0,0 @@ -#!/bin/bash -set -e - -# Create directories for certs -mkdir -p secrets -cd secrets - -# Cleanup files -rm -f *.crt *.csr *_creds *.jks *.srl *.key *.pem *.der *.p12 - -# Generate CA key -openssl req -new -x509 -keyout snakeoil-ca-1.key -out snakeoil-ca-1.crt -days 365 \ - -subj '/CN=ca1.test.confluent.io/OU=TEST/O=CONFLUENT/L=PaloAlto/ST=Ca/C=US' \ - -passin pass:confluent -passout pass:confluent - -# Create broker keystore -keytool -genkey -noprompt \ - -alias broker \ - -dname "CN=broker,OU=TEST,O=CONFLUENT,L=PaloAlto,S=Ca,C=US" \ - -ext "SAN=dns:broker,dns:localhost" \ - -keystore kafka.broker.keystore.jks \ - -keyalg RSA \ - -storepass confluent \ - -keypass confluent \ - -storetype pkcs12 - -# Create the certificate signing request (CSR) -keytool -keystore kafka.broker.keystore.jks -alias broker \ - -certreq -file broker.csr -storepass confluent -keypass confluent \ - -ext "SAN=dns:broker,dns:localhost" - -# Create extfile for SAN -cat << EOF > extfile -[req] -distinguished_name = req_distinguished_name -x509_extensions = v3_req -prompt = no -[req_distinguished_name] -CN = broker -[v3_req] -subjectAltName = @alt_names -[alt_names] -DNS.1 = broker -DNS.2 = localhost -EOF - -# Sign the host certificate with the certificate authority (CA) -openssl x509 -req -CA snakeoil-ca-1.crt -CAkey snakeoil-ca-1.key \ - -in broker.csr -out broker-ca1-signed.crt -days 9999 -CAcreateserial \ - -passin pass:confluent -extensions v3_req -extfile extfile - -# Sign and import the CA cert into the keystore -keytool -noprompt -keystore kafka.broker.keystore.jks -alias CARoot \ - -import -file snakeoil-ca-1.crt -storepass confluent -keypass confluent - -# Sign and import the host certificate into the keystore -keytool -noprompt -keystore kafka.broker.keystore.jks -alias broker \ - -import -file broker-ca1-signed.crt -storepass confluent -keypass confluent \ - -ext "SAN=dns:broker,dns:localhost" - -# Create truststore and import the CA cert -keytool -noprompt -keystore kafka.broker.truststore.jks -alias CARoot \ - -import -file snakeoil-ca-1.crt -storepass confluent -keypass confluent - -# Save creds -echo "confluent" > broker_sslkey_creds -echo "confluent" > broker_keystore_creds -echo "confluent" > broker_truststore_creds - -# Set appropriate permissions -chmod 644 kafka.broker.keystore.jks kafka.broker.truststore.jks \ - broker_sslkey_creds broker_keystore_creds broker_truststore_creds - -# Clean up intermediate files -rm -f broker.csr broker-ca1-signed.crt extfile - -# Verify the keystore and truststore content -echo "Verifying keystore content:" 
-keytool -list -keystore kafka.broker.keystore.jks -storepass confluent -echo "Verifying truststore content:" -keytool -list -keystore kafka.broker.truststore.jks -storepass confluent - diff --git a/secrets/broker_keystore_creds b/secrets/broker_keystore_creds deleted file mode 100644 index 23212273..00000000 --- a/secrets/broker_keystore_creds +++ /dev/null @@ -1 +0,0 @@ -confluent diff --git a/secrets/broker_sslkey_creds b/secrets/broker_sslkey_creds deleted file mode 100644 index 23212273..00000000 --- a/secrets/broker_sslkey_creds +++ /dev/null @@ -1 +0,0 @@ -confluent diff --git a/secrets/broker_truststore_creds b/secrets/broker_truststore_creds deleted file mode 100644 index 23212273..00000000 --- a/secrets/broker_truststore_creds +++ /dev/null @@ -1 +0,0 @@ -confluent diff --git a/secrets/kafka.broker.keystore.jks b/secrets/kafka.broker.keystore.jks deleted file mode 100644 index 2843ca350f6332600e8c9505571948a0b20d937d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 4887 zcmY+EWmFW5wuXTrhw>0ZcOx-$cMKslG*W}W&64I|wo1ZRF)35mes zpOUq->!y1g=mTp>8^={wuhgLj#~AjlCGRv1OhUY z)^zQTCnqE=<^xZq5GwDZbB5mnIvHc#?S3X*y;-ndw&1)rsaugx)mbBYVJu3_;1TLj zbD7nn`^Q=nVgd>@-AVFK2=mdLH3$!D^5$f0)koN77FnZnB`bUCAP3wwn_rx&pm@lScULFYGVZkQS{y7adQzLJCY-HchP*^g>J54 z`OHgIz*3;1;;J_AWcUYRWYo8c5h&3 zhGm@**-!~hjszh9+Wx)x1;)QA2Utsm2VS!YCTFCr=hMXYI0z3}`1P2gf?94@pcH#t zS6z7JKe>bUDZjfXFuZr4?kKgqhpPR>wdgS(Qen3*E!L+ZIL_8ofo|1rt}!#!pdh`L zrEYUXQ-+qLuMIUgy=uv_Eh&mNs84oay6riEIkvRU+i6OL8$k+kKGLNLmM&~pQxwxF ziM(Qz_GfCMyM?KnSbvPK=8IfS=`4hIS7sY8Un&IC7fU^pyzuQ;s+W;BG~QHzoa{UV zx2U{(k=|J=*W9=i=`ejt980MKx7Oz1s&0Dkrzq0#p^J^jkBFVUk3sMIzH~`ujf=D% z3x|da9Tk5h>MH$3E1$|oO5-Cx-D1MLLh%q)wM$gMiXlY^NUAM(dBJ7jTl@)>n^k}s zp~Qr6V1LFRunM_%f&qrrT`52XUP4`nF@Du{Q*t;vq}W4beIop*2C+U!tE>HG-o2fB z;LoTM>`wNW(1KwQWLFhos?lV(lO0|p>yymuS zfPX}{G5e52l7=<+?3551PBYR&yfL~<&f>BaTvw_M*cmX3Py7L-X85-% ziKs-#m`Si4uzaz+vD~qouzbO6{|Tu@sBkGvTs@swMWn<;CBb4+;!@&b;$S4H`oBvA z_~l4aiGPp?4mQ?5i~m0f_Wv_1{(l?RtxA|VVwtY=rSI0^))qJNLeBhu3>%3gIei}9 ztI7-_l62lkAPy6*RHydW2M!}k0s7be=OSsc8YAbS^oDa7i@$iCVh_yQu5l!v6B;no zR~b1p{pG@nq9J6>`tAXBWizg4+JY;XP-i*T^B|=?p-UUnjW;j zlE(^|1`m$5>ah0-#Yl~eOXOejmzI77{Av{#>`(re+RA9|Qxz=(Y2iYfm?}LI;ki$- zHJ`Obyua*1XwB69w#d~+r8b=3g|gDHM8nR(`xzCDC!<2zN!;>ikYys|Y zJI`DaVAnGP9i*DmyO7+l*O_TuHwi>oz8dyP^)ebkO-oO?NTno7;WYx@eC`}cLi-M| zmpCEXVz&(s`CZ-0T{vf@F6_Tt=1^aXkolY&;*k=0H_s8$O?k;yZ~F6BH^zU^vimn4 zh&M;9J05rl=@`A%>I6^RH}TzQj0TuoXB;mYgtMgZ87eYIuU?XS(CY;>c~XE%GNBfr z15QIlGmMgU1>!3Jp)@ecQ1u4w0-kU`ZgeXn6C$I_(HG?}Llg<>j6_juv``gz00a`X zYIpD1VuESu;84GVQ)VO5Juf{Zfp-af)A5Z@G5l*uL02%X7m5J?o;W$1qV{T)Oa^mh z>VuEyWKr46H+EFMQ!ij&oeJkY;77&QR^v)fG@k7yk;+XCVl!%J{9P2%G*~{xO6|&u(;3_$t!Hf zRL05)ipX@Zpv&HXkOAK;Z}xUikPK-OVG$L*ZOd-`Y;04iZo$l=*D{!1U{HCPt2=g2 zE%#n2!okbSqPUKcaYOk%%xTF4C+(V^Nw;IuH4us|v1q&P(p|Mox+a5KZKyqy`)L02 zZI;;kBB5(B0pKz_NcP*NFD|FKZ=%nLKdKq4t_8MwMbz!3lV7V>cc}HG=RMFwQRJ}s zxp!&av(i2^q_2UINJx=!8ys zvQ!s&H#dI44%#QXM~<%SrWCz59uvg8{bq$M=Z7g{VK4)JDH`Jv;FU!TOS-s0oGJ+zp+qY}-mGY2o6*9N$&?=e0g4vC) zKt1wZ)6BpDC&N7grS#F7D5Z}S{qvni*H6aei1lmEEldDou#BAXy;nN?GD(CsOcz&B z#UZlW=TIKnSONoHH@sbXjSs=al2s?asF~%E~9WkM=aes(&UNB&Ul< zsLZU~z$G`^9rCH~WUZ>(0?{~1H8* zGsG8R#<8|jDP^5`uIXYgFik^$y?_2im20Bo_h7@aJOMDA_lkc^NwF$)4V4v#`!4-N z#lEWBEU^4lu`-NOg0hQPD2=Z3`=JKe(EQBCkWliHwgC>z-?fwl!16 z=&Ie4XlAmHgWxSbDwLS=)%`n~18&7x*=nxN3v^i)!)ewT9B^N^Wzz(VHgBwUQFloX zF}Q!Z$1u-0;~|=2Ey26oa61koOG(7vbMNLnW% zdFj_tN1|{AaFofotlg68^a5pDH^XijmzMkSyPu%*iqKHy9rOKyf+rDtlrp!WN>{Jm zV`|ABj|pZ@O*uuZpwXasaCSm7H-F#Z{J`1@e?}EF;^jiYVe0Q`t;`@UyM@>tnx~&6 
za}S^MU3h0Ia*6T26>RI%n1>f*GUTg`507T3tv989sn#o{a_eQc$Tc!(?namR^@Dx@ z2!%AFXe!BRpEtV14-K>Blt7X8G{+LvczM?E_Q{!3;@Pf=4+OR@5P`c!vWs@I&wD(d97MiIuXf2*BS9a3lM-T7iXFFz z>9`DDYUO8a>2!v`NqtB?NIoM3C5>rSiYwgz0N5os4*g#w`t!sI%W938^qNqvcVQt@ zMg@Yyf3^)T$JEgaZviKRk}jEW3>i+6ldtmvHhs0;4g(k!7<*^+NSh5N2UZ5z5;L7U ztlhr4W0!IS49(u}+?PDP)LtZLo?m@XB5(Xt^|@Vhrce**R928daW=y`*Ho z1~(Pa)wDQ*j~aS6gXR&vg8101C!NP6vpUL#-Jj$+rZjEl8K{3v7C0HsMsYACY>8RD z?ddRlWIN=C_vHX_OVyZYJ&%(P8$u(RLkjQhe+vU) zsu>N}ZhfEU&0I>9=^HRcq@Gs=*2`M3tY)KD2sR+|--o0nC9)Knmjs2`uVqB{Ngo0lUj@A=wCWwVzj5xN~^I3vARTU|F%Cd(i`$v5%&2 ze$@~d0wD(ew$Hx1(JR@Tl3;^0f-xz1OzTAgK>QtPxh8F9U}=L`H`X(FeS zEP-L-b!r-hFFAVt97bu z3A;ZL5E_OC5juNmf)3;-Q2<{`q$$Is{(6OrJsJ+qzNn1wY_eo=`> zMIqDW0D@GYYwzvcwsPZ%hVV4b(};KlLTz2%>eAtA_7;|~-Y%V`_m_HB4~|Xp6)gEp zbfW6D5_FU^UEN*pqDta6jpo9_(8xcO0#Y4hU_~$+m8Duzgg_YDCD0ic2ekOYDRj4*-(h%kZ#ga!#JhDe6@ z4FLxRpn?Q-FoFbe0s#Opf&^*?2`Yw2hW8Bt2LUiC1_~;MNQU&%Q znsig|?jKaCeBD!H2PY+L?4;j|Ax^{BC!E=Ue5tRxUVAorYnO9}r51hS0|LwfvZAF; zE}!9a-33`5FVti`X11c{KLC05g>DDa(CHUiZ2x~zG!b6`dY9-mLLlU|0>8yYe!qV; zB)lk}UN;N*D8#m9XWS$2c6#JmytuiA2N^yHOX5@esP`=>#&FWlknrEF$8!i~pIqj~ znCf`t6a8}N0S*Th2ZuCRB9IQ;NF7niHSHr3Qd6qPOEsg1Tm{P=r4Cyf*)ncNTpT*! z_T96-^%OSrw4h_Gzr0ne8VC=eqaVfnC@2gpzT~L_)WY>OQ((w>6hs4CX&4lnI<7Nu zrVgueaggPm9tI2eke}tiiqsdf&Xm9J-#|m_INxHMicJxVEvNGaN{P4+pmz0iE(UW+ z+?l5~NXoW!KTjQZ#yN6hv)I%dW{f$gT>rn_+2J~u6*_S= zI(0uYE7njx#IwmdlPhFP4PuZTM`)c)giqb!0E9Y@bK7f5oG+a)yJ1Vedu%tR3It#R z}G2GTa1VLwNlVrHc~Eb7|G=};tQJtM54jCUupX|6twR* z-ozo>&F+%5y^J}ftXd2iUAbGf%3mv2dGBJ5YV)K;F!{^{vB6IBe+Tky$12zPT~i7E zw!(i%l({vo^O)>9t`Q|K4l>6-A@8TciDsU4P*swbztcaK!jjE}de;BDK(A_cigSN9PM4uD>Sh= zTBUI%yWe>S5!IOHEkeRLiC?%f!dQZyD2Xhtu^(#&r~6ujcj|_1b0ImV<~zge3L2x% zcoJkGLJ~l4DyK_Dd<Gf^&aUA=+uNV~{4{*J&3CxUraiv>6W(J0Irf9Fol{~0 z#uk?hn@y5BG=>RcScozq%RS}V*hDk9bdGz)e-tn`SNT`-` zSNu4ZXJlzAyeGroMohv_=^>`?oyJp(F^y)K+FXHXn|ayUTho33lo+l9Tj1pCWi~1< ztu4mODl>x-_K9!UEdA7pA^*XIahV}GASBm1cF=7eHRUIT4Wx5g=?tTHzTnivzCXx0s{etposQFNdN!< diff --git a/secrets/snakeoil-ca-1.crt b/secrets/snakeoil-ca-1.crt deleted file mode 100644 index 4d7f6514..00000000 --- a/secrets/snakeoil-ca-1.crt +++ /dev/null @@ -1,23 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDwTCCAqmgAwIBAgIUFMVvwFTLh62igH3ooxloWmTXnAUwDQYJKoZIhvcNAQEL -BQAwcDEeMBwGA1UEAwwVY2ExLnRlc3QuY29uZmx1ZW50LmlvMQ0wCwYDVQQLDARU -RVNUMRIwEAYDVQQKDAlDT05GTFVFTlQxETAPBgNVBAcMCFBhbG9BbHRvMQswCQYD -VQQIDAJDYTELMAkGA1UEBhMCVVMwHhcNMjQxMTI2MDY0OTAxWhcNMjUxMTI2MDY0 -OTAxWjBwMR4wHAYDVQQDDBVjYTEudGVzdC5jb25mbHVlbnQuaW8xDTALBgNVBAsM -BFRFU1QxEjAQBgNVBAoMCUNPTkZMVUVOVDERMA8GA1UEBwwIUGFsb0FsdG8xCzAJ -BgNVBAgMAkNhMQswCQYDVQQGEwJVUzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCC -AQoCggEBAMI8ZK9qssuzYAGx5vyZ7zJgA82gkDqW/QTf5Jp6GZy6U1yOChz+RJGZ -le3U5CcL1qDmm6rA37uQgMo8skbrovk4PnOdGNsDktJ16q5HFno2C0HQ6lZrhpz1 -oKMVPiY73Bkz7U8M5qvLC5F73VdG+yQCCMQODe0aTjHnIVgiDgkCKuSDgHDc/ke+ -k5FgRF99rZaZd9/Fv3QZakc7pLQhHbsJsmR4+4dW5MpdbWJTMpc7LSzISZ4Ojdbn -K1r6xeId4lbwajw/9y5cCZ4g7LmJLcW286KbKLQHopP51Hf6A0YVQ5ZZe38ZIi3O -5Wzvuw4HYKjrR9OEwc4KZRwL9RWFFhUCAwEAAaNTMFEwHQYDVR0OBBYEFJuW8Iar -qKvy4KN+X3RqYplG3fMeMB8GA1UdIwQYMBaAFJuW8IarqKvy4KN+X3RqYplG3fMe -MA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBALMLBbz2qNq56h/L -pCFvN/uld3pqhRZOsNTcB+VwKm1NAuzHOUMpZAjki8/3Czwwv69tjRXdo1zX/F5/ -1CQooue85Ax+HmP8y2uOAIKnFLI1jQOFimAJWwBUrQYg8q6JnQXW3P8Cengses6v -+2F28MqzZ/Q8oAYwYnRagX3+iCL0dTM9iMTWS5nioiWSk5YRjDsP9T4tu4n4JmkU -z3zziHtL22Jh4O1aICRUXw1KX/TEL1RDNhXZLpHIZ4fwGPUv8OAFjufifXyiihNj -nLX6TkInwpz0HXPyLHtLt6oCn2hcRNZ5K/sKBt4Vvo4bTNtW+IiwnEGDzWNn0bdb -0hHH8nU= ------END CERTIFICATE----- diff --git a/secrets/snakeoil-ca-1.key 
b/secrets/snakeoil-ca-1.key deleted file mode 100644 index 36be7834..00000000 --- a/secrets/snakeoil-ca-1.key +++ /dev/null @@ -1,30 +0,0 @@ ------BEGIN ENCRYPTED PRIVATE KEY----- -MIIFJDBWBgkqhkiG9w0BBQ0wSTAxBgkqhkiG9w0BBQwwJAQQ4im2+ohZPjY2TpLh -FlqXwgICCAAwDAYIKoZIhvcNAgkFADAUBggqhkiG9w0DBwQI11LdZVnPnNIEggTI -YxwcNTKL9Chn3Gk6TDcfmZUXS33GO+XEfus3Xzcn0T31ZIGbmVNG28rqAVv12Gzn -kKfZ+AFMasrgO1pQ18TJYsDKiQUFCb9hpqjDIEaYFho/9qSN+97GdCjPqNpa/Wpy -AfxSChr+hODrrhCxFHcj/BISYAgHkCXACQcbsSsTa/lUiWhCDcAhc/AenlKbT+Bq -mrKn8SflUqCNMpBy+cBBcxDIi1v260m/s8JBBRsa6mrE8Su+/jAzzh/mxw99obVs -X7S5jt0ro8stkIPi/Si/ny2s2AHud5k/YDEOAcDPv8OXYm9b3OwrtXdaaVTTiQKo -Exd2HkmfnUpE7H9gXEIvGrkdvHvad70jsFO9UFSkH2ciJAcF8HljeqylaIw0MWyG -kyQ1okq+hK1nFoZQpCpMYo206N2oWRQ8pHz/nfFiobuSKnm2WodHn6o6l5ijsMon -328ghO4nmMF1iXvA0KtwR2+9Cud3pUWJMgSTOoIOQQhohxbE7CWA+ntnfWF2WKK4 -7uzZVFUlPNt20tZ2LNsOzQEyianJl6hjqnDv4EWFlzcR13wU/vahC/sbmllSF28A -FwR7O6Nf4dkUmEzlvZG4CsM2oi8ps3zXui61g6oEZSMjMItny3ekBq6VOm41biWW -Sf75JRqYxF21+0xzIv5+5do24mXAKmztLJwihz+2pb1Ddffow/UQxMpS9LasbIle -vmwzz40f2Am+gMgXcHzcIZ303F5Pc3On5cNnGJsRSQjxKyphRNLt7F8Tx0gQYiMK -CUvoQh8O0jXwNw3Uzxar4Ev/57bE9AAmybnmUsjLGQBJyrj4U51p5vSwZes9XYEE -E3Ax1lKTzX/25eJIT2d20VcvtStv34uxz1NFtNX/G3x6fvpGmMCgMgL3SCNAnkou -aNIcsAsifAkUm0VqiNVeULq0EeFMMS39OLpdo8e7cPbcgcgP0GCUk4E9Dh5XVn9L -dFMKa2KyDSlQHTQATX/pB5VD+jOFnCRA+jTTanUP9ASAWxErp0zvNvvWymdPxFGC -SzSUgX5iQ/XfzgN3K0LPFfVVJ+t0LM2X/hX5rH1Li4wNDWPqFBaGlseCaHwbN4n9 -L4e5Ax2wB1Ra41ajat9be0qI7tkVBnnz8if1KeVinr8k0tDE+i9emtrHAhfuaeVM -DJ8q2gmmkZpIEID1IGr2ZyNX7X1rJv3wzvSsTG+CY3EM/b8j9Jzpi8ItVFIhwuLU -3u0aU6569n4p9CL24y1rLhiXSvJccBYB9RXsmbA5HsZFYkZjBbwe89RyTJHAF6LK -EaVgBcvlNIhBXN4KGAd0pjN+517MpDMKQ/XQw6aSk0NtPmteDFzPSS4K+lLHwwJJ -V4DTlAvznaiYoeqojj6PZdYf028P2exziV1h8WWQe3UlmA15eZjlPWWF0bE2xoMn -Fpyx65bTDMdqSApKsVKiYgx03KtuB+R0fiGseCKQ7AldvZjHgFQ3VHxVXfnLx7Wk -vKooirTmA+ksTyYLsdHxVAnpDTn2xC8xGdFogxnplCp9DCavvk8mdmqh5D3Rki9Y -YPUVodr8O7KcgcILF5mearQS3q3bfT8ZCdU8msFmSGT3nvGAou18HFm4M5sj543U -g5MLl1hlPUaYsghoDf7Dl4fstljakDP1 ------END ENCRYPTED PRIVATE KEY----- diff --git a/secrets/snakeoil-ca-1.srl b/secrets/snakeoil-ca-1.srl deleted file mode 100644 index 05530c14..00000000 --- a/secrets/snakeoil-ca-1.srl +++ /dev/null @@ -1 +0,0 @@ -0FE87DAB3A7CB3334609E1EA80EA4FC0BEA5D303 diff --git a/src/ziggurat/config.clj b/src/ziggurat/config.clj index bd21a95d..6d048882 100644 --- a/src/ziggurat/config.clj +++ b/src/ziggurat/config.clj @@ -217,7 +217,8 @@ (let [username (get jaas-config :username) password (get jaas-config :password) login-module (get jaas-config :login-module) - jaas_props (create-jaas-properties username password login-module)] + jaas_props (create-jaas-properties username password login-module) + _ (println (str "JAAS CONFIGS -->> " jaas_props))] (doto properties (.put SaslConfigs/SASL_JAAS_CONFIG jaas_props))) properties)) diff --git a/src/ziggurat/streams.clj b/src/ziggurat/streams.clj index a247b2bc..eb31b070 100644 --- a/src/ziggurat/streams.clj +++ b/src/ziggurat/streams.clj @@ -193,11 +193,13 @@ (let [topology-fn (case (:consumer-type stream-config) :stream-joins stream-joins-topology topology) - top (topology-fn handler-fn stream-config topic-entity channels)] + top (topology-fn handler-fn stream-config topic-entity channels) + properties-x (properties stream-config) + _ (println (str "PROPS X -->>" properties-x))] (when-not (nil? top) (KafkaStreams. 
^Topology top - ^Properties (properties stream-config))))) + ^Properties properties-x)))) (defn- merge-consumer-type-config [config] diff --git a/test/ziggurat/streams_test.clj b/test/ziggurat/streams_test.clj index 38711c03..f2535316 100644 --- a/test/ziggurat/streams_test.clj +++ b/test/ziggurat/streams_test.clj @@ -488,22 +488,23 @@ (let [r (handle-uncaught-exception :replace-thread t)] (is (= r StreamsUncaughtExceptionHandler$StreamThreadExceptionResponse/REPLACE_THREAD)))))) -(deftest start-streams-test-with-sasl-ssl-config - (testing "streams should start successfully with valid SASL/SSL configs" +(deftest start-streams-test-with-sasl-config + (testing "streams should start successfully with valid SASL config" (let [message-received-count (atom 0) mapped-fn (get-mapped-fn message-received-count) times 6 kvs (repeat times message-key-value) handler-fn (default-middleware/protobuf->hash mapped-fn proto-class :default) - orig-config (ziggurat-config) - certs-dir (.getAbsolutePath (clojure.java.io/file "secrets"))] - (with-redefs [ziggurat.config/ssl-config (fn [] {:enabled true - :protocol "SASL_SSL" - :mechanism "PLAIN" - :username "client" - :password "client-secret" - :ssl-truststore-location (str certs-dir "/kafka.broker.truststore.jks") - :ssl-truststore-password "confluent"})] + orig-config (ziggurat-config)] + (with-redefs [ziggurat.config/sasl-config (fn [] {:enabled true + :protocol "SASL_PLAINTEXT" + :mechanism "SCRAM-SHA-256" + :jaas { + :login-module "org.apache.kafka.common.security.scram.ScramLoginModule" + :username "admin" + :password "admin" + } + })] (let [streams (start-streams {:default {:handler-fn handler-fn}} (-> orig-config (assoc-in [:stream-router :default :application-id] (rand-application-id)) diff --git a/zookeeper_server_jaas.conf b/zookeeper_server_jaas.conf index 9ad7a531..1742a7e2 100644 --- a/zookeeper_server_jaas.conf +++ b/zookeeper_server_jaas.conf @@ -1,4 +1,10 @@ Server { - org.apache.zookeeper.server.auth.DigestLoginModule required - user_admin="password"; + org.apache.zookeeper.server.auth.DigestLoginModule required + user_admin="admin"; +}; + +Client { + org.apache.zookeeper.server.auth.DigestLoginModule required + username="admin" + password="admin"; }; From e5d4fa652789de62e7c5a0a70ab259e323b1ef21 Mon Sep 17 00:00:00 2001 From: Uddeshya Singh Date: Wed, 27 Nov 2024 18:23:30 +0530 Subject: [PATCH 19/26] Cleanup unused files --- Makefile | 2 +- config-user.properties | 1 - docker-compose-cluster.yml | 4 -- docker-compose.yml | 1 - ...OAuthAuthenticateLoginCallbackHandler.java | 68 ------------------- 5 files changed, 1 insertion(+), 75 deletions(-) delete mode 100644 config-user.properties delete mode 100644 test/com/oauthbearer/OAuthAuthenticateLoginCallbackHandler.java diff --git a/Makefile b/Makefile index d5c65671..11ad3552 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -KAFKA_TOPICS = topic1 topic2 +KAFKA_TOPICS = topic another-test-topic KAFKA_BROKERS = kafka1:9095 kafka2:9096 kafka3:9097 ADMIN_CONFIG = /etc/kafka/secrets/config-admin.properties KAFKA_CONTAINER = ziggurat-kafka1-1 diff --git a/config-user.properties b/config-user.properties deleted file mode 100644 index 3c0d4117..00000000 --- a/config-user.properties +++ /dev/null @@ -1 +0,0 @@ -sasl.jaas.config=org.apache.kafka.common.security.scram.ScramLoginModule required username=client-for-poc password=password123; security.protocol=SASL_PLAINTEXT sasl.mechanism=SCRAM-SHA-256 diff --git a/docker-compose-cluster.yml b/docker-compose-cluster.yml index 005f6ddd..b5e58bd0 100644 
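
For reference, the :sasl map that the SASL integration test in streams_test.clj redefines ziggurat.config/sasl-config to return would, in a real application, normally come from the service's own configuration rather than a with-redefs. A minimal sketch is below; the :ziggurat/:sasl key path and overall map shape are assumptions inferred from how sasl-config is used in these patches, and the values simply mirror the SCRAM-SHA-256 setup in docker-compose-cluster.yml.

;; Illustrative sketch only -- the key path under :ziggurat is an assumption,
;; the values mirror the SCRAM-SHA-256 admin user configured for the cluster.
{:ziggurat {:sasl {:enabled   true
                   :protocol  "SASL_PLAINTEXT"
                   :mechanism "SCRAM-SHA-256"
                   :jaas      {:login-module "org.apache.kafka.common.security.scram.ScramLoginModule"
                               :username     "admin"
                               :password     "admin"}}}}
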
--- a/docker-compose-cluster.yml +++ b/docker-compose-cluster.yml @@ -9,7 +9,6 @@ services: zookeeper: image: confluentinc/cp-zookeeper:5.5.0 - platform: linux/amd64 hostname: zookeeper ports: - "2181:2181" @@ -24,7 +23,6 @@ services: kafka1: image: 'confluentinc/cp-kafka:${CONFLUENT_VERSION}' - platform: linux/amd64 cap_add: - NET_ADMIN - SYS_ADMIN @@ -61,7 +59,6 @@ services: kafka2: image: 'confluentinc/cp-kafka:${CONFLUENT_VERSION}' - platform: linux/amd64 cap_add: - NET_ADMIN - SYS_ADMIN @@ -96,7 +93,6 @@ services: kafka3: image: 'confluentinc/cp-kafka:${CONFLUENT_VERSION}' - platform: linux/amd64 cap_add: - NET_ADMIN - SYS_ADMIN diff --git a/docker-compose.yml b/docker-compose.yml index 1a1d0341..87d24d1d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -16,7 +16,6 @@ services: - ALLOW_ANONYMOUS_LOGIN=yes kafka: image: 'bitnami/kafka:${KAFKA_VERSION}' - platform: linux/amd64 ports: - '9092:9092' container_name: 'ziggurat_kafka' diff --git a/test/com/oauthbearer/OAuthAuthenticateLoginCallbackHandler.java b/test/com/oauthbearer/OAuthAuthenticateLoginCallbackHandler.java deleted file mode 100644 index b8c8ea2b..00000000 --- a/test/com/oauthbearer/OAuthAuthenticateLoginCallbackHandler.java +++ /dev/null @@ -1,68 +0,0 @@ -package oauthbearer; - -import org.apache.kafka.common.security.auth.AuthenticateCallbackHandler; -import org.apache.kafka.common.security.oauthbearer.OAuthBearerTokenCallback; -import org.apache.kafka.common.security.oauthbearer.OAuthBearerToken; -import javax.security.auth.callback.Callback; -import javax.security.auth.callback.UnsupportedCallbackException; -import javax.security.auth.login.AppConfigurationEntry; -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Set; - -public class OAuthAuthenticateLoginCallbackHandler implements AuthenticateCallbackHandler { - private Map moduleOptions; - - @Override - public void configure(Map configs, String mechanism, List jaasConfigEntries) { - moduleOptions = (Map) jaasConfigEntries.get(0).getOptions(); - } - - @Override - public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException { - for (Callback callback : callbacks) { - if (callback instanceof OAuthBearerTokenCallback) { - handleCallback((OAuthBearerTokenCallback) callback); - } else { - throw new UnsupportedCallbackException(callback); - } - } - } - - private void handleCallback(OAuthBearerTokenCallback callback) { - String username = moduleOptions.get("username"); - // Create a simple token - in production this would involve actual OAuth flow - OAuthBearerToken token = new OAuthBearerToken() { - @Override - public String value() { - return "dummy-token"; - } - - @Override - public Long startTimeMs() { - return System.currentTimeMillis(); - } - - @Override - public long lifetimeMs() { - return 3600000L; - } - - @Override - public String principalName() { - return username; - } - - @Override - public Set scope() { - return Collections.emptySet(); - } - }; - callback.token(token); - } - - @Override - public void close() {} -} \ No newline at end of file From 73702700f520315e2bccfdf69e9b61d30afb0eb8 Mon Sep 17 00:00:00 2001 From: Uddeshya Singh Date: Thu, 28 Nov 2024 11:49:49 +0530 Subject: [PATCH 20/26] Update acl creation in cluster --- Makefile | 34 ++++++++++++++++++++++++---------- test/ziggurat/streams_test.clj | 24 +++++++++++------------- 2 files changed, 35 insertions(+), 23 deletions(-) diff --git a/Makefile b/Makefile index 11ad3552..c89774f5 100644 
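
The kafka-acls invocations in the hunk below authenticate through config-admin.properties (passed via --command-config), which carries the standard Kafka client keys security.protocol, sasl.mechanism and sasl.jaas.config. A rough client-side equivalent is sketched here, assuming only those standard property names; it is illustrative and not how Ziggurat assembles its properties internally.

;; Sketch: the SCRAM JAAS line plus companion properties a Kafka client would
;; need to reach the SASL_PLAINTEXT listeners (mirrors config-admin.properties).
(import 'java.util.Properties)

(def jaas-line
  (str "org.apache.kafka.common.security.scram.ScramLoginModule required "
       "username=\"admin\" password=\"admin\";"))

(def sasl-client-props
  (doto (Properties.)
    (.put "security.protocol" "SASL_PLAINTEXT")
    (.put "sasl.mechanism" "SCRAM-SHA-256")
    (.put "sasl.jaas.config" jaas-line)))
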
--- a/Makefile +++ b/Makefile @@ -29,8 +29,9 @@ restart: down up wait-for-kafka # Create SCRAM credentials for admin user create-scram-credentials: @echo "Creating SCRAM credentials for admin user..." - @docker exec $(KAFKA_CONTAINER) kafka-configs --bootstrap-server kafka1:9095 \ + @docker exec $(KAFKA_CONTAINER) kafka-configs \ --alter \ + --zookeeper zookeeper:2181 \ --add-config 'SCRAM-SHA-256=[password=admin]' \ --entity-type users \ --entity-name admin @@ -39,23 +40,36 @@ create-scram-credentials: create-topics: @for topic in $(KAFKA_TOPICS); do \ echo "Creating topic: $$topic"; \ - docker exec $(KAFKA_CONTAINER) kafka-topics --bootstrap-server kafka1:9095 \ + docker exec $(KAFKA_CONTAINER) kafka-topics \ --create \ + --zookeeper zookeeper:2181 \ --if-not-exists \ --topic $$topic \ --partitions 3 \ --replication-factor 3; \ done -# Setup ACLs for admin user +# Setup ACLs for admin user on all brokers setup-acls: - @for topic in $(KAFKA_TOPICS); do \ - echo "Setting up ACLs for topic: $$topic"; \ - docker exec $(KAFKA_CONTAINER) kafka-acls --bootstrap-server kafka1:9095 \ - --add \ - --allow-principal User:admin \ - --operation All \ - --topic $$topic; \ + @for broker in $(KAFKA_BROKERS); do \ + case $$broker in \ + kafka1:9095) \ + container="ziggurat-kafka1-1" ;; \ + kafka2:9096) \ + container="ziggurat-kafka2-1" ;; \ + kafka3:9097) \ + container="ziggurat-kafka3-1" ;; \ + esac; \ + for topic in $(KAFKA_TOPICS); do \ + echo "Setting up ACLs for topic: $$topic on broker: $$broker using container: $$container"; \ + docker exec $$container kafka-acls \ + --bootstrap-server $$broker \ + --command-config $(ADMIN_CONFIG) \ + --add \ + --allow-principal User:admin \ + --operation All \ + --topic $$topic; \ + done \ done # Clean up topics (can be used during development) diff --git a/test/ziggurat/streams_test.clj b/test/ziggurat/streams_test.clj index f2535316..fdbc99fe 100644 --- a/test/ziggurat/streams_test.clj +++ b/test/ziggurat/streams_test.clj @@ -499,24 +499,22 @@ (with-redefs [ziggurat.config/sasl-config (fn [] {:enabled true :protocol "SASL_PLAINTEXT" :mechanism "SCRAM-SHA-256" - :jaas { - :login-module "org.apache.kafka.common.security.scram.ScramLoginModule" + :jaas {:login-module "org.apache.kafka.common.security.scram.ScramLoginModule" :username "admin" - :password "admin" - } - })] + :password "admin"}})] + (let [streams (start-streams {:default {:handler-fn handler-fn}} - (-> orig-config - (assoc-in [:stream-router :default :application-id] (rand-application-id)) - (assoc-in [:stream-router :default :bootstrap-servers] "localhost:9095") - (assoc-in [:stream-router :default :changelog-topic-replication-factor] changelog-topic-replication-factor)))] + (-> orig-config + (assoc-in [:stream-router :default :application-id] (rand-application-id)) + (assoc-in [:stream-router :default :bootstrap-servers] "localhost:9095") + (assoc-in [:stream-router :default :changelog-topic-replication-factor] changelog-topic-replication-factor)))] (Thread/sleep 10000) (let [producer-props (doto (props) - (.put ProducerConfig/BOOTSTRAP_SERVERS_CONFIG "localhost:9094"))] + (.put ProducerConfig/BOOTSTRAP_SERVERS_CONFIG "localhost:9094"))] (IntegrationTestUtils/produceKeyValuesSynchronously (get-in (ziggurat-config) [:stream-router :default :origin-topic]) - kvs - producer-props - (MockTime.))) + kvs + producer-props + (MockTime.))) (Thread/sleep 10000) (stop-streams streams) (is (= times @message-received-count))))))) From 6c88271360f16f0f88bd9eee2a20ca80ef397df6 Mon Sep 17 00:00:00 2001 From: 
Uddeshya Singh Date: Thu, 28 Nov 2024 15:30:16 +0530 Subject: [PATCH 21/26] Fix makefile --- Makefile | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index c89774f5..1d0deb16 100644 --- a/Makefile +++ b/Makefile @@ -3,7 +3,7 @@ KAFKA_BROKERS = kafka1:9095 kafka2:9096 kafka3:9097 ADMIN_CONFIG = /etc/kafka/secrets/config-admin.properties KAFKA_CONTAINER = ziggurat-kafka1-1 -.PHONY: setup-cluster create-scram-credentials create-topics setup-acls down up clean restart +.PHONY: all # Main target to setup the entire cluster setup-cluster: down up wait-for-kafka create-scram-credentials create-topics setup-acls @@ -84,3 +84,16 @@ clean-topics: # Show logs logs: docker-compose -f docker-compose-cluster.yml logs -f + +test-cluster: setup-cluster + TESTING_TYPE=cluster lein test + docker-compose -f docker-compose-cluster.yml down + rm -rf /tmp/ziggurat_kafka_cluster_data + +coverage: setup-cluster + lein code-coverage + docker-compose down + +proto: + protoc -I=resources --java_out=test/ resources/proto/example.proto + protoc -I=resources --java_out=test/ resources/proto/person.proto \ No newline at end of file From 3aa50e6a0b91187c623dadaaeb59a21587b5ba3c Mon Sep 17 00:00:00 2001 From: Uddeshya Singh Date: Thu, 28 Nov 2024 15:51:26 +0530 Subject: [PATCH 22/26] Fix container names --- Makefile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Makefile b/Makefile index 1d0deb16..61b8c3a2 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ KAFKA_TOPICS = topic another-test-topic KAFKA_BROKERS = kafka1:9095 kafka2:9096 kafka3:9097 ADMIN_CONFIG = /etc/kafka/secrets/config-admin.properties -KAFKA_CONTAINER = ziggurat-kafka1-1 +KAFKA_CONTAINER = ziggurat_kafka1_1 .PHONY: all @@ -54,11 +54,11 @@ setup-acls: @for broker in $(KAFKA_BROKERS); do \ case $$broker in \ kafka1:9095) \ - container="ziggurat-kafka1-1" ;; \ + container="ziggurat_kafka1_1" ;; \ kafka2:9096) \ - container="ziggurat-kafka2-1" ;; \ + container="ziggurat_kafka2_1" ;; \ kafka3:9097) \ - container="ziggurat-kafka3-1" ;; \ + container="ziggurat_kafka3_1" ;; \ esac; \ for topic in $(KAFKA_TOPICS); do \ echo "Setting up ACLs for topic: $$topic on broker: $$broker using container: $$container"; \ From 1e3fd4e0de2f74114d0f29fad3e33dc1e6bad92e Mon Sep 17 00:00:00 2001 From: Uddeshya Singh Date: Thu, 28 Nov 2024 17:27:52 +0530 Subject: [PATCH 23/26] fix cloverage pipeline --- Makefile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 61b8c3a2..9b65a6a2 100644 --- a/Makefile +++ b/Makefile @@ -92,7 +92,9 @@ test-cluster: setup-cluster coverage: setup-cluster lein code-coverage - docker-compose down + docker-compose -f docker-compose-cluster.yml down + rm -rf /tmp/ziggurat_kafka_cluster_data + proto: protoc -I=resources --java_out=test/ resources/proto/example.proto From f4189a2515a8f876aa18215546bd91f3f62408c0 Mon Sep 17 00:00:00 2001 From: Uddeshya Singh Date: Fri, 29 Nov 2024 10:59:36 +0530 Subject: [PATCH 24/26] remove the logs --- src/ziggurat/config.clj | 3 +-- src/ziggurat/streams.clj | 6 ++---- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/src/ziggurat/config.clj b/src/ziggurat/config.clj index 6d048882..bd21a95d 100644 --- a/src/ziggurat/config.clj +++ b/src/ziggurat/config.clj @@ -217,8 +217,7 @@ (let [username (get jaas-config :username) password (get jaas-config :password) login-module (get jaas-config :login-module) - jaas_props (create-jaas-properties username password login-module) - _ (println 
(str "JAAS CONFIGS -->> " jaas_props))] + jaas_props (create-jaas-properties username password login-module)] (doto properties (.put SaslConfigs/SASL_JAAS_CONFIG jaas_props))) properties)) diff --git a/src/ziggurat/streams.clj b/src/ziggurat/streams.clj index eb31b070..a247b2bc 100644 --- a/src/ziggurat/streams.clj +++ b/src/ziggurat/streams.clj @@ -193,13 +193,11 @@ (let [topology-fn (case (:consumer-type stream-config) :stream-joins stream-joins-topology topology) - top (topology-fn handler-fn stream-config topic-entity channels) - properties-x (properties stream-config) - _ (println (str "PROPS X -->>" properties-x))] + top (topology-fn handler-fn stream-config topic-entity channels)] (when-not (nil? top) (KafkaStreams. ^Topology top - ^Properties properties-x)))) + ^Properties (properties stream-config))))) (defn- merge-consumer-type-config [config] From f0d0f389e4fbfd2998c8bfdb328e7ad661ff785f Mon Sep 17 00:00:00 2001 From: Uddeshya Singh Date: Fri, 29 Nov 2024 14:46:24 +0530 Subject: [PATCH 25/26] remove additional rm --- Makefile | 1 - 1 file changed, 1 deletion(-) diff --git a/Makefile b/Makefile index 9b65a6a2..6fab3e68 100644 --- a/Makefile +++ b/Makefile @@ -93,7 +93,6 @@ test-cluster: setup-cluster coverage: setup-cluster lein code-coverage docker-compose -f docker-compose-cluster.yml down - rm -rf /tmp/ziggurat_kafka_cluster_data proto: From dad1197abbc8893a367592b24afbddf60463c8d6 Mon Sep 17 00:00:00 2001 From: Uddeshya Singh Date: Mon, 2 Dec 2024 15:19:11 +0530 Subject: [PATCH 26/26] Add documentation for login-callback-handler --- docker-compose-cluster.yml | 2 ++ src/ziggurat/config.clj | 3 +++ 2 files changed, 5 insertions(+) diff --git a/docker-compose-cluster.yml b/docker-compose-cluster.yml index b5e58bd0..e651ddc7 100644 --- a/docker-compose-cluster.yml +++ b/docker-compose-cluster.yml @@ -20,6 +20,8 @@ services: -Dzookeeper.requireClientAuthScheme=sasl" volumes: - ./zookeeper_server_jaas.conf:/etc/kafka/zookeeper_server_jaas.conf + - /tmp/ziggurat_kafka_cluster_data/zookeeper/data:/data + - /tmp/ziggurat_kafka_cluster_data/zookeeper/datalog:/datalog kafka1: image: 'confluentinc/cp-kafka:${CONFLUENT_VERSION}' diff --git a/src/ziggurat/config.clj b/src/ziggurat/config.clj index bd21a95d..5db3d9ab 100644 --- a/src/ziggurat/config.clj +++ b/src/ziggurat/config.clj @@ -246,9 +246,12 @@ :ssl-keystore-password <> :mechanism <> :protocol <> + :login-callback-handler <> {:jaas {:username <> :password <> :login-module <>}}} + Note - In the event you need to utilize OAUTHBEARER SASL mechanism, the :login-callback-handler + will be utilized for handling the initiated callbacks from the broker and returning appropriate tokens. " (let [ssl-configs-enabled (:enabled ssl-config-map) jaas-config (get ssl-config-map :jaas)