Open ssthom opened 5 years ago
I am a Go noob but I was able to get SCRAM sha256 and sha512 working using this patch
diff --git a/core/internal/helpers/sarama.go b/core/internal/helpers/sarama.go
index 99458a1..3bdf0d7 100644
--- a/core/internal/helpers/sarama.go
+++ b/core/internal/helpers/sarama.go
@@ -118,6 +118,14 @@ func GetSaramaConfigFromClientProfile(profileName string) *sarama.Config {
saramaConfig.Net.SASL.Handshake = viper.GetBool("sasl." + saslName + ".handshake-first")
saramaConfig.Net.SASL.User = viper.GetString("sasl." + saslName + ".username")
saramaConfig.Net.SASL.Password = viper.GetString("sasl." + saslName + ".password")
+ algorithm := viper.GetString("sasl." + saslName + ".algorithm")
+ if algorithm == "sha512" {
+ saramaConfig.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA512} }
+ saramaConfig.Net.SASL.Mechanism = sarama.SASLMechanism(sarama.SASLTypeSCRAMSHA512)
+ } else if algorithm == "sha256" {
+ saramaConfig.Net.SASL.SCRAMClientGeneratorFunc = func() sarama.SCRAMClient { return &XDGSCRAMClient{HashGeneratorFcn: SHA256} }
+ saramaConfig.Net.SASL.Mechanism = sarama.SASLMechanism(sarama.SASLTypeSCRAMSHA256)
+ }
}
return saramaConfig
diff --git a/core/internal/helpers/scram_client.go b/core/internal/helpers/scram_client.go
new file mode 100644
index 0000000..ca1a922
--- /dev/null
+++ b/core/internal/helpers/scram_client.go
@@ -0,0 +1,36 @@
+package helpers
+
+import (
+ "crypto/sha256"
+ "crypto/sha512"
+ "hash"
+
+ "github.com/xdg/scram"
+)
+
+var SHA256 scram.HashGeneratorFcn = func() hash.Hash { return sha256.New() }
+var SHA512 scram.HashGeneratorFcn = func() hash.Hash { return sha512.New() }
+
+type XDGSCRAMClient struct {
+ *scram.Client
+ *scram.ClientConversation
+ scram.HashGeneratorFcn
+}
+
+func (x *XDGSCRAMClient) Begin(userName, password, authzID string) (err error) {
+ x.Client, err = x.HashGeneratorFcn.NewClient(userName, password, authzID)
+ if err != nil {
+ return err
+ }
+ x.ClientConversation = x.Client.NewConversation()
+ return nil
+}
+
+func (x *XDGSCRAMClient) Step(challenge string) (response string, err error) {
+ response, err = x.ClientConversation.Step(challenge)
+ return
+}
+
+func (x *XDGSCRAMClient) Done() bool {
+ return x.ClientConversation.Done()
+}
and this dockerfile
# based on the file
FROM golang:1.11-alpine as builder
ENV DEP_VERSION="0.5.0"
RUN apk add --no-cache curl \
gcc \
git \
libc-dev \
patch && \
curl -L -s https://github.com/golang/dep/releases/download/v${DEP_VERSION}/dep-linux-amd64 -o $GOPATH/bin/dep && \
chmod +x $GOPATH/bin/dep && \
mkdir -p $GOPATH/src/github.com/linkedin/Burrow
WORKDIR $GOPATH/src/github.com/linkedin
RUN git clone https://github.com/linkedin/Burrow.git
WORKDIR $GOPATH/src/github.com/linkedin/Burrow
ADD files/scram.patch /tmp/scram.patch
RUN git apply /tmp/scram.patch
RUN $GOPATH/bin/dep ensure -add github.com/xdg/scram@master
RUN $GOPATH/bin/dep ensure -update
RUN go build -o /tmp/burrow .
FROM alpine:3.9
RUN apk update && apk upgrade \
&& apk add ca-certificates \
bash \
&& rm -rf /var/cache/apk/*
COPY --from=builder /tmp/burrow /app/
COPY ./files/templates/*.tmpl /etc/burrow/
CMD ["/app/burrow", "--config-dir", "/etc/burrow"]
if they are of any help to anyone
@jbresciani your patch works like a charm!!! Good work and thanks!!!
@jbresciani your patch works like a charm!!! Good work and thanks!!!
Hi @tsrikanth06 @jbresciani - Could you provide the config that worked for you please? I tried with this patch but I still get
{"error":"kafka: client has run out of available brokers to talk to (Is your cluster reachable?)"}
config:
[sasl.mysasl]
username="username"
password="password"
algorithm="sha512"
handshake-first=true
[tls.mytlsprofile]
noverify=true
[client-profile.test]
client-id="burrow-test"
kafka-version="0.10.2.1"
tls="mytlsprofile"
sasl="mysasl"
[cluster.local]
class-name="kafka"
servers=[ "brokerhostname:9094" ]
client-profile="test"
topic-refresh=120
offset-refresh=30
@bai - Tagging you because I see that most of the recent commits are from you. Could you please help with SASL_SSL configuration for Burrow?
my config currently looks like
[zookeeper]
servers=['10.0.0.11:2182', '10.0.0.12:2182', '10.0.0.13:2182']
timeout=6
root-path="/burrow/notifier"
[client-profile.mycluster]
client-id="burrow"
servers=['10.0.0.1:9092', '10.0.0.2:9092', '10.0.0.3:9092', '10.0.0.4:9092', '10.0.0.5:9092']
sasl="mycluster"
[sasl.mycluster]
handshake-first=true
algorithm="sha512"
[cluster.mycluster]
class-name="kafka"
client-profile="mycluster"
offset-refresh=10
servers=['10.0.0.1:9092', '10.0.0.2:9092', '10.0.0.3:9092', '10.0.0.4:9092', '10.0.0.5:9092']
topic-refresh=120
[consumer.mycluster]
class-name="kafka"
client-profile="mycluster"
cluster="mycluster"
group-blacklist="^(console-consumer-|python-kafka-consumer-).*$"
group-whitelist=""
servers=['10.0.0.1:9092', '10.0.0.2:9092', '10.0.0.3:9092', '10.0.0.4:9092', '10.0.0.5:9092']
[httpserver.default]
address=":8080"
[storage.default]
class-name="inmemory"
expire-group=10800
intervals=15
min-distance=1
workers=20
[notifier.default-slack]
class-name="http"
extras={ channel="burrow_alerts" }
interval=1800
keepalive=30
method-close="POST"
send-close=true
template-open="/etc/burrow/default-slack-post.tmpl"
template-close="/etc/burrow/default-slack-delete.tmpl"
threshold=2
timeout=5
url-open="https://hooks.slack.com/services/HOOK/URL"
url-close="https://hooks.slack.com/services/HOOK/URL"
with the following ENV vars set in the burrow pod at deploy time, so that I don't need to hardcode them in the configmap and then store secrets in git:
BURROW_SASL_MYCLUSTER_USERNAME=burrow_username
BURROW_SASL_MYCLUSTER_PASSWORD=burrow_password
I've since moved to SASL_SSL and my config now looks like
[zookeeper]
servers=['10.0.0.11:2182', '10.0.0.12:2182', '10.0.0.13:2182']
timeout=6
root-path="/burrow/notifier"
[client-profile.mycluster]
client-id="burrow"
kafka-version="2.2.0"
sasl="mycluster"
tls="mycluster"
[sasl.mycluster]
handshake-first=true
[tls.mycluster]
cafile="/certs/kafka_ca_cert.pem"
noverify=true
[cluster.mycluster]
class-name="kafka"
client-profile="mycluster"
offset-refresh=10
servers=['10.0.0.1:9092', '10.0.0.2:9092', '10.0.0.3:9092', '10.0.0.4:9092', '10.0.0.5:9092']
topic-refresh=120
[consumer.mycluster]
class-name="kafka"
client-profile="mycluster"
cluster="mycluster"
group-blacklist="^(console-consumer-|python-kafka-consumer-).*$"
group-whitelist=""
servers=['10.0.0.1:9092', '10.0.0.2:9092', '10.0.0.3:9092', '10.0.0.4:9092', '10.0.0.5:9092']
Thanks @jbresciani for providing your config. I tried this configuration (using this patch and also the master branch) but it still fails with the same error.
I tried creating the pem file from the client.jks using the following command. Is this the right approach?
keytool -exportcert -alias <alias> -keystore kafka.client.jks -rfc -file cert.pem
I have also tried @jbresciani and it works
So can this patch be added to the project? Looks like burrow only supports PLAINTEXT SASL mechanism, and I need SHA512...
Ignore. Turns out you can set SASL mechanism: https://github.com/linkedin/Burrow/blob/master/core/internal/helpers/sarama.go#L129-L139
Example config:
[sasl.blablabla]
username="blablabla"
password="blablabla"
mechanism="SCRAM-SHA-512"
I am still blaming the documentation because this was not mentioned... 😁
Currently, there is no way to configure the SASL mechanism in a SASL Profile. So it defaults to PLAIN
Looks like we simply need to set the mechanism here based on configuration: https://github.com/linkedin/Burrow/blob/master/core/internal/helpers/sarama.go#L114
Then we will be able to support the following SASL Mechanisms https://github.com/Shopify/sarama/blob/master/broker.go#L55