diff --git a/clients/cmd/promtail/promtail-kafka-sasl-plain.yaml b/clients/cmd/promtail/promtail-kafka-sasl-plain.yaml
new file mode 100644
index 000000000000..dfc7995826a7
--- /dev/null
+++ b/clients/cmd/promtail/promtail-kafka-sasl-plain.yaml
@@ -0,0 +1,26 @@
+server:
+  http_listen_port: 9080
+  grpc_listen_port: 0
+
+clients:
+  - url: http://localhost:3100/loki/api/v1/push
+
+scrape_configs:
+  - job_name: kafka-sasl-plain
+    kafka:
+      use_incoming_timestamp: false
+      brokers:
+        - localhost:29092
+      authentication:
+        type: sasl
+        sasl_config:
+          mechanism: PLAIN
+          user: kafkaadmin
+          password: kafkaadmin-pass
+          use_tls: false
+      group_id: kafka_group
+      topics:
+        - foo
+        - ^promtail.*
+      labels:
+        job: kafka-sasl-plain
\ No newline at end of file
diff --git a/clients/cmd/promtail/promtail-kafka-sasl-scram.yaml b/clients/cmd/promtail/promtail-kafka-sasl-scram.yaml
new file mode 100644
index 000000000000..26170162f7d6
--- /dev/null
+++ b/clients/cmd/promtail/promtail-kafka-sasl-scram.yaml
@@ -0,0 +1,26 @@
+server:
+  http_listen_port: 9080
+  grpc_listen_port: 0
+
+clients:
+  - url: http://localhost:3100/loki/api/v1/push
+
+scrape_configs:
+  - job_name: kafka-sasl-scram
+    kafka:
+      use_incoming_timestamp: false
+      brokers:
+        - localhost:29092
+      authentication:
+        type: sasl
+        sasl_config:
+          mechanism: SCRAM-SHA-512
+          user: kafkaadmin
+          password: kafkaadmin-pass
+          use_tls: false
+      group_id: kafka_group
+      topics:
+        - foo
+        - ^promtail.*
+      labels:
+        job: kafka-sasl-scram
\ No newline at end of file
diff --git a/clients/cmd/promtail/promtail-kafka-sasl-ssl.yaml b/clients/cmd/promtail/promtail-kafka-sasl-ssl.yaml
new file mode 100644
index 000000000000..28aa5a3d5b28
--- /dev/null
+++ b/clients/cmd/promtail/promtail-kafka-sasl-ssl.yaml
@@ -0,0 +1,28 @@
+server:
+  http_listen_port: 9080
+  grpc_listen_port: 0
+
+clients:
+  - url: http://localhost:3100/loki/api/v1/push
+
+scrape_configs:
+  - job_name: kafka-sasl-ssl
+    kafka:
+      use_incoming_timestamp: false
+      brokers:
+        - localhost:29092
+      authentication:
+        type: sasl
+        sasl_config:
+          mechanism: PLAIN
+          user: kafkaadmin
+          password: kafkaadmin-pass
+          use_tls: true
+          ca_file: ../../../tools/kafka/secrets/promtail-kafka-ca.pem
+          insecure_skip_verify: true
+      group_id: kafka_group
+      topics:
+        - foo
+        - ^promtail.*
+      labels:
+        job: kafka-sasl-ssl
\ No newline at end of file
diff --git a/clients/cmd/promtail/promtail-kafka-ssl.yaml b/clients/cmd/promtail/promtail-kafka-ssl.yaml
new file mode 100644
index 000000000000..65d5dd9b84e5
--- /dev/null
+++ b/clients/cmd/promtail/promtail-kafka-ssl.yaml
@@ -0,0 +1,27 @@
+server:
+  http_listen_port: 9080
+  grpc_listen_port: 0
+
+clients:
+  - url: http://localhost:3100/loki/api/v1/push
+
+scrape_configs:
+  - job_name: kafka-mtls
+    kafka:
+      use_incoming_timestamp: false
+      brokers:
+        - localhost:29092
+      authentication:
+        type: ssl
+        tls_config:
+          ca_file: ../../../tools/kafka/secrets/promtail-kafka-ca.pem
+          cert_file: ../../../tools/kafka/secrets/kafka.consumer.keystore.cer.pem
+          key_file: ../../../tools/kafka/secrets/kafka.consumer.keystore.key.pem
+          server_name: localhost
+          insecure_skip_verify: true
+      group_id: kafka_mtls_group
+      topics:
+        - foo
+        - ^promtail.*
+      labels:
+        job: kafka-mtls
\ No newline at end of file
diff --git a/tools/kafka/.gitignore b/tools/kafka/.gitignore
new file mode 100644
index 000000000000..a5591bc99ad1
--- /dev/null
+++ b/tools/kafka/.gitignore
@@ -0,0 +1,8 @@
+*.crt
+*.jks
+*_creds
+*.key
+*.pem
+*.csr
+*.srl
+*.p12
\ No newline at end of file
diff --git a/tools/kafka/README.md b/tools/kafka/README.md
index 26f8bf05c761..03a6242a84e4 100644
--- a/tools/kafka/README.md
+++ b/tools/kafka/README.md
@@ -15,6 +15,16 @@ To discover available brokers you can use the `make print-brokers`.
 
 Finally to stop the compose stack use `make stop-kafka`. This will result in all topics being lost with their messages.
 
+## Running secure Kafka locally
+
+To test authentication, start a Kafka container that is configured with the authentication mode you want to exercise.
+
+Run `make start-kafka` in the directory that matches that mode, for example `sasl-scram`.
+
+When using SSL/TLS, you also need to create certificates first by running `make create-certs`.
+
+If you don't need authentication, use the tooling in the `plain` directory.
+
 ## Working with Topic
 
 In Kafka before sending messages you need to create and select the topic you want to use for the exchange.
diff --git a/tools/kafka/Makefile b/tools/kafka/plain/Makefile
similarity index 94%
rename from tools/kafka/Makefile
rename to tools/kafka/plain/Makefile
index a65da1eda610..7cb6fd59d0d1 100644
--- a/tools/kafka/Makefile
+++ b/tools/kafka/plain/Makefile
@@ -4,7 +4,7 @@ TOPIC ?= promtail
 RF ?= 1
 PARTS ?= 3
 
-BROKER_LIST := $(shell ./broker-list.sh $(HOST_IP))
+BROKER_LIST := $(shell ../broker-list.sh $(HOST_IP))
 DOCKER_RUN := docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -e HOST_IP=$(HOST_IP) -i -t wurstmeister/kafka /bin/bash -c
 
 start-kafka:
diff --git a/tools/kafka/docker-compose.yml b/tools/kafka/plain/docker-compose.yml
similarity index 100%
rename from tools/kafka/docker-compose.yml
rename to tools/kafka/plain/docker-compose.yml
diff --git a/tools/kafka/sasl-plain/Makefile b/tools/kafka/sasl-plain/Makefile
new file mode 100644
index 000000000000..c11af73ce0cc
--- /dev/null
+++ b/tools/kafka/sasl-plain/Makefile
@@ -0,0 +1,16 @@
+
+HOST_IP ?= host.docker.internal
+TOPIC ?= promtail
+RF ?= 1
+PARTS ?= 3
+
+BROKER_LIST := $(shell ../broker-list.sh $(HOST_IP))
+
+start-kafka:
+	docker-compose up -d
+
+stop-kafka:
+	docker-compose down
+
+print-brokers:
+	@echo $(BROKER_LIST)
\ No newline at end of file
diff --git a/tools/kafka/sasl-plain/conf/kafka.jaas.conf b/tools/kafka/sasl-plain/conf/kafka.jaas.conf
new file mode 100644
index 000000000000..a89d8b380513
--- /dev/null
+++ b/tools/kafka/sasl-plain/conf/kafka.jaas.conf
@@ -0,0 +1,19 @@
+KafkaServer {
+  org.apache.kafka.common.security.plain.PlainLoginModule required
+  username="kafkaadmin"
+  password="kafkaadmin-pass"
+  user_kafkaadmin="kafkaadmin-pass"
+  ;
+};
+KafkaClient {
+  org.apache.kafka.common.security.plain.PlainLoginModule required
+  username="kafkaadmin"
+  password="kafkaadmin-pass"
+  ;
+};
+Client {
+  org.apache.zookeeper.server.auth.DigestLoginModule required
+  username="super"
+  password="adminsecret"
+  ;
+};
\ No newline at end of file
diff --git a/tools/kafka/sasl-plain/conf/zookeeper.jaas.conf b/tools/kafka/sasl-plain/conf/zookeeper.jaas.conf
new file mode 100644
index 000000000000..465231267f6d
--- /dev/null
+++ b/tools/kafka/sasl-plain/conf/zookeeper.jaas.conf
@@ -0,0 +1,4 @@
+Server {
+  org.apache.zookeeper.server.auth.DigestLoginModule required
+  user_super="adminsecret";
+};
\ No newline at end of file
diff --git a/tools/kafka/sasl-plain/docker-compose.yml b/tools/kafka/sasl-plain/docker-compose.yml
new file mode 100644
index 000000000000..f875885a800d
--- /dev/null
+++ b/tools/kafka/sasl-plain/docker-compose.yml
@@ -0,0 +1,32 @@
+version: '2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper:latest
+    ports:
+      - "22181:22181"
+    environment:
+      ZOOKEEPER_SERVER_ID: 1
+      ZOOKEEPER_CLIENT_PORT: 22181
+      KAFKA_OPTS: -Djava.security.auth.login.config=/etc/kafka/secrets/zookeeper.jaas.conf
+        -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider
+        -Dzookeeper.requireClientAuthScheme=sasl
+    volumes:
+      - ./conf:/etc/kafka/secrets
+
+  kafka:
+    image: confluentinc/cp-kafka:6.2.1
+    depends_on:
+      - zookeeper
+    ports:
+      - "29092:9092"
+    environment:
+      KAFKA_BROKER_ID: 1
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:22181
+      KAFKA_ADVERTISED_LISTENERS: SASL_PLAINTEXT://kafka:9092
+      KAFKA_SECURITY_INTER_BROKER_PROTOCOL: SASL_PLAINTEXT
+      KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: PLAIN
+      KAFKA_SASL_ENABLED_MECHANISMS: PLAIN
+      KAFKA_OPTS: -Djava.security.auth.login.config=/etc/kafka/secrets/kafka.jaas.conf
+    volumes:
+      - ./conf:/etc/kafka/secrets
+      - /var/run/docker.sock:/var/run/docker.sock
\ No newline at end of file
diff --git a/tools/kafka/sasl-scram/Makefile b/tools/kafka/sasl-scram/Makefile
new file mode 100644
index 000000000000..5df9cf5cf0d6
--- /dev/null
+++ b/tools/kafka/sasl-scram/Makefile
@@ -0,0 +1,22 @@
+
+HOST_IP ?= host.docker.internal
+TOPIC ?= promtail
+RF ?= 1
+PARTS ?= 3
+
+BROKER_LIST := $(shell ../broker-list.sh $(HOST_IP))
+
+start-kafka:
+	docker-compose up -d zookeeper
+	docker-compose exec zookeeper kafka-configs \
+		--zookeeper localhost:22181 \
+		--alter \
+		--add-config 'SCRAM-SHA-512=[iterations=8192,password=kafkaadmin-pass],SCRAM-SHA-512=[password=kafkaadmin-pass]' \
+		--entity-type users --entity-name kafkaadmin
+	docker-compose up -d kafka
+
+stop-kafka:
+	docker-compose down
+
+print-brokers:
+	@echo $(BROKER_LIST)
\ No newline at end of file
diff --git a/tools/kafka/sasl-scram/conf/kafka.jaas.conf b/tools/kafka/sasl-scram/conf/kafka.jaas.conf
new file mode 100644
index 000000000000..3050a64acd33
--- /dev/null
+++ b/tools/kafka/sasl-scram/conf/kafka.jaas.conf
@@ -0,0 +1,18 @@
+KafkaServer {
+  org.apache.kafka.common.security.scram.ScramLoginModule required
+  username="kafkaadmin"
+  password="kafkaadmin-pass"
+  ;
+};
+KafkaClient {
+  org.apache.kafka.common.security.scram.ScramLoginModule required
+  username="kafkaadmin"
+  password="kafkaadmin-pass"
+  ;
+};
+Client {
+  org.apache.zookeeper.server.auth.DigestLoginModule required
+  username="super"
+  password="adminsecret"
+  ;
+};
\ No newline at end of file
diff --git a/tools/kafka/sasl-scram/conf/zookeeper.jaas.conf b/tools/kafka/sasl-scram/conf/zookeeper.jaas.conf
new file mode 100644
index 000000000000..09a258a936ef
--- /dev/null
+++ b/tools/kafka/sasl-scram/conf/zookeeper.jaas.conf
@@ -0,0 +1,10 @@
+Server {
+  org.apache.zookeeper.server.auth.DigestLoginModule required
+  user_super="adminsecret";
+};
+Client {
+  org.apache.zookeeper.server.auth.DigestLoginModule required
+  username="super"
+  password="adminsecret"
+  ;
+};
\ No newline at end of file
diff --git a/tools/kafka/sasl-scram/docker-compose.yml b/tools/kafka/sasl-scram/docker-compose.yml
new file mode 100644
index 000000000000..f01ec571d177
--- /dev/null
+++ b/tools/kafka/sasl-scram/docker-compose.yml
@@ -0,0 +1,32 @@
+version: '2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper:latest
+    ports:
+      - "22181:22181"
+    environment:
+      ZOOKEEPER_SERVER_ID: 1
+      ZOOKEEPER_CLIENT_PORT: 22181
+      KAFKA_OPTS: -Djava.security.auth.login.config=/etc/kafka/secrets/zookeeper.jaas.conf
+        -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider
+        -Dzookeeper.requireClientAuthScheme=sasl
+    volumes:
+      - ./conf:/etc/kafka/secrets
+
+  kafka:
+    image: confluentinc/cp-kafka:6.2.1
+    depends_on:
+      - zookeeper
+    ports:
+      - "29092:9092"
+    environment:
+      KAFKA_BROKER_ID: 1
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:22181
+      KAFKA_ADVERTISED_LISTENERS: SASL_PLAINTEXT://kafka:9092
+      KAFKA_SECURITY_INTER_BROKER_PROTOCOL: SASL_PLAINTEXT
+      KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: SCRAM-SHA-512
+      KAFKA_SASL_ENABLED_MECHANISMS: SCRAM-SHA-512
+      KAFKA_OPTS: -Djava.security.auth.login.config=/etc/kafka/secrets/kafka.jaas.conf
+    volumes:
+      - ./conf:/etc/kafka/secrets
+      - /var/run/docker.sock:/var/run/docker.sock
\ No newline at end of file
diff --git a/tools/kafka/sasl-ssl/Makefile b/tools/kafka/sasl-ssl/Makefile
new file mode 100644
index 000000000000..5d26c3b42e63
--- /dev/null
+++ b/tools/kafka/sasl-ssl/Makefile
@@ -0,0 +1,19 @@
+
+HOST_IP ?= host.docker.internal
+TOPIC ?= promtail
+RF ?= 1
+PARTS ?= 3
+
+BROKER_LIST := $(shell ../broker-list.sh $(HOST_IP))
+
+create-certs:
+	bash ../secrets/create-certs.sh
+
+start-kafka:
+	docker-compose up -d
+
+stop-kafka:
+	docker-compose down
+
+print-brokers:
+	@echo $(BROKER_LIST)
\ No newline at end of file
diff --git a/tools/kafka/sasl-ssl/conf/kafka.jaas.conf b/tools/kafka/sasl-ssl/conf/kafka.jaas.conf
new file mode 100644
index 000000000000..a89d8b380513
--- /dev/null
+++ b/tools/kafka/sasl-ssl/conf/kafka.jaas.conf
@@ -0,0 +1,19 @@
+KafkaServer {
+  org.apache.kafka.common.security.plain.PlainLoginModule required
+  username="kafkaadmin"
+  password="kafkaadmin-pass"
+  user_kafkaadmin="kafkaadmin-pass"
+  ;
+};
+KafkaClient {
+  org.apache.kafka.common.security.plain.PlainLoginModule required
+  username="kafkaadmin"
+  password="kafkaadmin-pass"
+  ;
+};
+Client {
+  org.apache.zookeeper.server.auth.DigestLoginModule required
+  username="super"
+  password="adminsecret"
+  ;
+};
\ No newline at end of file
diff --git a/tools/kafka/sasl-ssl/conf/zookeeper.jaas.conf b/tools/kafka/sasl-ssl/conf/zookeeper.jaas.conf
new file mode 100644
index 000000000000..465231267f6d
--- /dev/null
+++ b/tools/kafka/sasl-ssl/conf/zookeeper.jaas.conf
@@ -0,0 +1,4 @@
+Server {
+  org.apache.zookeeper.server.auth.DigestLoginModule required
+  user_super="adminsecret";
+};
\ No newline at end of file
diff --git a/tools/kafka/sasl-ssl/docker-compose.yml b/tools/kafka/sasl-ssl/docker-compose.yml
new file mode 100644
index 000000000000..a3917f7d10a3
--- /dev/null
+++ b/tools/kafka/sasl-ssl/docker-compose.yml
@@ -0,0 +1,39 @@
+version: '2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper:latest
+    ports:
+      - "22181:22181"
+    environment:
+      ZOOKEEPER_SERVER_ID: 1
+      ZOOKEEPER_CLIENT_PORT: 22181
+      KAFKA_OPTS: -Djava.security.auth.login.config=/etc/kafka/secrets/zookeeper.jaas.conf
+        -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider
+        -Dzookeeper.requireClientAuthScheme=sasl
+    volumes:
+      - ./conf:/etc/kafka/secrets
+
+  kafka:
+    image: confluentinc/cp-kafka:6.2.1
+    depends_on:
+      - zookeeper
+    ports:
+      - "29092:9092"
+    environment:
+      KAFKA_BROKER_ID: 1
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:22181
+      KAFKA_ADVERTISED_LISTENERS: SASL_SSL://kafka:9092
+      KAFKA_SECURITY_INTER_BROKER_PROTOCOL: SASL_SSL
+      KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: PLAIN
+      KAFKA_SASL_ENABLED_MECHANISMS: PLAIN
+      KAFKA_SSL_KEYSTORE_FILENAME: kafka.broker.keystore.jks
+      KAFKA_SSL_KEYSTORE_CREDENTIALS: broker_keystore_creds
+      KAFKA_SSL_KEY_CREDENTIALS: broker_sslkey_creds
+      KAFKA_SSL_TRUSTSTORE_FILENAME: kafka.broker.truststore.jks
+      KAFKA_SSL_TRUSTSTORE_CREDENTIALS: broker_truststore_creds
+      KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: " "
+      KAFKA_SSL_CLIENT_AUTH: requested
+      KAFKA_OPTS: -Djava.security.auth.login.config=/etc/kafka/jaas/kafka.jaas.conf
+    volumes:
+      - ../secrets:/etc/kafka/secrets
+      - ./conf:/etc/kafka/jaas
\ No newline at end of file
diff --git a/tools/kafka/secrets/create-certs.sh b/tools/kafka/secrets/create-certs.sh
new file mode 100755
index 000000000000..83c29ed23b22
--- /dev/null
+++ b/tools/kafka/secrets/create-certs.sh
@@ -0,0 +1,76 @@
+#!/bin/bash
+
+cd `dirname $0`
+
+set -o nounset \
+    -o errexit \
+    -o verbose \
+    -o xtrace
+
+# Generate CA key
+openssl req -new -x509 \
+  -out promtail-kafka-ca.pem \
+  -keyout promtail-kafka-ca-key.pem \
+  -days 36500 \
+  -subj '/CN=ca.promtail.io/OU=TEST/O=Grafana/L=PaloAlto/S=Ca/C=US' \
+  -passin pass:promtail \
+  -passout pass:promtail
+
+for i in broker producer consumer
+do
+  echo "[INFO] Creating ${i} certs"
+
+  # Create keystores
+  keytool -genkey -noprompt \
+    -alias "${i}" \
+    -dname "CN=${i}.promtail.io, OU=TEST, O=Grafana, L=PaloAlto, S=Ca, C=US" \
+    -keystore "kafka.${i}.keystore.jks" \
+    -keyalg RSA \
+    -storepass promtail \
+    -keypass promtail
+
+  # Create CSR, sign the key and import back into keystore
+  keytool -noprompt -keystore "kafka.${i}.keystore.jks" -alias "${i}" -certreq -file "${i}.csr" -storepass promtail -keypass promtail
+
+  openssl x509 -req \
+    -CA promtail-kafka-ca.pem \
+    -CAkey promtail-kafka-ca-key.pem \
+    -in "${i}.csr" -out "${i}-ca-signed.crt" \
+    -days 36500 \
+    -CAcreateserial \
+    -passin pass:promtail
+
+  keytool -noprompt -keystore "kafka.${i}.keystore.jks" \
+    -alias CARoot \
+    -import -file promtail-kafka-ca.pem \
+    -storepass promtail -keypass promtail
+
+  keytool -noprompt -keystore "kafka.${i}.keystore.jks" \
+    -alias "${i}" \
+    -import -file "${i}-ca-signed.crt" \
+    -storepass promtail -keypass promtail
+
+  # Create truststore and import the CA cert.
+  keytool -noprompt -keystore "kafka.${i}.truststore.jks" \
+    -alias CARoot \
+    -import -file promtail-kafka-ca.pem \
+    -storepass promtail -keypass promtail
+
+  # Convert jks to pem encoding
+  keytool -noprompt -importkeystore \
+    -srckeystore "kafka.${i}.keystore.jks" \
+    -destkeystore "kafka.${i}.keystore.p12" \
+    -deststoretype PKCS12 \
+    -storepass promtail -keypass promtail \
+    -srcstorepass promtail -deststorepass promtail
+  openssl pkcs12 -in "kafka.${i}.keystore.p12" -nokeys \
+    -out "kafka.${i}.keystore.cer.pem" \
+    -passin pass:promtail -passout pass:promtail
+  openssl pkcs12 -in "kafka.${i}.keystore.p12" -nodes -nocerts \
+    -out "kafka.${i}.keystore.key.pem" \
+    -passin pass:promtail -passout pass:promtail
+
+  echo "promtail" > "${i}_sslkey_creds"
+  echo "promtail" > "${i}_keystore_creds"
+  echo "promtail" > "${i}_truststore_creds"
+done
\ No newline at end of file
diff --git a/tools/kafka/ssl/Makefile b/tools/kafka/ssl/Makefile
new file mode 100644
index 000000000000..5d26c3b42e63
--- /dev/null
+++ b/tools/kafka/ssl/Makefile
@@ -0,0 +1,19 @@
+
+HOST_IP ?= host.docker.internal
+TOPIC ?= promtail
+RF ?= 1
+PARTS ?= 3
+
+BROKER_LIST := $(shell ../broker-list.sh $(HOST_IP))
+
+create-certs:
+	bash ../secrets/create-certs.sh
+
+start-kafka:
+	docker-compose up -d
+
+stop-kafka:
+	docker-compose down
+
+print-brokers:
+	@echo $(BROKER_LIST)
\ No newline at end of file
diff --git a/tools/kafka/ssl/docker-compose.yml b/tools/kafka/ssl/docker-compose.yml
new file mode 100644
index 000000000000..6df926af85ff
--- /dev/null
+++ b/tools/kafka/ssl/docker-compose.yml
@@ -0,0 +1,30 @@
+version: '2'
+services:
+  zookeeper:
+    image: confluentinc/cp-zookeeper:latest
+    ports:
+      - "22181:22181"
+    environment:
+      ZOOKEEPER_SERVER_ID: 1
+      ZOOKEEPER_CLIENT_PORT: 22181
+
+  kafka:
+    image: confluentinc/cp-kafka:6.2.1
+    depends_on:
+      - zookeeper
+    ports:
+      - "29092:9092"
+    environment:
+      KAFKA_BROKER_ID: 1
+      KAFKA_ZOOKEEPER_CONNECT: zookeeper:22181
+      KAFKA_ADVERTISED_LISTENERS: SSL://kafka:9092
+      KAFKA_SECURITY_INTER_BROKER_PROTOCOL: SSL
+      KAFKA_SSL_KEYSTORE_FILENAME: kafka.broker.keystore.jks
+      KAFKA_SSL_KEYSTORE_CREDENTIALS: broker_keystore_creds
+      KAFKA_SSL_KEY_CREDENTIALS: broker_sslkey_creds
+      KAFKA_SSL_TRUSTSTORE_FILENAME: kafka.broker.truststore.jks
+      KAFKA_SSL_TRUSTSTORE_CREDENTIALS: broker_truststore_creds
+      KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: " "
+      KAFKA_SSL_CLIENT_AUTH: required
+    volumes:
+      - ../secrets:/etc/kafka/secrets
\ No newline at end of file
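
Note: a minimal end-to-end sketch of how the pieces above fit together, assuming a local Loki listening on localhost:3100 (the push URL used in the example configs) and a promtail binary on your PATH; the directories, targets, ports and file names are the ones introduced in this change.

    # SASL over TLS: generate the test CA and keystores, then start the stack.
    cd tools/kafka/sasl-ssl
    make create-certs            # writes the PEM/JKS files into tools/kafka/secrets/
    make start-kafka             # starts ZooKeeper and a SASL_SSL broker published on localhost:29092

    # Run promtail from clients/cmd/promtail so the relative ca_file/cert_file paths resolve.
    cd ../../../clients/cmd/promtail
    promtail -config.file=promtail-kafka-sasl-ssl.yaml

The sasl-plain, sasl-scram and ssl directories work the same way, paired with the matching promtail-kafka-*.yaml example; messages produced to the `foo` topic, or to any topic matching `^promtail.*`, should then show up in Loki under that config's job label.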