Kafka Connect 到底能不能连接运行在 SSL 端口上的 broker bootstrap server?如果我把 Connect 配置改成 PLAINTEXT://broker.local:9092 就可以工作,但使用 SSL://broker.local:19092 时容器无法启动。所以想确认:Connect 是否必须使用非 SSL 配置?
我在容器启动期间遇到以下问题。Broker 和 Zookeeper 本身没有任何问题。
connect | Error while getting broker list.
connect | java.util.concurrent.ExecutionException: org.apache.kafka.common.errors.TimeoutException: Timed out waiting for a node assignment. Call: listNodes
connect | at java.base/java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:395)
connect | at java.base/java.util.concurrent.CompletableFuture.get(CompletableFuture.java:2005)
connect | at org.apache.kafka.common.internals.KafkaFutureImpl.get(KafkaFutureImpl.java:165)
connect | at io.confluent.admin.utils.ClusterStatus.isKafkaReady(ClusterStatus.java:147)
connect | at io.confluent.admin.utils.cli.KafkaReadyCommand.main(KafkaReadyCommand.java:149)
connect | Caused by: org.apache.kafka.common.errors.TimeoutException: Timed out waiting for a node assignment. Call: listNodes
control-center | [2024-02-29 17:37:20,612] INFO unable to get command store (io.confluent.command.CommandStore)
connect | Expected 1 brokers but found only 0. Trying to query Kafka for metadata again ...
connect | Expected 1 brokers but found only 0. Brokers found [].
connect | Using log4j config /etc/cp-base-new/log4j.properties
connect exited with code 1
下面是我的 Zookeeper、Broker 和 Connect 配置:
---
# Docker Compose stack: Zookeeper + single SSL-enabled Kafka broker + Kafka Connect.
# Root cause of the original "Timed out waiting for a node assignment" error:
# Connect bootstrapped against the SSL listener (broker.local:19092) but had no
# SSL client configuration, so its admin client spoke PLAINTEXT to an SSL port.
# Fix: CONNECT_SECURITY_PROTOCOL / truststore settings (and the secrets mount)
# added to the connect service below.
version: "3"
services:
  zookeeper:
    image: confluentinc/cp-zookeeper:7.5.3
    container_name: zookeeper
    networks:
      kafka:
        aliases:
          - zookeeper.local
    environment:
      # Env values quoted as strings so the YAML parser never retypes them.
      ZOOKEEPER_SERVER_ID: "1"
      ZOOKEEPER_CLIENT_PORT: "22181"
      ZOOKEEPER_TICK_TIME: "2000"
      ZOOKEEPER_LOG4J_ROOT_LOGLEVEL: ERROR
      ZOOKEEPER_LOG4J_LOGLEVEL: ERROR
    volumes:
      - zk-data:/var/lib/zookeeper/data
      - zk-txn-logs:/var/lib/zookeeper/log

  broker:
    image: confluentinc/cp-enterprise-kafka:7.5.3
    container_name: broker
    networks:
      kafka:
        aliases:
          - broker.local
    depends_on:
      - zookeeper
    ports:
      - "19092:19092"
      # NOTE(review): only SSL://broker.local:19092 is advertised below, so
      # nothing listens on 9092 — this mapping appears unused; confirm before
      # removing.
      - "9092:9092"
    environment:
      KAFKA_LOG4J_ROOT_LOGLEVEL: ERROR
      KAFKA_LOG4J_LOGLEVEL: ERROR
      KAFKA_BROKER_ID: "1"
      KAFKA_ZOOKEEPER_CONNECT: zookeeper.local:22181
      # Single SSL listener; inter-broker traffic also uses SSL.
      KAFKA_ADVERTISED_LISTENERS: SSL://broker.local:19092
      # Keystore/truststore filenames are resolved by the image's startup
      # scripts relative to /etc/kafka/secrets (mounted below).
      KAFKA_SSL_KEYSTORE_FILENAME: kafka.broker.keystore.jks
      KAFKA_SSL_KEYSTORE_CREDENTIALS: broker_keystore_creds
      KAFKA_SSL_KEY_CREDENTIALS: broker_sslkey_creds
      KAFKA_SSL_TRUSTSTORE_FILENAME: kafka.broker.truststore.jks
      KAFKA_SSL_TRUSTSTORE_CREDENTIALS: broker_truststore_creds
      # Empty string disables hostname verification (self-signed dev certs).
      KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: ""
      KAFKA_LISTENER_NAME_INTERNAL_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: ""
      KAFKA_SSL_CLIENT_AUTH: requested
      KAFKA_SECURITY_INTER_BROKER_PROTOCOL: SSL
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: "1"
      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: "0"
      KAFKA_METRIC_REPORTERS: io.confluent.metrics.reporter.ConfluentMetricsReporter
      CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: broker.local:19092
      CONFLUENT_METRICS_REPORTER_ZOOKEEPER_CONNECT: zookeeper.local:22181
      CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: "1"
      # The metrics reporter is its own Kafka client, so it carries its own
      # SSL client settings.
      CONFLUENT_METRICS_REPORTER_SECURITY_PROTOCOL: SSL
      CONFLUENT_METRICS_REPORTER_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.metrics.truststore.jks
      CONFLUENT_METRICS_REPORTER_SSL_KEYSTORE_LOCATION: /etc/kafka/secrets/kafka.metrics.keystore.jks
      CONFLUENT_METRICS_REPORTER_SSL_TRUSTSTORE_PASSWORD: awesomekafka
      CONFLUENT_METRICS_REPORTER_SSL_KEYSTORE_PASSWORD: awesomekafka
      CONFLUENT_METRICS_REPORTER_SSL_KEY_PASSWORD: awesomekafka
      CONFLUENT_METRICS_ENABLE: "true"
      CONFLUENT_SUPPORT_CUSTOMER_ID: anonymous
    volumes:
      - kafka-data:/var/lib/kafka/data
      - ./secrets:/etc/kafka/secrets

  connect:
    image: confluentinc/cp-kafka-connect:7.5.3
    build:
      context: .
      dockerfile: Dockerfile
    hostname: connect
    container_name: connect
    depends_on:
      - zookeeper
      - broker
    ports:
      - "8083:8083"
    networks:
      # - localnet
      kafka:
        aliases:
          - connect.local
    environment:
      CONNECT_BOOTSTRAP_SERVERS: "broker.local:19092"
      # FIX: the worker's admin/main client must speak SSL to the SSL
      # bootstrap listener; without these it defaults to PLAINTEXT and the
      # readiness probe times out ("Expected 1 brokers but found only 0").
      CONNECT_SECURITY_PROTOCOL: SSL
      CONNECT_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.connect.truststore.jks
      CONNECT_SSL_TRUSTSTORE_PASSWORD: awesomekafka
      # Consumer/producer clients embedded in the worker do not inherit the
      # worker-level security settings; configure them explicitly.
      CONNECT_CONSUMER_SECURITY_PROTOCOL: SSL
      CONNECT_CONSUMER_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.connect.truststore.jks
      CONNECT_CONSUMER_SSL_TRUSTSTORE_PASSWORD: awesomekafka
      CONNECT_PRODUCER_SECURITY_PROTOCOL: SSL
      CONNECT_PRODUCER_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.connect.truststore.jks
      CONNECT_PRODUCER_SSL_TRUSTSTORE_PASSWORD: awesomekafka
      CONNECT_REST_ADVERTISED_HOST_NAME: connect
      CONNECT_REST_PORT: "8083"
      CONNECT_GROUP_ID: compose-connect-group
      CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_OFFSET_FLUSH_INTERVAL_MS: "10000"
      CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
      CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
      # NOTE(review): internal converters are deprecated since Connect 2.0;
      # harmless here but can likely be dropped.
      CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
      CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO"
      CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR,com.mongodb.kafka=DEBUG"
      CONNECT_PLUGIN_PATH: /usr/share/confluent-hub-components
      CONNECT_ZOOKEEPER_CONNECT: zookeeper.local:22181
    volumes:
      # FIX: the truststore referenced above must actually be present in the
      # container; the original connect service had no secrets mount.
      - ./secrets:/etc/kafka/secrets
# NOTE(review): the top-level `networks:` (kafka) and `volumes:` (zk-data,
# zk-txn-logs, kafka-data) definitions are not shown in the post — they must
# exist elsewhere in the real file for this stack to start.
如果有人遇到同样的问题——我已经解决了:原因是 Connect 缺少通过 SSL 端口与 broker 通信所必需的客户端配置:
# SSL client settings for the Connect worker's own (admin/main) Kafka client;
# without these it defaults to PLAINTEXT and times out against an SSL listener.
CONNECT_SECURITY_PROTOCOL: SSL
CONNECT_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.connect.truststore.jks
CONNECT_SSL_TRUSTSTORE_PASSWORD: awesomekafka
# The embedded consumer is a separate client and needs its own SSL settings.
# NOTE(review): sink-less setups may also want the CONNECT_PRODUCER_* variants,
# and the truststore path must be mounted into the container — verify both.
CONNECT_CONSUMER_SECURITY_PROTOCOL: SSL
CONNECT_CONSUMER_SSL_TRUSTSTORE_LOCATION: /etc/kafka/secrets/kafka.connect.truststore.jks
CONNECT_CONSUMER_SSL_TRUSTSTORE_PASSWORD: awesomekafka