ahsen m
04/02/2022, 1:47 AM
Opening socket connection to server kafka-cluster-zookeeper-client.kafka-cluster.svc.cluster.local/10.52.8.240:2181. Will not attempt to authenticate using SASL (unknown error)
53
Socket connection established, initiating session, client: /10.48.12.58:54236, server: kafka-cluster-zookeeper-client.kafka-cluster.svc.cluster.local/10.52.8.240:2181
52
Unable to read additional data from server sessionid 0x0, likely server has closed socket, closing socket connection and attempting reconnect
51
Opening socket connection to server kafka-cluster-zookeeper-client.kafka-cluster.svc.cluster.local/10.52.8.240:2181. Will not attempt to authenticate using SASL (unknown error)
50
Socket connection established, initiating session, client: /10.48.12.58:54248, server: kafka-cluster-zookeeper-client.kafka-cluster.svc.cluster.local/10.52.8.240:2181
49
Unable to read additional data from server sessionid 0x0, likely server has closed socket, closing socket connection and attempting reconnect
48
Opening socket connection to server kafka-cluster-zookeeper-client.kafka-cluster.svc.cluster.local/10.52.8.240:2181. Will not attempt to authenticate using SASL (unknown error)
47
Socket connection established, initiating session, client: /10.48.12.58:54268, server: kafka-cluster-zookeeper-client.kafka-cluster.svc.cluster.local/10.52.8.240:2181
46
Unable to read additional data from server sessionid 0x0, likely server has closed socket, closing socket connection and attempting reconnect
45
Opening socket connection to server kafka-cluster-zookeeper-client.kafka-cluster.svc.cluster.local/10.52.8.240:2181. Will not attempt to authenticate using SASL (unknown error)
44
Socket connection established, initiating session, client: /10.48.12.58:54274, server: kafka-cluster-zookeeper-client.kafka-cluster.svc.cluster.local/10.52.8.240:2181
43
Unable to read additional data from server sessionid 0x0, likely server has closed socket, closing socket connection and attempting reconnect
42
Opening socket connection to server kafka-cluster-zookeeper-client.kafka-cluster.svc.cluster.local/10.52.8.240:2181. Will not attempt to authenticate using SASL (unknown error)
41
Socket connection established, initiating session, client: /10.48.12.58:54278, server: kafka-cluster-zookeeper-client.kafka-cluster.svc.cluster.local/10.52.8.240:2181
40
Unable to read additional data from server sessionid 0x0, likely server has closed socket, closing socket connection and attempting reconnect
39
Failed to initialize Pinot Broker Starter
38
java.lang.NullPointerException: null
37
at org.apache.helix.manager.zk.client.ZkConnectionManager.cleanupInactiveWatchers(ZkConnectionManager.java:112) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
36
at org.apache.helix.manager.zk.client.ZkConnectionManager.close(ZkConnectionManager.java:95) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
35
at org.apache.helix.manager.zk.client.ZkConnectionManager.close(ZkConnectionManager.java:91) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
34
at org.apache.helix.manager.zk.zookeeper.ZkClient.connect(ZkClient.java:1620) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
33
at org.apache.helix.manager.zk.zookeeper.ZkClient.<init>(ZkClient.java:186) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
32
at org.apache.helix.manager.zk.ZkClient.<init>(ZkClient.java:87) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
31
at org.apache.helix.manager.zk.client.ZkConnectionManager.<init>(ZkConnectionManager.java:41) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
30
at org.apache.helix.manager.zk.client.SharedZkClientFactory.getOrCreateZkConnectionNamanger(SharedZkClientFactory.java:60) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
29
at org.apache.helix.manager.zk.client.SharedZkClientFactory.buildZkClient(SharedZkClientFactory.java:40) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
28
at org.apache.pinot.common.utils.ServiceStartableUtils.applyClusterConfig(ServiceStartableUtils.java:54) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
27
at org.apache.pinot.broker.broker.helix.BaseBrokerStarter.init(BaseBrokerStarter.java:118) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
26
at org.apache.pinot.tools.service.PinotServiceManager.startBroker(PinotServiceManager.java:137) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
25
at org.apache.pinot.tools.service.PinotServiceManager.startRole(PinotServiceManager.java:92) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
24
at org.apache.pinot.tools.admin.command.StartServiceManagerCommand$1.lambda$run$0(StartServiceManagerCommand.java:275) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
23
at org.apache.pinot.tools.admin.command.StartServiceManagerCommand.startPinotService(StartServiceManagerCommand.java:301) [pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
22
at org.apache.pinot.tools.admin.command.StartServiceManagerCommand$1.run(StartServiceManagerCommand.java:275) [pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
21
Failed to start a Pinot [BROKER] at 31.159 since launch
20
java.lang.NullPointerException: null
19
at org.apache.helix.manager.zk.client.ZkConnectionManager.cleanupInactiveWatchers(ZkConnectionManager.java:112) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
18
at org.apache.helix.manager.zk.client.ZkConnectionManager.close(ZkConnectionManager.java:95) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
17
at org.apache.helix.manager.zk.client.ZkConnectionManager.close(ZkConnectionManager.java:91) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
16
at org.apache.helix.manager.zk.zookeeper.ZkClient.connect(ZkClient.java:1620) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
15
at org.apache.helix.manager.zk.zookeeper.ZkClient.<init>(ZkClient.java:186) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
14
at org.apache.helix.manager.zk.ZkClient.<init>(ZkClient.java:87) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
13
at org.apache.helix.manager.zk.client.ZkConnectionManager.<init>(ZkConnectionManager.java:41) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
12
at org.apache.helix.manager.zk.client.SharedZkClientFactory.getOrCreateZkConnectionNamanger(SharedZkClientFactory.java:60) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
11
at org.apache.helix.manager.zk.client.SharedZkClientFactory.buildZkClient(SharedZkClientFactory.java:40) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
10
at org.apache.pinot.common.utils.ServiceStartableUtils.applyClusterConfig(ServiceStartableUtils.java:54) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
9
at org.apache.pinot.broker.broker.helix.BaseBrokerStarter.init(BaseBrokerStarter.java:118) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
8
at org.apache.pinot.tools.service.PinotServiceManager.startBroker(PinotServiceManager.java:137) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
7
at org.apache.pinot.tools.service.PinotServiceManager.startRole(PinotServiceManager.java:92) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
6
at org.apache.pinot.tools.admin.command.StartServiceManagerCommand$1.lambda$run$0(StartServiceManagerCommand.java:275) ~[pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
5
at org.apache.pinot.tools.admin.command.StartServiceManagerCommand.startPinotService(StartServiceManagerCommand.java:301) [pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
4
at org.apache.pinot.tools.admin.command.StartServiceManagerCommand$1.run(StartServiceManagerCommand.java:275) [pinot-all-0.10.0-SNAPSHOT-jar-with-dependencies.jar:0.10.0-SNAPSHOT-078c711d35769be2dc4e4b7e235e06744cf0bba7]
3
Shutting down Pinot Service Manager with all running Pinot instances...
2
Shutting down Pinot Service Manager admin application...
1
Deregistering service status handler
Mayank
ahsen m
04/02/2022, 1:50 AM
# ------------------------------------------------------------------------------
# Pinot Broker:
# ------------------------------------------------------------------------------
broker:
name: broker
replicaCount: 1
podManagementPolicy: Parallel
podSecurityContext: {}
# fsGroup: 2000
securityContext: {}
jvmOpts: "-Xms256M -Xmx1G -XX:+UseG1GC -XX:MaxGCPauseMillis=200 -Xlog:gc*:file=/opt/pinot/gc-pinot-broker.log"
log4j2ConfFile: /opt/pinot/conf/log4j2.xml
pluginsDir: /opt/pinot/plugins
routingTable:
builderClass: random
probes:
endpoint: "/health"
livenessEnabled: true
readinessEnabled: true
persistence:
extraVolumes: []
extraVolumeMounts: []
service:
annotations: {}
clusterIP: "None"
externalIPs: []
loadBalancerIP: ""
loadBalancerSourceRanges: []
type: ClusterIP
protocol: TCP
port: 8099
name: broker
nodePort: ""
external:
enabled: false
type: LoadBalancer
port: 8099
# For example, in private GKE cluster, you might add <http://cloud.google.com/load-balancer-type|cloud.google.com/load-balancer-type>: Internal
annotations: {}
ingress:
v1beta1:
enabled: false
v1:
enabled: false
resources: {}
# resources:
# requests:
# cpu: 500m
# limits:
# cpu: 1000m
nodeSelector: {}
# nodeSelector:
# service: pinot-cluster
# affinity: {}
affinity:
nodeAffinity:
requiredDuringSchedulingIgnoredDuringExecution:
nodeSelectorTerms:
- matchExpressions:
- key: scope
operator: In
values:
- highcpu
# podAntiAffinity:
# preferredDuringSchedulingIgnoredDuringExecution:
# - podAffinityTerm:
# labelSelector:
# matchLabels:
# <http://app.kubernetes.io/component|app.kubernetes.io/component>: worker
# <http://app.kubernetes.io/instance|app.kubernetes.io/instance>: trino
# <http://app.kubernetes.io/name|app.kubernetes.io/name>: trino
# namespaces:
# - trino-cluster
# topologyKey: <http://kubernetes.io/hostname|kubernetes.io/hostname>
# weight: 1
tolerations: []
podAnnotations: {}
updateStrategy:
type: RollingUpdate
# Use envFrom to define all of the ConfigMap or Secret data as container environment variables.
# ref: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-pod-configmap/#configure-all-key-value-pairs-in-a-configmap-as-container-environment-variables>
# ref: <https://kubernetes.io/docs/tasks/inject-data-application/distribute-credentials-secure/#configure-all-key-value-pairs-in-a-secret-as-container-environment-variables>
envFrom: []
# - configMapRef:
# name: special-config
# - secretRef:
# name: test-secret
# Use extraEnv to add individual key value pairs as container environment variables.
# ref: <https://kubernetes.io/docs/tasks/inject-data-application/define-environment-variable-container/>
extraEnv: []
# - name: PINOT_CUSTOM_ENV
# value: custom-value
# Extra configs will be appended to pinot-broker.conf file
extra:
configs: |-
pinot.set.instance.id.to.hostname=true
# --------------
ahsen m
04/02/2022, 1:56 AM
2022-04-02 01:53:24,604 ERROR Unsuccessful handshake with session 0x0 (org.apache.zookeeper.server.NettyServerCnxnFactory) [nioEventLoopGroup-7-1]
55
2022-04-02 01:53:24,604 WARN Exception caught (org.apache.zookeeper.server.NettyServerCnxnFactory) [nioEventLoopGroup-7-1]
54
io.netty.handler.codec.DecoderException: io.netty.handler.ssl.NotSslRecordException: not an SSL/TLS record: 0000002d000000000000000000000000000075300000000000000000000000100000000000000000000000000000000000
53
at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:477)
52
at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:276)
51
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)
50
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)
49
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357)
48
at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410)
47
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379)
46
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365)
45
at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919)
44
at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:166)
43
at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:719)
42
at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:655)
41
at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:581)
40
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:493)
39
at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:986)
38
at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74)
37
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
36
at java.base/java.lang.Thread.run(Thread.java:829)
35
Caused by: io.netty.handler.ssl.NotSslRecordException: not an SSL/TLS record: 0000002d000000000000000000000000000075300000000000000000000000100000000000000000000000000000000000
34
at io.netty.handler.ssl.SslHandler.decodeJdkCompatible(SslHandler.java:1214)
33
at io.netty.handler.ssl.SslHandler.decode(SslHandler.java:1284)
32
at io.netty.handler.codec.ByteToMessageDecoder.decodeRemovalReentryProtection(ByteToMessageDecoder.java:507)
31
at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:446)
30
... 17 more
Mayank
ahsen m
04/02/2022, 6:51 PM
Xiang Fu
ahsen m
04/08/2022, 5:26 AM
Nagendra Gautham Gondi
08/31/2022, 7:07 PM