# troubleshoot
g
Hello guys, I'm trying to work with standalone consumers to take some of the burden off the GMS. However, the MAE consumer keeps breaking and I can't identify the reason. I'll post in this thread the error that usually appears when an entity is created or changed.
DataHub version: v0.8.39
Deployment: GKE
Copy code
14:36:05.385 [ThreadPoolTaskExecutor-1] ERROR o.s.k.l.KafkaMessageListenerContainer$ListenerConsumer - Consumer exception
java.lang.IllegalStateException: This error handler cannot process 'SerializationException's directly; please consider configuring an 'ErrorHandlingDeserializer' in the value and/or key deserializer
        at org.springframework.kafka.listener.SeekUtils.seekOrRecover(SeekUtils.java:194)
        at org.springframework.kafka.listener.SeekToCurrentErrorHandler.handle(SeekToCurrentErrorHandler.java:112)
        at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.handleConsumerException(KafkaMessageListenerContainer.java:1604)
        at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1212)
        at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:750)
Caused by: org.apache.kafka.common.errors.SerializationException: Error deserializing key/value for partition MetadataChangeLog_Versioned_v1-0 at offset 51009. If needed, please seek past the record to continue consumption.
Caused by: org.apache.kafka.common.errors.SerializationException: Error retrieving Avro unknown schema for id 1
Caused by: java.net.MalformedURLException: no protocol: datahub/schemas/ids/1?fetchMaxId=false
        at java.net.URL.<init>(URL.java:611)
        at java.net.URL.<init>(URL.java:508)
        at java.net.URL.<init>(URL.java:457)
        at io.confluent.kafka.schemaregistry.client.rest.RestService.sendHttpRequest(RestService.java:257)
        at io.confluent.kafka.schemaregistry.client.rest.RestService.httpRequest(RestService.java:351)
        at io.confluent.kafka.schemaregistry.client.rest.RestService.getId(RestService.java:659)
        at io.confluent.kafka.schemaregistry.client.rest.RestService.getId(RestService.java:641)
        at io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient.getSchemaByIdFromRegistry(CachedSchemaRegistryClient.java:217)
        at io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient.getSchemaBySubjectAndId(CachedSchemaRegistryClient.java:291)
        at io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient.getSchemaById(CachedSchemaRegistryClient.java:276)
        at io.confluent.kafka.serializers.AbstractKafkaAvroDeserializer$DeserializationContext.schemaFromRegistry(AbstractKafkaAvroDeserializer.java:273)
        at io.confluent.kafka.serializers.AbstractKafkaAvroDeserializer.deserialize(AbstractKafkaAvroDeserializer.java:97)
        at io.confluent.kafka.serializers.AbstractKafkaAvroDeserializer.deserialize(AbstractKafkaAvroDeserializer.java:76)
        at io.confluent.kafka.serializers.KafkaAvroDeserializer.deserialize(KafkaAvroDeserializer.java:55)
        at org.apache.kafka.common.serialization.Deserializer.deserialize(Deserializer.java:60)
        at org.apache.kafka.clients.consumer.internals.Fetcher.parseRecord(Fetcher.java:1324)
        at org.apache.kafka.clients.consumer.internals.Fetcher.access$3400(Fetcher.java:129)
        at org.apache.kafka.clients.consumer.internals.Fetcher$CompletedFetch.fetchRecords(Fetcher.java:1555)
        at org.apache.kafka.clients.consumer.internals.Fetcher$CompletedFetch.access$1700(Fetcher.java:1391)
        at org.apache.kafka.clients.consumer.internals.Fetcher.fetchRecords(Fetcher.java:683)
        at org.apache.kafka.clients.consumer.internals.Fetcher.fetchedRecords(Fetcher.java:634)
        at org.apache.kafka.clients.consumer.KafkaConsumer.pollForFetches(KafkaConsumer.java:1290)
        at org.apache.kafka.clients.consumer.KafkaConsumer.poll(KafkaConsumer.java:1248)
        at org.apache.kafka.clients.consumer.KafkaConsumer.poll(KafkaConsumer.java:1216)
        at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doPoll(KafkaMessageListenerContainer.java:1414)
        at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:1251)
        at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1163)
        at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:750)
The MAE consumer loops and repeats this error message indefinitely.
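The root cause is in the innermost exception: java.net.MalformedURLException: no protocol: datahub/schemas/ids/1 means the Confluent client was handed a Schema Registry base URL without an http:// scheme, so it cannot build a valid request URL. A quick way to confirm what the consumer actually received is to inspect its environment; a sketch, assuming the dev namespace and deployment name that appear later in this thread:
Copy code
# Inspect the Schema Registry settings injected into the MAE consumer.
# The value must be a full URL (scheme, host, port), e.g.
# http://datahub-dev-cp-schema-registry:8081
kubectl -n dev exec deploy/datahub-dev-mae-consumer -- env | grep -i SCHEMAREGISTRY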
g
I updated to the new version (v0.8.40), but the problem persists. Describing the pod:
Copy code
Name:         datahub-dev-mae-consumer-6b897c499f-h6p5h
Namespace:    dev
Priority:     0
Node:         gke-dsc-prod-pool-4-638da54c-1e03/
Start Time:   Thu, 30 Jun 2022 11:54:30 -0300
Labels:       app=datahub
              component=maeConsumer
              heritage=Helm
              pod-template-hash=6b897c499f
              release=datahub-dev
Annotations:  <none>
Status:       Running
IP:           
IPs:
  IP:           
Controlled By:  ReplicaSet/datahub-dev-mae-consumer-6b897c499f
Containers:
  datahub-mae-consumer:
    Container ID:   containerd://cc28f15c8d7f4624288f6203d886253ad99c65ad4f96759a5553e7040dff3f7d
    Image:          linkedin/datahub-mae-consumer:v0.8.40
    Image ID:       docker.io/linkedin/datahub-mae-consumer@sha256:835672c3c49f3c8df116be93b2d783374daf43286145c601ec6f4ffdbd9f1139
    Ports:          9091/TCP, 4318/TCP
    Host Ports:     0/TCP, 0/TCP
    State:          Running
      Started:      Thu, 30 Jun 2022 11:54:37 -0300
    Ready:          True
    Restart Count:  0
    Limits:
      cpu:     1
      memory:  1Gi
    Requests:
      cpu:      200m
      memory:   256Mi
    Liveness:   http-get http://:mae-port/actuator/health delay=60s timeout=1s period=30s #success=1 #failure=9
    Readiness:  http-get http://:mae-port/actuator/health delay=60s timeout=1s period=30s #success=1 #failure=8
    Environment:
      MAE_CONSUMER_ENABLED:              true
      PE_CONSUMER_ENABLED:               true
      GRAPH_SERVICE_IMPL:                elasticsearch
      METADATA_AUDIT_EVENT_NAME:         MetadataAuditEvent_v4
      DATAHUB_USAGE_EVENT_NAME:          DataHubUsageEvent_v1
      ENTITY_REGISTRY_CONFIG_PATH:       /datahub/datahub-mae-consumer/resources/entity-registry.yml
      DATAHUB_GMS_HOST:                  datahub-dev-gms-service
      DATAHUB_GMS_PORT:                  8080
      DATAHUB_SECRET:                    <set to the key 'datahub.gms.secret' in secret 'datahub-gms'>  Optional: false
      DATAHUB_APP_VERSION:               v0.8.40
      DATAHUB_PLAY_MEM_BUFFER_SIZE:      15MB
      DATAHUB_ANALYTICS_ENABLED:         true
      KAFKA_BOOTSTRAP_SERVER:            datahub-dev-kafka:9092
      SCHEMA_REGISTRY_TYPE:              KAFKA
      ELASTIC_CLIENT_HOST:               datahub-dev-elasticsearch-master
      ELASTIC_CLIENT_PORT:               9200
      ELASTIC_CLIENT_USE_SSL:            false
      DATAHUB_TRACKING_TOPIC:            DataHubUsageEvent_v1
      KAFKA_SCHEMAREGISTRY_URL:          datahub-dev
      ELASTICSEARCH_HOST:                datahub-dev-elasticsearch-master
      ELASTICSEARCH_PORT:                9200
      UI_INGESTION_ENABLED:              true
      UI_INGESTION_DEFAULT_CLI_VERSION:  0.8.40.0
      JMXPORT:                           1099
    Mounts:
      /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-n95xz (ro)
Conditions:
  Type              Status
  Initialized       True 
  Ready             True 
  ContainersReady   True 
  PodScheduled      True 
Volumes:
  kube-api-access-n95xz:
    Type:                    Projected (a volume that contains injected data from multiple sources)
    TokenExpirationSeconds:  3607
    ConfigMapName:           kube-root-ca.crt
    ConfigMapOptional:       <nil>
    DownwardAPI:             true
QoS Class:                   Burstable
Node-Selectors:              <none>
Tolerations:                 node.kubernetes.io/not-ready:NoExecute op=Exists for 300s
                             node.kubernetes.io/unreachable:NoExecute op=Exists for 300s
Events:
  Type    Reason     Age   From               Message
  ----    ------     ----  ----               -------
  Normal  Scheduled  7m7s  default-scheduler  Successfully assigned dev/datahub-dev-mae-consumer-6b897c499f-h6p5h to gke-dsc-prod-pool-4-638da54c-1e03
  Normal  Pulling    7m6s  kubelet            Pulling image "linkedin/datahub-mae-consumer:v0.8.40"
  Normal  Pulled     7m    kubelet            Successfully pulled image "linkedin/datahub-mae-consumer:v0.8.40" in 6.202813895s
  Normal  Created    7m    kubelet            Created container datahub-mae-consumer
  Normal  Started    7m    kubelet            Started container datahub-mae-consumer
But when I look at the logs:
Copy code
15:03:39.925 [ThreadPoolTaskExecutor-1] ERROR o.s.k.l.KafkaMessageListenerContainer$ListenerConsumer - Consumer exception
java.lang.IllegalStateException: This error handler cannot process 'SerializationException's directly; please consider configuring an 'ErrorHandlingDeserializer' in the value and/or key deserializer
        at org.springframework.kafka.listener.SeekUtils.seekOrRecover(SeekUtils.java:194)
        at org.springframework.kafka.listener.SeekToCurrentErrorHandler.handle(SeekToCurrentErrorHandler.java:112)
        at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.handleConsumerException(KafkaMessageListenerContainer.java:1604)
        at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1212)
        at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:750)
Caused by: org.apache.kafka.common.errors.SerializationException: Error deserializing key/value for partition MetadataChangeLog_Versioned_v1-0 at offset 152964. If needed, please seek past the record to continue consumption.
Caused by: org.apache.kafka.common.errors.SerializationException: Error retrieving Avro unknown schema for id 1
Caused by: java.net.MalformedURLException: no protocol: datahub-dev/schemas/ids/1?fetchMaxId=false
        at java.net.URL.<init>(URL.java:611)
        at java.net.URL.<init>(URL.java:508)
        at java.net.URL.<init>(URL.java:457)
        at io.confluent.kafka.schemaregistry.client.rest.RestService.sendHttpRequest(RestService.java:257)
        at io.confluent.kafka.schemaregistry.client.rest.RestService.httpRequest(RestService.java:351)
        at io.confluent.kafka.schemaregistry.client.rest.RestService.getId(RestService.java:659)
        at io.confluent.kafka.schemaregistry.client.rest.RestService.getId(RestService.java:641)
        at io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient.getSchemaByIdFromRegistry(CachedSchemaRegistryClient.java:217)
        at io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient.getSchemaBySubjectAndId(CachedSchemaRegistryClient.java:291)
        at io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient.getSchemaById(CachedSchemaRegistryClient.java:276)
        at io.confluent.kafka.serializers.AbstractKafkaAvroDeserializer$DeserializationContext.schemaFromRegistry(AbstractKafkaAvroDeserializer.java:273)
        at io.confluent.kafka.serializers.AbstractKafkaAvroDeserializer.deserialize(AbstractKafkaAvroDeserializer.java:97)
        at io.confluent.kafka.serializers.AbstractKafkaAvroDeserializer.deserialize(AbstractKafkaAvroDeserializer.java:76)
        at io.confluent.kafka.serializers.KafkaAvroDeserializer.deserialize(KafkaAvroDeserializer.java:55)
        at org.apache.kafka.common.serialization.Deserializer.deserialize(Deserializer.java:60)
        at org.apache.kafka.clients.consumer.internals.Fetcher.parseRecord(Fetcher.java:1324)
        at org.apache.kafka.clients.consumer.internals.Fetcher.access$3400(Fetcher.java:129)
        at org.apache.kafka.clients.consumer.internals.Fetcher$CompletedFetch.fetchRecords(Fetcher.java:1555)
        at org.apache.kafka.clients.consumer.internals.Fetcher$CompletedFetch.access$1700(Fetcher.java:1391)
        at org.apache.kafka.clients.consumer.internals.Fetcher.fetchRecords(Fetcher.java:683)
        at org.apache.kafka.clients.consumer.internals.Fetcher.fetchedRecords(Fetcher.java:634)
        at org.apache.kafka.clients.consumer.KafkaConsumer.pollForFetches(KafkaConsumer.java:1290)
        at org.apache.kafka.clients.consumer.KafkaConsumer.poll(KafkaConsumer.java:1248)
        at org.apache.kafka.clients.consumer.KafkaConsumer.poll(KafkaConsumer.java:1216)
        at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doPoll(KafkaMessageListenerContainer.java:1414)
        at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:1251)
        at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1163)
        at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
        at java.lang.Thread.run(Thread.java:750)
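Note the pod's environment in the describe output above: KAFKA_SCHEMAREGISTRY_URL is set to the bare string datahub-dev, which is exactly what surfaces in the exception as no protocol: datahub-dev/schemas/ids/1. A minimal sketch of a one-off fix, assuming the deployment name from the describe output (the durable fix belongs in the chart values):
Copy code
# Point the consumer at the full Schema Registry URL; kubectl rolls the
# pods so they restart with the patched environment.
kubectl -n dev set env deployment/datahub-dev-mae-consumer \
  KAFKA_SCHEMAREGISTRY_URL=http://datahub-dev-cp-schema-registry:8081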
o
This looks like an issue with your Schema Registry connection. Is the Schema Registry running and configured correctly?
g
I have this configuration in values.yaml:
Copy code
...
bootstrap:
  server: "datahub-dev-kafka:9092"
zookeeper:
  server: "datahub-dev-zookeeper:2181"
schemaregistry:
  url: "<http://datahub-dev-cp-schema-registry:8081>"
...
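The URL in values.yaml does include the http:// scheme, yet the consumer pod received only datahub-dev, so this value apparently never reached KAFKA_SCHEMAREGISTRY_URL (for example, because it sits under a different key than the one the chart version in use reads). Independently of the chart wiring, the registry itself can be checked against the exact path from the stack trace; a sketch, assuming the service name and port above:
Copy code
# Fetch schema id 1 from inside the cluster; a JSON schema body in the
# response means the registry is up and this is the URL the consumer
# should be given.
kubectl -n dev run curl-test --rm -it --restart=Never --image=curlimages/curl -- \
  curl -s http://datahub-dev-cp-schema-registry:8081/schemas/ids/1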
And describing the schema registry pod:
Copy code
Name:         datahub-dev-cp-schema-registry-7db98b79cf-cb22f
Namespace:    dev
Priority:     0
Node:         gke-dsc-prod-pool-4-1054ff91-u9lu/
Start Time:   Thu, 30 Jun 2022 10:44:55 -0300
Labels:       app=cp-schema-registry
              pod-template-hash=7db98b79cf
              release=datahub-dev
Annotations:  kubernetes.io/limit-ranger:
                LimitRanger plugin set: cpu, memory request for container prometheus-jmx-exporter; cpu, memory request for container cp-schema-registry-se...
              prometheus.io/port: 5556
              prometheus.io/scrape: true
Status:       Running
IP:           
IPs:
  IP:           
Controlled By:  ReplicaSet/datahub-dev-cp-schema-registry-7db98b79cf
Containers:
  prometheus-jmx-exporter:
    Container ID:  containerd://97893aed1d333f254246b56c15c6c458b529b9d2148e296c350d820cec98772e
    Image:         solsson/kafka-prometheus-jmx-exporter@sha256:6f82e2b0464f50da8104acd7363fb9b995001ddff77d248379f8788e78946143
    Image ID:      docker.io/solsson/kafka-prometheus-jmx-exporter@sha256:6f82e2b0464f50da8104acd7363fb9b995001ddff77d248379f8788e78946143
    Port:          5556/TCP
    Host Port:     0/TCP
    Command:
      java
      -XX:+UnlockExperimentalVMOptions
      -XX:+UseCGroupMemoryLimitForHeap
      -XX:MaxRAMFraction=1
      -XshowSettings:vm
      -jar
      jmx_prometheus_httpserver.jar
      5556
      /etc/jmx-schema-registry/jmx-schema-registry-prometheus.yml
    State:          Running
      Started:      Thu, 30 Jun 2022 10:44:57 -0300
    Ready:          True
    Restart Count:  0
    Requests:
      cpu:        300m
      memory:     500Mi
    Environment:  <none>
    Mounts:
      /etc/jmx-schema-registry from jmx-config (rw)
      /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-l8c2j (ro)
  cp-schema-registry-server:
    Container ID:   containerd://69fdcc5654ffd9daeac77f0de1883d0f11319656b0ba016fe1a3e9a2a7794ced
    Image:          confluentinc/cp-schema-registry:6.0.1
    Image ID:       docker.io/confluentinc/cp-schema-registry@sha256:b52e16cf232e3c9acd677ae8944de813e16fa541a367d9f805b300c5d2be1a1f
    Ports:          8081/TCP, 5555/TCP
    Host Ports:     0/TCP, 0/TCP
    State:          Running
      Started:      Thu, 30 Jun 2022 10:45:57 -0300
    Last State:     Terminated
      Reason:       Error
      Exit Code:    1
      Started:      Thu, 30 Jun 2022 10:44:58 -0300
      Finished:     Thu, 30 Jun 2022 10:45:56 -0300
    Ready:          True
    Restart Count:  1
    Requests:
      cpu:     300m
      memory:  500Mi
    Environment:
      SCHEMA_REGISTRY_HOST_NAME:                      (v1:status.podIP)
      SCHEMA_REGISTRY_LISTENERS:                     http://0.0.0.0:8081
      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS:  datahub-dev-kafka:9092
      SCHEMA_REGISTRY_KAFKASTORE_GROUP_ID:           datahub-dev
      SCHEMA_REGISTRY_MASTER_ELIGIBILITY:            true
      SCHEMA_REGISTRY_HEAP_OPTS:                     -Xms512M -Xmx512M
      JMX_PORT:                                      5555
    Mounts:
      /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-l8c2j (ro)
Conditions:
  Type              Status
  Initialized       True 
  Ready             True 
  ContainersReady   True 
  PodScheduled      True 
Volumes:
  jmx-config:
    Type:      ConfigMap (a volume populated by a ConfigMap)
    Name:      datahub-dev-cp-schema-registry-jmx-configmap
    Optional:  false
  kube-api-access-l8c2j:
    Type:                    Projected (a volume that contains injected data from multiple sources)
    TokenExpirationSeconds:  3607
    ConfigMapName:           kube-root-ca.crt
    ConfigMapOptional:       <nil>
    DownwardAPI:             true
QoS Class:                   Burstable
Node-Selectors:              <none>
Tolerations:                 node.kubernetes.io/not-ready:NoExecute op=Exists for 300s
                             node.kubernetes.io/unreachable:NoExecute op=Exists for 300s
Events:                      <none>
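One more detail from this describe output: the cp-schema-registry-server container exited once with Error (exit code 1) before settling into Running. If schema lookups still fail after the consumer's URL is corrected, the registry's own logs and its subject list are the next things to check; a sketch, assuming the names above:
Copy code
# Registry-side sanity checks: recent server logs, then the list of
# registered subjects over the REST API.
kubectl -n dev logs deploy/datahub-dev-cp-schema-registry -c cp-schema-registry-server --tail=100
kubectl -n dev run curl-subjects --rm -it --restart=Never --image=curlimages/curl -- \
  curl -s http://datahub-dev-cp-schema-registry:8081/subjects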