I was able to connect to Confluent cloud when runn...
# ingestion
I was able to connect to Confluent Cloud when running DataHub v0.8.6 on GCP GKE, thanks to @big-carpet-38439. Notes here in case it helps. Datasets are not showing in search results, but the data does appear when navigating directly to the dataset URL, so the data is there.
The Helm chart values I used:
# DataHub GMS (metadata service), exposed in-cluster via NodePort.
datahub-gms:
  enabled: true
  image:
    repository: linkedin/datahub-gms
    tag: "{{ datahub_tag }}"
  service:
    type: NodePort
  nodeSelector:
    temp: "false"
  resources:
    limits:
      cpu: 1000m
      memory: 1Gi
    requests:
      cpu: 500m
      # 512Mi is the canonical form of the fractional quantity "0.5Gi"
      # (same value; avoids non-canonical decimal binary-suffix quantities).
      memory: 512Mi

# DataHub frontend (React UI) with Google OIDC SSO enabled.
datahub-frontend:
  enabled: true
  image:
    repository: linkedin/datahub-frontend-react
    tag: "{{ datahub_tag }}"
  service:
    type: NodePort
  nodeSelector:
    temp: "false"
  resources:
    limits:
      cpu: 1000m
      memory: 1Gi
    requests:
      cpu: 500m
      # Canonical form of "0.5Gi".
      memory: 512Mi
  extraEnvs:
    - name: AUTH_OIDC_ENABLED
      value: "true"
    # Client id/secret are read from pre-created Kubernetes secrets —
    # NOTE(review): the names suggest they are shared with a Superset
    # deployment; confirm that reuse is intentional.
    - name: AUTH_OIDC_CLIENT_ID
      valueFrom:
        secretKeyRef:
          name: superset-google-oauth-key
          key: token
    - name: AUTH_OIDC_CLIENT_SECRET
      valueFrom:
        secretKeyRef:
          name: superset-google-oauth-secret
          key: token
    - name: AUTH_OIDC_SCOPE
      value: "openid email"
    # Fixed: the Slack copy/paste had wrapped this URL in angle brackets
    # ("<https://...>"), which would be passed verbatim to the OIDC client
    # and break discovery.
    - name: AUTH_OIDC_DISCOVERY_URI
      value: "https://accounts.google.com/.well-known/openid-configuration"
    - name: AUTH_OIDC_BASE_URL
      value: "https://{{ datahub_domain }}"
    # Use the email claim as the username, extracting the local part
    # (everything before "@") via the regex below.
    - name: AUTH_OIDC_USER_NAME_CLAIM
      value: "email"
    - name: AUTH_OIDC_USER_NAME_CLAIM_REGEX
      value: "([^@]+)"

# Standalone MAE (Metadata Audit Event) consumer; enabled to match
# global.datahub_standalone_consumers_enabled below.
datahub-mae-consumer:
  enabled: true
  image:
    repository: linkedin/datahub-mae-consumer
    tag: "{{datahub_tag}}"
  # NOTE(review): consumers select temp: "true" nodes while gms/frontend
  # select temp: "false" — presumably distinct node pools; confirm intent.
  nodeSelector:
    temp: "true"
  resources:
    limits:
      cpu: 1000m
      memory: 1Gi
    requests:
      cpu: 500m
      memory: 0.5Gi

# Standalone MCE (Metadata Change Event) consumer; enabled to match
# global.datahub_standalone_consumers_enabled below.
datahub-mce-consumer:
  enabled: true
  image:
    repository: linkedin/datahub-mce-consumer
    tag: "{{datahub_tag}}"
  # NOTE(review): same temp: "true" node selection as the MAE consumer —
  # confirm the node pools match expectations.
  nodeSelector:
    temp: "true"
  resources:
    limits:
      cpu: 1000m
      memory: 1Gi
    requests:
      cpu: 500m
      memory: 0.5Gi

# Scheduled in-cluster ingestion — disabled here; presumably ingestion is
# run outside the cluster (TODO confirm).
datahub-ingestion-cron:
  enabled: false
  image:
    repository: linkedin/datahub-ingestion
    tag: "{{datahub_tag}}"

# One-shot Elasticsearch index setup job — disabled; presumably the indices
# were created in an earlier install (TODO confirm before a fresh deploy).
elasticsearchSetupJob:
  enabled: false
  image:
    repository: linkedin/datahub-elasticsearch-setup
    tag: "{{datahub_tag}}"


# One-shot Kafka topic setup job — disabled; presumably topics are created
# directly in Confluent Cloud rather than by this job (TODO confirm).
kafkaSetupJob:
  enabled: false
  image:
    repository: linkedin/datahub-kafka-setup
    tag: "{{datahub_tag}}"

# One-shot MySQL schema setup job — disabled; presumably the database was
# initialized out-of-band (TODO confirm before a fresh deploy).
mysqlSetupJob:
  enabled: false
  image:
    repository: acryldata/datahub-mysql-setup
    tag: "{{datahub_tag}}"

# DataHub upgrade/migration job — disabled; sqlDbType would apply to the
# no-code data migration if the job were enabled.
datahubUpgrade:
  enabled: false
  image:
    repository: acryldata/datahub-upgrade
    tag: "{{datahub_tag}}"
  noCodeDataMigration:
    sqlDbType: "MYSQL"

global:
  graph_service_impl: elasticsearch
  datahub_analytics_enabled: true
  # Run MAE/MCE consumers as standalone deployments (see the
  # datahub-mae-consumer / datahub-mce-consumer stanzas above).
  datahub_standalone_consumers_enabled: true

  # Confluent Cloud broker plus its hosted schema registry.
  kafka:
    bootstrap:
      server: "{{ kafka_bootstrap_server_url }}"
    schemaregistry:
      url: "https://{{ kafka_schema_registry_url }}"

  # In-cluster Elasticsearch.
  elasticsearch:
    host: "elasticsearch-master.apps.svc.cluster.local"
    port: "9200"
    indexPrefix: "datahub"

  sql:
    datasource:
      host: "{{ sql_host }}:3306"
      hostForMysqlClient: "{{ sql_host }}"
      port: "3306"
      # TLSv1.2 enforced, but verifyServerCertificate=false skips server
      # certificate validation — NOTE(review): consider enabling
      # verification in production.
      url: "jdbc:mysql://{{ sql_host }}:3306/{{ sql_database }}?verifyServerCertificate=false&useSSL=true&useUnicode=yes&characterEncoding=UTF-8&enabledTLSProtocols=TLSv1.2"
      driver: "com.mysql.jdbc.Driver"
      username: "{{ sql_user }}"
      password:
        secretRef: datahub-sql-secret
        secretKey: token

  datahub:
    gms:
      port: "8080"
    mae_consumer:
      port: "9091"
    appVersion: "1.0"

  # Passed through to the Spring Kafka clients: SASL_SSL auth against
  # Confluent Cloud plus basic auth for the schema registry.
  springKafkaConfigurationOverrides:
    security.protocol: "SASL_SSL"
    sasl.jaas.config: "org.apache.kafka.common.security.plain.PlainLoginModule required username='{{ kafka_key_id }}' password='{{ kafka_key_secret }}';"
    sasl.mechanism: "PLAIN"
    client.dns.lookup: "use_all_dns_ips"
    basic.auth.credentials.source: "USER_INFO"
    # Fixed: Slack had mangled this key into "<http://...|...>" link markup;
    # the real property name is basic.auth.user.info.
    basic.auth.user.info: "{{ kafka_schema_registry_key_id }}:{{ kafka_schema_registry_key_secret }}"
I'll also be raising a PR with a Confluent Cloud guide.
We are trying to work through running the reindexing job cc @early-lamp-41924