# all-things-deployment
a
Hello everyone, I tried DataHub using the quickstart command and now I want to deploy it on a server using Docker Compose. It seems there isn't a single docker-compose file that corresponds to the quickstart deployment, but it looks like I can generate one with the Python script generate_docker_quickstart.py. Am I right?
This is the first version I got after running it:
networks:
  default:
    name: datahub_network
services:
  broker:
    container_name: broker
    depends_on:
    - zookeeper
    environment:
    - KAFKA_BROKER_ID=1
    - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181
    - KAFKA_LISTENER_SECURITY_PROTOCOL_MAP=PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
    - KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092
    - KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR=1
    - KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS=0
    - KAFKA_HEAP_OPTS=-Xms256m -Xmx256m
    hostname: broker
    image: confluentinc/cp-kafka:5.4.0
    ports:
    - 29092:29092
    - 9092:9092
  datahub-frontend-react:
    container_name: datahub-frontend-react
    depends_on:
    - datahub-gms
    environment:
    - DATAHUB_GMS_HOST=datahub-gms
    - DATAHUB_GMS_PORT=8080
    - DATAHUB_SECRET=YouKnowNothing
    - DATAHUB_APP_VERSION=1.0
    - DATAHUB_PLAY_MEM_BUFFER_SIZE=10MB
    - JAVA_OPTS= -Dhttp.port=9002 -Dconfig.file=datahub-frontend/conf/application.conf
      -Djava.security.auth.login.config=datahub-frontend/conf/jaas.conf -Dlogback.configurationFile=datahub-frontend/conf/logback.xml
      -Dlogback.debug=false -Dpidfile.path=/dev/null
    - KAFKA_BOOTSTRAP_SERVER=broker:29092
    - DATAHUB_TRACKING_TOPIC=DataHubUsageEvent_v1
    - ELASTIC_CLIENT_HOST=elasticsearch
    - ELASTIC_CLIENT_PORT=9200
    hostname: datahub-frontend-react
    image: linkedin/datahub-frontend-react:v0.8.16
    ports:
    - 9002:9002
  datahub-gms:
    container_name: datahub-gms
    depends_on:
    - mysql
    environment:
    - DATASET_ENABLE_SCSI=false
    - EBEAN_DATASOURCE_USERNAME=datahub
    - EBEAN_DATASOURCE_PASSWORD=datahub
    - EBEAN_DATASOURCE_HOST=mysql:3306
    - EBEAN_DATASOURCE_URL=jdbc:mysql://mysql:3306/datahub?verifyServerCertificate=false&useSSL=true&useUnicode=yes&characterEncoding=UTF-8&enabledTLSProtocols=TLSv1.2
    - EBEAN_DATASOURCE_DRIVER=com.mysql.jdbc.Driver
    - KAFKA_BOOTSTRAP_SERVER=broker:29092
    - KAFKA_SCHEMAREGISTRY_URL=http://schema-registry:8081
    - ELASTICSEARCH_HOST=elasticsearch
    - ELASTICSEARCH_PORT=9200
    - JAVA_OPTS=-Xms1g -Xmx1g
    - ENTITY_REGISTRY_CONFIG_PATH=/datahub/datahub-gms/resources/entity-registry.yml
    - MAE_CONSUMER_ENABLED=true
    - MCE_CONSUMER_ENABLED=true
    hostname: datahub-gms
    image: linkedin/datahub-gms:v0.8.16
    ports:
    - 8080:8080
  elasticsearch:
    container_name: elasticsearch
    environment:
    - discovery.type=single-node
    - xpack.security.enabled=false
    - ES_JAVA_OPTS=-Xms256m -Xmx256m
    healthcheck:
      retries: 4
      start_period: 2m
      test:
      - CMD-SHELL
      - curl -sS --fail 'http://localhost:9200/_cluster/health?wait_for_status=yellow&timeout=0s'
        || exit 1
    hostname: elasticsearch
    image: elasticsearch:7.9.3
    mem_limit: 1g
    ports:
    - 9200:9200
    volumes:
    - esdata:/usr/share/elasticsearch/data
  elasticsearch-setup:
    container_name: elasticsearch-setup
    depends_on:
    - elasticsearch
    environment:
    - ELASTICSEARCH_HOST=elasticsearch
    - ELASTICSEARCH_PORT=9200
    - ELASTICSEARCH_PROTOCOL=http
    hostname: elasticsearch-setup
    image: linkedin/datahub-elasticsearch-setup:v0.8.16
  kafka-setup:
    container_name: kafka-setup
    depends_on:
    - broker
    - schema-registry
    environment:
    - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181
    - KAFKA_BOOTSTRAP_SERVER=broker:29092
    hostname: kafka-setup
    image: linkedin/datahub-kafka-setup:v0.8.16
  mysql:
    command: --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci
    container_name: mysql
    environment:
    - MYSQL_DATABASE=datahub
    - MYSQL_USER=datahub
    - MYSQL_PASSWORD=datahub
    - MYSQL_ROOT_PASSWORD=datahub
    hostname: mysql
    image: mysql:5.7
    ports:
    - 3306:3306
    volumes:
    - ../mysql/init.sql:/docker-entrypoint-initdb.d/init.sql
    - mysqldata:/var/lib/mysql
  mysql-setup:
    container_name: mysql-setup
    depends_on:
    - mysql
    environment:
    - MYSQL_HOST=mysql
    - MYSQL_PORT=3306
    - MYSQL_USERNAME=datahub
    - MYSQL_PASSWORD=datahub
    - DATAHUB_DB_NAME=datahub
    hostname: mysql-setup
    image: acryldata/datahub-mysql-setup:head
  schema-registry:
    container_name: schema-registry
    depends_on:
    - zookeeper
    - broker
    environment:
    - SCHEMA_REGISTRY_HOST_NAME=schemaregistry
    - SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL=zookeeper:2181
    hostname: schema-registry
    image: confluentinc/cp-schema-registry:5.4.0
    ports:
    - 8081:8081
  zookeeper:
    container_name: zookeeper
    environment:
    - ZOOKEEPER_CLIENT_PORT=2181
    - ZOOKEEPER_TICK_TIME=2000
    hostname: zookeeper
    image: confluentinc/cp-zookeeper:5.4.0
    ports:
    - 2181:2181
    volumes:
    - zkdata:/var/opt/zookeeper
version: '2.3'
volumes:
  esdata: null
  mysqldata: null
  zkdata: null
Does it look alright? I'm not sure
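For reference, this is roughly how I plan to run it on the server, assuming the generated file is saved as docker-compose.yml in the working directory (the project name datahub below is arbitrary):
# start the stack in the background
docker-compose -f docker-compose.yml -p datahub up -d
# check container status and follow the GMS logs while it comes up
docker-compose -p datahub ps
docker-compose -p datahub logs -f datahub-gms
# tear it down again (add -v to also remove the named volumes)
docker-compose -p datahub down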
e
Hey! The docker-compose files are under /docker/quickstart
docker quickstart simply deploys one of these yaml files
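For example, instead of generating one yourself, you can point docker-compose at the checked-in quickstart file for the release you're running, roughly like this (the exact file name under docker/quickstart can differ between releases, so double-check in the repo at your tag):
# clone the repo and check out the tag matching your container images
git clone https://github.com/linkedin/datahub.git
cd datahub
git checkout v0.8.16
# bring up the pre-generated quickstart compose file directly
docker-compose -f docker/quickstart/docker-compose.quickstart.yml -p datahub up -d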
b
Let us know if you have additional questions!