From 8868f35b3683066e8a0a90eb99ed9cd862718af6 Mon Sep 17 00:00:00 2001 From: Matheus Mota Date: Mon, 7 Sep 2020 17:55:27 -0300 Subject: [PATCH] adding kafka drop interface --- docker-compose.yml | 108 ++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 97 insertions(+), 11 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index bed6999..d697dca 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -9,23 +9,23 @@ services: - HARENA_LOGGER_FLASK_HOST=0.0.0.0 - HARENA_LOGGER_FLASK_PORT=10030 - HARENA_LOGGER_FLASK_DEBUG=False - - HARENA_LOGGER_BROKER_HOST=harena-logger-broker + - HARENA_LOGGER_BROKER_HOST=broker - HARENA_LOGGER_BROKER_PORT=1883 - - HARENA_LOGGER_MONGODB_HOST=harena-logger-rawdata + - HARENA_LOGGER_MONGODB_HOST=mongodb - HARENA_LOGGER_MONGODB_PORT=27017 - HARENA_LOGGER_MONGODB_DB=harena_logger - HARENA_LOGGER_MONGODB_COLLECTION=executions ports: - 10030:10030 depends_on: - - harena-logger-broker - - harena-logger-rawdata + - kafka1 + - mongodb restart: always networks: - harena-logger - harena-logger-rawdata: + mongodb: image: mongo:latest environment: - MONGO_DATA_DIR=/data/db @@ -33,23 +33,109 @@ services: ports: - 10031:27017 volumes: - - harena_logger_rawdata:/data/db + - harena_logger_mongodb_data:/data/db # command: mongod --smallfiles --logpath=/dev/null # --quiet networks: - harena-logger - harena-logger-broker: - image: eclipse-mosquitto + zoo1: + image: zookeeper:3.4.9 + hostname: zoo1 ports: - - 10032:1883 - restart: always + - "2181:2181" + environment: + ZOO_MY_ID: 1 + ZOO_PORT: 2181 + ZOO_SERVERS: server.1=zoo1:2888:3888 + volumes: + - harena_logger_kafka_zoo1_data:/data + - harena_logger_kafka_zoo1_datalog:/datalog + networks: + - harena-logger + + + kafka1: + image: confluentinc/cp-kafka:5.5.1 + hostname: kafka1 + ports: + - "9092:9092" + environment: + KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka1:19092,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9092 + 
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT + KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL + KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181" + KAFKA_BROKER_ID: 1 + KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO" + KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 + volumes: + - harena_logger_kafka1_data:/var/lib/kafka/data + depends_on: + - zoo1 + networks: + - harena-logger + + kafka-connect: + image: confluentinc/cp-kafka-connect:5.1.2 + build: + context: . + dockerfile: Dockerfile + hostname: kafka-connect + container_name: kafka-connect + depends_on: + - zoo1 + - kafka1 + ports: + - "8083:8083" + + environment: + CONNECT_BOOTSTRAP_SERVERS: 'kafka1:19092' + CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect + CONNECT_REST_PORT: 8083 + CONNECT_GROUP_ID: compose-connect-group + CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs + CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1 + CONNECT_OFFSET_FLUSH_INTERVAL_MS: 10000 + CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets + CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1 + CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status + CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1 + CONNECT_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter + CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter + CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" + CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter" + CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO" + CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR,com.mongodb.kafka=DEBUG" + CONNECT_PLUGIN_PATH: /usr/share/confluent-hub-components + CONNECT_ZOOKEEPER_CONNECT: 'zoo1:2181' + # Assumes image is based on confluentinc/kafka-connect-datagen:latest which is pulling 5.2.2 Connect image + CLASSPATH: 
/usr/share/java/monitoring-interceptors/monitoring-interceptors-5.2.2.jar + CONNECT_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor" + CONNECT_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor" + command: "bash -c 'if [ ! -d /usr/share/confluent-hub-components/confluentinc-kafka-connect-datagen ]; then echo \"WARNING: Did not find directory for kafka-connect-datagen (did you remember to run: docker-compose up -d --build ?)\"; fi ; /etc/confluent/docker/run'" + volumes: + - ./kafka-connect-mongodb:/usr/share/confluent-hub-components/kafka-connect-mongodb + networks: + - harena-logger + + + kafdrop: + image: obsidiandynamics/kafdrop:latest + depends_on: + - kafka1 + ports: + - 9000:9000 + environment: + KAFKA_BROKERCONNECT: kafka1:19092 networks: - harena-logger volumes: - harena_logger_rawdata: + harena_logger_mongodb_data: + harena_logger_kafka_zoo1_data: + harena_logger_kafka_zoo1_datalog: + harena_logger_kafka1_data: networks: