Merge pull request #15 from datasci4health/development
Development
matheusmota authored Sep 7, 2020
2 parents 3367ce8 + 33a1db3 commit ae89f35
Showing 6 changed files with 385 additions and 57 deletions.
2 changes: 1 addition & 1 deletion Dockerfile
@@ -9,4 +9,4 @@ RUN pip3 install --upgrade -r requirements.txt

ADD ./src .

CMD ["python3", "server.py"]
CMD ["python3", "-u", "server.py"]
150 changes: 150 additions & 0 deletions docker-compose-dev.yml
@@ -0,0 +1,150 @@
version: '3'

services:


harena-logger:
build: .
environment:
- HARENA_LOGGER_FLASK_HOST=0.0.0.0
- HARENA_LOGGER_FLASK_PORT=10030
- HARENA_LOGGER_FLASK_DEBUG=True
- FLASK_DEBUG=True
- FLASK_ENV=development
- HARENA_LOGGER_MONGODB_HOST=mongodb
- HARENA_LOGGER_MONGODB_PORT=27017
- HARENA_LOGGER_MONGODB_DB=harena_logger
- HARENA_LOGGER_MONGODB_COLLECTION=event_logs
- HARENA_LOGGER_KAFKA_BROKERS=kafka1:19092
- PYTHONUNBUFFERED=1
- PYTHONIOENCODING=UTF-8

ports:
- 10030:10030
depends_on:
- kafka1
- mongodb
- zoo1
restart: always
networks:
- harena-logger
volumes:
- ./src:/app/src


mongodb:
image: mongo:latest
environment:
- MONGO_DATA_DIR=/data/db
- MONGO_LOG_DIR=/dev/null
ports:
- 10031:27017
volumes:
- harena_logger_mongodb_data:/data/db
# command: mongod --smallfiles --logpath=/dev/null # --quiet
networks:
- harena-logger


zoo1:
image: zookeeper:3.4.9
hostname: zoo1
ports:
- "2181:2181"
environment:
ZOO_MY_ID: 1
ZOO_PORT: 2181
ZOO_SERVERS: server.1=zoo1:2888:3888
volumes:
- harena_logger_kafka_zoo1_data:/data
- harena_logger_kafka_zoo1_datalog:/datalog
networks:
- harena-logger


kafka1:
image: confluentinc/cp-kafka:5.5.1
hostname: kafka1
ports:
- "9092:9092"
environment:
KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka1:19092,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9092
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT
KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL
KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
KAFKA_BROKER_ID: 1
KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
volumes:
- harena_logger_kafka1_data:/var/lib/kafka/data
depends_on:
- zoo1
networks:
- harena-logger

# kafka-connect:
# image: confluentinc/cp-kafka-connect:5.1.2
# build:
# context: .
# dockerfile: Dockerfile
# hostname: kafka-connect
# container_name: kafka-connect
# depends_on:
# - zoo1
# - kafka1
# ports:
# - "8083:8083"

# environment:
# CONNECT_BOOTSTRAP_SERVERS: 'kafka1:29092'
# CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect
# CONNECT_REST_PORT: 8083
# CONNECT_GROUP_ID: compose-connect-group
# CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
# CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
# CONNECT_OFFSET_FLUSH_INTERVAL_MS: 10000
# CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
# CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
# CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
# CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
# CONNECT_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
# CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
# CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
# CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
# CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO"
# CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR,com.mongodb.kafka=DEBUG"
# CONNECT_PLUGIN_PATH: /usr/share/confluent-hub-components
# CONNECT_ZOOKEEPER_CONNECT: 'zookeeper:2181'
# # Assumes image is based on confluentinc/kafka-connect-datagen:latest which is pulling 5.2.2 Connect image
# CLASSPATH: /usr/share/java/monitoring-interceptors/monitoring-interceptors-5.2.2.jar
# CONNECT_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor"
# CONNECT_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor"
# command: "bash -c 'if [ ! -d /usr/share/confluent-hub-components/confluentinc-kafka-connect-datagen ]; then echo \"WARNING: Did not find directory for kafka-connect-datagen (did you remember to run: docker-compose up -d --build ?)\"; fi ; /etc/confluent/docker/run'"
# volumes:
# - ./kafka-connect-mongodb:/usr/share/confluent-hub-components/kafka-connect-mongodb
# networks:
# - harena-logger


kafdrop:
image: obsidiandynamics/kafdrop:latest
depends_on:
- kafka1
ports:
- 9000:9000
environment:
KAFKA_BROKERCONNECT: kafka1:19092
networks:
- harena-logger


volumes:
harena_logger_mongodb_data:
harena_logger_kafka_zoo1_data:
harena_logger_kafka_zoo1_datalog:
harena_logger_kafka1_data:


networks:
harena-logger:
driver: bridge
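
The new development stack wires the Flask service to a single-node Kafka broker, with Zookeeper for coordination, Kafdrop for topic inspection, and MongoDB for storage. server.py itself is not part of this diff, but with kafka-python now in requirements.txt the producing side could look roughly like the following sketch; the topic name comes from the harena-logs default in src/config.py, and the payload is hypothetical:

import json
import os

from kafka import KafkaProducer  # provided by the kafka-python package

producer = KafkaProducer(
    bootstrap_servers=os.environ.get('HARENA_LOGGER_KAFKA_BROKERS', 'kafka1:19092').split(','),
    value_serializer=lambda v: json.dumps(v).encode('utf-8'),
)

producer.send('harena-logs', {'case': 'demo', 'action': 'case_opened'})  # hypothetical event
producer.flush()  # block until buffered messages are sent to the broker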
119 changes: 105 additions & 14 deletions docker-compose.yml
@@ -8,48 +8,139 @@ services:
environment:
- HARENA_LOGGER_FLASK_HOST=0.0.0.0
- HARENA_LOGGER_FLASK_PORT=10030
- HARENA_LOGGER_FLASK_DEBUG=False
- HARENA_LOGGER_BROKER_HOST=harena-logger-broker
- HARENA_LOGGER_BROKER_PORT=1883
- HARENA_LOGGER_MONGODB_HOST=harena-logger-rawdata
- HARENA_LOGGER_FLASK_DEBUG=True
- FLASK_DEBUG=True
- FLASK_ENV=production
- HARENA_LOGGER_MONGODB_HOST=mongodb
- HARENA_LOGGER_MONGODB_PORT=27017
- HARENA_LOGGER_MONGODB_DB=harena_logger
- HARENA_LOGGER_MONGODB_COLLECTION=executions
- HARENA_LOGGER_MONGODB_COLLECTION=event_logs
- HARENA_LOGGER_KAFKA_BROKERS=kafka1:19092
- PYTHONUNBUFFERED=1
- PYTHONIOENCODING=UTF-8
ports:
- 10030:10030
depends_on:
- harena-logger-broker
- harena-logger-rawdata
- kafka1
- mongodb
- zoo1
restart: always
restart: always
networks:
- harena-logger


harena-logger-rawdata:
mongodb:
image: mongo:latest
environment:
- MONGO_DATA_DIR=/data/db
- MONGO_LOG_DIR=/dev/null
ports:
- 10031:27017
volumes:
- harena_logger_rawdata:/data/db
- harena_logger_mongodb_data:/data/db
# command: mongod --smallfiles --logpath=/dev/null # --quiet
networks:
- harena-logger


harena-logger-broker:
image: eclipse-mosquitto
zoo1:
image: zookeeper:3.4.9
hostname: zoo1
ports:
- 10032:1883
restart: always
- "2181:2181"
environment:
ZOO_MY_ID: 1
ZOO_PORT: 2181
ZOO_SERVERS: server.1=zoo1:2888:3888
volumes:
- harena_logger_kafka_zoo1_data:/data
- harena_logger_kafka_zoo1_datalog:/datalog
networks:
- harena-logger


kafka1:
image: confluentinc/cp-kafka:5.5.1
hostname: kafka1
ports:
- "9092:9092"
environment:
KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka1:19092,LISTENER_DOCKER_EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9092
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT
KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL
KAFKA_ZOOKEEPER_CONNECT: "zoo1:2181"
KAFKA_BROKER_ID: 1
KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
volumes:
- harena_logger_kafka1_data:/var/lib/kafka/data
depends_on:
- zoo1
networks:
- harena-logger

kafka-connect:
image: confluentinc/cp-kafka-connect:5.1.2
build:
context: .
dockerfile: Dockerfile
hostname: kafka-connect
container_name: kafka-connect
depends_on:
- zoo1
- kafka1
ports:
- "8083:8083"

environment:
CONNECT_BOOTSTRAP_SERVERS: 'kafka1:29092'
CONNECT_REST_ADVERTISED_HOST_NAME: kafka-connect
CONNECT_REST_PORT: 8083
CONNECT_GROUP_ID: compose-connect-group
CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
CONNECT_OFFSET_FLUSH_INTERVAL_MS: 10000
CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
CONNECT_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
CONNECT_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
CONNECT_INTERNAL_VALUE_CONVERTER: "org.apache.kafka.connect.json.JsonConverter"
CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO"
CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR,com.mongodb.kafka=DEBUG"
CONNECT_PLUGIN_PATH: /usr/share/confluent-hub-components
CONNECT_ZOOKEEPER_CONNECT: 'zookeeper:2181'
# Assumes image is based on confluentinc/kafka-connect-datagen:latest which is pulling 5.2.2 Connect image
CLASSPATH: /usr/share/java/monitoring-interceptors/monitoring-interceptors-5.2.2.jar
CONNECT_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor"
CONNECT_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor"
command: "bash -c 'if [ ! -d /usr/share/confluent-hub-components/confluentinc-kafka-connect-datagen ]; then echo \"WARNING: Did not find directory for kafka-connect-datagen (did you remember to run: docker-compose up -d --build ?)\"; fi ; /etc/confluent/docker/run'"
volumes:
- ./kafka-connect-mongodb:/usr/share/confluent-hub-components/kafka-connect-mongodb
networks:
- harena-logger


kafdrop:
image: obsidiandynamics/kafdrop:latest
depends_on:
- kafka1
ports:
- 9000:9000
environment:
KAFKA_BROKERCONNECT: kafka1:19092
networks:
- harena-logger


volumes:
harena_logger_rawdata:
harena_logger_mongodb_data:
harena_logger_kafka_zoo1_data:
harena_logger_kafka_zoo1_datalog:
harena_logger_kafka1_data:


networks:
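
Compared to the previous revision, the production stack drops the eclipse-mosquitto MQTT broker and MQTT environment variables in favor of the same Kafka pipeline as the dev file, and it enables the kafka-connect service that remains commented out there. Note the two advertised listeners: containers on the compose network reach the broker as kafka1:19092, while clients on the host connect through the published port 9092. A quick host-side check, assuming kafka-python is installed on the host:

from kafka import KafkaConsumer

consumer = KafkaConsumer(bootstrap_servers=['127.0.0.1:9092'])
print(consumer.topics())  # should include 'harena-logs' once events have flowed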
3 changes: 2 additions & 1 deletion requirements.txt
@@ -6,4 +6,5 @@ flask-restful
#flask-migrate
paho-mqtt
pymongo
kafka
kafka-python
coloredlogs
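
The dependency rename matters: on PyPI, kafka points at an outdated release of the client, while kafka-python is the maintained distribution; both are imported as kafka. Together with pymongo, this suggests a consumer side that drains the topic into the event_logs collection, roughly as follows (a hypothetical sketch; the actual consumer code is not shown in this diff, and the connection values are the defaults from src/config.py):

import json

from kafka import KafkaConsumer  # package: kafka-python
from pymongo import MongoClient

consumer = KafkaConsumer(
    'harena-logs',                                   # HARENA_LOGGER_KAFKA_TOPIC default
    bootstrap_servers=['kafka1:19092'],              # HARENA_LOGGER_KAFKA_BROKERS default
    value_deserializer=lambda m: json.loads(m.decode('utf-8')),
)

events = MongoClient('mongodb://mongodb:27017/')['harena_logger']['event_logs']

for message in consumer:
    events.insert_one(message.value)                 # one document per logged event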
22 changes: 18 additions & 4 deletions src/config.py
@@ -1,18 +1,32 @@
import os


class Config(object):

HARENA_LOGGER_FLASK_HOST = os.environ.get('HARENA_LOGGER_FLASK_HOST', '0.0.0.0')
HARENA_LOGGER_FLASK_PORT = int(os.environ.get('HARENA_LOGGER_FLASK_PORT', 10030))
HARENA_LOGGER_FLASK_DEBUG = bool(os.environ.get('HARENA_LOGGER_FLASK_DEBUG', False))
HARENA_LOGGER_FLASK_DEBUG = bool(os.environ.get('HARENA_LOGGER_FLASK_DEBUG', True))

HARENA_LOGGER_MONGODB_HOST = os.environ.get('HARENA_LOGGER_MONGODB_HOST', 'localhost')
HARENA_LOGGER_MONGODB_PORT = int(os.environ.get('HARENA_LOGGER_MONGODB_PORT', 10031))
HARENA_LOGGER_MONGODB_URL ="mongodb://{0}:{1}/".format(HARENA_LOGGER_MONGODB_HOST, HARENA_LOGGER_MONGODB_PORT)
HARENA_LOGGER_MONGODB_DB = os.environ.get('HARENA_LOGGER_MONGODB_DB', 'harena_logger')
HARENA_LOGGER_MONGODB_COLLECTION = os.environ.get('HARENA_LOGGER_MONGODB_COLLECTION', 'executions')
HARENA_LOGGER_MONGODB_COLLECTION = os.environ.get('HARENA_LOGGER_MONGODB_COLLECTION', 'event_logs')

HARENA_LOGGER_KAFKA_BROKERS = os.environ.get('HARENA_LOGGER_KAFKA_BROKERS', 'kafka1:19092')
HARENA_LOGGER_KAFKA_TOPIC = os.environ.get('HARENA_LOGGER_KAFKA_TOPIC', 'harena-logs')
HARENA_LOGGER_INTERVAL_S = int(os.environ.get('HARENA_LOGGER_INTERVAL_S', 10))


HARENA_LOGGER_BROKER_HOST = os.environ.get('HARENA_LOGGER_BROKER_HOST', 'localhost')
HARENA_LOGGER_BROKER_PORT = int(os.environ.get('HARENA_LOGGER_BROKER_PORT', 10032))
# LOGGING SETTINGS
LOGGING_NAME = os.environ.get('LOGGING_NAME', 'harena-logger')
LOGGING_LEVEL = os.environ.get('LOGGING_LEVEL', 'DEBUG')

LOGGING_STYLES = ('info=blue;'
'warning=green;'
'error=red;'
'critical=red,bold;'
'debug=white')

LOGGING_FORMAT = ('%(asctime) -19s | %(levelname) -8s | %(threadName) -10s | '
'%(funcName) -16s | %(message)s')
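
The new LOGGING_* settings line up with the coloredlogs package added to requirements.txt; the LOGGING_STYLES string follows coloredlogs' encoded level-style syntax. One plausible wiring, assuming server.py installs it along these lines (not shown in this diff):

import logging

import coloredlogs

from config import Config

logger = logging.getLogger(Config.LOGGING_NAME)
coloredlogs.install(
    logger=logger,
    level=Config.LOGGING_LEVEL,
    fmt=Config.LOGGING_FORMAT,
    # parse_encoded_styles turns 'info=blue;...' into the dict coloredlogs expects
    level_styles=coloredlogs.parse_encoded_styles(Config.LOGGING_STYLES),
)
logger.debug('harena-logger logging configured')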