
Commit 64b6d4f
Merge pull request #19 from datasci4health/development
Development
matheusmota authored Sep 7, 2020
2 parents b02667b + 370d1fa commit 64b6d4f
Showing 1 changed file with 17 additions and 15 deletions.
src/server.py (32 changes: 17 additions & 15 deletions)
@@ -44,13 +44,14 @@ def run(self):
         mongodb_collection = mongodb_db[self.mongodb_collection]
 
         print("Checking for newly streamed messages during at least {} seconds...".format(self.delay))
 
-        for message in self.kafka_consumer:
-            #print ("%s:%d:%d: key=%s value=%s" % (message.topic, message.partition, message.offset, message.key, message.value))
-            print("Found {} events inside a message ".format(len(message.value['harena-log-stream'])))
-
-            for event in message.value['harena-log-stream']:
-                mongodb_collection.insert_one(event)
+        try:
+            for message in self.kafka_consumer:
+                #print ("%s:%d:%d: key=%s value=%s" % (message.topic, message.partition, message.offset, message.key, message.value))
+                print("Found {} events inside a message ".format(len(message.value['harena-log-stream'])))
+                for event in message.value['harena-log-stream']:
+                    mongodb_collection.insert_one(event)
+        except:
+            print("Object is not a JSON or does not have an 'harena-log-stream' array")
 
 
         print("Waiting time ({} seconds) for new messages ended.".format(self.delay))
@@ -84,20 +85,21 @@ def __init__(self, kafka_producer, target_topic):
     @cross_origin(origin='*')
     def post(self):
 
-        # To do: properly evaluate message body parsing
-        message = request.get_json()
-        message['server_timestamp'] = "{}".format(int(round(time.time() * 1000)))
+        try:
+            # To do: properly evaluate message body parsing
+            message = request.get_json()
+            message['server_timestamp'] = "{}".format(int(round(time.time() * 1000)))
 
-        # Asynchronous by default
-        future = self.kafka_producer.send(self.target_topic, json.dumps(message).encode('utf-8'))
+            # Asynchronous by default
+            future = self.kafka_producer.send(self.target_topic, json.dumps(message).encode('utf-8'))
 
-        # Block for 'synchronous' sends
-        try:
-            record_metadata = future.get(timeout=10)
-        except KafkaError:
-            # Decide what to do if produce request failed...
-            log.exception()
-            pass
+            try:
+                # Block for 'synchronous' sends
+                record_metadata = future.get(timeout=10)
+            except KafkaError:
+                # Decide what to do if produce request failed...
+                log.exception()
+                pass
+        except:
+            print("could not validate the json")
 
         # Successful result returns assigned partition and offset
         # print(future)
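
The new outer bare except: catches more than JSON problems (for example, an error escaping the inner Kafka handler) and reports everything as "could not validate the json". A hypothetical narrower version of the handler (assuming Flask's request.get_json and the names used in this diff; the error responses are illustrative, not from this commit):

    @cross_origin(origin='*')
    def post(self):
        # silent=True makes get_json() return None instead of raising
        # when the body is missing or is not valid JSON
        message = request.get_json(silent=True)
        if not isinstance(message, dict):
            return {'error': 'request body must be a JSON object'}, 400

        message['server_timestamp'] = "{}".format(int(round(time.time() * 1000)))

        # Asynchronous by default
        future = self.kafka_producer.send(self.target_topic, json.dumps(message).encode('utf-8'))
        try:
            # Block for 'synchronous' sends
            record_metadata = future.get(timeout=10)
        except KafkaError:
            # note: stdlib logging's exception() requires a message argument,
            # so the diff's bare log.exception() would itself raise
            log.exception("Kafka produce request failed")
            return {'error': 'message could not be delivered'}, 500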
