Skip to content

Commit

Permalink
release v0.1 alpha
Browse files Browse the repository at this point in the history
Merge branch 'develop' into master

* develop:
  fix typo
  add batch query google finance data source - explicitly set stock tickers to UPPERCASE ASCII str
  improve logging and fix mirror bugs
  add continuous run
  add test-requirements.txt
  retry query if request fails
  add support for optional keys
  log uncaught exception
  add logging support
  add multiple datasources support and googlefinance api
  support for ifttt
  • Loading branch information
jiahaoliang committed Feb 17, 2017
2 parents 7915dc2 + b476fd5 commit 5f87211
Show file tree
Hide file tree
Showing 17 changed files with 476 additions and 49 deletions.
2 changes: 1 addition & 1 deletion cool_finance.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,4 +4,4 @@
from cool_finance.server import main

if __name__ == "__main__":
sys.exit(main())
sys.exit(main())
22 changes: 21 additions & 1 deletion cool_finance/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,28 @@

CONFIG_FILE = "./cool_finance.json"

# NOTE(review): START_HOUR_MIN_SEC is assigned again further down; the
# later assignment wins at import time.
START_HOUR_MIN_SEC = (7, 57, 00)
LOG_LEVEL = "INFO"
LOG_FORMAT = '%(asctime)s - %(name)s - %(threadName)s - ' \
'%(levelname)s - %(message)s'
# DEBUG_LOG_LEVEL must be stricter than LOG_LEVEL.
# If DEBUG_LOG_DIR is not None, DEBUG_LOG_LEVEL must be set.
# If DEBUG_LOG_DIR is None, DEBUG_LOG_LEVEL is ignored.
DEBUG_LOG_DIR = None
DEBUG_LOG_LEVEL = "DEBUG"

# Setting START_NOW overrides START_HOUR_MIN_SEC
# and starts the server immediately.
START_NOW = True
START_HOUR_MIN_SEC = (9, 30, 00)
END_HOUR_MIN_SEC = (16, 00, 00)
TIMEZONE = 'US/Eastern'
# Sat, Sun are closed market days (weekday numbering: Mon=0 ... Sun=6).
CLOSED_WEEKDAYS = [5, 6]
# Only one notification will be generated
# every NOTIFICATION_INTERVAL_S seconds for the same stock.
NOTIFICATION_INTERVAL_S = 300
# Guarantee a query result is within QUERY_PRECISION_S seconds up to date.
# A significantly larger number (10x, 100x) can reduce the query amount to
# the data source server. Tweak it if you have a daily query limit.
# Google data source doesn't seem to have a limit.
QUERY_PRECISION_S = 0.1
Empty file.
15 changes: 15 additions & 0 deletions cool_finance/data_sources/constants.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# datasource vendor names
BASE_VENDOR = "BASE"
GOOGLE_FINANCE_VENDOR = "google_finance"
GOOGLE_FINANCE_BATCH_VENDOR = "google_finance_batch"

# Vendor handed out when DataSourceManager is built without an explicit one.
DEFAULT_DATASOURCE = GOOGLE_FINANCE_BATCH_VENDOR

# Common (vendor-neutral) keys of the normalized quote data_json that each
# vendor translates its own JSON keys into.
INDEX = "index"  # NASDAQ, NYSE...
LAST_TRADE_PRICE = "last_trade_price"
LAST_TRADE_DATETIME = "last_trade_datetime"
LAST_TRADE_DATE = "last_trade_date"
LAST_TRADE_TIME = "last_trade_time"
YIELD = "yield"
STOCK_SYMBOL = "stock_symbol"
DIVIDEND = "dividend"
27 changes: 27 additions & 0 deletions cool_finance/data_sources/manager.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
from cool_finance.data_sources import constants as const
from cool_finance.data_sources.vendors import (common, google_finance,
google_finance_batch)


class DataSourceManager(object):
    """Registry mapping vendor names to their data-source classes."""

    # Every vendor this build knows how to construct.
    _supported_vendors = {
        const.BASE_VENDOR: common.BaseSource,
        const.GOOGLE_FINANCE_VENDOR: google_finance.GoogleFinance,
        const.GOOGLE_FINANCE_BATCH_VENDOR:
            google_finance_batch.GoogleFinanceBatch,
    }

    # Subset of vendors able to serve one batched request for many symbols.
    _support_batch_query_vendor = {
        const.GOOGLE_FINANCE_BATCH_VENDOR:
            google_finance_batch.GoogleFinanceBatch,
    }

    def __init__(self, default_vendor=const.DEFAULT_DATASOURCE):
        """Remember which vendor to hand out when none is requested."""
        self._default_vendor = default_vendor

    def get_vendor(self, vendor=None):
        """Return the data-source class registered under ``vendor``.

        Falls back to this manager's default vendor when ``vendor`` is
        falsy; raises KeyError for an unknown vendor name.
        """
        selected = vendor or self._default_vendor
        return self._supported_vendors[selected]
Empty file.
103 changes: 103 additions & 0 deletions cool_finance/data_sources/vendors/common.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
import logging

from cool_finance.data_sources import constants as const

logger = logging.getLogger(__name__)


class BaseSource(object):
    """Base class for stock quote data sources.

    A vendor subclass overrides ``_fetch`` to retrieve one quote dict and
    overrides ``_data_json_keys`` / ``_data_json_optional_keys`` to map
    its vendor-specific JSON keys onto the common keys defined in
    ``constants``.
    """

    vendor_name = const.BASE_VENDOR

    # _supported_apis = []
    # Whether this vendor produces a translatable JSON quote dict.
    _support_data_json = True
    _translate_vendor_specific_key_to_common_key = True
    # _data_json_keys = {common_key: vendor_specific_key}
    # Those keys are required keys.
    # It will raise a KeyError if data_json misses any of those.
    _data_json_keys = {
        const.INDEX: const.INDEX,
        const.STOCK_SYMBOL: const.STOCK_SYMBOL,
        const.LAST_TRADE_PRICE: const.LAST_TRADE_PRICE,
        const.LAST_TRADE_DATETIME: const.LAST_TRADE_DATETIME,
        const.LAST_TRADE_DATE: const.LAST_TRADE_DATE,
        const.LAST_TRADE_TIME: const.LAST_TRADE_TIME
    }
    # _data_json_optional_keys are optional keys.
    # It will NOT raise a KeyError if data_json misses any of those.
    _data_json_optional_keys = {
        const.YIELD: const.YIELD,
        const.DIVIDEND: const.DIVIDEND
    }

    # APIs that are only meaningful when _support_data_json is True.
    _data_json_related_apis = [
        "fetch_data_json",
        "refresh_data_json",
        "get_data_json",
        "get_value_from_data_json"
    ]

    def __init__(self, stock_symbol, *args, **kwargs):
        # googlefinance misbehaves on unicode input, so normalize the
        # ticker to an uppercase UTF-8 str here (Python 2 idiom; under
        # Python 3 .encode() yields bytes -- TODO confirm target version).
        stock_symbol = stock_symbol.encode('utf-8').upper()
        self.stock_symbol = stock_symbol
        if self._support_data_json:
            self._data_json = None

    def __getattr__(self, name):
        # NOTE(review): __getattr__ only runs when normal attribute lookup
        # fails. The data-json APIs are concrete methods on this class, so
        # this guard is effectively unreachable unless a subclass deletes
        # them -- verify the intended mechanism.
        if (not self._support_data_json and
                name in self._data_json_related_apis):
            # Bug fix: the message used logging-style extra args, which
            # exceptions never interpolate; format it explicitly.
            raise NotImplementedError(
                "Method %s is not implemented by data source: %s." %
                (name, self.vendor_name))
        else:
            return super(BaseSource, self).__getattribute__(name)

    def _key_translate(self, data_json):
        """Rename vendor-specific keys in data_json to the common keys.

        Required keys raise KeyError when absent; optional keys are only
        renamed when present (and truthy).
        """
        if self._translate_vendor_specific_key_to_common_key:
            for c_key, v_key in self._data_json_keys.items():
                data_json[c_key] = data_json.pop(v_key)

            for c_key, v_key in self._data_json_optional_keys.items():
                # NOTE: .get() also skips keys whose value is falsy
                # ("" or 0), not only missing keys.
                if data_json.get(v_key):
                    data_json[c_key] = data_json.pop(v_key)

        return data_json

    def _fetch(self, *args, **kwargs):
        # make your vendor specific request here...
        # return a json dict (placeholder implementation)
        return {key: "foo" for key in self._data_json_keys}

    def fetch_data_json(self, *args, **kwargs):
        """Fetch a fresh quote, translate its keys, cache and return it."""
        # Bug fix: forward the call arguments instead of passing the args
        # tuple and kwargs dict as two positional parameters.
        data_json = self._fetch(*args, **kwargs)
        self._data_json = self._key_translate(data_json)
        return self._data_json

    def fetch(self, *args, **kwargs):
        """Alias of fetch_data_json()."""
        return self.fetch_data_json(*args, **kwargs)

    def refresh_data_json(self, *args, **kwargs):
        """Re-fetch and re-cache the quote (same as fetch_data_json)."""
        return self.fetch_data_json(*args, **kwargs)

    def refresh(self, *args, **kwargs):
        """Alias of refresh_data_json()."""
        return self.refresh_data_json(*args, **kwargs)

    def get_data_json(self):
        """Return the last fetched (translated) quote dict, or None."""
        return self._data_json

    def get_value_from_data_json(self, key, is_common_key=True):
        # if is_common_key, key must be in self._data_json_keys
        # if not is_common_key, it means a vendor specific key
        if is_common_key and key not in self._data_json_keys:
            # Bug fix: format the message; exceptions never interpolate
            # logging-style arguments.
            raise KeyError(
                "Common key %s is not supported by data source %s." %
                (key, self.vendor_name))

        return self._data_json[key]

    def get_price(self):
        """Return the cached last trade price."""
        return self._data_json[const.LAST_TRADE_PRICE]
31 changes: 31 additions & 0 deletions cool_finance/data_sources/vendors/google_finance.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
import logging

from googlefinance import getQuotes

from cool_finance.data_sources import constants as const
from cool_finance.data_sources.vendors.common import BaseSource

logger = logging.getLogger(__name__)


class GoogleFinance(BaseSource):
    """Quote source backed by the ``googlefinance`` package; issues one
    HTTP query per symbol per fetch."""

    vendor_name = const.GOOGLE_FINANCE_VENDOR

    # Map common keys -> Google Finance JSON keys.
    # NOTE(review): unlike BaseSource, LAST_TRADE_DATE has no mapping
    # here -- presumably the Google payload lacks it; confirm against
    # the API response.
    _data_json_keys = {
        const.INDEX: "Index",
        const.STOCK_SYMBOL: "StockSymbol",
        const.LAST_TRADE_PRICE: "LastTradePrice",
        const.LAST_TRADE_DATETIME: "LastTradeDateTime",
        const.LAST_TRADE_TIME: "LastTradeTime"
    }

    _data_json_optional_keys = {
        const.YIELD: "Yield",
        const.DIVIDEND: "Dividend"
    }

    def _fetch(self, *args, **kwargs):
        # googlefinance has a bug if the getQuotes() input is unicode;
        # BaseSource.__init__ already converted the symbol to str.
        # getQuotes() returns a list of quote dicts; we query a single
        # symbol, so take the first element.
        return getQuotes(self.stock_symbol)[0]
83 changes: 83 additions & 0 deletions cool_finance/data_sources/vendors/google_finance_batch.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
import datetime
from threading import Lock

from googlefinance import getQuotes

from cool_finance.constants import QUERY_PRECISION_S
from cool_finance.data_sources import constants as const
from cool_finance.data_sources.vendors.google_finance import GoogleFinance
from cool_finance.log import logger

LAST_UPDATE = "last_update"
STOCKS_DATA = "stocks_data"


class GoogleFinanceBatchHandler(object):
    """Shared cache answering per-stock lookups from one batched
    getQuotes() call that covers every registered symbol.

    Results are considered fresh for QUERY_PRECISION_S seconds; within
    that window fetch() is served from the cache without hitting Google.
    """

    # Vendor-specific JSON key under which Google reports the ticker.
    _symbol_key = GoogleFinance._data_json_keys[const.STOCK_SYMBOL]

    def __init__(self):
        self._stocks_list = []
        # self._stocks_data = {"last_update": datetime,
        #                      "stocks_data": {
        #                          "ABC": {...},
        #                          "BCD": {...} } }
        self._stocks_data = {LAST_UPDATE: None,
                             STOCKS_DATA: {}}
        self._data_access_lock = Lock()

    def add_stock(self, stock_symbol):
        # googlefinance has a bug if the getQuotes() input is unicode;
        # callers are expected to pass a str ticker (GoogleFinanceBatch
        # encodes it before registering).
        self._stocks_list.append(stock_symbol)

    def fetch(self, stock_symbol):
        """Return a copy of the cached quote dict for stock_symbol,
        refreshing the whole batch first when the cache is stale."""
        # Multiple workers could call fetch() as well as _should_update()
        # and _fetch_batch(). Must use the _data_access_lock to guard them.
        with self._data_access_lock:
            if self._should_update():
                self._stocks_data = self._fetch_batch(self._stocks_list)
                logger.debug("Look for %s, new request sent to Google",
                             stock_symbol)
            # Copy so callers cannot mutate the shared cache entry.
            return dict(self._stocks_data[STOCKS_DATA][stock_symbol])

    def _should_update(self):
        # Stale when never fetched, or older than QUERY_PRECISION_S seconds.
        if self._stocks_data[LAST_UPDATE]:
            now = datetime.datetime.now()
            last_update = self._stocks_data[LAST_UPDATE]
            delta = now - last_update
            if delta.total_seconds() <= QUERY_PRECISION_S:
                return False
        return True

    def _fetch_batch(self, stocks_list):
        """Query Google once for every symbol in stocks_list, rebuild the
        cache in place, and return the updated self._stocks_data dict."""
        if not stocks_list:
            stocks_list = self._stocks_list
        quotes_list = getQuotes(stocks_list)
        now = datetime.datetime.now()
        self._stocks_data[LAST_UPDATE] = now
        self._stocks_data[STOCKS_DATA].clear()
        for item in quotes_list:
            # Index each returned quote by its reported ticker symbol.
            stock_symbol = item[self._symbol_key]
            self._stocks_data[STOCKS_DATA][stock_symbol] = item
        return self._stocks_data


batch_handler = GoogleFinanceBatchHandler()


class GoogleFinanceBatch(GoogleFinance):
    """Google Finance source that shares one batched query (through the
    module-level ``batch_handler``) across all instances."""

    vendor_name = const.GOOGLE_FINANCE_BATCH_VENDOR

    def __init__(self, stock_symbol):
        super(GoogleFinanceBatch, self).__init__(stock_symbol)
        self.batch_handler = batch_handler
        # Bug fix: BaseSource.__init__ already normalized the symbol to
        # an uppercase str (googlefinance chokes on unicode), so register
        # that same normalized symbol instead of re-encoding the raw
        # input WITHOUT uppercasing it -- otherwise the symbol queried in
        # the batch and the key used by _fetch() could disagree in case.
        self.batch_handler.add_stock(self.stock_symbol)

    def _fetch(self, *args, **kwargs):
        # Serve the quote from the shared batch handler's cache.
        return self.batch_handler.fetch(self.stock_symbol)
1 change: 0 additions & 1 deletion cool_finance/db.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,4 +17,3 @@ def insert_one(self, data_json, collection_name):
collection = self.db[collection_name]
id = collection.insert_one(data_json).inserted_id
return id

39 changes: 39 additions & 0 deletions cool_finance/log.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
import datetime
import logging
import traceback
import os
import sys

from cool_finance import constants

# create logger for this module; importing this module configures it once
logger = logging.getLogger(__name__)
# The logger level must admit DEBUG records when a debug file handler is
# configured; each handler then filters at its own level.
if constants.DEBUG_LOG_DIR:
    logger.setLevel(constants.DEBUG_LOG_LEVEL)
else:
    logger.setLevel(constants.LOG_LEVEL)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(constants.LOG_LEVEL)
# create formatter and add it to the handlers
formatter = logging.Formatter(constants.LOG_FORMAT)
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(ch)

# create file handler which logs even debug messages
if constants.DEBUG_LOG_DIR:
    # one timestamped debug log file per process start
    filename = "debug-" + \
        datetime.datetime.now().strftime("%Y%m%d_%H%M%S") + ".log"
    fh = logging.FileHandler(os.path.join(constants.DEBUG_LOG_DIR, filename))
    fh.setLevel(constants.DEBUG_LOG_LEVEL)
    fh.setFormatter(formatter)
    logger.addHandler(fh)


def log_uncaught_exceptions(ex_cls, ex, tb):
    """Log an uncaught exception's traceback and summary via the module
    logger; installed below as sys.excepthook."""
    formatted_tb = ''.join(traceback.format_tb(tb))
    logger.error(formatted_tb)
    logger.error('{0}: {1}'.format(ex_cls, ex))


sys.excepthook = log_uncaught_exceptions

Loading

0 comments on commit 5f87211

Please sign in to comment.