new instruments, notebooks (#150)
AnastasiyaB authored Dec 1, 2020
1 parent bc02081 commit bcfb2f8
Showing 30 changed files with 7,272 additions and 18,072 deletions.
54 changes: 27 additions & 27 deletions gs_quant/api/gs/data.py
@@ -24,20 +24,19 @@
import numpy
import pandas as pd
from cachetools import TTLCache
from gs_quant.target.assets import FieldFilterMap
from gs_quant.target.common import MarketDataVendor, PricingLocation
from gs_quant.target.coordinates import MDAPIDataBatchResponse, MDAPIDataQuery, MDAPIDataQueryResponse, MDAPIQueryField
from gs_quant.target.data import DataQuery, DataQueryResponse
from gs_quant.target.data import DataSetEntity

from gs_quant.api.data import DataApi
from gs_quant.base import Base
from gs_quant.data.core import DataContext, DataFrequency
from gs_quant.errors import MqValueError
from gs_quant.markets import MarketDataCoordinate
from gs_quant.session import GsSession
from gs_quant.target.common import MarketDataVendor, PricingLocation
from gs_quant.target.coordinates import MDAPIDataBatchResponse, MDAPIDataQuery, MDAPIDataQueryResponse, MDAPIQueryField
from gs_quant.target.data import DataQuery, DataQueryResponse
from gs_quant.target.data import DataSetEntity
from .assets import GsIdType
from ...target.assets import EntityQuery
from ...target.assets import EntityQuery, FieldFilterMap

_logger = logging.getLogger(__name__)

@@ -316,42 +315,43 @@ def build_market_data_query(asset_ids: List[str], query_type: QueryType, where:
}

@classmethod
def get_data_providers(cls, entity_id: str) -> Dict:
def get_data_providers(cls,
entity_id: str,
availability: Optional[Dict] = None) -> Dict:
"""Return daily and real-time data providers
:param entity_id: identifier of entity i.e. asset, country, subdivision
:param availability: Optional Measures Availability response for the entity
:return: dictionary of available data providers
** Usage **
Return a dictionary containing a set of dataset providers for each available data field.
For each field will return a dict of daily and real-time dataset providers where available.
"""
response = availability if availability else GsSession.current._get(f'/data/measures/{entity_id}/availability')
if 'errorMessages' in response:
raise MqValueError(f"Data availability request {response['requestId']} "
f"failed: {response.get('errorMessages', '')}")

GsSession.current: GsSession
body = GsSession.current._get(f'/data/measures/{entity_id}/availability')
if 'errorMessages' in body:
raise MqValueError(f"data availablity request {body['requestId']} failed: {body.get('errorMessages', '')}")
if 'data' not in body:
providers = {}
else:
providers = {}

all_data_mappings = sorted(body['data'], key=lambda x: x['rank'], reverse=True)
if 'data' not in response:
return {}

for source in all_data_mappings:
providers = {}
all_data_mappings = sorted(response['data'], key=lambda x: x['rank'], reverse=True)

freq = source.get('frequency', 'End Of Day')
dataset_field = source.get('datasetField', '')
rank = source.get('rank')
for source in all_data_mappings:
freq = source.get('frequency', 'End Of Day')
dataset_field = source.get('datasetField', '')
rank = source.get('rank')

providers.setdefault(dataset_field, {})
providers.setdefault(dataset_field, {})

if rank:
if freq == 'End Of Day':
providers[dataset_field][DataFrequency.DAILY] = source['datasetId']
elif freq == 'Real Time':
providers[dataset_field][DataFrequency.REAL_TIME] = source['datasetId']
if rank:
if freq == 'End Of Day':
providers[dataset_field][DataFrequency.DAILY] = source['datasetId']
elif freq == 'Real Time':
providers[dataset_field][DataFrequency.REAL_TIME] = source['datasetId']

return providers

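The reworked `get_data_providers` accepts an optional, pre-fetched availability response, so callers that already hold the `/data/measures/{entity_id}/availability` payload can avoid a second round trip. A minimal sketch of the new call pattern; the entity id and dataset ids below are placeholders, not real Marquee identifiers:

```python
from gs_quant.api.gs.data import GsDataApi
from gs_quant.data.core import DataFrequency

# Hypothetical availability payload shaped like the service response the method
# parses: a list of dataset mappings under 'data', each with a rank, frequency,
# field name and dataset id.
availability = {
    'data': [
        {'datasetField': 'impliedVolatility', 'frequency': 'End Of Day',
         'rank': 1, 'datasetId': 'EXAMPLE_EOD_DATASET'},
        {'datasetField': 'impliedVolatility', 'frequency': 'Real Time',
         'rank': 1, 'datasetId': 'EXAMPLE_RT_DATASET'},
    ]
}

# Passing availability skips the HTTP call entirely; omitting it keeps the old
# behaviour of fetching availability for the entity via GsSession.current.
providers = GsDataApi.get_data_providers('EXAMPLE_ENTITY_ID', availability)
print(providers['impliedVolatility'][DataFrequency.DAILY])      # 'EXAMPLE_EOD_DATASET'
print(providers['impliedVolatility'][DataFrequency.REAL_TIME])  # 'EXAMPLE_RT_DATASET'
```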
4 changes: 3 additions & 1 deletion gs_quant/api/utils.py
@@ -14,12 +14,14 @@
under the License.
"""

from gs_quant.session import GsSession
import socket
import requests


def handle_proxy(url, params):
if socket.getfqdn().split('.')[-2:] == ['gs', 'com']:
session = GsSession.get()
if socket.getfqdn().split('.')[-2:] == ['gs', 'com'] or session.is_internal():
try:
import gs_quant_internal
proxies = gs_quant_internal.__proxies__
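The `handle_proxy` change routes requests through the internal proxy not only on `*.gs.com` hosts but also whenever the session reports itself as internal. The hostname half of that test is easy to show in isolation; `_looks_internal` below is an illustrative helper, not part of `gs_quant`:

```python
import socket


def _looks_internal(fqdn: str) -> bool:
    # Same check handle_proxy applies to socket.getfqdn(): the machine is
    # treated as internal when the last two labels of its FQDN are 'gs', 'com'.
    return fqdn.split('.')[-2:] == ['gs', 'com']


print(_looks_internal('research1.ny.gs.com'))  # True
print(_looks_internal('laptop.example.org'))   # False
print(_looks_internal(socket.getfqdn()))       # depends on where this runs
```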
5 changes: 5 additions & 0 deletions gs_quant/backtests/core.py
@@ -59,3 +59,8 @@ class Backtest(__Backtest):
def get_results(self) -> Tuple[BacktestResult, ...]:
from gs_quant.api.gs.backtests import GsBacktestApi
return GsBacktestApi.get_results(backtest_id=self.id)


class MarketModel(EnumBase, Enum):
STICKY_FIXED_STRIKE = "SFK"
STICKY_DELTA = "SD"
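`MarketModel` replaces the hard-coded `EQ_MARKET_MODEL = 'SFK'` constant that `strategy_systematic.py` previously used. A quick look at the two members and the short codes they carry (assuming only that `gs_quant` is importable):

```python
from gs_quant.backtests.core import MarketModel

# Each member wraps the short code sent to the backtest service.
print(MarketModel.STICKY_FIXED_STRIKE.value)  # 'SFK'
print(MarketModel.STICKY_DELTA.value)         # 'SD'

# Standard enum lookup by value also works, which is what allows callers to
# pass either a member or the raw string code.
assert MarketModel('SD') is MarketModel.STICKY_DELTA
```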
11 changes: 6 additions & 5 deletions gs_quant/backtests/strategy_systematic.py
@@ -20,7 +20,7 @@

import gs_quant.target.backtests as backtests
from gs_quant.api.gs.backtests import GsBacktestApi
from gs_quant.backtests.core import Backtest, QuantityType, TradeInMethod
from gs_quant.backtests.core import Backtest, QuantityType, TradeInMethod, MarketModel
from gs_quant.errors import MqValueError
from gs_quant.markets import PricingContext
from gs_quant.target.backtests import *
@@ -30,7 +30,6 @@

BACKTEST_TYPE_NAME = 'VolatilityFlow'
BACKTEST_TYPE_VALUE = 'Volatility Flow'
EQ_MARKET_MODEL = 'SFK'
ISO_FORMAT = r"^([0-9]{4})-([0-9]{2})-([0-9]{2})$"


@@ -50,13 +49,15 @@ def __init__(self,
cost_netting: bool = False,
currency: Union[Currency, str] = Currency.USD,
trade_in_signals: Tuple[BacktestSignalSeriesItem, ...] = None,
trade_out_signals: Tuple[BacktestSignalSeriesItem, ...] = None):
trade_out_signals: Tuple[BacktestSignalSeriesItem, ...] = None,
market_model: Union[MarketModel, str] = MarketModel.STICKY_FIXED_STRIKE):
self.__cost_netting = cost_netting
self.__currency = get_enum_value(Currency, currency)
self.__name = name
self.__backtest_type = BACKTEST_TYPE_NAME

trade_in_method = get_enum_value(TradeInMethod, trade_in_method).value
market_model = get_enum_value(MarketModel, market_model).value

self.__trading_parameters = BacktestTradingParameters(
quantity=quantity,
@@ -76,7 +77,7 @@ def __init__(self,
instrument=instrument,
notional_percentage=notional_percentage,
hedge=BacktestStrategyUnderlierHedge(risk_details=delta_hedge),
market_model=EQ_MARKET_MODEL))
market_model=market_model))
else:
for underlier in underliers:
if isinstance(underlier, tuple):
@@ -94,7 +95,7 @@ def __init__(self,
instrument=instrument,
notional_percentage=notional_percentage,
hedge=BacktestStrategyUnderlierHedge(risk_details=delta_hedge),
market_model=EQ_MARKET_MODEL))
market_model=market_model))

backtest_parameters_class: Base = getattr(backtests, self.__backtest_type + 'BacktestParameters')
backtest_parameter_args = {
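`StrategySystematic` now accepts `market_model` as either a `MarketModel` member or its string code and normalizes it before building each underlier's hedge. The helper below mirrors that normalization step in isolation; `_normalize_market_model` is illustrative only and not part of the package:

```python
from gs_quant.backtests.core import MarketModel


def _normalize_market_model(market_model) -> str:
    # Illustrative stand-in for the get_enum_value(MarketModel, market_model).value
    # step in StrategySystematic.__init__: accept a member or a raw code and
    # return the string the backtest service expects.
    if isinstance(market_model, MarketModel):
        return market_model.value
    return MarketModel(market_model).value


print(_normalize_market_model(MarketModel.STICKY_DELTA))  # 'SD'
print(_normalize_market_model('SFK'))                     # 'SFK'
```

With no argument supplied, the strategy keeps the previous behaviour, since the parameter defaults to `MarketModel.STICKY_FIXED_STRIKE` ('SFK').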
Empty file added gs_quant/content/__init__.py
Empty file.
@@ -573,7 +573,7 @@
"metadata": {},
"outputs": [],
"source": [
"FRED_API_KEY = 'e18f3ca8d7fef2bd82294ae95d88f2d1'\n",
"FRED_API_KEY = 'YOUR_KEY_HERE'\n",
"fred_API = FredDataApi(api_key=FRED_API_KEY)\n",
"fred_pull = fred_API.build_query(start=start_date, end=end_date)\n",
"\n",
@@ -1014,7 +1014,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.0"
"version": "3.7.4"
}
},
"nbformat": 4,
523 changes: 523 additions & 0 deletions gs_quant/content/made_with_gs_quant/13-Cyclicals.ipynb

Large diffs are not rendered by default.

167 changes: 167 additions & 0 deletions gs_quant/content/reports_and_screens/00_fx/0000_vol_screen.ipynb
@@ -0,0 +1,167 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"pycharm": {
"is_executing": true
}
},
"outputs": [],
"source": [
"from gs_quant.data import Dataset\n",
"from gs_quant.timeseries import percentiles, volatility, last_value, Returns\n",
"from gs_quant.datetime import business_day_offset\n",
"import seaborn as sns\n",
"import pandas as pd\n",
"pd.options.display.float_format = '{:,.2f}'.format \n",
"import matplotlib.pyplot as plt\n",
"from scipy import stats\n",
"import warnings\n",
"from datetime import date\n",
"warnings.filterwarnings('ignore')\n",
"sns.set(style=\"darkgrid\", color_codes=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from gs_quant.session import GsSession\n",
"# external users should substitute their client id and secret; please skip this step if using internal jupyterhub\n",
"GsSession.use(client_id=None, client_secret=None, scopes=('run_analytics',)) "
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Screen Functions"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def format_df(data_dict):\n",
" df = pd.concat(data_dict, axis=1)\n",
" df.columns = data_dict.keys()\n",
" return df.fillna(method='ffill').dropna()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"pycharm": {
"is_executing": true
}
},
"outputs": [],
"source": [
"def volatility_screen(crosses, start_date, end_date, tenor='3m', history='2y', plot=True):\n",
" #replace with premium dataset for more history\n",
" fxspot_dataset, fxvol_dataset = Dataset('FXSPOT'), Dataset('FXIMPLIEDVOL')\n",
" spot_data, impvol_data, spot_fx, data = {}, {}, {}, {}\n",
" for cross in crosses:\n",
" spot_fx[cross] = fxspot_dataset.get_data(start_date, end_date, bbid=cross)[['spot']].drop_duplicates(keep='last')['spot']\n",
" spot_data[cross] = volatility(spot_fx[cross], tenor) # realized vol \n",
" impvol_data[cross] = fxvol_dataset.get_data(start_date, end_date, bbid=cross, tenor=tenor, deltaStrike='DN', location='NYC')[['impliedVolatility']]* 100\n",
"\n",
" spdata, ivdata = format_df(spot_data), format_df(impvol_data)\n",
" diff = ivdata.subtract(spdata).dropna()\n",
" for cross in crosses:\n",
" data[cross] = {'Spot': last_value(spot_fx[cross]),\n",
" f'{tenor} Implied': last_value(ivdata[cross]),\n",
" f'{tenor} Realized': last_value(spdata[cross]),\n",
" 'Diff': last_value(diff[cross]),\n",
" f'{history} Implied Low': min(ivdata[cross]),\n",
" f'{history} Implied High': max(ivdata[cross]),\n",
" '%-ile': last_value(percentiles(ivdata[cross]))\n",
" }\n",
" df = pd.DataFrame(data)\n",
" vol_screen = df.transpose()\n",
" \n",
" if plot:\n",
" for fx in vol_screen.index:\n",
" plt.scatter(vol_screen.loc[fx]['%-ile'], vol_screen.loc[fx]['Diff'])\n",
" plt.legend(vol_screen.index,loc='best', bbox_to_anchor=(0.9, -0.13), ncol=3)\n",
" \n",
" plt.xlabel('Percentile of Current Implied Vol')\n",
" plt.ylabel('Implied vs Realized Vol')\n",
" plt.title('Entry Point vs Richness')\n",
" plt.show()\n",
" return vol_screen.sort_values(by=['Diff']).style.background_gradient(subset=['Diff']).format(\"{:.1f}\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### FX Implied Volatility Screen\n",
"Let's pull [GS FX Spot](https://marquee.gs.com/s/developer/datasets/FXSPOT) and [GS FX Implied Volatility](https://marquee.gs.com/s/developer/datasets/FXIMPLIEDVOL) and look at implied vs realized vol as well as current implied level as percentile relative to the last 2 years. Note, FX Spot uses GS NYC closes.\n",
"\n",
"\n",
"If you are looking for additional history or coverage, please see our premium version [link](https://marquee.gs.com/s/developer/datasets/FXIMPLIEDVOL_PREMIUM).\n",
"\n",
"The FX Structuring team uses this analysis to screen for the most attractive vols to buy in the 3m tenor by looking at where implied trades in its own history and where realized trades in relationship with implieds.\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Entry Point vs Richness\n",
"\n",
"Note: Lower left corner shows currencies with low and cheap vol. Upper right corner\n",
"shows currencies with high and rich vol."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from gs_quant.datetime import business_day_offset\n",
"from dateutil.relativedelta import relativedelta\n",
"g10 = ['USDJPY', 'EURUSD', 'AUDUSD', 'GBPUSD', 'USDCAD', 'USDNOK', 'NZDUSD', 'USDSEK', 'USDCHF']\n",
"\n",
"end = business_day_offset(date.today(), -1, roll='forward')\n",
"start = business_day_offset(end - relativedelta(years=2), -1, roll='forward')\n",
"\n",
"tenor = '3m'\n",
"history = '2y'\n",
"\n",
"screen = volatility_screen(g10, start, end, tenor, history, plot=True)\n",
"screen"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.7.4"
}
},
"nbformat": 4,
"nbformat_minor": 4
}
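At its core, `volatility_screen` compares implied vol to trailing realized vol and locates current implied within its own history. A stripped-down sketch of that calculation on synthetic series — the random data below stands in for the `FXSPOT` / `FXIMPLIEDVOL` pulls and is purely illustrative:

```python
import numpy as np
import pandas as pd
from gs_quant.timeseries import last_value, percentiles, volatility

np.random.seed(0)
dates = pd.bdate_range('2019-01-01', periods=520)

# Synthetic spot path and implied-vol series standing in for the dataset pulls.
spot = pd.Series(100 * np.cumprod(1 + np.random.normal(0, 0.005, len(dates))), index=dates)
implied = pd.Series(8 + np.random.normal(0, 0.5, len(dates)), index=dates)

realized = volatility(spot, '3m')                  # trailing 3m realized vol, in vol points
diff = (implied - realized).dropna()               # the screen's 'Diff' column
implied_pctile = last_value(percentiles(implied))  # the screen's '%-ile' column

print(f"Implied - realized: {last_value(diff):.1f} vol pts, "
      f"implied percentile: {implied_pctile:.0f}")
```

The notebook then plots `%-ile` against `Diff` per cross, so cheap, low-vol crosses fall toward the lower left and rich, high-vol crosses toward the upper right.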