Gs Quant release 0.9.102 #270

Open
wants to merge 1 commit into master
3 changes: 2 additions & 1 deletion gs_quant/analytics/processors/__init__.py
@@ -19,7 +19,8 @@
ReturnsProcessor, BetaProcessor, FXImpliedCorrProcessor
from .special_processors import EntityProcessor, CoordinateProcessor
from .statistics_processors import PercentilesProcessor, PercentileProcessor, StdMoveProcessor, \
CovarianceProcessor, ZscoresProcessor, MeanProcessor, VarianceProcessor, SumProcessor, StdDevProcessor
CovarianceProcessor, ZscoresProcessor, MeanProcessor, VarianceProcessor, SumProcessor, StdDevProcessor, \
CompoundGrowthRate
from .utility_processors import LastProcessor, AppendProcessor, AdditionProcessor, SubtractionProcessor, \
MultiplicationProcessor, DivisionProcessor, MinProcessor, MaxProcessor, NthLastProcessor, OneDayProcessor
from .scale_processors import ScaleProcessor, BarMarkerProcessor, SpotMarkerProcessor, ScaleShape
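
With this change the new statistics processor is re-exported from the package root. A one-line smoke check (a sketch; it assumes only that the name now resolves, not any particular constructor signature):

from gs_quant.analytics.processors import CompoundGrowthRate  # newly re-exported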
22 changes: 16 additions & 6 deletions gs_quant/backtests/triggers.py
@@ -20,8 +20,10 @@
from gs_quant.backtests.actions import Action, AddTradeAction, AddTradeActionInfo
from gs_quant.backtests.backtest_objects import BackTest, PredefinedAssetBacktest
from gs_quant.backtests.backtest_utils import make_list, CalcType
from gs_quant.datetime.relative_date import RelativeDateSchedule
from gs_quant.backtests.data_sources import *
from gs_quant.datetime.relative_date import RelativeDateSchedule
from gs_quant.risk.transform import Transformer
from gs_quant.risk import RiskMeasure


class TriggerDirection(Enum):
@@ -41,7 +43,8 @@ def __init__(self):


class PeriodicTriggerRequirements(TriggerRequirements):
def __init__(self, start_date=None, end_date=None, frequency=None, calendar=None):
def __init__(self, start_date: dt.date = None, end_date: dt.date = None, frequency: str = None,
calendar: str = None):
super().__init__()
self.start_date = start_date
self.end_date = end_date
@@ -50,27 +53,29 @@ def __init__(self, start_date=None, end_date=None, frequency=None, calendar=None):


class IntradayTriggerRequirements(TriggerRequirements):
def __init__(self, start_time, end_time, frequency):
def __init__(self, start_time: dt.datetime, end_time: dt.datetime, frequency: str):
super().__init__()
self.start_time = start_time
self.end_time = end_time
self.frequency = frequency


class MktTriggerRequirements(TriggerRequirements):
def __init__(self, data_source, trigger_level, direction):
def __init__(self, data_source: DataSource, trigger_level: float, direction: TriggerDirection):
super().__init__()
self.data_source = data_source
self.trigger_level = trigger_level
self.direction = direction


class RiskTriggerRequirements(TriggerRequirements):
def __init__(self, risk, trigger_level, direction):
def __init__(self, risk: RiskMeasure, trigger_level: float, direction: TriggerDirection,
risk_transformation: Optional[Transformer] = None):
super().__init__()
self.risk = risk
self.trigger_level = trigger_level
self.direction = direction
self.risk_transformation = risk_transformation


class AggregateTriggerRequirements(TriggerRequirements):
@@ -258,7 +263,12 @@ def __init__(self,
self._risks += [trigger_requirements.risk]

def has_triggered(self, state: dt.date, backtest: BackTest = None) -> TriggerInfo:
risk_value = backtest.results[state][self._trigger_requirements.risk].aggregate()
if self.trigger_requirements.risk_transformation is None:
risk_value = backtest.results[state][self._trigger_requirements.risk].aggregate()
else:
risk_value = backtest.results[state][self._trigger_requirements.risk].transform(
risk_transformation=self.trigger_requirements.risk_transformation).aggregate(
allow_mismatch_risk_keys=True)
if self._trigger_requirements.direction == TriggerDirection.ABOVE:
if risk_value > self._trigger_requirements.trigger_level:
return TriggerInfo(True)
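
The optional risk_transformation added above routes trigger evaluation through Transformer.transform(...) before aggregating; when it is left as None, has_triggered keeps the plain .aggregate() path. A minimal construction sketch (IRDelta as the risk measure is an illustrative assumption, not from this PR):

from gs_quant.backtests.triggers import RiskTriggerRequirements, TriggerDirection
from gs_quant.risk import IRDelta

# risk_transformation defaults to None; passing a gs_quant.risk.transform.Transformer
# instead makes has_triggered aggregate the transformed results with
# allow_mismatch_risk_keys=True, as in the diff above.
req = RiskTriggerRequirements(risk=IRDelta,
                              trigger_level=1000.0,
                              direction=TriggerDirection.ABOVE)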
44 changes: 14 additions & 30 deletions gs_quant/models/risk_model_utils.py
@@ -205,44 +205,28 @@ def batch_and_upload_partial_data(model_id: str, data: dict, max_asset_size: int
date = data.get('date')
_upload_factor_data_if_present(model_id, data, date)
sleep(2)
_repeat_try_catch_request(_batch_data_v2, model_id=model_id, data=data, max_asset_size=max_asset_size, date=date)


def _batch_data_v2(model_id: str, data: dict, max_asset_size: int, date: Union[str, dt.date]):
if data.get('assetData'):
asset_data_list, target_size = _batch_input_data({'assetData': data.get('assetData')}, max_asset_size)
for i in range(len(asset_data_list)):
final_upload = True if i == len(asset_data_list) - 1 else False
for risk_model_data_type in ["assetData", "issuerSpecificCovariance", "factorPortfolios"]:
_repeat_try_catch_request(_batch_data_v2, model_id=model_id, data=data.get(risk_model_data_type),
data_type=risk_model_data_type, max_asset_size=max_asset_size, date=date)
sleep(2)


def _batch_data_v2(model_id: str, data: dict, data_type: str, max_asset_size: int, date: Union[str, dt.date]):
if data:
if data_type in ["issuerSpecificCovariance", "factorPortfolios"]:
max_asset_size //= 2
data_list, _ = _batch_input_data({data_type: data}, max_asset_size)
for i in range(len(data_list)):
final_upload = True if i == len(data_list) - 1 else False
try:
res = GsFactorRiskModelApi.upload_risk_model_data(model_id=model_id,
model_data={'assetData': asset_data_list[i],
'date': date},
model_data={data_type: data_list[i], 'date': date},
partial_upload=True,
final_upload=final_upload)
logging.info(res)
except (MqRequestError, Exception) as e:
raise e

if 'issuerSpecificCovariance' in data.keys() or 'factorPortfolios' in data.keys():
for optional_input_key in ['issuerSpecificCovariance', 'factorPortfolios']:
if data.get(optional_input_key):
optional_data = data.get(optional_input_key)
optional_data_list, target_size = _batch_input_data({optional_input_key: optional_data},
max_asset_size // 2)
logging.info(f'{optional_input_key} being uploaded for {date}...')
for i in range(len(optional_data_list)):
final_upload = True if i == len(optional_data_list) - 1 else False
try:
res = GsFactorRiskModelApi.upload_risk_model_data(model_id=model_id,
model_data={
optional_input_key: optional_data_list[i],
'date': date},
partial_upload=True,
final_upload=final_upload)
logging.info(res)
except (MqRequestError, Exception) as e:
raise e


def batch_and_upload_coverage_data(date: dt.date, gsid_list: list, model_id: str):
update_time = dt.datetime.today().strftime("%Y-%m-%dT%H:%M:%SZ")
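
The refactor above collapses the assetData-specific batching and the separate optional-input loop into one generic _batch_data_v2 invoked once per data type, with issuerSpecificCovariance and factorPortfolios batched at half the asset batch size. A hypothetical standalone illustration of that sizing rule (not the gs_quant implementation):

def batch_starts(n_assets: int, max_asset_size: int, data_type: str) -> list:
    # Halve the batch size for these payload types, mirroring _batch_data_v2.
    if data_type in ("issuerSpecificCovariance", "factorPortfolios"):
        max_asset_size //= 2
    return list(range(0, n_assets, max_asset_size))

print(batch_starts(5, 4, "assetData"))         # [0, 4]    -> two uploads
print(batch_starts(5, 4, "factorPortfolios"))  # [0, 2, 4] -> three uploads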
123 changes: 121 additions & 2 deletions gs_quant/test/models/test_risk_model.py
@@ -18,12 +18,11 @@

from gs_quant.models.risk_model import FactorRiskModel, MacroRiskModel, ReturnFormat, Unit
from gs_quant.session import *
from gs_quant.target.risk_models import RiskModel as Risk_Model, RiskModelCoverage, RiskModelTerm,\
from gs_quant.target.risk_models import RiskModel as Risk_Model, RiskModelCoverage, RiskModelTerm, \
RiskModelUniverseIdentifier, RiskModelType, RiskModelDataAssetsRequest as DataAssetsRequest, \
RiskModelDataMeasure as Measure, RiskModelUniverseIdentifierRequest as UniverseIdentifier
import datetime as dt


empty_entitlements = {
"execute": [],
"edit": [],
@@ -677,5 +676,125 @@ def test_get_specific_return(mocker):
assert response == specific_return_response


def test_upload_risk_model_data(mocker):
model = mock_risk_model(mocker)
risk_model_data = {
'date': '2023-04-14',
'assetData': {
'universe': ['2407966', '2046251', 'USD'],
'specificRisk': [12.09, 45.12, 3.09],
'factorExposure': [
{'1': 0.23, '2': 0.023},
{'1': 0.23},
{'3': 0.23, '2': 0.023}
],
'totalRisk': [0.12, 0.45, 1.2]
},
'factorData': [
{
'factorId': '1',
'factorName': 'USD',
'factorCategory': 'Currency',
'factorCategoryId': 'CUR'
},
{
'factorId': '2',
'factorName': 'ST',
'factorCategory': 'ST',
'factorCategoryId': 'ST'
},
{
'factorId': '3',
'factorName': 'IND',
'factorCategory': 'IND',
'factorCategoryId': 'IND'
}
],
'covarianceMatrix': [[0.089, 0.0123, 0.345],
[0.0123, 3.45, 0.345],
[0.345, 0.345, 1.23]],
'issuerSpecificCovariance': {
'universeId1': ['2407966'],
'universeId2': ['2046251'],
'covariance': [0.03754]
},
'factorPortfolios': {
'universe': ['2407966', '2046251'],
'portfolio': [{'factorId': 1, 'weights': [0.25, 0.75]},
{'factorId': 2, 'weights': [0.25, 0.75]},
{'factorId': 3, 'weights': [0.25, 0.75]}]
}
}

base_url = f"/risk/models/data/{model.id}?partialUpload=true"
date = risk_model_data.get("date")
max_asset_batch_size = 2

batched_asset_data = [
{"assetData": {key: value[i:i + max_asset_batch_size] for key, value in
risk_model_data.get("assetData").items()}, "date": date,
} for i in range(0, len(risk_model_data.get("assetData").get("universe")), max_asset_batch_size)
]

max_asset_batch_size //= 2
batched_factor_portfolios = [
{"factorPortfolios":
{key: (value[i:i + max_asset_batch_size] if key in "universe" else
[{"factorId": factor_weights.get("factorId"),
"weights": factor_weights.get("weights")[i:i + max_asset_batch_size]} for factor_weights in value])
for key, value in risk_model_data.get("factorPortfolios").items()},
"date": date
} for i in range(0, len(risk_model_data.get("factorPortfolios").get("universe")), max_asset_batch_size)
]

expected_factor_data_calls = [
mock.call(base_url, {"date": date, "factorData": risk_model_data.get("factorData"),
"covarianceMatrix": risk_model_data.get("covarianceMatrix")}, timeout=200)
]

expected_asset_data_calls = []
for batch_num, batch_asset_payload in enumerate(batched_asset_data):
final_upload_flag = 'true' if batch_num == len(batched_asset_data) - 1 else 'false'
expected_asset_data_calls.append(
mock.call(f"{base_url}&finalUpload={final_upload_flag}", batch_asset_payload, timeout=200)
)

expected_factor_portfolios_data_calls = []
for batch_num, batched_fp_payload in enumerate(batched_factor_portfolios):
final_upload_flag = 'true' if batch_num == len(batched_factor_portfolios) - 1 else 'false'
expected_factor_portfolios_data_calls.append(
mock.call(f"{base_url}&finalUpload={final_upload_flag}", batched_fp_payload, timeout=200)
)

expected_isc_data_calls = [
mock.call(f"{base_url}&finalUpload=true",
{"issuerSpecificCovariance": risk_model_data.get("issuerSpecificCovariance"), "date": date},
timeout=200)
]

expected_calls = expected_factor_data_calls + expected_asset_data_calls + \
expected_isc_data_calls + expected_factor_portfolios_data_calls

# mock GsSession
mocker.patch.object(
GsSession.__class__,
'default_value',
return_value=GsSession.get(
Environment.QA,
'client_id',
'secret'))
mocker.patch.object(GsSession.current, '_post', return_value='Upload Successful')

max_asset_batch_size = 2
model.upload_data(risk_model_data, max_asset_batch_size=max_asset_batch_size)

call_args_list = GsSession.current._post.call_args_list

assert len(call_args_list) == len(expected_calls)
assert call_args_list == expected_calls

GsSession.current._post.assert_has_calls(expected_calls, any_order=False)


if __name__ == "__main__":
pytest.main([__file__])
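
A worked check of the slicing this test asserts: a three-asset universe with max_asset_batch_size=2 yields two partial asset-data uploads, and only the last carries finalUpload=true (sketch):

universe = ['2407966', '2046251', 'USD']
batches = [universe[i:i + 2] for i in range(0, len(universe), 2)]
assert batches == [['2407966', '2046251'], ['USD']]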
30 changes: 30 additions & 0 deletions gs_quant/test/timeseries/test_backtesting.py
@@ -70,6 +70,36 @@ def test_basket_series():
index=dates)
assert_series_equal(mreb, basket_series([mreb], [1], rebal_freq=RebalFreq.MONTHLY))

dates = [
datetime.datetime(2019, 1, 1),
datetime.datetime(2019, 1, 2),
datetime.datetime(2019, 1, 3),
datetime.datetime(2019, 1, 4),
datetime.datetime(2019, 1, 5),
datetime.datetime(2019, 1, 8),
datetime.datetime(2019, 1, 9),
datetime.datetime(2019, 1, 10),
datetime.datetime(2019, 1, 11),
datetime.datetime(2019, 1, 12),
datetime.datetime(2019, 1, 13)
]
wreb = pd.Series(
[100.0, 105, 110, 115, 120, 125,
130, 135, 140, 145, 150],
index=dates)

wreb_2 = pd.Series(
[100.0, 105, 110, 115, 120, 125,
130, 135, 140, 145, 150],
index=dates)

ret_wreb = pd.Series(
[100.0, 110.0, 120.0, 130.0, 140.0, 150.0,
162.0, 174.0, 186.0, 198.0, 210.0],
index=dates)

assert_series_equal(ret_wreb, basket_series([wreb, wreb_2], [1, 1], rebal_freq=RebalFreq.WEEKLY))
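
A quick sanity check of the expected weekly-rebalance series (a sketch of the arithmetic the test implies, not library code): between rebalances the basket earns sum(w_i * (p_t / p_rebal - 1)) on its last rebalance value, so two identical legs at weight 1 double each move.

# Jan 9: legs at 130 vs 125 at the Jan 8 rebalance -> 2 * 4% = 8%; 150 * 1.08 = 162
p_rebal, basket_rebal, p_next = 125.0, 150.0, 130.0
assert round(basket_rebal * (1 + 2 * (p_next / p_rebal - 1)), 9) == 162.0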


def _mock_spot_data():
dates = pd.date_range(start='2021-01-01', periods=6)
25 changes: 21 additions & 4 deletions gs_quant/test/timeseries/test_measures_fx_vol.py
@@ -40,7 +40,13 @@

def test_currencypair_to_tdapi_fxfwd_asset():
mock_eur = Cross('MA8RY265Q34P7TWZ', 'EURUSD')
replace = Replacer()
xrefs = replace('gs_quant.timeseries.measures_fx_vol._get_tdapi_fxo_assets', Mock())
xrefs.return_value = 'MA8RY265Q34P7TWZ'
bbid_mock = replace('gs_quant.timeseries.measures_fx_vol.Asset.get_identifier', Mock())
bbid_mock.return_value = {'EURUSD'}
assert _currencypair_to_tdapi_fxfwd_asset(mock_eur) == "MA8RY265Q34P7TWZ"
replace.restore()


def test_currencypair_to_tdapi_fxo_asset(mocker):
@@ -153,7 +159,9 @@ def test_get_tdapi_fxo_assets():
replace = Replacer()
assets = replace('gs_quant.timeseries.measures.GsAssetApi.get_many_assets', Mock())
assets.return_value = [mock_asset_1]
assert 'MAW8SAXPSKYA94E2' == tm_fxo._get_tdapi_fxo_assets()
kwargs = dict(asset_parameters_expiration_date='5y', asset_parameters_call_currency='USD',
asset_parameters_put_currency='EUR')
assert 'MAW8SAXPSKYA94E2' == tm_fxo._get_tdapi_fxo_assets(**kwargs)
replace.restore()

assets = replace('gs_quant.timeseries.measures.GsAssetApi.get_many_assets', Mock())
@@ -199,7 +207,8 @@ def test_get_tdapi_fxo_assets():
def mock_curr(_cls, _q):
d = {
'impliedVolatility': [1, 2, 3],
'fwdPoints': [4, 5, 6]
'fwdPoints': [4, 5, 6],
'forwardPoint': [7, 8, 9]
}
df = MarketDataResponseFrame(data=d, index=_index * 3)
df.dataset_ids = _test_datasets
@@ -295,8 +304,16 @@ def test_fwd_points(mocker):
args['settlement_date'] = '6m'

args['real_time'] = True
with pytest.raises(NotImplementedError):
tm_fxo.fwd_points(**args)
xrefs = replace('gs_quant.timeseries.measures.Asset.get_identifier', Mock())
xrefs.return_value = 'EURUSD'
identifiers = replace('gs_quant.timeseries.measures_fx_vol._get_tdapi_fxo_assets', Mock())
identifiers.return_value = {'MAGZMXVM0J282ZTR'}
mocker.patch.object(GsDataApi, 'get_market_data', return_value=mock_curr(None, None))
actual = tm_fxo.fwd_points(**args)
expected = tm.ExtendedSeries([7, 8, 9], index=_index * 3, name='forwardPoint')
expected.dataset_ids = _test_datasets
assert_series_equal(expected, actual)
assert actual.dataset_ids == _test_datasets
args['real_time'] = False

args['asset'] = Cross('MAGZMXVM0J282ZTR', 'EURUSD')
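
For context on the real-time branch above: fwd_points(real_time=True) previously raised NotImplementedError and now resolves an intraday 'forwardPoint' series. A call sketch in the shape the test implies (the kwargs are an assumption; a live session and a valid Cross are required):

import gs_quant.timeseries.measures_fx_vol as tm_fxo
from gs_quant.markets.securities import Cross

eurusd = Cross('MAGZMXVM0J282ZTR', 'EURUSD')
series = tm_fxo.fwd_points(asset=eurusd, settlement_date='6m', real_time=True)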
10 changes: 7 additions & 3 deletions gs_quant/test/utils/mock_request.py
@@ -20,6 +20,7 @@
from typing import List
from unittest import mock

from gs_quant.errors import MqUninitialisedError
from gs_quant.session import GsSession, Environment


@@ -45,9 +46,12 @@ def __enter__(self):
self.mocker.patch.object(self.api, self.method, side_effect=self.mock_calc_create_new_files if str(
self.save_files).casefold() == 'new' else self.mock_calc_create_files)
else:
from gs_quant.session import OAuth2Session
OAuth2Session.init = mock.MagicMock(return_value=None)
GsSession.use(Environment.PROD, 'fake_client_id', 'fake_secret', application=self.application)
try:
_ = GsSession.current
except MqUninitialisedError:
from gs_quant.session import OAuth2Session
OAuth2Session._authenticate = mock.MagicMock(return_value=None)
GsSession.use(Environment.PROD, 'fake_client_id', 'fake_secret', application=self.application)
self.mocker.patch.object(self.api, self.method, side_effect=self.mock_calc)

def mock_calc(self, *args, **kwargs):