Merge pull request #10018 from gem/totloss
Fix extract_agg_curves to handle '+' separated total loss types
ptormene authored Oct 4, 2024
2 parents b143c73 + a4a1e5b commit b44dd5c
Showing 3 changed files with 34 additions and 7 deletions.
16 changes: 16 additions & 0 deletions openquake/calculators/event_based_risk.py
@@ -244,6 +244,19 @@ def gen_outputs(df, crmodel, rng, monitor):
yield out


def check_tot_loss_unit_consistency(units, total_losses, loss_types):
    total_losses_units = set()
    for separate_lt in total_losses.split('+'):
        assert separate_lt in loss_types
        for unit, lt in zip(units, loss_types):
            if separate_lt == lt:
                total_losses_units.add(unit)
    if len(total_losses_units) != 1:
        logging.warning(
            'The units of the single components of the total losses'
            ' are not homogeneous: %s' % total_losses_units)


def set_oqparam(oq, assetcol, dstore):
"""
Set the attributes .M, .K, .A, .ideduc, ._sec_losses
@@ -264,6 +277,9 @@ def set_oqparam(oq, assetcol, dstore):

    ideduc = assetcol['ideductible'].any()
    if oq.total_losses:
        units = dstore['exposure'].cost_calculator.get_units(oq.loss_types)
        check_tot_loss_unit_consistency(
            units.split(), oq.total_losses, oq.loss_types)
        sec_losses.append(
            partial(total_losses, kind=oq.total_losses, ideduc=ideduc))
    elif ideduc:
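The new helper can be exercised on its own. Below is a minimal sketch that reproduces check_tot_loss_unit_consistency as added above and calls it with hypothetical inputs (the 'EUR'/'people' units and the 'structural+occupants' total are illustrative only, not taken from this PR): heterogeneous units log the warning, homogeneous units do not.

import logging

def check_tot_loss_unit_consistency(units, total_losses, loss_types):
    # collect the unit of every loss type that contributes to the total
    total_losses_units = set()
    for separate_lt in total_losses.split('+'):
        assert separate_lt in loss_types
        for unit, lt in zip(units, loss_types):
            if separate_lt == lt:
                total_losses_units.add(unit)
    if len(total_losses_units) != 1:
        logging.warning(
            'The units of the single components of the total losses'
            ' are not homogeneous: %s' % total_losses_units)

# heterogeneous units ('EUR' vs 'people'): a warning is logged
check_tot_loss_unit_consistency(
    ['EUR', 'people'], 'structural+occupants', ['structural', 'occupants'])

# homogeneous units: no warning
check_tot_loss_unit_consistency(
    ['EUR', 'EUR'], 'structural+nonstructural',
    ['structural', 'nonstructural'])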
15 changes: 12 additions & 3 deletions openquake/calculators/extract.py
@@ -15,7 +15,7 @@
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
from urllib.parse import parse_qs
from urllib.parse import parse_qs, quote_plus
from functools import lru_cache
import operator
import logging
@@ -120,11 +120,17 @@ def parse(query_string, info={}):
    {'kind': ['mean'], 'k': [0], 'rlzs': False}
    >>> parse('kind=rlz-3&imt=PGA&site_id=0', {'stats': {}})
    {'kind': ['rlz-3'], 'imt': ['PGA'], 'site_id': [0], 'k': [3], 'rlzs': True}
    >>> parse(
    ...     'loss_type=structural+nonstructural&absolute=True&kind=rlzs')['lt']
    ['structural+nonstructural']
    """
    qdic = parse_qs(query_string)
    for key, val in sorted(qdic.items()):
        # convert site_id to an int, loss_type to an int, etc
        if key == 'loss_type':
            # NOTE: loss types such as 'structural+nonstructural' need to be
            # quoted, otherwise the plus would turn into a space
            val = [quote_plus(lt) for lt in val]
            qdic[key] = [LOSSID[k] for k in val]
            qdic['lt'] = val
        else:
@@ -793,8 +799,11 @@ def extract_agg_curves(dstore, what):
    if qdic['absolute'] == [1]:
        pass
    elif qdic['absolute'] == [0]:
        evalue, = dstore['agg_values'][agg_id][lts]
        arr /= evalue
        evalue_sum = 0
        for lts_item in lts:
            for lt in lts_item.split('+'):
                evalue_sum += dstore['agg_values'][agg_id][lt]
        arr /= evalue_sum
    else:
        raise ValueError('"absolute" must be 0 or 1 in %s' % what)
    attrs = dict(shape_descr=['kind', 'return_period', 'ep_field'] + tagnames)
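The reason for the quote_plus round-trip in parse can be checked with the standard library alone: parse_qs decodes the '+' in the query string as a space, and quote_plus re-encodes that space back into '+', so a composite name such as 'structural+nonstructural' survives parsing and can then be looked up in LOSSID. A small self-contained sketch (plain Python, no OpenQuake imports):

from urllib.parse import parse_qs, quote_plus

qdic = parse_qs('loss_type=structural+nonstructural&absolute=True&kind=rlzs')
# parse_qs has decoded the '+' into a space
print(qdic['loss_type'])                             # ['structural nonstructural']

# quote_plus turns the space back into '+', recovering the composite name
print([quote_plus(lt) for lt in qdic['loss_type']])  # ['structural+nonstructural']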
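In the absolute == [0] branch of extract_agg_curves, the exposed value of a '+' separated total loss type is now the sum of the exposed values of its components before the losses are divided by it. A toy sketch of that normalisation, with a plain dict standing in for one row of dstore['agg_values'] and made-up numbers:

import numpy

agg_values_row = {'structural': 4000., 'nonstructural': 6000.}  # hypothetical exposed values
lts = ['structural+nonstructural']      # '+' separated total loss type
arr = numpy.array([250., 500., 1000.])  # absolute aggregate losses (made up)

# sum the exposed value of each component, mirroring the new branch
evalue_sum = 0
for lts_item in lts:
    for lt in lts_item.split('+'):
        evalue_sum += agg_values_row[lt]

print(arr / evalue_sum)  # loss ratios w.r.t. the combined exposure: 0.025, 0.05, 0.1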
10 changes: 6 additions & 4 deletions openquake/calculators/post_risk.py
@@ -35,6 +35,7 @@
U16 = numpy.uint16
U32 = numpy.uint32


class FakeBuilder:
    eff_time = 0.
    pla_factor = None
@@ -305,7 +306,7 @@ def store_aggcurves(oq, agg_ids, rbe_df, builder, loss_cols,
limit_states=' '.join(oq.limit_states),
units=units, ep_fields=ep_fields)


# aggcurves are built in parallel, aggrisk sequentially
def build_store_agg(dstore, oq, rbe_df, num_events):
"""
@@ -385,10 +386,11 @@ def build_store_agg(dstore, oq, rbe_df, num_events):
agg * tr if oq.investigation_time else agg/ne)
    fix_dtypes(acc)
    aggrisk = pandas.DataFrame(acc)
    dstore.create_df('aggrisk', aggrisk, limit_states=' '.join(oq.limit_states))
    dstore.create_df('aggrisk', aggrisk,
                     limit_states=' '.join(oq.limit_states))
    if oq.investigation_time and loss_cols:
        store_aggcurves(
            oq, agg_ids, rbe_df, builder, loss_cols, events, num_events, dstore)
        store_aggcurves(oq, agg_ids, rbe_df, builder, loss_cols, events,
                        num_events, dstore)
    return aggrisk


