diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index cc537cb30989..50dd0ecd134e 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -62,6 +62,8 @@ on:
push:
branches: [master, 'engine-*']
+ pull_request:
+
jobs:
docs:
runs-on: ubuntu-latest
@@ -100,6 +102,7 @@ jobs:
pip install -U pip
pip install -r ./requirements-py38-linux64.txt
pip install sphinx==6.2 pydata-sphinx-theme
+ pip install --upgrade myst-parser
- name: Install oq engine
run: |
pip install -e .[dev]
@@ -171,8 +174,13 @@ jobs:
PDF_VER="(master)"
elif [ "$BUILD" == "dev" ]; then
DOCS_BASE="${DOCS_BASE}.dev/"
- TARGET="oq-engine/$GITHUB_REF_NAME/"
- PDF_VER="($GITHUB_REF_NAME)"
+ if echo "$GITHUB_REF_NAME" | grep -q '[0-9]\+/merge'; then
+ TARGET="oq-engine/$GITHUB_HEAD_REF/"
+ PDF_VER="($GITHUB_HEAD_REF)"
+ else
+ TARGET="oq-engine/$GITHUB_REF_NAME/"
+ PDF_VER="($GITHUB_REF_NAME)"
+ fi
PDFDOCS="manuals/"
else
echo "BUILD [$BUILD] not recognized"
diff --git a/.github/workflows/engine_nightly_test.yml b/.github/workflows/engine_nightly_test.yml
index 5f60823a8aef..10b3b62ad536 100644
--- a/.github/workflows/engine_nightly_test.yml
+++ b/.github/workflows/engine_nightly_test.yml
@@ -22,7 +22,6 @@ jobs:
python-version: [3.8, 3.9, "3.10"]
env:
GITHUB_HEAD_REF: ${{ github.head_ref }}
- GITHUB_REPOSITORY: ${{ github.repository }}
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
@@ -33,9 +32,9 @@ jobs:
run: |
if [[ "$GITHUB_HEAD_REF" != "" ]]
then
- python install.py devel --version=$GITHUB_HEAD_REF --repository=$GITHUB_REPOSITORY
+ python install.py devel --version=$GITHUB_HEAD_REF
else
- python install.py devel --repository=$GITHUB_REPOSITORY
+ python install.py devel
fi
- name: Calculators and documentation tests
run: |
@@ -63,9 +62,9 @@ jobs:
run: |
if [[ "$GITHUB_HEAD_REF" != "" ]]
then
- python install.py devel --version=$GITHUB_HEAD_REF --repository=$GITHUB_REPOSITORY
+ python install.py devel --version=$GITHUB_HEAD_REF
else
- python install.py devel --repository=$GITHUB_REPOSITORY
+ python install.py devel
fi
- name: Hazardlib tests
run: |
@@ -94,9 +93,9 @@ jobs:
run: |
if [[ "$GITHUB_HEAD_REF" != "" ]]
then
- python install.py devel --version=$GITHUB_HEAD_REF --repository=$GITHUB_REPOSITORY
+ python install.py devel --version=$GITHUB_HEAD_REF
else
- python install.py devel --repository=$GITHUB_REPOSITORY
+ python install.py devel
fi
- name: Server 'PUBLIC' mode tests
run: |
@@ -124,9 +123,9 @@ jobs:
run: |
if [[ "$GITHUB_HEAD_REF" != "" ]]
then
- python install.py devel --version=$GITHUB_HEAD_REF --repository=$GITHUB_REPOSITORY
+ python install.py devel --version=$GITHUB_HEAD_REF
else
- python install.py devel --repository=$GITHUB_REPOSITORY
+ python install.py devel
fi
- name: Server 'READ_ONLY' mode tests
run: |
@@ -154,9 +153,9 @@ jobs:
run: |
if [[ "$GITHUB_HEAD_REF" != "" ]]
then
- python install.py devel --version=$GITHUB_HEAD_REF --repository=$GITHUB_REPOSITORY
+ python install.py devel --version=$GITHUB_HEAD_REF
else
- python install.py devel --repository=$GITHUB_REPOSITORY
+ python install.py devel
fi
- name: Server 'AELO' mode tests
run: |
diff --git a/.github/workflows/engine_pr_test.yml b/.github/workflows/engine_pr_test.yml
index de2787b69d74..b74f118692ee 100644
--- a/.github/workflows/engine_pr_test.yml
+++ b/.github/workflows/engine_pr_test.yml
@@ -15,7 +15,7 @@ jobs:
python-version: ["3.10"]
env:
GITHUB_HEAD_REF: ${{ github.head_ref }}
- GITHUB_REPOSITORY: ${{ github.repository }}
+ FROM_FORK: ${{ github.event.pull_request.head.repo.fork }}
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
@@ -24,7 +24,11 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
- python install.py devel --version=$GITHUB_HEAD_REF --repository=$GITHUB_REPOSITORY
+ if [[ $FROM_FORK == "true" ]]; then
+ python install.py devel --version=$GITHUB_HEAD_REF --from_fork
+ else
+ python install.py devel --version=$GITHUB_HEAD_REF
+ fi
- name: Calculators and documentation tests
run: |
set -e
@@ -41,7 +45,7 @@ jobs:
python-version: ["3.10"]
env:
GITHUB_HEAD_REF: ${{ github.head_ref }}
- GITHUB_REPOSITORY: ${{ github.repository }}
+ FROM_FORK: ${{ github.event.pull_request.head.repo.fork }}
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
@@ -50,7 +54,11 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
- python install.py devel --version=$GITHUB_HEAD_REF --repository=$GITHUB_REPOSITORY
+ if [[ $FROM_FORK == "true" ]]; then
+ python install.py devel --version=$GITHUB_HEAD_REF --from_fork
+ else
+ python install.py devel --version=$GITHUB_HEAD_REF
+ fi
- name: Hazardlib tests
run: |
set -e
@@ -69,7 +77,7 @@ jobs:
python-version: ["3.10"]
env:
GITHUB_HEAD_REF: ${{ github.head_ref }}
- GITHUB_REPOSITORY: ${{ github.repository }}
+ FROM_FORK: ${{ github.event.pull_request.head.repo.fork }}
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
@@ -78,7 +86,11 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
- python install.py devel --version=$GITHUB_HEAD_REF --repository=$GITHUB_REPOSITORY
+ if [[ $FROM_FORK == "true" ]]; then
+ python install.py devel --version=$GITHUB_HEAD_REF --from_fork
+ else
+ python install.py devel --version=$GITHUB_HEAD_REF
+ fi
- name: Server 'PUBLIC' mode tests
run: |
source ~/openquake/bin/activate
@@ -94,7 +106,7 @@ jobs:
python-version: ["3.10"]
env:
GITHUB_HEAD_REF: ${{ github.head_ref }}
- GITHUB_REPOSITORY: ${{ github.repository }}
+ FROM_FORK: ${{ github.event.pull_request.head.repo.fork }}
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
@@ -103,7 +115,11 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
- python install.py devel --version=$GITHUB_HEAD_REF --repository=$GITHUB_REPOSITORY
+ if [[ $FROM_FORK == "true" ]]; then
+ python install.py devel --version=$GITHUB_HEAD_REF --from_fork
+ else
+ python install.py devel --version=$GITHUB_HEAD_REF
+ fi
- name: Server 'READ_ONLY' mode tests
run: |
source ~/openquake/bin/activate
@@ -119,7 +135,7 @@ jobs:
python-version: ["3.10"]
env:
GITHUB_HEAD_REF: ${{ github.head_ref }}
- GITHUB_REPOSITORY: ${{ github.repository }}
+ FROM_FORK: ${{ github.event.pull_request.head.repo.fork }}
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
@@ -128,7 +144,11 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
- python install.py devel --version=$GITHUB_HEAD_REF --repository=$GITHUB_REPOSITORY
+ if [[ $FROM_FORK == "true" ]]; then
+ python install.py devel --version=$GITHUB_HEAD_REF --from_fork
+ else
+ python install.py devel --version=$GITHUB_HEAD_REF
+ fi
- name: Server 'AELO' mode tests
run: |
source ~/openquake/bin/activate
diff --git a/.github/workflows/macos_intel_install.yml b/.github/workflows/macos_intel_install.yml
index a210f372348f..326370efb2bc 100644
--- a/.github/workflows/macos_intel_install.yml
+++ b/.github/workflows/macos_intel_install.yml
@@ -18,7 +18,7 @@ jobs:
strategy:
matrix:
os: [macos-13]
- python-version: [3.8, 3.9, "3.10"]
+ python-version: ["3.10"]
steps:
# This Checkout use git-ref keyword from dispatch
- name: Clone Repository (Master)
diff --git a/README.md b/README.md
index a6658795760a..d7d9fc5cc378 100644
--- a/README.md
+++ b/README.md
@@ -106,66 +106,71 @@ The OpenQuake Engine is developed by the **[Global Earthquake Model Foundation (
## Public Partners
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/public/nerc.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/public/dpc.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/public/gns_science.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/public/aus.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/public/nrcan.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/public/NTU.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/public/nset.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/public/swiss_1.jpg)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/public/tem.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/public/usaid.jpg)
+
+
+
+
+
+
+
+
+
+
+
+
## Private Partners
#### Governors
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/private/governors/eucentre.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/private/governors/FMGlobal.jpg)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/private/governors/hannoverRe.jpg)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/private/governors/munichRe.jpg)
-
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/private/governors/swissRe.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/private/governors/verisk.png)
-
+
+
+
+
+
+
+
+
+
+
+
#### Advisors
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/private/advisors/axa.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/private/advisors/descartes.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/private/advisors/oneconcern.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/private/advisors/guycarpenter.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/private/advisors/partnerRe.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/private/advisors/safehub.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/private/advisors/wtw.png)
-
+
+
+
+
+
+
## Associate Partners
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/associate/apdim.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/associate/cssc.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/associate/EERI_GEM.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/associate/iaee.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/associate/iaspei.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/associate/IRDRICSU.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/associate/istructe.jpg)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/associate/oecd.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/associate/undrr.jpg)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/associate/unesco.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/associate/usgs.jpg)
+
+
+
+
+
+
+
+
+
## Project Partners
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/project/aon.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/project/sg.jpg)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/project/sura.png)
+
+
+
+
## Products Distribution Partners
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/prod_distr/imagecat.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/prod_distr/nasdaq.png)
-![](https://github.com/gem/oq-infrastructure/raw/master/logos/prod_distr/verisk.png)
+
+
+
+
+
+
***
diff --git a/debian/changelog b/debian/changelog
index 6868e97f592e..6332db05ff7f 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,4 +1,13 @@
[Michele Simionato]
+ * Required at least Python 3.10.6 on mac and windows
+
+ [Claudio Schill]
+ * Optimized the Kuehn sigma_mu_adjustment calculation by replacing the
+ multi-step interpolation with a single 2D/3D space interpolation
+
+ [Michele Simionato]
+ * Fixed a bug in calculations with a filtered site collection using the
+ HM2018CorrelationModel
* Internal: raised a clear error message when get_composite_source_model is
called without passing a datastore in presence of multifault sources
diff --git a/demos/risk/EventBasedDamage/job.ini b/demos/risk/EventBasedDamage/job.ini
index de0664de55af..6a4a75b9108b 100644
--- a/demos/risk/EventBasedDamage/job.ini
+++ b/demos/risk/EventBasedDamage/job.ini
@@ -29,6 +29,8 @@ ses_per_logic_tree_path = 1000
minimum_intensity = 0.05
minimum_magnitude = 5.3
intensity_measure_types_and_levels = {'PGA': logscale(1e-10, 1.4, 20)}
+collect_rlzs = false
+individual_rlzs = true
[risk_calculation]
exposure_file = exposure_model.xml
@@ -37,4 +39,3 @@ consequence_file = {'taxonomy': 'consequences_by_taxonomy.csv'}
asset_hazard_distance = 20
risk_investigation_time = 1
discrete_damage_distribution = true
-return_periods = 500
diff --git a/doc/installing/README.md b/doc/installing/README.md
index 129a7e66ca76..763e8254ad6d 100644
--- a/doc/installing/README.md
+++ b/doc/installing/README.md
@@ -66,17 +66,17 @@ See instructions for the [universal installer](./universal.md) script. This scri
The OpenQuake Engine is also available on **[PyPI](https://pypi.python.org/pypi/openquake.engine)** and can be installed in any Python 3 environment via `pip`:
```
- $ pip install -r https://raw.githubusercontent.com/gem/oq-engine/master/requirements-py38-linux64.txt openquake.engine
+ $ pip install -r https://raw.githubusercontent.com/gem/oq-engine/master/requirements-py310-linux64.txt openquake.engine
```
- This works for Linux and Python 3.8. You can trivially adapt the command to Python 3.9 and 3.10, and to other operating systems. For instance for Windows and Python 3.8, it would be
+ This works for Linux and Python 3.10. You can trivially adapt the command to other operating systems. For instance for Windows it would be
```
- $ pip install -r https://raw.githubusercontent.com/gem/oq-engine/master/requirements-py38-win64.txt openquake.engine
+ $ pip install -r https://raw.githubusercontent.com/gem/oq-engine/master/requirements-py310-win64.txt openquake.engine
```
- and for Mac and Python 3.8, it would be
+ and for Mac it would be
```
- $ pip install -r https://raw.githubusercontent.com/gem/oq-engine/master/requirements-py38-macos.txt openquake.engine
+ $ pip install -r https://raw.githubusercontent.com/gem/oq-engine/master/requirements-py310-macos_arm64.txt openquake.engine
```
**Cloud**
diff --git a/doc/installing/cluster.md b/doc/installing/cluster.md
index 7d4886bba704..efc36e5edfad 100644
--- a/doc/installing/cluster.md
+++ b/doc/installing/cluster.md
@@ -171,6 +171,19 @@ currently running with respect to the number of cores available (for instance
on the host 192.168.2.1 only 1 core of 64 is running, while in the other
two workers 7 cores are running each).
+There are a few useful commands to manage the workers, to be run as user
+`openquake`:
+
+- `oq workers start` is used to start the workers
+- `oq workers stop` is used to stop the workers nicely
+- `oq workers kill` is used to send a hard `kill -9` to the workers
+- `oq workers debug` is used to test that the installation is correct
+
+If a calculation is stuck in the "executing" state due to an IT
+problem (like the cluster running out of memory followed by an `oq
+workers kill`) you can fix its status with the command `oq abort XXX`
+where `XXX` is the calculation ID.
+
## Running calculations
Jobs can be submitted through the master node using the `oq engine` command line interface, the API or the WebUI if active. See the documentation about [how to run a calculation](../running/unix.md) or about how to use the [WebUI](../running/server.md)
diff --git a/doc/installing/development.md b/doc/installing/development.md
index bec20aee4bfe..5b16740f3eb7 100644
--- a/doc/installing/development.md
+++ b/doc/installing/development.md
@@ -10,7 +10,12 @@ wanting to do everything manually.
Knowledge of [Python](https://www.python.org/) (and its [virtual environments](https://docs.python.org/3.9/tutorial/venv.html)), [git](https://git-scm.com/) and [software development](https://xkcd.com/844/) are required.
Some software prerequisites are needed to build the development environment.
-Python 3.8, 3.9 and 3.10 are supported.
+First of all you need a Python version supported by the engine.
+At the moment we recommend Python 3.10, which is the only version
+supported on Windows and macOS.
+
+**NB: Python 3.11 and 3.12 are not supported yet, so please do NOT install
+such versions**
### Ubuntu
diff --git a/doc/installing/docker.md b/doc/installing/docker.md
index e07b4b8d463d..a613e0310ffc 100644
--- a/doc/installing/docker.md
+++ b/doc/installing/docker.md
@@ -11,7 +11,7 @@ For more information about operating system support (which includes Linux, macOS
Each container includes:
-- Python 3.8 from official docker image
+- Python 3.10 from official docker image
- Python dependencies (numpy, scipy, h5py...)
- OpenQuake Engine and Hazardlib
- The `oq` command line tool
diff --git a/doc/installing/universal.md b/doc/installing/universal.md
index 7647a32ba707..5e5525d69853 100644
--- a/doc/installing/universal.md
+++ b/doc/installing/universal.md
@@ -52,11 +52,7 @@ C:\>python.exe install.py user
_*Note 1*: Make sure to run the script located under /Applications/Python 3.X/Install Certificates.command, after Python has been installed, to update the SSL certificates bundle see [see FAQ](../faq.md#certificate-verification-on-macOS)._
```
$ curl -O https://raw.githubusercontent.com/gem/oq-engine/master/install.py
-$ python3.9 install.py user
-```
-_*Note 2*: Users can decided the preferred Python version (e.g., `$python3.9 install.py user`)_
-
-_*Note 3*: Users with the M1 CPU must use Python 3.9 (e.g., `$python3.9 install.py user`)_
+$ python3.10 install.py user
**on Linux:**
```
@@ -64,14 +60,18 @@ $ curl -O https://raw.githubusercontent.com/gem/oq-engine/master/install.py
$ /usr/bin/python3 install.py user
```
-This installation method will create a Python virtual environment in `$HOME/openquake` and will install the engine on it.
-After that, you can activate the virtual environment with
+This installation method will create a Python virtual environment in
+`$HOME/openquake` and will install the engine on it. After that, you
+can activate the virtual environment with
**on Windows:**
```
C:\>%USERPROFILE%\openquake\Scripts\activate.bat
```
-
+or, when using PowerShell,
+```
+C:\>%USERPROFILE%\openquake\Scripts\activate.ps1
+```
**on macOS and Linux:**
```
$ source $HOME/openquake/bin/activate
@@ -123,26 +123,25 @@ _*Note 1*: Make sure to run the script located under /Applications/Python 3.X/In
```
$ git clone https://github.com/gem/oq-engine.git
$ cd oq-engine
-$ python3.9 install.py devel
-```
-_*Note 2*: Users can choose the preferred Python version (e.g., `$python3.9 install.py user`)_
-
-_*Note 3*: Users with M1 CPU must use Python 3.9 (e.g., `$python3.9 install.py user`)_
+$ python3.10 install.py devel
**on Linux:**
```
$ git clone https://github.com/gem/oq-engine.git
$ cd oq-engine && /usr/bin/python3 install.py devel
```
-
-This installation method will create a Python virtual environment in `$HOME/openquake` and will install the engine
-in development mode in this environment. Then, activate the virtual environment with
+This installation method will create a Python virtual environment in
+`$HOME/openquake` and will install the engine in development mode in
+this environment. Then, activate the virtual environment with
**on Windows:**
```
C:\>%USERPROFILE%\openquake\Scripts\activate.bat
```
-
+or, when using PowerShell,
+```
+C:\>%USERPROFILE%\openquake\Scripts\activate.ps1
+```
**on macOS and Linux:**
```
$ source $HOME/openquake/bin/activate
diff --git a/doc/manual/conf.py b/doc/manual/conf.py
index abdd48b8a6ad..a35803387903 100644
--- a/doc/manual/conf.py
+++ b/doc/manual/conf.py
@@ -30,6 +30,7 @@
# ones.
extensions = [
'sphinx.ext.mathjax',
+ 'myst_parser',
]
# Add any paths that contain templates here, relative to this directory.
@@ -38,8 +39,11 @@
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
-source_suffix = '.rst'
+# source_suffix = '.rst'
#source_suffix = 'oq-manual.rst'
+source_suffix = {'.rst': 'restructuredtext',
+ '.md': 'markdown',
+ }
# The encoding of source files.
source_encoding = 'utf-8-sig'
@@ -105,7 +109,11 @@
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
-exclude_patterns = ['_build', 'old']
+exclude_patterns = ['_build', 'old',
+ 'build/html/_static/vendor/fontawesome/6.1.2/js/all.min.js.LICENSE.txt',
+ 'build/html/_static/vendor/fontawesome/6.1.2/LICENSE.txt',
+ 'build/html/_static/scripts/bootstrap.js.LICENSE.txt',
+ ]
# The reST default role (used for this markup: `text`) to use for all
# documents.
@@ -328,10 +336,11 @@
'pointsize': '12pt',
# Additional stuff for the LaTeX preamble.
- # 'preamble': '',
+ 'preamble': '\\usepackage{gensymb}',
# Latex figure (float) alignment
# 'figure_align': 'htbp',
+ 'inputenc': '\\usepackage[utf8]{inputenc}',
'utf8extra': r"""
\DeclareUnicodeCharacter{22EE}{\ensuremath{\vdots}}
""",
diff --git a/doc/manual/hazard.rst b/doc/manual/hazard.rst
index 58b09911f75a..aa73f813fefa 100644
--- a/doc/manual/hazard.rst
+++ b/doc/manual/hazard.rst
@@ -2083,7 +2083,7 @@ exceedence (poes) as those specified by the later option ``poes``. The
probabilities specified here correspond to the set investigation time.
Specifying poes will output hazard maps. For more information about the
outputs of the calculation, see the section: “Description of hazard
-output” (page ).
+outputs”.
Seismic hazard disaggregation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -2630,9 +2630,9 @@ In particular, the median is computed as the q=0.5 quantile.
Outputs from Classical PSHA
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-By default, the
-classical PSHA calculator computes and stores hazard curves for each
-logic tree sample considered.
+
+By default, the classical PSHA calculator computes and stores hazard
+curves for each logic tree sample considered.
When the PSHA input model doesn’t contain epistemic uncertainties the
results is a set of hazard curves (one for each investigated site). The
@@ -2641,9 +2641,7 @@ hazard curves obtained for a calculation with a given identifier
```` (see
Section :ref:`sec-exporting_hazard_results`
for an explanation about how to obtain the list of calculations
-performed with their corresponding ID):
-
-::
+performed with their corresponding ID)::
user@ubuntu:~$ oq engine --lo
id | name
@@ -2652,52 +2650,32 @@ performed with their corresponding ID):
To export from the database the outputs (in this case hazard curves)
contained in one of the output identifies, one can do so with the
-following command:
-
-::
+following command::
user@ubuntu:~$ oq engine --export-output
Alternatively, if the user wishes to export all of the outputs
associated with a particular calculation then they can use the
-``--export-outputs`` with the corresponding calculation key:
-
-::
+``--export-outputs`` with the corresponding calculation key::
user@ubuntu:~$ oq engine --export-outputs
-The exports will produce one or more nrml files containing the seismic
-hazard curves, as represented below in
-`the listing ` below.
+The exports will produce one or more CSV files containing the seismic
+hazard curves as represented in the listing
+` below.
.. container:: listing
- .. code:: xml
+ .. code:: csv
:number-lines:
- :name: lst:output_hazard_curves_xml
+ :name: lst:output_hazard_curves_csv
-
-
-
- 0.005 0.007 0.0098 ... 1.09 1.52 2.13
-
-
- 10.0 45.0
-
- 1.0 1.0 1.0 ... 0.000688359310522 0.0 0.0
-
- ...
-
-
- lon lat
-
- poe1 poe2 ... poeN
-
-
-
+ #,,,,,"generated_by='OpenQuake engine 3.18.0-gitabf2de85b8', start_date='2023-10-03T06:09:08', checksum=2107362341, kind='mean', investigation_time=1.0, imt='PGA'"
+ lon,lat,depth,poe-0.1000000,poe-0.4000000,poe-0.6000000
+ 0.00000,0.00000,-0.10000,4.553860E-01,5.754042E-02,6.354511E-03
+ 0.10000,0.00000,-0.10000,1.522632E-01,0.000000E+00,0.000000E+00
+ 0.20000,0.00000,-0.10000,3.037810E-03,0.000000E+00,0.000000E+00
+ 0.30000,0.00000,-0.10000,0.000000E+00,0.000000E+00,0.000000E+00
Notwithstanding the intuitiveness of this file, let’s have a brief
overview of the information included. The overall content of this file
@@ -2705,25 +2683,14 @@ is a list of hazard curves, one for each investigated site, computed
using a PSHA input model representing one possible realisation obtained
using the complete logic tree structure.
-The attributes of the ``hazardCurves`` element (see text in red) specify
-the path of the logic tree used to create the seismic source model
-(``sourceModelTreePath``) and the ground motion model (``gsimTreePath``)
-plus the intensity measure type and the investigation time used to
-compute the probability of exceedance.
-
-The ``IMLs`` element (in green in the example) contains the values of
-shaking used by the engine to compute the probability of exceedance in
-the investigation time. For each site this file contains a
-``hazardCurve`` element which has the coordinates (longitude and
-latitude in decimal degrees) of the site and the values of the
-probability of exceedance for all the intensity measure levels specified
-in the ``IMLs`` element.
+The first commented line contains some metadata like the version of the
+engine used to generate the file, the start date of the calculation, a
+checksum, the kind of hazard curves generated (in the example they are
+mean curves), the investigation time and the IMT used (in the example PGA).
If the hazard calculation is configured to produce results including
seismic hazard maps and uniform hazard spectra, then the list of outputs
-would display the following:
-
-::
+would display the following::
user@ubuntu:~$ oq engine --lo
id | name
@@ -2733,28 +2700,22 @@ would display the following:
5 | Realizations
6 | Uniform Hazard Spectra
-:ref:`The first listing ` below
-shows a sample of the nrml file used to describe a hazard map, and and
+:ref:`The first listing ` below
+shows a sample of the CSV file used to describe a hazard map, and
:ref:`the second listing ` below shows a sample of the
-nrml used to describe a uniform hazard spectrum.
+CSV used to describe a uniform hazard spectrum.
.. container:: listing
.. code:: xml
:number-lines:
- :name: lst:output_hazard_map_xml
+ :name: lst:output_hazard_map_csv
-
-
-
-
-
-
- ...
-
-
+ #,,,,"generated_by='OpenQuake engine 3.18.0-gitabf2de85b8', start_date='2023-10-03T06:09:09', checksum=969346546, kind='mean', investigation_time=1.0"
+ lon,lat,PGA-0.002105,SA(0.2)-0.002105,SA(1.0)-0.002105
+ -123.23738,49.27479,3.023730E-03,1.227876E-02,1.304533E-02
+ -123.23282,49.26162,2.969411E-03,1.210481E-02,1.294509E-02
+ -123.20480,49.26786,2.971350E-03,1.211078E-02,1.294870E-02
.. container:: listing
@@ -2762,32 +2723,16 @@ nrml used to describe a uniform hazard spectrum.
:number-lines:
:name: lst:output_uhs
-
-
-
- 0.0 0.025 0.1 0.2
-
-
- 0.0 0.0
-
- 0.3 0.5 0.2 0.1
-
-
-
- 0.0 1.0
-
- 0.3 0.5 0.2 0.1
-
-
-
+ #,,,,"generated_by='OpenQuake engine 3.15.0-git7c5b3f1678', start_date='2022-05-14T10:44:47', checksum=2967670219, kind='rlz-001', investigation_time=1.0"
+ lon,lat,0.002105~PGA,0.002105~SA(0.2),0.002105~SA(1.0)
+ -123.23738,49.27479,2.651139E-03,1.120929E-02,1.218275E-02
+ -123.23282,49.26162,2.603451E-03,1.105909E-02,1.208975E-02
+ -123.20480,49.26786,2.605109E-03,1.106432E-02,1.209299E-02
Outputs from Hazard Disaggregation
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The
-OpenQuake engine output of a disaggregation analysis corresponds to the
+
+The OpenQuake engine output of a disaggregation analysis corresponds to the
combination of a hazard curve and a multidimensional matrix containing
the results of the disaggregation. For a typical disaggregation
calculation the list of outputs are the following:
diff --git a/doc/manual/index_toc.rst_part b/doc/manual/index_toc.rst_part
index f1b4324db06f..34adc689e595 100644
--- a/doc/manual/index_toc.rst_part
+++ b/doc/manual/index_toc.rst_part
@@ -7,4 +7,6 @@
risk
secondary_perils
thanks
- glossary
\ No newline at end of file
+ glossary
+
+
diff --git a/doc/manual/secondary_perils.rst b/doc/manual/secondary_perils.rst
index b16a2a94054d..5547b7a0adc5 100644
--- a/doc/manual/secondary_perils.rst
+++ b/doc/manual/secondary_perils.rst
@@ -12,5 +12,4 @@ Tools for preparing the data for these models are also presented. This can be a
:caption: Contents:
sep/sep_models
- sep/liquefaction_data_prep
- sep/sep_tutorials
\ No newline at end of file
+ sep/sep_tutorials
diff --git a/doc/manual/sep/liquefaction_data_prep.md b/doc/manual/sep/liquefaction_data_prep.md
index 31d04b751332..d5f2b79868d3 100644
--- a/doc/manual/sep/liquefaction_data_prep.md
+++ b/doc/manual/sep/liquefaction_data_prep.md
@@ -1,3 +1,6 @@
+---
+orphan: true
+---
# Site characterization for probabilistic liquefaction analysis
There are many methods to calculate the probabilities and displacements that result from liquefaction. In OpenQuake, we have several models, the methods developed by the US Federal Emergency Management Agency through their HAZUS project, and geospatial methods recently developed by [Zhu et al. (2015)][z15], [Zhu et al. (17)][z17], [Rashidian et al. (2020)][rb20], [Akhlagi et al. (2021)][akh21], [Bozzoni et al. (2021)][b21],[Allstadt et al. (2022)][all22], [Todorovic and Silva (2022)][ts22].
@@ -163,4 +166,4 @@ available [here][precip].
[srap]: https://github.com/gem/oq-engine/blob/ef33b5e0dfdca7a214dac99d4d7214086023ab39/openquake/sep/utils.py#L22
[wald_allen_07]: https://github.com/gem/oq-engine/blob/ef33b5e0dfdca7a214dac99d4d7214086023ab39/openquake/sep/utils.py#L260
-[wrapper]: https://github.com/gem/oq-engine/blob/ef33b5e0dfdca7a214dac99d4d7214086023ab39/openquake/sep/utils.py#L227
\ No newline at end of file
+[wrapper]: https://github.com/gem/oq-engine/blob/ef33b5e0dfdca7a214dac99d4d7214086023ab39/openquake/sep/utils.py#L227
diff --git a/doc/manual/sep/sep_models.rst b/doc/manual/sep/sep_models.rst
index e315ac57d3d5..efdf340df96e 100644
--- a/doc/manual/sep/sep_models.rst
+++ b/doc/manual/sep/sep_models.rst
@@ -83,17 +83,17 @@ the causative earthquake.
The model is quite simple. An explanatory variable :math:`X` is
calculated as:
-.. math:: X = 24.1 + 2.067\, ln\, PGA_{M,SM} + 0.355\,CTI − 4.784\, ln\, Vs30\ \(2)
+.. math:: X = 24.1 + 2.067\, ln\, PGA_{M,SM} + 0.355\,CTI - 4.784\, ln\, Vs30 (2)
and the final probability is the logistic function
-.. math:: P(L) = \frac{1}{1+e^X}\ \(3)
+.. math:: P(L) = \frac{1}{1+e^X} (3)
The term :math:`PGA_{M,SM}` is the PGA corrected by magnitude scaling
factor (MSF) that serves as proxy for earthquake duration. The :math:`MSF`
is calculated as per Idriss et al. (1991):
-.. math:: MSF = \{10^2.24}{M^2.56}\ \(4)
+.. math:: MSF = \frac{10^{2.24}}{M^{2.56}} (4)
Both the :math:`CTI` and the :math:`Vs30` may be derived from digital
elevation data. The :math:`Vs30` may be estimated from the topographic
@@ -110,7 +110,7 @@ The CTI (Moore et al., 1991) is a proxy for soil wetness that relates
the topographic slope of a point to the upstream drainage area of that
point, through the relation
-.. math:: CTI = \ln (d_a / \tan \delta)\ \(4)
+.. math:: CTI = \ln (d_a / \tan \delta) (4)
where :math:`d_a` is the upstream drainage area per unit width through
the flow direction (i.e. relating to the DEM resolution). It was
@@ -132,9 +132,9 @@ input variables (i.e., :math:`PGA_{M,SM}`, :math:`CTI`, :math:`Vs30`)
and functional form as in Zhu et al. (2015). Regression parameters are
calibrated based on the liquefaction case histories observed during
seismic events in Europe. The implemented model is associated with the
-ADASYN sampling algorithm. The explanatory variable :math:`X`is computed as:
+ADASYN sampling algorithm. The explanatory variable :math:`X` is computed as:
-.. math:: X = -11.489 + 3.864\, ln\, PGA_{M} + 2.328\,CTI − 0.091\, ln\, Vs30\ \(5)
+.. math:: X = -11.489 + 3.864\, ln\, PGA_{M} + 2.328\,CTI - 0.091\, ln\, Vs30 (5)
and the probability of liquefaction in calculated using equation (3).
@@ -165,15 +165,15 @@ HydroSHEDS database (Lehner et al. 2008). Water table depth is retreived from a
global dataset by Fan et al (2013).Distance to the nearest coastline data
was computed from https://oceancolor.gsfc.nasa.gov.
-The explanatory varibale :math:`X`is calculated as:
+The explanatory variable :math:`X` is calculated as:
Model 1:
.. math:: X = 12.435 + 0.301\, ln\, PGV - 2.615\, ln\, Vs30 + 0.0005556\, precip
-.. math:: -0.0287\, \sqrt{d_{c}} + 0.0666\,d_{r} - 0.0369\, \sqrt{d_{c}} \cdot d_{r}\ \(6)
+.. math:: -0.0287\, \sqrt{d_{c}} + 0.0666\,d_{r} - 0.0369\, \sqrt{d_{c}} \cdot d_{r} (6)
Model 2:
.. math:: X = 8.801 + 0.334\, ln\, PGV - 1.918\, ln\, Vs30 + 0.0005408\, precip
-.. math:: -0.2054\, d_{w} -0.0333\, wtd\ \(7)
+.. math:: -0.2054\, d_{w} -0.0333\, wtd (7)
and the probability of liquefaction is calculated using equation (3).
Zero probability is heuristically assigned if :math:`PGV < 3 cm/s ` or
@@ -188,7 +188,7 @@ pixel. Logistic regression with the same form was fit for the two models,
with only difference in squaring the denominator to improve the fit. The
regression coefficients are given in Table 2.
-.. math:: L(P) = \frac{a}{1+b\,e^(-c\,P)}^2\ \(8)
+.. math:: L(P) = \frac{a}{(1+b\,e^{-c\,P})^2} (8)
+--------------+-----------+-----------+
| Parameters | Model 1 | Model 2 |
@@ -199,6 +199,7 @@ regression coefficients are given in Table 2.
+--------------+-----------+-----------+
| c | 11.43 | 9.165 |
+--------------+-----------+-----------+
+
Table 2: Parameters for relating proba-
bilities to areal liquefaction percent.
@@ -216,7 +217,7 @@ Additional novelty introduced in this model is the magnitude scaling factor
:math:`MSF` to multiply the :math:`PGV` to mitigate the potential over-prediction
in earthquake with low magnitude.
-.. :math:: MSF = \frac{1}{1+e^(-2\,[M-6])}\ \(9)
+.. :math:: MSF = \frac{1}{1+e^{-2\,[M-6]}} (9)
The explanatory variable :math:`X` is evaluated using the equation (7) that corr-
esponds to the general model of Zhu et al. (2017). The spatial extent is evaluated
@@ -240,11 +241,11 @@ explanatory variables of two candidate models are:
Model 1:
.. math:: X = 4.925 + 0.694\, ln\, PGV - 0.459\, \sqrt{TRI} - 0.403\, ln\, d_{c}+1
-.. math:: -0.309\, \ln\, d_{r}+1 - 0.164\, \sqrt{Z_{wb}}\ \(10)
+.. math:: -0.309\, \ln\, d_{r}+1 - 0.164\, \sqrt{Z_{wb}} (10)
Model 2:
.. math:: X = 9.504 + 0.706\, ln\, PGV - 0.994\, ln\, Vs30 - 0.389\, ln\, d_{c}+1
-.. math:: -0.291\, \ln\, d_{r}+1 - 0.205\, \sqrt{Z_{wb}}\ \(11)
+.. math:: -0.291\, \ln\, d_{r}+1 - 0.205\, \sqrt{Z_{wb}} (11)
and the probability of liquefaction is calculated using equation (3).
Zero probability is heuristically assigned if :math:`PGV < 3 cm` or
@@ -293,7 +294,7 @@ Lateral spreading (Hazus)
The expected permanent displacement due to lateral spreading given the
susceptibility category can be determined as:
-.. :math:: E[PGD_{SC}] = K_{\Delta}\times E[PGD|(PGA/PL_{SC})=a]\ \(12)
+.. :math:: E[PGD_{SC}] = K_{\Delta}\times E[PGD|(PGA/PL_{SC})=a] (12)
Where:
:math:`E[PGD|(PGA/PL_{SC})=a]` is the expected ground displacement given
@@ -307,7 +308,7 @@ and is calculated as:
:math:`PGA(t)` is theminimum shaking level to induce liquefaction (see Table 1)
:math:`K_{\Delta}` is the displacement correction factor given thhat modify
the displacement term for magnitudes other than :math:`M7.5`:
-.. :math:: K_{\Delta} = 0.0086\, M^3 - 0.0914\, M^2 + 0.4698\, M - 0.9835\ \(13)
+.. :math:: K_{\Delta} = 0.0086\, M^3 - 0.0914\, M^2 + 0.4698\, M - 0.9835 (13)
Vertical settlements (Hazus)
@@ -336,6 +337,7 @@ category (LSC).
+----------------+-----------------------+
| none | 0 |
+----------------+-----------------------+
+
Table 3: Ground settlements amplitudes for
liquefaction susceptibility categories.
@@ -356,20 +358,20 @@ the critical acceleration :math:`a_{c}`, the block starts to move. The
crtical acceleration accounts for the shear strength and geometrical
characteristics of the sliding surface, and is calculated as:
-.. :math:: a_{c} = (FS-1)\,g\,sin(\alpha)\ \(14)
+.. :math:: a_{c} = (FS-1)\,g\,sin(\alpha) (14)
The lower bound of :math:`a_{c}` is set to 0.05 to avoid unrealistically
large displacements.
The static factor of safety is calculated as:
.. :math:: FS = \frac{c'}{\gamma\, t\, sin(\alpha)} + frac{tan(\phi')}{tan(\alpha)} -
-.. :math:: frac{m\, \gamma_{w}\, tan(\phi')}{\gamma\, tan(\alpha)}\ \(15)
+.. :math:: \frac{m\, \gamma_{w}\, tan(\phi')}{\gamma\, tan(\alpha)} (15)
where:
:math:`c' [Pa]` is the effective cohession with typical values ranging
from 20 kPa for soils up to 20 MPa for unfaulted rocks.
-:math:`\alpha [\degrees]` is the slope angle.
-:math:`\phi' [\degrees]` is the effective friction angle with typical values
+:math:`\alpha [\degree]` is the slope angle.
+:math:`\phi' [\degree]` is the effective friction angle with typical values
ranging from 30 to 40 degrees.
:math:`\gamma [kg/m^3]` is the dry_density of the soil or rock. It ranges
from ~1500 :math:`kg/m^3` for soils to ~ 2500 - 3200 :math:`kg/m^3`.
@@ -393,7 +395,7 @@ The displacement is in units of meters.
.. :math:: logD_{N} = -2.710 +
.. :math:: log[(1-\frac{a_{c}}{a_{max}})^2.335\, (\frac{a_{c}}{a_{max}})^-1.478] +
-.. :math:: 0.424\, M \± 0.454\ \(16)
+.. :math:: 0.424\, M \pm 0.454 (16)
The computed displacements do not necessarily correspond directly to
measurable slope movements in the field, but the modeled displacements
@@ -403,7 +405,7 @@ and fit the data with Weilbull curve. The following equation can be used
to estimate the probability of slope failure as a function of Newmark
displacement.
-.. :math:: P(f) = 0.335\, [1-e^(-0.048\, D_{n}^1.565)]\ \(17)
+.. :math:: P(f) = 0.335\, [1-e^{-0.048\, D_{n}^{1.565}}] (17)
The rock-slope failures are the other common effect observed in earthquakes.
@@ -412,34 +414,34 @@ associated with rock-slope failures and discontinuities common in rock masses.
The static factor of safety is computed as:
.. :math:: FS = \frac{2\, (c+c_{r})\, sin(\alpha)}{\gamma\, h\, sin(\beta)} +
-.. :math:: \frac{tan(\phi)}{tan(\beta)}\ \(18)
+.. :math:: \frac{tan(\phi)}{tan(\beta)} (18)
where:
:math:`c [Pa]` is the cohession with typical values ranging from 20 kPa
for soils up to 20 MPa for unfaulted rocks.
:math:`c_{r}` is the cohesion provided by the root systems of vegetated
hillslopes. Here, we adopted the default value of 0 root cohesion.
-:math:`\alpha [\degrees]` is the slope angle.
+:math:`\alpha [\degree]` is the slope angle.
:math:`\gamma [kg/m^3]` is the dry_density of the soil or rock. It ranges
from ~1500 :math:`kg/m^3` for soils to ~ 2500 - 3200 :math:`kg/m^3`.
:math:`h [m]` is the vertical height of the failure mass and it corresponds
to 1/4 of the local relief :math:`H` calculated based on the moving
window analysis.
-:math:`\phi' [\degrees]` is the effective friction angle with typical values
+:math:`\phi' [\degree]` is the effective friction angle with typical values
ranging from 30 to 40 degrees.
:math:`\beta` is the slope's critical angle calculated as:
-.. :math:: \beta = \frac{\alpha + \phi}{0.5}\ \(19)
+.. :math:: \beta = \frac{\alpha + \phi}{0.5} (19)
The critical acceleration is computed similarly to equation (14). For rock-
slope failures, the :math:`\alpha` term is replaced with :math:`\beta`.
-.. :math:: a_{c} = (FS-1)\,g\,sin(\beta)\ \(20)
+.. :math:: a_{c} = (FS-1)\,g\,sin(\beta) (20)
Finaly, the coseismic displacements are estimated using Jibson’s (2007) sliding
block displacement regression equation:
.. :math:: logD_{N} = 0.215 +
-.. :math:: log[(1-\frac{a_{c}}{a_{max}})^2.341\, (\frac{a_{c}}{a_{max}})^-1.438]\ \(21)
+.. :math:: log[(1-\frac{a_{c}}{a_{max}})^{2.341}\, (\frac{a_{c}}{a_{max}})^{-1.438}] (21)
diff --git a/doc/manual/sep/sep_tutorials.rst b/doc/manual/sep/sep_tutorials.rst
index 15ea4141e68d..4add69dc2f69 100644
--- a/doc/manual/sep/sep_tutorials.rst
+++ b/doc/manual/sep/sep_tutorials.rst
@@ -5,7 +5,3 @@ Several tutorials are available for preparing data and performing calculations
relating to secondary perils (liquefaction).
These tutorials are given as Jupyter Notebooks:
-
-.. toctree::
- liq_site_prep
- liquefaction_analysis
diff --git a/install.py b/install.py
index f436720cab8b..5a73ddcf332a 100644
--- a/install.py
+++ b/install.py
@@ -177,7 +177,7 @@ class devel(user):
WantedBy=multi-user.target
'''
-PYVER = sys.version_info[:2]
+PYVER = sys.version_info
PLATFORM = {'linux': ('linux64',), # from sys.platform to requirements.txt
'darwin': ('macos',),
'win32': ('win64',)}
@@ -204,13 +204,13 @@ def ensure(pip=None, pyvenv=None):
% sys.executable))
-def get_requirements_branch(version, inst, repository):
+def get_requirements_branch(version, inst, from_fork):
"""
Convert "version" into a branch name
"""
- repository_owner, repository_name = repository.split('/')
- # in forks of oq-engine, always read requirements from master
- if repository_owner != 'gem' and repository_name == 'oq-engine':
+ # in actions triggered by forks we want requirements to be taken from
+ # master
+ if from_fork:
return 'master'
# in cases such as 'install.py user', for instance while running tests from
# another gem repository, we need requirements to be read from the latest
@@ -267,8 +267,14 @@ def before_checks(inst, venv, port, remove, usage):
inst.DBPORT = int(port)
# check python version
- if PYVER < (3, 8):
- sys.exit('Error: you need at least Python 3.8, but you have %s' %
+ if sys.platform == 'linux':
+ # requires Python >= 3.8.0
+ if PYVER < (3, 8, 0):
+ sys.exit('Error: you need at least Python 3.8, but you have %s' %
+ '.'.join(map(str, sys.version_info)))
+ elif PYVER < (3, 10, 6):
+ # requires Python >= 3.10.6
+ sys.exit('Error: you need at least Python 3.10.6, but you have %s' %
'.'.join(map(str, sys.version_info)))
# check platform
@@ -343,7 +349,7 @@ def fix_version(commit, venv):
f.write(''.join(lines))
-def install(inst, version, repository):
+def install(inst, version, from_fork):
"""
Install the engine in one of the three possible modes
"""
@@ -390,13 +396,14 @@ def install(inst, version, repository):
'pip', 'wheel'])
# install the requirements
- branch = get_requirements_branch(version, inst, repository)
+ branch = get_requirements_branch(version, inst, from_fork)
if sys.platform == 'darwin':
mac = '_' + platform.machine(), # x86_64 or arm64
else:
mac = '',
req = f'https://raw.githubusercontent.com/gem/oq-engine/{branch}/' \
- 'requirements-py%d%d-%s%s.txt' % (PYVER + PLATFORM[sys.platform] + mac)
+ 'requirements-py%d%d-%s%s.txt' % (
+ PYVER[:2] + PLATFORM[sys.platform] + mac)
subprocess.check_call(
[pycmd, '-m', 'pip', 'install',
@@ -448,19 +455,13 @@ def install(inst, version, repository):
if (inst is server and not os.path.exists(inst.OQ) or
inst is devel_server and not os.path.exists(inst.OQ)):
os.symlink(oqreal, inst.OQ)
- if inst is user:
- if sys.platform == 'win32':
- print(f'Please activate the virtualenv with {inst.VENV}'
- '\\Scripts\\activate.bat')
- else:
- print(f'Please add an alias oq={oqreal} in your .bashrc or equiv')
- elif inst is devel:
- if sys.platform == 'win32':
- print(f'Please activate the virtualenv with {inst.VENV}'
- '\\Scripts\\activate.bat')
- else:
- print(f'Please activate the venv with source {inst.VENV}'
- '/bin/activate')
+ if sys.platform == 'win32' and inst in (user, devel):
+ print(f'Please activate the virtualenv with {inst.VENV}'
+ f'\\Scripts\\activate.bat (in CMD) or {inst.VENV}'
+ '\\Scripts\\activate.ps1 (in PowerShell)')
+ elif inst in (user, devel):
+ print(f'Please activate the venv with source {inst.VENV}'
+ '/bin/activate')
# create systemd services
if ((inst is server and os.path.exists('/run/systemd/system')) or
@@ -541,10 +542,11 @@ def remove(inst):
help="version to install (default stable)")
parser.add_argument("--dbport",
help="DbServer port (default 1907 or 1908)")
- parser.add_argument("--repository",
- help=("The owner and repository name. For example,"
- " 'gem/oq-engine' or 'forkowner/oq-engine'"),
- default='gem/oq-engine')
+ # NOTE: This flag should be set when installing the engine from an action
+ # triggered by a fork
+ parser.add_argument("--from_fork", dest='from_fork', action='store_true',
+ help=argparse.SUPPRESS)
+ parser.set_defaults(from_fork=False)
args = parser.parse_args()
if args.inst:
inst = globals()[args.inst]
@@ -553,6 +555,6 @@ def remove(inst):
if args.remove:
remove(inst)
else:
- install(inst, args.version, args.repository)
+ install(inst, args.version, args.from_fork)
else:
sys.exit("Please specify the kind of installation")
diff --git a/openquake/calculators/event_based.py b/openquake/calculators/event_based.py
index 3379ffe86367..cecd418bc845 100644
--- a/openquake/calculators/event_based.py
+++ b/openquake/calculators/event_based.py
@@ -278,7 +278,11 @@ def event_based(proxies, cmaker, stations, dstore, monitor):
df = computer.compute_all(dstore, rmon, cmon, umon)
else: # regular GMFs
with mmon:
- mean_stds = cmaker.get_mean_stds([computer.ctx])
+ mean_stds = cmaker.get_mean_stds(
+ [computer.ctx], split_by_mag=False)
+ # avoid numba type error
+ computer.ctx.flags.writeable = True
+
df = computer.compute_all(mean_stds, max_iml, cmon, umon)
sig_eps.append(computer.build_sig_eps(se_dt))
dt = time.time() - t0
diff --git a/openquake/calculators/extract.py b/openquake/calculators/extract.py
index 68b6f040a722..9822fc4feb16 100644
--- a/openquake/calculators/extract.py
+++ b/openquake/calculators/extract.py
@@ -736,13 +736,12 @@ def extract_agg_curves(dstore, what):
"""
info = get_info(dstore)
qdic = parse(what, info)
- tagdict = qdic.copy()
- for a in ('k', 'rlzs', 'kind', 'loss_type', 'absolute'):
- del tagdict[a]
+
+ tagnames = dstore['oqparam'].aggregate_by[0]
k = qdic['k'] # rlz or stat index
- lts = tagdict.pop('lt') # loss type string
+ lts = qdic['lt']
[l] = qdic['loss_type'] # loss type index
- tagnames = sorted(tagdict)
+ tagdict = {tag: qdic[tag] for tag in tagnames}
if set(tagnames) != info['tagnames']:
raise ValueError('Expected tagnames=%s, got %s' %
(info['tagnames'], tagnames))
diff --git a/openquake/calculators/post_risk.py b/openquake/calculators/post_risk.py
index a00d34f7c054..e2d7280e2405 100644
--- a/openquake/calculators/post_risk.py
+++ b/openquake/calculators/post_risk.py
@@ -401,6 +401,7 @@ def build_reinsurance(dstore, num_events):
units=dstore['cost_calculator'].get_units(
oq.loss_types))
+
@base.calculators.add('post_risk')
class PostRiskCalculator(base.RiskCalculator):
"""
@@ -550,8 +551,7 @@ def post_execute(self, ok):
'%s != %s\nsee %s', lt, agg, avg, url)
# save agg_curves-stats
- if (self.R > 1 and 'aggcurves' in self.datastore and
- 'risk' in oq.calculation_mode):
+ if self.R > 1 and 'aggcurves' in self.datastore:
save_curve_stats(self.datastore)
diff --git a/openquake/calculators/tests/event_based_test.py b/openquake/calculators/tests/event_based_test.py
index 54e34c95e909..1f59d52bcde9 100644
--- a/openquake/calculators/tests/event_based_test.py
+++ b/openquake/calculators/tests/event_based_test.py
@@ -40,7 +40,7 @@
blocksize, case_1, case_2, case_3, case_4, case_5, case_6, case_7,
case_8, case_9, case_10, case_12, case_13, case_14, case_15, case_16,
case_17, case_18, case_19, case_20, case_21, case_22, case_23, case_24,
- case_25, case_26, case_27, case_28, case_29, case_30, src_mutex)
+ case_25, case_26, case_27, case_28, case_29, case_30, case_31, src_mutex)
from openquake.qa_tests_data.event_based.spatial_correlation import (
case_1 as sc1, case_2 as sc2, case_3 as sc3)
@@ -597,3 +597,9 @@ def test_30(self):
out = self.run_calc(case_30.__file__, 'job.ini', exports='csv')
[fname] = out['ruptures', 'csv']
self.assertEqualFiles('expected/ruptures.csv', fname, delta=1E-6)
+
+ def test_31(self):
+ # HM2018CorrelationModel with filtered site collection
+ self.run_calc(case_31.__file__, 'job.ini', exports='csv')
+ [f] = export(('avg_gmf', 'csv'), self.calc.datastore)
+ self.assertEqualFiles('expected/avg_gmf.csv', f)
diff --git a/openquake/hazardlib/calc/conditioned_gmfs.py b/openquake/hazardlib/calc/conditioned_gmfs.py
index 3a4084b684aa..2561355a85a4 100644
--- a/openquake/hazardlib/calc/conditioned_gmfs.py
+++ b/openquake/hazardlib/calc/conditioned_gmfs.py
@@ -229,6 +229,7 @@ def compute_all(
"""
:returns: (dict with fields eid, sid, gmv_X, ...), dt
"""
+ self.init_eid_rlz_sig_eps()
data = AccumDict(accum=[])
rng = numpy.random.default_rng(self.seed)
for g, (gsim, rlzs) in enumerate(self.cmaker.gsims.items()):
diff --git a/openquake/hazardlib/calc/gmf.py b/openquake/hazardlib/calc/gmf.py
index 42654756269e..7a87a8795d50 100644
--- a/openquake/hazardlib/calc/gmf.py
+++ b/openquake/hazardlib/calc/gmf.py
@@ -26,7 +26,7 @@
from openquake.baselib.general import AccumDict
from openquake.baselib.performance import Monitor, compile
from openquake.hazardlib.const import StdDev
-from openquake.hazardlib.source.rupture import get_eid_rlz
+from openquake.hazardlib.source.rupture import EBRupture, get_eid_rlz
from openquake.hazardlib.cross_correlation import NoCrossCorrelation
from openquake.hazardlib.gsim.base import ContextMaker, FarAwayRupture
from openquake.hazardlib.imt import from_string
@@ -82,7 +82,7 @@ def set_max_min(array, mean, max_iml, min_iml, mmi_index):
array[n, :, e] = 0
-@compile("uint32[:,:](uint32[:],uint32[:],uint32[:],uint32[:])")
+@compile("(uint32[:],uint32[:],uint32[:],uint32[:])")
def build_eid_sid_rlz(allrlzs, sids, eids, rlzs):
eid_sid_rlz = numpy.zeros((3, len(sids) * len(eids)), U32)
idx = 0
@@ -98,12 +98,12 @@ def build_eid_sid_rlz(allrlzs, sids, eids, rlzs):
class GmfComputer(object):
"""
- Given an earthquake rupture, the ground motion field computer computes
+ Given an earthquake rupture, the GmfComputer computes
ground shaking over a set of sites, by randomly sampling a ground
shaking intensity model.
:param rupture:
- Rupture to calculate ground motion fields radiated from.
+ EBRupture to calculate ground motion fields radiated from.
:param :class:`openquake.hazardlib.site.SiteCollection` sitecol:
a complete SiteCollection
@@ -131,10 +131,10 @@ class GmfComputer(object):
case no secondary perils need to be evaluated.
"""
# The GmfComputer is called from the OpenQuake Engine. In that case
- # the rupture is an higher level containing a
+ # the rupture is an EBRupture instance containing a
# :class:`openquake.hazardlib.source.rupture.Rupture` instance as an
# attribute. Then the `.compute(gsim, num_events, ms)` method is called and
- # a matrix of size (I, N, E) is returned, where I is the number of
+ # a matrix of size (M, N, E) is returned, where M is the number of
# IMTs, N the number of affected sites and E the number of events. The
# seed is extracted from the underlying rupture.
def __init__(self, rupture, sitecol, cmaker, correlation_model=None,
@@ -152,37 +152,27 @@ def __init__(self, rupture, sitecol, cmaker, correlation_model=None,
self.correlation_model = correlation_model
self.amplifier = amplifier
self.sec_perils = sec_perils
- # `rupture` is an EBRupture instance in the engine
- if hasattr(rupture, 'rupture'):
- self.ebrupture = rupture
- self.seed = rupture.seed
- rupture = rupture.rupture # the underlying rupture
- else: # in the hazardlib tests
- self.ebrupture = {'e0': 0, 'n_occ': 1, 'seed': rupture.seed}
- self.seed = rupture.seed
+ self.ebrupture = rupture
+ self.seed = rupture.seed
+ rupture = rupture.rupture # the underlying rupture
ctxs = list(cmaker.get_ctx_iter([rupture], sitecol))
if not ctxs:
raise FarAwayRupture
[self.ctx] = ctxs
+ self.N = len(self.ctx)
if correlation_model: # store the filtered sitecol
self.sites = sitecol.complete.filtered(self.ctx.sids)
self.cross_correl = cross_correl or NoCrossCorrelation(
cmaker.truncation_level)
self.gmv_fields = [f'gmv_{m}' for m in range(len(cmaker.imts))]
- self.init_eid_rlz_sig_eps()
def init_eid_rlz_sig_eps(self):
"""
Initialize the attributes eid, rlz, sig, eps with shapes E, E, EM, EM
"""
self.rlzs = numpy.concatenate(list(self.cmaker.gsims.values()))
- if isinstance(self.ebrupture, dict): # with keys e0, n_occ, seed
- dic = self.ebrupture
- else:
- dic = vars(self.ebrupture)
- eid, rlz = get_eid_rlz(dic, self.rlzs, self.cmaker.scenario)
- self.eid, self.rlz = eid, rlz
- self.N = len(self.ctx)
+ self.eid, self.rlz = get_eid_rlz(
+ vars(self.ebrupture), self.rlzs, self.cmaker.scenario)
self.E = E = len(self.eid)
self.M = M = len(self.gmv_fields)
self.sig = numpy.zeros((E, M), F32) # same for all events
@@ -218,6 +208,7 @@ def update(self, data, array, rlzs, mean_stds, max_iml=None):
if max_iml is None:
M = len(self.cmaker.imts)
max_iml = numpy.full(M, numpy.inf, float)
+
set_max_min(array, mean, max_iml, min_iml, mmi_index)
data['gmv'].append(array)
@@ -263,6 +254,7 @@ def compute_all(self, mean_stds, max_iml=None,
"""
:returns: DataFrame with fields eid, rlz, sid, gmv_X, ...
"""
+ self.init_eid_rlz_sig_eps()
rng = numpy.random.default_rng(self.seed)
data = AccumDict(accum=[])
for g, (gs, rlzs) in enumerate(self.cmaker.gsims.items()):
@@ -309,16 +301,15 @@ def compute(self, gsim, idxs, mean_stds, rng):
return result.transpose(1, 0, 2)
def _compute(self, mean_stds, m, imt, gsim, intra_eps, idxs):
- # sets self.sig
+ # sets self.sig, returns gmf
im = imt.string
+ mean, sig, tau, phi = mean_stds
if self.cmaker.truncation_level <= 1E-9:
# for truncation_level = 0 there is only mean, no stds
if self.correlation_model:
raise ValueError('truncation_level=0 requires '
'no correlation model')
- mean, _, _, _ = mean_stds
- gmf = exp(mean, im!='MMI')[:, None]
- gmf = gmf.repeat(len(intra_eps[0]), axis=1)
+ gmf = exp(mean, im!='MMI')[:, None].repeat(len(idxs), axis=1)
elif gsim.DEFINED_FOR_STANDARD_DEVIATION_TYPES == {StdDev.TOTAL}:
# If the GSIM provides only total standard deviation, we need
# to compute mean and total standard deviation at the sites
@@ -327,18 +318,14 @@ def _compute(self, mean_stds, m, imt, gsim, intra_eps, idxs):
if self.correlation_model:
raise CorrelationButNoInterIntraStdDevs(
self.correlation_model, gsim)
-
- mean, sig, _, _ = mean_stds
gmf = exp(mean[:, None] + sig[:, None] * intra_eps, im!='MMI')
self.sig[idxs, m] = numpy.nan
else:
- mean, sig, tau, phi = mean_stds
# the [:, None] is used to implement multiplication by row;
# for instance if a = [1 2], b = [[1 2] [3 4]] then
# a[:, None] * b = [[1 2] [6 8]] which is the expected result;
# otherwise one would get multiplication by column [[1 4] [3 8]]
intra_res = phi[:, None] * intra_eps # shape (N, E)
-
if self.correlation_model is not None:
intra_res = self.correlation_model.apply_correlation(
self.sites, imt, intra_res, phi)
@@ -383,7 +370,7 @@ def ground_motion_fields(rupture, sites, imts, gsim, truncation_level,
Float, number of standard deviations for truncation of the intensity
distribution
:param realizations:
- Integer number of GMF realizations to compute.
+ Integer number of GMF simulations to compute.
:param correlation_model:
Instance of correlation model object. See
:mod:`openquake.hazardlib.correlation`. Can be ``None``, in which case
@@ -394,18 +381,21 @@ def ground_motion_fields(rupture, sites, imts, gsim, truncation_level,
:returns:
Dictionary mapping intensity measure type objects (same
as in parameter ``imts``) to 2d numpy arrays of floats,
- representing different realizations of ground shaking intensity
+ representing different simulations of ground shaking intensity
for all sites in the collection. First dimension represents
- sites and second one is for realizations.
+ sites and second one is for simulations.
"""
cmaker = ContextMaker(rupture.tectonic_region_type, {gsim: U32([0])},
dict(truncation_level=truncation_level,
- imtls={str(imt): [1] for imt in imts}))
+ imtls={str(imt): numpy.array([0.])
+ for imt in imts}))
cmaker.scenario = True
- rupture.seed = seed
- gc = GmfComputer(rupture, sites, cmaker, correlation_model)
- gc.ebrupture['n_occ'] = realizations
- mean_stds = cmaker.get_mean_stds([gc.ctx])[:, 0]
+ ebr = EBRupture(
+ rupture, source_id=0, trt_smr=0, n_occ=realizations, id=0, e0=0)
+ ebr.seed = seed
+ gc = GmfComputer(ebr, sites, cmaker, correlation_model)
+ mean_stds = cmaker.get_mean_stds([gc.ctx])[:, 0] # shape (4, M, N)
+ gc.init_eid_rlz_sig_eps()
res = gc.compute(gsim, U32([0]), mean_stds,
numpy.random.default_rng(seed))
return {imt: res[:, m] for m, imt in enumerate(gc.imts)}
diff --git a/openquake/hazardlib/contexts.py b/openquake/hazardlib/contexts.py
index 0954f2c71b27..a57d111b8cf3 100644
--- a/openquake/hazardlib/contexts.py
+++ b/openquake/hazardlib/contexts.py
@@ -435,7 +435,7 @@ def _init2(self, param, extraparams):
try:
self.min_iml = param['min_iml']
except KeyError:
- self.min_iml = [0. for imt in self.imtls]
+ self.min_iml = numpy.array([0. for imt in self.imtls])
self.reqv = param.get('reqv')
if self.reqv is not None:
self.REQUIRES_DISTANCES.add('repi')
diff --git a/openquake/hazardlib/correlation.py b/openquake/hazardlib/correlation.py
index 0668ab4f08db..fc3a9b357389 100644
--- a/openquake/hazardlib/correlation.py
+++ b/openquake/hazardlib/correlation.py
@@ -40,7 +40,7 @@ def apply_correlation(self, sites, imt, residuals, stddev_intra=0):
represents sites (the length as ``sites`` parameter) and
second one represents different realizations (samples).
:param stddev_intra:
- Intra-event standard deviation array. Note that different sites do
+ Intra-event standard deviation array (phi). Different sites do
not necessarily have the same intra-event standard deviation.
:returns:
Array of the same structure and semantics as ``residuals``
@@ -171,17 +171,14 @@ def _get_correlation_matrix(self, sites, imt):
def apply_correlation(self, sites, imt, residuals, stddev_intra):
"""
- Apply correlation to randomly sampled residuals.
-
- See Parent function
+ Apply correlation to randomly sampled residuals
"""
- # stddev_intra is repeated if there is only one value
- if len(stddev_intra) == 1:
- stddev_intra = numpy.full(len(sites.complete), stddev_intra)
- # Reshape 'stddev_intra' if needed
- stddev_intra = stddev_intra.squeeze()
- if not stddev_intra.shape:
- stddev_intra = stddev_intra[None]
+ # TODO: the case of filtered sites is probably managed incorrectly
+ # NB: this is SLOW and we cannot use the cache as in JB2009 because
+ # we are not using the complete site collection
+ nsites = len(sites)
+ assert len(residuals) == len(stddev_intra) == nsites
+ D = numpy.diag(stddev_intra) # phi as a diagonal matrix
if self.uncertainty_multiplier == 0: # No uncertainty
@@ -190,37 +187,30 @@ def apply_correlation(self, sites, imt, residuals, stddev_intra):
# normalized, sampled from a standard normal distribution.
# For this, every row of 'residuals' (every site) is divided by its
# corresponding standard deviation element.
- residuals_norm = residuals / stddev_intra[sites.sids, None]
-
- # Lower diagonal of the Cholesky decomposition from/to cache
- try:
- cormaLow = self.cache[imt]
- except KeyError:
- # Note that instead of computing the whole correlation matrix
- # corresponding to sites.complete, here we compute only the
- # correlation matrix corresponding to sites.
- cormaLow = numpy.linalg.cholesky(
- numpy.diag(stddev_intra[sites.sids]) @
- self._get_correlation_matrix(sites, imt) @
- numpy.diag(stddev_intra[sites.sids]))
- self.cache[imt] = cormaLow
+ residuals_norm = residuals / stddev_intra[:, None]
+
+ # Lower diagonal of the Cholesky decomposition
+ # Note that instead of computing the whole correlation matrix
+ # corresponding to sites.complete, here we compute only the
+ # correlation matrix corresponding to sites
+ cormaLow = numpy.linalg.cholesky(
+ D @ self._get_correlation_matrix(sites, imt) @ D)
# Apply correlation
- return numpy.dot(cormaLow, residuals_norm)
+ return cormaLow @ residuals_norm
else: # Variability (uncertainty) is included
- nsim = len(residuals[1])
- nsites = len(residuals)
+ nsim = residuals.shape[1]
# Re-sample all the residuals
residuals_correlated = residuals * 0
for isim in range(0, nsim):
+ # FIXME: the seed is not set!
corma = self._get_correlation_matrix(sites, imt)
- cov = (numpy.diag(stddev_intra[sites.sids]) @ corma @
- numpy.diag(stddev_intra[sites.sids]))
+            # NB: corma is different at each iteration since it contains randomness
residuals_correlated[0:, isim] = (
numpy.random.multivariate_normal(
- numpy.zeros(nsites), cov, 1))
+ numpy.zeros(nsites), D @ corma @ D, 1))
return residuals_correlated
diff --git a/openquake/hazardlib/gsim/kuehn_2020.py b/openquake/hazardlib/gsim/kuehn_2020.py
index 0775a38de2bd..353aced046f4 100644
--- a/openquake/hazardlib/gsim/kuehn_2020.py
+++ b/openquake/hazardlib/gsim/kuehn_2020.py
@@ -37,7 +37,7 @@
import numpy as np
import os
import h5py
-from scipy.interpolate import interp1d
+from scipy.interpolate import RegularGridInterpolator
from openquake.hazardlib.gsim.base import GMPE, CoeffsTable, add_alias
from openquake.hazardlib import const
@@ -531,41 +531,60 @@ def get_sigma_mu_adjustment(model, imt, mag, rrup):
sigma_mu for the scenarios (numpy.ndarray)
"""
if not model:
- return 0.0
+ return np.zeros_like(mag)
+
+ # Get the correct sigma_mu model
+ is_SA = imt.string not in "PGA PGV"
+ sigma_mu_model = model["SA"] if is_SA else model[imt.string]
+
+ model_m = model["M"]
+ model_r = model["R"]
+
+ # Extend the sigma_mu_model as needed
+ # Prevents having to deal with values
+ # outside the model range manually
+ if np.any(mag > model["M"][-1]):
+ sigma_mu_model = np.concatenate(
+ (sigma_mu_model, sigma_mu_model[-1, :][np.newaxis, :]), axis=0)
+ model_m = np.concatenate((model_m, [mag.max()]), axis=0)
+ if np.any(mag < model["M"][0]):
+ sigma_mu_model = np.concatenate(
+ (sigma_mu_model[0, :][np.newaxis, :], sigma_mu_model), axis=0)
+ model_m = np.concatenate(([mag.min()], model_m), axis=0)
+ if np.any(rrup > model["R"][-1]):
+ sigma_mu_model = np.concatenate(
+ (sigma_mu_model, sigma_mu_model[:, -1][:, np.newaxis]), axis=1)
+ model_r = np.concatenate((model_r, [rrup.max()]), axis=0)
+ if np.any(rrup < model["R"][0]):
+ sigma_mu_model = np.concatenate(
+ (sigma_mu_model[:, 0][:, np.newaxis], sigma_mu_model), axis=1)
+ model_r = np.concatenate(([rrup.min()], model_r), axis=0)
+
if imt.string in "PGA PGV":
- # PGA and PGV are 2D arrays of dimension [nmags, ndists]
- sigma_mu = model[imt.string]
- if mag <= model["M"][0]:
- sigma_mu_m = sigma_mu[0, :]
- elif mag >= model["M"][-1]:
- sigma_mu_m = sigma_mu[-1, :]
- else:
- intpl1 = interp1d(model["M"], sigma_mu, axis=0)
- sigma_mu_m = intpl1(mag)
- # Linear interpolation with distance
- intpl2 = interp1d(model["R"], sigma_mu_m, bounds_error=False,
- fill_value=(sigma_mu_m[0], sigma_mu_m[-1]))
- return intpl2(rrup)
- # In the case of SA the array is of dimension [nmags, ndists, nperiods]
- # Get values for given magnitude
- if mag <= model["M"][0]:
- sigma_mu_m = model["SA"][0, :, :]
- elif mag >= model["M"][-1]:
- sigma_mu_m = model["SA"][-1, :, :]
+ # Linear interpolation
+ interp = RegularGridInterpolator(
+ (model_m, model_r), sigma_mu_model, bounds_error=True,)
+ sigma_mu = interp(np.stack((mag, rrup), axis=1))
else:
- intpl1 = interp1d(model["M"], model["SA"], axis=0)
- sigma_mu_m = intpl1(mag)
- # Get values for period - N.B. ln T, linear sigma mu interpolation
- if imt.period <= model["periods"][0]:
- sigma_mu_t = sigma_mu_m[:, 0]
- elif imt.period >= model["periods"][-1]:
- sigma_mu_t = sigma_mu_m[:, -1]
- else:
- intpl2 = interp1d(np.log(model["periods"]), sigma_mu_m, axis=1)
- sigma_mu_t = intpl2(np.log(imt.period))
- intpl3 = interp1d(model["R"], sigma_mu_t, bounds_error=False,
- fill_value=(sigma_mu_t[0], sigma_mu_t[-1]))
- return intpl3(rrup)
+ model_t = model["periods"]
+
+ # Extend for extreme periods as needed
+ if np.any(imt.period > model["periods"][-1]):
+ sigma_mu_model = np.concatenate(
+ (sigma_mu_model, sigma_mu_model[:, :, -1][:, :, np.newaxis]), axis=2)
+ model_t = np.concatenate((model_t, [imt.period.max()]), axis=0)
+ if np.any(imt.period < model["periods"][0]):
+ sigma_mu_model = np.concatenate(
+ (sigma_mu_model[:, :, 0][:, :, np.newaxis], sigma_mu_model), axis=2)
+ model_t = np.concatenate(([imt.period.min()], model_t), axis=0)
+
+ # Linear interpolation
+ interp = RegularGridInterpolator(
+ (model_m, model_r, np.log(model_t)), sigma_mu_model, bounds_error=True,)
+ sigma_mu = interp(
+ np.stack((mag, rrup, np.ones_like(mag) * np.log(imt.period)), axis=1))
+
+ return sigma_mu
class KuehnEtAl2020SInter(GMPE):
@@ -726,9 +745,8 @@ def compute(self, ctx: np.recarray, imts, mean, sig, tau, phi):
ctx, pga1100)
# Apply the sigma mu adjustment if necessary
if self.sigma_mu_epsilon:
- [mag] = np.unique(np.round(ctx.mag, 2))
sigma_mu_adjust = get_sigma_mu_adjustment(
- self.sigma_mu_model, imt, mag, ctx.rrup)
+ self.sigma_mu_model, imt, ctx.mag, ctx.rrup)
mean[m] += self.sigma_mu_epsilon * sigma_mu_adjust
# Get standard deviations
tau[m] = C["tau"]
diff --git a/openquake/qa_tests_data/event_based/case_31/__init__.py b/openquake/qa_tests_data/event_based/case_31/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/openquake/qa_tests_data/event_based/case_31/expected/avg_gmf.csv b/openquake/qa_tests_data/event_based/case_31/expected/avg_gmf.csv
new file mode 100644
index 000000000000..27a5b9cb129f
--- /dev/null
+++ b/openquake/qa_tests_data/event_based/case_31/expected/avg_gmf.csv
@@ -0,0 +1,102 @@
+#,,,,"generated_by='OpenQuake engine 3.18.0-git6ee84744a4', start_date='2023-10-18T08:46:31', checksum=193965386"
+site_id,lon,lat,gmv_SA(0.3),gsd_SA(0.3)
+0,-2.00061E+00,2.02920E-01,1.66161E-08,3.83684E+03
+1,-2.00061E+00,-2.46750E-01,7.08940E-08,6.33337E+03
+2,-2.00059E+00,6.52580E-01,1.04708E-10,2.66500E+00
+3,-2.00058E+00,-6.96410E-01,2.63923E-08,4.25542E+03
+4,-2.00053E+00,1.10224E+00,0.00000E+00,0.00000E+00
+5,-2.00053E+00,-1.14607E+00,1.47151E-09,6.14167E+02
+6,-2.00045E+00,1.55190E+00,0.00000E+00,0.00000E+00
+7,-2.00044E+00,-1.59573E+00,1.74891E-10,2.31377E+01
+8,-2.00034E+00,2.00156E+00,0.00000E+00,0.00000E+00
+9,-2.00033E+00,-2.04539E+00,0.00000E+00,0.00000E+00
+10,-1.55095E+00,2.02920E-01,2.08466E-03,1.91645E+02
+11,-1.55095E+00,-2.46750E-01,1.02306E-02,3.25967E+00
+12,-1.55090E+00,6.52580E-01,2.29330E-05,4.75726E+03
+13,-1.55089E+00,-6.96410E-01,9.26077E-03,6.89180E+00
+14,-1.55079E+00,1.10224E+00,1.96522E-10,3.25687E+01
+15,-1.55077E+00,-1.14607E+00,2.11505E-04,1.71172E+03
+16,-1.55062E+00,1.55190E+00,0.00000E+00,0.00000E+00
+17,-1.55061E+00,-1.59573E+00,1.23530E-07,7.75930E+03
+18,-1.55041E+00,2.00156E+00,0.00000E+00,0.00000E+00
+19,-1.55038E+00,-2.04539E+00,2.24175E-10,4.33195E+01
+20,-1.10129E+00,2.02920E-01,1.74861E-02,2.27762E+00
+21,-1.10128E+00,-2.46750E-01,2.21503E-02,2.58062E+00
+22,-1.10121E+00,6.52580E-01,5.44196E-03,3.57584E+01
+23,-1.10119E+00,-6.96410E-01,1.89384E-02,2.64911E+00
+24,-1.10104E+00,1.10224E+00,1.15034E-05,6.17678E+03
+25,-1.10102E+00,-1.14607E+00,1.30415E-02,2.48723E+00
+26,-1.10080E+00,1.55190E+00,1.06558E-10,3.01743E+00
+27,-1.10077E+00,-1.59573E+00,5.01971E-04,7.97006E+02
+28,-1.10047E+00,2.00156E+00,0.00000E+00,0.00000E+00
+29,-1.10043E+00,-2.04539E+00,6.65613E-08,6.19559E+03
+30,-6.51620E-01,2.02920E-01,2.94546E-02,2.31100E+00
+31,-6.51620E-01,-2.46750E-01,5.01483E-02,2.55007E+00
+32,-6.51520E-01,6.52580E-01,1.38517E-02,2.01028E+00
+33,-6.51500E-01,-6.96410E-01,3.85551E-02,2.56882E+00
+34,-6.51300E-01,1.10224E+00,1.14091E-03,3.25311E+02
+35,-6.51270E-01,-1.14607E+00,2.22350E-02,2.27361E+00
+36,-6.50970E-01,1.55190E+00,1.32426E-08,3.15846E+03
+37,-6.50940E-01,-1.59573E+00,1.02663E-02,5.49126E+00
+38,-6.50530E-01,2.00156E+00,0.00000E+00,0.00000E+00
+39,-6.50490E-01,-2.04539E+00,4.43623E-06,8.06900E+03
+40,-2.01960E-01,2.02920E-01,4.44669E-02,2.49045E+00
+41,-2.01950E-01,-2.46750E-01,1.36568E-01,2.77572E+00
+42,-2.01830E-01,6.52580E-01,1.96015E-02,2.24945E+00
+43,-2.01810E-01,-6.96410E-01,8.92375E-02,2.96026E+00
+44,-2.01560E-01,1.10224E+00,2.47168E-03,1.38391E+02
+45,-2.01520E-01,-1.14607E+00,2.99529E-02,2.31927E+00
+46,-2.01150E-01,1.55190E+00,6.95609E-08,5.94987E+03
+47,-2.01100E-01,-1.59573E+00,1.28681E-02,2.23196E+00
+48,-2.00600E-01,2.00156E+00,0.00000E+00,0.00000E+00
+49,-2.00540E-01,-2.04539E+00,4.95191E-05,3.73854E+03
+50,2.47710E-01,2.02920E-01,3.80965E-02,2.53644E+00
+51,2.47710E-01,-2.46750E-01,6.51902E-02,2.57147E+00
+52,2.47860E-01,6.52580E-01,1.66356E-02,2.44861E+00
+53,2.47890E-01,-6.96410E-01,5.34652E-02,2.59047E+00
+54,2.48190E-01,1.10224E+00,1.13762E-03,4.23517E+02
+55,2.48230E-01,-1.14607E+00,2.22328E-02,2.44921E+00
+56,2.48680E-01,1.55190E+00,7.28563E-08,6.31607E+03
+57,2.48740E-01,-1.59573E+00,1.19628E-02,2.29466E+00
+58,2.49340E-01,2.00156E+00,0.00000E+00,0.00000E+00
+59,2.49410E-01,-2.04539E+00,1.74536E-05,5.88702E+03
+60,6.97370E-01,2.02920E-01,1.95185E-02,2.46287E+00
+61,6.97380E-01,-2.46750E-01,2.32399E-02,2.24467E+00
+62,6.97550E-01,6.52580E-01,1.10836E-02,2.34874E+00
+63,6.97580E-01,-6.96410E-01,2.21145E-02,2.27606E+00
+64,6.97930E-01,1.10224E+00,1.31575E-05,6.37823E+03
+65,6.97980E-01,-1.14607E+00,1.40843E-02,2.33929E+00
+66,6.98500E-01,1.55190E+00,1.05567E-09,4.24703E+02
+67,6.98570E-01,-1.59573E+00,8.09910E-03,5.31183E+00
+68,6.99270E-01,2.00156E+00,0.00000E+00,0.00000E+00
+69,6.99360E-01,-2.04539E+00,3.81603E-08,4.76493E+03
+70,1.14703E+00,2.02920E-01,6.85712E-03,1.62976E+01
+71,1.14704E+00,-2.46750E-01,1.16328E-02,2.20476E+00
+72,1.14724E+00,6.52580E-01,2.20384E-04,1.60423E+03
+73,1.14728E+00,-6.96410E-01,1.17465E-02,2.27644E+00
+74,1.14768E+00,1.10224E+00,4.15107E-08,4.70646E+03
+75,1.14773E+00,-1.14607E+00,1.03014E-02,2.08519E+00
+76,1.14833E+00,1.55190E+00,1.12186E-10,4.10427E+00
+77,1.14841E+00,-1.59573E+00,7.78538E-07,8.60038E+03
+78,1.14921E+00,2.00156E+00,0.00000E+00,0.00000E+00
+79,1.14930E+00,-2.04539E+00,5.71386E-10,2.13829E+02
+80,1.59670E+00,2.02920E-01,1.52223E-06,8.73191E+03
+81,1.59671E+00,-2.46750E-01,2.63000E-05,4.79965E+03
+82,1.59693E+00,6.52580E-01,3.35592E-09,1.21846E+03
+83,1.59697E+00,-6.96410E-01,9.37763E-06,6.24420E+03
+84,1.59742E+00,1.10224E+00,0.00000E+00,0.00000E+00
+85,1.59748E+00,-1.14607E+00,1.27214E-08,3.10437E+03
+86,1.59816E+00,1.55190E+00,0.00000E+00,0.00000E+00
+87,1.59824E+00,-1.59573E+00,5.52216E-10,1.99022E+02
+88,1.59914E+00,2.00156E+00,0.00000E+00,0.00000E+00
+89,1.59925E+00,-2.04539E+00,0.00000E+00,0.00000E+00
+90,2.04636E+00,2.02920E-01,0.00000E+00,0.00000E+00
+91,2.04637E+00,-2.46750E-01,0.00000E+00,0.00000E+00
+92,2.04662E+00,6.52580E-01,0.00000E+00,0.00000E+00
+93,2.04666E+00,-6.96410E-01,1.11555E-10,3.83227E+00
+94,2.04716E+00,1.10224E+00,0.00000E+00,0.00000E+00
+95,2.04723E+00,-1.14607E+00,0.00000E+00,0.00000E+00
+96,2.04798E+00,1.55190E+00,0.00000E+00,0.00000E+00
+97,2.04808E+00,-1.59573E+00,0.00000E+00,0.00000E+00
+98,2.04908E+00,2.00156E+00,0.00000E+00,0.00000E+00
+99,2.04920E+00,-2.04539E+00,0.00000E+00,0.00000E+00
diff --git a/openquake/qa_tests_data/event_based/case_31/gmpe_logic_tree.xml b/openquake/qa_tests_data/event_based/case_31/gmpe_logic_tree.xml
new file mode 100644
index 000000000000..10bdddc339ff
--- /dev/null
+++ b/openquake/qa_tests_data/event_based/case_31/gmpe_logic_tree.xml
@@ -0,0 +1,37 @@
+
+
+
+
+
+
+
+
+ BooreEtAl2014
+ 0.6
+
+
+
+ CampbellBozorgnia2014
+ 0.4
+
+
+
+
+
+
+ BooreEtAl2014
+ 0.2
+
+
+
+ CampbellBozorgnia2014
+ 0.8
+
+
+
+
+
+
diff --git a/openquake/qa_tests_data/event_based/case_31/job.ini b/openquake/qa_tests_data/event_based/case_31/job.ini
new file mode 100644
index 000000000000..4ab4de3ae1dc
--- /dev/null
+++ b/openquake/qa_tests_data/event_based/case_31/job.ini
@@ -0,0 +1,37 @@
+[general]
+
+description = HM2018 with a filtered site collection
+calculation_mode = event_based
+ses_seed = 24
+
+[geometry]
+region = -2.0 -2.0, -2.0 2.0, 2.0 2.0, 2.0 -2.0
+region_grid_spacing = 50.0
+
+[logic_tree]
+number_of_logic_tree_samples = 0
+
+[erf]
+rupture_mesh_spacing = 5
+width_of_mfd_bin = 1.0
+area_source_discretization = 10.0
+
+[site_params]
+reference_vs30_type = measured
+reference_vs30_value = 600.0
+reference_depth_to_2pt5km_per_sec = 5.0
+reference_depth_to_1pt0km_per_sec = 100.0
+
+[calculation]
+source_model_logic_tree_file = source_model_logic_tree.xml
+gsim_logic_tree_file = gmpe_logic_tree.xml
+investigation_time = 50.0
+intensity_measure_types = SA(0.3)
+truncation_level = 3
+maximum_distance = 200.0
+minimum_magnitude = 5.5
+
+[event_based_params]
+ses_per_logic_tree_path = 1
+ground_motion_correlation_model = HM2018
+ground_motion_correlation_params = {'uncertainty_multiplier': 0}
diff --git a/openquake/qa_tests_data/event_based/case_31/source_model.xml b/openquake/qa_tests_data/event_based/case_31/source_model.xml
new file mode 100644
index 000000000000..17faee8c0aaa
--- /dev/null
+++ b/openquake/qa_tests_data/event_based/case_31/source_model.xml
@@ -0,0 +1,114 @@
+
+
+
+
+
+
+
+
+
+
+ -5.0000000E-01 -5.0000000E-01 -3.0000000E-01 -1.0000000E-01 1.0000000E-01 2.0000000E-01 3.0000000E-01 -8.0000000E-01
+
+
+
+
+
+ 0.0000000E+00
+
+
+ 1.0000000E+01
+
+
+
+ WC1994
+
+
+ 1.0000000E+00
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ -6.0000000E-01 -6.0000000E-01 -4.0000000E-01 -2.0000000E-01 0.0000000E-01 1.0000000E-01 2.0000000E-01 -9.0000000E-01
+
+
+
+
+
+ 0.0000000E+00
+
+
+ 1.0000000E+01
+
+
+
+ WC1994
+
+
+ 1.0000000E+00
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/openquake/qa_tests_data/event_based/case_31/source_model_logic_tree.xml b/openquake/qa_tests_data/event_based/case_31/source_model_logic_tree.xml
new file mode 100644
index 000000000000..a489cd3c6d9d
--- /dev/null
+++ b/openquake/qa_tests_data/event_based/case_31/source_model_logic_tree.xml
@@ -0,0 +1,13 @@
+
+
+
+
+
+ source_model.xml
+ 1.0
+
+
+
+
diff --git a/openquake/sep/classes.py b/openquake/sep/classes.py
index d01b03f04656..27935198f450 100644
--- a/openquake/sep/classes.py
+++ b/openquake/sep/classes.py
@@ -336,11 +336,8 @@ def prepare(self, sites):
def compute(self, mag, imt_gmf, sites):
out = []
- imts = dict(imt_gmf)
- assert "PGA" in imts
- assert "PGV" in imts
- # pga = None
- # pgv = None
+ pga = None
+ pgv = None
for im, gmf in imt_gmf:
if im.string == 'PGV':
pgv = gmf
@@ -381,11 +378,8 @@ def prepare(self, sites):
def compute(self, mag, imt_gmf, sites):
out = []
- imts = dict(imt_gmf)
- assert "PGA" in imts
- assert "PGV" in imts
- # pga = None
- # pgv = None
+ pga = None
+ pgv = None
for im, gmf in imt_gmf:
if im.string == 'PGV':
pgv = gmf
@@ -463,7 +457,7 @@ def compute(self, mag, imt_gmf, sites):
for im, gmf in imt_gmf:
if im.string == 'PGV':
prob_liq, out_class = akhlagi_etal_2021_model_b(
- pgv=gmf, vs30_coeff=sites.vs30_coeff, dc=sites.dc,
+ pgv=gmf, vs30=sites.vs30, dc=sites.dc,
dr=sites.dr, zwb=sites.zwb)
out.append(prob_liq)
out.append(out_class)
diff --git a/openquake/sep/liquefaction/liquefaction.py b/openquake/sep/liquefaction/liquefaction.py
index 9898958e5936..04bcdd117425 100644
--- a/openquake/sep/liquefaction/liquefaction.py
+++ b/openquake/sep/liquefaction/liquefaction.py
@@ -473,9 +473,9 @@ def akhlagi_etal_2021_model_a(
:param tri:
Topographic roughness index, unitless
:param dc:
- Distance to the nearest coast, measured in m
+ Distance to the nearest coast, measured in km
:param dr:
- Distance to the nearest river, measured in m
+ Distance to the nearest river, measured in km
:param zwb:
Elevation above the nearest water body, measured in m
@@ -484,6 +484,7 @@ def akhlagi_etal_2021_model_a(
out_class: Binary output 0 or 1, i.e., liquefaction nonoccurrence
or liquefaction occurrence occurrence.
"""
+
Xg = (pgv_coeff * np.log(pgv) + tri_coeff * np.sqrt(tri)
+ dc_coeff * np.log(dc + 1) + dr_coeff * np.log(dr + 1)
+ zwb_coeff * np.sqrt(zwb) + intercept)
@@ -525,7 +526,7 @@ def akhlagi_etal_2021_model_b(
Peak Ground Velocity, measured in cm/s
:param vs30:
Shear-wave velocity averaged over the upper 30 m of the earth at the
- site, measured in cm/s
+ site, measured in m/s
:param dc:
Distance to the nearest coast, measured in m
:param dr:
@@ -538,6 +539,7 @@ def akhlagi_etal_2021_model_b(
out_class: Binary output 0 or 1, i.e., liquefaction nonoccurrence
or liquefaction occurrence occurrence.
"""
+
Xg = (pgv_coeff * np.log(pgv) + vs30_coeff * np.log(vs30)
+ dc_coeff * np.log(dc + 1) + dr_coeff * np.log(dr + 1)
+ zwb_coeff * np.sqrt(zwb) + intercept)
diff --git a/openquake/sep/tests/test_sep_suite_2.py b/openquake/sep/tests/test_sep_suite_2.py
index 89d35abd9e46..7146b14154e6 100644
--- a/openquake/sep/tests/test_sep_suite_2.py
+++ b/openquake/sep/tests/test_sep_suite_2.py
@@ -242,8 +242,9 @@ def test_akhlagi_2021_model_a(self):
pgv=self.pgv, tri=self.sites["tri"], dc=self.sites["dc"], dr=self.sites["dr"],
zwb=self.sites["zwb"])
- zlp = np.array([0.949740005, 0.660621806, 0.982407491, 0.972949802, 0.989203293,
- 0.992850986, 1.58769E-05, 0.526810919, 0.988698807, 0.908844299])
+ zlp = np.array([0.949740, 0.660622, 0.982408, 0.972950, 0.989203,
+ 0.992851, 0.000016, 0.526811, 0.988699, 0.908844
+])
clq = np.array([1, 1, 1, 1, 1, 1, 0, 1, 1, 1])
@@ -255,8 +256,8 @@ def test_akhlagi_2021_b(self):
pgv=self.pgv, vs30=self.sites["vs30"], dc=self.sites["dc"], dr=self.sites["dr"],
zwb=self.sites["zwb"])
- zlp = np.array([0.973289334, 0.974526031, 0.988384878, 0.988594348, 0.990864928,
- 0.992975958, 0.991752215, 0.989183352, 0.992783854, 0.990591401])
+ zlp = np.array([0.973289, 0.974526, 0.988385, 0.988594, 0.990865, 0.992976,
+ 0.991752, 0.989183, 0.992784, 0.990591])
clq = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1])